From 67b7217191411814e3592d69a80ed3216e17619b Mon Sep 17 00:00:00 2001
From: Uddeshya Tyagi
Date: Tue, 15 Sep 2020 12:20:08 +0530
Subject: [PATCH] Column names added

---
 demo.py | 15 +++++++++------
 1 file changed, 9 insertions(+), 6 deletions(-)

diff --git a/demo.py b/demo.py
index 27c827b..c250a42 100644
--- a/demo.py
+++ b/demo.py
@@ -1,8 +1,8 @@
 #The optimal values of m and b can be actually calculated with way less effort than doing a linear regression.
 #this is just to demonstrate gradient descent
-from numpy import *
-
+import numpy as np
+import pandas as pd
 
 # y = mx + b
 # m is slope, b is y-intercept
 def compute_error_for_line_given_points(b, m, points):
@@ -34,15 +34,18 @@ def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_i
     return [b, m]
 
 def run():
-    points = genfromtxt("data.csv", delimiter=",")
+    points = np.genfromtxt("data.csv", delimiter=",")
+    # Attach the requested column names via a DataFrame, then convert back to
+    # an ndarray so the numeric points[i, j] indexing used below keeps working.
+    points = pd.DataFrame(points, columns=['column1', 'column2']).values
     learning_rate = 0.0001
     initial_b = 0 # initial y-intercept guess
     initial_m = 0 # initial slope guess
     num_iterations = 1000
-    print "Starting gradient descent at b = {0}, m = {1}, error = {2}".format(initial_b, initial_m, compute_error_for_line_given_points(initial_b, initial_m, points))
-    print "Running..."
+    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(initial_b, initial_m, compute_error_for_line_given_points(initial_b, initial_m, points)))
+    print("Running...")
     [b, m] = gradient_descent_runner(points, initial_b, initial_m, learning_rate, num_iterations)
-    print "After {0} iterations b = {1}, m = {2}, error = {3}".format(num_iterations, b, m, compute_error_for_line_given_points(b, m, points))
+    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(num_iterations, b, m, compute_error_for_line_given_points(b, m, points)))
 
 if __name__ == '__main__':
     run()