Linear regression

Let’s start with a question

A reasonable model assumes y is a linear function of x: y = ax + b

⬇️⬇️⬇️⬇️⬇️⬇️

⬆️⬆️⬆️⬆️ The goal is to find the parameters a and b that minimize the loss function

Ok, so we can go over the least squares derivation one more time. Linear regression is part of the mathematics required for artificial intelligence — on its own it is simply a mathematical method. So how does it relate to artificial intelligence and machine learning?

Gradient descent method

One more question

To understand the idea behind gradient descent:

It’s a cycle

Starting from a point, compute the slope (gradient) there, move a step of a certain size in the descending direction, then repeat at the next point — until the process converges.

Machine learning to solve problems

So let’s look at the basic flow of machine learning problems

The first question

Single-factor housing price forecast

# Task 1: single-factor housing-price prediction with linear regression.
# Reconstructed from a garbled listing — column names 'area'/'price' are
# inferred from context; TODO confirm against task1_data.csv.
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score

# Load the data and take a first look.
data = pd.read_csv('task1_data.csv')
print(data.head())
print(type(data))

# Assign x (house area) and y (price).
x = data.loc[:, 'area']
y = data.loc[:, 'price']
print(x.head())

# Visualize the raw data.
fig1 = plt.figure()
plt.scatter(x, y)
plt.show()

# sklearn expects 2-D column vectors: reshape (n,) -> (n, 1).
x = np.array(x).reshape(-1, 1)
y = np.array(y).reshape(-1, 1)
print(type(x), x.shape, y.shape)

# Create and train the linear regression model.
model = LinearRegression()
print(model)
model.fit(x, y)

# Extract the fitted coefficients: y = a*x + b.
a = model.coef_
b = model.intercept_
print(a, b)

# Predictions on the training data; manual a*x + b must agree with
# model.predict, which is what the comparison print demonstrates.
y_predict = model.predict(x)
print(type(y_predict))
y_predict2 = a * x + b
print(y_predict2 == y_predict)

# Predict the price of a new house with area = 100.
x_test = np.array([100]).reshape(-1, 1)
y_test_predict = model.predict(x_test)
print(y_test_predict)

# Plot the data and the fitted line.
fig2 = plt.figure()
plt.scatter(x, y, label='data')
plt.plot(x, y_predict, 'r', label='fit')
plt.xlabel('area(x)')
plt.ylabel('price(y)')
plt.legend()
plt.show()

# Model evaluation: mean squared error and R^2 score.
MSE = mean_squared_error(y, y_predict)
R2 = r2_score(y, y_predict)
print(MSE)
print(R2)

The second question

Multi-factor housing price forecast

# Task 2: multi-factor housing-price prediction with linear regression.
# Reconstructed from a garbled listing — column names ('area', 'income',
# 'house_age', 'price') are inferred from the plot titles; TODO confirm
# against task2_data.csv.
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, r2_score

# Load the data.
data = pd.read_csv('task2_data.csv')
print(data.head())

# Visualize price against each candidate factor.
fig = plt.figure(figsize=(20, 5))
fig1 = plt.subplot(131)
plt.scatter(data.loc[:, 'area'], data.loc[:, 'price'])
plt.title('Price VS Size')
fig2 = plt.subplot(132)
plt.scatter(data.loc[:, 'income'], data.loc[:, 'price'])
plt.title('Price VS Income')
fig3 = plt.subplot(133)
plt.scatter(data.loc[:, 'house_age'], data.loc[:, 'price'])
plt.title('Price VS House_age')
plt.show()

# --- Single-factor baseline: price vs area only ---
x = np.array(data.loc[:, 'area']).reshape(-1, 1)
y = np.array(data.loc[:, 'price']).reshape(-1, 1)
print(x.shape, y.shape)

model = LinearRegression()
model.fit(x, y)
a = model.coef_
b = model.intercept_
print(a, b)

y_predict = model.predict(x)
print(y_predict)

# Plot the single-factor fit.
plt.figure()
plt.scatter(x, y)
plt.plot(x, y_predict, 'r')
plt.show()

# Evaluate the single-factor model.
MSE = mean_squared_error(y, y_predict)
R2 = r2_score(y, y_predict)
print(MSE)
print(R2)

# --- Multi-factor model: every column except the target 'price' ---
x_multi = data.drop(['price'], axis=1)
print(x_multi.head())
print(x_multi.shape)

model_multi = LinearRegression()
model_multi.fit(x_multi, y)
y_predict_multi = model_multi.predict(x_multi)

# Evaluate the multi-factor model; R^2 should improve over the baseline.
MSE_multi = mean_squared_error(y, y_predict_multi)
R2_multi = r2_score(y, y_predict_multi)
print(MSE_multi)
print(R2_multi)

# Predicted vs real price, multi-factor model.
fig5 = plt.figure()
plt.scatter(y, y_predict_multi)
plt.xlabel('real price(y)')
plt.ylabel('predicted price(x)')
plt.show()

# Predicted vs real price, single-factor model, for comparison.
fig6 = plt.figure()
plt.scatter(y, y_predict)
plt.xlabel('real price(y)')
plt.ylabel('predicted price(x)')
plt.show()

# Predict a new sample. The original listing only preserved the values
# "70000, 5"; the first feature value was lost in the garbled source —
# TODO restore the real sample (order must match x_multi's columns).
x_test = np.array([100, 70000, 5]).reshape(1, -1)
print(x_test)
print(x_test.shape)
y_test_predict = model_multi.predict(x_test)
print(y_test_predict)