Andrew Ng Machine Learning, Week 2 Programming Assignment (Python Implementation)


Course assignment files, extraction code: 3szr

1. Linear Regression with One Variable
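For reference, the model fitted below is the single-variable hypothesis from the course, with its squared-error cost and batch gradient-descent update (notation as in the lectures):

$$h_\theta(x) = \theta_0 + \theta_1 x$$

$$J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\left(h_\theta(x^{(i)}) - y^{(i)}\right)^2$$

$$\theta_j := \theta_j - \alpha\,\frac{1}{m}\sum_{i=1}^{m}\left(h_\theta(x^{(i)}) - y^{(i)}\right)x_j^{(i)}$$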

ex1.py

```python
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import LogNorm
from mpl_toolkits.mplot3d import Axes3D  # registers the 3D projection
from computecost import *
from plotdata import *

print('plotting data...')
# Load the txt-format dataset; each row is comma-separated
data = np.loadtxt('./data/ex1data1.txt', delimiter=',')
x = data[:, 0]
y = data[:, 1]
m = y.size

plt.figure(0)
plot_data(x, y)
# input()

print('running gradient descent...')
x = np.c_[np.ones(m), x]  # prepend a column of ones for the intercept term
theta = np.zeros(2)
iterations = 1500
alpha = 0.01
print('initial cost: ' + str(compute_cost(x, y, theta)) +
      ' (this value should be about 32.07)')
theta, j_history = gradient_descent(x, y, theta, alpha, iterations)
print('theta found by gradient descent: ' + str(theta.reshape(2)))

plt.figure(0)
line1, = plt.plot(x[:, 1], np.dot(x, theta), label='linear regression')
plot_data(x[:, 1], y)
plt.legend(handles=[line1])
input('program paused. press enter to continue')

predict1 = np.dot(np.array([1, 3.5]), theta)
print('for population = 35,000, we predict a profit of {:.2f} '
      '(this value should be about 4519.77)'.format(predict1 * 10000))
predict2 = np.dot(np.array([1, 7]), theta)
print('for population = 70,000, we predict a profit of {:.2f} '
      '(this value should be about 45342.45)'.format(predict2 * 10000))
input('program paused. press enter to continue')

print('visualizing j(theta_0, theta_1)...')
theta0_vals = np.linspace(-10, 10, 100)
theta1_vals = np.linspace(-1, 4, 100)
j_vals = np.zeros((theta0_vals.shape[0], theta1_vals.shape[0]))
print(theta0_vals.shape[0])
print(theta1_vals.shape[0])
# Evaluate the cost on the theta0 x theta1 grid
for i in range(theta0_vals.shape[0]):
    for j in range(theta1_vals.shape[0]):
        t = np.array([theta0_vals[i], theta1_vals[j]])
        j_vals[i, j] = compute_cost(x, y, t)
# meshgrid uses 'xy' indexing, so transpose to match (xs, ys)
j_vals = np.transpose(j_vals)

fig = plt.figure(1)
ax = fig.add_subplot(projection='3d')  # Axes3D(fig) also works on older matplotlib
xs, ys = np.meshgrid(theta0_vals, theta1_vals)
plt.title('visualizing j(theta_0, theta_1)')
ax.plot_surface(xs, ys, j_vals)
ax.set_xlabel(r'$\theta_0$', color='r')
ax.set_ylabel(r'$\theta_1$', color='r')
plt.show()

plt.figure(2)
lvls = np.logspace(-2, 3, 20)
plt.contourf(xs, ys, j_vals, levels=lvls, norm=LogNorm())
plt.plot(theta[0], theta[1], c='r', marker='x')
plt.show()
```
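As a quick sanity check not in the original script: since this cost function is convex, the closed-form least-squares solution should agree closely with the theta found by gradient descent. A minimal sketch, assuming `x` is the design matrix with the leading column of ones as in ex1.py:

```python
import numpy as np

def normal_equation(x, y):
    # Closed-form least squares: theta = (X^T X)^{-1} X^T y.
    # np.linalg.lstsq is the numerically stable way to solve it.
    theta_exact, *_ = np.linalg.lstsq(x, y, rcond=None)
    return theta_exact

# Usage: print(normal_equation(x, y))  # should be close to theta from gradient descent
```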

plotdata.py

```python
import matplotlib.pyplot as plt

def plot_data(x, y):
    plt.scatter(x, y, marker='x', s=50, c='r', alpha=0.8)
    plt.xlabel('population')
    plt.ylabel('profits')
    plt.show()
```
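Note that the plt.show() call inside plot_data blocks the script until the figure window is closed, which is why ex1.py appears to pause after each plot. If the pauses are unwanted, plt.show(block=False) or moving plt.show() out to the caller are common alternatives.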

computecost.py (the gradient descent function is included in this file as well)
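The post links computecost.py in the download but does not inline its contents. A minimal sketch consistent with how ex1.py calls compute_cost(x, y, theta) and gradient_descent(x, y, theta, alpha, iterations) would look like the following; the function names come from the script above, but the bodies are a reconstruction, not the author's file:

```python
import numpy as np

def compute_cost(x, y, theta):
    # Squared-error cost J(theta) = 1/(2m) * sum((X @ theta - y)^2)
    m = y.size
    errors = x.dot(theta) - y
    return errors.dot(errors) / (2 * m)

def gradient_descent(x, y, theta, alpha, iterations):
    # Batch gradient descent; records the cost after every update in j_history
    m = y.size
    j_history = np.zeros(iterations)
    for it in range(iterations):
        gradient = x.T.dot(x.dot(theta) - y) / m
        theta = theta - alpha * gradient
        j_history[it] = compute_cost(x, y, theta)
    return theta, j_history
```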
