Linear Regression with Gradient Descent in NumPy

Posted by AlexLord on 2024-08-17
import math
import numpy as np

# Training data: three (x, y) points for the linear model f(x) = w*x + b
x_train = np.array([1.0, 2.0, 3.0])
y_train = np.array([300.0, 350.0, 500.0])

def compute_cost(x, y, w, b):
    """Mean squared error cost: J(w, b) = (1 / (2m)) * sum((w*x + b - y)**2)."""
    m = x.shape[0]
    f_wb = w * x + b                     # model predictions for all examples
    cost = ((f_wb - y) ** 2).sum()       # sum of squared errors
    total_cost = cost / (2 * m)
    return total_cost

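# Quick sketch (not in the original post): with the data above and the initial
# guess w = 0, b = 0, every prediction is 0, so the cost is
# (300**2 + 350**2 + 500**2) / (2 * 3) = 462500 / 6 ≈ 77083.33.
# print(compute_cost(x_train, y_train, 0, 0))
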
def compute_gradient(x, y, w, b):
    """Partial derivatives of the cost J with respect to w and b."""
    m = x.shape[0]
    dj_dw, dj_db = 0, 0

    for i in range(m):
        f_wb = w * x[i] + b              # prediction for example i
        dj_dw_i = (f_wb - y[i]) * x[i]   # contribution of example i to dJ/dw
        dj_db_i = f_wb - y[i]            # contribution of example i to dJ/db
        dj_db += dj_db_i
        dj_dw += dj_dw_i
    dj_dw = dj_dw / m                    # average over all m examples
    dj_db = dj_db / m

    return dj_dw, dj_db

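# An equivalent vectorized gradient (a sketch, not in the original post); the
# name compute_gradient_vectorized is illustrative. It replaces the explicit
# per-example loop above with NumPy array operations.
def compute_gradient_vectorized(x, y, w, b):
    err = (w * x + b) - y                # residuals for all examples at once
    dj_dw = (err * x).sum() / x.shape[0]
    dj_db = err.sum() / x.shape[0]
    return dj_dw, dj_db
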
def gradient_descent(x, y, w_in, b_in, alpha, num_iters, cost_function, gradient_function):
    """Run num_iters steps of batch gradient descent from the initial (w_in, b_in)."""
    b = b_in
    w = w_in

    for i in range(num_iters):
        dj_dw, dj_db = gradient_function(x, y, w, b)
        b = b - alpha * dj_db            # update the intercept
        w = w - alpha * dj_dw            # update the slope
        cost = cost_function(x, y, w, b)

        # Print progress roughly ten times over the course of the run
        if i % math.ceil(num_iters / 10) == 0:
            print(f"Iteration {i}: Cost {cost:.2f}, Gradient (dw: {dj_dw:.3f}, db: {dj_db:.3f}), Parameters (w: {w:.3f}, b: {b:.5f})")

    return w, b

# Start from w = 0, b = 0 and run 1000 iterations with learning rate 0.001
w_init = 0
b_init = 0

iterations = 1000
tmp_alpha = 0.001

w_final, b_final = gradient_descent(x_train, y_train, w_init, b_init, tmp_alpha,
                                    iterations, compute_cost, compute_gradient)

print(f"(w,b) found by gradient descent: ({w_final:8.4f},{b_final:8.4f})")
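
As a quick follow-up check (a sketch not in the original post; the predictions name is illustrative), the fitted parameters can be evaluated on the training inputs and compared with the targets:

predictions = w_final * x_train + b_final            # f(x) = w*x + b on the training inputs
for xi, yi, pi in zip(x_train, y_train, predictions):
    print(f"x = {xi:.1f}: target = {yi:.1f}, predicted = {pi:.1f}")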
