Design Approach
Output
w_target.shape: (3,) [ 1.17881511 -5.13265596 -6.55556511]
Pre_Logistic_function
<class 'function'>
Product_x_function
[1. 0.10262954 0.43893794]
data_x
(300, 3) [[ 1. -0.15378708 0.9615284 ]
[ 1. 0.36965948 -0.0381362 ]
[ 1. -0.21576496 -0.31364397]
[ 1. 0.45809941 -0.12285551]
[ 1. -0.88064421 -0.20391149]]
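The printout above is consistent with a synthetic data set drawn from a random linear target: a three-component w_target, N = 300 points x = (1, x1, x2) with a leading bias column, and labels y = sign(w_target · x). The snippet below is only a minimal sketch of such a generator, assuming coordinates sampled uniformly from [-1, 1]; it is not necessarily the author's exact setup.

import numpy as np

# Hedged sketch of a generator matching the printed shapes above.
# Assumptions: uniform sampling on [-1, 1] and sign labels; not the original code.
N = 300
w_target = np.random.randn(3)                     # random linear target, shape (3,)
coords = np.random.uniform(-1, 1, size=(N, 2))    # 2-D inputs
data_x = np.hstack([np.ones((N, 1)), coords])     # prepend the bias column of 1s
data_y = np.sign(data_x.dot(w_target))            # labels in {-1, +1}
print('w_target.shape:', w_target.shape, w_target)
print('data_x')
print(data_x.shape, data_x[:5])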
Core Code
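The gradient_descent function below runs full-batch gradient descent on the logistic in-sample error; the np.tile expression inside the loop evaluates exactly this gradient (with labels y_n in {-1, +1}):

$$E_{\text{in}}(w) = \frac{1}{N}\sum_{n=1}^{N}\ln\!\left(1 + e^{-y_n w^{\top} x_n}\right), \qquad \nabla E_{\text{in}}(w) = \frac{1}{N}\sum_{n=1}^{N}\frac{-y_n\,x_n}{1 + e^{\,y_n w^{\top} x_n}}.$$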
import numpy as np

def gradient_descent(data_x, data_y, w_h=None, eta=1.0, max_iterations=10000, epsilon=0.001):
    # Full-batch gradient descent on the logistic in-sample error E_in(w).
    if w_h is None:
        w_h = np.zeros(data_x.shape[1])
    w_h_i = [np.copy(w_h)]                          # keep the weight vector at every iteration
    for i in range(max_iterations):
        subset_indices = np.arange(data_x.shape[0])  # full batch
        # grad E_in = mean_n( -y_n * x_n / (1 + exp(y_n * w . x_n)) )
        grad_E_in = np.mean(np.tile(-data_y[subset_indices] /
                                    (1.0 + np.exp(data_y[subset_indices] * w_h.dot(data_x[subset_indices].T))),
                                    (data_x.shape[1], 1)).T * data_x[subset_indices], axis=0)
        w_h -= eta * grad_E_in
        w_h_i.append(np.copy(w_h))
        # stop once the gradient is small relative to the weight norm
        if np.linalg.norm(grad_E_in) <= np.linalg.norm(w_h) * epsilon:
            break
    return np.array(w_h_i)
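A minimal usage sketch (the sigmoid helper and the accuracy check are illustrative additions, not part of the original listing): the array returned by gradient_descent holds one weight vector per iteration, so its last row is the learned hypothesis.

# Run the optimizer on the training set shown above and inspect the result.
w_trajectory = gradient_descent(data_x, data_y, eta=1.0)
w_learned = w_trajectory[-1]
print('iterations:', len(w_trajectory) - 1, 'w_learned:', w_learned)

# Logistic probability of the +1 class under the learned weights.
sigmoid = lambda s: 1.0 / (1.0 + np.exp(-s))
train_prob = sigmoid(data_x.dot(w_learned))
print('training accuracy:', np.mean((train_prob >= 0.5) == (data_y == 1)))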
from sklearn import linear_model

# Fit scikit-learn's LogisticRegression on the same training set
LoR = linear_model.LogisticRegression()
LoR.fit(data_x, data_y)
y_train = LoR.predict(data_x)
# Background grid coloured by the probability of the first class, LoR.classes_[0]
LoRpp_function = lambda z: LoR.predict_proba(z)[:, 0]
BG_Grid_BaseLoR = apply_to_fill(z_grid, LoRpp_function)
full_N_fig = plot_dataset_and_hypothesis(3, data_x, data_y, xy_1, xy_2, BG_Grid_BaseLoR,
                                         title=r'LoR: Hypothesis, $N={:}$'.format(N))
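One point worth noting about the scikit-learn API: predict_proba returns one column per class, ordered as in LoR.classes_. With the ±1 labels used above, column 0 is therefore P(y = -1); if the background shading should show the probability of the positive class instead, use column 1:

# Columns of predict_proba follow LoR.classes_ (sorted class labels).
print(LoR.classes_)                                         # expected: [-1.  1.]
LoRpp_pos_function = lambda z: LoR.predict_proba(z)[:, 1]   # P(y = +1)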
from sklearn import svm

# Linear-kernel SVM; background grid holds its hard +1/-1 predictions
SVM_Linear = svm.SVC(kernel='linear')
SVM_Linear.fit(data_x, data_y)
SVM_LinearPre_function = lambda z: SVM_Linear.predict(z)
BG_Grid_BaseSVM_Linear = apply_to_fill(z_grid, SVM_LinearPre_function)
full_N_fig = plot_dataset_and_hypothesis(5, data_x, data_y, xy_1, xy_2, BG_Grid_BaseSVM_Linear,
                                         title=r'SVM_Linear: Hypothesis, $N={:}$'.format(N))
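As a quick sanity check (not part of the original listing), both fitted models can be scored on the training set with scikit-learn's accuracy_score:

from sklearn.metrics import accuracy_score

# Illustrative comparison of the two classifiers on the training data.
print('LoR train accuracy:', accuracy_score(data_y, LoR.predict(data_x)))
print('SVM train accuracy:', accuracy_score(data_y, SVM_Linear.predict(data_x)))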