I am trying to implement logistic regression. I have mapped the features to polynomial terms of the form x1^2*x2^0 + x1^1*x2^1 + ... Now I want to plot the decision boundary for it. After going through this answer, I wrote the code below to use the contour function.
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
def map_features(x, degree):
    # expand the two raw feature columns into every polynomial term
    # x1^(i-j) * x2^j for i = 1..degree, j = 0..i, with an intercept column first
    x_old = x.copy()
    x = pd.DataFrame({"intercept": [1] * x.shape[0]})
    column_index = 1
    for i in range(1, degree + 1):
        for j in range(0, i + 1):
            x.insert(column_index,
                     str(x_old.columns[1]) + "^" + str(i - j) + str(x_old.columns[2]) + "^" + str(j),
                     np.multiply(x_old.iloc[:, 1] ** (i - j), x_old.iloc[:, 2] ** j))
            column_index += 1
    return x
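To make the mapping concrete, here is what it produces for degree 2 on a toy frame (the column names b/x1/x2 are placeholders I made up; the code only assumes the raw features sit in columns 1 and 2):

df = pd.DataFrame({"b": [1, 1], "x1": [2.0, 3.0], "x2": [5.0, 7.0]})
mapped = map_features(df, degree=2)
print(list(mapped.columns))
# ['intercept', 'x1^1x2^0', 'x1^0x2^1', 'x1^2x2^0', 'x1^1x2^1', 'x1^0x2^2']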
def normalize_features(x):
    # z-score every column except the intercept
    for column_name in x.columns[1:]:
        mean = x[column_name].mean()
        std = x[column_name].std()
        x[column_name] = (x[column_name] - mean) / std
    return x

def normalize_features2(x):
    # same, but also skips the last column (used when the label is still attached)
    for column_name in x.columns[1:-1]:
        mean = x[column_name].mean()
        std = x[column_name].std()
        x[column_name] = (x[column_name] - mean) / std
    return x
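One thing worth keeping in mind: both helpers recompute mean/std from whatever frame they are given, so a grid of points normalized separately would land on a different scale than the training data. A small helper like this (my own sketch, not part of the original code) makes the training statistics reusable:

def normalization_params(x):
    # mean/std per feature column (skipping the intercept), so the
    # exact same transform can be applied to the plotting grid later
    return {c: (x[c].mean(), x[c].std()) for c in x.columns[1:]}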
def sigmoid(z):
    return 1 / (1 + np.exp(-z))
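For large negative z, np.exp(-z) overflows float64 and NumPy prints a RuntimeWarning; if that shows up during training, a clipped variant (a sketch, not from the original code) keeps it quiet without meaningfully changing the predictions:

def sigmoid_stable(z):
    # clip the logits so np.exp never overflows a float64
    return 1 / (1 + np.exp(-np.clip(z, -500, 500)))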
def predict(x):
    # x is a single sample as a row vector
    global theta
    # np.asscalar was removed in newer NumPy; .item() does the same thing
    probability = np.asarray(sigmoid(np.dot(x, theta))).item()
    if probability >= 0.5:
        return 1
    else:
        return 0

def predict2(x):
    # same as predict, but x arrives as a column vector, hence the transpose
    global theta
    probability = np.asarray(sigmoid(np.dot(x.T, theta))).item()
    if probability >= 0.5:
        return 1
    else:
        return 0
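The only difference between the two is the orientation of the sample, for example (a made-up 3-parameter theta, since predict reads it from the global scope):

theta = np.array([[0.5], [-1.0], [2.0]])      # pretend fitted parameters
sample_row = np.array([1.0, 0.3, 0.7])        # features as a row    -> predict
sample_col = sample_row.reshape(-1, 1)        # features as a column -> predict2
print(predict(sample_row), predict2(sample_col))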
def cost(x, y, theta):
    # vectorized cross-entropy cost over all m samples
    m = x.shape[0]
    h_theta = pd.DataFrame(sigmoid(np.dot(x, theta)))
    cost = 1/m * ((-np.multiply(y, h_theta.apply(np.log)) - np.multiply(1-y, (1-h_theta).apply(np.log))).sum())
    return cost
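For reference, this is the usual logistic-regression cost the function is computing:

J(\theta) = -\frac{1}{m} \sum_{i=1}^{m} \left[ y^{(i)} \log h_\theta(x^{(i)}) + \left(1 - y^{(i)}\right) \log\left(1 - h_\theta(x^{(i)})\right) \right]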
def gradient_descent(x, y, theta):
    global cost_values
    m = x.shape[0]
    iterations = 1000
    alpha = 0.03
    cost_values = pd.DataFrame({'iteration': [0], 'cost': [cost(x, y, theta)]})
    # the rest of the loop got cut off when I pasted; it is the standard batch update
    for it in range(1, iterations + 1):
        h_theta = sigmoid(np.dot(x, theta))
        theta = theta - (alpha / m) * np.dot(x.T, h_theta - y)
        cost_values.loc[it] = [it, cost(x, y, theta)]
    return theta
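The contour part itself also got lost in the paste; this is a minimal sketch of what I am doing (the grid range and the degree variable are assumptions on my side, and the grid points would need the same normalization as the training data):

u = np.linspace(-1, 1.5, 50)
v = np.linspace(-1, 1.5, 50)
z = np.zeros((len(u), len(v)))
for i in range(len(u)):
    for j in range(len(v)):
        # push each grid point through the same polynomial expansion
        point = pd.DataFrame({"b": [1], "x1": [u[i]], "x2": [v[j]]})
        z[i, j] = np.asarray(np.dot(map_features(point, degree), theta)).item()
plt.contour(u, v, z.T, levels=[0])    # theta' * x = 0 is the decision boundary
plt.show()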
Below is the plot I got as output:

[output figure omitted: the plotted boundary comes out as a straight line]

I am not sure whether I am interpreting this correctly, but the boundary should be more of a curve separating the two classes, not a straight line. The dataset is here: ex2data1.csv