# 分類與邏輯迴歸

``````# All products are positive for the normal-weight class
print(coef[0] * data[normal_weight, 0] + coef[1] * data[normal_weight, 1])
# All products are negative for the overweight class
print(coef[0] * data[overweight, 0] + coef[1] * data[overweight, 1])
``````

sklearn 提供了 `sklearn.linear_model.LogisticRegression`，可使用邏輯迴歸進行分類，單就線性可分來說，只要將〈分類與感知器〉中的範例程式，從 `Perceptron` 改為 `LogisticRegression` 就可以了，例如：

``````import numpy as np
import matplotlib.pyplot as plt
from matplotlib import cm

from sklearn.linear_model import LogisticRegression

height_waist = data[:,0:2]
label = data[:,2]

normal_weight = label == 1
overweight = label == -1

# 使用 LogisticRegression
lg_reg = LogisticRegression()
# 提供資料與標記
lg_reg.fit(height_waist, label)
# 取得權重向量
coef = lg_reg.coef_[0]
# 截距
intercept = lg_reg.intercept_

plt.xlabel('height')
plt.ylabel('waist')
plt.gca().set_aspect(1)
plt.scatter(data[normal_weight, 0], data[normal_weight, 1], marker = 'o')
plt.scatter(data[overweight, 0], data[overweight, 1], marker = 'x')

height = height_waist[:,0]

h = np.arange(np.min(height), np.max(height))
w = -coef[0] / coef[1] * h - intercept
plt.plot(h, w, linestyle='dashed')

plt.show()
``````

``````155,128,0
183,134,0
181,60,1
161,106,0
144,125,0
181,93,1
...略
``````

``````import numpy as np
import matplotlib.pyplot as plt

height_weight = data[:,0:2]
label = data[:,2]

plt.xlabel('height')
plt.ylabel('weight')

normal_weight = label == 1
overweight = label == 0

plt.scatter(data[normal_weight, 0], data[normal_weight, 1], marker = 'o')
plt.scatter(data[overweight, 0], data[overweight, 1], marker = 'x')

plt.show()
``````

``````poly = PolynomialFeatures()                 # degree-2 polynomial features
feature = poly.fit_transform(height_weight) # expanded feature matrix

lg_reg = LogisticRegression()  # logistic regression
lg_reg.fit(feature, label)
``````

``````# 顯示 [1. 0.]
print(
lg_reg.predict(
poly.fit_transform([[178, 60], [183, 100]])
)
)
``````

``````import numpy as np
import matplotlib.pyplot as plt

from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import PolynomialFeatures

# height weight
height_weight = data[:,0:2]
label = data[:,2]

poly = PolynomialFeatures()                 # 二次多項式
feature = poly.fit_transform(height_weight) # 特徵值

lg_reg = LogisticRegression()  # 邏輯迴歸
lg_reg.fit(feature, label)
coef = lg_reg.coef_[0]

plt.xlabel('height')
plt.ylabel('weight')

normal_weight = label == 1
overweight = label == 0

plt.scatter(data[normal_weight, 0], data[normal_weight, 1], marker = 'o')
plt.scatter(data[overweight, 0], data[overweight, 1], marker = 'x')

height = height_weight[:,0]
h = np.arange(np.min(height), np.max(height))

ycoef0 = [coef[5]] * h.size
ycoef1 = coef[2] + coef[4] * h
ycoef2 = coef[0] + coef[1] * h + coef[3] * (h ** 2)

ycoef = np.dstack((ycoef0, ycoef1, ycoef2))[0]
y = np.apply_along_axis(np.roots, 1, ycoef) # 解平方根
w = y[:,1] # 只需要正值部份

plt.plot(h, w, linestyle='dashed')

plt.show()
``````