
Gradient Descent and Logistic Regression Example (Python Implementation)

import numpy as np
import pandas as pd
import os

data = pd.read_csv("iris.csv")  # the iris data here has already been preprocessed
m, n = data.shape
dataMatIn = np.ones((m, n))  # design matrix; the trailing column of ones is the intercept term
dataMatIn[:, :-1] = data.iloc[:, :-1]  # .ix was removed from pandas, use .iloc instead
classLabels = data.iloc[:, -1]
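If you don't have the preprocessed iris.csv to hand, a minimal sketch along these lines rebuilds an equivalent two-class file from scikit-learn's bundled iris data (make_binary_iris_csv is a hypothetical helper of mine, not part of the original post, and assumes scikit-learn is installed):

from sklearn.datasets import load_iris

def make_binary_iris_csv(path="iris.csv"):
    iris = load_iris()
    mask = iris.target < 2  # keep classes 0 and 1 only, so the label column is already binary
    df = pd.DataFrame(iris.data[mask], columns=iris.feature_names)
    df["label"] = iris.target[mask]  # last column holds the 0/1 labels, as the script expects
    df.to_csv(path, index=False)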

# sigmoid function
def sigmoid(z):
    return 1 / (1 + np.exp(-z))
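One caveat: np.exp(-z) overflows for large negative z and numpy emits a RuntimeWarning. If that matters for your data, a numerically stable variant (my own sketch, not from the original post) splits on the sign of z:

def stable_sigmoid(z):
    z = np.asarray(z, dtype=float)
    out = np.empty_like(z)
    pos = z >= 0
    out[pos] = 1 / (1 + np.exp(-z[pos]))  # exponent <= 0, cannot overflow
    ez = np.exp(z[~pos])  # exponent < 0, cannot overflow
    out[~pos] = ez / (1 + ez)  # algebraically identical to 1 / (1 + exp(-z))
    return out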

# stochastic gradient descent
def Stocgrad_descent(dataMatIn, classLabels):
    dataMatrix = np.mat(dataMatIn)  # training set
    labelMat = np.mat(classLabels).transpose()  # y values as a column vector
    m, n = np.shape(dataMatrix)  # m: number of rows, n: number of columns of dataMatrix
    weights = np.ones((n, 1))  # initialize the regression coefficients, shape (n, 1)
    alpha = 0.001  # step size (learning rate)
    maxCycle = 500  # maximum number of passes over the data
    epsilon = 0.001  # convergence threshold
    prev_weights = np.zeros((n, 1))  # weights from the previous pass
    for i in range(maxCycle):
        for j in range(m):  # "stochastic": update on one sample at a time
            h = sigmoid(dataMatrix[j] * weights)  # predicted probability for sample j
            weights = weights + alpha * dataMatrix[j].transpose() * (labelMat[j] - h)  # gradient step
        if np.linalg.norm(weights - prev_weights) < epsilon:  # stop once the weights barely move
            break
        prev_weights = weights.copy()
    return weights  # return after the loop finishes, not inside it
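
For reference, the update inside the inner loop is one step of gradient ascent on the log-likelihood of logistic regression. With $h_w(x) = \sigma(xw)$, the per-sample gradient works out to

\nabla_w \ell_j(w) = x_j^{\top} \big( y_j - h_w(x_j) \big)
\qquad \Longrightarrow \qquad
w \leftarrow w + \alpha \, x_j^{\top} \big( y_j - h_w(x_j) \big)

which is exactly the weights + alpha * dataMatrix[j].transpose() * (labelMat[j] - h) line above.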

# logistic regression prediction
def pred_result(dataMatIn):
    dataMatrix = np.mat(dataMatIn)
    r = Stocgrad_descent(dataMatIn, classLabels)
    p = sigmoid(dataMatrix * r)  # probabilities predicted by the model

    # binarize the predictions at the 0.5 threshold
    pred = []
    for i in range(len(data)):
        if p[i] > 0.5:
            pred.append(1)
        else:
            pred.append(0)
    data["pred"] = pred
    if os.path.exists("data_and_pred.csv"):  # delete a stale output file if one exists
        os.remove("data_and_pred.csv")  # (guarded so the first run no longer crashes)
    data.to_csv("data_and_pred.csv", index=False, encoding="utf_8_sig")  # save the dataset with predictions
pred_result(dataMatIn)
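
To sanity-check the fit, a quick training-accuracy computation (my own addition; it assumes classLabels still holds the original 0/1 labels captured before the "pred" column was added) could look like:

accuracy = (data["pred"] == classLabels).mean()
print("training accuracy: %.3f" % accuracy)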

If you spot any mistakes, please let me know; I'm still pretty new to this!