Logistic Regression and a NumPy Implementation

2021-12-15 08:37:13

Contents
  • Logistic regression
  • NumPy implementation

Logistic regression

  • The logistic distribution: distribution function F(x) = 1 / (1 + e^{-(x-μ)/γ}) and density function f(x) = F'(x); the plots of both functions are omitted here.
  • The odds of an event: the ratio of the probability that the event occurs to the probability that it does not occur. If the probability of the event is p, the odds are p / (1 - p).
  • The log-odds (logit) of the event: log(p / (1 - p)). Logistic regression models this log-odds as a linear function of the input, log(P(Y=1|x) / (1 - P(Y=1|x))) = w · x, so P(Y=1|x) = sigmoid(w · x); see the sketch after this list.
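A minimal numerical check of that relationship (the names sigmoid, logit, w, x below are illustrative, not part of the implementation in the next section): the sigmoid maps a linear score w · x to a probability, and taking the log-odds of that probability recovers the same linear score.

import numpy as np

def sigmoid(z):
    # maps a real-valued score to a probability in (0, 1)
    return 1.0 / (1.0 + np.exp(-z))

def logit(p):
    # log-odds: log(p / (1 - p))
    return np.log(p / (1.0 - p))

w = np.array([0.5, -1.2])   # example weights (illustrative values)
x = np.array([2.0, 1.0])    # example input
score = w @ x               # linear score w · x
p = sigmoid(score)          # P(Y=1 | x) under the model
print(np.isclose(logit(p), score))  # True: the log-odds equals the linear score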

NumPy implementation

# -*- coding:utf-8 -*-
# /usr/bin/python

from math import exp
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split

data = pd.read_csv("test.csv")
# keep the first 100 samples: two feature columns and the label column
data = np.array(data.iloc[:100, [0, 1, -1]])
X, Y = data[:, :2], data[:, -1]
X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.3)

class LogisticRegression():

    def __init__(self, max_iter=200, learning_rate=0.05):
        '''Store the training hyperparameters.'''
        self.max_iter = max_iter
        self.learning_rate = learning_rate

    def sigmoid(self, x):
        # logistic function: maps a real-valued score to a probability in (0, 1)
        return 1 / (1 + exp(-x))

    def data_matrix(self, x):
        # prepend a constant 1.0 to each sample so the bias is learned as a weight
        data_mat = []
        for d in x:
            data_mat.append([1.0, *d])
        return data_mat

    def fit(self, X, Y):
        # stochastic gradient updates on one sample at a time
        data_mat = self.data_matrix(X)
        self.weights = np.zeros((len(data_mat[0]), 1), dtype=np.float32)

        for iter_ in range(self.max_iter):
            for i in range(len(X)):
                result = self.sigmoid(np.dot(data_mat[i], self.weights))
                error = Y[i] - result
                self.weights += self.learning_rate * error * np.transpose([data_mat[i]])

    def score(self, X_test, Y_test):
        # accuracy: fraction of samples whose sign of w · x matches the label
        right = 0
        X_test = self.data_matrix(X_test)
        for x, y in zip(X_test, Y_test):
            result = np.dot(x, self.weights)
            if (result > 0 and y == 1) or (result < 0 and y == 0):
                right += 1
        return right / len(X_test)

lr = LogisticRegression()
lr.fit(X_train,Y_train)
print(lr.score(X_test,Y_test))
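Since matplotlib is imported above but never used, here is a hedged sketch of how the fitted model could be visualized, assuming the two feature columns span a plane and reusing the lr, X, Y objects defined above. The decision boundary is where w0 + w1*x1 + w2*x2 = 0.

x_points = np.arange(X[:, 0].min(), X[:, 0].max(), 0.1)
w = lr.weights
# solve the boundary for the second feature: x2 = -(w0 + w1*x1) / w2
y_points = -(w[0] + w[1] * x_points) / w[2]
plt.scatter(X[Y == 0][:, 0], X[Y == 0][:, 1], label='class 0')
plt.scatter(X[Y == 1][:, 0], X[Y == 1][:, 1], label='class 1')
plt.plot(x_points, y_points, label='decision boundary')
plt.legend()
plt.show()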
