Part 8 | Logistic Regression
Despite the word "regression" in its name, logistic regression is actually a classification algorithm. It converts a linear output into a probability through the sigmoid function and is widely used for binary classification. This article walks through the principle, implementation, and application of logistic regression.

1. Overview of Logistic Regression

Logistic regression handles binary classification: its output is the probability that a sample belongs to a given class. It builds on linear regression by applying the sigmoid function, which maps the continuous prediction into the interval [0, 1].

2. How Logistic Regression Works

Logistic regression uses the sigmoid function

σ(z) = 1 / (1 + e^(-z))

and is trained by maximizing the likelihood, or equivalently minimizing the log loss

L = -(1/N) * Σ [ y * log(p) + (1 - y) * log(1 - p) ]

where p = σ(z) is the predicted probability of the positive class.

Code example: logistic regression basics

```python
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.model_selection import train_test_split
from sklearn.metrics import (accuracy_score, precision_score, recall_score,
                             f1_score, roc_auc_score, roc_curve,
                             confusion_matrix, classification_report)
import warnings
warnings.filterwarnings('ignore')

print("=" * 60)
print("Part 8: Logistic Regression")
print("=" * 60)

# Generate simulated data (binary classification)
np.random.seed(42)
n_samples = 500

# Two Gaussian clusters, one per class
class_0 = np.random.randn(250, 2) * 1.5 + np.array([-2, -2])
class_1 = np.random.randn(250, 2) * 1.5 + np.array([2, 2])

# Stack into a single dataset
X = np.vstack([class_0, class_1])
y = np.array([0] * 250 + [1] * 250)

# Train/test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

print(f"Training set: {X_train.shape[0]} samples")
print(f"Test set: {X_test.shape[0]} samples")
print(f"Class distribution: class 0 = {sum(y == 0)}, class 1 = {sum(y == 1)}")

# Fit a logistic regression model
model = LogisticRegression(random_state=42)
model.fit(X_train, y_train)

# Predicted labels
y_pred = model.predict(X_test)
# Predicted probabilities for class 1
y_prob = model.predict_proba(X_test)[:, 1]

print("\nModel parameters:")
print(f"  Weights: {model.coef_[0]}")
print(f"  Intercept: {model.intercept_[0]:.4f}")
```

3. Model Evaluation

Evaluation metrics for classification models include accuracy, precision, recall, the F1 score, and AUC-ROC.

Code example: model evaluation

```python
print("\n" + "=" * 50)
print("Model evaluation metrics")
print("=" * 50)

# Basic metrics
accuracy = accuracy_score(y_test, y_pred)
precision = precision_score(y_test, y_pred)
recall = recall_score(y_test, y_pred)
f1 = f1_score(y_test, y_pred)

print("\nClassification report:")
print(classification_report(y_test, y_pred, target_names=['Class 0', 'Class 1']))

print("\nCore metrics:")
print(f"  Accuracy:  {accuracy:.4f}")
print(f"  Precision: {precision:.4f}")
print(f"  Recall:    {recall:.4f}")
print(f"  F1 score:  {f1:.4f}")

# Confusion matrix
cm = confusion_matrix(y_test, y_pred)
print("\nConfusion matrix:")
print(cm)

# AUC-ROC
auc_score = roc_auc_score(y_test, y_prob)
print(f"\nAUC-ROC: {auc_score:.4f}")

# ROC curve
fpr, tpr, thresholds = roc_curve(y_test, y_prob)

# Visualization
fig, axes = plt.subplots(1, 3, figsize=(15, 5))

# 1. Confusion matrix heatmap
import seaborn as sns
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues', ax=axes[0])
axes[0].set_title('Confusion Matrix')
axes[0].set_xlabel('Predicted')
axes[0].set_ylabel('Actual')

# 2. ROC curve
axes[1].plot(fpr, tpr, color='darkorange', lw=2, label=f'ROC (AUC = {auc_score:.4f})')
axes[1].plot([0, 1], [0, 1], color='navy', lw=2, linestyle='--')
axes[1].set_xlim([0.0, 1.0])
axes[1].set_ylim([0.0, 1.05])
axes[1].set_xlabel('False Positive Rate')
axes[1].set_ylabel('True Positive Rate')
axes[1].set_title('ROC Curve')
axes[1].legend(loc='lower right')

# 3. Decision boundary
h = 0.02
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
axes[2].contourf(xx, yy, Z, alpha=0.3, cmap=plt.cm.RdYlBu)
axes[2].scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.RdYlBu, edgecolors='black', alpha=0.6)
axes[2].set_xlabel('Feature 1')
axes[2].set_ylabel('Feature 2')
axes[2].set_title('Decision Boundary')

plt.tight_layout()
plt.savefig('logistic_regression_eval.png', dpi=150)
plt.show()
```
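The metrics above are all computed at the default 0.5 probability threshold. Since roc_curve already exposes the candidate thresholds, it can be instructive to see how moving the threshold trades precision against recall. The snippet below is a minimal sketch along those lines and is not part of the original article; it reuses y_test and y_prob from the evaluation code, and the threshold values 0.3/0.5/0.7 are arbitrary illustration choices.

```python
# Supplementary sketch: effect of the classification threshold.
# Reuses y_test and y_prob from the evaluation code above.
from sklearn.metrics import precision_score, recall_score

for threshold in [0.3, 0.5, 0.7]:  # illustrative thresholds, chosen arbitrarily
    # Classify as class 1 whenever P(y = 1) >= threshold
    y_pred_t = (y_prob >= threshold).astype(int)
    p = precision_score(y_test, y_pred_t)
    r = recall_score(y_test, y_pred_t)
    print(f"threshold={threshold:.1f}: precision={p:.4f}, recall={r:.4f}")
```

Raising the threshold typically increases precision at the cost of recall, which is the same trade-off the ROC curve summarizes across all thresholds.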
4. Multi-class Logistic Regression

Logistic regression extends to multi-class problems. The common strategies are One-vs-Rest (OvR) and multinomial (softmax) logistic regression.

Code example: multi-class classification

```python
print("\n" + "=" * 50)
print("Multi-class logistic regression")
print("=" * 50)

# Generate three-class data
np.random.seed(42)
n_samples = 300
class_0 = np.random.randn(100, 2) * 0.8 + np.array([0, 0])
class_1 = np.random.randn(100, 2) * 0.8 + np.array([3, 3])
class_2 = np.random.randn(100, 2) * 0.8 + np.array([0, 5])

X_multi = np.vstack([class_0, class_1, class_2])
y_multi = np.array([0] * 100 + [1] * 100 + [2] * 100)

X_train_m, X_test_m, y_train_m, y_test_m = train_test_split(
    X_multi, y_multi, test_size=0.2, random_state=42)

# Multinomial (softmax) logistic regression
model_multi = LogisticRegression(multi_class='multinomial', solver='lbfgs', random_state=42)
model_multi.fit(X_train_m, y_train_m)

y_pred_multi = model_multi.predict(X_test_m)
y_prob_multi = model_multi.predict_proba(X_test_m)

print(f"Multi-class accuracy: {accuracy_score(y_test_m, y_pred_multi):.4f}")
print("\nClassification report:")
print(classification_report(y_test_m, y_pred_multi,
                            target_names=['Class 0', 'Class 1', 'Class 2']))

# Visualize the decision regions
fig, ax = plt.subplots(figsize=(8, 6))
h = 0.02
x_min, x_max = X_multi[:, 0].min() - 1, X_multi[:, 0].max() + 1
y_min, y_max = X_multi[:, 1].min() - 1, X_multi[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
Z = model_multi.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, alpha=0.3, cmap=plt.cm.tab10)
scatter = plt.scatter(X_multi[:, 0], X_multi[:, 1], c=y_multi, cmap=plt.cm.tab10, edgecolors='black')
plt.colorbar(scatter, label='Class')
plt.xlabel('Feature 1')
plt.ylabel('Feature 2')
plt.title('Multi-class Logistic Regression')
plt.tight_layout()
plt.savefig('multiclass_logistic.png', dpi=150)
plt.show()
```

5. Strengths and Weaknesses

Logistic regression is easy to interpret and fast to train, but its decision boundary is linear, so it struggles with complex, non-linear relationships. Its complexity can be tuned through the regularization parameter C.

Code example: tuning the regularization parameter

```python
print("\n" + "=" * 50)
print("Effect of the regularization parameter C")
print("=" * 50)

# Smaller C means stronger regularization
C_values = [0.001, 0.01, 0.1, 1, 10, 100]
train_scores = []
test_scores = []

for C in C_values:
    model = LogisticRegression(C=C, random_state=42, max_iter=1000)
    model.fit(X_train, y_train)
    train_scores.append(model.score(X_train, y_train))
    test_scores.append(model.score(X_test, y_test))
    print(f"C={C:.3f}: train accuracy {train_scores[-1]:.4f}, test accuracy {test_scores[-1]:.4f}")

# Visualization
plt.figure(figsize=(10, 5))
plt.semilogx(C_values, train_scores, 'b-o', label='Train')
plt.semilogx(C_values, test_scores, 'r-s', label='Test')
plt.xlabel('Regularization parameter C')
plt.ylabel('Accuracy')
plt.title('Effect of C on Logistic Regression')
plt.legend()
plt.grid(True)
plt.savefig('logistic_reg_param.png', dpi=150)
plt.show()

print("\nNotes on the C parameter:")
print("- Larger C: weaker regularization, more complex model")
print("- Smaller C: stronger regularization, helps prevent overfitting")
print("- The optimal C should be chosen via cross-validation")
```

In practice the best C is selected by cross-validation; a short sketch of that search is given after the summary.

6. Summary

- Logistic regression is the classic algorithm for binary classification.
- It converts the linear output into a probability via the sigmoid function.
- Evaluation metrics include accuracy, precision, recall, F1, and AUC.
- Model complexity is controlled through the regularization parameter C.
- It extends to multi-class problems via softmax (multinomial logistic regression).
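As a supplement to the note in section 5 on choosing C by cross-validation, the sketch below (not part of the original article) uses scikit-learn's LogisticRegressionCV to search the same grid of C values directly. It assumes the binary X_train/y_train and X_test/y_test arrays defined earlier in this article.

```python
# Supplementary sketch: choosing C by cross-validation.
# Assumes X_train, y_train, X_test, y_test from the binary example above.
from sklearn.linear_model import LogisticRegressionCV

cv_model = LogisticRegressionCV(
    Cs=[0.001, 0.01, 0.1, 1, 10, 100],  # same grid as the experiment in section 5
    cv=5,                               # 5-fold cross-validation
    max_iter=1000,
    random_state=42,
)
cv_model.fit(X_train, y_train)

print(f"Best C: {cv_model.C_[0]:.3f}")
print(f"Test accuracy with best C: {cv_model.score(X_test, y_test):.4f}")
```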