import pandas as pd
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report, confusion_matrix
import matplotlib.pyplot as plt
import seaborn as sns

# Dataset URL
data_url = 'https://archive.ics.uci.edu/static/public/15/data.csv'

# Load the dataset
df = pd.read_csv(data_url)

# Inspect the first few rows
print("First few rows of the dataset:")
print(df.head())

# Data preprocessing
# Handle missing values: replace '?' with NaN, then convert to float
df['Bare_nuclei'] = df['Bare_nuclei'].replace('?', np.nan).astype(float)
df = df.dropna()  # Drop rows containing missing values

# Encode the target variable (map 2 and 4 to 0 and 1)
df['Class'] = df['Class'].map({2: 0, 4: 1})

# Features and target
X = df.drop(columns=['Sample_code_number', 'Class'])  # Features
y = df['Class']  # Target

# Train/test split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Train the model
model = RandomForestClassifier(random_state=42)
model.fit(X_train, y_train)

# Predict
y_pred = model.predict(X_test)

# Print the classification report
print("\nClassification report:")
print(classification_report(y_test, y_pred))

# Visualize the confusion matrix
cm = confusion_matrix(y_test, y_pred)
plt.figure(figsize=(8, 6))
sns.heatmap(cm, annot=True, fmt='d', cmap='Blues',
            xticklabels=['Benign', 'Malignant'],
            yticklabels=['Benign', 'Malignant'])
plt.ylabel('Actual')
plt.xlabel('Predicted')
plt.title('Confusion Matrix')
plt.show()

# Visualize feature importances
feature_importances = model.feature_importances_
features = X.columns

# Bar plot of feature importances
plt.figure(figsize=(12, 6))
sns.barplot(x=feature_importances, y=features)
plt.title('Feature Importance')
plt.xlabel('Importance')
plt.ylabel('Feature')
plt.show()

###############################################
from ucimlrepo import fetch_ucirepo

# fetch dataset
breast_cancer_wisconsin_original = fetch_ucirepo(id=15)

# data (as pandas dataframes)
X = breast_cancer_wisconsin_original.data.features
y = breast_cancer_wisconsin_original.data.targets

# metadata
print(breast_cancer_wisconsin_original.metadata)

# variable information
print(breast_cancer_wisconsin_original.variables)

##########################################################
# Sample classification report on the held-out test set:
#               precision    recall  f1-score   support
#            0       0.93      0.99      0.96        79
#            1       0.98      0.90      0.94        58
#     accuracy                           0.95       137
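
##########################################################
# A minimal sketch of how the same pipeline could be run on the frames returned
# by ucimlrepo instead of the CSV above. Assumptions (not confirmed by the
# original): the features frame exposes the same nine columns including
# 'Bare_nuclei', missing values come back as NaN or '?', and the targets frame
# has a single 'Class' column coded 2 (benign) / 4 (malignant).
import pandas as pd
from ucimlrepo import fetch_ucirepo
from sklearn.model_selection import train_test_split
from sklearn.ensemble import RandomForestClassifier
from sklearn.metrics import classification_report

repo = fetch_ucirepo(id=15)
X = repo.data.features.apply(pd.to_numeric, errors='coerce')  # coerce any stray '?' to NaN
y = repo.data.targets['Class'].map({2: 0, 4: 1})              # assumed 2/4 coding, as in the CSV version

# Drop rows with missing feature values, keeping X and y aligned
mask = X.notna().all(axis=1)
X, y = X[mask], y[mask]

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
model = RandomForestClassifier(random_state=42).fit(X_train, y_train)
print(classification_report(y_test, model.predict(X_test)))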