"""Multi-label classification helpers.

Each function takes a feature matrix X and a multi-label indicator matrix y,
performs an 80/20 train/test split, fits one model family wrapped for
multi-label output (OneVsRestClassifier or MultiOutputClassifier), and
returns (classification_report, subset_accuracy) on the held-out test set.
"""
from sklearn.multioutput import MultiOutputClassifier
from sklearn.multiclass import OneVsRestClassifier
from sklearn.metrics import classification_report, accuracy_score
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.naive_bayes import MultinomialNB
from sklearn.svm import SVC
from xgboost import XGBClassifier
from sklearn.neural_network import MLPClassifier
import numpy as np
import pandas as pd
# Logistic Regression (use OneVsRest)
def multilabel_logistic_regression(X, y):
    """Fit one-vs-rest logistic regression and evaluate on a held-out split.

    Splits (X, y) 80/20 with random_state=42, trains an independent
    LogisticRegression (lbfgs, max_iter=1000) per label, and returns a
    (classification_report, subset_accuracy) tuple for the test portion.
    """
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=42)
    classifier = OneVsRestClassifier(LogisticRegression(solver='lbfgs', max_iter=1000))
    classifier.fit(X_tr, y_tr)
    predictions = classifier.predict(X_te)
    report = classification_report(y_te, predictions)
    return report, accuracy_score(y_te, predictions)
# Decision Tree
def multilabel_decision_tree(X, y):
    """Fit one decision tree per label and evaluate on a held-out split.

    Splits (X, y) 80/20 with random_state=42, trains an independent
    DecisionTreeClassifier per output column via MultiOutputClassifier,
    and returns (classification_report, subset_accuracy) on the test set.
    """
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    # random_state pinned so repeated runs build identical trees (feature
    # tie-breaking is otherwise nondeterministic) — consistent with the
    # other helpers in this module that fix random_state=42.
    model = MultiOutputClassifier(DecisionTreeClassifier(random_state=42))
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    return classification_report(y_test, y_pred), accuracy_score(y_test, y_pred)
# Random Forest
def multilabel_random_forest(X, y):
    """Fit one random forest per label and evaluate on a held-out split.

    Uses an 80/20 split (random_state=42) and a 100-tree
    RandomForestClassifier (random_state=42) per output column.
    Returns (classification_report, subset_accuracy) for the test data.
    """
    split = train_test_split(X, y, test_size=0.2, random_state=42)
    X_tr, X_te, y_tr, y_te = split
    forest = RandomForestClassifier(n_estimators=100, random_state=42)
    wrapper = MultiOutputClassifier(forest)
    wrapper.fit(X_tr, y_tr)
    predicted = wrapper.predict(X_te)
    return classification_report(y_te, predicted), accuracy_score(y_te, predicted)
# SVM (with OneVsRest)
def multilabel_svm(X, y):
    """Fit one-vs-rest RBF SVMs and evaluate on a held-out split.

    Splits (X, y) 80/20 with random_state=42, trains one SVC (RBF kernel)
    per label, and returns (classification_report, subset_accuracy) on
    the test portion.
    """
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    # probability=True was dropped: this function only calls predict(),
    # which is based on the decision function and is unaffected by the
    # probability flag, while enabling it costs an extra internal 5-fold
    # cross-validation (Platt scaling) per label.
    model = OneVsRestClassifier(SVC(kernel='rbf'))
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    return classification_report(y_test, y_pred), accuracy_score(y_test, y_pred)
# k-NN (KNeighborsClassifier supports multi-label directly)
def multilabel_knn(X, y, k=5):
    """Fit a k-nearest-neighbours classifier and evaluate on a held-out split.

    KNeighborsClassifier handles multi-label targets natively, so no
    OneVsRest/MultiOutput wrapper is needed. Uses an 80/20 split
    (random_state=42) and `k` neighbours (default 5). Returns a
    (classification_report, subset_accuracy) tuple for the test data.
    """
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=42)
    knn = KNeighborsClassifier(n_neighbors=k)
    knn.fit(X_tr, y_tr)
    guesses = knn.predict(X_te)
    report = classification_report(y_te, guesses)
    acc = accuracy_score(y_te, guesses)
    return report, acc
# Naive Bayes (MultinomialNB with OneVsRest)
def multilabel_naive_bayes(X, y):
    """Fit one-vs-rest multinomial naive Bayes and evaluate on a held-out split.

    NOTE(review): MultinomialNB requires non-negative features (e.g. counts
    or TF-IDF) — callers should verify X satisfies this.
    Uses an 80/20 split (random_state=42) and returns
    (classification_report, subset_accuracy) on the test portion.
    """
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=42)
    nb = OneVsRestClassifier(MultinomialNB())
    nb.fit(X_tr, y_tr)
    predicted = nb.predict(X_te)
    return classification_report(y_te, predicted), accuracy_score(y_te, predicted)
# XGBoost (with OneVsRest)
def multilabel_xgboost(X, y):
    """Fit one-vs-rest XGBoost classifiers and evaluate on a held-out split.

    Splits (X, y) 80/20 with random_state=42, trains one XGBClassifier
    (logloss eval metric) per label, and returns
    (classification_report, subset_accuracy) on the test portion.
    """
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    # use_label_encoder was removed: the parameter was deprecated in
    # XGBoost 1.3 and removed in 2.x, where passing it raises an error.
    model = OneVsRestClassifier(XGBClassifier(eval_metric='logloss'))
    model.fit(X_train, y_train)
    y_pred = model.predict(X_test)
    return classification_report(y_test, y_pred), accuracy_score(y_test, y_pred)
# MLP (Neural Net)
def multilabel_mlp(X, y):
    """Fit one multilayer perceptron per label and evaluate on a held-out split.

    Uses an 80/20 split (random_state=42) and, per output column, an
    MLPClassifier with one hidden layer of 100 units and max_iter=500.
    Returns (classification_report, subset_accuracy) on the test data.
    """
    X_tr, X_te, y_tr, y_te = train_test_split(X, y, test_size=0.2, random_state=42)
    net = MLPClassifier(hidden_layer_sizes=(100,), max_iter=500)
    estimator = MultiOutputClassifier(net)
    estimator.fit(X_tr, y_tr)
    outputs = estimator.predict(X_te)
    report = classification_report(y_te, outputs)
    return report, accuracy_score(y_te, outputs)