We are importing MLPClassifier and training it on the same split:

from sklearn.neural_network import MLPClassifier
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score

# Create a multi-layer perceptron classifier
model = MLPClassifier()

# Train the model (X_train, y_train, X_test, y_test come from the earlier train/test split)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

# Evaluate the predictions
cm = confusion_matrix(y_test, y_pred)
print("Confusion matrix:", cm)
print("Precision score:", precision_score(y_test, y_pred))
print("Recall score:", recall_score(y_test, y_pred))
print("F1 score:", f1_score(y_test, y_pred))
Reply
from sklearn.model_selection import cross_validate
from sklearn.linear_model import LogisticRegression
from sklearn.neighbors import KNeighborsClassifier
from sklearn.svm import SVC
from sklearn.naive_bayes import GaussianNB
from sklearn.tree import DecisionTreeClassifier

# Initialize the candidate models.
# (DecisionTreeRegressor is left out: it is a regression model, so the
# classification metrics below do not apply to it.)
models = {
    "Logistic Regression": LogisticRegression(),
    "K Nearest Neighbors": KNeighborsClassifier(),
    "Support Vector Machines": SVC(kernel='linear'),  # linear kernel, since SVC() defaults to 'rbf' and would duplicate the entry below
    "Kernel SVM": SVC(kernel='rbf'),
    "Naive Bayes": GaussianNB(),
    "Decision Tree Classification": DecisionTreeClassifier()
}

# Load your data and split it into features X and target variable y.

# Define the evaluation metrics. cross_val_score accepts only a single
# scorer, so we use cross_validate, which takes a dict of scorers.
scoring = {
    'accuracy': 'accuracy',
    'precision': 'precision_macro',
    'recall': 'recall_macro',
    'f1': 'f1_macro'
}

# Perform model selection with 5-fold cross-validation
results = {}
for name, model in models.items():
    cv = cross_validate(model, X, y, cv=5, scoring=scoring)
    results[name] = {
        'Accuracy': cv['test_accuracy'].mean(),
        'Precision': cv['test_precision'].mean(),
        'Recall': cv['test_recall'].mean(),
        'F1': cv['test_f1'].mean()
    }

# Print the results
for name, scores in results.items():
    print(f"Model: {name}")
    print(f"Accuracy: {scores['Accuracy']:.4f}")
    print(f"Precision: {scores['Precision']:.4f}")
    print(f"Recall: {scores['Recall']:.4f}")
    print(f"F1: {scores['F1']:.4f}")
    print()
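A small refinement worth considering: SVC and KNeighborsClassifier are sensitive to feature scale, so the comparison is usually fairer if each candidate is wrapped in a Pipeline with a StandardScaler. Below is a minimal sketch, assuming the same X, y, models, and scoring defined above:

from sklearn.model_selection import cross_validate
from sklearn.pipeline import make_pipeline
from sklearn.preprocessing import StandardScaler

# Wrap each candidate model in a scaling pipeline; cross_validate then
# fits the scaler only on the training folds, avoiding data leakage.
scaled_models = {name: make_pipeline(StandardScaler(), model)
                 for name, model in models.items()}

for name, pipe in scaled_models.items():
    cv = cross_validate(pipe, X, y, cv=5, scoring=scoring)
    print(name, round(cv['test_accuracy'].mean(), 4))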
Reply
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score

# Create a gradient boosting classifier
model = GradientBoostingClassifier()

# Train the model on the existing train/test split
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

# Evaluate the predictions
cm = confusion_matrix(y_test, y_pred)
print("Confusion matrix:", cm)
print("Precision score:", precision_score(y_test, y_pred))
print("Recall score:", recall_score(y_test, y_pred))
print("F1 score:", f1_score(y_test, y_pred))
Reply
from sklearn.neural_network import MLPClassifier
from sklearn.metrics import confusion_matrix, precision_score, recall_score, f1_score

# Create a multi-layer perceptron classifier
model = MLPClassifier()

# Train the model on the existing train/test split
model.fit(X_train, y_train)
y_pred = model.predict(X_test)

# Evaluate the predictions
cm = confusion_matrix(y_test, y_pred)
print("Confusion matrix:", cm)
print("Precision score:", precision_score(y_test, y_pred))
print("Recall score:", recall_score(y_test, y_pred))
print("F1 score:", f1_score(y_test, y_pred))
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score

# Assuming you have a dataset with features X and corresponding labels y,
# split the dataset into training and testing sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Create a Random Forest classifier object
rf = RandomForestClassifier(n_estimators=100, random_state=42)

# Train the classifier on the training data
rf.fit(X_train, y_train)

# Make predictions on the test data
y_pred = rf.predict(X_test)

# Evaluate the accuracy of the classifier
accuracy = accuracy_score(y_test, y_pred)
print("Accuracy:", accuracy)
Reply
Everything is clear. Alhamdulillah
Reply
Sir, today's lecture was difficult.
Reply
It currently looks difficult, but inshaAllah after 1st Oct it will be easier to understand when I attempt it a second time.
Reply