ML Lab Exp 6 (sklearn Decision Tree)

#Load libraries
import pandas as pd
from sklearn.tree import DecisionTreeClassifier
from sklearn.model_selection import train_test_split
from sklearn import metrics

#Load the Pima Indians Diabetes dataset and name the columns
col_names = ['pregnant', 'glucose', 'bp', 'skin', 'insulin', 'bmi', 'pedigree', 'age', 'label']
pima = pd.read_csv("/content/diabetes.csv", header=None, names=col_names)
pima.head()
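
As a quick sanity check (optional, not part of the original lab), the shape and class balance of the loaded frame can be inspected. This sketch assumes the CSV at /content/diabetes.csv contains only data rows; if its first row holds column names, add skiprows=1 to read_csv so that row is not treated as a record.

#Optional: inspect the loaded data
print(pima.shape)                     # the Pima diabetes dataset has 768 rows and 9 columns
print(pima['label'].value_counts())   # class balance of the target variable
print(pima.isnull().sum())            # check for missing values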


#Feature Selection
#Split the dataset into features and the target variable
feature_cols = ['pregnant', 'insulin', 'bmi', 'age', 'glucose', 'bp', 'pedigree']
X = pima[feature_cols]   # Features
y = pima.label           # Target variable

#Split the dataset into a training set and a test set (70% train, 30% test)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=1)
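
Because the two classes are not equally represented, a stratified split keeps the same class proportions in the training and test sets. This is an optional variant, not part of the original lab, and using it will change the accuracy figures reported below.

#Optional: stratified split (alternative to the plain split above)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=1, stratify=y)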


#Building Decision Tree Model
# Create a Decision Tree classifier object with default parameters
clf = DecisionTreeClassifier()
# Train the classifier on the training set
clf = clf.fit(X_train, y_train)
# Predict the response for the test set
y_pred = clf.predict(X_test)

#Evaluating the Model
# Model accuracy: how often is the classifier correct?
print("Accuracy:", metrics.accuracy_score(y_test, y_pred))
#Accuracy: 0.658008658008658
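
Accuracy alone does not show how the errors are split between the two classes. A confusion matrix and classification report (both from sklearn.metrics, which is already imported) give a fuller picture; this is an optional addition to the lab.

#Optional: detailed evaluation of the unpruned tree
print(metrics.confusion_matrix(y_test, y_pred))
print(metrics.classification_report(y_test, y_pred))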


#Optimizing Decision Tree Performance
# Create a Decision Tree classifier using the entropy criterion
# and a limited depth (pre-pruning) to reduce overfitting
clf = DecisionTreeClassifier(criterion="entropy", max_depth=3)
# Train the classifier on the training set
clf = clf.fit(X_train, y_train)
# Predict the response for the test set
y_pred = clf.predict(X_test)
print("Accuracy:", metrics.accuracy_score(y_test, y_pred))
#Accuracy: 0.7705627705627706
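
The value max_depth=3 above is picked by hand. One way to choose the pruning parameters less arbitrarily is a small cross-validated grid search over the training split; the sketch below is only an illustration (the parameter grid and 5-fold CV are assumptions, not part of the original lab).

#Optional: cross-validated search over tree depth and split criterion
from sklearn.model_selection import GridSearchCV

param_grid = {'criterion': ['gini', 'entropy'], 'max_depth': [2, 3, 4, 5, 6, None]}
grid = GridSearchCV(DecisionTreeClassifier(random_state=1), param_grid, cv=5)
grid.fit(X_train, y_train)
print("Best parameters:", grid.best_params_)
print("Cross-validated accuracy:", grid.best_score_)
print("Test accuracy:", grid.score(X_test, y_test))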

