Iris - Colab [Link]
1 of 11 04/11/2025, 12:29
Iris - Colab [Link]
import pandas as pd
import numpy as np

# Iris dataset with an extra numeric species_id column (column set matches the
# plotly/datasets copy of iris.csv).
# NOTE(review): the original URL was lost in PDF extraction — confirm this is
# the source actually used in the notebook.
url = 'https://raw.githubusercontent.com/plotly/datasets/master/iris.csv'
df = pd.read_csv(url)
df.head()
sepal_length sepal_width petal_length petal_width species species_id
0 5.1 3.5 1.4 0.2 setosa 1
1 4.9 3.0 1.4 0.2 setosa 1
2 4.7 3.2 1.3 0.2 setosa 1
3 4.6 3.1 1.5 0.2 setosa 1
4 5.0 3.6 1.4 0.2 setosa 1
Start coding or generate with AI.
# (rows, columns) of the raw frame — the captured output below shows (150, 6).
df.shape
(150, 6)
# Class-balance check: each of the three iris species appears 50 times.
df["species"].value_counts()
count
species
setosa 50
versicolor 50
virginica 50
dtype: int64
# Column dtypes and non-null counts (output captured below: 150 non-null rows,
# four float64 feature columns, one object label, one int64 id).
df.info()
<class '[Link]'>
RangeIndex: 150 entries, 0 to 149
Data columns (total 6 columns):
# Column Non-Null Count Dtype
--- ------ -------------- -----
0 sepal_length 150 non-null float64
1 sepal_width 150 non-null float64
2 of 11 04/11/2025, 12:29
Iris - Colab [Link]
1 sepal_width 150 non-null float64
2 petal_length 150 non-null float64
3 petal_width 150 non-null float64
4 species 150 non-null object
5 species_id 150 non-null int64
dtypes: float64(4), int64(1), object(1)
memory usage: 7.2+ KB
# species_id mirrors the species label (values 1/2/3, 50 each), so it is
# redundant and gets dropped in the next cell.
df["species_id"].value_counts()
count
species_id
1 50
2 50
3 50
dtype: int64
# species_id duplicates the species label, so remove it in place.
df.drop('species_id', axis=1, inplace=True)
df.head()
sepal_length sepal_width petal_length petal_width species
0 5.1 3.5 1.4 0.2 setosa
1 4.9 3.0 1.4 0.2 setosa
2 4.7 3.2 1.3 0.2 setosa
3 4.6 3.1 1.5 0.2 setosa
4 5.0 3.6 1.4 0.2 setosa
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
import sklearn.metrics as metrics
from sklearn.metrics import accuracy_score, classification_report
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay

# Features: the four numeric sepal/petal measurements (species was the only
# non-numeric column left after dropping species_id).
X = df.drop('species', axis=1)
3 of 11 04/11/2025, 12:29
Iris - Colab [Link]
# Target: the species label.
y = df['species']

# 70/30 split -> 105 train rows, 45 test rows (matches the printed output).
# NOTE(review): the random_state was cut off in extraction — confirm the seed.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
print(f"Train data has {X_train.shape[0]} rows and {X_train.shape[1]} columns.")
print(f"Test data has {X_test.shape[0]} rows and {X_test.shape[1]} columns.")

# Baseline model: a decision tree on the raw features.
model = DecisionTreeClassifier()
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(accuracy_score(y_test, y_pred))

cm = confusion_matrix(y_test, y_pred)
# Display confusion matrix
disp = ConfusionMatrixDisplay(confusion_matrix=cm)
disp.plot(cmap='Blues')  # Optional: use a color map for better contrast
plt.title("Confusion Matrix")
plt.show()
Train data has 105 rows and 4 columns.
Test data has 45 rows and 4 columns.
[[19 0 0]
[ 0 13 0]
[ 0 0 13]]
precision recall f1-score support
setosa 1.00 1.00 1.00 19
versicolor 1.00 1.00 1.00 13
virginica 1.00 1.00 1.00 13
accuracy 1.00 45
macro avg 1.00 1.00 1.00 45
weighted avg 1.00 1.00 1.00 45
1.0
4 of 11 04/11/2025, 12:29
Iris - Colab [Link]
Start coding or generate with AI.
# Re-report the held-out accuracy of the last fitted model.
accuracy = accuracy_score(y_test, y_pred)
print(accuracy)
1.0
# Rebuild the display with species names on the axes instead of bare indices.
disp = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=model.classes_)
print("Confusion Matrix:\n", cm)
# Printing disp.plot() shows the Display object's repr (as in the output below).
print(disp.plot())
Confusion Matrix:
[[19 0 0]
[ 0 13 0]
[ 0 0 13]]
<[Link]._plot.confusion_matrix.ConfusionMatrixDisplay object at 0x7e2f86788c90
5 of 11 04/11/2025, 12:29
Iris - Colab [Link]
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn.model_selection import train_test_split
import sklearn.metrics as metrics
from sklearn.metrics import accuracy_score, classification_report
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.metrics import confusion_matrix, ConfusionMatrixDisplay

# Same features/target and 70/30 split as the decision-tree cell.
X = df.drop('species', axis=1)
y = df['species']
# NOTE(review): the random_state was cut off in extraction — confirm the seed.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
print(f"Train data has {X_train.shape[0]} rows and {X_train.shape[1]} columns.")
print(f"Test data has {X_test.shape[0]} rows and {X_test.shape[1]} columns.")

# Support-vector classifier with default hyperparameters.
model = SVC()
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(accuracy_score(y_test, y_pred))
cm = confusion_matrix(y_test, y_pred)
# Display confusion matrix
disp = ConfusionMatrixDisplay(confusion_matrix=cm)
disp.plot(cmap='Blues')  # Optional: use a color map for better contrast
plt.title("Confusion Matrix")
plt.show()
Train data has 105 rows and 4 columns.
Test data has 45 rows and 4 columns.
[[19 0 0]
[ 0 13 0]
[ 0 0 13]]
precision recall f1-score support
setosa 1.00 1.00 1.00 19
versicolor 1.00 1.00 1.00 13
virginica 1.00 1.00 1.00 13
6 of 11 04/11/2025, 12:29
Iris - Colab [Link]
virginica 1.00 1.00 1.00 13
accuracy 1.00 45
macro avg 1.00 1.00 1.00 45
weighted avg 1.00 1.00 1.00 45
1.0
from sklearn.neural_network import MLPClassifier

# max_iter raised from the default 200: the original run emitted a
# ConvergenceWarning (visible in the captured output below).
model = MLPClassifier(max_iter=1000)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(accuracy_score(y_test, y_pred))
cm = confusion_matrix(y_test, y_pred)
# Display confusion matrix
disp = ConfusionMatrixDisplay(confusion_matrix=cm)
disp.plot(cmap='Blues')  # Optional: use a color map for better contrast
plt.title("Confusion Matrix")
plt.show()
/usr/local/lib/python3.12/dist-packages/sklearn/neural_network/_multilayer_perceptron.p
[Link](
7 of 11 04/11/2025, 12:29
Iris - Colab [Link]
[Link](
[[19 0 0]
[ 0 13 0]
[ 0 0 13]]
precision recall f1-score support
setosa 1.00 1.00 1.00 19
versicolor 1.00 1.00 1.00 13
virginica 1.00 1.00 1.00 13
accuracy 1.00 45
macro avg 1.00 1.00 1.00 45
weighted avg 1.00 1.00 1.00 45
1.0
from sklearn.neighbors import KNeighborsClassifier

# k-nearest neighbours with the default k=5.
model = KNeighborsClassifier()
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(accuracy_score(y_test, y_pred))
cm = confusion_matrix(y_test, y_pred)
8 of 11 04/11/2025, 12:29
Iris - Colab [Link]
cm = confusion_matrix(y_test, y_pred)
# Display confusion matrix
disp = ConfusionMatrixDisplay(confusion_matrix=cm)
disp.plot(cmap='Blues')  # Optional: use a color map for better contrast
plt.title("Confusion Matrix")
plt.show()
[[19 0 0]
[ 0 13 0]
[ 0 0 13]]
precision recall f1-score support
setosa 1.00 1.00 1.00 19
versicolor 1.00 1.00 1.00 13
virginica 1.00 1.00 1.00 13
accuracy 1.00 45
macro avg 1.00 1.00 1.00 45
weighted avg 1.00 1.00 1.00 45
1.0
Start coding or generate with AI.
from sklearn.naive_bayes import GaussianNB
9 of 11 04/11/2025, 12:29
Iris - Colab [Link]
from sklearn.naive_bayes import GaussianNB

# Gaussian naive Bayes — the only model here that misclassifies a test row
# (one versicolor predicted as virginica; accuracy 0.978 in the output below).
model = GaussianNB()
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
print(confusion_matrix(y_test, y_pred))
print(classification_report(y_test, y_pred))
print(accuracy_score(y_test, y_pred))
cm = confusion_matrix(y_test, y_pred)
# Display confusion matrix
disp = ConfusionMatrixDisplay(confusion_matrix=cm)
disp.plot(cmap='Blues')  # Optional: use a color map for better contrast
plt.title("Confusion Matrix")
plt.show()
[[19 0 0]
[ 0 12 1]
[ 0 0 13]]
precision recall f1-score support
setosa 1.00 1.00 1.00 19
versicolor 1.00 0.92 0.96 13
virginica 0.93 1.00 0.96 13
accuracy 0.98 45
macro avg 0.98 0.97 0.97 45
weighted avg 0.98 0.98 0.98 45
0.9777777777777777
10 of 11 04/11/2025, 12:29
Iris - Colab [Link]
11 of 11 04/11/2025, 12:29