ml-5_31
[ ]: import numpy as np
from sklearn import datasets
from sklearn.svm import SVC
from sklearn.model_selection import train_test_split
from sklearn.metrics import hinge_loss
iris = datasets.load_iris()
X = iris.data
y = iris.target
X = X[y != 2]
y = y[y != 2]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
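The cell above imports hinge_loss but never calls it. A minimal sketch of how it could be used on this split, assuming a linear SVC fit the same way as in the next cell and passing its decision_function values as the predicted decisions:

[ ]: # Sketch (assumption): hinge loss of a linear SVC on the held-out split,
     # using signed distances from decision_function as the predicted decisions.
     clf = SVC(kernel='linear', C=1.0)
     clf.fit(X_train, y_train)
     pred_decision = clf.decision_function(X_test)
     print(f'Hinge loss on test set: {hinge_loss(y_test, pred_decision):.4f}')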
[ ]: import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
data = datasets.load_iris()
X = data.data
y = data.target
X = X[y != 2]
y = y[y != 2]
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)
svm = SVC(kernel='linear', C=1.0)
svm.fit(X_train, y_train)
y_pred = svm.predict(X_test)
accuracy = accuracy_score(y_test, y_pred)
print(f'Accuracy: {accuracy:.4f}')
plt.figure(figsize=(8, 6))
X_train_2d = X_train[:, :2]
X_test_2d = X_test[:, :2]
svm_2d = SVC(kernel='linear', C=1.0)
svm_2d.fit(X_train_2d, y_train)
xx, yy = np.meshgrid(np.linspace(X_train_2d[:, 0].min() - 1, X_train_2d[:, 0].max() + 1, 100),
                     np.linspace(X_train_2d[:, 1].min() - 1, X_train_2d[:, 1].max() + 1, 100))
Z = svm_2d.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, alpha=0.75)
plt.scatter(X_train_2d[:, 0], X_train_2d[:, 1], c=y_train, edgecolors='k', marker='o',
            s=100, label='Train Data')
plt.scatter(X_test_2d[:, 0], X_test_2d[:, 1], c=y_test, edgecolors='r', marker='^',
            s=100, label='Test Data')
plt.title('SVM Decision Boundary (2D)')
plt.xlabel('Feature 1 (Sepal Length)')
plt.ylabel('Feature 2 (Sepal Width)')
plt.legend()
plt.show()
Accuracy: 1.0000
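For reference, the support vectors of the 2D model could be overlaid on the same decision regions. A minimal sketch, assuming svm_2d, xx, yy, and Z from the cell above are still in scope:

[ ]: # Sketch (assumption): re-draw the 2D decision regions and circle the support
     # vectors of svm_2d, which are exposed via its support_vectors_ attribute.
     plt.figure(figsize=(8, 6))
     plt.contourf(xx, yy, Z, alpha=0.75)
     plt.scatter(X_train_2d[:, 0], X_train_2d[:, 1], c=y_train, edgecolors='k', s=60)
     plt.scatter(svm_2d.support_vectors_[:, 0], svm_2d.support_vectors_[:, 1],
                 s=200, facecolors='none', edgecolors='b', label='Support Vectors')
     plt.title('Support Vectors of the 2D Linear SVM')
     plt.xlabel('Feature 1 (Sepal Length)')
     plt.ylabel('Feature 2 (Sepal Width)')
     plt.legend()
     plt.show()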
[ ]: import numpy as np
from sklearn import datasets
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score
import matplotlib.pyplot as plt
data = datasets.load_iris()
X = data.data
y = data.target
X = X[y != 2]
y = y[y != 2]
# Hold out 30% of the samples; decision boundaries are plotted on the first two features only
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, random_state=42)

# Grid covering the first two training features, used to draw each decision boundary
xx, yy = np.meshgrid(np.linspace(X_train[:, 0].min() - 1, X_train[:, 0].max() + 1, 100),
                     np.linspace(X_train[:, 1].min() - 1, X_train[:, 1].max() + 1, 100))

def plot_svm_decision_boundary(C_values):
    plt.figure(figsize=(12, 8))
    accuracies = []
    for i, C in enumerate(C_values):
        # Fit a linear SVM on the first two features for the current value of C
        svm = SVC(kernel='linear', C=C)
        svm.fit(X_train[:, :2], y_train)
        accuracies.append(accuracy_score(y_test, svm.predict(X_test[:, :2])))
        Z = svm.predict(np.c_[xx.ravel(), yy.ravel()])
        Z = Z.reshape(xx.shape)
        plt.subplot(2, 3, i + 1)
        plt.contourf(xx, yy, Z, alpha=0.75)
        plt.scatter(X_train[:, 0], X_train[:, 1], c=y_train, edgecolors='k', marker='o',
                    s=100, label='Train Data')
        plt.scatter(X_test[:, 0], X_test[:, 1], c=y_test, edgecolors='r', marker='^',
                    s=100, label='Test Data')
        plt.title(f'SVM Decision Boundary for C={C}')
        plt.xlabel('Feature 1 (Sepal Length)')
        plt.ylabel('Feature 2 (Sepal Width)')
        plt.legend()
    plt.tight_layout()

    # Test accuracy as a function of the regularization strength C
    plt.figure(figsize=(8, 6))
    plt.plot(C_values, accuracies, marker='o', linestyle='-', color='b')
    plt.xscale('log')
    plt.xlabel('Regularization Parameter (C)')
    plt.ylabel('Test Accuracy')
    plt.title('Effect of Regularization (C) on SVM Accuracy')
    plt.grid(True)
    plt.show()

# Six values of C fill the 2x3 subplot grid
C_values = [0.01, 0.1, 1, 10, 100, 1000]
plot_svm_decision_boundary(C_values)
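Rather than reading a value of C off the test-accuracy curve, C could be chosen by cross-validation on the training split. A minimal sketch using GridSearchCV over the same C_values range (an assumption, not part of the original exercise):

[ ]: # Sketch (assumption): select C by 5-fold cross-validation on the training data,
     # searching over the same C_values list used for the plots above.
     from sklearn.model_selection import GridSearchCV
     grid = GridSearchCV(SVC(kernel='linear'), param_grid={'C': C_values}, cv=5)
     grid.fit(X_train[:, :2], y_train)
     print(f'Best C: {grid.best_params_["C"]}, CV accuracy: {grid.best_score_:.4f}')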