Posts

Showing posts from March, 2025

5

# Lab program 5: k-Nearest-Neighbour classification of 100 random 1-D points.
# First 50 points are "training" data labelled Class1 (<= 0.5) / Class2 (> 0.5);
# the last 50 are classified by majority vote among the k nearest train points,
# for several values of k, with predictions printed and scatter-plotted.
# NOTE(review): this snippet was flattened onto one line and TRUNCATED ("...")
# by the page extraction — the final plt.scatter call for Class2 and anything
# after it (legend/show) are missing, so the code below cannot run as written.
# Restore the line breaks and the missing tail from the original post before use.
import numpy as np, matplotlib.pyplot as plt from collections import Counter data = np.random.rand(100) train, test = data[:50], data[50:] labels = ['Class1' if x <= 0.5 else 'Class2' for x in train] def knn(x, k):     d = sorted([(abs(x - xi), li) for xi, li in zip(train, labels)])     return Counter([l for _, l in d[:k]]).most_common(1)[0][0] k_vals = [1, 2, 3, 4, 5, 20, 30] for k in k_vals:     print(f"\nResults for k = {k}:")     preds = [knn(x, k) for x in test]     for i, p in enumerate(preds, 51):         print(f"x{i} (value: {test[i-51]:.4f}) → {p}")     plt.figure()     plt.scatter(train, [0]*50, c=['blue' if l=='Class1' else 'red' for l in labels], label='Train')     plt.scatter([test[i] for i in range(50) if preds[i]=='Class1'], [1]*preds.count('Class1'), c='blue', marker='x', label='Class1')     plt.scatter([test[i] for i in range(50) if preds[i]=='Class2']...

4

# Lab program 4: Find-S algorithm.
# Reads a CSV whose last column is the class label; for every row labelled
# 'Yes' it generalises the hypothesis attribute-by-attribute, replacing any
# attribute that disagrees with the current hypothesis by '?'.
# NOTE(review): the initialisation `hypothesis = ['?' ...]` combined with the
# branch `if hypothesis[i] == '?' or hypothesis[i] == value` means the FIRST
# positive example always overwrites '?' with its values — verify this matches
# the intended Find-S behaviour (classically the seed is the most-specific
# hypothesis, not '?').
# NOTE(review): this snippet was flattened onto one line and TRUNCATED ("...")
# by the page extraction — the call to find_s_algorithm(file_path) and the
# final print are missing. Restore line breaks and the tail before use.
import pandas as pd  def find_s_algorithm(file_path):      data = pd.read_csv(file_path)      print("Training data:")      print(data)     attributes = data.columns[:-1]      class_label = data.columns[-1]      hypothesis = ['?' for _ in attributes]      for index, row in data.iterrows():          if row[class_label] == 'Yes':              for i, value in enumerate(row[attributes]):                  if hypothesis[i] == '?' or hypothesis[i] == value:                      hypothesis[i] = value                  else:                      hypothesis[i] = '?'      return hypothesis  file_path = 'training_data.csv'...

2

"""Lab program 2: visualize feature relationships in the California Housing data.

Loads the dataset into a DataFrame (appending the regression target as a
'target' column), then shows a correlation-matrix heatmap followed by a
pair plot of all columns.
"""
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.datasets import fetch_california_housing

# Fetch the dataset and assemble a single DataFrame of features + target.
california_housing = fetch_california_housing()
df = pd.DataFrame(california_housing.data, columns=california_housing.feature_names)
df['target'] = california_housing.target

# Pairwise Pearson correlations rendered as an annotated heatmap.
correlation_matrix = df.corr()
plt.figure(figsize=(12, 8))
sns.heatmap(correlation_matrix, annot=True, cmap='coolwarm', fmt='.2f', cbar=True)
plt.title("Correlation Matrix Heatmap")
plt.show()

# Scatter-matrix of every column against every other column.
sns.pairplot(df, height=2.5, plot_kws={'alpha': 0.7})
plt.suptitle("Pair Plot of California Housing Dataset", y=1.02)
plt.show()

1

# Lab program 1: exploratory analysis of the California Housing dataset.
# Prints the dataset description and head, draws histograms of all numeric
# features, then per-feature box plots (3x4 subplot grid, target excluded),
# and finally begins an IQR-based outlier report over the numeric columns.
# NOTE(review): this snippet was flattened onto one line and TRUNCATED ("...")
# by the page extraction — the select_dtypes(include=['float64', 'int64...
# call and the rest of the outlier-detection loop are missing, so the code
# below cannot run as written. Restore line breaks and the tail before use.
  import pandas as pd import seaborn as sns import matplotlib.pyplot as plt from sklearn.datasets import fetch_california_housing california_housing = fetch_california_housing () df = pd.DataFrame ( california_housing.data , columns=california_housing.feature_names ) df [ 'target' ] = california_housing.target print ( california_housing.DESCR ) print ( "First 5 rows of the dataset:" ) print ( df.head ()) df.hist ( bins= 30 , figsize= ( 12 , 10 )) plt.suptitle ( "Histograms of Numerical Features" , fontsize= 16 ) plt.show () plt.figure ( figsize= ( 12 , 10 )) for i , feature in enumerate ( df.columns [: -1 ]):   # Exclude 'target' column     plt.subplot ( 3 , 4 , i + 1 )   # 3 rows, 4 columns     sns.boxplot ( df [ feature ])     plt.title ( f 'Box Plot of { feature } ' ) plt.tight_layout () plt.show () print ( "Outliers Detection:" ) for feature in df.select_dtypes ( include= [ 'float64' , 'int64...

3

# Lab program 3: PCA of the Iris dataset.
# Standardises the four Iris features, projects them onto the first two
# principal components, scatter-plots the projection coloured by species
# (axis labels show each component's explained-variance ratio), then plots
# the cumulative explained variance against the number of components.
# NOTE(review): this snippet was flattened onto one line and TRUNCATED ("...")
# by the page extraction — it ends at "plt.show..." and any statements after
# the final show() are missing. Restore the line breaks (and confirm the
# missing tail against the original post) before running.
 import numpy as np import matplotlib.pyplot as plt from sklearn.datasets import load_iris from sklearn.preprocessing import StandardScaler from sklearn.decomposition import PCA iris = load_iris() X_scaled = StandardScaler().fit_transform(iris.data) pca = PCA(n_components=2) X_pca = pca.fit_transform(X_scaled) plt.figure(figsize=(10, 8)) for target, target_name in zip(range(3), iris.target_names):     plt.scatter(X_pca[iris.target == target, 0], X_pca[iris.target == target, 1], label=target_name, alpha=0.8) plt.xlabel(f'PC1 ({pca.explained_variance_ratio_[0]:.2%})') plt.ylabel(f'PC2 ({pca.explained_variance_ratio_[1]:.2%})') plt.title('PCA of Iris Dataset') plt.legend() plt.grid(True, alpha=0.3) plt.show() plt.plot(np.cumsum(pca.explained_variance_ratio_), 'bo-') plt.xlabel('Number of Components') plt.ylabel('Cumulative Explained Variance') plt.title('Explained Variance vs. Number of Components') plt.grid(True, alpha=0.3) plt.show...