Machine Learning Algorithms

Machine learning algorithms: see the GitHub repository.

In [1]:
import numpy as np
import sklearn

 

In [2]:
#features = [[140, 'smooth'], [130, 'smooth'], [150, 'bumpy'], [170, 'bumpy']]
#labels = ['apple', 'apple', 'orange', 'orange']
#this is what the raw training data looks like; below, texture is encoded as smooth = 1, bumpy = 0 and labels as apple = 0, orange = 1

features = [[140, 1], [130, 1], [150, 0], [170, 0]]
labels = [0, 0, 1, 1]

 

In [3]:
#using a decision tree classifier; more on classifiers in the cells below
from sklearn import tree
clf = tree.DecisionTreeClassifier()

#learning algorithm
clf = clf.fit(features, labels)
clf.predict([[150, 0]])  #should predict orange = 1
Out[3]:
array([1])

 


In [4]:
#working with the iris dataset:
#import the dataset
#train a classifier
#predict the species for a new flower

 

In [5]:
#iris dataset is already available in sklearn
from sklearn.datasets import load_iris
iris = load_iris()
print(iris.feature_names)
print(iris.target_names)
['sepal length (cm)', 'sepal width (cm)', 'petal length (cm)', 'petal width (cm)']
['setosa' 'versicolor' 'virginica']

 

In [6]:
#feature data
print(iris.data[0])
[5.1 3.5 1.4 0.2]

 

In [7]:
#label data 
iris.target[0]  #0 setosa
Out[7]:
0
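
To map a numeric label back to its species name, you can index into iris.target_names (a small usage sketch, not an original cell):

print(iris.target_names[iris.target[0]])  #prints 'setosa'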

 

In [8]:
#working with the data: print each example's label and features
#(note: no manual counter is needed; the for loop reassigns i each iteration)
for i in range(len(iris.target)):
    print(f'Example {i}: label {iris.target[i]}, features {iris.data[i]}')
Example 0: label 0, features [5.1 3.5 1.4 0.2]
Example 1: label 0, features [4.9 3.  1.4 0.2]
Example 2: label 0, features [4.7 3.2 1.3 0.2]
Example 3: label 0, features [4.6 3.1 1.5 0.2]
Example 4: label 0, features [5.  3.6 1.4 0.2]
Example 5: label 0, features [5.4 3.9 1.7 0.4]
Example 6: label 0, features [4.6 3.4 1.4 0.3]
Example 7: label 0, features [5.  3.4 1.5 0.2]
Example 8: label 0, features [4.4 2.9 1.4 0.2]
Example 9: label 0, features [4.9 3.1 1.5 0.1]
Example 10: label 0, features [5.4 3.7 1.5 0.2]
Example 11: label 0, features [4.8 3.4 1.6 0.2]
Example 12: label 0, features [4.8 3.  1.4 0.1]
Example 13: label 0, features [4.3 3.  1.1 0.1]
Example 14: label 0, features [5.8 4.  1.2 0.2]
Example 15: label 0, features [5.7 4.4 1.5 0.4]
Example 16: label 0, features [5.4 3.9 1.3 0.4]
Example 17: label 0, features [5.1 3.5 1.4 0.3]
Example 18: label 0, features [5.7 3.8 1.7 0.3]
Example 19: label 0, features [5.1 3.8 1.5 0.3]
Example 20: label 0, features [5.4 3.4 1.7 0.2]
Example 21: label 0, features [5.1 3.7 1.5 0.4]
Example 22: label 0, features [4.6 3.6 1.  0.2]
Example 23: label 0, features [5.1 3.3 1.7 0.5]
Example 24: label 0, features [4.8 3.4 1.9 0.2]
Example 25: label 0, features [5.  3.  1.6 0.2]
Example 26: label 0, features [5.  3.4 1.6 0.4]
Example 27: label 0, features [5.2 3.5 1.5 0.2]
Example 28: label 0, features [5.2 3.4 1.4 0.2]
Example 29: label 0, features [4.7 3.2 1.6 0.2]
Example 30: label 0, features [4.8 3.1 1.6 0.2]
Example 31: label 0, features [5.4 3.4 1.5 0.4]
Example 32: label 0, features [5.2 4.1 1.5 0.1]
Example 33: label 0, features [5.5 4.2 1.4 0.2]
Example 34: label 0, features [4.9 3.1 1.5 0.2]
Example 35: label 0, features [5.  3.2 1.2 0.2]
Example 36: label 0, features [5.5 3.5 1.3 0.2]
Example 37: label 0, features [4.9 3.6 1.4 0.1]
Example 38: label 0, features [4.4 3.  1.3 0.2]
Example 39: label 0, features [5.1 3.4 1.5 0.2]
Example 40: label 0, features [5.  3.5 1.3 0.3]
Example 41: label 0, features [4.5 2.3 1.3 0.3]
Example 42: label 0, features [4.4 3.2 1.3 0.2]
Example 43: label 0, features [5.  3.5 1.6 0.6]
Example 44: label 0, features [5.1 3.8 1.9 0.4]
Example 45: label 0, features [4.8 3.  1.4 0.3]
Example 46: label 0, features [5.1 3.8 1.6 0.2]
Example 47: label 0, features [4.6 3.2 1.4 0.2]
Example 48: label 0, features [5.3 3.7 1.5 0.2]
Example 49: label 0, features [5.  3.3 1.4 0.2]
Example 50: label 1, features [7.  3.2 4.7 1.4]
Example 51: label 1, features [6.4 3.2 4.5 1.5]
Example 52: label 1, features [6.9 3.1 4.9 1.5]
Example 53: label 1, features [5.5 2.3 4.  1.3]
Example 54: label 1, features [6.5 2.8 4.6 1.5]
Example 55: label 1, features [5.7 2.8 4.5 1.3]
Example 56: label 1, features [6.3 3.3 4.7 1.6]
Example 57: label 1, features [4.9 2.4 3.3 1. ]
Example 58: label 1, features [6.6 2.9 4.6 1.3]
Example 59: label 1, features [5.2 2.7 3.9 1.4]
Example 60: label 1, features [5.  2.  3.5 1. ]
Example 61: label 1, features [5.9 3.  4.2 1.5]
Example 62: label 1, features [6.  2.2 4.  1. ]
Example 63: label 1, features [6.1 2.9 4.7 1.4]
Example 64: label 1, features [5.6 2.9 3.6 1.3]
Example 65: label 1, features [6.7 3.1 4.4 1.4]
Example 66: label 1, features [5.6 3.  4.5 1.5]
Example 67: label 1, features [5.8 2.7 4.1 1. ]
Example 68: label 1, features [6.2 2.2 4.5 1.5]
Example 69: label 1, features [5.6 2.5 3.9 1.1]
Example 70: label 1, features [5.9 3.2 4.8 1.8]
Example 71: label 1, features [6.1 2.8 4.  1.3]
Example 72: label 1, features [6.3 2.5 4.9 1.5]
Example 73: label 1, features [6.1 2.8 4.7 1.2]
Example 74: label 1, features [6.4 2.9 4.3 1.3]
Example 75: label 1, features [6.6 3.  4.4 1.4]
Example 76: label 1, features [6.8 2.8 4.8 1.4]
Example 77: label 1, features [6.7 3.  5.  1.7]
Example 78: label 1, features [6.  2.9 4.5 1.5]
Example 79: label 1, features [5.7 2.6 3.5 1. ]
Example 80: label 1, features [5.5 2.4 3.8 1.1]
Example 81: label 1, features [5.5 2.4 3.7 1. ]
Example 82: label 1, features [5.8 2.7 3.9 1.2]
Example 83: label 1, features [6.  2.7 5.1 1.6]
Example 84: label 1, features [5.4 3.  4.5 1.5]
Example 85: label 1, features [6.  3.4 4.5 1.6]
Example 86: label 1, features [6.7 3.1 4.7 1.5]
Example 87: label 1, features [6.3 2.3 4.4 1.3]
Example 88: label 1, features [5.6 3.  4.1 1.3]
Example 89: label 1, features [5.5 2.5 4.  1.3]
Example 90: label 1, features [5.5 2.6 4.4 1.2]
Example 91: label 1, features [6.1 3.  4.6 1.4]
Example 92: label 1, features [5.8 2.6 4.  1.2]
Example 93: label 1, features [5.  2.3 3.3 1. ]
Example 94: label 1, features [5.6 2.7 4.2 1.3]
Example 95: label 1, features [5.7 3.  4.2 1.2]
Example 96: label 1, features [5.7 2.9 4.2 1.3]
Example 97: label 1, features [6.2 2.9 4.3 1.3]
Example 98: label 1, features [5.1 2.5 3.  1.1]
Example 99: label 1, features [5.7 2.8 4.1 1.3]
Example 100: label 2, features [6.3 3.3 6.  2.5]
Example 101: label 2, features [5.8 2.7 5.1 1.9]
Example 102: label 2, features [7.1 3.  5.9 2.1]
Example 103: label 2, features [6.3 2.9 5.6 1.8]
Example 104: label 2, features [6.5 3.  5.8 2.2]
Example 105: label 2, features [7.6 3.  6.6 2.1]
Example 106: label 2, features [4.9 2.5 4.5 1.7]
Example 107: label 2, features [7.3 2.9 6.3 1.8]
Example 108: label 2, features [6.7 2.5 5.8 1.8]
Example 109: label 2, features [7.2 3.6 6.1 2.5]
Example 110: label 2, features [6.5 3.2 5.1 2. ]
Example 111: label 2, features [6.4 2.7 5.3 1.9]
Example 112: label 2, features [6.8 3.  5.5 2.1]
Example 113: label 2, features [5.7 2.5 5.  2. ]
Example 114: label 2, features [5.8 2.8 5.1 2.4]
Example 115: label 2, features [6.4 3.2 5.3 2.3]
Example 116: label 2, features [6.5 3.  5.5 1.8]
Example 117: label 2, features [7.7 3.8 6.7 2.2]
Example 118: label 2, features [7.7 2.6 6.9 2.3]
Example 119: label 2, features [6.  2.2 5.  1.5]
Example 120: label 2, features [6.9 3.2 5.7 2.3]
Example 121: label 2, features [5.6 2.8 4.9 2. ]
Example 122: label 2, features [7.7 2.8 6.7 2. ]
Example 123: label 2, features [6.3 2.7 4.9 1.8]
Example 124: label 2, features [6.7 3.3 5.7 2.1]
Example 125: label 2, features [7.2 3.2 6.  1.8]
Example 126: label 2, features [6.2 2.8 4.8 1.8]
Example 127: label 2, features [6.1 3.  4.9 1.8]
Example 128: label 2, features [6.4 2.8 5.6 2.1]
Example 129: label 2, features [7.2 3.  5.8 1.6]
Example 130: label 2, features [7.4 2.8 6.1 1.9]
Example 131: label 2, features [7.9 3.8 6.4 2. ]
Example 132: label 2, features [6.4 2.8 5.6 2.2]
Example 133: label 2, features [6.3 2.8 5.1 1.5]
Example 134: label 2, features [6.1 2.6 5.6 1.4]
Example 135: label 2, features [7.7 3.  6.1 2.3]
Example 136: label 2, features [6.3 3.4 5.6 2.4]
Example 137: label 2, features [6.4 3.1 5.5 1.8]
Example 138: label 2, features [6.  3.  4.8 1.8]
Example 139: label 2, features [6.9 3.1 5.4 2.1]
Example 140: label 2, features [6.7 3.1 5.6 2.4]
Example 141: label 2, features [6.9 3.1 5.1 2.3]
Example 142: label 2, features [5.8 2.7 5.1 1.9]
Example 143: label 2, features [6.8 3.2 5.9 2.3]
Example 144: label 2, features [6.7 3.3 5.7 2.5]
Example 145: label 2, features [6.7 3.  5.2 2.3]
Example 146: label 2, features [6.3 2.5 5.  1.9]
Example 147: label 2, features [6.5 3.  5.2 2. ]
Example 148: label 2, features [6.2 3.4 5.4 2.3]
Example 149: label 2, features [5.9 3.  5.1 1.8]

 

In [9]:
# train classifier
#hold out a few examples from the original data for testing (the test data);
#they are not part of the training data
test_idx = [0, 50, 100]

#remove them from the labels and the features
train_target = np.delete(iris.target, test_idx)  #labels; no axis needed because target is 1-D
train_data = np.delete(iris.data, test_idx, axis=0)  #features


#creating the testing dataset
test_target = iris.target[test_idx]
test_data = iris.data[test_idx]

 

In [10]:
#creating a decision tree classifier
clf = tree.DecisionTreeClassifier()
clf.fit(train_data, train_target)

print(test_target)
[0 1 2]

 

In [11]:
#predicting
print(clf.predict(test_data))
[0 1 2]

 


In [12]:
#what makes a good feature: an example

 

In [13]:
import matplotlib.pyplot as plt

greyhounds = 500
labs = 500

#simulated heights: greyhounds average 28 inches, labs 24, with the same spread
grey_height = 28 + 4 * np.random.randn(greyhounds)
lab_height = 24 + 4 * np.random.randn(labs)

plt.hist([grey_height, lab_height], stacked=True, color=['r', 'b'])
plt.show()
[Stacked histogram of greyhound (red) and Labrador (blue) heights; the two distributions overlap, so height alone cannot fully separate the breeds]

 


Machine learning: a classifier is a function along the lines of

def classify(features):
    # do some logic
    return label
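
As a concrete toy version using the fruit data from the first cells, a hand-written classifier might hard-code the rule the decision tree learned (the 150-gram threshold below is an illustrative guess consistent with that data, not taken from the original notebook):

def classify(features):
    weight, texture = features
    #heavier fruit in the toy data were oranges
    if weight >= 150:
        return 1  #orange
    return 0  #apple

print(classify([150, 0]))  #1, i.e. orange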

 

In [14]:
#4
from sklearn import datasets
iris = datasets.load_iris()


#features = X and labels = y, because y = f(X); the classifier acts as the function f
X = iris.data
y = iris.target

#splitting into train and test sets (half held out for testing)
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5)


#creating classifier
from sklearn import tree
my_classifier = tree.DecisionTreeClassifier()

my_classifier.fit(X_train, y_train)

predictions = my_classifier.predict(X_test)
# print(predictions)


#test accuracy
from sklearn.metrics import accuracy_score
print(accuracy_score(y_test, predictions))
0.9466666666666667
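
Note that train_test_split shuffles the data randomly, so the accuracy printed above varies slightly from run to run; if reproducibility matters, sklearn's random_state parameter pins the split (shown here as an optional tweak, not part of the original cell):

X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5, random_state=0)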

 

In [15]:
#now using a different classifier to do the same thing as above

#4
from sklearn import datasets
iris = datasets.load_iris()


#features = X and labels = y, because y = f(X); the classifier acts as the function f
X = iris.data
y = iris.target

#splitting into train and test sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5)


#creating the classifier; to swap in a different classifier, change only the two lines below
from sklearn.neighbors import KNeighborsClassifier
my_classifier = KNeighborsClassifier()

my_classifier.fit(X_train, y_train)

predictions = my_classifier.predict(X_test)
# print(predictions)


#test accuracy
from sklearn.metrics import accuracy_score
print(accuracy_score(y_test, predictions))
0.9466666666666667

 

In [16]:
#a neural network is a more sophisticated type of classifier, in the same family as decision trees and k-NN
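
Following the swap-two-lines pattern above, a neural network could be dropped into the same pipeline; a sketch, assuming sklearn's MLPClassifier (which the original notebook does not use):

from sklearn.neural_network import MLPClassifier

my_classifier = MLPClassifier(max_iter=1000)  #extra iterations so training converges on iris
my_classifier.fit(X_train, y_train)
predictions = my_classifier.predict(X_test)
print(accuracy_score(y_test, predictions))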

 


In [17]:
#creating my own classifier

 

In [21]:
import random

class ScrappyKNN():

    def fit(self, X_train, y_train):
        #memorize the training data
        self.X_train = X_train
        self.y_train = y_train

    def predict(self, X_test):
        #first attempt: guess a random training label for each test row
        predictions = []
        for row in X_test:
            label = random.choice(self.y_train)
            predictions.append(label)
        return predictions

from sklearn import datasets
iris = datasets.load_iris()


#features = X and labels = y, because y = f(X); the classifier acts as the function f
X = iris.data
y = iris.target

#splitting into train and test sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5)


#creating the classifier; only the two lines below changed from the sklearn version
# from sklearn.neighbors import KNeighborsClassifier
my_classifier = ScrappyKNN()

my_classifier.fit(X_train, y_train)

predictions = my_classifier.predict(X_test)
# print(predictions)


#test accuracy: random guessing over 3 classes should land near 1/3, as the output below shows
from sklearn.metrics import accuracy_score
print(accuracy_score(y_test, predictions))
0.37333333333333335

 


In [ ]:
#increasing accuracy

 

In [29]:
import random
from scipy.spatial import distance

#euclidean distance between two points
def euc(a, b):
    return distance.euclidean(a, b)

class ScrappyKNN():

    def fit(self, X_train, y_train):
        #memorize the training data
        self.X_train = X_train
        self.y_train = y_train

    def predict(self, X_test):
        predictions = []
        for row in X_test:
            label = self.closest(row)  #now predicts the label of the nearest training example
            predictions.append(label)
        return predictions

    def closest(self, row):
        #linear scan for the training example nearest to this row
        best_dist = euc(row, self.X_train[0])
        best_index = 0
        for i in range(1, len(self.X_train)):
            dist = euc(row, self.X_train[i])
            if dist < best_dist:
                best_dist = dist
                best_index = i
        return self.y_train[best_index]

from sklearn import datasets
iris = datasets.load_iris()


#features = X and labels = y, because y = f(X); the classifier acts as the function f
X = iris.data
y = iris.target

#splitting into train and test sets
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.5)


#creating the classifier; again, only these two lines changed from the sklearn version
# from sklearn.neighbors import KNeighborsClassifier
my_classifier = ScrappyKNN()

my_classifier.fit(X_train, y_train)

predictions = my_classifier.predict(X_test)
# print(predictions)


#test accuracy
from sklearn.metrics import accuracy_score
print(accuracy_score(y_test, predictions))
0.9466666666666667
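
A natural next step, sketched here as an extension rather than part of the original notebook, is to vote among the k nearest neighbors instead of taking only the single closest one; this is what sklearn's KNeighborsClassifier does (k = 5 by default):

from collections import Counter

class ScrappyKNN_K(ScrappyKNN):
    #hypothetical extension: majority vote among the k nearest training examples
    def __init__(self, k=5):
        self.k = k

    def closest(self, row):
        #indices of the k training examples nearest to this row
        dists = [euc(row, x) for x in self.X_train]
        nearest = sorted(range(len(dists)), key=lambda i: dists[i])[:self.k]
        votes = Counter(self.y_train[i] for i in nearest)
        return votes.most_common(1)[0][0]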

