Implementing the KNN Classification Algorithm in Python


import numpy as np
import pandas as pd
import matplotlib as mpl
import matplotlib.pyplot as plt
import sklearn.datasets as data
from sklearn.neighbors import KNeighborsClassifier  # not used by the hand-written KNN below

class KNN:
    """A minimal k-nearest-neighbours classifier with inverse-distance weighted voting."""

    def __init__(self, k):
        self.k = k

    def fit(self, x, y):
        # store the training data; labels must be non-negative integers for np.bincount
        self.x = np.asarray(x)
        self.y = np.asarray(y).astype(int)

    def predict(self, x):
        x = np.asarray(x)
        result = []
        for i in x:
            # Euclidean distance from this test sample to every training sample
            dis = np.sqrt(np.sum((i - self.x) ** 2, axis=1))
            # indices that would sort the distances in ascending order
            index = dis.argsort()
            # keep only the k nearest neighbours
            index = index[:self.k]
            # version 2: weight each neighbour's vote by the inverse of its distance
            # (assumes no zero distances, i.e. the test sample is not also a training sample)
            count = np.bincount(self.y[index], weights=1 / dis[index])
            result.append(count.argmax())
        return np.asarray(result)
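The weighted vote in predict is the only non-obvious step: np.bincount sums the weights landing on each integer label, so the winning class is the one whose nearby neighbours are collectively closest. A minimal illustration with made-up labels and distances (standalone, not part of the script):

import numpy as np

labels = np.array([0, 2, 2])       # classes of the 3 nearest neighbours
dis = np.array([2.0, 2.5, 4.0])    # their distances to the test sample
votes = np.bincount(labels, weights=1 / dis)
print(votes)           # [0.5  0.   0.65]
print(votes.argmax())  # 2: the two class-2 neighbours together outweigh the single closest class-0 neighbour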

if __name__ == '__main__':
    iris = data.load_iris()
    x_train = iris.data
    y_train = iris.target
    my = KNN(k=3)
    plt.figure(figsize=(10, 10))
    # my.fit(x_train, y_train)
    # print(my.predict(x_train))
    # print((y_train == my.predict(x_train)) + 0)
    # print(np.array(x_train))
    # print(y_train[:, np.newaxis])

    # append the labels as a fifth column and wrap everything in a DataFrame
    # (this reassigns the name `data`, shadowing the sklearn.datasets alias from here on)
    data = np.c_[x_train, y_train[:, np.newaxis]]
    data = pd.DataFrame(data)
    # split by class, then shuffle each class with a fixed seed
    t0 = data[data.iloc[:, 4] == 0]
    t1 = data[data.iloc[:, 4] == 1]
    t2 = data[data.iloc[:, 4] == 2]
    print(type(t0))
    t0 = t0.sample(len(t0), random_state=0)
    t1 = t1.sample(len(t1), random_state=0)
    t2 = t2.sample(len(t2), random_state=0)
    # 40 samples per class for training, the remaining 10 per class for testing
    train_x = pd.concat([t0.iloc[:40, :-1], t1.iloc[:40, :-1], t2.iloc[:40, :-1]], axis=0)
    train_y = pd.concat([t0.iloc[:40, -1], t1.iloc[:40, -1], t2.iloc[:40, -1]], axis=0)
    test_x = pd.concat([t0.iloc[40:, :-1], t1.iloc[40:, :-1], t2.iloc[40:, :-1]], axis=0)
    test_y = pd.concat([t0.iloc[40:, -1], t1.iloc[40:, -1], t2.iloc[40:, -1]], axis=0)
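    # The manual per-class split above can also be reproduced with scikit-learn's
    # train_test_split; an illustrative alternative only (the alt_* names are
    # hypothetical and not used by the rest of the script):
    from sklearn.model_selection import train_test_split
    alt_train_x, alt_test_x, alt_train_y, alt_test_y = train_test_split(
        x_train, y_train, test_size=30, stratify=y_train, random_state=0)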

    my.fit(train_x, train_y)
    result = my.predict(test_x)
    # accuracy on the held-out test set
    print(np.sum(result == test_y) / len(result))

    # use a Chinese font so the Chinese legend labels render, and keep the ASCII minus sign
    mpl.rcParams['font.family'] = 'SimHei'
    mpl.rcParams['axes.unicode_minus'] = False

    # plot the first two features (columns 0 and 1) of the training rows of each class
    plt.scatter(x=t0[0][:40], y=t0[1][:40], c='r', label='資料1')
    plt.scatter(x=t1[0][:40], y=t1[1][:40], c='g', label='資料2')
    plt.scatter(x=t2[0][:40], y=t2[1][:40], c='b', label='資料3')
    # split the test samples into correctly and incorrectly classified ones
    right = test_x[result == test_y]
    wrong = test_x[result != test_y]
    print(right)
    print("*************")
    print(wrong)
    plt.scatter(x=right[0], y=right[1], c='c', marker='x', label='資料4')
    plt.scatter(x=wrong[0], y=wrong[1], c='m', marker='x', label='資料5')
    plt.legend()
    plt.show()
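For comparison, the same task can be handled by scikit-learn's built-in KNeighborsClassifier, which the script imports but never uses. A minimal sketch under slightly different assumptions (it uses a stratified train_test_split rather than the manual per-class split above; weights='distance' corresponds to the inverse-distance voting of the hand-written class):

import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.neighbors import KNeighborsClassifier

iris = load_iris()
# stratified 120/30 split, 10 test samples per class
train_x, test_x, train_y, test_y = train_test_split(
    iris.data, iris.target, test_size=30, stratify=iris.target, random_state=0)

# weights='distance' weights each neighbour's vote by the inverse of its distance
clf = KNeighborsClassifier(n_neighbors=3, weights='distance')
clf.fit(train_x, train_y)
print(clf.score(test_x, test_y))  # accuracy on the held-out samples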
