说明
1、PCA是最经典、最实用的降维技术,尤其在辅助图形识别中表现突出。
2、用来减少数据集的维度,同时保持数据集中对方差贡献最大的特征。
保持低阶主成分,而忽略高阶成分,低阶成分往往能保留数据的最重要部分。
实例
1
2
3
4
5
6
7
8
9
10
11
12
|
# Feature selection: VarianceThreshold drops low-variance features,
# i.e. features that barely differ across samples.
from sklearn.feature_selection import VarianceThreshold

# Remove every feature whose variance is <= 1.0 (default threshold is 0.0,
# which only removes constant features).
var = VarianceThreshold(threshold=1.0)
data = var.fit_transform([[0, 2, 0, 3],
                          [0, 1, 4, 3],
                          [0, 1, 1, 3]])
print(data)
# Columns 0 and 3 are constant (variance 0) and column 1 has variance ~0.22,
# so only column 2 (variance ~2.89) survives:
# [[0]
#  [4]
#  [1]]
内容扩展:
python实现拉普拉斯降维
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
|
import math

import numpy as np


def knn(inX, dataSet, k):
    """Return the indices of the k nearest rows of dataSet to inX.

    Distance is plain Euclidean; ties fall back to np.argsort order.
    Note: if inX is itself a row of dataSet, that row (distance 0) is
    included in the result — laplaEigen relies on this self-neighbor.
    """
    diff = np.asarray(dataSet) - np.asarray(inX)
    sq_dist = (diff ** 2).sum(axis=1)
    # No need for the sqrt of the original: argsort order is identical.
    return np.argsort(sq_dist)[:k]


def laplaEigen(dataMat, k, t):
    """Laplacian eigenmaps: eigendecompose the random-walk Laplacian.

    Builds a heat-kernel affinity W over each point's k nearest
    neighbors (W[i, j] = exp(-||xi - xj||^2 / t)), the degree matrix D,
    and returns the eigenvalues/eigenvectors of D^-1 (D - W).

    Args:
        dataMat: (m, n) array-like of m samples.
        k: number of nearest neighbors per point (self included).
        t: heat-kernel bandwidth; larger t -> flatter affinities.

    Returns:
        (lamda, f): eigenvalues and eigenvectors from np.linalg.eig
        (possibly complex). The smallest eigenvalue is ~0 (constant
        eigenvector); the low-dimensional embedding uses the
        eigenvectors of the next-smallest eigenvalues.
    """
    dataMat = np.asarray(dataMat, dtype=float)
    m = dataMat.shape[0]
    W = np.zeros((m, m))
    D = np.zeros((m, m))
    for i in range(m):
        for j in knn(dataMat[i, :], dataMat, k):
            sq_dist = ((dataMat[i, :] - dataMat[j, :]) ** 2).sum()
            W[i, j] = math.exp(-sq_dist / t)
            D[i, i] += W[i, j]
    L = D - W
    # Original computed Dinv = np.linalg.inv(D) but then inverted D a
    # second time via the matrix attribute D.I; invert once and reuse.
    Dinv = np.linalg.inv(D)
    lamda, f = np.linalg.eig(Dinv @ L)
    return lamda, f


def main():
    """Demo: embed a swiss roll and plot original vs. 2-D embedding."""
    # Heavy/optional dependencies are imported here so that importing
    # this module (e.g. for testing) needs only numpy.
    from sklearn.datasets import make_swiss_roll
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D  # noqa: F401 (enables 3d proj)

    dataMat, color = make_swiss_roll(n_samples=2000)
    lamda, f = laplaEigen(dataMat, 11, 5.0)
    fm, fn = f.shape
    print('fm,fn:', fm, fn)

    lamdaIndicies = np.argsort(lamda)
    first = 0
    second = 0
    print(lamdaIndicies[0], lamdaIndicies[1])
    # Skip the trivial ~0 eigenvalue: take the first eigenvalue with a
    # real part above the tolerance and its successor as the two
    # embedding coordinates.
    for i in range(fm):
        if lamda[lamdaIndicies[i]].real > 1e-5:
            print(lamda[lamdaIndicies[i]])
            first = lamdaIndicies[i]
            second = lamdaIndicies[i + 1]
            break
    print(first, second)

    fig = plt.figure('origin')
    ax1 = fig.add_subplot(111, projection='3d')
    ax1.scatter(dataMat[:, 0], dataMat[:, 1], dataMat[:, 2],
                c=color, cmap=plt.cm.Spectral)
    fig = plt.figure('lowdata')
    ax2 = fig.add_subplot(111)
    ax2.scatter(f[:, first], f[:, second], c=color, cmap=plt.cm.Spectral)
    plt.show()


if __name__ == "__main__":
    main()
到此这篇关于Python特征降维知识点总结的文章就介绍到这了。更多相关Python特征降维的内容,请搜索服务器之家以前的文章,或继续浏览下面的相关文章。希望大家以后多多支持服务器之家!
原文链接:https://www.py.cn/jishu/jichu/32598.html