
Logistic Regression

2017-11-24 10:51
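The listing below walks through logistic regression end to end: loadDataSet() reads the two-feature sample file testSet.txt (each row holds x1, x2 and a 0/1 label, and a constant 1.0 is prepended for the intercept), gradAscent() fits the weights by batch gradient ascent, plotBestFit() draws the data and the fitted decision boundary, stocGradAscent0() and stocGradAscent1() are stochastic one-sample-at-a-time variants, and classifyVector()/colicTest()/multiTest() apply the classifier to the horse colic dataset. For reference, the update that gradAscent() performs on every cycle can be written as

$$
w \leftarrow w + \alpha\, X^{\top}\bigl(y - \sigma(Xw)\bigr),
\qquad
\sigma(z) = \frac{1}{1 + e^{-z}},
$$

where X is the m x n data matrix (first column all ones), y the m x 1 label vector, and alpha the step size (0.001 in the code).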
from numpy import *

def loadDataSet():
    dataMat = []; labelMat = []
    fr = open('testSet.txt')
    for line in fr.readlines():
        lineArr = line.strip().split()
        dataMat.append([1.0, float(lineArr[0]), float(lineArr[1])])  # prepend 1.0 for the intercept term
        labelMat.append(int(lineArr[2]))
    return dataMat, labelMat

def sigmoid(inX):
    return 1.0 / (1 + exp(-inX))

def gradAscent(dataMatIn, classLabels):
    dataMatrix = mat(dataMatIn)              # convert list of lists to a NumPy matrix
    labelMat = mat(classLabels).transpose()  # convert labels to an m x 1 column vector
    m, n = shape(dataMatrix)
    alpha = 0.001
    maxCycles = 500
    weights = ones((n, 1))
    for k in range(maxCycles):               # heavy on matrix operations
        h = sigmoid(dataMatrix * weights)    # matrix mult: h is an m x 1 column vector
        error = labelMat - h                 # vector subtraction
        weights = weights + alpha * dataMatrix.transpose() * error  # matrix mult
    return weights

def plotBestFit(weights):
    import matplotlib.pyplot as plt
    dataMat, labelMat = loadDataSet()
    dataArr = array(dataMat)
    n = shape(dataArr)[0]
    xcord1 = []; ycord1 = []
    xcord2 = []; ycord2 = []
    for i in range(n):
        if int(labelMat[i]) == 1:
            xcord1.append(dataArr[i, 1]); ycord1.append(dataArr[i, 2])
        else:
            xcord2.append(dataArr[i, 1]); ycord2.append(dataArr[i, 2])
    fig = plt.figure()
    ax = fig.add_subplot(111)
    ax.scatter(xcord1, ycord1, s=30, c='red', marker='s')
    ax.scatter(xcord2, ycord2, s=30, c='green')
    x = arange(-3.0, 3.0, 0.1)
    y = (-weights[0] - weights[1] * x) / weights[2]  # decision boundary: w0 + w1*x1 + w2*x2 = 0
    ax.plot(x, y)
    plt.xlabel('X1'); plt.ylabel('X2')
    plt.show()

def stocGradAscent0(dataMatrix, classLabels):
    m, n = shape(dataMatrix)
    alpha = 0.01
    weights = ones(n)   # initialize to all ones
    for i in range(m):
        h = sigmoid(sum(dataMatrix[i] * weights))  # h is a single number, not a vector
        error = classLabels[i] - h
        weights = weights + alpha * error * dataMatrix[i]  # scalar * array scales every element of the sample
    return weights

def stocGradAscent1(dataMatrix, classLabels, numIter=150):
    m, n = shape(dataMatrix)
    weights = ones(n)   # initialize to all ones
    for j in range(numIter):
        dataIndex = list(range(m))  # list() so indices can be deleted under Python 3
        for i in range(m):
            alpha = 4 / (1.0 + j + i) + 0.0001  # alpha decreases with iteration but never reaches 0 because of the constant
            randIndex = int(random.uniform(0, len(dataIndex)))
            sample = dataIndex[randIndex]       # pick from the samples not yet used in this pass
            h = sigmoid(sum(dataMatrix[sample] * weights))
            error = classLabels[sample] - h
            weights = weights + alpha * error * dataMatrix[sample]
            del dataIndex[randIndex]
    return weights

def classifyVector(inX, weights):
    prob = sigmoid(sum(inX * weights))
    if prob > 0.5: return 1.0
    else: return 0.0

def colicTest():
    frTrain = open('horseColicTraining.txt'); frTest = open('horseColicTest.txt')
    trainingSet = []; trainingLabels = []
    for line in frTrain.readlines():
        currLine = line.strip().split('\t')
        lineArr = []
        for i in range(21):
            lineArr.append(float(currLine[i]))
        trainingSet.append(lineArr)
        trainingLabels.append(float(currLine[21]))
    trainWeights = stocGradAscent1(array(trainingSet), trainingLabels, 1000)
    errorCount = 0; numTestVec = 0.0
    for line in frTest.readlines():
        numTestVec += 1.0
        currLine = line.strip().split('\t')
        lineArr = []
        for i in range(21):
            lineArr.append(float(currLine[i]))
        if int(classifyVector(array(lineArr), trainWeights)) != int(currLine[21]):
            errorCount += 1
    errorRate = float(errorCount) / numTestVec
    print("the error rate of this test is: %f" % errorRate)
    return errorRate

def multiTest():
    numTests = 10; errorSum = 0.0
    for k in range(numTests):
        errorSum += colicTest()
    print("after %d iterations the average error rate is: %f" % (numTests, errorSum / float(numTests)))
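A minimal driver sketch for trying the functions out, assuming the listing is saved as logRegres.py (the module name is my own choice) and that testSet.txt, horseColicTraining.txt and horseColicTest.txt sit in the working directory in the formats the loaders above expect:

import logRegres          # assumes the listing above was saved as logRegres.py
from numpy import array

dataArr, labelMat = logRegres.loadDataSet()

# Batch gradient ascent returns an n x 1 NumPy matrix; getA() converts it to a
# plain ndarray so plotBestFit() can index the individual weights.
weights = logRegres.gradAscent(dataArr, labelMat)
logRegres.plotBestFit(weights.getA())

# The stochastic variant works on plain arrays and returns a 1-D weight vector.
stocWeights = logRegres.stocGradAscent1(array(dataArr), labelMat, numIter=500)
logRegres.plotBestFit(stocWeights)

# Horse colic experiment: ten train/test runs, with the averaged error rate printed.
logRegres.multiTest()

Because stocGradAscent1() picks its samples at random, the plotted boundary and the colic error rates will vary slightly from run to run.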