粒子群优化算法-python实现
2015-10-06 22:28
573 查看
PSOIndividual.py
PSO.py
运行程序:
ObjFunction见简单遗传算法-python实现。
import copy

import numpy as np


class PSOIndividual:

    '''
    individual (particle) of PSO: holds a position (chrom), a velocity,
    and the particle's personal-best position and fitness
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables, a 2 x vardim array
               (row 0 = lower bounds, row 1 = upper bounds)
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.

    def generate(self):
        '''
        generate a random chromosome uniformly within the bounds,
        plus a random initial velocity and zeroed personal-best trackers
        '''
        dim = self.vardim  # renamed from `len`, which shadowed the builtin
        rnd = np.random.random(size=dim)
        self.chrom = np.zeros(dim)
        self.velocity = np.random.random(size=dim)
        for i in range(dim):  # range: Python 3 (xrange was removed)
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]
        self.bestPosition = np.zeros(dim)
        self.bestFitness = 0.

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome via the objective function
        (GrieFunc from the companion ObjFunction module)
        '''
        # lazy import: keeps this module importable/testable when the
        # project-local ObjFunction module is not on the path
        import ObjFunction
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
PSO.py
import copy
import random

import matplotlib.pyplot as plt
import numpy as np

from PSOIndividual import PSOIndividual


class ParticleSwarmOptimization:

    '''
    the class for Particle Swarm Optimization
    (maximizes the scaled objective computed by PSOIndividual)
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables (2 x vardim array)
        MAXGEN: termination condition (number of generations)
        params: algorithm required parameters, a list [w, c1, c2]
                (inertia weight, cognitive factor, social factor)
        '''
        self.sizepop = sizepop
        self.vardim = vardim
        self.bound = bound
        self.MAXGEN = MAXGEN
        self.params = params
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        # trace[:, 0] = best value per generation, trace[:, 1] = average
        self.trace = np.zeros((self.MAXGEN, 2))

    def initialize(self):
        '''
        initialize the population of pso with random particles
        '''
        for i in range(self.sizepop):
            ind = PSOIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluation(self):
        '''
        evaluate the fitness of the population and refresh each
        particle's personal best
        '''
        for i in range(self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness
            if self.population[i].fitness > self.population[i].bestFitness:
                self.population[i].bestFitness = self.population[i].fitness
                # BUGFIX: store the personal best into bestPosition, which
                # update() reads; the original wrote to an unused attribute
                # `bestIndex`, so the cognitive term of the velocity update
                # always saw the initial all-zero bestPosition.
                self.population[i].bestPosition = copy.deepcopy(
                    self.population[i].chrom)

    def update(self):
        '''
        update velocity and position of every particle using the
        canonical PSO rule: inertia + cognitive pull + social pull
        '''
        for i in range(self.sizepop):
            self.population[i].velocity = self.params[0] * self.population[i].velocity + \
                self.params[1] * np.random.random(self.vardim) * \
                (self.population[i].bestPosition - self.population[i].chrom) + \
                self.params[2] * np.random.random(self.vardim) * \
                (self.best.chrom - self.population[i].chrom)
            self.population[i].chrom = self.population[
                i].chrom + self.population[i].velocity

    def solve(self):
        '''
        the evolution process of the pso algorithm
        '''
        self.t = 0
        self.initialize()
        self.evaluation()
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        # map the scaled fitness back to the objective-function value
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            self.update()
            self.evaluation()
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            # keep the global best monotone: only replace it on improvement
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        print("Optimal function value is: %f; " % self.trace[self.t, 0])
        # Python 3: print is a function, not a statement
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def printResult(self):
        '''
        plot the best and average trace of the pso algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Particle Swarm Optimization algorithm for function optimization")
        plt.legend()
        plt.show()
运行程序:
if __name__ == "__main__":
    # 25 variables, each bounded in [-600, 600] (Griewank's usual domain)
    bound = np.tile([[-600], [600]], 25)
    # BUGFIX: the class defined in PSO.py is ParticleSwarmOptimization;
    # the original called an undefined name `PSO`, raising NameError.
    pso = ParticleSwarmOptimization(60, 25, bound, 1000, [0.7298, 1.4962, 1.4962])
    pso.solve()
ObjFunction见简单遗传算法-python实现。
相关文章推荐
- 差分进化算法-python实现
- 简单遗传算法-python实现
- 【Python】Learn Python the hard way, ex4 使用变量
- python requests的安装与简单运用
- 【Python】Learn Python the hard way, ex3 运算符,浮点数
- Python正则表达式学习小结
- 【Python】Learn Python the hard way, ex2 注释
- 【Python】Learn Python the hard way, ex1 简单print语句
- Python连接MySQL ---mysql-connector-python-2.1.3-py3.4-winx64.msi
- 【Python】Learn Python the hard way, ex0 学习使用Terminal
- python收集
- Python 面向对象
- Python FunnyPoint--Programing
- 用Python做科学计算
- Ubantu环境下利用Python3+PyQt5+Eric6进行GUI编程
- python 链接MS SQL
- python小细节
- Python笔记:logging模块使用
- 关于Python多线程的理解
- python内建函数——类型转化函数