DEIndividual.py

import numpy as np
import ObjFunction


class DEIndividual:
    '''
    Individual of the differential evolution algorithm.
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.

    def generate(self):
        '''
        Generate a random chromosome for the differential evolution algorithm.
        '''
        dim = self.vardim
        rnd = np.random.random(size=dim)
        self.chrom = np.zeros(dim)
        for i in range(0, dim):
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]

    def calculateFitness(self):
        '''
        Calculate the fitness of the chromosome.
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
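
Before wiring DEIndividual into the full algorithm, it can be exercised on its own. The snippet below is a minimal sketch: it assumes ObjFunction.GrieFunc is importable (it comes from the earlier genetic algorithm post), and the 3-dimensional bound is used purely for illustration.

import numpy as np
from DEIndividual import DEIndividual

# three variables, each constrained to [-600, 600]
bound = np.tile([[-600], [600]], 3)
ind = DEIndividual(3, bound)
ind.generate()            # random chromosome inside the bounds
ind.calculateFitness()    # calls ObjFunction.GrieFunc
print(ind.chrom, ind.fitness)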

DE.py

import numpy as np
from DEIndividual import DEIndividual
import random
import copy
import matplotlib.pyplot as plt


class DifferentialEvolutionAlgorithm:
    '''
    The class for the differential evolution algorithm.
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables
        MAXGEN: termination condition (maximum number of generations)
        params: algorithm parameters, a list [crossover rate CR, scaling factor F]
        '''
        self.sizepop = sizepop
        self.MAXGEN = MAXGEN
        self.vardim = vardim
        self.bound = bound
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))
        self.params = params

    def initialize(self):
        '''
        Initialize the population.
        '''
        for i in range(0, self.sizepop):
            ind = DEIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluate(self, x):
        '''
        Evaluate the fitness of an individual.
        '''
        x.calculateFitness()

    def solve(self):
        '''
        Evolution process of the differential evolution algorithm.
        '''
        self.t = 0
        self.initialize()
        for i in range(0, self.sizepop):
            self.evaluate(self.population[i])
            self.fitness[i] = self.population[i].fitness
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        # trace stores the raw function value recovered from the fitness
        # (assuming fitness = 1 / (1 + f), so f = (1 - fitness) / fitness)
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            for i in range(0, self.sizepop):
                vi = self.mutationOperation(i)
                ui = self.crossoverOperation(i, vi)
                xi_next = self.selectionOperation(i, ui)
                self.population[i] = xi_next
            for i in range(0, self.sizepop):
                self.evaluate(self.population[i])
                self.fitness[i] = self.population[i].fitness
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        print("Optimal function value is: %f" % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def selectionOperation(self, i, ui):
        '''
        Selection operation: greedy one-to-one replacement.
        '''
        xi_next = copy.deepcopy(self.population[i])
        xi_next.chrom = ui
        self.evaluate(xi_next)
        if xi_next.fitness > self.population[i].fitness:
            return xi_next
        else:
            return self.population[i]

    def crossoverOperation(self, i, vi):
        '''
        Binomial crossover between the target vector and the mutant vector.
        '''
        k = np.random.randint(0, self.vardim)
        ui = np.zeros(self.vardim)
        for j in range(0, self.vardim):
            pick = random.random()
            if pick < self.params[0] or j == k:
                ui[j] = vi[j]
            else:
                ui[j] = self.population[i].chrom[j]
        return ui

    def mutationOperation(self, i):
        '''
        DE/rand/1 mutation: v = x_c + F * (x_a - x_b), with a, b, c distinct from i.
        '''
        a = np.random.randint(0, self.sizepop)
        while a == i:
            a = np.random.randint(0, self.sizepop)
        b = np.random.randint(0, self.sizepop)
        while b == i or b == a:
            b = np.random.randint(0, self.sizepop)
        c = np.random.randint(0, self.sizepop)
        while c == i or c == b or c == a:
            c = np.random.randint(0, self.sizepop)
        vi = self.population[c].chrom + self.params[1] * \
            (self.population[a].chrom - self.population[b].chrom)
        # clip the mutant vector back into the search bounds
        for j in range(0, self.vardim):
            if vi[j] < self.bound[0, j]:
                vi[j] = self.bound[0, j]
            if vi[j] > self.bound[1, j]:
                vi[j] = self.bound[1, j]
        return vi

    def printResult(self):
        '''
        Plot the convergence trace of the differential evolution algorithm.
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Differential Evolution Algorithm for function optimization")
        plt.legend()
        plt.show()
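
The three operators above implement the classic DE/rand/1/bin scheme. To see that scheme in isolation, here is a compact numpy-only sketch of one generation; the names pop, f, F and CR are illustrative placeholders rather than part of the classes above, and it minimizes the raw objective directly instead of maximizing the fitness value used by the class.

import numpy as np

def de_rand_1_bin_step(pop, f, F=0.6, CR=0.8):
    '''One generation of DE/rand/1/bin on a (sizepop, vardim) array,
    minimizing the objective f (illustrative sketch).'''
    sizepop, vardim = pop.shape
    new_pop = pop.copy()
    for i in range(sizepop):
        # pick three distinct indices a, b, c, all different from i
        a, b, c = np.random.choice([j for j in range(sizepop) if j != i],
                                   size=3, replace=False)
        v = pop[c] + F * (pop[a] - pop[b])      # mutation: DE/rand/1
        k = np.random.randint(vardim)
        mask = np.random.random(vardim) < CR
        mask[k] = True                          # keep at least one gene from v
        u = np.where(mask, v, pop[i])           # binomial crossover
        if f(u) < f(pop[i]):                    # greedy one-to-one selection
            new_pop[i] = u
    return new_pop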

Run the program:

if __name__ == "__main__":
    bound = np.tile([[-600], [600]], 25)
    dea = DifferentialEvolutionAlgorithm(60, 25, bound, 1000, [0.8, 0.6])
    dea.solve()

ObjFunction is defined in the earlier post 简单遗传算法-python实现 (simple genetic algorithm, Python implementation).
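
If that earlier post is not at hand, the following is a sketch of what ObjFunction.GrieFunc can look like, consistent with how it is called and traced above (a Griewank-based fitness in (0, 1], higher is better, with the raw function value recovered as (1 - fitness) / fitness); the exact code in the original post may differ.

import math

def GrieFunc(vardim, x, bound):
    '''Griewank function mapped to a fitness in (0, 1]; higher is better.
    bound is accepted to match the call signature but unused in this sketch.'''
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 += x[i - 1] ** 2
        s2 *= math.cos(x[i - 1] / math.sqrt(i))
    y = (1. / 4000.) * s1 - s2 + 1   # Griewank value, 0 at the global optimum
    return 1. / (1. + y)             # fitness = 1 / (1 + f)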
