Artificial Fish Swarm Algorithm: Python Implementation
This article presents a Python implementation of the Artificial Fish Swarm Algorithm (AFSA), shared as a reference. The implementation consists of an individual (fish) class in AFSIndividual.py, the swarm solver in AFS.py, and a small driver script; the solver models the three classic fish behaviors: foraging, huddling, and following.
AFSIndividual.py
import copy

import numpy as np

import ObjFunction


class AFSIndividual:

    """Individual (artificial fish) for the AFS algorithm."""

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables, a 2*vardim array
        '''
        self.vardim = vardim
        self.bound = bound

    def generate(self):
        '''
        generate a random chromosome within the variable bounds
        '''
        dim = self.vardim
        rnd = np.random.random(size=dim)
        self.chrom = np.zeros(dim)
        self.velocity = np.random.random(size=dim)
        for i in range(0, dim):
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]
        self.bestPosition = np.zeros(dim)
        self.bestFitness = 0.

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
AFS.py
import copy

import numpy as np
import matplotlib.pyplot as plt

from AFSIndividual import AFSIndividual


class ArtificialFishSwarm:

    """class for ArtificialFishSwarm"""

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables, a 2*vardim array
        MAXGEN: termination condition (maximum number of generations)
        params: algorithm parameters, a list [visual, step, delta, trynum]
        '''
        self.sizepop = sizepop
        self.vardim = vardim
        self.bound = bound
        self.MAXGEN = MAXGEN
        self.params = params
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))
        # normalization constant for distances, the step size and the visual range
        self.lennorm = 6000

    def initialize(self):
        '''
        initialize the population of artificial fish
        '''
        for i in range(0, self.sizepop):
            ind = AFSIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluation(self, x):
        '''
        evaluate the fitness of an individual
        '''
        x.calculateFitness()

    def forage(self, x):
        '''
        artificial fish foraging behavior
        '''
        newInd = copy.deepcopy(x)
        found = False
        for i in range(0, self.params[3]):
            indi = self.randSearch(x, self.params[0])
            if indi.fitness > x.fitness:
                # a better position was found within the visual range: take it
                newInd = indi
                found = True
                break
        if not found:
            # no better position found after trynum tries: take a random step instead
            newInd = self.randSearch(x, self.params[1])
        return newInd

    def randSearch(self, x, searLen):
        '''
        artificial fish random search behavior
        '''
        ind = copy.deepcopy(x)
        ind.chrom += np.random.uniform(-1, 1,
                                       self.vardim) * searLen * self.lennorm
        # clip the new position to the variable bounds
        for j in range(0, self.vardim):
            if ind.chrom[j] < self.bound[0, j]:
                ind.chrom[j] = self.bound[0, j]
            if ind.chrom[j] > self.bound[1, j]:
                ind.chrom[j] = self.bound[1, j]
        self.evaluation(ind)
        return ind

    def huddle(self, x):
        '''
        artificial fish huddling (swarming) behavior
        '''
        newInd = copy.deepcopy(x)
        dist = self.distance(x)
        index = []
        for i in range(1, self.sizepop):
            if dist[i] > 0 and dist[i] < self.params[0] * self.lennorm:
                index.append(i)
        nf = len(index)
        if nf > 0:
            # center of the neighbours within the visual range
            xc = np.zeros(self.vardim)
            for i in range(0, nf):
                xc += self.population[index[i]].chrom
            xc = xc / nf
            cind = AFSIndividual(self.vardim, self.bound)
            cind.chrom = xc
            cind.calculateFitness()
            if (cind.fitness / nf) > (self.params[2] * x.fitness):
                # the center is promising and not crowded: move one step towards it
                xnext = x.chrom + np.random.random(
                    self.vardim) * self.params[1] * self.lennorm * (xc - x.chrom) / np.linalg.norm(xc - x.chrom)
                for j in range(0, self.vardim):
                    if xnext[j] < self.bound[0, j]:
                        xnext[j] = self.bound[0, j]
                    if xnext[j] > self.bound[1, j]:
                        xnext[j] = self.bound[1, j]
                newInd.chrom = xnext
                self.evaluation(newInd)
                # print("huddling")
                return newInd
            else:
                return self.forage(x)
        else:
            return self.forage(x)

    def follow(self, x):
        '''
        artificial fish following behavior
        '''
        newInd = copy.deepcopy(x)
        dist = self.distance(x)
        index = []
        for i in range(1, self.sizepop):
            if dist[i] > 0 and dist[i] < self.params[0] * self.lennorm:
                index.append(i)
        nf = len(index)
        if nf > 0:
            # find the best neighbour within the visual range
            best = -999999999.
            bestIndex = 0
            for i in range(0, nf):
                if self.population[index[i]].fitness > best:
                    best = self.population[index[i]].fitness
                    bestIndex = index[i]
            if (self.population[bestIndex].fitness / nf) > (self.params[2] * x.fitness):
                # the best neighbour is promising and not crowded: move one step towards it
                xnext = x.chrom + np.random.random(
                    self.vardim) * self.params[1] * self.lennorm * (self.population[bestIndex].chrom - x.chrom) / np.linalg.norm(self.population[bestIndex].chrom - x.chrom)
                for j in range(0, self.vardim):
                    if xnext[j] < self.bound[0, j]:
                        xnext[j] = self.bound[0, j]
                    if xnext[j] > self.bound[1, j]:
                        xnext[j] = self.bound[1, j]
                newInd.chrom = xnext
                self.evaluation(newInd)
                # print("follow")
                return newInd
            else:
                return self.forage(x)
        else:
            return self.forage(x)

    def solve(self):
        '''
        evolution process of the AFS algorithm
        '''
        self.t = 0
        self.initialize()
        # evaluate the initial population
        for i in range(0, self.sizepop):
            self.evaluation(self.population[i])
            self.fitness[i] = self.population[i].fitness
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        # record function values derived from the fitness for the convergence trace
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            for i in range(0, self.sizepop):
                xi1 = self.huddle(self.population[i])
                xi2 = self.follow(self.population[i])
                # keep the better outcome of huddling and following
                if xi1.fitness > xi2.fitness:
                    self.population[i] = xi1
                    self.fitness[i] = xi1.fitness
                else:
                    self.population[i] = xi2
                    self.fitness[i] = xi2.fitness
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f" % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def distance(self, x):
        '''
        return the array of distances from an individual to the whole population
        '''
        dist = np.zeros(self.sizepop)
        for i in range(0, self.sizepop):
            dist[i] = np.linalg.norm(x.chrom - self.population[i].chrom) / self.lennorm
        return dist

    def printResult(self):
        '''
        plot the convergence curves of the AFS algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Artificial Fish Swarm algorithm for function optimization")
        plt.legend()
        plt.show()
Run the program:
import numpy as np

from AFS import ArtificialFishSwarm

if __name__ == "__main__":
    # 25-dimensional search space, each variable bounded in [-600, 600]
    bound = np.tile([[-600], [600]], 25)
    # 60 fish, 500 generations, params = [visual, step, delta, trynum]
    afs = ArtificialFishSwarm(60, 25, bound, 500, [0.001, 0.0001, 0.618, 40])
    afs.solve()
The ObjFunction module is taken from the earlier post "简单遗传算法-python实现" (simple genetic algorithm in Python).
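Since that module is not listed here, the following is a minimal sketch of what ObjFunction.py is assumed to contain: a Griewank-based fitness function with the signature GrieFunc(vardim, x, bound) used by AFSIndividual.calculateFitness, which maps the objective value f to a fitness of 1/(1+f). This matches the (1 - fitness)/fitness back-transformation in solve(), but the exact module in the referenced post may differ.

# ObjFunction.py -- a minimal sketch, assuming a Griewank-based fitness
import math


def GrieFunc(vardim, x, bound):
    """Griewank function mapped to a fitness value (larger is better).

    vardim: dimension of variables
    x: chromosome (candidate solution)
    bound: boundaries of variables (unused here, kept for interface compatibility)
    """
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 += x[i - 1] ** 2
        s2 *= math.cos(x[i - 1] / math.sqrt(i))
    y = (1. / 4000.) * s1 - s2 + 1
    # map the objective value y >= 0 to a fitness in (0, 1]
    return 1. / (1. + y)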