Artificial Fish Swarm Algorithm - Python Implementation

AFSIndividual.py

import numpy as np
import ObjFunction
import copy


class AFSIndividual:

    """class for AFSIndividual"""

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables
        '''
        self.vardim = vardim
        self.bound = bound

    def generate(self):
        '''
        generate a random chromosome
        '''
        dim = self.vardim
        rnd = np.random.random(size=dim)
        self.chrom = np.zeros(dim)
        self.velocity = np.random.random(size=dim)
        for i in range(0, dim):
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]
        self.bestPosition = np.zeros(dim)
        self.bestFitness = 0.

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
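
As a quick standalone check of the individual class (assuming the ObjFunction module referenced at the end of this post is on the import path), something like the following can be run; the 25-dimensional [-600, 600] bounds match the run script at the bottom of the post:

import numpy as np
from AFSIndividual import AFSIndividual

bound = np.tile([[-600], [600]], 25)  # 2 x 25 array: row 0 lower bounds, row 1 upper bounds
ind = AFSIndividual(25, bound)
ind.generate()
ind.calculateFitness()
print(ind.chrom)
print(ind.fitness)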

AFS.py

import numpy as np
from AFSIndividual import AFSIndividual
import random
import copy
import matplotlib.pyplot as plt


class ArtificialFishSwarm:

    """class for ArtificialFishSwarm"""

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables, 2*vardim
        MAXGEN: termination condition
        params: algorithm required parameters, a list consisting of [visual, step, delta, trynum]
        '''
        self.sizepop = sizepop
        self.vardim = vardim
        self.bound = bound
        self.MAXGEN = MAXGEN
        self.params = params
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))
        self.lennorm = 6000

    def initialize(self):
        '''
        initialize the population of afs
        '''
        for i in range(0, self.sizepop):
            ind = AFSIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluation(self, x):
        '''
        evaluate the fitness of an individual
        '''
        x.calculateFitness()

    def forage(self, x):
        '''
        artificial fish foraging behavior
        '''
        newInd = copy.deepcopy(x)
        found = False
        for i in range(0, self.params[3]):
            indi = self.randSearch(x, self.params[0])
            if indi.fitness > x.fitness:
                # move one step towards the better position found within the visual range,
                # then clamp to the bounds and re-evaluate the fitness
                newInd.chrom = x.chrom + np.random.random(self.vardim) * self.params[1] * self.lennorm * (
                    indi.chrom - x.chrom) / np.linalg.norm(indi.chrom - x.chrom)
                for j in range(0, self.vardim):
                    if newInd.chrom[j] < self.bound[0, j]:
                        newInd.chrom[j] = self.bound[0, j]
                    if newInd.chrom[j] > self.bound[1, j]:
                        newInd.chrom[j] = self.bound[1, j]
                self.evaluation(newInd)
                found = True
                break
        if not found:
            newInd = self.randSearch(x, self.params[1])
        return newInd

    def randSearch(self, x, searLen):
        '''
        artificial fish random search behavior
        '''
        ind = copy.deepcopy(x)
        ind.chrom += np.random.uniform(-1, 1,
                                       self.vardim) * searLen * self.lennorm
        for j in range(0, self.vardim):
            if ind.chrom[j] < self.bound[0, j]:
                ind.chrom[j] = self.bound[0, j]
            if ind.chrom[j] > self.bound[1, j]:
                ind.chrom[j] = self.bound[1, j]
        self.evaluation(ind)
        return ind

    def huddle(self, x):
        '''
        artificial fish huddling behavior
        '''
        newInd = copy.deepcopy(x)
        dist = self.distance(x)
        index = []
        for i in range(1, self.sizepop):
            if dist[i] > 0 and dist[i] < self.params[0] * self.lennorm:
                index.append(i)
        nf = len(index)
        if nf > 0:
            xc = np.zeros(self.vardim)
            for i in range(0, nf):
                xc += self.population[index[i]].chrom
            xc = xc / nf
            cind = AFSIndividual(self.vardim, self.bound)
            cind.chrom = xc
            cind.calculateFitness()
            if (cind.fitness / nf) > (self.params[2] * x.fitness):
                xnext = x.chrom + np.random.random(
                    self.vardim) * self.params[1] * self.lennorm * (xc - x.chrom) / np.linalg.norm(xc - x.chrom)
                for j in range(0, self.vardim):
                    if xnext[j] < self.bound[0, j]:
                        xnext[j] = self.bound[0, j]
                    if xnext[j] > self.bound[1, j]:
                        xnext[j] = self.bound[1, j]
                newInd.chrom = xnext
                self.evaluation(newInd)
                return newInd
            else:
                return self.forage(x)
        else:
            return self.forage(x)

    def follow(self, x):
        '''
        artificial fish following behavior
        '''
        newInd = copy.deepcopy(x)
        dist = self.distance(x)
        index = []
        for i in range(1, self.sizepop):
            if dist[i] > 0 and dist[i] < self.params[0] * self.lennorm:
                index.append(i)
        nf = len(index)
        if nf > 0:
            best = -999999999.
            bestIndex = 0
            for i in range(0, nf):
                if self.population[index[i]].fitness > best:
                    best = self.population[index[i]].fitness
                    bestIndex = index[i]
            if (self.population[bestIndex].fitness / nf) > (self.params[2] * x.fitness):
                xnext = x.chrom + np.random.random(
                    self.vardim) * self.params[1] * self.lennorm * (self.population[bestIndex].chrom - x.chrom) / np.linalg.norm(self.population[bestIndex].chrom - x.chrom)
                for j in range(0, self.vardim):
                    if xnext[j] < self.bound[0, j]:
                        xnext[j] = self.bound[0, j]
                    if xnext[j] > self.bound[1, j]:
                        xnext[j] = self.bound[1, j]
                newInd.chrom = xnext
                self.evaluation(newInd)
                return newInd
            else:
                return self.forage(x)
        else:
            return self.forage(x)

    def solve(self):
        '''
        evolution process of the afs algorithm
        '''
        self.t = 0
        self.initialize()
        # evaluate the initial population
        for i in range(0, self.sizepop):
            self.evaluation(self.population[i])
            self.fitness[i] = self.population[i].fitness
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            for i in range(0, self.sizepop):
                xi1 = self.huddle(self.population[i])
                xi2 = self.follow(self.population[i])
                if xi1.fitness > xi2.fitness:
                    self.population[i] = xi1
                    self.fitness[i] = xi1.fitness
                else:
                    self.population[i] = xi2
                    self.fitness[i] = xi2.fitness
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f; " % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def distance(self, x):
        '''
        return the distance array to an individual
        '''
        dist = np.zeros(self.sizepop)
        for i in range(0, self.sizepop):
            dist[i] = np.linalg.norm(x.chrom - self.population[i].chrom) / 6000
        return dist

    def printResult(self):
        '''
        plot the result of the afs algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Artificial Fish Swarm algorithm for function optimization")
        plt.legend()
        plt.show()

Run the program (for example, by appending the following snippet to AFS.py; the last argument is the parameter list [visual, step, delta, trynum]):

if __name__ == "__main__":

    bound = np.tile([[-600], [600]], 25)
    afs = ArtificialFishSwarm(60, 25, bound, 500, [0.001, 0.0001, 0.618, 40])
    afs.solve()

For ObjFunction, see the simple genetic algorithm Python implementation (简单遗传算法-python实现).
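
That module is not reproduced here. As a rough guide only, a minimal sketch of a GrieFunc consistent with the trace formula (1 - fitness) / fitness used in solve() (i.e. fitness = 1 / (1 + Griewank value)) might look like the following; the actual implementation in the genetic algorithm post may differ:

import math


def GrieFunc(vardim, x, bound):
    # sketch only: Griewank objective mapped to a fitness that is maximized
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 += x[i - 1] ** 2
        s2 *= math.cos(x[i - 1] / math.sqrt(i))
    y = s1 / 4000. - s2 + 1
    # global optimum y = 0 maps to fitness 1; (1 - fitness) / fitness recovers y
    return 1. / (1. + y)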
