A Simple Genetic Algorithm in Python

ObjFunction.py

import math


def GrieFunc(vardim, x, bound):
    """
    Griewank function, mapped to a fitness value in (0, 1]:
    the global minimum f(0) = 0 corresponds to a fitness of 1.
    """
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 = s1 + x[i - 1] ** 2
        s2 = s2 * math.cos(x[i - 1] / math.sqrt(i))
    y = (1. / 4000.) * s1 - s2 + 1
    y = 1. / (1. + y)
    return y


def RastFunc(vardim, x, bound):
    """
    Rastrigin function (raw objective value)
    """
    s = 10 * vardim  # constant term 10 * n of the Rastrigin function
    for i in range(1, vardim + 1):
        s = s + x[i - 1] ** 2 - 10 * math.cos(2 * math.pi * x[i - 1])
    return s
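
A quick sanity check of the two objective functions (a minimal sketch, not part of the original post; the bound argument is accepted but unused by both functions, so None is passed here): at the origin the Griewank objective is 0, so the mapped fitness is exactly 1.0, and the Rastrigin value is 0.

import ObjFunction

x0 = [0.0] * 25
print(ObjFunction.GrieFunc(25, x0, None))  # 1.0: objective 0 maps to fitness 1
print(ObjFunction.RastFunc(25, x0, None))  # 0.0: global minimum of Rastrigin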

GAIndividual.py

import numpy as np
import ObjFunction


class GAIndividual:

    '''
    individual of genetic algorithm
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.

    def generate(self):
        '''
        generate a random chromosome for genetic algorithm
        '''
        dim = self.vardim
        rnd = np.random.random(size=dim)
        self.chrom = np.zeros(dim)
        for i in range(0, dim):
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
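
A minimal usage sketch for this class (the 2 x 25 bound layout mirrors the driver script at the end of this post; nothing beyond the code above is assumed): row 0 of bound holds the lower limits and row 1 the upper limits, one column per variable.

import numpy as np
from GAIndividual import GAIndividual

bound = np.tile([[-600], [600]], 25)  # shape (2, 25): lower bounds in row 0, upper bounds in row 1
ind = GAIndividual(25, bound)
ind.generate()            # random chromosome drawn uniformly inside the bounds
ind.calculateFitness()    # Griewank-based fitness in (0, 1]
print(ind.chrom, ind.fitness)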

GeneticAlgorithm.py

import numpy as np
from GAIndividual import GAIndividual
import random
import copy
import matplotlib.pyplot as plt


class GeneticAlgorithm:

    '''
    The class for genetic algorithm
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables
        MAXGEN: termination condition (maximum number of generations)
        params: algorithm parameters, a list consisting of [crossover rate, mutation rate, alpha]
        '''
        self.sizepop = sizepop
        self.MAXGEN = MAXGEN
        self.vardim = vardim
        self.bound = bound
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))
        self.params = params

    def initialize(self):
        '''
        initialize the population
        '''
        for i in range(0, self.sizepop):
            ind = GAIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluate(self):
        '''
        evaluation of the population fitnesses
        '''
        for i in range(0, self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness

    def solve(self):
        '''
        evolution process of genetic algorithm
        '''
        self.t = 0
        self.initialize()
        self.evaluate()
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        # the fitness is f = 1 / (1 + y), so (1 - f) / f recovers the objective value y
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            self.selectionOperation()
            self.crossoverOperation()
            self.mutationOperation()
            self.evaluate()
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f; " %
              self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def selectionOperation(self):
        '''
        roulette-wheel selection operation for genetic algorithm
        '''
        newpop = []
        totalFitness = np.sum(self.fitness)
        accuFitness = np.zeros((self.sizepop, 1))

        # cumulative share of the total fitness for each individual
        sum1 = 0.
        for i in range(0, self.sizepop):
            accuFitness[i] = sum1 + self.fitness[i] / totalFitness
            sum1 = accuFitness[i]

        for i in range(0, self.sizepop):
            r = random.random()
            idx = 0
            for j in range(0, self.sizepop - 1):
                if j == 0 and r < accuFitness[j]:
                    idx = 0
                    break
                elif r >= accuFitness[j] and r < accuFitness[j + 1]:
                    idx = j + 1
                    break
            newpop.append(self.population[idx])
        self.population = newpop

    def crossoverOperation(self):
        '''
        arithmetic crossover operation for genetic algorithm
        '''
        newpop = []
        for i in range(0, self.sizepop, 2):
            idx1 = random.randint(0, self.sizepop - 1)
            idx2 = random.randint(0, self.sizepop - 1)
            while idx2 == idx1:
                idx2 = random.randint(0, self.sizepop - 1)
            newpop.append(copy.deepcopy(self.population[idx1]))
            newpop.append(copy.deepcopy(self.population[idx2]))
            r = random.random()
            if r < self.params[0]:
                crossPos = random.randint(1, self.vardim - 1)
                alpha = self.params[2]
                for j in range(crossPos, self.vardim):
                    # blend the two parent genes; keep the original values so the
                    # second child is not computed from the already-updated first child
                    gene1 = newpop[i].chrom[j]
                    gene2 = newpop[i + 1].chrom[j]
                    newpop[i].chrom[j] = gene1 * alpha + (1 - alpha) * gene2
                    newpop[i + 1].chrom[j] = gene2 * alpha + (1 - alpha) * gene1
        self.population = newpop

    def mutationOperation(self):
        '''
        non-uniform mutation operation for genetic algorithm
        '''
        newpop = []
        for i in range(0, self.sizepop):
            newpop.append(copy.deepcopy(self.population[i]))
            r = random.random()
            if r < self.params[1]:
                mutatePos = random.randint(0, self.vardim - 1)
                theta = random.random()
                # the perturbation shrinks as t approaches MAXGEN
                delta = 1 - random.random() ** (1 - self.t / self.MAXGEN)
                if theta > 0.5:
                    newpop[i].chrom[mutatePos] -= (
                        newpop[i].chrom[mutatePos] - self.bound[0, mutatePos]) * delta
                else:
                    newpop[i].chrom[mutatePos] += (
                        self.bound[1, mutatePos] - newpop[i].chrom[mutatePos]) * delta
        self.population = newpop

    def printResult(self):
        '''
        plot the result of the genetic algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Genetic algorithm for function optimization")
        plt.legend()
        plt.show()
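
selectionOperation above is fitness-proportionate (roulette-wheel) selection: each individual owns a slice of the cumulative fitness distribution, and a uniform random number selects whichever slice it lands in. The same idea can be written with np.searchsorted; the sketch below is only an illustration (the helper name rouletteIndices is made up, not part of the original code).

import numpy as np

def rouletteIndices(fitness, n):
    # cumulative share of the total fitness for each individual
    cum = np.cumsum(fitness / np.sum(fitness))
    # for each random number, the first slot whose cumulative share exceeds it
    return np.searchsorted(cum, np.random.random(n), side='right')

Calling rouletteIndices(self.fitness.flatten(), self.sizepop) returns the same kind of index list that the nested loop in selectionOperation builds one element at a time.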

Run the program:

import numpy as np
from GeneticAlgorithm import GeneticAlgorithm as GA

if __name__ == "__main__":

    bound = np.tile([[-600], [600]], 25)
    ga = GA(60, 25, bound, 1000, [0.9, 0.1, 0.5])
    ga.solve()
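
The driver searches the 25-dimensional Griewank function over [-600, 600] with a population of 60 for 1000 generations, using a crossover rate of 0.9, a mutation rate of 0.1, and a blend factor alpha of 0.5. To minimize the Rastrigin function instead, calculateFitness in GAIndividual.py must still return a value where larger means better, because selection is fitness-proportionate; a minimal sketch of such a replacement (the 1 / (1 + s) mapping mirrors the one used in GrieFunc and is an assumption, not part of the original post):

    def calculateFitness(self):
        '''
        fitness for minimizing the Rastrigin function: map the raw objective
        s >= 0 into (0, 1] so that smaller s gives higher fitness
        '''
        s = ObjFunction.RastFunc(self.vardim, self.chrom, self.bound)
        self.fitness = 1. / (1. + s)

The search range should also be adjusted in the driver, e.g. bound = np.tile([[-5.12], [5.12]], 25) for the usual Rastrigin domain.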

 

    Original author: 遗传算法
    Original source: https://www.cnblogs.com/biaoyu/p/4857881.html
    This article was reposted from the web to share knowledge only; if it infringes any rights, please contact the blogger to have it removed.