Soft Computing Techniques
Practical No: 1
A) Design a simple linear neural network model.
code:
# First Experiment in Soft Computing Techniques
# simple neural network.
# First Experiment in Soft Computing Techniques
# simple neural network.
import numpy


def linear_activation(net):
    """Piecewise-linear activation: 0 below 0, identity on [0, 1], 1 above 1."""
    if net < 0:
        return 0
    if net <= 1:          # chained range check replaces the original (net>=0) & (net<=1)
        return net
    return 1


def main():
    """Read two weighted inputs and a bias, print the net input and the activation."""
    a = float(input("\n Enter the first input a = "))
    wa = float(input("\n Enter the Weight Value for first input wa = "))
    c = float(input("\n Enter the first input c= "))
    wc = float(input("\n Enter the Weight Value for first input wc = "))
    b = float(input("\n Enter the bias b= "))
    # net input, rounded to one decimal place
    net = (round((b + a * wa + c * wc) * 10)) / 10
    print("\t **** --- Output ----- **** ")
    print("\n net = ", net)
    print("\t **** --- Output ----- **** ")
    out = linear_activation(net)
    print(" output ", out)


if __name__ == "__main__":
    main()
Output:
B) Calculate the output of neural net using both binary and bipolar sigmoidal function.
code:
# Read n inputs and n weights, then print each weighted product (Yin)
# and their sum (the net input to the neuron).
n = int(input("Enter number of elements:"))
print("enter the input ")
inputs = []
for i in range(0, n):
    ele = float(input())
    inputs.append(ele)  # adding the element
print(inputs)
print("enter the weights")
weights = []
for i in range(0, n):
    ele = float(input())
    weights.append(ele)  # adding the element
print(weights)
print(" ---------------- ------------ ")
print("the net input can be calculated as Yin")
Yin = []
for i in range(0, n):
    Yin.append(inputs[i] * weights[i])
    print("net value for input", (i + 1), round((Yin[i]), 3))
print("sum of all the values ", round(sum(Yin), 3))
Output:
Practical No: 2
A) Generate AND/NOT function using McCulloch-Pitts neural net.
code:
# McCulloch-Pitts net for the AND-NOT function.
num_ip = int(input("Enter the number of inputs"))
w1 = 1
w2 = 2
print(" for the ", num_ip, "inputs calculate the net input using Yin x1W1 + X2W2")
x1 = []
x2 = []
for j in range(0, num_ip):
    ele1 = int(input(" X1 = "))
    ele2 = int(input(" X2 = "))
    x1.append(ele1)
    x2.append(ele2)
print("\n X1 = ", x1)
print("X2 = ", x2)
# elementwise products; the original `x1 * w1` repeated the Python list
# instead of scaling it, so w2 = 2 silently behaved as 1
n = [xi * w1 for xi in x1]
m = [xi * w2 for xi in x2]
Yin = []
for i in range(0, num_ip):
    Yin.append(n[i] + m[i])
print("Yin = ", Yin)
Yin = []
for i in range(0, num_ip):
    Yin.append(n[i] - m[i])
print("after assuming one weignt as excitatory and other as inhibitoryu Yin = ", Yin)
Y = []
for i in range(0, num_ip):
    # threshold activation: fire when the net input reaches 1
    if Yin[i] >= 1:
        Y.append(1)
    else:
        Y.append(0)
print("Y = ", Y)
# ------------------
#------------------
Output:
B) Generate XOR function using McCulloch-Pitts neural net.
code:
import numpy as np


def mp_xor_forward(x1, x2, w11, w12, w21, w22, v1, v2, theta):
    """One forward pass of the 2-2-1 McCulloch-Pitts net.

    Returns (zin1, zin2, yin, y) where y is the binary output vector.
    """
    zin1 = x1 * w11 + x2 * w21
    # BUG FIX: the original computed zin2 with w21 instead of w12
    zin2 = x1 * w12 + x2 * w22
    y1 = np.where(zin1 >= theta, 1, 0)  # threshold activation of hidden unit z1
    y2 = np.where(zin2 >= theta, 1, 0)  # threshold activation of hidden unit z2
    yin = y1 * v1 + y2 * v2             # net input to the output unit
    y = np.where(yin >= theta, 1, 0)
    return zin1, zin2, yin, y


def main():
    """Interactively search weights/threshold until the net reproduces XOR."""
    print('Enter weights')
    w11 = int(input('Weight w11='))
    w12 = int(input('Weight w12='))
    w21 = int(input('Weight w21='))
    w22 = int(input('Weight w22='))
    v1 = int(input('weight v1='))
    v2 = int(input('weight v2='))
    print('Enter Threshold Value')
    theta = int(input('theta='))
    x1 = np.array([0, 0, 1, 1])
    x2 = np.array([0, 1, 0, 1])
    z = np.array([0, 1, 1, 0])  # XOR truth table target
    while True:
        zin1, zin2, yin, y = mp_xor_forward(x1, x2, w11, w12, w21, w22, v1, v2, theta)
        print("z1", zin1)
        print("z2", zin2)
        print("yin", yin)
        print('Output of Net')
        print("y", y)
        print("z", z)
        if np.array_equal(y, z):
            break
        print("Net is not learning enter another se4t of weights and Threshold value")
        # BUG FIX: re-read as int; the original kept the raw strings from input(),
        # which then corrupted the next iteration's arithmetic
        w11 = int(input("Weight w11="))
        w12 = int(input("Weight w12="))
        w21 = int(input("Weight w21="))
        w22 = int(input("Weight w22="))
        v1 = int(input("Weight v1="))
        v2 = int(input("Weight v2="))
        theta = int(input("theta = "))
    print("McCulloch-Pitts Net for XOR function")
    print("Weights of Neuron Z1")
    print(w11)
    print(w21)
    print("Weights of Neuron Z2")
    print(w12)
    print(w22)
    print("Weights of Neuron Y")
    print(v1)
    print(v2)
    print("threshold value")
    print(theta)


if __name__ == "__main__":
    main()
Output:
Practical No: 3
A) Write a program to implement Hebb's rule.
code:
import numpy as np

# Hebb rule: w_new = w_old + x * y, bias b_new = b_old + y, one pass per pattern.
x1 = np.array([1, -1, -1, 1, -1, -1, 1, 1, 1])
x2 = np.array([1, -1, 1, 1, -1, 1, 1, 1, 1])
b = 0
y = np.array([1, -1])  # targets for the two patterns
wtold = np.zeros((9,), dtype=int)
wtnew = np.zeros((9,), dtype=int)
print("First input with target = 1")
for i in range(0, 9):
    wtnew[i] = wtold[i] + x1[i] * y[0]
wtold = wtnew.copy()  # copy, so updating wtnew later cannot alias wtold
b = b + y[0]
print("new wt=", wtnew)
print("Bias value=", b)
print("second input with target=-1")
for i in range(0, 9):
    wtnew[i] = wtold[i] + x2[i] * y[1]
b = b + y[1]
print("Bias value", b)
print("new wt=", wtnew)
print("Bias value", b)
Output:
B) Write a program to implement the delta rule.
#supervised learning
# supervised learning
import numpy as np
import time

np.set_printoptions(precision=2)


def train_delta(x, weights, desired, a):
    """Iterate the delta (Widrow-Hoff) rule until the actual output matches desired.

    Returns (weights, actual). Note: may not terminate if `a` is too large or
    a desired value is unreachable (e.g. x[i] == 0 with desired[i] != 0).
    """
    actual = x * weights
    print("actual", actual)
    print("desired", desired)
    # BUG FIX: np.allclose instead of np.array_equal — exact float equality
    # is essentially never reached, so the original loop could spin forever
    while not np.allclose(desired, actual):
        for i in range(0, len(x)):
            weights[i] = weights[i] + a * (desired[i] - actual[i])
        actual = x * weights
        print("weights", weights)
        print("actual", actual)
        print("desired", desired)
        print("*" * 30)
    return weights, actual


def main():
    """Read inputs, weights, targets and a learning rate, then train."""
    x = np.zeros((3,))
    weights = np.zeros((3,))
    desired = np.zeros((3,))
    for i in range(0, 3):
        x[i] = float(input("Initial inputs:"))
    for i in range(0, 3):
        weights[i] = float(input("Initial weights:"))
    for i in range(0, 3):
        desired[i] = float(input("Desired output:"))
    a = float(input("Enter learning rate:"))
    weights, actual = train_delta(x, weights, desired, a)
    print("Final output")
    print("Corrected weights", weights)
    print("actual", actual)
    print("desired", desired)


if __name__ == "__main__":
    main()
Output:
Practical No: 4
A) Write a program for Back Propagation Algorithm.
code:
import math


def backprop_step(a0, t, w10, b10, w20, b20, c):
    """One backpropagation step through a 1-1-1 tanh network.

    a0: input, t: target, w/b: layer weights and biases, c: learning rate.
    Returns the updated (w11, w21, b11, b21).
    """
    # BUG FIX: the original computed n1 = w10*c + b10, multiplying the weight
    # by the learning rate instead of by the input a0
    n1 = float(w10 * a0 + b10)
    a1 = math.tanh(n1)                 # hidden-layer output
    n2 = float(w20 * a1 + b20)
    a2 = math.tanh(float(n2))          # network output
    e = t - a2                         # output error
    s2 = 2 * (1 - a2 * a2) * e         # output sensitivity (tanh derivative term)
    s1 = (1 - a1 * a1) * w20 * s2      # back-propagated hidden sensitivity
    w21 = w20 - (c * s2 * a1)
    w11 = w10 - (c * s1 * a0)
    b21 = b20 - (c * s2)
    b11 = b10 - (c * s1)
    return w11, w21, b11, b21


def main():
    a0 = -1
    t = -1
    w10 = float(input("Enter weights first network:"))
    b10 = float(input("Enter base first network:"))
    w20 = float(input("Enter weights second network:"))
    b20 = float(input("Enter base second network:"))
    c = float(input("Enter learning coefficent:"))
    w11, w21, b11, b21 = backprop_step(a0, t, w10, b10, w20, b20, c)
    print("The updated weight of first network w11=", w11)
    print("The udated weight of second network w21=", w21)
    # BUG FIX: print the updated biases; the original printed b10/b20 unchanged
    print("The updated base of first network b10=", b11)
    print("The updated base of second network b20=", b21)


if __name__ == "__main__":
    main()
Output:
B) Write a program for Error Propagation Algorithm.
code:
import numpy as np
import decimal
import math

np.set_printoptions(precision=2)
# weights into hidden units z1, z2 (v1, v2) and into the output unit (w)
v1 = np.array([0.6, 0.3])
v2 = np.array([-0.1, 0.4])
w = np.array([-0.2, 0.4, 0.1])
b1 = 0.3
b2 = 0.5
x1 = 0
x2 = 1
alpha = 0.25  # learning rate
print("calculate net input to z1 layer")
zin1 = round(b1 + x1 * v1[0] + x2 * v2[0], 4)
print("z1=", round(zin1, 3))
print("calculate net input to z2 layer")
# BUG FIX: the x2*v2[1] contribution was missing in the original
zin2 = round(b2 + x1 * v1[1] + x2 * v2[1], 4)
print("z2=", round(zin2, 4))
print("Apply activation function to calculate output")
z1 = 1 / (1 + math.exp(-zin1))   # logistic sigmoid
z1 = round(z1, 4)
z2 = 1 / (1 + math.exp(-zin2))
z2 = round(z2, 4)
print("z1=", z1)
print("z2=", z2)
print("calculate net input to output layer")
yin = w[0] + z1 * w[1] + z2 * w[2]
print("yin=", yin)
print("calculate net output")
y = 1 / (1 + math.exp(-yin))
print("y=", y)
fyin = y * (1 - y)       # sigmoid derivative at yin
dk = (1 - y) * fyin      # delta of the output unit (target = 1)
print("dk", dk)
dw1 = alpha * dk * z1
dw2 = alpha * dk * z2
dw0 = alpha * dk
print("compute error portion in delta")
din1 = dk * w[1]
din2 = dk * w[2]
print("din1=", din1)
print("din2=", din2)
print("error in delta")
fzin1 = z1 * (1 - z1)
print("fzin1", fzin1)
d1 = din1 * fzin1
fzin2 = z2 * (1 - z2)
# BUG FIX: the original printed the literal string "fzin2,fzin2"
print("fzin2", fzin2)
d2 = din2 * fzin2
print("d1=", d1)
print("d2=", d2)
print("Changes in weights between input and hidden layer")
dv11 = alpha * d1 * x1
print("dv11=", dv11)
dv21 = alpha * d1 * x2
print("dv21=", dv21)
dv01 = alpha * d1
print("dv01=", dv01)
dv12 = alpha * d2 * x1
print("dv12=", dv12)
dv22 = alpha * d2 * x2
print("dv22=", dv22)
dv02 = alpha * d2
print("dv02=", dv02)
print("Final weights of network")
v1[0] = v1[0] + dv11
v1[1] = v1[1] + dv12
print("v=", v1)
v2[0] = v2[0] + dv21
v2[1] = v2[1] + dv22
print("v2", v2)
w[1] = w[1] + dw1
w[2] = w[2] + dw2
b1 = b1 + dv01
b2 = b2 + dv02
w[0] = w[0] + dw0
print("w=", w)
print("bias b1=", b1, " b2=", b2)
Output:
Practical No: 5
A) Write a program for Hopfield Network.
code:
# NOTE(review): the original also did `import hopfieldnetwork as network`, but that
# binding was immediately shadowed by the import below, so it is dropped here.
from neurodynex3.hopfield_network import network, pattern_tools, plot_tools

pattern_size = 5
# one neuron per pixel of a pattern_size x pattern_size pattern
hopfield_net = network.HopfieldNetwork(nr_neurons=pattern_size**2)
factory = pattern_tools.PatternFactory(pattern_size, pattern_size)
# store a checkerboard plus three random patterns
checkerboard = factory.create_checkerboard()
pattern_list = [checkerboard]
pattern_list.extend(factory.create_random_pattern_list(nr_patterns=3, on_probability=0.5))
plot_tools.plot_pattern_list(pattern_list)
overlap_matrix = pattern_tools.compute_overlap_matrix(pattern_list)
plot_tools.plot_overlap_matrix(overlap_matrix)
hopfield_net.store_patterns(pattern_list)
# start from a corrupted checkerboard and let the network settle
noisy_init_state = pattern_tools.flip_n(checkerboard, nr_of_flips=4)
hopfield_net.set_state_from_pattern(noisy_init_state)
states = hopfield_net.run_with_monitoring(nr_steps=4)
states_as_patterns = factory.reshape_patterns(states)
plot_tools.plot_state_sequence_and_overlap(states_as_patterns, pattern_list,
                                           reference_idx=0,
                                           suptitle="Network dynamics")
Pip Command for Jupyter:
%pip install hopfieldnetwork
%pip install neurodynex3
Pip Command For Cmd:
pip3 install hopfieldnetwork
pip3 install neurodynex3
Output:
B) Write a program for Radial Basis function
code:
from scipy import *
from numpy.linalg import norm, pinv
from matplotlib import pyplot as plt
from numpy import *
class RBF:
    """Radial basis function network with Gaussian basis functions.

    Weights are fitted in one shot via the pseudoinverse of the
    activation matrix (least-squares solution).
    """

    def __init__(self, indim, numCenters, outdim):
        self.indim = indim
        self.outdim = outdim
        self.numCenters = numCenters
        # centers start random in [-1, 1); train() replaces them with data points
        self.centers = [random.uniform(-1, 1, indim) for i in range(numCenters)]
        self.beta = 8  # width parameter of the Gaussian basis functions
        self.W = random.random((self.numCenters, self.outdim))

    def _basisfunc(self, c, d):
        """Gaussian RBF centered at c evaluated at point d."""
        assert len(d) == self.indim
        return exp(-self.beta * norm(c - d) ** 2)

    def _calcAct(self, X):
        # calculate activations of RBFs
        G = zeros((X.shape[0], self.numCenters), float)
        for ci, c in enumerate(self.centers):
            for xi, x in enumerate(X):
                G[xi, ci] = self._basisfunc(c, x)
        return G

    def train(self, X, Y):
        """ X: matrix of dimensions n x indim
            y: column vector of dimension n x 1 """
        # choose random center vectors from training set
        rnd_idx = random.permutation(X.shape[0])[:self.numCenters]
        self.centers = [X[i, :] for i in rnd_idx]
        print("center", self.centers)
        # calculate activations of RBFs
        G = self._calcAct(X)
        # calculate output weights (pseudoinverse)
        self.W = dot(pinv(G), Y)

    def test(self, X):
        """ X: matrix of dimensions n x indim """
        G = self._calcAct(X)
        Y = dot(G, self.W)
        return Y
if __name__ == '__main__':
    # ----- 1D Example ------------------------------------------------
    n = 100
    # n evenly spaced sample points in [-1, 1], as an n x 1 matrix
    x = mgrid[-1:1:complex(0, n)].reshape(n, 1)
    # set y and add random noise
    y = sin(3 * (x + 0.5) ** 3 - 1)
    # y += random.normal(0, 0.1, y.shape)
    # rbf regression
    rbf = RBF(1, 10, 1)
    rbf.train(x, y)
    z = rbf.test(x)
    # plot original data
    plt.figure(figsize=(12, 8))
    plt.plot(x, y, 'k-')
    # plot learned model
    plt.plot(x, z, 'r-', linewidth=2)
    # plot rbfs
    plt.plot(rbf.centers, zeros(rbf.numCenters), 'gs')
    for c in rbf.centers:
        # RF prediction lines
        cx = arange(c - 0.7, c + 0.7, 0.01)
        cy = [rbf._basisfunc(array([cx_]), array([c])) for cx_ in cx]
        plt.plot(cx, cy, '-', color='gray', linewidth=0.2)
    plt.xlim(-1.2, 1.2)
    plt.show()
Output:
Practical No: 6
A) Implement Kohonen Self Organizing Map
code:
# som_iris.py
# SOM for Iris dataset
# Anaconda3 5.2.0 (Python 3.6.5)
# ==================================================================
import numpy as np
import matplotlib.pyplot as plt
# note: if this fails, try >pip uninstall matplotlib
# and then >pip install matplotlib
def closest_node(data, t, map, m_rows, m_cols):
    # (row,col) of map node closest to data[t]
    result = (0, 0)
    small_dist = 1.0e20
    for i in range(m_rows):
        for j in range(m_cols):
            ed = euc_dist(map[i][j], data[t])
            if ed < small_dist:
                small_dist = ed
                result = (i, j)
    return result


def euc_dist(v1, v2):
    # Euclidean distance between two vectors
    return np.linalg.norm(v1 - v2)


def manhattan_dist(r1, c1, r2, c2):
    # city-block distance between two grid coordinates
    return np.abs(r1 - r2) + np.abs(c1 - c2)


def most_common(lst, n):
    # lst is a list of values 0 . . n; returns the most frequent value
    # (-1 for an empty list)
    if len(lst) == 0:
        return -1
    counts = np.zeros(shape=n, dtype=int)
    for i in range(len(lst)):
        counts[lst[i]] += 1
    return np.argmax(counts)
# ==================================================================
def main():
    """Train a 30x30 SOM on the Iris data, then display its U-Matrix and label map."""
    # 0. get started
    np.random.seed(1)
    Dim = 4
    Rows = 30; Cols = 30
    RangeMax = Rows + Cols
    LearnMax = 0.5
    StepsMax = 5000
    # 1. load data
    print("\nLoading Iris data into memory \n")
    # Change Location of DataSet
    data_file = \
        "C:/Users/User-10/Documents/Dimple_Baroliya/Soft_Computing/prac6/iris_data_012.txt"
    data_x = np.loadtxt(data_file, delimiter=",", usecols=range(0, 4),
                        dtype=np.float64)
    data_y = np.loadtxt(data_file, delimiter=",", usecols=[4],
                        dtype=np.int64)
    # option: normalize data
    # 2. construct the SOM
    print("Constructing a 30x30 SOM from the iris data")
    map = np.random.random_sample(size=(Rows, Cols, Dim))
    for s in range(StepsMax):
        if s % (StepsMax / 10) == 0:
            print("step = ", str(s))
        # learning rate and neighbourhood radius decay linearly with step
        pct_left = 1.0 - ((s * 1.0) / StepsMax)
        curr_range = (int)(pct_left * RangeMax)
        curr_rate = pct_left * LearnMax
        t = np.random.randint(len(data_x))
        (bmu_row, bmu_col) = closest_node(data_x, t, map, Rows, Cols)
        for i in range(Rows):
            for j in range(Cols):
                if manhattan_dist(bmu_row, bmu_col, i, j) < curr_range:
                    map[i][j] = map[i][j] + curr_rate * \
                        (data_x[t] - map[i][j])
    print("SOM construction complete \n")
    # 3. construct U-Matrix
    print("Constructing U-Matrix from SOM")
    u_matrix = np.zeros(shape=(Rows, Cols), dtype=np.float64)
    for i in range(Rows):
        for j in range(Cols):
            v = map[i][j]  # a vector
            # average distance to the 4-connected neighbours
            sum_dists = 0.0; ct = 0
            if i - 1 >= 0:  # above
                sum_dists += euc_dist(v, map[i - 1][j]); ct += 1
            if i + 1 <= Rows - 1:  # below
                sum_dists += euc_dist(v, map[i + 1][j]); ct += 1
            if j - 1 >= 0:  # left
                sum_dists += euc_dist(v, map[i][j - 1]); ct += 1
            if j + 1 <= Cols - 1:  # right
                sum_dists += euc_dist(v, map[i][j + 1]); ct += 1
            u_matrix[i][j] = sum_dists / ct
    print("U-Matrix constructed \n")
    # display U-Matrix
    plt.imshow(u_matrix, cmap='gray')  # black = close = clusters
    plt.show()
    # 4. because the data has labels, another possible visualization:
    # associate each data label with a map node
    print("Associating each data label to one map node ")
    mapping = np.empty(shape=(Rows, Cols), dtype=object)
    for i in range(Rows):
        for j in range(Cols):
            mapping[i][j] = []
    for t in range(len(data_x)):
        (m_row, m_col) = closest_node(data_x, t, map, Rows, Cols)
        mapping[m_row][m_col].append(data_y[t])
    label_map = np.zeros(shape=(Rows, Cols), dtype=np.int64)
    for i in range(Rows):
        for j in range(Cols):
            label_map[i][j] = most_common(mapping[i][j], 3)
    plt.imshow(label_map, cmap=plt.cm.get_cmap('terrain_r', 4))
    plt.colorbar()
    plt.show()
# ==================================================================
if __name__ == "__main__":
    main()
Output:
B) Implement Adaptive resonance theory
Practical No: 7
A) Line Separation
Output:
Practical No: 8
A) Implement Membership and Identity Operators | in, not in.
def overlapping(list1, list2):
    """Return 1 if the two lists share at least one element, else 0.

    Uses the `in` membership operator (the point of this practical)
    instead of the original hand-rolled O(n*m) index loops.
    """
    for item in list1:
        if item in list2:
            return 1
    return 0
list1 = [1, 2, 3, 4, 5]
list2 = [4, 6, 7, 8, 9]
list3 = [6, 7, 8, 9, 10]
# report whether list1 shares any element with each of the other lists
for candidate in (list2, list3):
    print("overlapping" if overlapping(list1, candidate) else "not overlapping")
Output:
B) Implement Membership and Identity Operators is, is not.
x = 5.2
# `is` compares object identity: type(x) is int only when x is exactly an int
print("true" if type(x) is int else "false")
Output:
Practical No: 9
A) Aim: - Find ratios using fuzzy logic
Install modules:
pip install fuzzywuzzy
from fuzzywuzzy import fuzz
from fuzzywuzzy import process

s1 = "I love fuzzysforfuzzys"
s2 = "I am loving fuzzysforfuzzys"
# the various fuzz ratios compare the two strings with different tokenizations
print("FuzzyWuzzy Ratio:", fuzz.ratio(s1, s2))
print("FuzzyWuzzyPartialRatio: ", fuzz.partial_ratio(s1, s2))
print("FuzzyWuzzyTokenSortRatio: ", fuzz.token_sort_ratio(s1, s2))
print("FuzzyWuzzyTokenSetRatio: ", fuzz.token_set_ratio(s1, s2))
print("FuzzyWuzzyWRatio: ", fuzz.WRatio(s1, s2), '\n\n')
# for process library,
query = 'fuzzys for fuzzys'
choices = ['fuzzy for fuzzy', 'fuzzy fuzzy', 'g. for fuzzys']
print("List of ratios: ")
print(process.extract(query, choices), '\n')
print("Best among the above list: ", process.extractOne(query, choices))
Output:
B) Aim: - Solve Tipping problem using fuzzy logic
Install modules
pip install scikit-fuzzy
python -m pip install --upgrade pip
pip install matplotlib
Output:
Practical No: 10
A) Write a Python program to implement a simple genetic algorithm.
# Python3 program to create target string, starting from
# random string using Genetic Algorithm
import random

# Number of individuals in each generation
POPULATION_SIZE = 100

# Valid genes
GENES = '''abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOP
QRSTUVWXYZ 1234567890, .-;:_!"#%&/()=?@${[]}'''

# Target string to be generated
TARGET = "I love GeeksforGeeks"


class Individual(object):
    '''
    Class representing individual in population
    '''

    def __init__(self, chromosome):
        self.chromosome = chromosome
        self.fitness = self.cal_fitness()

    @classmethod
    def mutated_genes(self):
        '''
        create random genes for mutation
        '''
        global GENES
        gene = random.choice(GENES)
        return gene

    @classmethod
    def create_gnome(self):
        '''
        create chromosome or string of genes
        '''
        global TARGET
        gnome_len = len(TARGET)
        return [self.mutated_genes() for _ in range(gnome_len)]

    def mate(self, par2):
        '''
        Perform mating and produce new offspring
        '''
        # chromosome for offspring
        child_chromosome = []
        for gp1, gp2 in zip(self.chromosome, par2.chromosome):
            # random probability
            prob = random.random()
            # if prob is less than 0.45, insert gene from parent 1
            if prob < 0.45:
                child_chromosome.append(gp1)
            # if prob is between 0.45 and 0.90, insert gene from parent 2
            elif prob < 0.90:
                child_chromosome.append(gp2)
            # otherwise insert random gene (mutate), for maintaining diversity
            else:
                child_chromosome.append(self.mutated_genes())
        # create new Individual (offspring) using generated chromosome
        return Individual(child_chromosome)

    def cal_fitness(self):
        '''
        Calculate fitness score, it is the number of
        characters in string which differ from target
        string.
        '''
        global TARGET
        fitness = 0
        for gs, gt in zip(self.chromosome, TARGET):
            if gs != gt:
                fitness += 1
        return fitness
# Driver code
def main():
    """Evolve random strings toward TARGET, printing the best of each generation."""
    global POPULATION_SIZE
    # current generation
    generation = 1
    found = False
    population = []
    # create initial population
    for _ in range(POPULATION_SIZE):
        gnome = Individual.create_gnome()
        population.append(Individual(gnome))
    while not found:
        # sort the population in increasing order of fitness score
        population = sorted(population, key=lambda x: x.fitness)
        # if the individual having lowest fitness score ie. 0
        # then we know that we have reached the target and break the loop
        if population[0].fitness <= 0:
            found = True
            break
        # Otherwise generate new offsprings for new generation
        new_generation = []
        # Perform Elitism, that mean 10% of fittest population
        # goes to the next generation
        s = int((10 * POPULATION_SIZE) / 100)
        new_generation.extend(population[:s])
        # From 50% of fittest population, Individuals
        # will mate to produce offspring
        s = int((90 * POPULATION_SIZE) / 100)
        for _ in range(s):
            parent1 = random.choice(population[:50])
            parent2 = random.choice(population[:50])
            child = parent1.mate(parent2)
            new_generation.append(child)
        population = new_generation
        print("Generation: {}\tString: {}\tFitness: {}".
              format(generation,
                     "".join(population[0].chromosome),
                     population[0].fitness))
        generation += 1
    print("Generation: {}\tString: {}\tFitness: {}".
          format(generation,
                 "".join(population[0].chromosome),
                 population[0].fitness))


if __name__ == '__main__':
    main()
Output:
B) Write a Python program to create two classes, City and Fitness, using a genetic algorithm.
Code:
import numpy as np
import random
import operator
import pandas as pd
import matplotlib.pyplot as plt
class City:
    """A point on the plane; a TSP route is a sequence of City objects."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def distance(self, city):
        """Euclidean distance to another city."""
        xDis = abs(self.x - city.x)
        yDis = abs(self.y - city.y)
        distance = np.sqrt((xDis ** 2) + (yDis ** 2))
        return distance

    def __repr__(self):
        return "(" + str(self.x) + "," + str(self.y) + ")"
class Fitness:
    """Caches the closed-tour distance and fitness (inverse distance) of a route."""

    def __init__(self, route):
        self.route = route
        self.distance = 0
        self.fitness = 0.0

    def routeDistance(self):
        """Total length of the closed tour (the route returns to its start)."""
        if self.distance == 0:
            pathDistance = 0
            for i in range(0, len(self.route)):
                fromCity = self.route[i]
                toCity = None
                if i + 1 < len(self.route):
                    toCity = self.route[i + 1]
                else:
                    toCity = self.route[0]  # wrap around to close the tour
                pathDistance += fromCity.distance(toCity)
            self.distance = pathDistance
        return self.distance

    def routeFitness(self):
        """Fitness = 1 / distance, computed once and cached."""
        if self.fitness == 0:
            self.fitness = 1 / float(self.routeDistance())
        return self.fitness
def createRoute(cityList):
    # a route is a random permutation of the full city list
    route = random.sample(cityList, len(cityList))
    return route
def initialPopulation(popSize, cityList):
    # build popSize independent random routes
    population = []
    for i in range(0, popSize):
        population.append(createRoute(cityList))
    return population
def rankRoutes(population):
    # rank route indices by fitness, best first
    fitnessResults = {}
    for i in range(0, len(population)):
        fitnessResults[i] = Fitness(population[i]).routeFitness()
    return sorted(fitnessResults.items(), key=operator.itemgetter(1), reverse=True)
def selection(popRanked, eliteSize):
    """Fitness-proportionate (roulette-wheel) selection with elitism.

    popRanked: list of (index, fitness) sorted best-first.
    Returns the list of selected population indices.
    """
    selectionResults = []
    df = pd.DataFrame(np.array(popRanked), columns=["Index", "Fitness"])
    df['cum_sum'] = df.Fitness.cumsum()
    df['cum_perc'] = 100 * df.cum_sum / df.Fitness.sum()
    # the elite always survive
    for i in range(0, eliteSize):
        selectionResults.append(popRanked[i][0])
    # roulette-wheel draw for the remaining slots
    for i in range(0, len(popRanked) - eliteSize):
        pick = 100 * random.random()
        for i in range(0, len(popRanked)):
            if pick <= df.iat[i, 3]:
                selectionResults.append(popRanked[i][0])
                break
    return selectionResults
def matingPool(population, selectionResults):
    # materialise the selected individuals into the mating pool
    matingpool = []
    for i in range(0, len(selectionResults)):
        index = selectionResults[i]
        matingpool.append(population[index])
    return matingpool
def breed(parent1, parent2):
    """Ordered crossover: a random slice of parent1 plus the rest from parent2."""
    child = []
    childP1 = []
    childP2 = []
    geneA = int(random.random() * len(parent1))
    geneB = int(random.random() * len(parent1))
    startGene = min(geneA, geneB)
    endGene = max(geneA, geneB)
    for i in range(startGene, endGene):
        childP1.append(parent1[i])
    # fill the remaining positions with parent2's cities, in their order
    childP2 = [item for item in parent2 if item not in childP1]
    child = childP1 + childP2
    return child
def breedPopulation(matingpool, eliteSize):
    # carry the elite over unchanged, breed the rest from a shuffled pool
    children = []
    length = len(matingpool) - eliteSize
    pool = random.sample(matingpool, len(matingpool))
    for i in range(0, eliteSize):
        children.append(matingpool[i])
    for i in range(0, length):
        child = breed(pool[i], pool[len(matingpool) - i - 1])
        children.append(child)
    return children
def mutate(individual, mutationRate):
    """Swap mutation: each position may swap with a random other position."""
    for pos in range(len(individual)):
        if random.random() < mutationRate:
            target = int(random.random() * len(individual))
            # exchange the two cities in place
            individual[pos], individual[target] = individual[target], individual[pos]
    return individual
def mutatePopulation(population, mutationRate):
    # apply swap mutation to every individual in the population
    mutatedPop = []
    for ind in range(0, len(population)):
        mutatedInd = mutate(population[ind], mutationRate)
        mutatedPop.append(mutatedInd)
    return mutatedPop
def nextGeneration(currentGen, eliteSize, mutationRate):
    # rank -> select -> mate -> mutate: one full GA generation
    popRanked = rankRoutes(currentGen)
    selectionResults = selection(popRanked, eliteSize)
    matingpool = matingPool(currentGen, selectionResults)
    children = breedPopulation(matingpool, eliteSize)
    nextGeneration = mutatePopulation(children, mutationRate)
    return nextGeneration
def geneticAlgorithm(population, popSize, eliteSize, mutationRate, generations):
    """Run the GA for a fixed number of generations and return the best route."""
    pop = initialPopulation(popSize, population)
    print("Initial distance: " + str(1 / rankRoutes(pop)[0][1]))
    for i in range(0, generations):
        pop = nextGeneration(pop, eliteSize, mutationRate)
    print("Final distance: " + str(1 / rankRoutes(pop)[0][1]))
    bestRouteIndex = rankRoutes(pop)[0][0]
    bestRoute = pop[bestRouteIndex]
    return bestRoute
# build 25 random cities on a 200x200 grid
cityList = []
for i in range(0, 25):
    cityList.append(City(x=int(random.random() * 200), y=int(random.random() * 200)))
def geneticAlgorithmPlot(population, popSize, eliteSize, mutationRate, generations):
    """Run the GA and plot the best route distance per generation."""
    pop = initialPopulation(popSize, population)
    progress = []
    progress.append(1 / rankRoutes(pop)[0][1])  # distance of the initial best route
    for i in range(0, generations):
        pop = nextGeneration(pop, eliteSize, mutationRate)
        progress.append(1 / rankRoutes(pop)[0][1])
    plt.plot(progress)
    plt.ylabel('Distance')
    plt.xlabel('Generation')
    plt.show()


geneticAlgorithmPlot(population=cityList, popSize=100, eliteSize=20, mutationRate=0.01,
                     generations=500)
Output: