Assignment 2 ANN - Colab

This notebook provides implementations and visualizations of common activation functions used in artificial neural networks: sigmoid, tanh, ReLU, leaky ReLU, and softmax. Each function is defined from its mathematical formula, and Matplotlib plots are generated to illustrate its behavior; the code snippet and corresponding graph are included for each function.



# Implementation of the sigmoid activation function


import matplotlib.pyplot as plt
import numpy as np

def sigmoid(x):
    s = 1 / (1 + np.exp(-x))   # sigmoid output
    ds = s * (1 - s)           # derivative of the sigmoid
    return s, ds

x = np.arange(-10, 10, 0.01)
s, ds = sigmoid(x)

fig, ax = plt.subplots(figsize=(9, 5))
ax.spines['left'].set_position('center')
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.plot(x, s, color="#307EC7", linewidth=3, label="sigmoid")
ax.plot(x, ds, color="#9621E2", linewidth=3, label="derivative")
ax.legend(loc="upper right", frameon=False)
plt.title('Sigmoid Activation Function')
plt.show()
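
As a quick, illustrative check (added here, not part of the original notebook): the sigmoid of 0 should be 0.5 and its derivative there should be 0.25.

# Sanity check on the implementation above: sigmoid(0) = 0.5, derivative = 0.25
s0, ds0 = sigmoid(np.array([0.0]))
print(s0[0], ds0[0])  # expected: 0.5 0.25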

# Implementation of the tanh activation function


import matplotlib.pyplot as plt
import numpy as np

def tanh(x):
    t = (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x))  # hyperbolic tangent
    dt = 1 - t**2                                             # derivative of tanh
    return t, dt

z = np.arange(-9, 9, 0.01)
t, dt = tanh(z)

fig, ax = plt.subplots(figsize=(9, 5))
ax.spines['left'].set_position('center')
ax.spines['bottom'].set_position('center')
ax.spines['right'].set_color('none')
ax.spines['top'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.plot(z, t, color="#307EC7", linewidth=3, label="tanh")
ax.plot(z, dt, color="#9621E2", linewidth=3, label="derivative")
ax.legend(loc="upper right", frameon=False)
plt.title('Tanh Activation Function')
plt.show()
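
A small optional check (added here for illustration): the hand-written formula above should agree with NumPy's built-in np.tanh.

# The manual tanh above should match NumPy's built-in implementation
assert np.allclose(tanh(z)[0], np.tanh(z))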


# Implementation of the ReLU activation function


import numpy as np
import matplotlib.pyplot as plt

plt.figure(figsize=(8, 4))

def RectifiedLinearUnit(t):
    # ReLU passes positive inputs through unchanged and maps negative inputs to 0
    lst = []
    for i in t:
        if i >= 0:
            lst.append(i)
        else:
            lst.append(0)
    return lst

arr = np.linspace(-8, 8, 100)

plt.plot(arr, RectifiedLinearUnit(arr))
plt.title('Rectified Linear Unit Activation Function')
plt.show()
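
The loop above is easy to follow; an equivalent vectorised version can be written with np.maximum. The helper name relu_vectorized below is introduced here for illustration and is not part of the original assignment.

# Vectorised alternative to the loop-based ReLU above (illustrative helper)
def relu_vectorized(t):
    return np.maximum(0, t)  # element-wise maximum of 0 and each input

print(np.allclose(relu_vectorized(arr), RectifiedLinearUnit(arr)))  # True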

# Implementation of the leaky ReLU activation function


import numpy as np
import matplotlib.pyplot as plt

plt.figure(figsize=(8, 4))

def LeakyRectifiedLinearUnit(arr):
    # Leaky ReLU keeps positive inputs and scales negative inputs by 0.01
    lst = []
    for i in arr:
        if i >= 0:
            lst.append(i)
        else:
            lst.append(0.01 * i)
    return lst

arr = np.linspace(-6, 6)

plt.plot(arr, LeakyRectifiedLinearUnit(arr))
plt.title('Leaky ReLU Activation Function')
plt.show()
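
As with ReLU, the element-wise loop can be replaced by a vectorised np.where expression. The helper leaky_relu_vectorized and its alpha parameter are illustrative additions, assuming the same 0.01 negative slope used above.

# Vectorised leaky ReLU (illustrative helper, 0.01 slope on the negative side)
def leaky_relu_vectorized(t, alpha=0.01):
    return np.where(t >= 0, t, alpha * t)

print(np.allclose(leaky_relu_vectorized(arr), LeakyRectifiedLinearUnit(arr)))  # True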


# Implementation of the softmax function


import numpy as np
import matplotlib.pyplot as plt

plt.figure(figsize=(8, 4))

def softmax(t):
    # Exponentiate each element and normalise so the outputs sum to 1
    return np.exp(t) / np.sum(np.exp(t))

t = np.linspace(-5, 5)

plt.plot(t, softmax(t))
plt.title('Softmax Activation Function')
plt.show()
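
For large inputs, np.exp can overflow; a common remedy is to subtract the maximum before exponentiating, which leaves the softmax result unchanged. The softmax_stable helper below is an optional sketch added here, not part of the original notebook.

# Numerically stable softmax: shifting by the maximum avoids overflow in np.exp
def softmax_stable(t):
    shifted = t - np.max(t)
    return np.exp(shifted) / np.sum(np.exp(shifted))

print(np.allclose(softmax(t), softmax_stable(t)))  # True for the values above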

