Skip to content

Commit fe043db

Browse files
committed
2 parents fcfbafa + b7aff85 commit fe043db

File tree

6 files changed

+74
-10
lines changed

6 files changed

+74
-10
lines changed

Fall20/NeuralNetworks1/NN.py

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,17 @@ def linear_activation(z):
77
def tanh_activation(z):
    """Hyperbolic-tangent activation: maps any real input into (-1, 1)."""
    squashed = np.tanh(z)
    return squashed
99

10+
11+
def averageOf3(input1, input2, input3):
    """Single neuron that averages its three inputs.

    Every input gets the same weight (1/3) and the bias is zero, so the
    weighted sum is the arithmetic mean; the linear activation then passes
    that value through unchanged.
    """
    equal_weight = 1.0 / 3.0
    bias = 0
    # weight each input separately (keeps float behavior identical to
    # summing three individually-weighted terms)
    weighted_sum = (
        input1 * equal_weight
        + input2 * equal_weight
        + input3 * equal_weight
        + bias
    )
    return linear_activation(weighted_sum)
19+
20+
1021
# 2 layer NN for implementation of OR gate
1122
def orgate(input1, input2):
1223
bias = -1
@@ -31,3 +42,5 @@ def boolToBinary(bool1,bool2):
3142

3243
input1, input2 = boolToBinary(True,True)
3344
print(orgate(input1,input2))
45+
46+
print(averageOf3(1, 0, 3))

Fall20/NeuralNetworks1/README.md

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,15 @@
11
# 1. Introduction to AI and Neural Networks
22

3-
Welcome to the repo for ACM AI's first workshop on AI and Neural Networks, hosted in fall 2020
3+
Welcome to the repo for ACM AI's first workshop on AI and Neural Networks, hosted in fall 2020. Slides are here:
4+
https://docs.google.com/presentation/d/1a9TwuZkqAwEvo1eCFcL-Hub1OiJNxzidvtBaL3OwLWQ/edit?usp=sharing
45

56
Here's a quick run down of what was in the workshop!
67

78
1. What is AI?
89
2. Stats and history of AI
910
3. What is AI all about? Is it just Neural Networks? (answer is no)
1011
4. Applications
11-
5. What is a Neural Network?
12+
5. What is a Neural Network?
1213
6. What is a Neuron? Biologically and Computationally?
1314
7. Weights and Biases of a Network
1415
8. Activation Functions
@@ -17,6 +18,7 @@ Here's a quick run down of what was in the workshop!
1718
11. Resources
1819

1920
In this folder there is also code for various work we demoed in the workshop. We use the `numpy` package to do our demos. To download it, do
21+
2022
```
2123
pip install numpy
22-
```
24+
```

Fall20/NeuralNetworks1/vectorized.py

Lines changed: 16 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -59,24 +59,34 @@ def random_nn(x):
5959

6060
return a_3
6161

62-
print("On 3 layer network, input {} fed forward gives {}".format(x, random_nn(x)))
62+
# print("On 3 layer network, input {} fed forward gives {}".format(x, random_nn(x)))
6363

6464
# 4 layer NN for computing whether absolute difference is between 1 and 3
6565
# if between 1 and 3 outputs >0 else output <=0
6666
def multilayer(x):
    """4 layer NN: is the absolute difference of the two inputs in (1, 3)?

    Feeds the 2-element input vector x forward through three weighted
    stages; the final tanh output is > 0 when the difference is between
    1 and 3 and <= 0 otherwise.
    """
    # layer 2: signed difference of the two inputs
    layer2_weights = np.array([1, -1])
    layer2_bias = 0
    z2 = np.matmul(layer2_weights, x) + layer2_bias
    # presumably parametric_activation(-1, .) folds negatives to positive
    # (absolute value) -- confirm against its definition above
    output2 = parametric_activation(-1, z2)

    # layer 3: shift the result down by 2
    layer3_weights = np.array([1])
    layer3_bias = -2
    z3 = np.matmul(layer3_weights, [output2]) + layer3_bias
    output3 = parametric_activation(-1, z3)

    # final layer: negate and re-center, then squash with tanh
    final_weights = np.array([-1])
    final_bias = 1
    z_final = np.matmul(final_weights, [output3]) + final_bias
    return tanh_activation(z_final)
8090

8191
x = np.array([4,5.5])
82-
print(multilayer(x))
92+
# print(multilayer(x))
Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,24 @@
1+
import numpy as np
2+
3+
# fill in this neural network and activation function to calculate the average of 5 values using numpy and vectorization
4+
def activation(z):
    # fill me in: the activation function used by average5NN
    raise NotImplementedError


def average5NN(x):
    # This should be a simple 2 layer network: one input layer and one output
    # layer. Figure out how many neurons are in each layer, the weights, and
    # the biases!
    #
    # Take the input column vector x, apply an appropriate weight matrix, add
    # a bias, apply the activation, and return the result.
    weights = np.array([])  # fill me in!
    bias = 100  # fix me!
    weighted_input = 0  # fill me in!
    raise NotImplementedError
19+
20+
21+
# Your network works if you don't get any errors!
22+
assert average5NN(np.array([1, 1, 1, 1, 1])) == 1
23+
assert average5NN(np.array([1, -1, 1, -1, 0])) == 0
24+
assert average5NN(np.array([100, 200, 300, 400, -400])) == 1.2
Lines changed: 15 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,15 @@
1+
# fill in this neural network and activation function to calculate the square of the sum of 2 inputs
2+
def activation(z):
    # fill me in: the activation function used by squareNN
    raise NotImplementedError


def squareNN(x1, x2):
    # This should be a simple 2 layer network: one input layer and one output
    # layer. Figure out how many neurons are in each layer, the weights, and
    # the biases!
    #
    # Take x1 and x2 and return the square of their sum -- but obviously in
    # neural network style!
    raise NotImplementedError
9+
10+
11+
# Your network works if you don't get any errors!
12+
assert squareNN(1, 1) == 4
13+
assert squareNN(-1, 1) == 0
14+
assert squareNN(10, 2.5) == 156.25
15+
assert squareNN(0, 0) == 0

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,4 +2,4 @@
22

33
This is the repository with all workshop content, code, etc. for ACM AI, with content organized by the quarter offered
44

5-
Check out https://ai.acmucsd.com for more details about upcoming workshops and the exciting stuff going on at ACM AI!
5+
Check out https://ai.acmucsd.com for more details about upcoming workshops and the exciting stuff going on at ACM AI!

0 commit comments

Comments
 (0)