Important components:
1) Graph : Holds information regarding order of computations
2) Operations: Nodes in the graph
Gradient computations are described in later sections
import numpy as np
class Graph():
    """Records every node of a computation in creation order.

    Operations, Placeholders and Variables register themselves here when
    they are constructed, provided this graph is the current default.
    """

    def __init__(self):
        # Creation-ordered registries, appended to by the node constructors.
        self.operations, self.placeholders, self.variables = [], [], []

    def set_as_default(self):
        """Make this graph the module-level ``_default_graph`` that new nodes join."""
        global _default_graph
        _default_graph = self
class Operation():
    """Base class for a computation node in the graph.

    On construction, the node wires itself into the ``output_nodes`` list of
    each of its inputs and registers itself with the module-level
    ``_default_graph`` (a Graph must have been set as default first).

    Parameters
    ----------
    input_nodes : list of nodes this operation consumes, or None for none.
    """

    def __init__(self, input_nodes=None):
        # Fix: the original used a mutable default ``input_nodes=[]`` —
        # every no-arg Operation would alias the same shared list.
        self.input_nodes = [] if input_nodes is None else input_nodes
        self.output_nodes = []
        # Forward-link each input to this node so the graph can be walked
        # in both directions.
        for node in self.input_nodes:
            node.output_nodes.append(self)
        _default_graph.operations.append(self)

    # This is called when executing the graph with the Session class.
    def compute(self):
        """Evaluate this node; must be overridden by concrete subclasses."""
        pass
class add(Operation):
    """Addition node: its value is the sum of its two inputs."""

    def __init__(self, x, y):
        self.name = "add"
        super().__init__([x, y])

    def compute(self, x_var, y_var):
        """Return ``x_var + y_var`` for the already-resolved input values."""
        # Keep the concrete operands of this evaluation for inspection.
        self.inputs = [x_var, y_var]
        result = x_var + y_var
        return result
class multiply(Operation):
    """Multiplication node: its value is the product of its two inputs."""

    def __init__(self, x, y):
        self.name = "multiply"
        super().__init__([x, y])

    def compute(self, x_var, y_var):
        """Return ``x_var * y_var`` for the already-resolved input values."""
        # Keep the concrete operands of this evaluation for inspection.
        self.inputs = [x_var, y_var]
        result = x_var * y_var
        return result
class matmul(Operation):
    """Matrix-multiplication node built on ``numpy``'s ``dot``."""

    def __init__(self, x, y):
        self.name = "matmul"
        super().__init__([x, y])

    def compute(self, x_var, y_var):
        """Return ``x_var.dot(y_var)``; inputs are assumed to be numpy arrays."""
        # Keep the concrete operands of this evaluation for inspection.
        self.inputs = [x_var, y_var]
        product = x_var.dot(y_var)
        return product
NOTE: In TensorFlow, variables are the nodes in a graph for which gradients have to be computed. We'll cover gradient computation in a later section.
class Variable():
    """Graph node that carries a (trainable) value, e.g. weights or a bias."""

    def __init__(self, name, initial_value=None):
        self.name = name
        # The value is fixed at construction; Session.run reads it directly.
        self.value = initial_value
        # Filled in by Operation.__init__ when an op consumes this variable.
        self.output_nodes = []
        # Register with the currently selected default graph.
        _default_graph.variables.append(self)
Placeholders are the data inputs supplied to the graph at run time.
class Placeholder():
    """Graph node whose value is supplied at run time via ``feed_dict``."""

    def __init__(self, name):
        self.name = name
        # Filled in by Operation.__init__ when an op consumes this placeholder.
        self.output_nodes = []
        # Bug fix: this previously appended to ``_default_graph.variables``,
        # leaving ``Graph.placeholders`` permanently empty. Register in the
        # list Graph actually reserves for placeholders.
        _default_graph.placeholders.append(self)
Building the Graph
# Build a small graph computing z = A*x + B (A=10, B=1, x fed at run time).
g = Graph()
g.set_as_default()
A = Variable("A",10)
B = Variable("B",1)
x = Placeholder("x")
y = multiply(A,x)
z = add(y,B)
# Notebook-style inspection cells: bare expressions that display the
# wiring between nodes (they have no effect when run as a script).
[node.name for node in A.output_nodes]
[node.name for node in x.output_nodes]
[node.name for node in y.input_nodes]
[node.name for node in y.output_nodes]
[node.name for node in B.output_nodes]
[node.name for node in z.input_nodes]
[node.name for node in z.output_nodes]
def traverse_postorder(operation):
    """Return all nodes reachable from ``operation`` in post-order.

    Inputs always appear before the operation that consumes them, so
    evaluating the list front-to-back resolves every dependency first.
    Each entry is a single-item dict mapping the node's name to the node
    itself (Session.run unwraps this).
    """
    ordered = []

    def recurse(node):
        # Only Operations have inputs to descend into; Variables and
        # Placeholders are leaves of the traversal.
        if isinstance(node, Operation):
            for parent in node.input_nodes:
                recurse(parent)
        ordered.append({node.name: node})

    recurse(operation)
    return ordered
# Post-order traversal of the graph rooted at z: A, x, multiply, B, add.
tree = traverse_postorder(z)
# Notebook inspection cells: display the traversal, then unwrap the
# first entry's node object from its {name: node} dict.
tree
list(tree[0].values())[0]
class Session():
    """Executes a graph by evaluating its nodes in dependency order."""

    def run(self, operation, feed_dict=None):
        """Evaluate ``operation`` and return its value.

        Parameters
        ----------
        operation : the graph node whose value is wanted.
        feed_dict : mapping from Placeholder node to its concrete value.

        Raises
        ------
        KeyError : if a Placeholder in the graph is missing from feed_dict.
        """
        # Fix: avoid the shared mutable default argument ``feed_dict={}``.
        feed_dict = {} if feed_dict is None else feed_dict
        # Post-order guarantees a node's inputs are evaluated before it.
        nodes_postorder = traverse_postorder(operation)
        for node_entry in nodes_postorder:
            # traverse_postorder wraps each node as {name: node}; unwrap it.
            node = next(iter(node_entry.values()))
            if isinstance(node, Placeholder):
                # Placeholder values come from the caller.
                node.value = feed_dict[node]
            elif isinstance(node, Operation):
                # Gather the already-computed values of this op's inputs,
                # then unpack them into compute's positional parameters.
                node.inputs = [input_node.value for input_node in node.input_nodes]
                node.value = node.compute(*node.inputs)
            # Variables already carry their value from construction.
            if isinstance(node.value, list):
                # Promote plain lists to arrays so matmul's .dot() works.
                node.value = np.array(node.value)
        return operation.value
# Execute the graph: z = A*x + B = 10*3 + 1 = 31.
sess = Session()
result = sess.run(z,feed_dict={x:3})
# Second example: a matrix/vector graph z = A.dot(x) + b.
g = Graph()
g.set_as_default()
A = Variable("A",np.array([[10,20],[30,40]]))
b = Variable("b",np.array([1,2]))
x = Placeholder("x")
z = add(matmul(A,x),b)
sess = Session()
# x is fed the scalar 10, so A.dot(10) scales the matrix element-wise.
sess.run(z,feed_dict={x:10})
import matplotlib.pyplot as plt
%matplotlib inline
def sigmoid(z):
    """Logistic function: squashes any real input into (0, 1), element-wise."""
    denominator = 1 + np.exp(-z)
    return 1 / denominator
# Visualize the sigmoid curve over [-10, 10].
sample_z = np.linspace(-10,10,100)
sample_a = sigmoid(sample_z)
plt.plot(sample_z,sample_a)
An activation function is a type of Operation.
class Sigmoid(Operation):
    """Activation node that applies the logistic function element-wise."""

    def __init__(self, z):
        self.name = "sigmoid"
        super().__init__([z])

    def compute(self, z_val):
        """Return sigmoid(z_val) for the already-resolved input value."""
        # Consistency fix: record the concrete operand like the other
        # operations (add/multiply/matmul) do in their compute methods.
        self.inputs = [z_val]
        return sigmoid(z_val)
from sklearn.datasets import make_blobs
# Generate a toy 2-class dataset: 50 samples, 2 features, 2 cluster centers.
data = make_blobs(n_samples=50,n_features=2,centers=2,random_state=75) #centers=[number of classes]
data
# make_blobs returns (features, labels).
features = data[0]
labels = data[1]
#plt.plot(features[:,0],features[:,1],'r*') #also works the same except we dont have option to color acc to labels
plt.scatter(features[:,0],features[:,1],c=labels)
# Overlay a hand-picked separating line y = -x + 5 on the scatter plot.
x = np.linspace(0,10,10)
y = -x+5
plt.scatter(features[:,0],features[:,1],c=labels)
plt.plot(x,y)
Equation of the classifier
y = -x + 5
Substitute the features for x and y and check on which side of the line the result lies.
f1 = -f0 + 5
f1 + f0 - 5 = 0
(1,1)*F - 5 = 0
(1,1) is the Weight and 5 is a bias
Eg, consider point (8,10)
# Raw score w.F - 5 for the point (8, 10): 1*8 + 1*10 - 5 = 13 (> 0).
res = np.array([1,1]).dot(np.array([[8],[10]])) - 5
print(res)
# Sigmoid squashes the raw score into (0, 1); 13 maps to roughly 1.
print(sigmoid(res))
13 > 0, which implies it belongs to the yellow class.
We build a graph for this classifier
# Graph for the linear classifier: a = sigmoid(W.F + b).
g = Graph()
g.set_as_default()
F = Placeholder("F")
W = Variable("W",np.array([1,1]))
b = Variable("b",-5)
z = add(matmul(W,F),b)
a = Sigmoid(z)
sess = Session()
This point belongs to the yellow class.
# Point (8, 10): W.F + b = 13, sigmoid ~ 1 -> yellow class.
sess.run(a,feed_dict={F:np.array([8,10])})
# Display the post-order evaluation schedule Session.run uses.
traverse_postorder(a)
This point belongs to the violet class.
# Point (0, -5): W.F + b = -10, sigmoid ~ 0 -> violet class.
sess.run(a,feed_dict={F:np.array([0,-5])})