-
Notifications
You must be signed in to change notification settings - Fork 12
/
Copy pathcontrolCNN.py
82 lines (69 loc) · 2.94 KB
/
controlCNN.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import graphAttack as ga
import numpy as np
import scipy.optimize
"""Control script"""
def run():
    """Assemble a small CNN in graphAttack and gradient-check it.

    Builds conv -> flatten -> dropout -> dense(ReLU) -> dense(softmax)
    on random data, then compares the analytic (backprop) gradient with
    a numerical one from ``scipy.optimize.approx_fprime``.

    Returns
    -------
    tuple
        (numerical gradient, analytic gradient, analytic cost, graph).
    """
    # Random stand-in batch: 5 samples, 1 channel, 10x10 images; 10 outputs.
    trainData = np.random.random((5, 1, 10, 10))
    trainLabels = np.random.random((5, 10))

    # ------ conv2D operation testing
    mainGraph = ga.Graph()
    feed = mainGraph.addOperation(
        ga.Variable(trainData), doGradient=False, feederOperation=True)

    # Conv layer: three 5x5 filters, SAME padding, stride 1, ReLU with
    # batch normalisation, then 2x2 max pooling at stride 2.
    # NOTE: "filterHeigth" spelling matches the graphAttack API.
    cnn1 = ga.addConv2dLayer(
        mainGraph,
        inputOperation=feed,
        nFilters=3,
        filterHeigth=5,
        filterWidth=5,
        padding="SAME",
        convStride=1,
        activation=ga.ReLUActivation,
        batchNormalisation=True,
        pooling=ga.MaxPoolOperation,
        poolHeight=2,
        poolWidth=2,
        poolStride=2)

    flattenOp = mainGraph.addOperation(ga.FlattenFeaturesOperation(cnn1))
    # Dropout rate 0.0 keeps the op in the graph without dropping anything.
    flattenDrop = mainGraph.addOperation(
        ga.DropoutOperation(flattenOp, 0.0),
        doGradient=False, finalOperation=False)

    l1 = ga.addDenseLayer(
        mainGraph, 20,
        inputOperation=flattenDrop,
        activation=ga.ReLUActivation,
        dropoutRate=0.0,
        batchNormalisation=False)
    l2 = ga.addDenseLayer(
        mainGraph, 10,
        inputOperation=l1,
        activation=ga.SoftmaxActivation,
        dropoutRate=0.0,
        batchNormalisation=False)
    fcost = mainGraph.addOperation(
        ga.CrossEntropyCostSoftmax(l2, trainLabels),
        doGradient=False,
        finalOperation=True)

    def fprime(p, data, labels):
        """Return (cost, analytic gradient) at parameter vector p."""
        mainGraph.feederOperation.assignData(data)
        mainGraph.resetAll()
        mainGraph.finalOperation.assignLabels(labels)
        mainGraph.attachParameters(p)
        cost = mainGraph.feedForward()
        mainGraph.feedBackward()
        grad = mainGraph.unrollGradients()
        return cost, grad

    def f(p):
        """Return the cost at parameter vector p (for numerical differencing)."""
        mainGraph.feederOperation.assignData(trainData)
        mainGraph.resetAll()
        mainGraph.finalOperation.assignLabels(trainLabels)
        mainGraph.attachParameters(p)
        return mainGraph.feedForward()

    params = mainGraph.unrollGradientParameters()
    numGrad = scipy.optimize.approx_fprime(params, f, 1e-8)
    analCostGraph, analGradientGraph = fprime(params, trainData, trainLabels)
    return numGrad, analGradientGraph, analCostGraph, mainGraph
if __name__ == "__main__":
    # Gradient check: the summed |analytic - numerical| difference should be
    # tiny relative to the total gradient magnitude printed alongside it.
    nGrad, aGrad, aCost, mainGraph = run()
    # (Removed a dead `params = mainGraph.unrollGradientParameters()` call —
    # its result was never used here.)
    print(mainGraph)
    print("\n%-16.16s %-16.16s" % ("Grad difference", "Total Gradient"))
    print("%-16.8e %-16.8e" % (np.sum(np.abs(aGrad - nGrad)), np.sum(np.abs(aGrad))))