-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathRnnParams.m
75 lines (59 loc) · 2.46 KB
/
RnnParams.m
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
classdef RnnParams
    %RNNPARAMS Weights, Adagrad accumulators, and loss state for a
    %   vanilla character-level RNN (min-char-rnn style).
    %
    %   Value class: update methods return the modified object, which the
    %   caller must reassign, e.g. params = params.ParamUpdate(grads, lr).
    properties
        hidden_size;   % number of hidden units
        vocab_size;    % number of distinct input/output symbols
        seq_length;    % truncation length used for the initial loss estimate
        Wxh;           % weights: input  -> hidden (hidden_size x vocab_size)
        Whh;           % weights: hidden -> hidden (hidden_size x hidden_size)
        Why;           % weights: hidden -> output (vocab_size  x hidden_size)
        bh;            % hidden bias (hidden_size x 1)
        by;            % output bias (vocab_size x 1)
        mWxh;          % Adagrad accumulated squared gradients, same shape as Wxh
        mWhh;          % ... same shape as Whh
        mWhy;          % ... same shape as Why
        mbh;           % ... same shape as bh
        mby;           % ... same shape as by
        loss;          % most recently recorded sequence loss
        smooth_loss;   % exponential moving average of the loss (decay 0.999)
    end
    methods
        function obj = RnnParams(hidden_size, vocab_size, seq_length)
            %RNNPARAMS Construct parameters for the given network size.
            %   The loss is seeded at the expected loss of a uniform
            %   predictor: -log(1/vocab_size) per step over seq_length steps.
            obj.loss        = -log(1/vocab_size) * seq_length;
            obj.smooth_loss = obj.loss;
            obj.hidden_size = hidden_size;
            obj.vocab_size  = vocab_size;
            obj.seq_length  = seq_length;
            %%% Initialize RNN model parameters - small random weights
            obj.Wxh = randn(hidden_size, vocab_size )*0.01; % weights: input to hidden
            obj.Whh = randn(hidden_size, hidden_size)*0.01; % weights: hidden to hidden
            obj.Why = randn(vocab_size , hidden_size)*0.01; % weights: hidden to output
            obj.bh  = zeros(hidden_size, 1);                % bias: hidden
            obj.by  = zeros(vocab_size , 1);                % bias: output
            %%% Memory variables for Adagrad (accumulated squared gradients)
            obj.mWxh = zeros(size(obj.Wxh));
            obj.mWhh = zeros(size(obj.Whh));
            obj.mWhy = zeros(size(obj.Why));
            obj.mbh  = zeros(size(obj.bh));
            obj.mby  = zeros(size(obj.by));
        end
        function obj = ParamUpdate(obj, grads, lr)
            %PARAMUPDATE Apply one Adagrad step to every parameter.
            %   grads must carry fields dWxh, dWhh, dWhy, dbh, dby with the
            %   same shapes as the corresponding parameters; lr is the
            %   learning rate.
            [obj.mWxh, obj.Wxh] = obj.adagrad_update(lr, obj.Wxh, grads.dWxh, obj.mWxh);
            [obj.mWhh, obj.Whh] = obj.adagrad_update(lr, obj.Whh, grads.dWhh, obj.mWhh);
            [obj.mWhy, obj.Why] = obj.adagrad_update(lr, obj.Why, grads.dWhy, obj.mWhy);
            [obj.mbh , obj.bh ] = obj.adagrad_update(lr, obj.bh , grads.dbh , obj.mbh);
            [obj.mby , obj.by ] = obj.adagrad_update(lr, obj.by , grads.dby , obj.mby);
        end
        function obj = LossUpdate(obj, newLoss)
            %LOSSUPDATE Record the latest loss and fold it into the
            %   exponential moving average (decay 0.999).
            %
            %   Bug fix: the original ignored newLoss entirely — it reused
            %   the stale obj.loss in the EMA and never stored the new
            %   value (plus two dead locals), so smooth_loss could never
            %   track training progress.
            obj.loss        = newLoss;
            obj.smooth_loss = obj.smooth_loss * 0.999 + newLoss * 0.001;
        end
    end
    methods(Static)
        %% Support functions
        function [mem, param] = adagrad_update(lr, param, dparam, mem)
            %ADAGRAD_UPDATE One Adagrad step for a single parameter array.
            %   Accumulates squared gradients element-wise in mem and scales
            %   the step by 1/sqrt(mem); the 1e-8 term avoids division by
            %   zero on the first update.
            mem   = mem + dparam .* dparam;
            param = param - lr * dparam ./ sqrt(mem + 1e-8);
        end
    end
end