Showing 2 changed files with 65 additions and 0 deletions.
base_model.py
0 → 100644
1 | +import numpy as np | ||
2 | +import tensorflow as tf | ||
3 | + | ||
4 | +FLAGS = tf.app.flags.FLAGS | ||
5 | + | ||
class BaseModel(object):
    """A six-hidden-layer fully connected network (TF1 graph-mode API).

    Calling the instance builds the variables and the forward graph and
    returns the output logits tensor (named 'output').  Variable names
    W1..W6/W_out and B1..B6/B_out are fixed so existing checkpoints
    continue to restore.

    Args:
        X: input tensor, expected shape (batch, n_input) -- TODO confirm with caller.
        n_input: width of the input layer.
        n_classes: width of the output layer.
        n_hidden_1..n_hidden_6: widths of the six hidden layers.
        is_training: bool tensor/flag fed to batch normalization.
        keep_prob: dropout keep probability (scalar or placeholder).
    """

    def __init__(self, X, n_input, n_classes, n_hidden_1, n_hidden_2, n_hidden_3,
                 n_hidden_4, n_hidden_5, n_hidden_6, is_training, keep_prob):
        self.X = X
        self.n_input = n_input
        self.is_training = is_training

        # Hyperparameters
        self.keep_prob = keep_prob  # dropout keep probability

        self.n_classes = n_classes
        # Individual attributes kept for backward compatibility with any
        # external readers; the list below drives the layer-building loops.
        self.n_hidden_1 = n_hidden_1
        self.n_hidden_2 = n_hidden_2
        self.n_hidden_3 = n_hidden_3
        self.n_hidden_4 = n_hidden_4
        self.n_hidden_5 = n_hidden_5
        self.n_hidden_6 = n_hidden_6
        # dims[i-1] -> dims[i] gives the (fan_in, fan_out) of hidden layer i.
        self._layer_dims = [n_input, n_hidden_1, n_hidden_2, n_hidden_3,
                            n_hidden_4, n_hidden_5, n_hidden_6]

    def store_layer_weights_and_bias(self):
        """Create all weight and bias variables.

        Populates self.weights ('h1_w'..'h6_w', 'out_w') and self.biases
        ('h1_b'..'h6_b', 'out_b').  TF variable names (W1.., B1.., W_out,
        B_out) are preserved exactly for checkpoint compatibility.
        """
        init = tf.contrib.layers.variance_scaling_initializer
        self.weights = {}
        self.biases = {}
        for i in range(1, 7):
            fan_in = self._layer_dims[i - 1]
            fan_out = self._layer_dims[i]
            self.weights['h%d_w' % i] = tf.get_variable(
                'W%d' % i, shape=(fan_in, fan_out), initializer=init())
            # NOTE(review): variance-scaling init for a 1-D bias is unusual
            # (zeros is conventional); kept as-is to preserve behavior.
            self.biases['h%d_b' % i] = tf.get_variable(
                'B%d' % i, shape=(fan_out,), initializer=init())
        self.weights['out_w'] = tf.get_variable(
            'W_out', shape=(self.n_hidden_6, self.n_classes), initializer=init())
        self.biases['out_b'] = tf.get_variable(
            'B_out', shape=(self.n_classes,), initializer=init())

    def fc(self, inputs, weights, biases, name, dim=None):
        """One fully connected layer: affine -> [batch norm] -> ReLU -> dropout.

        Batch normalization is applied only when FLAGS.bn is set; it uses
        self.is_training so inference statistics are handled correctly.
        `dim` is unused -- retained (now optional) for signature compatibility.
        """
        h = tf.add(tf.matmul(inputs, weights), biases)
        if FLAGS.bn:
            h = tf.layers.batch_normalization(
                h, training=self.is_training, name='bn_' + name)
        h = tf.nn.relu(h, name=name)
        # NOTE(review): dropout is applied unconditionally; callers should
        # feed keep_prob=1.0 at eval time -- verify against train.py.
        return tf.nn.dropout(h, self.keep_prob)

    def net(self):
        """Build the forward graph and return the output logits tensor.

        Side effects: sets self.h1..self.h6 to the hidden activations,
        matching the original per-layer attributes.
        """
        h = self.X
        for i in range(1, 7):
            h = self.fc(h,
                        self.weights['h%d_w' % i],
                        self.biases['h%d_b' % i],
                        name='layer%d' % i,
                        dim=self._layer_dims[i])
            setattr(self, 'h%d' % i, h)  # keep self.h1..h6 for external use
        return tf.add(tf.matmul(h, self.weights['out_w']),
                      self.biases['out_b'], name='output')

    def __call__(self):
        """Create variables, then build and return the network output."""
        self.store_layer_weights_and_bias()
        return self.net()
65 | + |
train.py
0 → 100644
This diff is collapsed. Click to expand it.
-
Please register or login to post a comment