forked from cameronfabbri/Wasserstein-GAN-Tensorflow
-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy patharchitecture.py
executable file
·70 lines (61 loc) · 2.58 KB
/
architecture.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
from __future__ import print_function

import sys

import tensorflow as tf
import tensorflow.contrib.layers as layers
def lrelu(x, leak=0.2, name='lrelu'):
    """Leaky ReLU activation (https://arxiv.org/pdf/1502.01852.pdf).

    Args:
        x: input tensor.
        leak: slope applied to negative inputs (0 < leak < 1).
        name: op name, forwarded to the TensorFlow op.

    Returns:
        Tensor of the same shape as x.
    """
    # max(leak*x, x) equals x for x >= 0 and leak*x for x < 0.
    # Fix: the original accepted `name` but never used it; forward it so the
    # op is actually labeled in the graph.
    return tf.maximum(leak * x, x, name=name)
def netG(z, batch_size):
    """DCGAN-style generator for the Wasserstein GAN.

    Projects the latent vector to a 4x4x1024 feature map, then applies four
    stride-2 transposed convolutions (each doubling spatial resolution with
    the contrib-layers default 'SAME' padding): 4 -> 8 -> 16 -> 32 -> 64.

    Fix: converted Python 2 print statements to print() calls so the module
    loads under Python 3 (behavior under Python 2 is preserved by the
    `from __future__ import print_function` at the top of the file).

    Args:
        z: latent tensor, presumably shape [batch_size, z_dim] -- TODO
            confirm against the caller.
        batch_size: number of samples; needed for the explicit reshape.

    Returns:
        conv4: generated images, shape [batch_size, 64, 64, 3], squashed to
        [-1, 1] by tanh.
    """
    print('GENERATOR')

    # Project and reshape the latent vector to a 4x4x1024 feature map.
    z = layers.fully_connected(z, 4*4*1024, normalizer_fn=layers.batch_norm,
                               activation_fn=tf.nn.relu, scope='g_z')
    z = tf.reshape(z, [batch_size, 4, 4, 1024])

    conv1 = layers.convolution2d_transpose(z, 512, 5, stride=2, normalizer_fn=layers.batch_norm, activation_fn=tf.nn.relu, scope='g_conv1')
    conv2 = layers.convolution2d_transpose(conv1, 256, 5, stride=2, normalizer_fn=layers.batch_norm, activation_fn=tf.nn.relu, scope='g_conv2')
    conv3 = layers.convolution2d_transpose(conv2, 128, 5, stride=2, normalizer_fn=layers.batch_norm, activation_fn=tf.nn.relu, scope='g_conv3')
    # Output layer: no batch norm; tanh maps pixel values into [-1, 1].
    conv4 = layers.convolution2d_transpose(conv3, 3, 5, stride=2, activation_fn=tf.nn.tanh, scope='g_conv4')

    print('z:', z)
    print('conv1:', conv1)
    print('conv2:', conv2)
    print('conv3:', conv3)
    print('conv4:', conv4)
    print()
    print('END G')
    print()

    # Expose intermediate tensors for inspection/restoration elsewhere.
    tf.add_to_collection('vars', z)
    tf.add_to_collection('vars', conv1)
    tf.add_to_collection('vars', conv2)
    tf.add_to_collection('vars', conv3)
    tf.add_to_collection('vars', conv4)

    return conv4
def netD(input_images, batch_size, reuse=False):
    """DCGAN-style critic (discriminator) for the Wasserstein GAN.

    Four stride-2 convolutions followed by a 4x4 convolution producing a
    single-channel score. No sigmoid on the output, consistent with the
    Wasserstein loss.

    Fix: converted Python 2 print statements to print() calls so the module
    loads under Python 3 (behavior under Python 2 is preserved by the
    `from __future__ import print_function` at the top of the file).

    Args:
        input_images: image batch; presumably [batch_size, 64, 64, 3] to
            match the generator output -- TODO confirm against caller.
        batch_size: unused in this function; kept for interface
            compatibility with callers.
        reuse: set True on the second call (e.g. the fake-image pass) so the
            same variables are reused instead of recreated.

    Returns:
        conv5: raw critic scores.
    """
    print('DISCRIMINATOR reuse = ' + str(reuse))
    sc = tf.get_variable_scope()
    with tf.variable_scope(sc, reuse=reuse):
        # First layer deliberately has no batch norm (standard DCGAN practice).
        conv1 = layers.conv2d(input_images, 64, 5, stride=2, activation_fn=None, scope='d_conv1')
        conv1 = lrelu(conv1)
        conv2 = layers.conv2d(conv1, 128, 5, stride=2, normalizer_fn=layers.batch_norm, activation_fn=None, scope='d_conv2')
        conv2 = lrelu(conv2)
        conv3 = layers.conv2d(conv2, 256, 5, stride=2, normalizer_fn=layers.batch_norm, activation_fn=None, scope='d_conv3')
        conv3 = lrelu(conv3)
        conv4 = layers.conv2d(conv3, 512, 5, stride=2, normalizer_fn=layers.batch_norm, activation_fn=None, scope='d_conv4')
        conv4 = lrelu(conv4)
        conv5 = layers.conv2d(conv4, 1, 4, stride=1, activation_fn=None, scope='d_conv5')
        # NOTE(review): applying lrelu to the final critic score is unusual
        # for a WGAN critic (typically left linear); preserved as-is since
        # changing it would alter training behavior.
        conv5 = lrelu(conv5)

        print('input images:', input_images)
        print('conv1:', conv1)
        print('conv2:', conv2)
        print('conv3:', conv3)
        print('conv4:', conv4)
        print('conv5:', conv5)
        print('END D\n')

        # Expose intermediate tensors for inspection/restoration elsewhere.
        tf.add_to_collection('vars', conv1)
        tf.add_to_collection('vars', conv2)
        tf.add_to_collection('vars', conv3)
        tf.add_to_collection('vars', conv4)
        tf.add_to_collection('vars', conv5)

        return conv5