-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathutils.py
134 lines (90 loc) · 3.44 KB
/
utils.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
import pdb
from PIL import Image
from matplotlib import pyplot as plt
import numpy as np
import tensorflow as tf
import sys
import os
# Fix RNG seeds so dataset shuffling and weight init are reproducible.
# NOTE(review): tf.set_random_seed is the TF1.x API (tf.random.set_seed in TF2).
tf.set_random_seed(1)
np.random.seed(1)
# Paths to the CUFS devkit split lists and the image directories,
# relative to the directory this script is run from.
cufs_devkit_train_name = '../cufs/devkit/train.txt'
cufs_devkit_test_name = '../cufs/devkit/test.txt'
cufs_img_dir = '../cufs/imgs/'
celeba_img_dir = '../celeba/imgs/'
# CUFS images are loaded as 64x64 single-channel arrays.
im_height =64
im_width = 64
# CelebA images are likewise treated as 64x64 with 1 channel
# (presumably grayscale inputs — confirm against the image files).
celeba_im_height=64
celeba_im_width = 64
celeba_im_channels = 1
def fileparser():
    """Collect image file names from the CUFS devkit train and test lists.

    Each line of a devkit file is expected to start with the image file
    name, separated from the rest of the line by a space.

    Returns:
        list[str]: image file names, train split first, then test split.
    """
    true_img_names = []
    # Train and test lists have the same format; parse both with one loop
    # instead of duplicating the loop body per file.
    for list_path in (cufs_devkit_train_name, cufs_devkit_test_name):
        with open(list_path) as fh:
            for line in fh:
                # First space-separated token is the image file name.
                true_img_names.append(line.split(" ")[0])
    return true_img_names
def image_reader(true_img_names):
    """Load CUFS grayscale images into a single shuffled float32 array.

    Args:
        true_img_names: iterable of file names relative to ``cufs_img_dir``.
            Images are assumed to be single-channel ``im_height`` x
            ``im_width`` — TODO confirm against the dataset.

    Returns:
        np.ndarray of shape (N, im_height, im_width, 1), float32,
        values scaled to [-1, 1], shuffled along the first axis.
    """
    # Allocate float32 up front (values are float32 anyway, and this
    # matches the dtype used by get_celeba_dataset).
    true_imgs = np.zeros([len(true_img_names), im_height, im_width, 1],
                         dtype=np.float32)
    for i, fname in enumerate(true_img_names):
        img = np.asarray(Image.open(cufs_img_dir + fname), dtype=np.float32)
        # Map pixel values from [0, 255] to [-1, 1] (tanh-friendly range).
        true_imgs[i, :, :, 0] = 2 * img / 255.0 - 1
    np.random.shuffle(true_imgs)
    return true_imgs
def get_celeba_dataset(batch_size=100, option=0):
    """Load every CelebA image from ``celeba_img_dir`` into one array.

    Args:
        batch_size: currently unused; kept for signature compatibility
            with get_cufs_dataset.
        option: currently unused; kept for signature compatibility.

    Returns:
        np.ndarray of shape (N, celeba_im_height, celeba_im_width,
        celeba_im_channels), float32, values in [-1, 1], shuffled along
        the first axis.
    """
    # Sort for a deterministic load order before the seeded shuffle.
    files = sorted(os.listdir(celeba_img_dir))
    print("Total Number of images: ", len(files))
    true_imgs = np.zeros(
        shape=[len(files), celeba_im_height, celeba_im_width, celeba_im_channels],
        dtype=np.float32)
    for i, fname in enumerate(files):
        im = Image.open(os.path.join(celeba_img_dir, fname))
        img = np.array(im, dtype=np.float32)
        # Map pixel values from [0, 255] to [-1, 1].
        true_imgs[i, :, :, 0] = 2 * img / 255.0 - 1
    np.random.shuffle(true_imgs)
    return true_imgs
def get_cufs_dataset(batch_size = 100, option = 0):
    """Return the CUFS images, either as a raw array or a tf.data pipeline.

    Args:
        batch_size: batch size used when option != 0; ignored otherwise.
        option: 0 -> return the shuffled numpy array directly;
                anything else -> return a batched tf.data.Dataset.
    """
    imgs = image_reader(fileparser())
    if option == 0:
        return imgs
    # Wrap the array in a tf.data pipeline batched by batch_size.
    imgs_tensor = tf.constant(imgs, dtype = tf.float32)
    dataset = tf.data.Dataset.from_tensor_slices(imgs_tensor)
    return dataset.batch(batch_size)
def conv_bn_leaky_relu(scope_name, input, filter, k_size, stride=(1,1), padd='SAME', training=True):
    """Conv2D -> batch normalization -> leaky ReLU (TF1.x layers API).

    Args:
        scope_name: variable scope name (reused via AUTO_REUSE).
        input: 4-D input tensor (NHWC assumed — TODO confirm).
        filter: number of output filters.
        k_size: convolution kernel size.
        stride: convolution strides.
        padd: padding mode ('SAME' or 'VALID').
        training: batch-norm mode; defaults to True, which preserves the
            previous hard-coded behavior. Pass False at inference so the
            moving statistics are used instead of batch statistics.

    Returns:
        The activated output tensor, named after the scope.
    """
    with tf.variable_scope(scope_name, reuse=tf.AUTO_REUSE) as scope:
        conv = tf.layers.conv2d(inputs=input,
                                filters=filter,
                                kernel_size=k_size,
                                strides=stride,
                                padding=padd)
        batch_norm = tf.layers.batch_normalization(inputs=conv, training=training)
        return tf.nn.leaky_relu(batch_norm, name=scope.name)
def transpose_conv_bn_relu(scope_name, input, filter, k_size, stride=(1,1), padd='VALID', training=True):
    """Transposed Conv2D -> batch normalization -> ReLU (TF1.x layers API).

    Args:
        scope_name: variable scope name (reused via AUTO_REUSE).
        input: 4-D input tensor (NHWC assumed — TODO confirm).
        filter: number of output filters.
        k_size: kernel size.
        stride: strides.
        padd: padding mode ('SAME' or 'VALID').
        training: batch-norm mode; defaults to True, which preserves the
            previous hard-coded behavior. Pass False at inference.

    Returns:
        The activated output tensor, named after the scope.
    """
    with tf.variable_scope(scope_name, reuse=tf.AUTO_REUSE) as scope:
        tr_conv = tf.layers.conv2d_transpose(input, filter, k_size, stride, padd, activation=None)
        batch_norm = tf.layers.batch_normalization(inputs=tr_conv, training=training)
        return tf.nn.relu(batch_norm, name=scope.name)
def safe_mkdir(path):
    """Create a directory if there isn't one already.

    Only FileExistsError is suppressed, so real failures (permission
    denied, missing parent directory) surface instead of being
    silently ignored by a broad ``except OSError``.
    """
    try:
        os.mkdir(path)
    except FileExistsError:
        # Directory already present — nothing to do.
        pass
#tf.reset_default_graph()
#imgs = get_cufs_dataset(100)
#print(imgs)