From a419609b8f6750be97f8f54d6626f87af5cb1686 Mon Sep 17 00:00:00 2001
From: TITAN-PyCompat
Date: Wed, 17 Apr 2019 17:18:51 +0800
Subject: [PATCH 1/6] Adapt summary API calls to TensorFlow 1.x

---
 .DS_Store                                 | Bin 0 -> 6148 bytes
 Season1/.DS_Store                         | Bin 0 -> 6148 bytes
 Season1/10-11/dp.py                       | 19 ++++++++++++------
 Season1/12-15/dp.py                       | 23 ++++++++++++++--------
 Season1/12-15/dp_refined_api.py           | 18 +++++++++++------
 Season1/16-19/dp.py                       | 18 +++++++++++------
 Season1/20/dp.py                          | 18 +++++++++++------
 Season1_Tensorflow1.1_Python3.5/.DS_Store | Bin 0 -> 6148 bytes
 8 files changed, 64 insertions(+), 32 deletions(-)
 create mode 100644 .DS_Store
 create mode 100644 Season1/.DS_Store
 create mode 100644 Season1_Tensorflow1.1_Python3.5/.DS_Store

diff --git a/.DS_Store b/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..0c66cfd1e0a9d91ebf9fcf84e21130b45eece0fb
Binary files /dev/null and b/.DS_Store differ
diff --git a/Season1/.DS_Store b/Season1/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..4b4c8e490fe774a13fc96c3852dbbb480feb7bb5
Binary files /dev/null and b/Season1/.DS_Store differ
diff --git a/Season1/10-11/dp.py b/Season1/10-11/dp.py
index 03040f2..3a45abb 100644
--- a/Season1/10-11/dp.py
+++ b/Season1/10-11/dp.py
@@ -9,6 +9,13 @@
 # our own module
 import load
 
+if(tf.__version__.startswith("1.")):
+    merge_all_summaries , scalar_summary , histogram_summary = tf.summary.merge_all , tf.summary.scalar , tf.summary.histogram
+
+else:
+    merge_all_summaries , scalar_summary , histogram_summary = tf.merge_all_summaries , tf.scalar_summary , tf.histogram_summary
+
+
 train_samples, train_labels = load._train_samples, load._train_labels
 test_samples, test_labels = load._test_samples, load._test_labels
@@ -88,8 +95,8 @@ def define_graph(self):
                 tf.truncated_normal([image_size * image_size, self.num_hidden], stddev=0.1), name='fc1_weights'
             )
             fc1_biases = tf.Variable(tf.constant(0.1, shape=[self.num_hidden]), name='fc1_biases')
-            tf.histogram_summary('fc1_weights', fc1_weights)
-            tf.histogram_summary('fc1_biases', fc1_biases)
+            histogram_summary('fc1_weights', fc1_weights)
+            histogram_summary('fc1_biases', fc1_biases)
 
         # fully connected layer 2 --> output layer
         with tf.name_scope('fc2'):
@@ -97,8 +104,8 @@ def define_graph(self):
             fc2_weights = tf.Variable(
                 tf.truncated_normal([self.num_hidden, num_labels], stddev=0.1), name='fc2_weights'
             )
             fc2_biases = tf.Variable(tf.constant(0.1, shape=[num_labels]), name='fc2_biases')
-            tf.histogram_summary('fc2_weights', fc2_weights)
-            tf.histogram_summary('fc2_biases', fc2_biases)
+            histogram_summary('fc2_weights', fc2_weights)
+            histogram_summary('fc2_biases', fc2_biases)
 
         # Now define the graph's computations
@@ -121,7 +128,7 @@ def model(data):
             self.loss = tf.reduce_mean(
                 tf.nn.softmax_cross_entropy_with_logits(logits, self.tf_train_labels)
             )
-            tf.scalar_summary('Loss', self.loss)
+            scalar_summary('Loss', self.loss)
 
 
         # Optimizer.
@@ -133,7 +140,7 @@ def model(data):
             self.train_prediction = tf.nn.softmax(logits, name='train_prediction')
             self.test_prediction = tf.nn.softmax(model(self.tf_test_samples), name='test_prediction')
 
-        self.merged = tf.merge_all_summaries()
+        self.merged = merge_all_summaries()
 
     def run(self):
         '''
diff --git a/Season1/12-15/dp.py b/Season1/12-15/dp.py
index 617a8d3..28a37e6 100644
--- a/Season1/12-15/dp.py
+++ b/Season1/12-15/dp.py
@@ -9,6 +9,13 @@
 # our own module
 import load
 
+if(tf.__version__.startswith("1.")):
+    image_summary , scalar_summary = tf.summary.image , tf.summary.scalar
+    merge_summary , histogram_summary = tf.summary.merge , tf.summary.histogram
+else:
+    image_summary , scalar_summary = tf.image_summary , tf.scalar_summary
+    merge_summary , histogram_summary = tf.merge_summary , tf.histogram_summary
+
 train_samples, train_labels = load._train_samples, load._train_labels
 test_samples, test_labels = load._test_samples, load._test_labels
@@ -121,15 +128,15 @@ def define_graph(self):
                     [(image_size // down_scale) * (image_size // down_scale) * self.last_conv_depth, self.num_hidden], stddev=0.1))
             fc1_biases = tf.Variable(tf.constant(0.1, shape=[self.num_hidden]))
-            self.train_summaries.append(tf.histogram_summary('fc1_weights', fc1_weights))
-            self.train_summaries.append(tf.histogram_summary('fc1_biases', fc1_biases))
+            self.train_summaries.append(histogram_summary('fc1_weights', fc1_weights))
+            self.train_summaries.append(histogram_summary('fc1_biases', fc1_biases))
 
 
         # fully connected layer 2 --> output layer
         with tf.name_scope('fc2'):
             fc2_weights = tf.Variable(tf.truncated_normal([self.num_hidden, num_labels], stddev=0.1), name='fc2_weights')
             fc2_biases = tf.Variable(tf.constant(0.1, shape=[num_labels]), name='fc2_biases')
-            self.train_summaries.append(tf.histogram_summary('fc2_weights', fc2_weights))
-            self.train_summaries.append(tf.histogram_summary('fc2_biases', fc2_biases))
+            self.train_summaries.append(histogram_summary('fc2_weights', fc2_weights))
+            self.train_summaries.append(histogram_summary('fc2_biases', fc2_biases))
 
         # Now define the graph's computations
         def model(data, train=True):
@@ -154,7 +161,7 @@ def model(data, train=True):
                     filter_map = hidden[-1]
                     filter_map = tf.transpose(filter_map, perm=[2, 0, 1])
                     filter_map = tf.reshape(filter_map, (self.conv1_depth, 32, 32, 1))
-                    self.test_summaries.append(tf.image_summary('conv1_relu', tensor=filter_map, max_images=self.conv1_depth))
+                    self.test_summaries.append(image_summary('conv1_relu', tensor=filter_map, max_images=self.conv1_depth))
 
             with tf.name_scope('conv2_model'):
                 with tf.name_scope('convolution'):
@@ -206,7 +213,7 @@ def model(data, train=True):
             logits = model(self.tf_train_samples)
             with tf.name_scope('loss'):
                 self.loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, self.tf_train_labels))
-                self.train_summaries.append(tf.scalar_summary('Loss', self.loss))
+                self.train_summaries.append(scalar_summary('Loss', self.loss))
 
             # Optimizer.
             with tf.name_scope('optimizer'):
@@ -218,8 +225,8 @@ def model(data, train=True):
         with tf.name_scope('test'):
             self.test_prediction = tf.nn.softmax(model(self.tf_test_samples, train=False), name='test_prediction')
 
-        self.merged_train_summary = tf.merge_summary(self.train_summaries)
-        self.merged_test_summary = tf.merge_summary(self.test_summaries)
+        self.merged_train_summary = merge_summary(self.train_summaries)
+        self.merged_test_summary = merge_summary(self.test_summaries)
 
     def run(self):
         '''
diff --git a/Season1/12-15/dp_refined_api.py b/Season1/12-15/dp_refined_api.py
index 0003d8d..dd825ed 100644
--- a/Season1/12-15/dp_refined_api.py
+++ b/Season1/12-15/dp_refined_api.py
@@ -3,6 +3,12 @@
 from sklearn.metrics import confusion_matrix
 import numpy as np
 
+if(tf.__version__.startswith("1.")):
+    image_summary , scalar_summary = tf.summary.image , tf.summary.scalar
+    merge_summary , histogram_summary = tf.summary.merge , tf.summary.histogram
+else:
+    image_summary , scalar_summary = tf.image_summary , tf.scalar_summary
+    merge_summary , histogram_summary = tf.merge_summary , tf.histogram_summary
 
 class Network():
     def __init__(self, train_batch_size, test_batch_size, pooling_scale):
@@ -68,8 +74,8 @@ def add_fc(self, *, in_num_nodes, out_num_nodes, activation='relu', name):
         biases = tf.Variable(tf.constant(0.1, shape=[out_num_nodes]))
         self.fc_weights.append(weights)
         self.fc_biases.append(biases)
-        self.train_summaries.append(tf.histogram_summary(str(len(self.fc_weights))+'_weights', weights))
-        self.train_summaries.append(tf.histogram_summary(str(len(self.fc_biases))+'_biases', biases))
+        self.train_summaries.append(histogram_summary(str(len(self.fc_weights))+'_weights', weights))
+        self.train_summaries.append(histogram_summary(str(len(self.fc_biases))+'_biases', biases))
 
     # should make the definition as an exposed API, instead of implemented in the function
     def define_inputs(self, *, train_samples_shape, train_labels_shape, test_samples_shape):
@@ -131,7 +137,7 @@ def model(data_flow, train=True):
             logits = model(self.tf_train_samples)
             with tf.name_scope('loss'):
                 self.loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, self.tf_train_labels))
-                self.train_summaries.append(tf.scalar_summary('Loss', self.loss))
+                self.train_summaries.append(scalar_summary('Loss', self.loss))
 
             # Optimizer.
             with tf.name_scope('optimizer'):
@@ -143,8 +149,8 @@ def model(data_flow, train=True):
         with tf.name_scope('test'):
             self.test_prediction = tf.nn.softmax(model(self.tf_test_samples, train=False), name='test_prediction')
 
-        self.merged_train_summary = tf.merge_summary(self.train_summaries)
-        self.merged_test_summary = tf.merge_summary(self.test_summaries)
+        self.merged_train_summary = merge_summary(self.train_summaries)
+        self.merged_test_summary = merge_summary(self.test_summaries)
 
     def run(self, data_iterator, train_samples, train_labels, test_samples, test_labels):
         '''
@@ -223,4 +229,4 @@ def visualize_filter_map(self, tensor, *, how_many, display_size, name):
         print(filter_map.get_shape())
         filter_map = tf.reshape(filter_map, (how_many, display_size, display_size, 1))
         print(how_many)
-        self.test_summaries.append(tf.image_summary(name, tensor=filter_map, max_images=how_many))
+        self.test_summaries.append(image_summary(name, tensor=filter_map, max_images=how_many))
diff --git a/Season1/16-19/dp.py b/Season1/16-19/dp.py
index 5744261..8b784fb 100644
--- a/Season1/16-19/dp.py
+++ b/Season1/16-19/dp.py
@@ -3,6 +3,12 @@
 from sklearn.metrics import confusion_matrix
 import numpy as np
 
+if(tf.__version__.startswith("1.")):
+    image_summary , scalar_summary = tf.summary.image , tf.summary.scalar
+    merge_summary , histogram_summary = tf.summary.merge , tf.summary.histogram
+else:
+    image_summary , scalar_summary = tf.image_summary , tf.scalar_summary
+    merge_summary , histogram_summary = tf.merge_summary , tf.histogram_summary
 
 class Network():
     def __init__(self, train_batch_size, test_batch_size, pooling_scale,
@@ -71,8 +77,8 @@ def add_fc(self, *, in_num_nodes, out_num_nodes, activation='relu', name):
         biases = tf.Variable(tf.constant(0.1, shape=[out_num_nodes]))
         self.fc_weights.append(weights)
         self.fc_biases.append(biases)
-        self.train_summaries.append(tf.histogram_summary(str(len(self.fc_weights))+'_weights', weights))
-        self.train_summaries.append(tf.histogram_summary(str(len(self.fc_biases))+'_biases', biases))
+        self.train_summaries.append(histogram_summary(str(len(self.fc_weights))+'_weights', weights))
+        self.train_summaries.append(histogram_summary(str(len(self.fc_biases))+'_biases', biases))
 
     def apply_regularization(self, _lambda):
         # L2 regularization for the fully connected parameters
@@ -149,7 +155,7 @@ def model(data_flow, train=True):
             with tf.name_scope('loss'):
                 self.loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, self.tf_train_labels))
                 self.loss += self.apply_regularization(_lambda=5e-4)
-                self.train_summaries.append(tf.scalar_summary('Loss', self.loss))
+                self.train_summaries.append(scalar_summary('Loss', self.loss))
 
             # learning rate decay
             global_step = tf.Variable(0)
@@ -184,8 +190,8 @@ def model(data_flow, train=True):
         with tf.name_scope('test'):
             self.test_prediction = tf.nn.softmax(model(self.tf_test_samples, train=False), name='test_prediction')
 
-        self.merged_train_summary = tf.merge_summary(self.train_summaries)
-        self.merged_test_summary = tf.merge_summary(self.test_summaries)
+        self.merged_train_summary = merge_summary(self.train_summaries)
+        self.merged_test_summary = merge_summary(self.test_summaries)
 
     def run(self, data_iterator, train_samples, train_labels, test_samples, test_labels):
         '''
@@ -262,4 +268,4 @@ def visualize_filter_map(self, tensor, *, how_many, display_size, name):
         print(filter_map.get_shape())
         filter_map = tf.reshape(filter_map, (how_many, display_size, display_size, 1))
         print(how_many)
-        self.test_summaries.append(tf.image_summary(name, tensor=filter_map, max_images=how_many))
+        self.test_summaries.append(image_summary(name, tensor=filter_map, max_images=how_many))
diff --git a/Season1/20/dp.py b/Season1/20/dp.py
index 230be9d..593cdfe 100644
--- a/Season1/20/dp.py
+++ b/Season1/20/dp.py
@@ -3,6 +3,12 @@
 from sklearn.metrics import confusion_matrix
 import numpy as np
 
+if(tf.__version__.startswith("1.")):
+    image_summary , scalar_summary = tf.summary.image , tf.summary.scalar
+    merge_summary , histogram_summary = tf.summary.merge , tf.summary.histogram
+else:
+    image_summary , scalar_summary = tf.image_summary , tf.scalar_summary
+    merge_summary , histogram_summary = tf.merge_summary , tf.histogram_summary
 
 class Network():
     def __init__(self, train_batch_size, test_batch_size, pooling_scale,
@@ -80,8 +86,8 @@ def add_fc(self, *, in_num_nodes, out_num_nodes, activation='relu', name):
         biases = tf.Variable(tf.constant(0.1, shape=[out_num_nodes]))
         self.fc_weights.append(weights)
         self.fc_biases.append(biases)
-        self.train_summaries.append(tf.histogram_summary(str(len(self.fc_weights))+'_weights', weights))
-        self.train_summaries.append(tf.histogram_summary(str(len(self.fc_biases))+'_biases', biases))
+        self.train_summaries.append(histogram_summary(str(len(self.fc_weights))+'_weights', weights))
+        self.train_summaries.append(histogram_summary(str(len(self.fc_biases))+'_biases', biases))
 
     def apply_regularization(self, _lambda):
         # L2 regularization for the fully connected parameters
@@ -158,7 +164,7 @@ def model(data_flow, train=True):
             with tf.name_scope('loss'):
                 self.loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits, self.tf_train_labels))
                 self.loss += self.apply_regularization(_lambda=5e-4)
-                self.train_summaries.append(tf.scalar_summary('Loss', self.loss))
+                self.train_summaries.append(scalar_summary('Loss', self.loss))
 
             # learning rate decay
             global_step = tf.Variable(0)
@@ -198,8 +204,8 @@ def model(data_flow, train=True):
             self.single_prediction = tf.nn.softmax(model(single_input, train=False), name='single_prediction')
             tf.add_to_collection("prediction", self.single_prediction)
 
-        self.merged_train_summary = tf.merge_summary(self.train_summaries)
-        self.merged_test_summary = tf.merge_summary(self.test_summaries)
+        self.merged_train_summary = merge_summary(self.train_summaries)
+        self.merged_test_summary = merge_summary(self.test_summaries)
 
         # Placed after the Graph is defined: save this computation graph
         self.saver = tf.train.Saver(tf.all_variables())
@@ -327,7 +333,7 @@ def visualize_filter_map(self, tensor, *, how_many, display_size, name):
         #print(filter_map.get_shape())
         filter_map = tf.reshape(filter_map, (how_many, display_size, display_size, 1))
         #print(how_many)
-        self.test_summaries.append(tf.image_summary(name, tensor=filter_map, max_images=how_many))
+        self.test_summaries.append(image_summary(name, tensor=filter_map, max_images=how_many))
 
     def print_confusion_matrix(self, confusionMatrix):
         print('Confusion Matrix:')
diff --git a/Season1_Tensorflow1.1_Python3.5/.DS_Store b/Season1_Tensorflow1.1_Python3.5/.DS_Store
new file mode 100644
index 0000000000000000000000000000000000000000..17b8aa5aa10f2de71df37f34822a7f588d4ca3dd
Binary files /dev/null and b/Season1_Tensorflow1.1_Python3.5/.DS_Store differ

From 04d9a32f0a67dcfb982ed7b3d4f9e0b97a685855 Mon Sep 17 00:00:00 2001
From: TITAN-PyCompat
Date: Wed, 17 Apr 2019 18:06:21 +0800
Subject: [PATCH 2/6] Remove stray Season1/.DS_Store from version control

---
 Season1/.DS_Store | Bin 6148 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 Season1/.DS_Store

diff --git a/Season1/.DS_Store b/Season1/.DS_Store
deleted file mode 100644
index 4b4c8e490fe774a13fc96c3852dbbb480feb7bb5..0000000000000000000000000000000000000000
Binary files a/Season1/.DS_Store and /dev/null differ

From cbc21951842438762476b1b369cd3e4dfd0b6321 Mon Sep 17 00:00:00 2001
From: TITAN-PyCompat
Date: Wed, 17 Apr 2019 18:08:05 +0800
Subject: [PATCH 3/6] Remove stray .DS_Store and extend .gitignore

---
 .DS_Store  | Bin 6148 -> 0 bytes
 .gitignore |   2 ++
 2 files changed, 2 insertions(+)
 delete mode 100644 .DS_Store

diff --git a/.DS_Store b/.DS_Store
deleted file mode 100644
index 0c66cfd1e0a9d91ebf9fcf84e21130b45eece0fb..0000000000000000000000000000000000000000
Binary files a/.DS_Store and /dev/null differ
diff --git a/.gitignore b/.gitignore
index 897e050..0e9e84a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,5 @@
+.gitignore
+.DS_Store
 # IDE
 .idea*
 

From 57f6dc41f1054d2575f935fad61ffd66391c8db9 Mon Sep 17 00:00:00 2001
From: TITAN-PyCompat
Date: Wed, 17 Apr 2019 18:09:39 +0800
Subject: [PATCH 4/6] Drop the new .gitignore entries again

---
 .gitignore | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.gitignore b/.gitignore
index 0e9e84a..9012494 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,4 @@
-.gitignore
-.DS_Store
+
 # IDE
 .idea*
 

From 98bc4c01093ad5039d530a66fd585a011d319d5e Mon Sep 17 00:00:00 2001
From: TITAN-PyCompat
Date: Wed, 17 Apr 2019 18:10:26 +0800
Subject: [PATCH 5/6] Remove the leftover blank line from .gitignore

---
 .gitignore | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.gitignore b/.gitignore
index 9012494..897e050 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,3 @@
-
 # IDE
 .idea*
 

From 48717cd11c9fcdb6028444ff7e31614758c28b62 Mon Sep 17 00:00:00 2001
From: TITAN-PyCompat
Date: Wed, 17 Apr 2019 18:12:01 +0800
Subject: [PATCH 6/6] Remove stray Season1_Tensorflow1.1_Python3.5/.DS_Store

---
 Season1_Tensorflow1.1_Python3.5/.DS_Store | Bin 6148 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 Season1_Tensorflow1.1_Python3.5/.DS_Store

diff --git a/Season1_Tensorflow1.1_Python3.5/.DS_Store b/Season1_Tensorflow1.1_Python3.5/.DS_Store
deleted file mode 100644
index 17b8aa5aa10f2de71df37f34822a7f588d4ca3dd..0000000000000000000000000000000000000000
Binary files a/Season1_Tensorflow1.1_Python3.5/.DS_Store and /dev/null differ
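
Series note: every modified module installs the same import-time shim, aliasing the
summary functions so the rest of the file can use one set of names under both the
pre-1.0 and the 1.x API. A minimal standalone sketch of that shim (plain Python; the
alias names mirror the ones the patches introduce, while the example call sites in
the trailing comments are illustrative, not taken verbatim from the files):

    import tensorflow as tf

    if tf.__version__.startswith("1."):
        # TensorFlow 1.x moved the summary ops into the tf.summary module.
        scalar_summary = tf.summary.scalar
        histogram_summary = tf.summary.histogram
        image_summary = tf.summary.image
        merge_summary = tf.summary.merge
        merge_all_summaries = tf.summary.merge_all
    else:
        # Pre-1.0 top-level names; these were removed in 1.0.
        scalar_summary = tf.scalar_summary
        histogram_summary = tf.histogram_summary
        image_summary = tf.image_summary
        merge_summary = tf.merge_summary
        merge_all_summaries = tf.merge_all_summaries

    # Call sites then go through the aliases regardless of version, e.g.:
    #   loss_summary = scalar_summary('Loss', loss)
    #   merged = merge_all_summaries()

Two caveats the series leaves open: TensorFlow 1.0 made
tf.nn.softmax_cross_entropy_with_logits keyword-only, so the positional calls still
present in these files, tf.nn.softmax_cross_entropy_with_logits(logits,
self.tf_train_labels), fail under 1.x and would need to become
tf.nn.softmax_cross_entropy_with_logits(labels=self.tf_train_labels, logits=logits);
and tf.summary.image renamed the max_images parameter of the old tf.image_summary to
max_outputs, so the image_summary(..., max_images=...) call sites also need updating
when running under 1.x.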