Commit 35221cf

agg

Le-Zheng committed May 28, 2019
1 parent 619bbf5 commit 35221cf
Showing 4 changed files with 299 additions and 32 deletions.
85 changes: 56 additions & 29 deletions keras/4.4-overfitting-and-underfitting.ipynb
@@ -16,14 +16,14 @@
 "name": "stdout",
 "output_type": "stream",
 "text": [
-"env: SPARK_DRIVER_MEMORY=32g\n",
+"env: SPARK_DRIVER_MEMORY=12g\n",
 "env: PYSPARK_PYTHON=/usr/bin/python3.5\n",
 "env: PYSPARK_DRIVER_PYTHON=/usr/bin/python3.5\n"
 ]
 }
 ],
 "source": [
-"%env SPARK_DRIVER_MEMORY=32g\n",
+"%env SPARK_DRIVER_MEMORY=12g\n",
 "%env PYSPARK_PYTHON=/usr/bin/python3.5\n",
 "%env PYSPARK_DRIVER_PYTHON=/usr/bin/python3.5\n",
 "\n",
@@ -86,7 +89,9 @@
 {
 "cell_type": "code",
 "execution_count": 2,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "from zoo.pipeline.api.keras.datasets import imdb\n",
@@ -107,6 +109,21 @@
 "y_test = np.asarray(test_labels).astype('float32')"
 ]
 },
+{
+"cell_type": "code",
+"execution_count": null,
+"metadata": {
+"collapsed": true
+},
+"outputs": [],
+"source": [
+"# Refer to the memory-saving approach in Chapter 3.5: zip the training data and labels into an RDD, which lets you lower SPARK_DRIVER_MEMORY to 12g.\n",
+"from bigdl.util.common import to_sample_rdd\n",
+"\n",
+"train = to_sample_rdd(x_train, y_train)\n",
+"val = to_sample_rdd(x_test, y_test)"
+]
+},
 {
 "cell_type": "markdown",
 "metadata": {},
@@ -175,10 +192,10 @@
 "import time\n",
 "dir_name = '4-4 ' + str(time.ctime())\n",
 "original_model.set_tensorboard('./', dir_name)\n",
-"original_model.fit(x_train, y_train,\n",
+"original_model.fit(train, None,\n",
 "                   nb_epoch=20,\n",
 "                   batch_size=512,\n",
-"                   validation_data=(x_test, y_test))"
+"                   validation_data=val)"
 ]
 },
 {
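
The same substitution recurs for every model in this notebook: once features and labels are zipped into an RDD of Samples, the labels travel inside the Samples, so fit takes None for y and an RDD for validation_data. A sketch of the pattern, using the names the diff introduces:

    # train and val are RDDs of Sample (features + label per element),
    # so the second positional argument (y) is None.
    original_model.fit(train, None,
                       nb_epoch=20,
                       batch_size=512,
                       validation_data=val)
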
@@ -227,16 +244,18 @@
 "\n",
 "dir_name = '4-4 ' + str(time.ctime())\n",
 "smaller_model.set_tensorboard('./', dir_name)\n",
-"smaller_model.fit(x_train, y_train,\n",
-"                  nb_epoch=20,\n",
-"                  batch_size=512,\n",
-"                  validation_data=(x_test, y_test))"
+"smaller_model.fit(train, None,\n",
+"                  nb_epoch=20,\n",
+"                  batch_size=512,\n",
+"                  validation_data=val)"
 ]
 },
 {
 "cell_type": "code",
 "execution_count": 4,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "import matplotlib.pyplot as plt\n",
@@ -325,15 +344,17 @@
 {
 "cell_type": "code",
 "execution_count": 10,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "dir_name = '4-4 ' + str(time.ctime())\n",
 "bigger_model.set_tensorboard('./', dir_name)\n",
-"bigger_model.fit(x_train, y_train,\n",
-"                 nb_epoch=20,\n",
-"                 batch_size=512,\n",
-"                 validation_data=(x_test, y_test))"
+"bigger_model.fit(train, None,\n",
+"                 nb_epoch=20,\n",
+"                 batch_size=512,\n",
+"                 validation_data=val)"
 ]
 },
 {
@@ -470,15 +491,17 @@
 {
 "cell_type": "code",
 "execution_count": 13,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "dir_name = '4-4 ' + str(time.ctime())\n",
 "l2_model.set_tensorboard('./', dir_name)\n",
-"l2_model.fit(x_train, y_train,\n",
-"             nb_epoch=20,\n",
-"             batch_size=512,\n",
-"             validation_data=(x_test, y_test))"
+"l2_model.fit(train, None,\n",
+"             nb_epoch=20,\n",
+"             batch_size=512,\n",
+"             validation_data=val)"
 ]
 },
 {
@@ -531,7 +554,9 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "from zoo.pipeline.api.keras import regularizers\n",
@@ -540,7 +565,7 @@
 "regularizers.l1(0.001)\n",
 "\n",
 "# L1 and L2 regularization at the same time\n",
-"regularizers.l1_l2(l1=0.001, l2=0.001)"
+"regularizers.l1l2(l1=0.001, l2=0.001)"
 ]
 },
 {
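
Note the rename in the last line: this regularizers module spells the combined penalty l1l2, not l1_l2 as in stock Keras. A hedged sketch of attaching such a penalty to a layer, assuming the Keras 1.2-style W_regularizer argument that this API mirrors (the layer shape is illustrative):

    from zoo.pipeline.api.keras import regularizers
    from zoo.pipeline.api.keras.layers import Dense

    # Penalize large weights with L1 and L2 terms at the same time.
    dense = Dense(16, activation='relu', input_shape=(10000,),
                  W_regularizer=regularizers.l1l2(l1=0.001, l2=0.001))
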
@@ -581,10 +606,12 @@
 {
 "cell_type": "code",
 "execution_count": null,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
-"model.add(layers.Dropout(0.5))"
+"l2_model.add(layers.Dropout(0.5))"
 ]
 },
 {
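
For reference, a sketch of how the dropout model (dpt_model) trained below could be assembled, assuming the same Sequential API used earlier in this notebook; the layer sizes mirror the notebook's other IMDB models rather than anything stated in this diff:

    from zoo.pipeline.api.keras.models import Sequential
    from zoo.pipeline.api.keras.layers import Dense, Dropout

    dpt_model = Sequential()
    dpt_model.add(Dense(16, activation='relu', input_shape=(10000,)))
    dpt_model.add(Dropout(0.5))   # randomly zero half the activations during training
    dpt_model.add(Dense(16, activation='relu'))
    dpt_model.add(Dropout(0.5))
    dpt_model.add(Dense(1, activation='sigmoid'))
    dpt_model.compile(optimizer='rmsprop',
                      loss='binary_crossentropy',
                      metrics=['accuracy'])
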
@@ -629,10 +656,10 @@
 "\n",
 "dir_name = '4-4 ' + str(time.ctime())\n",
 "dpt_model.set_tensorboard('./', dir_name)\n",
-"dpt_model.fit(x_train, y_train,\n",
-"              nb_epoch=20,\n",
-"              batch_size=512,\n",
-"              validation_data=(x_test, y_test))"
+"dpt_model.fit(train, None,\n",
+"              nb_epoch=20,\n",
+"              batch_size=512,\n",
+"              validation_data=val)"
 ]
 },
 {
@@ -703,7 +730,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.5.2"
+"version": "3.5.4"
 }
 },
 "nbformat": 4,
10 changes: 7 additions & 3 deletions keras/5.1-introduction-to-convnets.ipynb
@@ -145,7 +145,9 @@
 {
 "cell_type": "code",
 "execution_count": 4,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "model.summary()"
@@ -241,7 +243,9 @@
 {
 "cell_type": "code",
 "execution_count": 7,
-"metadata": {},
+"metadata": {
+"collapsed": true
+},
 "outputs": [],
 "source": [
 "test_loss, test_acc = model.evaluate(test_images, test_labels)"
@@ -292,7 +296,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
-"version": "3.5.2"
+"version": "3.5.4"
 }
 },
 "nbformat": 4,
21 changes: 21 additions & 0 deletions keras/ipynb2py.sh
@@ -0,0 +1,21 @@
+#!/bin/bash
+
+## Usage ################################
+# ./ipynb2py.sh <file-name without extension>
+# Example:
+# ./ipynb2py.sh notebooks/neural_networks/rnn
+#########################################
+if [ $# -ne "1" ]; then
+  echo "Usage: ./ipynb2py.sh <file-name without extension>"
+else
+  cp $1.ipynb $1.tmp.ipynb
+  sed -i 's/%%/#/' $1.tmp.ipynb
+  sed -i 's/%pylab/#/' $1.tmp.ipynb
+
+  jupyter nbconvert $1.tmp.ipynb --to python
+
+  mv $1.tmp.py $1.py
+  sed -i '1i# -*- coding: utf-8 -*-' $1.py
+  sed -i '1i#!/usr/bin/python' $1.py
+  rm $1.tmp.ipynb
+fi
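
A quick check of the new script, assuming it is executable and jupyter is on the PATH: ./ipynb2py.sh keras/5.1-introduction-to-convnets should leave keras/5.1-introduction-to-convnets.py beside the notebook, with cell magics commented out and the shebang and coding header prepended.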