From 755612b73d9bf184c8cad9f5c20b8994b9ac0b4c Mon Sep 17 00:00:00 2001 From: Rajiv Shah Date: Fri, 2 Jun 2017 23:55:47 -0500 Subject: [PATCH] Minor grammar and fine tuning fix --- notebook.ipynb | 359 ++++++++++++++++++++++++++----------------------- 1 file changed, 188 insertions(+), 171 deletions(-) diff --git a/notebook.ipynb b/notebook.ipynb index fee424d..b44466d 100644 --- a/notebook.ipynb +++ b/notebook.ipynb @@ -80,7 +80,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The github repo includes about 1500 images for this model. The original Kaggle dataset is much larger. The purpose of this demo is to show how you can build models with smaller size datasets. You should be able to improve this model by using more data." + "The github repo includes about 3000 images for this model. The original Kaggle dataset is much larger. The purpose of this demo is to show how you can build models with smaller size datasets. You should be able to improve this model by using more data." ] }, { @@ -121,11 +121,19 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 2, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "Using TensorFlow backend.\n" + ] + } + ], "source": [ "##Updated to Keras 2.0\n", "import os\n", @@ -286,71 +294,71 @@ "output_type": "stream", "text": [ "Epoch 1/30\n", - "64/64 [==============================] - 61s - loss: 0.7004 - acc: 0.5205 - val_loss: 0.6861 - val_acc: 0.5288\n", + "64/64 [==============================] - 4s - loss: 0.7019 - acc: 0.5312 - val_loss: 0.6602 - val_acc: 0.6430\n", "Epoch 2/30\n", - "64/64 [==============================] - 3s - loss: 0.6668 - acc: 0.6177 - val_loss: 0.6322 - val_acc: 0.6430\n", + "64/64 [==============================] - 3s - loss: 0.6585 - acc: 0.6201 - val_loss: 0.6155 - val_acc: 0.6599\n", "Epoch 3/30\n", - "64/64 [==============================] - 3s - loss: 0.6260 - acc: 0.6621 - val_loss: 0.6390 - val_acc: 0.6454\n", + "64/64 [==============================] - 3s - loss: 0.6011 - acc: 0.6802 - val_loss: 0.7000 - val_acc: 0.5829\n", "Epoch 4/30\n", - "64/64 [==============================] - 3s - loss: 0.5847 - acc: 0.7075 - val_loss: 0.5782 - val_acc: 0.6731\n", + "64/64 [==============================] - 3s - loss: 0.5623 - acc: 0.7124 - val_loss: 0.5634 - val_acc: 0.7007\n", "Epoch 5/30\n", - "64/64 [==============================] - 3s - loss: 0.5446 - acc: 0.7271 - val_loss: 0.6108 - val_acc: 0.6454\n", + "64/64 [==============================] - 3s - loss: 0.5211 - acc: 0.7598 - val_loss: 0.5417 - val_acc: 0.7103\n", "Epoch 6/30\n", - "64/64 [==============================] - 3s - loss: 0.5141 - acc: 0.7500 - val_loss: 0.5884 - val_acc: 0.6755\n", + "64/64 [==============================] - 3s - loss: 0.4809 - acc: 0.7632 - val_loss: 0.6005 - val_acc: 0.6863\n", "Epoch 7/30\n", - "64/64 [==============================] - 3s - loss: 0.4930 - acc: 0.7764 - val_loss: 0.5914 - val_acc: 0.6959\n", + "64/64 [==============================] - 3s - loss: 0.4411 - acc: 0.7954 - val_loss: 0.5165 - val_acc: 0.7440\n", "Epoch 8/30\n", - "64/64 [==============================] - 3s - loss: 0.4424 - acc: 0.7822 - val_loss: 0.6365 - val_acc: 0.7091\n", + "64/64 [==============================] - 3s - loss: 0.4147 - acc: 0.8145 - val_loss: 0.5743 - val_acc: 0.7224\n", "Epoch 9/30\n", - "64/64 [==============================] - 3s - loss: 0.4067 - acc: 0.8218 - val_loss: 0.5611 - val_acc: 0.7248\n", + "64/64 
[==============================] - 3s - loss: 0.3629 - acc: 0.8428 - val_loss: 0.5564 - val_acc: 0.7368\n", "Epoch 10/30\n", - "64/64 [==============================] - 3s - loss: 0.3593 - acc: 0.8330 - val_loss: 0.5750 - val_acc: 0.7115\n", + "64/64 [==============================] - 3s - loss: 0.3078 - acc: 0.8706 - val_loss: 0.6492 - val_acc: 0.7356\n", "Epoch 11/30\n", - "64/64 [==============================] - 3s - loss: 0.3103 - acc: 0.8657 - val_loss: 0.6317 - val_acc: 0.6959\n", + "64/64 [==============================] - 3s - loss: 0.2555 - acc: 0.8911 - val_loss: 0.6599 - val_acc: 0.7500\n", "Epoch 12/30\n", - "64/64 [==============================] - 3s - loss: 0.2998 - acc: 0.8721 - val_loss: 0.6315 - val_acc: 0.7488\n", + "64/64 [==============================] - 3s - loss: 0.2126 - acc: 0.9146 - val_loss: 0.7828 - val_acc: 0.7091\n", "Epoch 13/30\n", - "64/64 [==============================] - 3s - loss: 0.2365 - acc: 0.8975 - val_loss: 0.6387 - val_acc: 0.7272\n", + "64/64 [==============================] - 3s - loss: 0.1898 - acc: 0.9214 - val_loss: 0.9111 - val_acc: 0.7188\n", "Epoch 14/30\n", - "64/64 [==============================] - 3s - loss: 0.2114 - acc: 0.9160 - val_loss: 0.8432 - val_acc: 0.7163\n", + "64/64 [==============================] - 3s - loss: 0.1560 - acc: 0.9419 - val_loss: 0.7646 - val_acc: 0.7308\n", "Epoch 15/30\n", - "64/64 [==============================] - 3s - loss: 0.1830 - acc: 0.9297 - val_loss: 0.7151 - val_acc: 0.7175\n", + "64/64 [==============================] - 3s - loss: 0.1281 - acc: 0.9521 - val_loss: 1.0182 - val_acc: 0.7344\n", "Epoch 16/30\n", - "64/64 [==============================] - 3s - loss: 0.1521 - acc: 0.9453 - val_loss: 0.9396 - val_acc: 0.7260\n", + "64/64 [==============================] - 3s - loss: 0.1319 - acc: 0.9561 - val_loss: 1.2820 - val_acc: 0.7091\n", "Epoch 17/30\n", - "64/64 [==============================] - 3s - loss: 0.1283 - acc: 0.9478 - val_loss: 1.0425 - val_acc: 0.7236\n", + "64/64 [==============================] - 3s - loss: 0.0875 - acc: 0.9658 - val_loss: 1.0670 - val_acc: 0.7248\n", "Epoch 18/30\n", - "64/64 [==============================] - 3s - loss: 0.1131 - acc: 0.9541 - val_loss: 1.0674 - val_acc: 0.7127\n", + "64/64 [==============================] - 3s - loss: 0.0849 - acc: 0.9683 - val_loss: 0.9959 - val_acc: 0.7440\n", "Epoch 19/30\n", - "64/64 [==============================] - 3s - loss: 0.1060 - acc: 0.9629 - val_loss: 1.0976 - val_acc: 0.7368\n", + "64/64 [==============================] - 3s - loss: 0.0690 - acc: 0.9780 - val_loss: 1.1287 - val_acc: 0.7512\n", "Epoch 20/30\n", - "64/64 [==============================] - 3s - loss: 0.0976 - acc: 0.9609 - val_loss: 1.1950 - val_acc: 0.7356\n", + "64/64 [==============================] - 3s - loss: 0.0638 - acc: 0.9751 - val_loss: 1.2906 - val_acc: 0.7320\n", "Epoch 21/30\n", - "64/64 [==============================] - 3s - loss: 0.0916 - acc: 0.9683 - val_loss: 1.2085 - val_acc: 0.7284\n", + "64/64 [==============================] - 3s - loss: 0.0523 - acc: 0.9834 - val_loss: 1.3546 - val_acc: 0.7416\n", "Epoch 22/30\n", - "64/64 [==============================] - 3s - loss: 0.0713 - acc: 0.9727 - val_loss: 1.7575 - val_acc: 0.7151\n", + "64/64 [==============================] - 3s - loss: 0.0522 - acc: 0.9795 - val_loss: 1.4303 - val_acc: 0.7356\n", "Epoch 23/30\n", - "64/64 [==============================] - 3s - loss: 0.0798 - acc: 0.9736 - val_loss: 1.4385 - val_acc: 0.7368\n", + "64/64 [==============================] - 3s - 
loss: 0.0534 - acc: 0.9805 - val_loss: 1.3081 - val_acc: 0.7488\n", "Epoch 24/30\n", - "64/64 [==============================] - 3s - loss: 0.0645 - acc: 0.9785 - val_loss: 1.5054 - val_acc: 0.7200\n", + "64/64 [==============================] - 3s - loss: 0.0432 - acc: 0.9854 - val_loss: 1.5992 - val_acc: 0.7476\n", "Epoch 25/30\n", - "64/64 [==============================] - 3s - loss: 0.0740 - acc: 0.9756 - val_loss: 1.7323 - val_acc: 0.6923\n", + "64/64 [==============================] - 3s - loss: 0.0643 - acc: 0.9810 - val_loss: 1.7533 - val_acc: 0.7248\n", "Epoch 26/30\n", - "64/64 [==============================] - 3s - loss: 0.0586 - acc: 0.9795 - val_loss: 1.6631 - val_acc: 0.7236\n", + "64/64 [==============================] - 3s - loss: 0.0505 - acc: 0.9834 - val_loss: 1.7377 - val_acc: 0.7236\n", "Epoch 27/30\n", - "64/64 [==============================] - 3s - loss: 0.0499 - acc: 0.9785 - val_loss: 2.0203 - val_acc: 0.7019\n", + "64/64 [==============================] - 3s - loss: 0.0318 - acc: 0.9893 - val_loss: 1.6935 - val_acc: 0.7488\n", "Epoch 28/30\n", - "64/64 [==============================] - 3s - loss: 0.0662 - acc: 0.9771 - val_loss: 1.8689 - val_acc: 0.7212\n", + "64/64 [==============================] - 3s - loss: 0.0476 - acc: 0.9893 - val_loss: 1.7834 - val_acc: 0.7272\n", "Epoch 29/30\n", - "64/64 [==============================] - 3s - loss: 0.0857 - acc: 0.9756 - val_loss: 1.9077 - val_acc: 0.6935\n", + "64/64 [==============================] - 3s - loss: 0.0451 - acc: 0.9849 - val_loss: 1.6228 - val_acc: 0.7392\n", "Epoch 30/30\n", - "64/64 [==============================] - 3s - loss: 0.0515 - acc: 0.9819 - val_loss: 1.6681 - val_acc: 0.7212\n" + "64/64 [==============================] - 3s - loss: 0.0342 - acc: 0.9902 - val_loss: 1.8900 - val_acc: 0.7212\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 8,
@@ -395,7 +403,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "If your model successfully runs at one epoch, go back and it for 30 epochs by changing nb_epoch above. I was able to get to an val_acc of 0.71 at 30 epochs.\n", + "If your model successfully runs at one epoch, go back and run it for 30 epochs by changing epochs above. I was able to get a val_acc of 0.71 at 30 epochs.\n", "A copy of a pretrained network is available in the pretrained folder." ] },
@@ -423,7 +431,7 @@ { "data": { "text/plain": [ - "[1.6358572365178798, 0.72472205528846156]" + "[1.9269507873731737, 0.72945462740384615]" ] }, "execution_count": 11,
@@ -453,7 +461,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**After ~10 epochs the neural network reach ~70% accuracy. We can witness overfitting, no progress is made over validation set in the next epochs**" + "**After ~10 epochs the neural network reaches ~70% accuracy. We can see overfitting: no further progress is made on the validation set in the following epochs.**" ] }, {
@@ -521,71 +529,71 @@ "output_type": "stream", "text": [ "Epoch 1/30\n", - "64/64 [==============================] - 8s - loss: 0.6181 - acc: 0.7075 - val_loss: 0.5214 - val_acc: 0.7296\n", + "64/64 [==============================] - 8s - loss: 0.6084 - acc: 0.7173 - val_loss: 0.5901 - val_acc: 0.6911\n", "Epoch 2/30\n", - "64/64 [==============================] - 7s - loss: 0.5563 - acc: 0.7510 - val_loss: 0.5814 - val_acc: 0.7224\n", + "64/64 [==============================] - 7s - loss: 0.5393 - acc: 0.7476 - val_loss: 0.5237 - val_acc: 0.7548\n", "Epoch 3/30\n", - "64/64 [==============================] - 7s - loss: 0.4982 - acc: 0.7700 - val_loss: 0.6890 - val_acc: 0.7188\n", + "64/64 [==============================] - 7s - loss: 0.5114 - acc: 0.7637 - val_loss: 0.5793 - val_acc: 0.7248\n", "Epoch 4/30\n", - "64/64 [==============================] - 7s - loss: 0.5247 - acc: 0.7583 - val_loss: 0.5678 - val_acc: 0.7115\n", + "64/64 [==============================] - 7s - loss: 0.4931 - acc: 0.7700 - val_loss: 0.5768 - val_acc: 0.7260\n", "Epoch 5/30\n", - "64/64 [==============================] - 7s - loss: 0.4814 - acc: 0.7871 - val_loss: 0.5177 - val_acc: 0.7776\n", + "64/64 [==============================] - 7s - loss: 0.4846 - acc: 0.8013 - val_loss: 0.5024 - val_acc: 0.7536\n", "Epoch 6/30\n", - "64/64 [==============================] - 7s - loss: 0.4932 - acc: 0.7900 - val_loss: 0.5102 - val_acc: 0.7873\n", + "64/64 [==============================] - 7s - loss: 0.4590 - acc: 0.7910 - val_loss: 0.5223 - val_acc: 0.7728\n", "Epoch 7/30\n", - "64/64 [==============================] - 7s - loss: 0.4770 - acc: 0.8013 - val_loss: 0.4926 - val_acc: 0.7668\n", + "64/64 [==============================] - 7s - loss: 0.4643 - acc: 0.7910 - val_loss: 0.5221 - val_acc: 0.7440\n", "Epoch 8/30\n", - "64/64 [==============================] - 7s - loss: 0.4402 - acc: 0.8008 - val_loss: 0.5014 - val_acc: 0.7728\n", + "64/64 [==============================] - 7s - loss: 0.4606 - acc: 0.7891 - val_loss: 0.5439 - val_acc: 0.7620\n", "Epoch 9/30\n", - "64/64 [==============================] - 7s - loss: 0.4495 - acc: 0.7998 - val_loss: 0.5459 - val_acc: 0.7272\n", + "64/64 [==============================] - 7s - loss: 0.4445 - acc: 0.8047 - val_loss: 0.5710 - val_acc: 0.7416\n", "Epoch 10/30\n", - "64/64 [==============================] - 7s - loss: 0.4359 - acc: 0.7974 - val_loss: 0.5893 - val_acc: 0.7764\n", + "64/64 [==============================] - 7s - loss: 0.4639 - acc: 0.8013 - val_loss: 0.4849 - val_acc: 0.7680\n", "Epoch 11/30\n", - "64/64 [==============================] - 7s - loss: 0.4247 - acc: 0.8184 - val_loss: 0.5150 - val_acc: 0.7716\n", + "64/64 [==============================] - 7s - loss: 0.4269 - acc: 0.8149 - val_loss: 0.5280 - val_acc: 0.7404\n", "Epoch 12/30\n", - "64/64 [==============================] - 7s - loss: 0.4379 - acc: 0.8140 - val_loss: 0.4899 - val_acc: 0.7825\n", + "64/64 [==============================] - 7s - loss: 0.4253 - acc: 0.8110 - val_loss: 0.4985 - val_acc: 0.7873\n", "Epoch 13/30\n", - "64/64 [==============================] - 7s - loss: 0.4480 - acc: 0.7969 - val_loss: 0.4410 - val_acc: 0.8149\n", + "64/64 [==============================] - 7s - loss: 0.4004 - acc: 0.8237 - val_loss: 0.5526 - val_acc: 0.7488\n", "Epoch 14/30\n", - "64/64 [==============================] - 7s - loss: 0.4051 - acc: 0.8208 - val_loss: 0.8244 - val_acc: 0.7055\n", + "64/64 
[==============================] - 7s - loss: 0.4373 - acc: 0.8154 - val_loss: 0.4997 - val_acc: 0.7704\n", "Epoch 15/30\n", - "64/64 [==============================] - 7s - loss: 0.4047 - acc: 0.8247 - val_loss: 0.4827 - val_acc: 0.8125\n", + "64/64 [==============================] - 7s - loss: 0.4180 - acc: 0.8242 - val_loss: 0.4911 - val_acc: 0.7740\n", "Epoch 16/30\n", - "64/64 [==============================] - 7s - loss: 0.4230 - acc: 0.8179 - val_loss: 0.4830 - val_acc: 0.8137\n", + "64/64 [==============================] - 7s - loss: 0.4083 - acc: 0.8179 - val_loss: 0.5443 - val_acc: 0.7800\n", "Epoch 17/30\n", - "64/64 [==============================] - 7s - loss: 0.4029 - acc: 0.8159 - val_loss: 0.4399 - val_acc: 0.8089\n", + "64/64 [==============================] - 7s - loss: 0.3963 - acc: 0.8184 - val_loss: 0.5221 - val_acc: 0.7897\n", "Epoch 18/30\n", - "64/64 [==============================] - 7s - loss: 0.4042 - acc: 0.8271 - val_loss: 0.4619 - val_acc: 0.7740\n", + "64/64 [==============================] - 7s - loss: 0.3970 - acc: 0.8228 - val_loss: 0.6026 - val_acc: 0.7849\n", "Epoch 19/30\n", - "64/64 [==============================] - 7s - loss: 0.4015 - acc: 0.8457 - val_loss: 0.5126 - val_acc: 0.7825\n", + "64/64 [==============================] - 7s - loss: 0.3943 - acc: 0.8306 - val_loss: 0.5247 - val_acc: 0.7632\n", "Epoch 20/30\n", - "64/64 [==============================] - 7s - loss: 0.4024 - acc: 0.8271 - val_loss: 0.4927 - val_acc: 0.7740\n", + "64/64 [==============================] - 7s - loss: 0.4219 - acc: 0.8159 - val_loss: 0.4838 - val_acc: 0.7837\n", "Epoch 21/30\n", - "64/64 [==============================] - 7s - loss: 0.3901 - acc: 0.8408 - val_loss: 0.4940 - val_acc: 0.7969\n", + "64/64 [==============================] - 7s - loss: 0.3867 - acc: 0.8262 - val_loss: 0.5173 - val_acc: 0.7452\n", "Epoch 22/30\n", - "64/64 [==============================] - 7s - loss: 0.3910 - acc: 0.8354 - val_loss: 0.5294 - val_acc: 0.7861\n", + "64/64 [==============================] - 7s - loss: 0.3681 - acc: 0.8394 - val_loss: 0.4905 - val_acc: 0.7909\n", "Epoch 23/30\n", - "64/64 [==============================] - 7s - loss: 0.3884 - acc: 0.8354 - val_loss: 0.4598 - val_acc: 0.7909\n", + "64/64 [==============================] - 7s - loss: 0.3838 - acc: 0.8408 - val_loss: 0.5536 - val_acc: 0.7356\n", "Epoch 24/30\n", - "64/64 [==============================] - 7s - loss: 0.3995 - acc: 0.8311 - val_loss: 0.4961 - val_acc: 0.7897\n", + "64/64 [==============================] - 7s - loss: 0.3725 - acc: 0.8394 - val_loss: 0.4998 - val_acc: 0.7692\n", "Epoch 25/30\n", - "64/64 [==============================] - 7s - loss: 0.3883 - acc: 0.8438 - val_loss: 0.5324 - val_acc: 0.7548\n", + "64/64 [==============================] - 7s - loss: 0.3701 - acc: 0.8472 - val_loss: 0.5808 - val_acc: 0.7800\n", "Epoch 26/30\n", - "64/64 [==============================] - 7s - loss: 0.3897 - acc: 0.8467 - val_loss: 0.4837 - val_acc: 0.7993\n", + "64/64 [==============================] - 7s - loss: 0.3527 - acc: 0.8564 - val_loss: 0.6402 - val_acc: 0.7752\n", "Epoch 27/30\n", - "64/64 [==============================] - 7s - loss: 0.3768 - acc: 0.8428 - val_loss: 0.4413 - val_acc: 0.8017\n", + "64/64 [==============================] - 7s - loss: 0.3804 - acc: 0.8350 - val_loss: 0.4937 - val_acc: 0.7656\n", "Epoch 28/30\n", - "64/64 [==============================] - 7s - loss: 0.3800 - acc: 0.8408 - val_loss: 0.5044 - val_acc: 0.7861\n", + "64/64 [==============================] - 7s - 
loss: 0.3754 - acc: 0.8398 - val_loss: 0.5054 - val_acc: 0.7981\n", "Epoch 29/30\n", - "64/64 [==============================] - 7s - loss: 0.3786 - acc: 0.8398 - val_loss: 0.5393 - val_acc: 0.7740\n", + "64/64 [==============================] - 7s - loss: 0.3574 - acc: 0.8452 - val_loss: 0.5606 - val_acc: 0.7885\n", "Epoch 30/30\n", - "64/64 [==============================] - 7s - loss: 0.3687 - acc: 0.8564 - val_loss: 0.5737 - val_acc: 0.7668\n" + "64/64 [==============================] - 7s - loss: 0.3523 - acc: 0.8481 - val_loss: 0.5420 - val_acc: 0.7692\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 13,
@@ -640,7 +648,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 16, "metadata": { "collapsed": false },
@@ -648,10 +656,10 @@ { "data": { "text/plain": [ - "[0.57709803022086048, 0.76551231971153844]" + "[0.54872930947189724, 0.77835787259615385]" ] }, - "execution_count": 15, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" }
@@ -692,7 +700,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The process of training a convolutionnal neural network can be very time-consuming and require a lot of datas. \n", + "The process of training a convolutional neural network can be very time-consuming and requires a lot of data. \n", "\n", "We can go beyond the previous models in terms of performance and efficiency by using a general-purpose, pre-trained image classifier. This example uses VGG16, a model trained on the ImageNet dataset - which contains millions of images classified in 1000 categories. \n", "\n",
@@ -716,19 +724,11 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 17, "metadata": { "collapsed": false }, - "outputs": [ - { - "name": "stdout", - "output_type": "stream", - "text": [ - "Downloading data from https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5\n" - ] - } - ], + "outputs": [], "source": [ "model_vgg = applications.VGG16(include_top=False, weights='imagenet')" ]
@@ -742,7 +742,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 18, "metadata": { "collapsed": false },
@@ -781,7 +781,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 19, "metadata": { "collapsed": false },
@@ -793,7 +793,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 20, "metadata": { "collapsed": false },
@@ -812,7 +812,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 21, "metadata": { "collapsed": false },
@@ -834,7 +834,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 22, "metadata": { "collapsed": false },
@@ -851,7 +851,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 23, "metadata": { "collapsed": false },
@@ -862,74 +862,74 @@ "text": [ "Train on 2048 samples, validate on 832 samples\n", "Epoch 1/30\n", - "2048/2048 [==============================] - 0s - loss: 3.3870 - acc: 0.6279 - val_loss: 0.3765 - val_acc: 0.8173\n", + "2048/2048 [==============================] - 0s - loss: 1.0117 - acc: 0.7373 - val_loss: 0.2824 - val_acc: 0.8846\n", "Epoch 2/30\n", - "2048/2048 [==============================] - 0s - loss: 0.4014 - acc: 0.8232 - val_loss: 0.2767 - val_acc: 0.8990\n", + "2048/2048 [==============================] - 0s - loss: 0.3854 - acc: 0.8354 - val_loss: 0.3104 - val_acc: 0.8654\n", "Epoch 3/30\n", - "2048/2048 [==============================] - 0s - 
loss: 0.3015 - acc: 0.8784 - val_loss: 0.2481 - val_acc: 0.8882\n", + "2048/2048 [==============================] - 0s - loss: 0.3131 - acc: 0.8740 - val_loss: 0.3117 - val_acc: 0.8666\n", "Epoch 4/30\n", - "2048/2048 [==============================] - 0s - loss: 0.2591 - acc: 0.8931 - val_loss: 0.4684 - val_acc: 0.8281\n", + "2048/2048 [==============================] - 0s - loss: 0.2750 - acc: 0.8916 - val_loss: 0.2966 - val_acc: 0.8750\n", "Epoch 5/30\n", - "2048/2048 [==============================] - 0s - loss: 0.2334 - acc: 0.9048 - val_loss: 0.2334 - val_acc: 0.9062\n", + "2048/2048 [==============================] - 0s - loss: 0.2181 - acc: 0.9126 - val_loss: 0.2443 - val_acc: 0.9050\n", "Epoch 6/30\n", - "2048/2048 [==============================] - 0s - loss: 0.2005 - acc: 0.9224 - val_loss: 0.2395 - val_acc: 0.9026\n", + "2048/2048 [==============================] - 0s - loss: 0.2031 - acc: 0.9204 - val_loss: 0.2397 - val_acc: 0.9135\n", "Epoch 7/30\n", - "2048/2048 [==============================] - 0s - loss: 0.1791 - acc: 0.9233 - val_loss: 0.6955 - val_acc: 0.7849\n", + "2048/2048 [==============================] - 0s - loss: 0.1917 - acc: 0.9209 - val_loss: 0.8980 - val_acc: 0.7212\n", "Epoch 8/30\n", - "2048/2048 [==============================] - 0s - loss: 0.1490 - acc: 0.9414 - val_loss: 0.2788 - val_acc: 0.8846\n", + "2048/2048 [==============================] - 0s - loss: 0.1669 - acc: 0.9375 - val_loss: 0.2565 - val_acc: 0.9026\n", "Epoch 9/30\n", - "2048/2048 [==============================] - 0s - loss: 0.1369 - acc: 0.9419 - val_loss: 0.4056 - val_acc: 0.8642\n", + "2048/2048 [==============================] - 0s - loss: 0.1234 - acc: 0.9570 - val_loss: 0.2883 - val_acc: 0.8978\n", "Epoch 10/30\n", - "2048/2048 [==============================] - 0s - loss: 0.1300 - acc: 0.9463 - val_loss: 0.2863 - val_acc: 0.8966\n", + "2048/2048 [==============================] - 0s - loss: 0.1344 - acc: 0.9473 - val_loss: 0.2794 - val_acc: 0.9099\n", "Epoch 11/30\n", - "2048/2048 [==============================] - 0s - loss: 0.1145 - acc: 0.9565 - val_loss: 0.2855 - val_acc: 0.9050\n", + "2048/2048 [==============================] - 0s - loss: 0.0999 - acc: 0.9604 - val_loss: 0.6976 - val_acc: 0.8041\n", "Epoch 12/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0926 - acc: 0.9663 - val_loss: 0.4966 - val_acc: 0.8558\n", + "2048/2048 [==============================] - 0s - loss: 0.1082 - acc: 0.9551 - val_loss: 0.3539 - val_acc: 0.9050\n", "Epoch 13/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0920 - acc: 0.9658 - val_loss: 0.4437 - val_acc: 0.8678\n", + "2048/2048 [==============================] - 0s - loss: 0.0857 - acc: 0.9683 - val_loss: 0.3202 - val_acc: 0.9123\n", "Epoch 14/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0744 - acc: 0.9702 - val_loss: 0.6320 - val_acc: 0.8462\n", + "2048/2048 [==============================] - 0s - loss: 0.0643 - acc: 0.9722 - val_loss: 0.7492 - val_acc: 0.8209\n", "Epoch 15/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0788 - acc: 0.9692 - val_loss: 0.3674 - val_acc: 0.9075\n", + "2048/2048 [==============================] - 0s - loss: 0.0790 - acc: 0.9727 - val_loss: 0.6080 - val_acc: 0.8702\n", "Epoch 16/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0721 - acc: 0.9717 - val_loss: 0.3389 - val_acc: 0.9062\n", + "2048/2048 [==============================] - 0s - loss: 0.0605 - acc: 0.9780 - val_loss: 0.4586 - val_acc: 0.8990\n", 
"Epoch 17/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0569 - acc: 0.9785 - val_loss: 0.3647 - val_acc: 0.9075\n", + "2048/2048 [==============================] - 0s - loss: 0.0592 - acc: 0.9775 - val_loss: 0.4327 - val_acc: 0.9062\n", "Epoch 18/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0486 - acc: 0.9771 - val_loss: 0.5094 - val_acc: 0.8774\n", + "2048/2048 [==============================] - 0s - loss: 0.0517 - acc: 0.9775 - val_loss: 0.5250 - val_acc: 0.9014\n", "Epoch 19/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0497 - acc: 0.9800 - val_loss: 0.3646 - val_acc: 0.9183\n", + "2048/2048 [==============================] - 0s - loss: 0.0445 - acc: 0.9839 - val_loss: 0.5150 - val_acc: 0.8906\n", "Epoch 20/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0458 - acc: 0.9824 - val_loss: 0.4245 - val_acc: 0.9087\n", + "2048/2048 [==============================] - 0s - loss: 0.0495 - acc: 0.9814 - val_loss: 0.5910 - val_acc: 0.8930\n", "Epoch 21/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0320 - acc: 0.9863 - val_loss: 0.4216 - val_acc: 0.9159\n", + "2048/2048 [==============================] - 0s - loss: 0.0357 - acc: 0.9863 - val_loss: 0.5589 - val_acc: 0.8918\n", "Epoch 22/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0352 - acc: 0.9868 - val_loss: 0.4470 - val_acc: 0.9123\n", + "2048/2048 [==============================] - 0s - loss: 0.0313 - acc: 0.9902 - val_loss: 0.6116 - val_acc: 0.8834\n", "Epoch 23/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0246 - acc: 0.9917 - val_loss: 0.4556 - val_acc: 0.9123\n", + "2048/2048 [==============================] - 0s - loss: 0.0278 - acc: 0.9883 - val_loss: 0.5449 - val_acc: 0.9062\n", "Epoch 24/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0284 - acc: 0.9878 - val_loss: 0.4901 - val_acc: 0.9111\n", + "2048/2048 [==============================] - 0s - loss: 0.0366 - acc: 0.9868 - val_loss: 0.5645 - val_acc: 0.9050\n", "Epoch 25/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0247 - acc: 0.9937 - val_loss: 0.4744 - val_acc: 0.9183\n", + "2048/2048 [==============================] - 0s - loss: 0.0231 - acc: 0.9907 - val_loss: 0.7700 - val_acc: 0.8666\n", "Epoch 26/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0363 - acc: 0.9888 - val_loss: 0.5039 - val_acc: 0.9075\n", + "2048/2048 [==============================] - 0s - loss: 0.0316 - acc: 0.9893 - val_loss: 0.6023 - val_acc: 0.9111\n", "Epoch 27/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0162 - acc: 0.9956 - val_loss: 0.4766 - val_acc: 0.9171\n", + "2048/2048 [==============================] - 0s - loss: 0.0207 - acc: 0.9922 - val_loss: 0.7544 - val_acc: 0.8918\n", "Epoch 28/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0159 - acc: 0.9951 - val_loss: 0.5400 - val_acc: 0.9062\n", + "2048/2048 [==============================] - 0s - loss: 0.0265 - acc: 0.9907 - val_loss: 0.6361 - val_acc: 0.9002\n", "Epoch 29/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0192 - acc: 0.9927 - val_loss: 0.5682 - val_acc: 0.9111\n", + "2048/2048 [==============================] - 0s - loss: 0.0206 - acc: 0.9927 - val_loss: 0.7019 - val_acc: 0.9026\n", "Epoch 30/30\n", - "2048/2048 [==============================] - 0s - loss: 0.0195 - acc: 0.9937 - val_loss: 0.5436 - val_acc: 0.9123\n" + "2048/2048 [==============================] - 0s 
- loss: 0.0284 - acc: 0.9897 - val_loss: 0.6048 - val_acc: 0.9026\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 25, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -950,7 +950,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 24, "metadata": { "collapsed": false }, @@ -968,7 +968,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -986,7 +986,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 26, "metadata": { "collapsed": false }, @@ -995,16 +995,17 @@ "name": "stdout", "output_type": "stream", "text": [ - "800/832 [===========================>..] - ETA: 0s" + "\r", + " 32/832 [>.............................] - ETA: 0s" ] }, { "data": { "text/plain": [ - "[0.54362334619061303, 0.91225961538461542]" + "[0.60475906758354259, 0.90264423076923073]" ] }, - "execution_count": 27, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } @@ -1054,7 +1055,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 27, "metadata": { "collapsed": true }, @@ -1072,7 +1073,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -1084,7 +1085,7 @@ "top_model.add(Dropout(0.5))\n", "top_model.add(Dense(1, activation='sigmoid'))\n", "\n", - "top_model.load_weights('models/bottleneck_40_epochs.h5')\n", + "top_model.load_weights('models/bottleneck_30_epochs.h5')\n", "\n", "#model_vgg.add(top_model)\n", "model = Model(inputs = model_vgg.input, outputs = top_model(model_vgg.output))" @@ -1099,19 +1100,19 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 30, "metadata": { "collapsed": false }, "outputs": [], "source": [ - "for layer in model_vgg.layers[:15]:\n", + "for layer in model.layers[:15]:\n", " layer.trainable = False" ] }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 31, "metadata": { "collapsed": true }, @@ -1126,7 +1127,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -1165,7 +1166,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 33, "metadata": { "collapsed": false }, @@ -1175,74 +1176,74 @@ "output_type": "stream", "text": [ "Epoch 1/30\n", - "64/64 [==============================] - 14s - loss: 0.3143 - acc: 0.8945 - val_loss: 0.3477 - val_acc: 0.9123\n", + "64/64 [==============================] - 14s - loss: 0.3501 - acc: 0.8999 - val_loss: 0.3482 - val_acc: 0.8990\n", "Epoch 2/30\n", - "64/64 [==============================] - 14s - loss: 0.1789 - acc: 0.9336 - val_loss: 0.2682 - val_acc: 0.9243\n", + "64/64 [==============================] - 14s - loss: 0.1985 - acc: 0.9268 - val_loss: 0.3713 - val_acc: 0.8954\n", "Epoch 3/30\n", - "64/64 [==============================] - 14s - loss: 0.1597 - acc: 0.9404 - val_loss: 0.2590 - val_acc: 0.9267\n", + "64/64 [==============================] - 14s - loss: 0.1624 - acc: 0.9336 - val_loss: 0.2705 - val_acc: 0.9111\n", "Epoch 4/30\n", - "64/64 [==============================] - 14s - loss: 0.1227 - acc: 0.9561 - val_loss: 0.3725 - val_acc: 0.9050\n", + "64/64 [==============================] - 14s - loss: 0.1224 - acc: 0.9521 - val_loss: 0.3526 - val_acc: 0.9050\n", "Epoch 5/30\n", - "64/64 [==============================] - 14s - loss: 0.1375 - acc: 0.9492 - val_loss: 0.3094 - val_acc: 0.9062\n", + "64/64 
[==============================] - 14s - loss: 0.1321 - acc: 0.9502 - val_loss: 0.2383 - val_acc: 0.9231\n", "Epoch 6/30\n", - "64/64 [==============================] - 14s - loss: 0.0917 - acc: 0.9678 - val_loss: 0.2808 - val_acc: 0.9159\n", + "64/64 [==============================] - 14s - loss: 0.0970 - acc: 0.9624 - val_loss: 0.2955 - val_acc: 0.9135\n", "Epoch 7/30\n", - "64/64 [==============================] - 14s - loss: 0.1110 - acc: 0.9595 - val_loss: 0.3092 - val_acc: 0.9171\n", + "64/64 [==============================] - 14s - loss: 0.0981 - acc: 0.9644 - val_loss: 0.2939 - val_acc: 0.9243\n", "Epoch 8/30\n", - "64/64 [==============================] - 14s - loss: 0.1013 - acc: 0.9653 - val_loss: 0.3118 - val_acc: 0.9147\n", + "64/64 [==============================] - 14s - loss: 0.0655 - acc: 0.9761 - val_loss: 0.3388 - val_acc: 0.9219\n", "Epoch 9/30\n", - "64/64 [==============================] - 14s - loss: 0.0856 - acc: 0.9731 - val_loss: 0.3227 - val_acc: 0.9171\n", + "64/64 [==============================] - 14s - loss: 0.0664 - acc: 0.9746 - val_loss: 0.2822 - val_acc: 0.9171\n", "Epoch 10/30\n", - "64/64 [==============================] - 14s - loss: 0.0820 - acc: 0.9722 - val_loss: 0.3082 - val_acc: 0.9255\n", + "64/64 [==============================] - 14s - loss: 0.0778 - acc: 0.9702 - val_loss: 0.2287 - val_acc: 0.9327\n", "Epoch 11/30\n", - "64/64 [==============================] - 14s - loss: 0.0663 - acc: 0.9790 - val_loss: 0.2455 - val_acc: 0.9303\n", + "64/64 [==============================] - 14s - loss: 0.0466 - acc: 0.9839 - val_loss: 0.2978 - val_acc: 0.9207\n", "Epoch 12/30\n", - "64/64 [==============================] - 14s - loss: 0.0560 - acc: 0.9810 - val_loss: 0.3503 - val_acc: 0.9219\n", + "64/64 [==============================] - 14s - loss: 0.0616 - acc: 0.9785 - val_loss: 0.3196 - val_acc: 0.9171\n", "Epoch 13/30\n", - "64/64 [==============================] - 14s - loss: 0.0691 - acc: 0.9775 - val_loss: 0.2757 - val_acc: 0.9339\n", + "64/64 [==============================] - 14s - loss: 0.0532 - acc: 0.9814 - val_loss: 0.4721 - val_acc: 0.8978\n", "Epoch 14/30\n", - "64/64 [==============================] - 14s - loss: 0.0713 - acc: 0.9717 - val_loss: 0.2953 - val_acc: 0.9303\n", + "64/64 [==============================] - 14s - loss: 0.0786 - acc: 0.9707 - val_loss: 0.3044 - val_acc: 0.9147\n", "Epoch 15/30\n", - "64/64 [==============================] - 14s - loss: 0.0578 - acc: 0.9800 - val_loss: 0.2959 - val_acc: 0.9255\n", + "64/64 [==============================] - 14s - loss: 0.0518 - acc: 0.9814 - val_loss: 0.3593 - val_acc: 0.9195\n", "Epoch 16/30\n", - "64/64 [==============================] - 14s - loss: 0.0368 - acc: 0.9868 - val_loss: 0.4587 - val_acc: 0.9075\n", + "64/64 [==============================] - 14s - loss: 0.0527 - acc: 0.9824 - val_loss: 0.3035 - val_acc: 0.9183\n", "Epoch 17/30\n", - "64/64 [==============================] - 14s - loss: 0.0438 - acc: 0.9849 - val_loss: 0.3330 - val_acc: 0.9255\n", + "64/64 [==============================] - 14s - loss: 0.0349 - acc: 0.9868 - val_loss: 0.3020 - val_acc: 0.9195\n", "Epoch 18/30\n", - "64/64 [==============================] - 14s - loss: 0.0362 - acc: 0.9883 - val_loss: 0.3822 - val_acc: 0.9327\n", + "64/64 [==============================] - 14s - loss: 0.0332 - acc: 0.9888 - val_loss: 0.3253 - val_acc: 0.9291\n", "Epoch 19/30\n", - "64/64 [==============================] - 14s - loss: 0.0446 - acc: 0.9858 - val_loss: 0.2899 - val_acc: 0.9243\n", + "64/64 
[==============================] - 14s - loss: 0.0433 - acc: 0.9849 - val_loss: 0.3177 - val_acc: 0.9279\n", "Epoch 20/30\n", - "64/64 [==============================] - 14s - loss: 0.0520 - acc: 0.9863 - val_loss: 0.3991 - val_acc: 0.9147\n", + "64/64 [==============================] - 14s - loss: 0.0401 - acc: 0.9873 - val_loss: 0.2912 - val_acc: 0.9255\n", "Epoch 21/30\n", - "64/64 [==============================] - 14s - loss: 0.0419 - acc: 0.9858 - val_loss: 0.3005 - val_acc: 0.9243\n", + "64/64 [==============================] - 14s - loss: 0.0356 - acc: 0.9897 - val_loss: 0.3278 - val_acc: 0.9207\n", "Epoch 22/30\n", - "64/64 [==============================] - 14s - loss: 0.0295 - acc: 0.9878 - val_loss: 0.3289 - val_acc: 0.9315\n", + "64/64 [==============================] - 14s - loss: 0.0325 - acc: 0.9888 - val_loss: 0.3212 - val_acc: 0.9291\n", "Epoch 23/30\n", - "64/64 [==============================] - 14s - loss: 0.0296 - acc: 0.9897 - val_loss: 0.3794 - val_acc: 0.9062\n", + "64/64 [==============================] - 14s - loss: 0.0362 - acc: 0.9849 - val_loss: 0.3038 - val_acc: 0.9303\n", "Epoch 24/30\n", - "64/64 [==============================] - 14s - loss: 0.0407 - acc: 0.9858 - val_loss: 0.3036 - val_acc: 0.9279\n", + "64/64 [==============================] - 14s - loss: 0.0303 - acc: 0.9893 - val_loss: 0.2688 - val_acc: 0.9279\n", "Epoch 25/30\n", - "64/64 [==============================] - 14s - loss: 0.0414 - acc: 0.9858 - val_loss: 0.3110 - val_acc: 0.9291\n", + "64/64 [==============================] - 14s - loss: 0.0268 - acc: 0.9912 - val_loss: 0.3272 - val_acc: 0.9315\n", "Epoch 26/30\n", - "64/64 [==============================] - 14s - loss: 0.0357 - acc: 0.9873 - val_loss: 0.3260 - val_acc: 0.9183\n", + "64/64 [==============================] - 14s - loss: 0.0214 - acc: 0.9922 - val_loss: 0.2904 - val_acc: 0.9327\n", "Epoch 27/30\n", - "64/64 [==============================] - 14s - loss: 0.0294 - acc: 0.9893 - val_loss: 0.3376 - val_acc: 0.9315\n", + "64/64 [==============================] - 14s - loss: 0.0232 - acc: 0.9917 - val_loss: 0.3277 - val_acc: 0.9351\n", "Epoch 28/30\n", - "64/64 [==============================] - 14s - loss: 0.0337 - acc: 0.9907 - val_loss: 0.2400 - val_acc: 0.9399\n", + "64/64 [==============================] - 14s - loss: 0.0227 - acc: 0.9917 - val_loss: 0.2777 - val_acc: 0.9399\n", "Epoch 29/30\n", - "64/64 [==============================] - 14s - loss: 0.0196 - acc: 0.9932 - val_loss: 0.3767 - val_acc: 0.9267\n", + "64/64 [==============================] - 14s - loss: 0.0161 - acc: 0.9946 - val_loss: 0.4184 - val_acc: 0.9195\n", "Epoch 30/30\n", - "64/64 [==============================] - 14s - loss: 0.0252 - acc: 0.9907 - val_loss: 0.3516 - val_acc: 0.9303\n" + "64/64 [==============================] - 14s - loss: 0.0217 - acc: 0.9941 - val_loss: 0.2723 - val_acc: 0.9399\n" ] }, { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 35, + "execution_count": 33, "metadata": {}, "output_type": "execute_result" } @@ -1259,7 +1260,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 34, "metadata": { "collapsed": true }, @@ -1270,7 +1271,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 35, "metadata": { "collapsed": true }, @@ -1295,7 +1296,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -1303,10 +1304,10 @@ { "data": { "text/plain": [ - "[0.33350689638357905, 0.93280498798076927]" + 
"[0.31461367263587958, 0.93137770432692313]" ] }, - "execution_count": 37, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } @@ -1314,6 +1315,22 @@ "source": [ "model.evaluate_generator(validation_generator, validation_samples)" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**We reached a 93% accuracy on the validation with 8% of the samples originally available on the Kaggle competition!**" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] } ], "metadata": {