8.05 Generating images using GAN in TensorFlow.ipynb
ADDED
@@ -0,0 +1,562 @@
{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"# Generating images using GAN in TensorFlow\n",
"\n",
"Let's strengthen our understanding of GANs by building them to generate images of handwritten digits in TensorFlow."
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Import libraries\n",
"\n",
"First, we will import all the necessary libraries:"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import warnings\n",
"warnings.filterwarnings('ignore')\n",
"\n",
"import numpy as np\n",
"import tensorflow as tf\n",
"from tensorflow.examples.tutorials.mnist import input_data\n",
"tf.logging.set_verbosity(tf.logging.ERROR)\n",
"\n",
"import matplotlib.pyplot as plt\n",
"%matplotlib inline\n",
"\n",
"tf.reset_default_graph()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Read the Dataset\n",
"\n",
"Load the MNIST dataset:"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Extracting data/mnist/train-images-idx3-ubyte.gz\n",
"Extracting data/mnist/train-labels-idx1-ubyte.gz\n",
"Extracting data/mnist/t10k-images-idx3-ubyte.gz\n",
"Extracting data/mnist/t10k-labels-idx1-ubyte.gz\n"
]
}
],
"source": [
"data = input_data.read_data_sets(\"data/mnist\",one_hot=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Let's plot one image:"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {
"scrolled": true
},
"outputs": [
{
"data": {
"text/plain": [
"<matplotlib.image.AxesImage at 0x7f1529d77990>"
]
},
"execution_count": 3,
"metadata": {},
"output_type": "execute_result"
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAADmJJREFUeJzt3X+MFfW5x/HPA1IjQiJau66WCFZzAxoDzSpN6o/eWAlsqtjEKPyha2q6mNR4G40p8ZpcorkGbyw3/ccm24Bdml7pjT8JYlvES21jQ1h1i7i0lVsXgSysCrE2aBD3uX/s7O0W93zPcmbOmbP7vF/JZs+Z58zM4+BnZ+bMOfM1dxeAeKaU3QCAchB+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBndbIlZkZHycE6szdbTyvy7XnN7MlZvYnM9trZqvyLAtAY1mtn+03s6mS/izpekkHJO2UtMLd+xLzsOcH6qwRe/4rJe1197+4+3FJGyUty7E8AA2UJ/wXSNo/6vmBbNo/MLNOM+sxs54c6wJQsLq/4efuXZK6JA77gWaSZ89/UNLsUc+/nE0DMAHkCf9OSZeY2Vwz+4Kk5ZI2FdMWgHqr+bDf3U+Y2d2SfiVpqqT17v5WYZ0BqKuaL/XVtDLO+YG6a8iHfABMXIQfCIrwA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiAowg8EVfMQ3ZJkZv2SPpL0maQT7t5WRFPRnHfeecn60qVLk/V58+bVVJOk9vb2ZH3t2rXJ+pYtW5L1PXv2VKx9/PHHyXk//PDDZH3q1KnJ+u23316xNn369OS8XV1dyfqnn36arE8EucKf+Wd3f7+A5QBoIA77gaDyht8l/drMXjOzziIaAtAYeQ/7r3L3g2b2JUlbzeyP7v7K6BdkfxT4wwA0mVx7fnc/mP0elPSspCvHeE2Xu7fxZiDQXGoOv5mdaWYzRx5LWixpd1GNAaivPIf9LZKeNbOR5fyXu/+ykK4A1J25e+NWZta4lTWRjo6OZH39+vXJeiP/jU6W/XGvKE9v77zzTrJ+2223JevXXHNNsv7II4+cck8jFixYkKzv3t28B7nunv5Hy3CpDwiK8ANBEX4gKMIPBEX4gaAIPxAUl/oKcP755yfru3btStZnzZqVrNfz32jfvn3J+pw5c5L1iXoZ8oMPPkjOu2jRomS9v78/WS8Tl/oAJBF+ICjCDwRF+IGgCD8QFOEHgiL8QFBF3L03vOuuuy5ZP+uss3It/7nnnkvWV69eXfOyq13vPvfcc5P1av9tTzzxRMXahRdemJw3r6NHj1as3Xrrrcl5m/k6flHY8wNBEX4gKMIPBEX4gaAIPxAU4QeCIvxAUFznL8Bdd92Va/5jx44l6729vcn6kiVLal73zp07k/UrrrgiWb/hhhuS9Xpfy0/Zv39/xdr27dsb10iTYs8PBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0FVvW+/ma2X9C1Jg+5+WTbtbEm/kDRHUr+kW9y98pen/76sSXnf/jVr1iTr9913X7J+2mnpj1sMDQ2dck9FmTIlvX/I09snn3ySrD/66KPJ+v3335+sT58+vWKt2mcjtm7dmqw3syLv2/9TSSdvqVWStrn7JZK2Zc8BTCBVw+/ur0g6ctLkZZK6s8fdkm4quC8AdVbrOX+Luw9kjw9JaimoHwANkvuz/e7uqXN5M+uU1Jl3PQCKVeue/7CZtUpS9nuw0gvdvcvd29y9rcZ1AaiDWsO/SVJH9rhD0vPFtAOgUaqG38yelPR7Sf9kZgfM7E5JayRdb2ZvS/pm9hzABFL1nN/dV1QopW9WH8iqVekrndXufX/HHXck69U+i1FP1a7jv/vuu8n6G2+8UbH22GOPJed99dVXk/UZM2Yk6/fee2/F2mS+zj9efMIPCIrwA0ERfiAowg8ERfiBoAg/EBS37m6Ahx9+OFnfuHFjgzop3u7du5P1gYGBZL0s8+fPL7uF0rHnB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGguM7fAP39/bnqGNu8efNqnrevr6/ATiYm9vxAUIQfCIrwA0ERfiAowg8ERfiBoAg/EFTVIboLXdkkHaIb9dHWlh7kaceOHcn64GDFgaR09dVXJ+fdu3dvst7MihyiG8AkRPiBoAg/EBThB4Ii/EBQhB8IivADQVX9Pr+ZrZf0LUmD7n5ZNm21pO9Kei972QPuvqVeTWJyOuOMM5L1F154IVmfMiW97zp06FDF2kS+jl+U8ez5fypprMHM/9PdF2Q/BB+YYKqG391fkXSkAb0AaKA85/x3m9kuM1tvZrMK6whAQ9Qa/h9L+oqkBZIGJP2w0gvNrNPMesysp8Z1AaiDmsLv7ofd/TN3H5L0E0lXJl7b5e5t7p7+lgaAhqop/GbWOurptyWlh2oF0HTGc6nvSUnfkPRFMzsg6d8kfcPMFkhySf2SVtaxRwB1UDX87r5ijMnr6tALJqGZM2dWrHV3dyfnPeecc5L1oaGhZH3z5s3JenR8wg8IivADQRF+ICjCDwRF+IGgCD8QFEN0o66WL19esXbjjTfmWnZvb2+y/vjjj+da/mTHnh8IivADQRF+ICjCDwRF+IGgCD8QFOEHguI6P5Kq3V573br0t7vb29trXndfX1+y/uCDDybrAwMDNa87Avb8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxCUuXvjVmbWuJU1UEdHR7K+dOnSZP3aa69N1uv5b9TTkx5FbdGiRcl6tdtr53HxxRcn6/39/XVb90Tm7jae17HnB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgql7nN7PZkjZIapHkkrrc/UdmdrakX0iaI6lf0i3ufrTKsibsdf7FixdXrL344ou5lj1lSvpvcLWhqOspb2/Hjx+vWFu5cmVy3g0bNiTrGFuR1/lPSLrP3edL+pqk75nZfEmrJG1z90skbcueA5ggqobf3Qfc/fXs8UeS9ki6QNIySd3Zy7ol3VSvJgEU75TO+c1sjqSFknZIanH3kfskHdLwaQGACWLc9/AzsxmSnpb0fXf/q9nfTyvc3Sudz5tZp6TOvI0CKNa49vxmNk3Dwf+5uz+TTT5sZq1ZvVXS4FjzunuXu7e5e1sRDQMoRtXw2/Aufp2kPe6+dlRpk6SRr7N1SHq++PYA1Mt4Dvu/Luk2SW+a2ciYyA9IWiPpv83sTkn7JN1Snxabw5IlSyrW8n7lttrlskZ+7fpkeXtLfWV4+/bttbSEglQNv7v/TlKl64bXFdsOgEbhE35AUIQfCIrwA0ERfiAowg8ERfiBoLh1d2bmzJnJ+ssvv1yxtnDhwlzrHv1R6bGUeZ2/nr1VG0L78ssvT9aPHk1+gzwsbt0NIInwA0ERfiAowg8ERfiBoAg/EBThB4Ia9228JruLLrooWc97L
b8smzdvTtZfeumlZL3adf577rknWZ87d27FWmtra3LeadOmJevIhz0/EBThB4Ii/EBQhB8IivADQRF+ICjCDwTFdf7Me++9l6w/9dRTFWs333xzrnUfO3YsWX/ooYeS9e7u7oq1I0eOJOc9ceJEsl5NtWG0Tz/99Iq1Sy+9NDlvte2CfNjzA0ERfiAowg8ERfiBoAg/EBThB4Ii/EBQVe/bb2azJW2Q1CLJJXW5+4/MbLWk70oauUD+gLtvqbKspr1vPzBZjPe+/eMJf6ukVnd/3cxmSnpN0k2SbpH0N3d/bLxNEX6g/sYb/qqf8HP3AUkD2eOPzGyPpAvytQegbKd0zm9mcyQtlLQjm3S3me0ys/VmNqvCPJ1m1mNmPbk6BVCocY/VZ2YzJP1G0r+7+zNm1iLpfQ2/D/Cwhk8NvlNlGRz2A3VW2Dm/JJnZNEmbJf3K3deOUZ8jabO7X1ZlOYQfqLPCBuq04du3rpO0Z3TwszcCR3xb0u5TbRJAecbzbv9Vkn4r6U1JQ9nkByStkLRAw4f9/ZJWZm8OppbFnh+os0IP+4tC+IH6K+ywH8DkRPiBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiq0UN0vy9p36jnX8ymNaNm7a1Z+5LorVZF9nbheF/Y0O/zf27lZj3u3lZaAwnN2luz9iXRW63K6o3DfiAowg8EVXb4u0pef0qz9tasfUn0VqtSeiv1nB9Aecre8wMoSSnhN7MlZvYnM9trZqvK6KESM+s3szfNrLfsIcayYdAGzWz3qGlnm9lWM3s7+z3mMGkl9bbazA5m267XzNpL6m22mf2PmfWZ2Vtm9i/Z9FK3XaKvUrZbww/7zWyqpD9Lul7SAUk7Ja1w976GNlKBmfVLanP30q8Jm9k1kv4macPIaEhm9h+Sjrj7muwP5yx3/0GT9LZapzhyc516qzSy9B0qcdsVOeJ1EcrY818paa+7/8Xdj0vaKGlZCX00PXd/RdKRkyYvk9SdPe7W8P88DVeht6bg7gPu/nr2+CNJIyNLl7rtEn2VoozwXyBp/6jnB9RcQ367pF+b2Wtm1ll2M2NoGTUy0iFJLWU2M4aqIzc30kkjSzfNtqtlxOui8Ybf513l7l+VtFTS97LD26bkw+dszXS55seSvqLhYdwGJP2wzGaykaWflvR9d//r6FqZ226MvkrZbmWE/6Ck2aOefzmb1hTc/WD2e1DSsxo+TWkmh0cGSc1+D5bcz/9z98Pu/pm7D0n6iUrcdtnI0k9L+rm7P5NNLn3bjdVXWdutjPDvlHSJmc01sy9IWi5pUwl9fI6ZnZm9ESMzO1PSYjXf6MObJHVkjzskPV9iL/+gWUZurjSytEredk034rW7N/xHUruG3/H/X0n/WkYPFfq6SNIfsp+3yu5N0pMaPgz8VMPvjdwp6RxJ2yS9LeklSWc3UW8/0/Bozrs0HLTWknq7SsOH9Lsk9WY/7WVvu0RfpWw3PuEHBMUbfkBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgvo/kd65TwXQV2UAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"plt.imshow(data.train.images[13].reshape(28,28),cmap=\"gray\")"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Defining the Generator\n",
"\n",
"The generator $G$ takes the noise $z$ as an input and returns an image. We define the generator as a feedforward network with three layers. Instead of coding each layer from scratch, we can use tf.layers.dense(), which creates a dense (fully connected) layer. It takes three parameters: the inputs, the number of units, and the activation function."
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"def generator(z,reuse=None):\n",
"    \n",
"    with tf.variable_scope('generator',reuse=reuse):\n",
"        \n",
"        hidden1 = tf.layers.dense(inputs=z,units=128,activation=tf.nn.leaky_relu)\n",
"        hidden2 = tf.layers.dense(inputs=hidden1,units=128,activation=tf.nn.leaky_relu)\n",
"        \n",
"        #tanh keeps the generated pixel values in the range [-1, 1]\n",
"        output = tf.layers.dense(inputs=hidden2,units=784,activation=tf.nn.tanh)\n",
"        \n",
"        return output"
]
},
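{
"cell_type": "markdown",
"metadata": {},
"source": [
"Since the generator's last layer uses a tanh activation, the generated pixel values lie in the range $[-1, 1]$. The real MNIST images load with pixel values in $[0, 1]$, so during training we will rescale them with batch_images * 2 - 1 to bring the real and fake images into the same range. As a quick sanity check, we can rescale one training image and look at its range:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#rescale one real image from [0, 1] to [-1, 1], the same range as the generator's tanh output\n",
"sample_image = data.train.images[13] * 2 - 1\n",
"print(sample_image.min(), sample_image.max())"
]
},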
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Defining the Discriminator\n",
"\n",
"We know that the discriminator $D$ tells us the probability of a given image being real. We define the discriminator, too, as a feedforward network with three layers. Note that the function returns the raw logits; the loss functions we define below apply the sigmoid themselves:"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [],
"source": [
"def discriminator(X,reuse=None):\n",
"    \n",
"    with tf.variable_scope('discriminator',reuse=reuse):\n",
"        \n",
"        hidden1 = tf.layers.dense(inputs=X,units=128,activation=tf.nn.leaky_relu)\n",
"        hidden2 = tf.layers.dense(inputs=hidden1,units=128,activation=tf.nn.leaky_relu)\n",
"        \n",
"        #raw score (logit); applying a sigmoid to it gives the probability of the image being real\n",
"        logits = tf.layers.dense(inputs=hidden2,units=1)\n",
"        output = tf.sigmoid(logits)\n",
"        \n",
"        #we return the logits because tf.nn.sigmoid_cross_entropy_with_logits expects them\n",
"        return logits"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Define the input placeholders\n",
"\n",
"Now, we define the placeholders for the input $x$ and the noise $z$:"
]
},
{
"cell_type": "code",
"execution_count": 6,
"metadata": {},
"outputs": [],
"source": [
"x = tf.placeholder(tf.float32,shape=[None,784])\n",
"z = tf.placeholder(tf.float32,shape=[None,100])"
]
},
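{
"cell_type": "markdown",
"metadata": {},
"source": [
"The noise placeholder $z$ has 100 dimensions, and during training we will feed it batches of noise drawn uniformly from $[-1, 1]$. For example, a single noise vector can be sampled like this:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#sample one 100-dimensional noise vector in [-1, 1], as the training loop does for whole batches\n",
"sample_noise = np.random.uniform(-1,1,size=(1,100))\n",
"print(sample_noise.shape)"
]
},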
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Start the GAN!\n",
"\n",
"First, we feed the noise $z$ to the generator and it outputs the fake image, i.e. $fake \\; x = G(z)$:"
]
},
{
"cell_type": "code",
"execution_count": 7,
"metadata": {},
"outputs": [],
"source": [
"fake_x = generator(z)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now, we feed the real image to the discriminator, $D(x)$, and get the logits for the image being real:"
]
},
{
"cell_type": "code",
"execution_count": 8,
"metadata": {},
"outputs": [],
"source": [
"D_logits_real = discriminator(x)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Similarly, we feed the fake image to the discriminator, $D(G(z))$, and get the logits for the fake image being real. Note that we pass reuse=True so that the same discriminator weights are used for both the real and the fake images:"
]
},
{
"cell_type": "code",
"execution_count": 9,
"metadata": {},
"outputs": [],
"source": [
"D_logits_fake = discriminator(fake_x,reuse=True)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Computing the Loss Function\n",
"\n",
"Now, we will see how to compute the loss functions.\n",
"\n",
"### Discriminator Loss\n",
"\n",
"The discriminator loss is given as:\n",
"\n",
"$L^{D} = - \\mathbb{E}_{x \\sim p_{r}(x)}[\\log D(x)] - \\mathbb{E}_{z \\sim p_{z}(z)}[\\log (1-D(G(z)))]$\n",
"\n",
"<br>\n",
"\n",
"__First term__\n",
"\n",
"The first term, $- \\mathbb{E}_{x \\sim p_{r}(x)}[\\log D(x)]$, is the expectation of the log likelihood of images sampled from the real data distribution being classified as real.\n",
"\n",
"This is just the binary cross-entropy loss. We can implement it with the TensorFlow function tf.nn.sigmoid_cross_entropy_with_logits(), which takes two inputs: logits and labels.\n",
"\n",
"* Logits, as the name suggests, are the logits of the network, so here they are D_logits_real.\n",
"\n",
"* Labels are the true labels. We learned that the discriminator should output 1 for a real image and 0 for a fake image. Since we are calculating the loss for images sampled from the real data distribution, the true label is 1.\n",
"\n",
"We use tf.ones_like() to set the labels to 1 with the same shape as D_logits_real. That is, labels = tf.ones_like(D_logits_real).\n",
"\n",
"Then we compute the mean loss using tf.reduce_mean(). Note the minus sign in the loss above: it converts maximizing the log likelihood into a minimization objective. We don't write the minus sign explicitly in the code, because tf.nn.sigmoid_cross_entropy_with_logits() already computes the negative log likelihood (the cross entropy), and the TensorFlow optimizer then minimizes it."
]
},
{
"cell_type": "code",
"execution_count": 10,
"metadata": {},
"outputs": [],
"source": [
"D_loss_real = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_logits_real,\n",
"                                                                     labels=tf.ones_like(D_logits_real)))"
]
},
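{
"cell_type": "markdown",
"metadata": {},
"source": [
"To make the binary cross-entropy concrete, here is a small illustrative check (the value of toy_logit is arbitrary and used only for this demonstration): with a label of 1, tf.nn.sigmoid_cross_entropy_with_logits() returns $-\\log(\\sigma(logit))$, which we can also compute by hand with NumPy:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#a toy check of the loss: for label 1, the loss equals -log(sigmoid(logit))\n",
"toy_logit = tf.constant([[2.0]])\n",
"toy_loss = tf.nn.sigmoid_cross_entropy_with_logits(logits=toy_logit,\n",
"                                                   labels=tf.ones_like(toy_logit))\n",
"\n",
"with tf.Session() as toy_session:\n",
"    print(toy_session.run(toy_loss))            #roughly 0.1269\n",
"    print(-np.log(1.0 / (1.0 + np.exp(-2.0))))  #the same value computed by hand"
]
},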
{
"cell_type": "markdown",
"metadata": {},
"source": [
"__Second term__\n",
"\n",
"The second term, $- \\mathbb{E}_{z \\sim p_{z}(z)}[\\log (1-D(G(z)))]$, is the expectation of the log likelihood of images generated by the generator being classified as fake.\n",
"\n",
"As with the first term, we can use tf.nn.sigmoid_cross_entropy_with_logits() for calculating the binary cross-entropy loss.\n",
"\n",
"Here,\n",
"\n",
"* Logits are D_logits_fake.\n",
"\n",
"* Since we are calculating the loss for the fake images generated by the generator, the true label is 0.\n",
"\n",
"We use tf.zeros_like() to set the labels to 0 with the same shape as D_logits_fake. That is, labels = tf.zeros_like(D_logits_fake)."
]
},
{
"cell_type": "code",
"execution_count": 11,
"metadata": {},
"outputs": [],
"source": [
"D_loss_fake = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_logits_fake,\n",
"                                                                     labels=tf.zeros_like(D_logits_fake)))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"__Final loss__\n",
"\n",
"Combining the above two terms, the loss function of the discriminator is given as:"
]
},
{
"cell_type": "code",
"execution_count": 12,
"metadata": {},
"outputs": [],
"source": [
"D_loss = D_loss_real + D_loss_fake"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Generator Loss\n",
"\n",
"The generator loss is given as:\n",
"\n",
"$L^{G} = - \\mathbb{E}_{z \\sim p_{z}(z)}[\\log D(G(z))]$\n",
"\n",
"It measures how likely the fake image is to be classified as a real image. As we did for the discriminator, we use tf.nn.sigmoid_cross_entropy_with_logits() for calculating the loss.\n",
"\n",
"Here,\n",
"\n",
"* Logits are D_logits_fake.\n",
"\n",
"* Labels: since this loss measures the probability of the fake image being classified as real, the true label is 1. As we learned, the goal of the generator is to generate fake images and fool the discriminator into classifying them as real.\n",
"\n",
"We use tf.ones_like() to set the labels to 1 with the same shape as D_logits_fake. That is, labels = tf.ones_like(D_logits_fake).\n"
]
},
{
"cell_type": "code",
"execution_count": 13,
"metadata": {},
"outputs": [],
"source": [
"G_loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(logits=D_logits_fake,\n",
"                                                                labels=tf.ones_like(D_logits_fake)))"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Optimizing the Loss\n",
"\n",
"Now we need to optimize our generator and discriminator separately. So, we collect the parameters of the discriminator and the generator as $\\theta_D$ and $\\theta_G$ respectively, by filtering the trainable variables on their variable-scope names ('discriminator' and 'generator'):"
]
},
{
"cell_type": "code",
"execution_count": 14,
"metadata": {},
"outputs": [],
"source": [
"training_vars = tf.trainable_variables()\n",
"\n",
"#variables created inside the 'discriminator' and 'generator' scopes\n",
"theta_D = [var for var in training_vars if 'dis' in var.name]\n",
"theta_G = [var for var in training_vars if 'gen' in var.name]"
]
},
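{
"cell_type": "markdown",
"metadata": {},
"source": [
"As a quick, purely illustrative check, we can print the collected variable names to confirm that each list only contains variables from the corresponding scope:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#confirm that the parameters were split by scope correctly\n",
"print([var.name for var in theta_D])\n",
"print([var.name for var in theta_G])"
]
},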
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Optimize the losses using the Adam optimizer:"
]
},
{
"cell_type": "code",
"execution_count": 15,
"metadata": {},
"outputs": [],
"source": [
"D_optimizer = tf.train.AdamOptimizer(0.001).minimize(D_loss,var_list = theta_D)\n",
"G_optimizer = tf.train.AdamOptimizer(0.001).minimize(G_loss,var_list = theta_G)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Start Training the GAN\n",
"\n",
"Define the batch size and the number of epochs, and initialize all variables:"
]
},
{
"cell_type": "code",
"execution_count": 16,
"metadata": {},
"outputs": [],
"source": [
"batch_size = 100\n",
"num_epochs = 1000\n",
"\n",
"init = tf.global_variables_initializer()"
]
},
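{
"cell_type": "markdown",
"metadata": {},
"source": [
"With 55,000 MNIST training images and a batch size of 100, each epoch runs 550 iterations (indexed 0 to 549), which matches the iteration counter printed in the training output below:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"#number of iterations per epoch, as used in the training loop below\n",
"print(data.train.num_examples // batch_size)"
]
},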
{
"cell_type": "markdown",
"metadata": {},
"source": [
"### Generate Handwritten Digits"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Start the TensorFlow session and train the GAN; on every 100th epoch we plot an image generated by the generator:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch: 0, iteration: 549, Discriminator Loss:4.96056985855, Generator Loss: 0.880222082138\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAGzdJREFUeJzt3Xtw3eV5J/Dvc46P7hdLli3fZMv4irmZIAykToDcBigb0kw2i5Om7DYTQzdsm26m25R2ZrPT2Z1skzRLdxNaU5iQbJPQDElhEppACBsnA2QxYGPAxja2jCXLkmXZuliXc6Tz7B867Aji9/sTsnyO6Pv9zHgsnUfv77znd85zfkd63ou5O0QkPqlSd0BESkPJLxIpJb9IpJT8IpFS8otESskvEiklv0iklPwikVLyi0RqXjHvLFNW7RUVDcF4aixH23tZuLueNto2lc3TeL6Mvw86OXwqmzBKMuEt1iZ4+6S+pUbC5y1fnqFtJyr4eZs3zM8bEkaIWm4iGMtX8r6NV/K+Zc7wvtlIljTmL/1cDY/P6z3D77uinMZh4ceWzyQ837nw4x7JnkZ2fJifuIJzSn4zuwHA3QDSAP7e3b/Efr6iogFtV90Zjh/sofeXbVkQjjWU0bZVRwZofKSllsbzmfD5rOoc4W3L0jSe7h+l8eHWOhqv2X0sGBtd10zbnlrHz1vT7mEaT2XDyQ0A6a6+YGzkoqW07YlLed8WP8P7lnn5CGncRNv2bOHxBff9XxpPt66icSdvPqPL+Wux4nj4jeeZfffStlPN+GO/maUBfB3AjQA2AthqZhtnejwRKa5z+Z1/M4CD7n7I3bMAvgfgltnploicb+eS/MsAHJ3yfUfhtjcxs21mttPMduZy/PckESme8/7Xfnff7u5t7t6WyVSf77sTkWk6l+TvBNAy5fvlhdtE5B3gXJL/WQBrzWyVmZUBuBXAI7PTLRE532Zc6nP3cTO7E8BPMVnqu9/dX05uGA6duXgxbdq/MlwXzgzzenP5SV53TY/ymnH/ynD7mld5qS6V4u+xdoaXCmt2D9G4V1UEY/PO8LETuRp+Xk6tr6LxbB0vKTfuC/dtaCmv8y95mpfyksYYDF6/LhirPsLPafVxXsI8+fubabwp4Tlj5d/Kw6do2+5rFwZjuaO8rDzVOdX53f1RAI+eyzFEpDQ0vFckUkp+kUgp+UUipeQXiZSSXyRSSn6RSBV1Pr/lHenR8WC8fxWvKVd3h2uv2Vr+PjbvNK+lZ47ymnJl0/JgjK0zAAD53XtpfP/ft9H4ksf58WsPh+dMHPgD3nbtPbwenSLPFwCM1/NxAv2rwnX++vYx2jbTcZLGu24KPycAsHhHeDrx4Lr5tG3fBl4vb3mcTxFHwhoN4/Xh6cqZAf6cwMJ1fkxrJv8kXflFIqXkF4mUkl8kUkp+kUgp+UUipeQXiVRRS30T5SkMXFAZjKf57FNUk3JceQ1f6XVsCV8R1ZpraLx+X38wluoNxwDAL7+Ixtd/nZchB9fwvo01hctpjTv4UzwRbgoA6F/Ny68T5by2VHE6PFX62Lv5ndcv/o1V4d6kcR8vFWYXhVeOGl7Er3sr/5aXZ4/97oU0vuyfXqfxitHwsuLDl7UEYwBgrPqasIr8VLryi0RKyS8SKSW/SKSU/CKRUvKLRErJLxIpJb9IpIpa5583mMWCJ8P1T7YENQAc+Vh4ae+VX3+Jth38AK/L1hwapPFUf3jabK51EW070pywXXOCFJ9Vi/Gq8Hu4J6zkfGod71vSNtljjfz4C58OT6s9vZpMTQWQzvGide8l/PWy7JGOYKy5JzzeBACGr15D4017+HLt2ZV8l9+xpvC4lNH5/Emr7Qy/INJJ28VPoSu/SKSU/CKRUvKLRErJLxIpJb9IpJT8IpFS8otE6pzq/GbWDmAQwASAcXena1DnKzIYXb8kGD/8Ud6d5Y+H65vWxAvOlccT6rKNvO473hKeU9+3nvc7O5/XXlf9Ex9jkC/nxz+xKTznvqqHbzU92sDf/2s7eftcHa9Jn/hyeJxA/inaFGea+bGruvm26tnl4ddEeow/rnmjCeetiW8vXtXJX2+ZwfDxqw/yZcEtF86DVJb3e6rZGORzvbv3zsJxRKSI9LFfJFLnmvwO4DEze87Mts1Gh0SkOM71Y/8Wd+80s0UAHjezfe6+Y+oPFN4UtgFAeQXfIklEiuecrvzu3ln4vwfADwFsPsvPbHf3Nndvy2TCCyqKSHHNOPnNrNrMat/4GsCHAPCpdSIyZ5zLx/5mAD80szeO8x13/8ms9EpEzrsZJ7+7HwJw2dtqZJNr94es+2Z4zjwATFSEu9vxr5bStqMLea190XO8ZjxWH+53RR8/doY/LHS9p47GK07y4+dJybnrPfy+F1zAt8H+zqX30/hN3/hPNH7yUEMw5heE164HgMpneC39PV94hsZ/9rfXBGP1h/kmEScv5PtALHvsBI1P1PK1Bk6tCx+/uZdvF2/9ZAxBnr+Op1KpTyRSSn6RSCn5RSKl5BeJlJJfJFJKfpFIFXXpbgAwUrUavIBvRV1xMlyeKRvg5bCFu/l2zuXtfGLigdvD20Wv/DHfYrvnSr7NdeUJXp6pvK2LH/9AczD2vne9Qts+uZNvH37tiT+i8Qdvv5vG/9294fYTm3hJa2gFL/U9tHcTjTedCb8m5g3xUt/SJ4doHH18W/ZTV/Glu2s7wlNvhy7gpd/aHJm22zP967mu/CKRUvKLRErJLxIpJb9IpJT8IpFS8otESskvEqmi1/nh4dpreozXu8ufOxiM1edX07bZ+fyhjlwZXlIcAFp/FK7lD67k0zczg0lTfhOm7N4druMDgN0QXh77l+0X0LZL/w8No/tKPrV1OM+3+C4jq1B/eP3ztO1Dz11L436YL7d+yX94MRj7xc8vpW3L+/iqU3Xt9TTOplkDQHokXKs3Mu0dAHquWRCM5bqnn9K68otESskvEiklv0iklPwikVLyi0RKyS8SKSW/SKTMSd19ttVXLPZrVvwe6U24Xg0ABz8drnenV/P5143f53Xb+l18KebDnwjfd66Gj09oeoGG0XczXw8gvZf3fe8d3wjG1u0g5xtAy3ZekO69jNfxkfDyGV4c/gFLWGW6YS+P916eMD6iNlxLb23t4Qf/ykIarnydz+c/eSWfz9+wJzwAYnglf75rd3YGY08d/y76s908kQp05ReJlJJfJFJKfpFIKflFIqXkF4mUkl8kUkp+kUglTv41s/sB3Aygx90vLtzWCOBBAK0A2gF83N1PJR3LM2nkls0P39c4r9u2PkzWeU+ladtDH6VhVPSG+wUAY6vC2yKXtfP5/NnahD0FfsjnpZ/YxNvf+KFbg7F5t/C9EHquoOHEeekpvvw9dnziy8HYu//5j2nbE1fza1P9y/w5X/qT48HY8Q/wLd2dL4OAyl28zl/TydfeP3l5OF55kqzLD2BsbXjMiZ9OeMKmmM6V/5sAbnjLbV8A8IS7rwXwROF7EXkHSUx+d98BoO8tN98C4
IHC1w8A+Mgs90tEzrOZ/s7f7O5v7CF1HABfZ0pE5pxz/oOfT04OCP5SambbzGynme3M5s6c692JyCyZafJ3m9kSACj8H5wl4e7b3b3N3dvKMnzCgogUz0yT/xEAtxW+vg3Aw7PTHREplsTkN7PvAngawHoz6zCzTwP4EoAPmtkBAB8ofC8i7yCJdX533xoIvf/t3tl4RQqn1oRr4ile3sSCneGhBKPLeD17zXcGabznSl6XxelwrX3+fj4xfWQRf4899j7+wC3Lp2dX39MbjC37Cz6G4PRqPkah7xI+xqC8ha+jsPX2cC2/fg2vSf/iT79K4++q+/c03rA/vL59eow2RdkQf05HL1tB4+nhcX4H5LTW7g6PTwAAjGWDIRsNx95KI/xEIqXkF4mUkl8kUkp+kUgp+UUipeQXiVRRt+hOjznqD4dLEWUnyZRdAMOt4XJc30b+UJY/yktSSVY+Gi7dlPfxulHZ7/H7Xl3J44cfXEvj9ZvD041r/3t4W3MAOPj8hTQ+fxnZYxtA3nkZ8uQl4XJePuHVd9VT22i8sYEPF2//cEMwtuSXvJQ3b5THK47yKb0Y5+XbhS8eCsYGPsCfk/RIuG/5X/Et1afSlV8kUkp+kUgp+UUipeQXiZSSXyRSSn6RSCn5RSJV1Dq/zzOMLgjf5WALXz67tiM8RmDF97tp2/4rFtN42RCfutp5bbheXdHLa6s2wqd3lv9lPY0P38j79vNdG4Ox+lf4U5xZyo9duYM/J/2r+PVj/AoyhmE/X9kpvaeWxntb+HRklIUfW90BPrZi/+f51uRLHm6k8a6P8Km1+YHwFt6LnubntGo4YW/zadKVXyRSSn6RSCn5RSKl5BeJlJJfJFJKfpFIKflFIlXUOn9qLI/a9vCc/eGlfJnp8crwlsy9W/iWy2VneG208Rk+TmCgdUm4X1W0KXKv8lr5v76X73lyz9f5PqjLnwjPHb/ky8/Ttk/+7800fvNdP6fx+35+PY1XzAv3bbSGjzF44Jb/ReN/sOeTNN70lfDrqX8DX6p94U9oGAMr+XWz6acJYxCIBQmvxf13LArGsnv4+gpT6covEiklv0iklPwikVLyi0RKyS8SKSW/SKSU/CKRMndeazWz+wHcDKDH3S8u3PZFAJ8BcKLwY3e5+6NJd1ZXs8yvuvSOYNyN1yizDeF585lBPmd+qIXPz/aEt0FW1132C77fQPuH+fiFmnb+uPs3830Btqw/EIzt62umbU8c52sJVDeM0Hg6xcdPDHaE6+lbrthL23b9yQU03r2ZD7DIkDUacjX8nNce5evuDy8MjzkBAMsnbG0+EI6X9/P7rjwa3m7+mf33oX/42LSK/dO58n8TwA1nuf1r7r6p8C8x8UVkbklMfnffAaCvCH0RkSI6l9/57zSzF83sfjML74skInPSTJP/HgCrAWwC0AXgq6EfNLNtZrbTzHbmcnxvNREpnhklv7t3u/uEu+cB3AsgODvE3be7e5u7t2UyfMFGESmeGSW/mU2d4vY7AF6ane6ISLEkTuk1s+8CuA5Ak5l1APjPAK4zs00AHEA7gNvPYx9F5DxITH5333qWm++byZ152pCdH663T5Tz8mRVR/hvBv3r+Rrvgyv4h5zGfXycQNlAuP2hj/G52xXd/HENrOO18g0ru2h81/cvDsaS1hqo4OVqnMnx81Z2kh8gtSo8TiCfMLji4Cf5fgif+a0naPyxP3tvMNZ1NX/pL/9RL43XNPJfYfPl/Lx0XxEe+zFRxl8vqbHwfecPTf/DvEb4iURKyS8SKSW/SKSU/CKRUvKLRErJLxKpxCm9s6muZplfdXF4SMBIwtLdo/PD5ZN0lj+OsXpePjmznIYxUR4+fsMr/NhDLTye38i3i06l+GMb7Q2ft/XrO2nbw0+toPHadhpG/zoeH28Il1DrXglvew4AnjAxtXFvjsY7rwuX82qO8oOf3shLvzWHeamwpoOXb2uPhEugExX82KlseMrvsy98AwODnbM2pVdE/gVS8otESskvEiklv0iklPwikVLyi0RKyS8SqaJu0Z2tS+HoB8NTb1f+j920fW3zwnBwZJS27b55FY23PM6Xxx5eHJ5eOphQx684ScOo+zafEnzsE1karzkUfhrbFzTStk27eT362E283v2j6/k22h/99n8MxrLvDi9BDQC3rnuOxn/ccRGNZ0bC08frd/AxJVU9fEpu/S8O0vjIJj5+wjPh62551wBte2ZNeMlMT2uLbhFJoOQXiZSSXyRSSn6RSCn5RSKl5BeJlJJfJFJFrfOns0BNZ3hu+uiWC2n7kxeFa+2VJ/ic96oTfNvj/LyELZsPhZcNH9nK55Xjp7zWXrOLz7kf//BSGndSkm56kK/dffdX/ieN3/nnf0jjn1vxcRqfH949HP9l6z/Stn91x+/SeP/1fGnv9HD4OR1cxl8vaT60Aid+ezWNn05Y52DNX4a3Jz998yW0rZGue0p1fhFJoOQXiZSSXyRSSn6RSCn5RSKl5BeJlJJfJFKJdX4zawHwLQDNABzAdne/28waATwIoBVAO4CPu/spdqxUzlFzLFwTnyhP2A66P1zgtDyv22YGeZ0/yclLa4Kx7DMJ6/I38WN/8Kcv0/jen7XQ+OjC8Jz805v5ngB3vMxr6ZmE03akh49hGH9/eJ2Ev/jS79O2w3fw+f654YRa/f7wOgnND71K2x77xAYaX/pYD417mqw9AcA3tAZjDb8+RtsiF15jIT2UMEBhiulc+ccBfN7dNwK4GsBnzWwjgC8AeMLd1wJ4ovC9iLxDJCa/u3e5+/OFrwcB7AWwDMAtAB4o/NgDAD5yvjopIrPvbf3Ob2atAC4H8GsAze7eVQgdx+SvBSLyDjHt5DezGgAPAficu79pkTGf3PDvrL+Amdk2M9tpZjtz2fD4eBEprmklv5llMJn4/+DuPyjc3G1mSwrxJQDO+hcQd9/u7m3u3pYpq56NPovILEhMfjMzAPcB2Ovufz0l9AiA2wpf3wbg4dnvnoicL4lbdJvZFgC/BLAHwBs1pbsw+Xv/PwJYAeAIJkt9fexYdXXLva3ts8F4+eu0OU5duTgYmyjn5bYJvhs0Gg7ypb973hVe6rn6GF/++vh7E6Ybv86XibarT9N489+ES1qv/Rtezb1uU3hqKQA0lfFS4T8/eA2N119/PBg78QL/M1G+jJ+3yi5+7TrTEn5ealb207Y136un8fQY71t1Oz9v+crw89J9Jf+E3HAwXC5/4Zd/g8HTHdOa15tY53f3XwEIHez907kTEZl7NMJPJFJKfpFIKflFIqXkF4mUkl8kUkp+kUgVdeluTwG5mvBd9l+3hLZf+HRvMGbDvE7fe+1yGh9r4AMBajrCc1uPvT9haukgr+PnE8YgrPiv/D36mvueCcYO77mKtt3/1Y00XvYnL9J4hpezcXzfomBsfjtvO34jr8WX75lP4+bh87b8v/F907s+Ft4GGwCWPMinBPtSPqX31Ia6YGz+a3xb9PQoGVfCh5y8ia78IpFS8otESskvEiklv0iklPwikVLyi0RKyS8SqaLW+VPZ
PKpeHwjGc9W8bpuvCm/JnPQuVjbEC6CphPnZw03hWn3tfj59uqKPH7tiaxeNj/+K15x/cPiyYOyK1tdp2z0Xrafxx164mMZxCa9Jf/Lqp4OxHx94D2276GvhdQoAYLCFn9e69vBz/tof8i2208M0jKEtvH1V5wiNj1eEXzM2wR9Xf2s4DyZ2aYtuEUmg5BeJlJJfJFJKfpFIKflFIqXkF4mUkl8kUkWt8+fnpTC2KLwmecNTHbS9D4W3+7L54fnRAJCr4vXPzmv5nPvFT4Vrr5V9fAxBfys/9sRD4f0IAGDgUt735Z8Lr3Pw0qd4Hb/1EbqrOk5v5OvXjzbw68dPnt0SjDXt57XwTC9fLGDBqwlbWVeGxwnUvsbXxu+9nD/uM4v4c5qt4cfPksOncvz1VDYUfi2m3sZO9Lryi0RKyS8SKSW/SKSU/CKRUvKLRErJLxIpJb9IpBLr/GbWAuBbAJoBOIDt7n63mX0RwGcAnCj86F3u/ig7lqcNudrwXfpFfN3+stNj4X7meIFz/t5BGq8+xueOn1kaXly/9jCf/J0ZKqfxeUPh/dYBYHBlJY17Zfj4tUf43PDX/oxvGmAHE9YqOEHDqD0Wfl463scf18LdvG8V3VU0nh4Jn9eBtbW0bWYkYa2AXfz1NLaIP7by0+HrbmYgS9siNfO1AKaaziCfcQCfd/fnzawWwHNm9ngh9jV3/8q0701E5ozE5Hf3LgBdha8HzWwvgGXnu2Micn69rd/5zawVwOUAfl246U4ze9HM7jezs641ZWbbzGynme3MjSXs7SQiRTPt5DezGgAPAficuw8AuAfAagCbMPnJ4Ktna+fu2929zd3bMuU1s9BlEZkN00p+M8tgMvH/wd1/AADu3u3uE+6eB3AvgM3nr5siMtsSk9/MDMB9APa6+19PuX3qn+Z/B8BLs989ETlfpvPX/t8C8CkAe8xsV+G2uwBsNbNNmCz/tQO4PelAExVA34bwVMhFz/NynVu4xDGwnpduGp7tofHBixOmBNeG73tsAS8Tlp8KlygB/rgAoOGx/TSeX7U0fN/9/Jyu+Dv+EnjtU7wMmRkKLyMNAKfWhI+/6jvHaduxFr5kee8mXuorGwyXveoO8y3dj1/Fj12XUAIda+DntbY9XB4eWMOnA1f0vY15u8R0/tr/KwBne6S0pi8ic5tG+IlESskvEiklv0iklPwikVLyi0RKyS8SqaIu3V3WP4EVP+4Lxgc28C26228N120v/OO9tO3EhpU0Xn2c17OzI+FTNbyIn8ay03yKZiphOjIa+DLS+Ux47MREGa9Hn1rLpxuXJayO3biPb9Fd83R7MJZfvpC2najk16b6Q/w5G20MPy+DK/nYDE+4LE5U8+nGSJhZO7YgfN4bn2ynbfPNjcFY0rLfb/rZaf+kiPyLouQXiZSSXyRSSn6RSCn5RSKl5BeJlJJfJFLmPv2lfs/5zsxOADgy5aYmAOH9pUtrrvZtrvYLUN9majb7ttLd+QCKgqIm/2/cudlOd28rWQeIudq3udovQH2bqVL1TR/7RSKl5BeJVKmTf3uJ75+Zq32bq/0C1LeZKknfSvo7v4iUTqmv/CJSIiVJfjO7wcxeNbODZvaFUvQhxMzazWyPme0ys50l7sv9ZtZjZi9Nua3RzB43swOF//n61sXt2xfNrLNw7naZ2U0l6luLmT1pZq+Y2ctm9keF20t67ki/SnLeiv6x38zSAPYD+CCADgDPAtjq7q8UtSMBZtYOoM3dS14TNrP3AhgC8C13v7hw218B6HP3LxXeOBvc/U/nSN++CGCo1Ds3FzaUWTJ1Z2kAHwHwb1HCc0f69XGU4LyV4sq/GcBBdz/k7lkA3wNwSwn6Mee5+w4Ab1395BYADxS+fgCTL56iC/RtTnD3Lnd/vvD1IIA3dpYu6bkj/SqJUiT/MgBHp3zfgbm15bcDeMzMnjOzbaXuzFk0F7ZNB4DjAJpL2ZmzSNy5uZjesrP0nDl3M9nxerbpD36/aYu7vwvAjQA+W/h4Oyf55O9sc6lcM62dm4vlLDtL/3+lPHcz3fF6tpUi+TsBtEz5fnnhtjnB3TsL//cA+CHm3u7D3W9sklr4n29CWERzaefms+0sjTlw7ubSjtelSP5nAaw1s1VmVgbgVgCPlKAfv8HMqgt/iIGZVQP4EObe7sOPALit8PVtAB4uYV/eZK7s3BzaWRolPndzbsdrdy/6PwA3YfIv/q8B+PNS9CHQrwsA7C78e7nUfQPwXUx+DMxh8m8jnwawAMATAA4A+BmAxjnUt28D2APgRUwm2pIS9W0LJj/SvwhgV+HfTaU+d6RfJTlvGuEnEin9wU8kUkp+kUgp+UUipeQXiZSSXyRSSn6RSCn5RSKl5BeJ1P8DGjw636hDKlIAAAAASUVORK5CYII=\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"name": "stdout",
"output_type": "stream",
"text": [
"Epoch: 100, iteration: 549, Discriminator Loss:1.29050672054, Generator Loss: 0.897918999195\n"
]
},
{
"data": {
"image/png": "iVBORw0KGgoAAAANSUhEUgAAAP8AAAD8CAYAAAC4nHJkAAAABHNCSVQICAgIfAhkiAAAAAlwSFlzAAALEgAACxIB0t1+/AAAADl0RVh0U29mdHdhcmUAbWF0cGxvdGxpYiB2ZXJzaW9uIDIuMi4yLCBodHRwOi8vbWF0cGxvdGxpYi5vcmcvhp/UCwAAEChJREFUeJzt3X+MHPV5x/HPcz/8AxunNlBzsV0M1Inq0BScK5DURaQGCjStnTb8cKXUoRTzR6gSCaWhpihupVaoJRCiUIQpbkxF+FGBi5VQAli0lECAswsGQxrAMbWNf1ETYzDYd3tP/7ghOuDmO8fu7M6en/dLOt3ePDs7z639udnd78x8zd0FIJ6OqhsAUA3CDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gqK5WbmycjfcJmtTKTQKhvKO3dNAP2Gju21D4zexsSddL6pT0T+5+der+EzRJp9iCRjYJIOEJXzvq+9b9st/MOiXdIOkcSXMlLTazufU+HoDWauQ9/8mSXnL3Te5+UNIdkhaW0xaAZmsk/DMkbRn289Zs2XuY2VIz6zOzvn4daGBzAMrU9E/73X2Fu/e6e2+3xjd7cwBGqZHwb5M0a9jPM7NlAMaARsL/lKQ5ZnasmY2TdKGkNeW0BaDZ6h7qc/cBM7tM0g81NNS30t03ltYZgKZqaJzf3e+TdF9JvQBoIQ7vBYIi/EBQhB8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBEX4gqJZeurtRb55/am5t8l0/bmEnwNjHnh8IivADQRF+ICjCDwRF+IGgCD8QFOEHghpT4/yM5QPlYc8PBEX4gaAIPxAU4QeCIvxAUIQfCIrwA0E1NM5vZpsl7ZNUkzTg7r1lNNUUZum6e2v6iKajM7/mg+l1+TdpqjIO8vmsu79WwuMAaCFe9gNBNRp+l/SAma0zs6VlNASgNRp92T/f3beZ2S9LetDMfuLujwy/Q/ZHYakkTdBhDW4OQFka2vO7+7bs+y5JqyWdPMJ9Vrh7r7v3dmt8I5sDUKK6w29mk8zs8HdvSzpL0nNlNQaguRp52T9d0mobGkLrkvQ9d7+/lK4ANF3d4Xf3TZJ+o8RemmsMjxnXPjsvWf+TG9fk1m7afFpy3at+9QfJ+rc+cWKy/rOr0r0d/rP8Wtcf7U6ue8Sl7yTrA1u2JutIY6gPCIrwA0ERfiAowg8ERfiBoAg/EJR5C4fAptg0P8UWtGx7H0bn1KnJeu311+t+bOsel972zJ5k/ab/vC1Zn9k1ObfW77Xkut2WOOVW0t7Bt5P1j3RMTNbfHMwfrnvsncOT6+6p5f9ekrTyoj9I1u2xZxLFQ/MU7yd8rd7wPQW/3BD2/EBQhB8IivADQRF+ICjCDwRF+IGgCD8Q1JiaoruZGhnHL3Laur3J+rIjnyx4hPR4d0rROP7+wYPJ+gRL/xf5u9c+nqwvnbo+tzZv/M+T637mR4uT9V+6cn+yPvX3EsUxOo5fJvb8QFCEHwiK8ANBEX4gKMIPBEX4gaAIPxAU4/wlOHDObybry468uanbryWmuh5Q+nz+8+Z/IVkf3P1/yfruP/5ksv615c/n1tYdSK6q2o709G4HH0sf/zCwYEpurWvtuvTGA2DPDwRF+IGgCD8QFOEHgiL8QFCEHwiK8ANBFY7zm9lKSZ+TtMvdT8iWTZN0p6TZkjZLOt/dm3dCfAtsvjM9Xu2bJuXWVi++Nrnu67X0ueNTO9Pj2alxfEma9w+X5dZ6vpO+VoDXtiTrRee9T3mlP1n/5Io/z609dUn6efvJeTekH/um/MeWpL3H5s+XcERyzRhGs+f/rqSz37fsCklr3X2OpLXZzwDGkMLwu/sjkva8b/FCSauy26skLSq5LwBNVu97/unuvj27vUPS9JL6AdAiDX/g50OT/eW+MTSzpWbWZ2Z9/So4mBtAy9Qb/p1m1iNJ2fddeXd09xXu3uvuvd0aX+fmAJSt3vCvkbQku71E0r3ltAOgVQrDb2a3S3pc0sfNbKuZXSzpaklnmtmLks7IfgYwhhSO87t73sXTF5TcS6VmX7Ch7nV/tOj4ZP3iKVuT9Z/2v5Wsd+Z/pCJJOvpbj+XWvMnz0E/c+Gqy/vVv9+XWagW/1+uD7yTr4/Ylyzryv9/IrXHVfo7wA8Ii/EBQhB8IivADQRF+ICjCDwTV0kt3f/TX39Jfr8m/ZPI3jvtU3Y9t49NHD/qBBg8tTgyZ3Xtu+tLdSx59JVk/Z/XlyfrHrtqYrEuJMa+iobwGhwK3XDg7Wf/C5Htyax1KTx9+QOlTmWd8f0eyXntxU7IeHXt+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiqpeP8rz47qaGx/JSGx/ELdCSOI6ht2ZZcd+Exn07Wj7ogvW3rLPgb3ZEYLx9MT9Hd6Cm9X7ro/mR9cseEuh/78bcnJuuF4/ipYxga/L0PBez5gaAIPxAU4QeCIvxAUIQfCIrwA0ERfiColo7zj2WD76QvI92IaQ+8nKzXfr63advuOvaYZP3Vc2Yk6xd95Jpkveb54/wXbDorue7+309P/y0VPC+M5Sex5weCIvxAUIQfCIrwA0ERfiAowg8ERfiBoArH+c1spaTPSdrl7idky5ZLukTS7uxuy9z9vsKtmcm6x+WWvf9gcceHoNru3cV3qlfBdfkHt+9M1q+5/PvJ+uSO9HwJKX85M/1fZtneU+p+bBQbzZ7/u5LOHmH5de5+YvZVHHwAbaUw/O7+iKQ9LegFQAs18p7/MjPbYGYrzWxqaR0BaIl6w3+jpOMlnShpu6Rv5t3RzJaaWZ+Z9fV7846PB/Dh1BV+d9/p7jV3H5R0s6STE/dd4e697t7bbfVfzBFAueoKv5n1DPvx85KeK6cdAK0ymqG+2yWdLulIM9sq6RuSTjezEyW5pM2SLm1ijwCaoDD87r54hMW31LU197Bj+ZUpOKd901UnJesLJv64YAOJOQMkbR14M7f2V792Rvqh+YyoqTjCDwiK8ANBEX4gKMIPBEX4gaAIPxAUl+4+xHXNmpms/8Ufrm5RJx/U/+m5yXrnw+tb1ElM7PmBoAg/EBThB4Ii/EBQhB8IivADQRF+ICjG+Q8Fictz11alT+md0f16sn7A09Nkj7fuZH3R33wtt3bEw48n10VzsecHgiL8QFCEHwiK8ANBEX4gKMIPBEX4gaDaapw/NX23dAhP4V0wjXbR5bc7Jk/OrV1//F3JdT/amb70dtGluf/j7fT+Y++c/NoRBVtGc7HnB4Ii/EBQhB8IivADQRF+ICjCDwRF+IGgCsf5zWyWpFslTZfkkla4+/VmNk3SnZJmS9os6Xx3T58cXuCQHccvUjCOX+TlZSfk1jr178l1J1r62Iq735qarP/jV85P1o+7PzHFd9HxDUUafN6iG82ef0DS5e4+V9Kpkr5sZnMlXSFprbvPkbQ2+xnAGFEYfnff7u7rs9v7JL0gaYakhZJWZXdbJWlRs5oEUL4P9Z7fzGZLOknSE5Kmu/v2r
LRDQ28LAIwRow6/mU2WdLekr7r7G8Nr7u4a+jxgpPWWmlmfmfX160BDzQIoz6jCb2bdGgr+be5+T7Z4p5n1ZPUeSbtGWtfdV7h7r7v3dmt8GT0DKEFh+M3MJN0i6QV3v3ZYaY2kJdntJZLuLb89AM0ymlN6f0vSFyU9a2ZPZ8uWSbpa0l1mdrGkVySlx3xQt613fyJZX3/Kdbm133nmouS635n7vWT9zInbk/Xl89KX7p71w/z9S9eMnuS6A1u2JutFUqeIhx1WHqYw/O7+qKS8AdkF5bYDoFU4wg8IivADQRF+ICjCDwRF+IGgCD8QVFtdujuqfReemqzfOu/byfrkjgm5tSdP+tfkugcKzortffLPkvWjn0qPl1tH/mm7jY7jF6lyLN+60tHygYEWdZKPPT8QFOEHgiL8QFCEHwiK8ANBEX4gKMIPBMU4f6bjsMOS9cH9+5u27dcWpR/7U+PTl9duxD/vnZ2sH3XDxGS9+6G+ZD3qxbXbYRy/CHt+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiKcf5MM8fxU9ePl6SpP5iUrNd+ezBZ77T8v+H9Xkuu+29fPD1Z7163LllXR2e6PpjePqrDnh8IivADQRF+ICjCDwRF+IGgCD8QFOEHgioc5zezWZJulTRdQ6dnr3D3681suaRLJO3O7rrM3e9rVqOS9OKt83JrH/vTDcl1qzy/2mvpse790/OvbS+lx/El6Y59U3Nrq8773eS6vmFjsl7I08cgoH2N5iCfAUmXu/t6Mztc0jozezCrXefu1zSvPQDNUhh+d98uaXt2e5+ZvSBpRrMbA9BcH+o9v5nNlnSSpCeyRZeZ2QYzW2lmI772NLOlZtZnZn39OtBQswDKM+rwm9lkSXdL+qq7vyHpRknHSzpRQ68MvjnSeu6+wt173b23W+NLaBlAGUYVfjPr1lDwb3P3eyTJ3Xe6e83dByXdLOnk5rUJoGyF4Tczk3SLpBfc/dphy3uG3e3zkp4rvz0AzWLu6Ysrm9l8Sf8l6VlJ747rLJO0WEMv+V3SZkmXZh8O5ppi0/wUW9BgyxWxxHBcwXPYbF09R+fWBnbuzq1J4pTbOm258jPJ+qy/faxFnbzXE75Wb/ie9NhxZjSf9j8qaaQHa+qYPoDm4gg/ICjCDwRF+IGgCD8QFOEHgiL8QFBcunu0mjiWP/DQryTrXWf8b3r97TvKbAeSOqdMSdarGscvE3t+ICjCDwRF+IGgCD8QFOEHgiL8QFCEHwiq8Hz+UjdmtlvSK8MWHSnptZY18OG0a2/t2pdEb/Uqs7dj3P2o0dyxpeH/wMbN+ty9t7IGEtq1t3btS6K3elXVGy/7gaAIPxBU1eFfUfH2U9q1t3btS6K3elXSW6Xv+QFUp+o9P4CKVBJ+MzvbzP7HzF4ysyuq6CGPmW02s2fN7Gkz66u4l5VmtsvMnhu2bJqZPWhmL2bf86fobX1vy81sW/bcPW1m51bU2ywze9jMnjezjWb2lWx5pc9doq9KnreWv+w3s05JP5V0pqStkp6StNjdn29pIznMbLOkXnevfEzYzE6T9KakW939hGzZ30va4+5XZ384p7r719ukt+WS3qx65uZsQpme4TNLS1ok6Uuq8LlL9HW+KnjeqtjznyzpJXff5O4HJd0haWEFfbQ9d39E0p73LV4oaVV2e5WG/vO0XE5vbcHdt7v7+uz2Pknvzixd6XOX6KsSVYR/hqQtw37eqvaa8tslPWBm68xsadXNjGD6sJmRdkiaXmUzIyicubmV3jezdNs8d/XMeF02PvD7oPnuPk/SOZK+nL28bUs+9J6tnYZrRjVzc6uMMLP0L1T53NU743XZqgj/Nkmzhv08M1vWFtx9W/Z9l6TVar/Zh3e+O0lq9n1Xxf38QjvN3DzSzNJqg+eunWa8riL8T0maY2bHmtk4SRdKWlNBHx9gZpOyD2JkZpMknaX2m314jaQl2e0lku6tsJf3aJeZm/NmllbFz13bzXjt7i3/knSuhj7xf1nSlVX0kNPXcZKeyb42Vt2bpNs19DKwX0OfjVws6QhJayW9KOkhSdPaqLd/0dBszhs0FLSeinqbr6GX9BskPZ19nVv1c5foq5LnjSP8gKD4wA8IivADQRF+ICjCDwRF+IGgCD8QFOEHgiL8QFD/D7f8994nW5FLAAAAAElFTkSuQmCC\n",
"text/plain": [
"<Figure size 432x288 with 1 Axes>"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"with tf.Session() as session:\n",
"    \n",
"    #initialize all variables\n",
"    session.run(init)\n",
"    \n",
"    #for each epoch\n",
"    for epoch in range(num_epochs):\n",
"        \n",
"        #select the number of batches\n",
"        num_batches = data.train.num_examples // batch_size\n",
"        \n",
"        #for each batch\n",
"        for i in range(num_batches):\n",
"            \n",
"            #get the batch of data according to the batch size\n",
"            batch = data.train.next_batch(batch_size)\n",
"            \n",
"            #reshape the data and rescale it from [0, 1] to [-1, 1]\n",
"            batch_images = batch[0].reshape((batch_size,784))\n",
"            batch_images = batch_images * 2 - 1\n",
"            \n",
"            #sample batch noise\n",
"            batch_noise = np.random.uniform(-1,1,size=(batch_size,100))\n",
"            \n",
"            #define the feed dictionary with input x as batch_images and noise z as batch_noise\n",
"            feed_dict = {x: batch_images, z: batch_noise}\n",
"            \n",
"            #train the discriminator and the generator\n",
"            _ = session.run(D_optimizer,feed_dict = feed_dict)\n",
"            _ = session.run(G_optimizer,feed_dict = feed_dict)\n",
"            \n",
"            #compute the loss of the discriminator and the generator\n",
"            discriminator_loss = D_loss.eval(feed_dict)\n",
"            generator_loss = G_loss.eval(feed_dict)\n",
"        \n",
"        #on every 100th epoch, feed the noise to the generator and plot a generated image\n",
"        if epoch%100==0:\n",
"            print(\"Epoch: {}, iteration: {}, Discriminator Loss:{}, Generator Loss: {}\".format(epoch,i,discriminator_loss,generator_loss))\n",
"            \n",
"            #generate a fake image\n",
"            _fake_x = fake_x.eval(feed_dict)\n",
"            \n",
"            #plot the fake image generated by the generator\n",
"            plt.imshow(_fake_x[0].reshape(28,28))\n",
"            plt.show()"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"Now that we have learned how to generate images using GANs, in the next section we will learn about DCGAN, which uses convolutional networks instead of feedforward networks in the generator and the discriminator."
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.12"
}
},
"nbformat": 4,
"nbformat_minor": 2
}