{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "import srez_input_y\n",
    "import srez_model_sia\n",
    "import operator\n",
    "import os.path\n",
    "import random\n",
    "import numpy as np\n",
    "import numpy.random\n",
    "import pdb\n",
    "import random as rn\n",
    "import scipy.misc\n",
    "import os.path  # NOTE(review): duplicate of the import above; kept intentionally\n",
    "import tensorflow as tf\n",
    "import sys\n",
    "print(tf.__version__)\n",
    "\n",
    "FLAGS = tf.app.flags.FLAGS\n",
    "\n",
    "# Configuration (alphabetically)\n",
    "tf.app.flags.DEFINE_integer('batch_size', 500, \"Number of samples per batch.\")\n",
    "tf.app.flags.DEFINE_string('checkpoint_dir', \"../checkpoint/\", \"Output folder where checkpoints are dumped.\")\n",
    "tf.app.flags.DEFINE_integer('checkpoint_period', 10000, \"Number of batches in between checkpoints\")\n",
    "tf.app.flags.DEFINE_string('dataset', 'dataset', \"Path to the dataset directory.\")\n",
    "tf.app.flags.DEFINE_float('epsilon', 1e-8, \"Fuzz term to avoid numerical instability\")\n",
    "tf.app.flags.DEFINE_float('wei_lab', 1, \"Weight for label information\")\n",
    "tf.app.flags.DEFINE_string('run', 'demo', \"Which operation to run. [demo|train]\")\n",
    "tf.app.flags.DEFINE_float('gene_l1_factor', 0.7, \"Multiplier for generator L1 loss term\")\n",
    "tf.app.flags.DEFINE_float('learning_beta1', 0.5, \"Beta1 parameter used for AdamOptimizer\")\n",
    "tf.app.flags.DEFINE_float('learning_rate_start', 0.00020, \"Starting learning rate used for AdamOptimizer\")\n",
    "tf.app.flags.DEFINE_integer('learning_rate_half_life', 5000, \"Number of batches until learning rate is halved\")\n",
    "# NOTE(review): the help strings for 'LargeG', 'num_ID', 'init_layer_size' and\n",
    "# 'train_dir' were copy-paste errors; the new texts are inferred from the flag\n",
    "# names and their usage below -- confirm against the training code.\n",
    "tf.app.flags.DEFINE_bool('LargeG', False, \"Use the larger generator network variant.\")\n",
    "tf.app.flags.DEFINE_integer('num_ID', 500, \"Number of identities (classes) used for testing.\")\n",
    "tf.app.flags.DEFINE_integer('sample_size', 64, \"Image sample size in pixels. Range [64,128]\")\n",
    "tf.app.flags.DEFINE_integer('summary_period', 1000, \"Number of batches between summary data dumps\")\n",
    "tf.app.flags.DEFINE_integer('random_seed', 0, \"Seed used to initialize rng.\")\n",
    "tf.app.flags.DEFINE_integer('test_vectors', 16,  \"\"\"Number of features to use for testing\"\"\")\n",
    "tf.app.flags.DEFINE_string('train_dir', 'train', \"Dir where training summaries are written.\")\n",
    "tf.app.flags.DEFINE_string('test_dir', 'test', \"Dir that saves super-resolved images.\")\n",
    "tf.app.flags.DEFINE_string('HRLR_dir', 'LR_HR', \"Dir that saves HR-LR images.\")\n",
    "tf.app.flags.DEFINE_string('training_img_dir', '../CASIA/CASIA-WebFace/', \"training image dir.\")\n",
    "tf.app.flags.DEFINE_string('testing_img_dir', '../CASIA/CASIA-WebFace/', \"testing image dir.\")\n",
    "tf.app.flags.DEFINE_string('txt', 'val-all.txt', \"File list for test set.\")\n",
    "tf.app.flags.DEFINE_integer('train_time', 2000,  \"Time in minutes to train the model\")\n",
    "tf.app.flags.DEFINE_integer('init_layer_size', 512, \"Number of units in the initial layer.\")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def setup_tensorflow():\n",
    "    \"\"\"Create the TF session, seed every RNG deterministically and open a\n",
    "    summary writer for the training directory.\n",
    "\n",
    "    Returns:\n",
    "        Tuple of (session, summary_writer).\n",
    "    \"\"\"\n",
    "    seed = FLAGS.random_seed\n",
    "\n",
    "    # Session that claims GPU memory on demand rather than all up front.\n",
    "    sess_config = tf.ConfigProto()\n",
    "    sess_config.gpu_options.allow_growth = True\n",
    "    session = tf.Session(config=sess_config)\n",
    "\n",
    "    # Seed the TF graph, Python's random module and NumPy with the same value.\n",
    "    with session.graph.as_default():\n",
    "        tf.set_random_seed(seed)\n",
    "    random.seed(seed)\n",
    "    np.random.seed(seed)\n",
    "\n",
    "    writer = tf.summary.FileWriter(FLAGS.train_dir, session.graph)\n",
    "    return session, writer"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "def demo():\n",
    "    \"\"\"Restore the super-resolution model from a checkpoint and run it over\n",
    "    the whole test list, flushing output images in fixed batches of 100.\"\"\"\n",
    "\n",
    "    # Setup global tensorflow state\n",
    "    sess, summary_writer = setup_tensorflow()\n",
    "\n",
    "    # Prepare directories\n",
    "    print(\"Open list file %s to read images\"%(FLAGS.testing_img_dir+FLAGS.txt))\n",
    "    # Count the lines in the test list so the input pipeline can load everything at once.\n",
    "    with open(FLAGS.testing_img_dir+FLAGS.txt) as fn:\n",
    "        dx = fn.readlines()\t\n",
    "    len1 = len(dx)\n",
    "\n",
    "    # Temporarily set batch_size to the full test-set size so setup_inputs\n",
    "    # yields one batch covering every listed image, then reset it to 100.\n",
    "    FLAGS.batch_size=len1\n",
    "    test_features,  test_labels, _,fn = srez_input_y.setup_inputs(sess,  FLAGS.testing_img_dir+FLAGS.txt, image_size=32, crop_size=128, isTest=True) # image_size 128 for CASIA , 97 for LFW\n",
    "    FLAGS.batch_size=100\n",
    "    \n",
    "    # NOTE(review): `fn` is reused -- it was the open file handle above, then\n",
    "    # the filename tensor, and is now the evaluated filename array.\n",
    "    test_feature, test_label ,fn= sess.run([test_features, test_labels,fn])\n",
    "    s1 = test_feature.shape\n",
    "    print(s1)\n",
    "    tid = FLAGS.num_ID  # NOTE(review): tid is never used below\n",
    "    \n",
    "    # HR/LR spatial sizes fed to the model placeholders below.\n",
    "    HRsize = 32\n",
    "    LRsize = 8\n",
    "            \n",
    "    # Create and initialize model\n",
    "    # Placeholders for the LR input (8x8x3) and HR label (32x32x3) images.\n",
    "    fea1 = tf.placeholder(tf.float32, shape=[None, LRsize,LRsize,3])\n",
    "    lab1 = tf.placeholder(tf.float32,shape=[None, HRsize,HRsize,3])\n",
    "    [gene_minput, gene_moutput,\n",
    "     gene_output, gene_var_list,\n",
    "    disc_real_output, disc_fake_output, disc_var_list, \n",
    "    gene_minput2, gene_moutput2,\n",
    "     gene_output2, gene_var_list2,\n",
    "    disc_real_output2, disc_fake_output2, disc_var_list2, _, _] = srez_model_sia.create_model(sess, fea1, lab1, fea1, lab1, False)\n",
    "\n",
    "    # Restore variables from checkpoint\n",
    "    # NOTE(review): 'checkpoint_new.txt' is an unusual name for a TF\n",
    "    # checkpoint prefix -- confirm it matches what training saved.\n",
    "    saver = tf.train.Saver()\n",
    "    filename = 'checkpoint_new.txt'\n",
    "    filename = os.path.join(FLAGS.checkpoint_dir, filename)\n",
    "    saver.restore(sess, filename)\n",
    "\n",
    "    # Execute demo\n",
    "    gout = []\n",
    "    fnn=[]\n",
    "    HR=[]\n",
    "    LR=[]\n",
    "    didx=[]  # NOTE(review): didx is never used below\n",
    "    bs = 100\n",
    "    channels=3\n",
    "    \n",
    "    # Run inference in chunks of bs samples; the final partial chunk is padded\n",
    "    # with index 0, so sample 0 is re-saved once per padding slot.\n",
    "    for k in range(0,s1[0]+bs,bs):\n",
    "        if k+bs>s1[0]:\n",
    "             st=range(k,s1[0])\n",
    "             st=np.asarray(st)\n",
    "             st=np.concatenate((st,np.zeros(bs-len(st)) ), axis=0 )\n",
    "             st=np.asarray(st,dtype=int)\n",
    "        else:\n",
    "            st = range(k, k+bs)\n",
    "         #~ pdb.set_trace()\n",
    "        aa=sess.run(gene_moutput, feed_dict = {gene_minput: test_feature[st,:,:,:]})\n",
    "        fnn.append(fn[st])\n",
    "        HR.append(test_label[st,:,:,:])\n",
    "        LR.append(test_feature[st,:,:,:])\n",
    "            \n",
    "        gout.append(np.reshape(np.asarray(aa), [-1, HRsize,HRsize, 3]))\n",
    "            \n",
    "        # NOTE(review): k advances in steps of bs, so k%bs==0 always holds and\n",
    "        # the accumulated results are flushed on every iteration after the first.\n",
    "        if (k>1) & ((k%bs==0) | ((k+1)==s1[0])):\n",
    "            output(sess,gout, LR, HR, fnn, HRsize, LRsize)\n",
    "            gout=[]\n",
    "            LR=[]\n",
    "            HR=[]\n",
    "            fnn=[]\n",
    "                \n",
    "            \n",
    "            \n",
    "        # Simple textual progress bar: one '|' per ~1% of the samples.\n",
    "        if (k%(s1[0]/100)<1):\n",
    "            sys.stdout.write('|')\n",
    "            sys.stdout.flush()\n",
    "\n",
    "    \n",
    "def output(sess, gout, test_feature, test_label, fn, HRsize, LRsize):\n",
    "    \"\"\"Save super-resolved results plus nearest-neighbor, bicubic and HR\n",
    "    reference images for one flushed batch.\n",
    "\n",
    "    Args:\n",
    "        sess: live TF session used to run the resize ops.\n",
    "        gout: list of generator output batches (stacked to [-1, HRsize, HRsize, 3]).\n",
    "        test_feature: list of LR input batches (stacked to [-1, LRsize, LRsize, 3]).\n",
    "        test_label: list of HR ground-truth batches.\n",
    "        fn: filenames matching each sample, flattened to 1-D.\n",
    "        HRsize: spatial size of the HR images.\n",
    "        LRsize: spatial size of the LR inputs.\n",
    "    \"\"\"\n",
    "    size = HRsize, HRsize\n",
    "    test_feature = np.reshape(np.asarray(test_feature), [-1, LRsize, LRsize, 3])\n",
    "    test_label = np.reshape(np.asarray(test_label), [-1, HRsize, HRsize, 3])\n",
    "    clipped = np.reshape(np.asarray(gout), [-1, HRsize, HRsize, 3])\n",
    "    fn = np.reshape(fn, [-1])\n",
    "\n",
    "    # NOTE(review): these resize ops are re-created on every call, so the graph\n",
    "    # grows each time output() runs; building them once would be cheaper.\n",
    "    nn_op = tf.clip_by_value(tf.image.resize_nearest_neighbor(test_feature, size), 0.0, 1.0)\n",
    "    bicubic_op = tf.clip_by_value(tf.image.resize_bicubic(test_feature, size), 0.0, 1.0)\n",
    "    NN, bicubic = sess.run([nn_op, bicubic_op])\n",
    "\n",
    "    # Clipping and concatenation need no TF ops; do them directly in NumPy.\n",
    "    hr = np.clip(test_label, 0.0, 1.0)\n",
    "    # Side-by-side panel: nearest-neighbor, bicubic, super-resolved, ground truth.\n",
    "    combo = np.concatenate([NN, bicubic, clipped, hr], axis=2).astype(np.float32)\n",
    "\n",
    "    testdir = FLAGS.checkpoint_dir + FLAGS.test_dir\n",
    "    for d in (FLAGS.HRLR_dir, testdir):\n",
    "        if not os.path.exists(d):\n",
    "            os.makedirs(d)\n",
    "\n",
    "    for i in range(len(fn)):\n",
    "        # Build '<class-dir>-<file>' from the last two path components;\n",
    "        # presumably the quotes come from the bytes repr of the filename.\n",
    "        fx = str(fn[i]).split('/')\n",
    "        t1 = len(fx)\n",
    "        fx = fx[t1 - 2] + \"-\" + fx[t1 - 1].replace(\"'\", \"\")\n",
    "        filename = '%s.jpg' % (fx)\n",
    "\n",
    "        recFN = os.path.join(testdir, filename)\n",
    "        scipy.misc.toimage(clipped[i, :, :, 0:3], cmin=0., cmax=1.).save(recFN)\n",
    "        BIFN = os.path.join(FLAGS.HRLR_dir, 'Bicubic-%s.png' % (fx))\n",
    "        LRFN = os.path.join(FLAGS.HRLR_dir, 'LR-%s.png' % (fx))\n",
    "        HRFN = os.path.join(FLAGS.HRLR_dir, 'HR-%s.png' % (fx))\n",
    "        Combo2 = os.path.join(FLAGS.HRLR_dir, 'Combo-%s.png' % (fx))\n",
    "        scipy.misc.toimage(bicubic[i, :, :, :], cmin=0., cmax=1.).save(BIFN)\n",
    "        scipy.misc.toimage(NN[i, :, :, :], cmin=0., cmax=1.).save(LRFN)\n",
    "        scipy.misc.toimage(hr[i, :, :, :], cmin=0., cmax=1.).save(HRFN)\n",
    "        scipy.misc.toimage(combo[i, :, :, :], cmin=0., cmax=1.).save(Combo2)\n",
    "    # Fixed typo ('imagees') and report the directory actually written to.\n",
    "    print(\"Saved %d images to %s!!\" % (len(fn), testdir))\n",
    "\n",
    "\n",
    "def main(argv=None):\n",
    "    \"\"\"Entry point invoked by tf.app.run(); command-line args are unused.\"\"\"\n",
    "    demo()\n",
    "\n",
    "if __name__ == '__main__':\n",
    "    tf.app.run()"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.6.7"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}
