# Minimal code to successfully export tf model
import pathlib
import time
import os
import tensorflow as tf
from tensorflow import lite

# NOTE(review): the SavedModel in ./models/1 was originally built from the
# checkpoint ./checkpoints/model.ckpt with tf.Session + SavedModelBuilder
# (tagged TRAINING/SERVING); see repository history for that export script.


def convert_to_quantized_tflite(saved_model_dir='./models/1',
                                output_dir='./tflite',
                                model_name='quantized_mobilenet_0.75.tflite'):
    """Convert a TF SavedModel to a post-training-quantized TFLite model.

    Args:
        saved_model_dir: Directory containing the SavedModel to convert.
        output_dir: Directory the .tflite file is written to (created if
            missing).
        model_name: File name of the resulting flatbuffer.

    Returns:
        pathlib.Path of the written .tflite file.
    """
    converter = lite.TFLiteConverter.from_saved_model(saved_model_dir)
    # `converter.optimizations = [tf.lite.Optimize.OPTIMIZE_FOR_SIZE]` did
    # NOT work in this TF version; the (deprecated) flag below is the
    # working equivalent for weight quantization.
    # See https://github.com/tensorflow/tensorflow/issues/26413
    converter.post_training_quantize = True
    tflite_quant_model = converter.convert()

    tflite_models_dir = pathlib.Path(output_dir)
    tflite_models_dir.mkdir(exist_ok=True, parents=True)
    tflite_model_file = tflite_models_dir / model_name
    tflite_model_file.write_bytes(tflite_quant_model)
    return tflite_model_file


if __name__ == "__main__":
    convert_to_quantized_tflite()