#! /usr/bin/env python3

import ast
import os
import subprocess as sp
from itertools import product
from os import path as osp

from torch_geometric.datasets import *

from analysis import parseTimingTable, FakeFile

# Dataset identifiers to benchmark.  Entries are either a dataset class name
# ("Reddit") or "<Group>.<subset>" strings; the commented entries are toggled
# on/off between experiment runs.
MaskedData = [
   # "Reddit",
   # "FacebookPagePage"
]

# Kernel-name substrings used to bucket GPU time when parsing the nsys timing
# table (see parseTimingTable); each comma-separated group is one bucket.
function_keys = ["cusparse,spmm,sddmm", "sgemm", "elementwise", "reduce", "loss", "softmax", "sort,agent,select"]

# Alternative dataset groups, enumerated from the on-disk data directories.
# MaskedData += ["GNNAdvData."+x for x in os.listdir("data/GNNAdvData/processed")]
# MaskedData += ["Twitch."+x for x in os.listdir("data/Twitch")]
# MaskedData += ["Planetoid."+x for x in os.listdir("data/Planetoid")]
# MaskedData += ["SNAPDataset."+x for x in os.listdir("data/SNAPDataset")]
# MaskedData += ["Coauthor."+x for x in os.listdir("data/Coauthor")]
MaskedData += ["PPI."+str(x) for x in range(20)]

# nsys command-line prefix for CUDA profiling; the caller appends the report
# output path right after the trailing "-o".
nsys = ["/home/limingyi/spack/opt/spack/linux-ubuntu20.04-cascadelake/gcc-10.3.0/cuda-11.3.1-cgdsuufdkmtyfpl4xewki6yiugryip7m/bin/nsys",
      "profile",
      "-t",
      "cuda",
      "--stats=true",
      "--force-overwrite=true",
      "-o"]
# Machine-specific interpreter and workspace paths used to launch the
# benchmark scripts as subprocesses.
python_intpr = "/home/limingyi/.local/virtualenv/GNN-env/bin/python"
workspace = "/home/limingyi/gnn-workspace"

def download_all():
   """Instantiate every dataset named in MaskedData so its files download.

   The dataset class is looked up by name in this module's globals (populated
   by the star-import of torch_geometric.datasets).  This replaces the old
   eval() of a constructed source string, which was both an injection hazard
   and a syntax error for entries containing '.' (e.g. "PPI.0").
   WebKB and WikipediaNetwork require an explicit subset argument, so each of
   their subsets is instantiated in turn.
   """
   for sname in MaskedData:
      print("==============working on {}==============".format(sname))
      dataset_cls = globals()[sname]
      root = 'data/{}'.format(sname)
      if sname == 'WebKB':
         for subset in ['cornell', 'texas', 'wisconsin']:
            dataset_cls(root, subset)
      elif sname == 'WikipediaNetwork':
         for subset in ['chameleon', 'squirrel']:
            dataset_cls(root, subset)
      else:
         dataset_cls(root)


def run_all_gnn(model, run_types, run_mode, dataset, profile=False):
   """Benchmark `model` over every (run_type, dataset) combination.

   Args:
      model: GNN architecture, 'gcn' or 'gat' (selects tests/<model>.py).
      run_types: framework backends to run, e.g. ['my', 'dgl', 'pyg'].
      run_mode: 'train', 'test' or 'both', forwarded to the test script.
      dataset: iterable of dataset identifier strings.
      profile: when True, wrap each run in `nsys` and parse the per-kernel
         timing table from the script's output instead of a single time.

   Returns:
      {run_type: {dataset: result}} where result is a float (script's e2e
      time) or, with profile=True, the list from parseTimingTable.  A failed
      profiled run yields a row of -1.0 sentinels; a failed unprofiled run
      leaves the dataset key absent (original behavior, kept).
   """
   assert model in ['gcn', 'gat']
   assert run_mode in ['train', 'test', 'both']
   ret = {}
   for t in run_types:
      type_res = {}
      for data in dataset:
         print('running: {}-{}'.format(data, t))
         failed = False
         # Equivalent shell invocation:
         # nsys profile -t cuda -o workspace/nsys --stats=true
         # ~/.local/virtualenv/GNN-env/bin/python GNNSwitch/gcn.py
         # --dataset=Planetoid.Cora --run_type=dgl --run_mode=train > workspace/out_Cora.txt
         exec_cmd = [python_intpr, osp.join(workspace, 'GNNSwitch', 'tests', f'{model}.py'),
                     '--dataset', data, '--run_type', t, '--run_mode', run_mode,
                     "--hidden_size=32", "--num_layer=3", "--layer_heads=4,2,1"]
         if profile:
            exec_cmd = [*nsys, f"workspace/nsys/{data}"] + exec_cmd
         try:
            out = sp.check_output(exec_cmd)
         except sp.CalledProcessError:
            failed = True
            print('execution {}-{} failed'.format(data, t))

         if profile:
            if not failed:
               out = str(out, encoding='utf-8').strip().split("\n")
               # First output line is the end-to-end time; the remaining
               # lines are the nsys timing table.
               print("time = {}".format(out[0]))
               fake_file = FakeFile([x+'\n' for x in out[1:]])
               type_res[data] = parseTimingTable(fake_file, function_keys, 'sum')
            else:
               # BUG FIX: was [-1.0]*function_keys (list * list -> TypeError);
               # the sentinel row needs one -1.0 entry per function key.
               type_res[data] = [-1.0]*len(function_keys)
         else:
            if not failed:
               print("...OK")
               out = str(out, encoding='utf-8').strip()
               type_res[data] = float(out)
      ret[t] = type_res

   return ret

def run_all_planetoid_on_kernel(run_types, dataset, tune_param):
   """Time the standalone SpMM kernel test for each (kernel, dataset) pair.

   Args:
      run_types: kernel variants, each passed as --kernel to spmm_test.py.
      dataset: iterable of dataset identifier strings.
      tune_param: tuning string forwarded via --tune (e.g. "256,512,0").

   Returns:
      {kernel: {dataset: elapsed_time}} with the float printed by the test
      script.  Unlike run_all_gnn, a failing subprocess propagates
      CalledProcessError to the caller.
   """
   ret = {}
   for t in run_types:
      type_res = {}
      for data in dataset:
         # fixed typo in the progress message: was 'runging'
         print('running: {}-{}'.format(data, t), end="  ")
         out = sp.check_output(['/home/limingyi/gnn_switch/.pyenv/bin/python',
               '/home/limingyi/gnn_switch/GNNSwitch/tests/spmm_test.py',
               '--dataset', data, '--kernel', t, "--tune", tune_param])

         print("time = {}".format(out))
         type_res[data] = float(out)
      ret[t] = type_res

   return ret

def compile_with_def(thd, bsz, h_thresh):
   # Reconfigure CMake with the given kernel-tuning macros (BSZ and H_TRESH
   # are supplied in units of 256) and rebuild the AgradTest target.
   bsz_def = 256 * h_thresh  # placeholder, replaced below
   bsz_def = 256 * bsz
   thresh_def = 256 * h_thresh
   cmd = (f"cd build && cmake -DTHD={thd} -DBSZ={bsz_def} "
          f"-DH_TRESH={thresh_def} .. && make AgradTest -j3 -s && cd ..")
   os.system(cmd)

      
def tune():
   """Grid-search kernel tuning parameters on the Planetoid datasets.

   Returns:
      {(thd, bsz, thresh): {kernel: {dataset: time}}} suitable for
      analyze_tune (which indexes result['chunk'][dataset]).
   """
   dset = ['Cora', 'PubMed', 'CiteSeer']
   rt = ['chunk']

   thd_range = [256, 512]
   bsz_range = [2]
   thresh_range = list(range(2))  # [0, 1]

   outs = {}

   for thd, bsz, thresh in product(thd_range, bsz_range, thresh_range):
      # compile_with_def(thd, bsz, thresh)
      # BUG FIX: this used to call run_all_gnn(rt, dset, tune_str), which does
      # not match run_all_gnn's (model, run_types, run_mode, dataset)
      # signature and failed immediately.  The kernel-tuning runner has the
      # intended (run_types, dataset, tune_param) interface and produces the
      # {'chunk': {dataset: time}} shape analyze_tune expects.
      tune_param = ",".join([str(thd), str(bsz*thd), str(thresh*thd)])
      res = run_all_planetoid_on_kernel(rt, dset, tune_param)
      outs[(thd, bsz, thresh)] = res

   return outs

def analyze_tune(fname):
   """Read a tune() log dumped with repr() and print, per dataset, the
   (thd, bsz, thresh) key with the smallest recorded 'chunk' kernel time.

   Args:
      fname: path to a file containing the repr of tune()'s return value.
   """
   def best_on(tune_log, dataset):
      # Linear scan for the parameter key with the minimal time entry.
      best = 1 << 30
      best_k = None
      for k, v in tune_log.items():
         time = v['chunk'][dataset]
         if best > time:
            best = time
            best_k = k

      return best_k

   with open(fname, 'r') as f:
      # literal_eval safely parses the repr-dumped dict of tuples/floats;
      # replaces eval(), which would execute arbitrary file contents.
      data = ast.literal_eval(f.read())

   for dataset in ('Cora', 'CiteSeer', 'PubMed'):
      print(best_on(data, dataset))

def pretty_table(tab, profile):
   """Print `tab` (a nested dict) as a markdown table.

   Args:
      tab: with profile=True, {row_key: {col_key: value}} of per-kernel
         timings; otherwise {framework: {graph: value}} of e2e times.
      profile: selects which of the two layouts to render.
   """
   if profile:
      rows = list(tab.keys())
      cols = list(tab[rows[0]].keys())
      ncols = len(cols)
      # BUG FIX: the header used f"|{framework}" with an undefined name
      # (NameError); use the literal column title.  Column counts were also
      # hard-coded to 7/8 -- derive them from the table instead.
      print("|framework" + ("|{}"*ncols).format(*cols) + "|")
      print("|--"*(ncols+1) + "|")
      for r in rows:
         val_list = list(tab[r].values())
         print(f"|{r}" + ("|{:.3}"*ncols).format(*val_list) + "|")
   else:
      # BUG FIX: was .format(tab.keys()) -- one argument for len(tab)
      # placeholders, raising IndexError for more than one framework.
      print('|frameworks|' + ('{}|'*len(tab)).format(*tab.keys()))
      print('|--|'+':--:|'*len(tab))
      frmwks = list(tab.values())
      for grph in frmwks[0].keys():
         print(f'|{grph}|', end="")
         for fwk in frmwks:
            print(f'{fwk[grph]}|', end="")
         print()
   print()

if __name__ == '__main__':
   # --- kernel-parameter tuning (disabled)
   # with open("tune_out.txt", 'w+') as f:
   #     out = tune()
   #     s = str(out)
   #     f.write(s)
   #     print(s)
   # analyze_tune('tune_out.txt')

   # --- analyze load distribution (disabled)
   # framwork = 'dgl'
   # tab = run_all_gnn('gat', [framwork], 'test', MaskedData, profile=True)
   # pretty_table(tab[framwork])
   # tab = run_all_gnn('gat', [framwork], 'train', MaskedData, profile=True)
   # pretty_table(tab[framwork])

   # --- e2e time test: every model x run mode over all frameworks
   prof = False
   fwk = ["my", "dgl", "pyg"]
   for model in ('gat', 'gcn'):
      print(f"{model.upper()} runs")
      for mode in ('train', 'test'):
         tab = run_all_gnn(model, fwk, mode, MaskedData, profile=prof)
         pretty_table(tab, prof)




   