# Silence all Python-level warnings before anything else is imported.
import warnings
warnings.filterwarnings("ignore")

import os
# Uncomment to force CPU-only execution.
# os.environ['CUDA_VISIBLE_DEVICES'] = ""
# Must be set BEFORE TensorFlow is imported to suppress its C++ log spam.
os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"

# NOTE(review): `arg` is never used in this file — looks like an accidental
# IDE auto-import; safe to drop once confirmed against the rest of the project.
from ast import arg

from pre_import import *

import tensorflow as tf
# Keep TF's Python-side logger quiet as well (errors only).
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)

import numpy as np
from anomaly_detection import AnomalyDetectionRunner
from utils import *
# SummaryWriter is only referenced by the (currently commented-out)
# TensorBoard logging further down.
from tensorboardX import SummaryWriter



# Shorthand handles for TensorFlow's v1 command-line flag machinery.
flags = tf.compat.v1.flags
FLAGS = flags.FLAGS

# Size of the learned node embedding; the hidden-layer widths derive from it.
embed_dim = 128
# print("### embed_dim=", embed_dim)

# Data-driven flag registration: (definer, name, default, help) per flag.
# eta weights the attribute term, theta the structure term, alpha balances
# the attribute cost.
_FLAG_SPECS = (
    (flags.DEFINE_integer, 'test_every', 50, 'test_every.'),
    (flags.DEFINE_integer, 'discriminator_out', 0, 'discriminator_out.'),
    (flags.DEFINE_float, 'discriminator_learning_rate', 0.001, 'Initial learning rate.'),
    (flags.DEFINE_float, 'learning_rate', 0.001, 'Initial learning rate.'),
    (flags.DEFINE_integer, 'hidden1', embed_dim * 2, 'Number of units in hidden layer 1.'),
    (flags.DEFINE_integer, 'hidden2', embed_dim, 'Number of units in hidden layer 2.'),
    (flags.DEFINE_float, 'weight_decay', 0., 'Weight for L2 loss on embedding matrix.'),
    (flags.DEFINE_float, 'dropout', 0., 'Dropout rate (1 - keep probability).'),
    (flags.DEFINE_integer, 'features', 1, 'Whether to use features (1) or not (0).'),
    (flags.DEFINE_integer, 'seed', 7, 'seed for fixing the results.'),
    (flags.DEFINE_integer, 'iterations', 25, 'number of iterations.'),
    (flags.DEFINE_float, 'alpha', 0.7, 'balance parameter'),
    (flags.DEFINE_float, 'eta', 18, 'balance parameter'),
    (flags.DEFINE_float, 'theta', 65, 'balance parameter'),
    (flags.DEFINE_string, 'dataset', 'ours', 'dataset'),
)
for _define, _flag_name, _default, _help in _FLAG_SPECS:
    _define(_flag_name, _default, _help)

# Fix both NumPy's and TensorFlow's RNGs so runs are reproducible.
_seed = FLAGS.seed
np.random.seed(_seed)
tf.compat.v1.set_random_seed(_seed)

# Detection method handed to the runner ('lpa' — presumably label
# propagation; confirm against AnomalyDetectionRunner).
detection_method = 'lpa'

dataset_str = FLAGS.dataset
print(f'dataset: {dataset_str}')

# Per-dataset (eta, theta) values used in earlier runs:
#   BlogCatalog 2 40
#   ACM 3 10
#   Flickr 8 90
#   ours 18 65


# Configuration bundle handed to the anomaly-detection runner:
#   data_name        -- dataset key (default 'ours'; see per-dataset notes above)
#   iterations       -- number of training iterations
#   detection_method -- detection method identifier ('lpa')
settings = {
    'data_name': dataset_str,
    'iterations': FLAGS.iterations,
    'detection_method': detection_method,
}

# NOTE(review): an earlier revision created per-dataset results/log
# directories, a tensorboardX SummaryWriter, and a JSON results file
# ('results/<dataset>/<dataset>_<eta>_<theta>_<alpha>_<embed_dim>.json').
# That bookkeeping was commented out; restore it here if persisted
# metrics/TensorBoard logging are needed again.

# Build the runner for the chosen settings and execute the experiment.
runner = AnomalyDetectionRunner(settings)
runner.erun()
