# -*- coding: utf-8 -*-
import json
import os
import glob
import db
import datetime
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
import sys
import io
import numpy
import holiday
from domain import Project,Issue,ChangeLog
from injector import DaoProject,DaoChangeLog,DaoIssue

class Parser:
    """Load JIRA project/issue/changelog JSON exports from disk and persist
    them to a relational database through SQLAlchemy.

    The exports are expected under ``base`` (default ``./data``) with the
    layout: ``projects.json``, ``<KEY>_statuses.json``, ``<KEY>_versions.json``,
    ``<KEY>_workflowscheme.json`` and per-issue files in ``<path>/<KEY>/*.json``.
    """

    # Default connection string kept for backward compatibility with the
    # previous hard-coded value; pass ``db_url`` to target another database.
    DEFAULT_DB_URL = 'mysql+pymysql://root:123456@localhost:3306/myjira?charset=utf8mb4'

    def __init__(self, base=None, db_url=None):
        """Create a parser.

        Args:
            base: directory holding the exported JSON files; defaults to "./data".
            db_url: SQLAlchemy connection URL; defaults to ``DEFAULT_DB_URL``.
        """
        self.engine = create_engine(db_url or self.DEFAULT_DB_URL)
        DBSession = sessionmaker(bind=self.engine)
        self.db_session = DBSession()
        self.base = base if base is not None else "./data"

    def project2db(self, project):
        """Map a domain ``Project`` onto a ``DaoProject`` row (not yet added to the session)."""
        dao = DaoProject()
        dao.oid = str(project.id)
        dao.name = project.name
        dao.project_key = project.key
        dao.statuses = str(len(project.statuses))
        dao.versions = str(len(project.versions))
        dao.issues = project.total
        if project.updated:
            # Timestamps look like "2017-06-07T07:39:02.387+0800"; keep only
            # the first 19 chars so strptime can parse without tz/millis.
            dao.updated_at = datetime.datetime.strptime(
                project.updated[:19], "%Y-%m-%dT%H:%M:%S")
        return dao

    def issue2db(self, issue, project_key):
        """Map a domain ``Issue`` onto a ``DaoIssue`` row for ``project_key``."""
        dao = DaoIssue()
        dao.issue_key = issue.key
        dao.project = project_key
        dao.status = issue.status
        dao.summary = issue.summary
        dao.sprint = issue.sprint
        dao.created_at = issue.created_at
        dao.updated_at = issue.updated_at
        dao.creator = issue.creator
        return dao

    def change2db(self, change, project_key):
        """Map a domain ``ChangeLog`` entry onto a ``DaoChangeLog`` row."""
        dao = DaoChangeLog()
        dao.issue_id = change.issue.key
        dao.project_key = project_key
        dao.oid = change.id
        dao.items = change.items
        # Same truncated-ISO format as in project2db.
        dao.updated_at = datetime.datetime.strptime(
            change.created[:19], "%Y-%m-%dT%H:%M:%S")
        dao.author = change.author
        return dao

    def load_project(self, project):
        """Enrich ``project`` in place with its workflow scheme, statuses and versions."""
        project.workflowscheme = self.load_workflowscheme(project.key)
        project.statuses = self.load_status(project.key)
        project.versions = self.load_versions(project.key)
        return project

    def load_projects(self):
        """Read ``projects.json``, enrich and persist every project.

        Returns:
            The list of domain ``Project`` objects that were persisted.
        """
        path = "{}/projects.json".format(self.base)
        print("loading site projects from {}".format(path))
        # Use a context manager so the file handle is closed promptly
        # (json.load(open(...)) leaked the descriptor).
        with open(path, encoding='utf8') as f:
            data = json.load(f)
        projects = [Project.fromJson(x) for x in data]
        print("total projects {}".format(len(projects)))
        for p in projects:
            self.db_session.add(self.project2db(self.load_project(p)))
        self.db_session.commit()
        return projects

    def load_versions(self, project):
        """Return the parsed ``<project>_versions.json`` content."""
        with open("{}/{}_versions.json".format(self.base, project),
                  encoding='utf8') as f:
            return json.load(f)

    def load_status(self, project):
        """Return the status list of the "Story" issue type for ``project``.

        Falls back to an empty list when no "Story" entry exists or the
        file's structure is not the expected list of dicts.
        """
        with open("{}/{}_statuses.json".format(self.base, project),
                  encoding='utf8') as f:
            data = json.load(f)
        try:
            story = next(i for i in data if i["name"] == "Story")
            return story["statuses"]
        # Narrowed from a bare except: StopIteration (no Story entry),
        # KeyError/TypeError (unexpected JSON shape) are the expected failures.
        except (StopIteration, KeyError, TypeError):
            return []

    def load_workflowscheme(self, project):
        """Return the parsed workflow scheme JSON, or None when the file is absent."""
        path = "{}/{}_workflowscheme.json".format(self.base, project)
        if not os.path.exists(path):
            return None
        with open(path, encoding='utf8') as file:
            return json.load(file)

    def parse_issues(self, project_key, path="data"):
        """Parse every issue file under ``<path>/<project_key>/`` and persist
        the issues together with their change logs in one commit."""
        pattern = "{}/{}/*.json".format(path, project_key)
        print("reading path:{}".format(pattern))
        files = glob.glob(pattern)
        issues = [Issue.from_file(f) for f in files]
        for issue in issues:
            self.db_session.add(self.issue2db(issue, project_key))
            for change in issue.change_logs:
                self.db_session.add(self.change2db(change, project_key))
        self.db_session.commit()

# def ingestIssues(project, mapping):
#     engine.execute("delete from issue where project = '{}'".format(project))
#     files = glob.glob("../data/{}/*.json".format(project))
#     for i in files:
#         issue = parseIssueFromFile(i, project, mapping)
#         if issue is not None:
#             try:
#                 db_session.add(issue)
#                 db_session.commit()
#             except Exception as ex:
#                 print("insert failed.{}".format(issue))
#                 print(ex)
#                 break
#         else:
#             print("error read {}".format(i))

# def listIssueStatus(project):
#     status = []
#     files = glob.glob("../data/{}/*.json".format(project))
#     files = sorted(files, key=lambda k: int(k.split('-')[1][0:-5]))
#     for file in files:
#         data = json.load(open(file, encoding='utf8'))
#         if "issuetype" in data["fields"]:
#             # print("{} {}".format(data["fields"]["issuetype"]["name"],data["fields"]["created"]))
#             if data["fields"]["issuetype"]["name"] != "Story" and data["fields"]["issuetype"]["name"] != "Bug":
#                 continue
#         if "errorMessages" in data:
#             continue
#         changes = list(map(lambda x: x["status"], integrateChanges(
#             data["changelog"]["histories"])))
#         if len(changes) == 0:
#             continue
#         print("{} {} {}".format(data["key"],
#                                 data["fields"]["created"], changes))
#         for i in changes:
#             if not i in status:
#                 status.append(i)
#     status.sort()
#     return status


# def parseIssueFromFile(file, project, mapping):
#     print("processing {}".format(file))
#     data = json.load(open(file, encoding='utf8'))
#     return parseIssueFromJson(project, data, mapping)


# def getItem(data, key):
#     if key is None:
#         return None
#     return data["fields"][key]["value"] if (key in data["fields"] and data["fields"][key] is not None) else None


# def grepChangeTimeUsage(issue, changes, mapping):
#     if len(changes) > 0:
#         for i in changes:
#             caculateUsage(issue, mapping, i)
#         issue.changelogs = ','.join(list(map(lambda x: x["status"], changes)))
#     return issue


# def getWorkDays(time_start, time_end):
#     ret = 0
#     day = time_start
#     while day <= time_end:
#         if holiday.is_workday(day):
#             ret = ret + 1
#         day = day + datetime.timedelta(days=1)
#     return ret


# def caculateUsage(issue, mapping, step):
#     days = getWorkDays(step["status_in"], step["status_out"]) + \
#         round((step["status_out"] - step["status_in"]).seconds/(24*3600), 1)
#     if step["status"] in mapping["efforts"]["BA"]:
#         issue.time_ba_used = issue.time_ba_used + days
#         issue.ba = step["author"]
#     if step["status"] in mapping["efforts"]["DEV"]:
#         issue.time_dev_used = issue.time_dev_used + days
#         issue.time_dev_count = issue.time_dev_count + 1
#         issue.developer = step["author"]
#         if not issue.time_dev_start:
#             issue.time_dev_start = step["status_in"]
#         issue.time_dev_end = step["status_out"]
#     if step["status"] in mapping["efforts"]["QA"]:
#         issue.time_qa_used = issue.time_qa_used + days
#         issue.qa = step["author"]


# def parseIssueFromJson(projectName, data, mapping):
#     if "errorMessages" in data:
#         return None
#     statuses = list(numpy.concatenate(list(mapping["status"].values())))
#     mapping["endStatus"] = statuses[-1]
#     team = getItem(data, mapping.get("team", None))
#     estimated = None
#     if mapping.get("estimated", None) and mapping["estimated"] in data["fields"]:
#         estimated = data["fields"][mapping["estimated"]]
#     fix_versions = data["fields"]["fixVersions"][0]["name"] if len(
#         data["fields"]["fixVersions"]) > 0 else ""
#     issue = db.Issue(issue_key = data["key"],
#                      project = projectName,
#                      issuetype = data["fields"]["issuetype"]["name"],
#                      summary = data["fields"]["summary"],
#                      time_ba_used=0, time_dev_used=0, time_qa_used=0, time_uat_used=0,
#                      time_dev_count=0,
#                      ba="", developer="", qa="",
#                      fix_versions=fix_versions,
#                      team=team,
#                      estimated=estimated,
#                      created_at=datetime.datetime.strptime(
#                          data["fields"]["created"][:19], "%Y-%m-%dT%H:%M:%S"),
#                      updated_at=datetime.datetime.strptime(
#                          data["fields"]["updated"][:19], "%Y-%m-%dT%H:%M:%S")  # 2017-06-07T07:39:02.387+0800)
#                      )
#     if "status" in data["fields"]:
#         issue.status = data["fields"]["status"]["name"].upper()
#         issue.istatus = statuses.index(
#             issue.status) if issue.status in statuses else -1
#     if "sprint" in data["fields"]:
#         issue.sprint = data["fields"]["sprint"]["name"]
#     if "closedSprints" in data["fields"]:
#         issue.sprint = data["fields"]["closedSprints"][-1]["name"]
#     changelogs = sorted(data["changelog"]["histories"],
#                         key=lambda x: x["created"], reverse=False)
#     changes = integrateChanges(changelogs)

#     issue = grepChangeTimeUsage(issue, changes, mapping)
#     issue.start_at, issue.end_at = getStartEndTime(changelogs,mapping)
#     issue.caculateUsedDays()

#     return issue


# def getProjectInfo(projectName):
#     getProjectReleases(projectName)
#     issuses = getProjectIssues(projectName)
#     getIssuesHistory(projectName)


# # def printIssueChanges(issue, changes):
# #     print("{}\t{}".format(issue.issue_key, "\t".join(map(lambda x: "{},{},{}".format(
# #         x["status"], x["status_in"], x["status_out"]), changes))))


# def updateIssueWithChanges(issue, changes):
#     if "DONE" in issue.status:
#         issue.start_at = changes[0]["time"]
#         issue.end_at = changes[-1]["time"]
#     for i in changes:
#         if i["status"] == "IN DEV":
#             issue.time_dev_start = i["time"]
#             issue.developer = i["author"]
#         # if i["status"] == "READY FOR QA": # OTAS
#         #     issue.time_dev_end = i["time"]
#         if "QA" in i["status"]:  # for OTR
#             issue.time_dev_end = i["time"]


# def getStartEndTime(histories,mapping):
#     start_at, end_at = None, None
#     start_from,start_to = mapping["leadtime"]["START_FROM"],mapping["leadtime"]["START_TO"]
#     end_from,end_to = mapping["leadtime"]["END_FROM"],mapping["leadtime"]["END_TO"]
#     for i in histories:
#         item = getStatusChange(i)
#         if not item is None:
#             if start_at is None and item[0] == start_from and item[1] == start_to:
#                 start_at = item[2]
#             if end_at is None and item[0] == end_from and item[1] == end_to:
#                 end_at = item[2]
#     if end_at is None:
#         end_at = datetime.datetime.now()
#     return start_at, end_at


# def getStatusChange(changelog):
#     status_items = list(
#         filter(lambda x: x["field"] == "status", changelog["items"]))
#     if len(status_items) == 0:
#         return None
#     item = status_items[0]
#     return item["fromString"].upper(), item["toString"].upper(), datetime.datetime.strptime(changelog["created"][:19], "%Y-%m-%dT%H:%M:%S")


# def integrateChanges(histories):
#     changes = []
#     for i in histories:
#         item = getStatusChange(i)
#         if item is None:
#             continue
#         _from, _to, _time = item
#         author = i["author"]["name"]
#         if(len(changes) > 0):
#             changes[-1]["status_out"] = _time
#         changes.append({"time": _time, "status": _to,
#                         "author": author, "status_in": _time, "status_out": _time})

#     if len(changes) > 0 and changes[-1]["status_in"] == changes[-1]["status_out"]:
#         changes[-1]["status_out"] = datetime.datetime.now()
#     return changes


# def formatAction(status):
#     if "DEV" in status:
#         return "1.DEV"
#     if "QA" in status:
#         return "2.QA"
#     if "UAT" in status:
#         return "3.UAT"
#     return "4.OTHER"


# # def ingestHistory(projectName):


# def analysisBug(projectName):
#     issues = db_session.query(db.Issue).filter(
#         db.Issue.project == projectName).filter(db.Issue.issuetype == "Bug")
#     count, timespent = 0, 0
#     for i in issues:
#         if i.end_at is None:
#             continue
#         timespent = timespent + (i.end_at - i.start_at).days
#         count = count + 1
#     print("bugs:{}, time spend:{} average:{}".format(
#         count, timespent, timespent/count))


# def parseHistory(issue,mapping):
#     file = "../data/{}/{}.json".format(issue.project, issue.issue_key)
#     print("read {}".format(file))
#     data = json.load(open(file), encoding='utf8')
#     changelogs = sorted(data["changelog"]["histories"],
#                         key=lambda x: x["created"], reverse=False)
#     issue.start_at, issue.end_at = getStartEndTime(changelogs,mapping)
#     issue.caculateUsedDays()
#     return integrateChanges(changelogs)
#     # updateIssueWithChanges(issue,changes)
#     # db_session.commit()


# def tryParse(project, issueKey, mapping):
#     mapping["endStatus"] = list(numpy.array(
#         list(mapping["status"].values())).flat)[-1]
#     issue = db.Issue(project=project, issue_key=issueKey, time_ba_used=0,
#                      time_dev_used=0, time_qa_used=0, time_uat_used=0, time_dev_count=0)
#     changes = parseHistory(issue,mapping)
#     for item in changes:
#         print(item)
#     return grepChangeTimeUsage(issue, changes, mapping)
# Parser().parse_issues("AC")  # NOTE: class has no parse_change_logs; parse_issues is the current entry point