"""Train tickets query via command-line.

Usage:
    jirat site test <site> <user> <token>
    jirat site test <site> <session>
    jirat project crawl [<site>] [<user> <token>]
    jirat project crawl <site> <session>
    jirat project parse [<path>]
    jirat workflow <project>
    jirat issues crawl <project> [<days>] [<path>]
    jirat issues parse <project> [<path>]

Options:
    -h,--help        Show this help message
    -p               Fetch the project list
    -i               Fetch the issue list

Example:
    export JIRA_SITE=https://xxx
    export JIRA_USER=xxx
    export JIRA_TOKEN=xxx
    jirat project crawl
    jirat project parse
    jirat issues crawl abc 60 
"""
from docopt import docopt
import crawler
from parser import Parser
from domain import Project
import yaml
import io
import os

if __name__ == '__main__':
    # Entry point: parse the docopt usage string at the top of the file.
    arguments = docopt(__doc__, version='Jira Crawler')

    # Credentials come from the environment by default and can be
    # overridden by positional CLI arguments below.  Use .get() so a
    # missing variable does not raise KeyError when the caller supplies
    # credentials on the command line instead (the original crashed here).
    site = os.environ.get('JIRA_SITE')
    user = os.environ.get('JIRA_USER')
    token = os.environ.get('JIRA_TOKEN')

    # CLI-supplied values take precedence over the environment.
    # <site> is honoured on its own so the session-based usage forms
    # (`jirat ... <site> <session>`) work too — the original only read
    # it when <user> and <token> were both present.
    if arguments["<site>"]:
        site = arguments["<site>"]
    if arguments["<user>"] and arguments["<token>"]:
        user, token = arguments["<user>"], arguments["<token>"]

    if not site:
        raise SystemExit(
            "A Jira site is required: set JIRA_SITE or pass <site> on the command line")

    organization = crawler.Organization(site, user, token)
    print("connect to jira server {} with user {}".format(site, user))

    # Keep the Crawler instance under its own name instead of rebinding
    # the imported `crawler` module (the original shadowed the module).
    if arguments["<session>"]:
        jira_crawler = crawler.Crawler(organization, session_id=arguments["<session>"])
    else:
        jira_crawler = crawler.Crawler(organization)

    # One Parser for all subcommands; <path> is None when not given and
    # Parser is expected to fall back to its default location then.
    parser = Parser(arguments["<path>"])

    if arguments["project"]:
        if arguments["crawl"]:
            jira_crawler.getProjects()
        if arguments["parse"]:
            for project in parser.load_projects():
                if project.workflowscheme:
                    print("{} workflowscheme: {}".format(
                        project.key, project.workflowscheme["id"]))

    if arguments["issues"]:
        if arguments["crawl"]:
            # Optional <path> overrides where crawled data is written.
            if arguments["<path>"]:
                jira_crawler.data_path = arguments["<path>"]
            jira_crawler.getProjectIssues(arguments["<project>"], arguments["<days>"])
        if arguments["parse"]:
            # Reuse the parser built above so an optional <path> argument
            # is honoured (the original re-created Parser() and ignored it).
            parser.parse_issues(arguments["<project>"])

    if arguments["workflow"]:
        # Load the named project to resolve its workflow scheme id, then
        # print every workflow attached to that scheme.
        project = Project()
        project.key = arguments["<project>"]
        parser.load_project(project)
        for scheme in jira_crawler.getWorkflow(project.workflowscheme["id"]):
            print(scheme["workflow"])
