# LuckPay Web Manage System
#
# Copyright (c) 2016 Lucky Byte, Inc.
#
# Module dependencies. (`fs` is required here but not referenced in this
# file's visible routes; it is kept for compatibility.)
express     = require 'express'
fs          = require 'fs'
pgsql       = require 'pg'
table_const = require '../lib/tables'

# Every audit route below hangs off this router; the application mounts it
# under the /audit prefix (see the '/audit/merch' redirect).
router = express.Router()
module.exports = router


# The audit section has no landing page of its own: forward the bare
# /audit request to the merchant audit view.
router.get '/', (req, res, next) ->
    res.redirect('/audit/merch')


# Merchant audit trail (web_audit rows for table 'pay_merch').
# Lists the distinct merchants touched by audit records (keyword-filtered,
# paginated), then loads each merchant's change history and computes a
# field-level diff for the template.
# NOTE(review): `settings` is not required/defined in this file — presumably
# a global set up elsewhere; confirm before refactoring.
# NOTE(review): `res.locals.keyword` is read below ("%#{res.locals.keyword}%")
# but never assigned in this handler — presumably set by upstream middleware;
# an unset value would produce the literal pattern "%undefined%". Verify.
router.all '/merch', (req, res, next) ->
    res.locals.category = 'merch'

    # 1-based page number; a missing or non-numeric page_no falls back to 1.
    res.locals.page_no = parseInt(req.body.page_no) || 1
    page_size = 10
    offset = (res.locals.page_no - 1) * page_size

    # Keyword filter on merchant number / name; $1 is bound to "%keyword%"
    # in every query that embeds this condition.
    where_cond = """
        (json_extract_path_text(a.new_data, 'merno') like $1 or
         json_extract_path_text(a.new_data, 'name') like $1)
    """

    # NOTE(review): start_time/end_time come straight from the request body
    # and are interpolated into the SQL text — SQL injection risk; they
    # should be bound parameters instead.
    if req.body.start_time and req.body.start_time.length > 0
        where_cond += " and a.timestamp >= '#{req.body.start_time}'"
        res.locals.start_time = req.body.start_time

    if req.body.end_time and req.body.end_time.length > 0
        where_cond += " and a.timestamp <= '#{req.body.end_time}'"
        res.locals.end_time = req.body.end_time

    await pgsql.connect settings.pgsql_url, defer err, client, done
    # NOTE(review): in standard CoffeeScript a `;` after a single-line `then`
    # terminates the if, which would make `return next(err)` run
    # unconditionally — confirm how the Iced compiler emits this pattern.
    if err then done(client); return next(err)

    # Count the distinct merchants matching the filter (for pagination).
    await client.query \
        "select json_extract_path_text(a.new_data, 'uuid') as uuid
            from web_audit as a
            where a.table_name = 'pay_merch' and #{where_cond}
            group by uuid", [
            "%#{res.locals.keyword}%"
        ], defer err, result
    if err then done(client); return next(err)
    # Ceiling division: the implicit-call parens extend over the whole
    # quotient, i.e. floor((count + page_size - 1) / page_size).
    res.locals.page_nums =
        Math.floor (result.rows.length + page_size - 1) / page_size

    # Fetch the distinct merchants for this page (one thread per merchant).
    await client.query \
        "select distinct on(uuid)
            json_extract_path_text(a.new_data, 'uuid') as uuid,
            json_extract_path_text(a.new_data, 'merno') as merno,
            json_extract_path_text(a.new_data, 'name') as name
            from web_audit as a
        where a.table_name = 'pay_merch' and #{where_cond}
        order by uuid, a.timestamp desc offset $2 limit $3", [
            "%#{res.locals.keyword}%", offset, page_size
        ], defer err, result
    if err then done(client); return next(err)
    res.locals.merchs = result.rows

    # History is organized per merchant, so one query is issued per merchant
    # on this page — may become a problem with large data volumes.
    for merch in res.locals.merchs
        # Joins resolve UUID references (users, fee plans, related merchants)
        # in both new_data and old_data to display names.
        await client.query \
            "select a.*,
                u.realname as input_user, u2.realname as check_user,
                u3.realname as old_input_user, u4.realname as old_check_user,
                f.name as fee_name, f2.name as old_fee_name,
                m2.merno as mapto_merno, m2.name as mapto_mer_name,
                m3.merno as old_mapto_merno, m3.name as old_mapto_mer_name,
                m4.merno as settl_merno, m4.name as settl_mer_name,
                m5.merno as old_settl_merno, m5.name as old_settl_mer_name
            from web_audit as a
            left join web_users as u on
                u.uuid = json_extract_path_text(a.new_data, 'inputer')
            left join web_users as u2 on
                u2.uuid = json_extract_path_text(a.new_data, 'checker')
            left join web_users as u3 on
                u3.uuid = json_extract_path_text(a.old_data, 'inputer')
            left join web_users as u4 on
                u4.uuid = json_extract_path_text(a.old_data, 'checker')
            left join pay_fee as f on
                f.uuid = json_extract_path_text(a.new_data, 'fee')
            left join pay_fee as f2 on
                f2.uuid = json_extract_path_text(a.old_data, 'fee')
            left join pay_merch as m2 on
                m2.uuid = json_extract_path_text(a.new_data, 'mapto_merid')
            left join pay_merch as m3 on
                m3.uuid = json_extract_path_text(a.old_data, 'mapto_merid')
            left join pay_merch as m4 on
                m4.uuid = json_extract_path_text(a.new_data, 'settl_merid')
            left join pay_merch as m5 on
                m5.uuid = json_extract_path_text(a.old_data, 'settl_merid')
            where #{where_cond} and
                json_extract_path_text(a.new_data, 'uuid') = $2
            order by a.timestamp desc", [
                "%#{res.locals.keyword}%", merch.uuid
            ], defer err, result
        if err then done(client); return next(err)

        # Diff old data against new data, filtering out differences that
        # should not be displayed.
        merch.threads = []
        for thread in result.rows
            thread.modified_fields = []
            # action 'I' = insert (every non-empty new value is a change),
            # 'D' = delete (every non-empty old value), 'U' = update (only
            # fields whose stringified value actually differs).
            if thread.action is 'I'
                for k, v of thread.new_data when v isnt null and v.toString().length > 0
                    thread.modified_fields.push key: k, new_v: v.toString()
            else if thread.action is 'D'
                for k, v of thread.old_data when v isnt null and v.toString().length > 0
                    thread.modified_fields.push key: k, old_v: v.toString()
            else if thread.action is 'U'
                for k, v of thread.new_data
                    v = if v is null then '' else v.toString()
                    old_v = if thread.old_data[k] is null then '' else
                        thread.old_data[k].toString()
                    if old_v isnt v
                        thread.modified_fields.push key: k, new_v: v, old_v: old_v

            # No visible differences — nothing to display for this row.
            continue if thread.modified_fields.length == 0

            # Fields modified by the system at runtime are not shown to the
            # user; keep the row only if some other field also changed.
            append = false
            for field in thread.modified_fields
                if not (field.key in ['order_id', 'trace_num'])
                    append = true
                    break
            merch.threads.push thread if append
    done()

    # Maps column names to their Chinese display labels for the template.
    res.locals.pay_merch = table_const.pay_merch
    res.render 'audit/index'


# Terminal audit trail (web_audit rows for table 'pay_terms').
# Lists the distinct terminals touched by audit records (keyword-filtered,
# paginated), then loads each terminal's change history and computes a
# field-level diff for the template. Mirrors the '/merch' handler.
# NOTE(review): `settings` is not required/defined in this file — presumably
# a global set up elsewhere; confirm before refactoring.
# NOTE(review): `res.locals.keyword` is read below but never assigned in
# this handler — presumably set by upstream middleware; verify.
router.all '/terms', (req, res, next) ->
    res.locals.category = 'terms'

    # 1-based page number; a missing or non-numeric page_no falls back to 1.
    res.locals.page_no = parseInt(req.body.page_no) || 1
    page_size = 10
    offset = (res.locals.page_no - 1) * page_size

    # Keyword filter on terminal id; $1 is bound to "%keyword%" in every
    # query that embeds this condition.
    where_cond = """
        (json_extract_path_text(a.new_data, 'termid') like $1)
    """

    # NOTE(review): start_time/end_time come straight from the request body
    # and are interpolated into the SQL text — SQL injection risk; they
    # should be bound parameters instead.
    if req.body.start_time and req.body.start_time.length > 0
        where_cond += " and a.timestamp >= '#{req.body.start_time}'"
        res.locals.start_time = req.body.start_time

    if req.body.end_time and req.body.end_time.length > 0
        where_cond += " and a.timestamp <= '#{req.body.end_time}'"
        res.locals.end_time = req.body.end_time

    await pgsql.connect settings.pgsql_url, defer err, client, done
    # NOTE(review): in standard CoffeeScript a `;` after a single-line `then`
    # terminates the if, which would make `return next(err)` run
    # unconditionally — confirm how the Iced compiler emits this pattern.
    if err then done(client); return next(err)

    # Count the distinct terminals matching the filter (for pagination).
    await client.query \
        "select json_extract_path_text(a.new_data, 'uuid') as uuid
            from web_audit as a
            where a.table_name = 'pay_terms' and #{where_cond}
            group by uuid", [
            "%#{res.locals.keyword}%"
        ], defer err, result
    if err then done(client); return next(err)
    # Ceiling division: the implicit-call parens extend over the whole
    # quotient, i.e. floor((count + page_size - 1) / page_size).
    res.locals.page_nums =
        Math.floor (result.rows.length + page_size - 1) / page_size

    # Fetch the distinct terminals for this page (one thread each), joining
    # the owning merchant for display.
    await client.query \
        "select distinct on(uuid)
            json_extract_path_text(a.new_data, 'uuid') as uuid,
            json_extract_path_text(a.new_data, 'termid') as termid,
            m.merno as merno, m.name as mer_name,
            m.inside as mer_inside, m.chnl as mer_chnl
        from web_audit as a
            left join pay_merch as m on
                json_extract_path_text(a.new_data, 'merid') = m.uuid
        where a.table_name = 'pay_terms' and #{where_cond}
        order by uuid, a.timestamp desc offset $2 limit $3", [
            "%#{res.locals.keyword}%", offset, page_size
        ], defer err, result
    if err then done(client); return next(err)
    res.locals.terms = result.rows

    # History is organized per terminal, so one query is issued per terminal
    # on this page — may become a problem with large data volumes.
    for term in res.locals.terms
        # Joins resolve UUID references (users, mapped/associated terminals,
        # owning merchant) in both new_data and old_data to display names.
        await client.query \
            "select a.*,
                u.realname as input_user, u2.realname as check_user,
                u3.realname as old_input_user, u4.realname as old_check_user,
                t2.termid as mapto_termid, t3.termid as old_mapto_termid,
                t4.termid as assoc_termid, t5.termid as old_assoc_termid,
                m.merno as merno, m.name as mer_name,
                m2.merno as old_merno, m2.name as old_mer_name
            from web_audit as a
            left join web_users as u on
                u.uuid = json_extract_path_text(a.new_data, 'inputer')
            left join web_users as u2 on
                u2.uuid = json_extract_path_text(a.new_data, 'checker')
            left join web_users as u3 on
                u3.uuid = json_extract_path_text(a.old_data, 'inputer')
            left join web_users as u4 on
                u4.uuid = json_extract_path_text(a.old_data, 'checker')
            left join pay_terms as t2 on
                t2.uuid = json_extract_path_text(a.new_data, 'mapto_term')
            left join pay_terms as t3 on
                t3.uuid = json_extract_path_text(a.old_data, 'mapto_term')
            left join pay_terms as t4 on
                t4.uuid = json_extract_path_text(a.new_data, 'assoc_term')
            left join pay_terms as t5 on
                t5.uuid = json_extract_path_text(a.old_data, 'assoc_term')
            left join pay_merch as m on
                m.uuid = json_extract_path_text(a.new_data, 'merid')
            left join pay_merch as m2 on
                m2.uuid = json_extract_path_text(a.old_data, 'merid')
            where #{where_cond} and
                json_extract_path_text(a.new_data, 'uuid') = $2
            order by a.timestamp desc", [
                "%#{res.locals.keyword}%", term.uuid
            ], defer err, result
        if err then done(client); return next(err)

        # Diff old data against new data, filtering out differences that
        # should not be displayed.
        term.threads = []
        for thread in result.rows
            thread.modified_fields = []
            # action 'I' = insert (every non-empty new value is a change),
            # 'D' = delete (every non-empty old value), 'U' = update (only
            # fields whose stringified value actually differs).
            if thread.action is 'I'
                for k, v of thread.new_data when v isnt null and v.toString().length > 0
                    thread.modified_fields.push key: k, new_v: v.toString()
            else if thread.action is 'D'
                for k, v of thread.old_data when v isnt null and v.toString().length > 0
                    thread.modified_fields.push key: k, old_v: v.toString()
            else if thread.action is 'U'
                for k, v of thread.new_data
                    v = if v is null then '' else v.toString()
                    old_v = if thread.old_data[k] is null then '' else
                        thread.old_data[k].toString()
                    if old_v isnt v
                        thread.modified_fields.push key: k, new_v: v, old_v: old_v

            # No visible differences — nothing to display for this row.
            continue if thread.modified_fields.length == 0

            # Fields modified by the system at runtime are not shown to the
            # user; keep the row only if some other field also changed.
            append = false
            for field in thread.modified_fields
                if not (field.key in ['batch_num', 'trace_num'])
                    append = true
                    break
            term.threads.push thread if append
    done()

    # Maps column names to their Chinese display labels for the template.
    res.locals.pay_terms = table_const.pay_terms
    res.render 'audit/index'
