#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2015  Data Enlighten Technology (Beijing) Co.,Ltd

import functools
import json
import logging
import os
import pickle
import time
import urllib.parse

import pyamf
import pyamf.flex.messaging as messaging
import pyamf.remoting as remoting
import pyamf.remoting.client as service
import requests
import requests.cookies

logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)-5.5s [%(name)s] %(message)s')


def request_with_relogin(func):
    """Decorator: retry the wrapped request once after re-login.

    A 302/502 status or a text/html body indicates the session expired
    and the server bounced us to the login page; in that case call
    ``self.update_cookies()`` and repeat the request a single time.
    """

    @functools.wraps(func)
    def func_wrapper(self, *args, **kwargs):
        returned = func(self, *args, **kwargs)
        # .get avoids a KeyError when the server sends no content-type.
        content_type = returned.headers.get('content-type', '')
        if returned.status_code in (302, 502) or content_type.startswith('text/html'):
            print("received status {0} for request".format(str(returned.status_code)))
            self.update_cookies()
            return func(self, *args, **kwargs)
        return returned

    return func_wrapper


def decode_amf(func):
    """Decorator: decode a successful AMF HTTP response.

    Returns the decoded remoting envelope when the response has a
    sub-300 status and an ``application/x-amf`` content type; otherwise
    logs a probable login error and returns ``None``.
    """

    @functools.wraps(func)
    def func_wrapper(self, *args, **kwargs):
        returned = func(self, *args, **kwargs)
        # NOTE: a requests.Response is falsy for 4xx/5xx, so this also
        # catches plain HTTP failures, not just a None return.
        if not returned:
            print('Possible login error')
            return None

        # .get avoids a KeyError when the server sends no content-type.
        content_type = returned.headers.get('content-type', '')
        if returned.status_code < 300 and content_type.startswith('application/x-amf'):
            return remoting.decode(returned.content)

        print('Possible login error: {0}'.format(returned.content))
        return None

    return func_wrapper

class FordBrand:
    # Crawl profile for the Ford EPC catalog: persisted-state file names
    # plus the account/session identifiers sent with each AMF request.
    # NOTE(review): credentials are hard-coded in source — consider moving
    # them to external configuration.
    cookie_file = "ford_cookie.txt"  # pickled session cookie jar (under conf/)
    url_file = "ford_url.txt"  # discovered AMF broker URL (under conf/)
    user_name = "zhangmin@forland-group.com.cn"
    password = "20170418"
    company_id = '53f62452e4b009ed8037682a'  # overwritten after login (primaryAccountId)
    subscription_id = 'DYN0000000001A83DD'  # X-IFM-SID header; refreshed by update_cookies()
    dsid = 'CE99517D-F11A-13A0-7F48-E3C369BB1ECB'  # DSId header value
    message_id = '31EE06F8-0ACF-3813-2BA7-086FC45DFAFF'  # AMF messageId
    url = ''  # AMF broker endpoint, filled in by update_cookies()/load_url()


class FordWebSource:
    """Crawler client for the superservice.com Ford electronic parts catalog.

    Workflow: log in via :meth:`update_cookies` (which also discovers the
    company-specific AMF broker URL), then issue AMF remoting queries for
    VINs, catalog sections and part details, and download section images.
    The session cookie jar and broker URL are persisted under
    ``<root_path>/conf`` so they can be reloaded later.
    """

    LOGIN = 'https://login.superservice.com/landingpage/services/authentication/loginandgoto.json'
    USER = 'https://login.superservice.com/landingpage/services/user/current.json'
    DISCOVERY = 'https://login.superservice.com/landingpage/services/application' + \
                '/info.json?preferredCompanyId={0}&rand=1487419496639'

    def __init__(self, root_path='.', profile=FordBrand):
        """Build file paths from *profile* and log in immediately.

        :param root_path: base directory holding conf/ and source data
        :param profile: brand profile providing credentials and request ids
        """
        self.profile = profile

        self.root_path = root_path
        self.cookie_path = os.path.join(root_path, 'conf')
        self.cookie_file = os.path.join(root_path, 'conf', profile.cookie_file)
        self.cookie_str = None  # RequestsCookieJar, set by update_cookies()/load_cookie()

        self.url_file = os.path.join(root_path, 'conf', profile.url_file)
        # NOTE(review): Windows-style separator kept verbatim so existing
        # on-disk layouts keep working.
        self.data_path = os.path.join(root_path, "source\\data")

        self.timestamp = str(int(time.time() * 1000))
        self.update_cookies()

    def query_vin(self, vin):
        """Search the catalog by *vin*; return the decoded AMF envelope or None."""
        message = {
            'body': ['0', vin, '', 100, False, False, True, False],
            'destination': 'catalog',
            'operation': 'searchCatalogWithSections',
            'messageId': self.profile.message_id,
            'headers': {
                'X-IFM-SID': self.profile.subscription_id,
                'DSId': self.profile.dsid,
                'MARKET': 'CN',
                'X-IFM-SESSIONID': None,
                'DSEndPoint': None
            }
        }
        return self.query(message)

    def query_section(self, section_id):
        """Fetch the parts list of CCG-catalog section *section_id*.

        Returns the decoded AMF envelope or None on failure.
        """
        message_kwargs = {
            'body': ['CCG', section_id,
                     '<interpretation catalog="CCG" type="GCAT" showlatestpart="0" level="2">\n  <avs data="WSPAD0" type="W"/>\n  <avs data="VLCG0" type="VL"/>\n</interpretation>',
                     True, False],
            'destination': 'catalog',
            'operation': 'getSectionParts',
            # Fix: use the profile identifiers (same default values as the
            # previous hard-coded literals) for consistency with
            # query_vin()/query_part_detail(); update_cookies() refreshes
            # profile.subscription_id, so the literal could go stale.
            'messageId': self.profile.message_id,
            'headers': {
                'X-IFM-SID': self.profile.subscription_id,
                'DSId': self.profile.dsid,
                'MARKET': 'CN',
                'X-IFM-SESSIONID': None,
                'DSEndPoint': None
            }
        }
        return self.query(message_kwargs)

    def query_part_detail(self, group, sub):
        """Fetch the parts list of subsection *sub* in catalog *group*."""
        message_kwargs = {
            'body': [group, sub,
                     '<interpretation catalog="' + group + '" type="CAIMAN" showlatestpart="0" level="2">\n  <avs data="WSPAD0" type="W"/>\n  <avs data="VLCG0" type="VL"/>\n</interpretation>',
                     True, False],
            'destination': 'catalog',
            'operation': 'getSectionParts',
            'messageId': self.profile.message_id,
            'headers': {
                'X-IFM-SID': self.profile.subscription_id,
                'DSId': self.profile.dsid,
                'MARKET': 'CN',
                'X-IFM-SESSIONID': None,
                'DSEndPoint': None
            }
        }
        return self.query(message_kwargs)

    @decode_amf
    @request_with_relogin
    def query(self, message_kwargs):
        """POST one AMF RemotingMessage to the broker URL.

        The decorators retry once after re-login and decode the AMF
        response, so callers receive a remoting envelope or None.
        """
        client = service.RemotingService(self.profile.url, amf_version=pyamf.AMF3)
        message = (messaging.RemotingMessage(**message_kwargs),)
        request_wrapper = client.addRequest('null', *message)
        req_content = remoting.encode(client.getAMFRequest([request_wrapper])).getvalue()

        header = {
            'Content-Type': remoting.CONTENT_TYPE,
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36',
            'Referer': ''
        }

        # allow_redirects=False so an expired-session 302 stays visible to
        # the request_with_relogin decorator instead of being followed.
        result = requests.post(self.profile.url, data=req_content, headers=header, cookies=self.cookie_str, allow_redirects=False, verify=False)
        return result

    def save_url(self):
        """Persist the discovered AMF broker URL under conf/."""
        if not os.path.exists(self.cookie_path):
            os.makedirs(self.cookie_path)
        with open(self.url_file, 'w') as fo:
            fo.write(self.profile.url)

    def load_url(self):
        """Restore the AMF broker URL saved by save_url() (keeps the old one if empty)."""
        with open(self.url_file, 'r') as fi:
            url = fi.read()
            if url:
                self.profile.url = url

    def save_cookies(self):
        """Pickle the current cookie jar under conf/."""
        if not os.path.exists(self.cookie_path):
            os.makedirs(self.cookie_path)
        with open(self.cookie_file, 'wb') as fo:
            pickle.dump(self.cookie_str, fo)

    def update_cookies(self):
        """Log in, build the session cookie jar and discover the broker URL.

        On a 303 login response: store the access token as an
        'Authorization' cookie, look up the user's primary account id,
        resolve the application URL and subscription id via DISCOVERY,
        then persist both the URL and the cookies.
        """
        login_data = {
            "errorgoto": "https://login.superservice.com/login/index.html?errorCode=1",
            "goto": "https://login.superservice.com/landingpage/en-au/",
            "username": self.profile.user_name,
            "password": self.profile.password
        }
        headers = {
            'referer': self.profile.url
        }
        returned = requests.post(FordWebSource.LOGIN, data=login_data, allow_redirects=False, headers=headers,
                                 verify=False)
        if returned.status_code == 303:
            cookie_obj = returned.json()
            self.cookie_str = requests.cookies.RequestsCookieJar()
            self.cookie_str.set('Authorization', cookie_obj['accessToken'], domain='login.superservice.com',
                                path='/')

            returned = requests.get(self.USER, headers=headers, cookies=self.cookie_str, verify=False)
            if returned.status_code == 200:
                self.profile.company_id = returned.json()['primaryAccountId']
                self.timestamp = str(int(time.time() * 1000))

            discovery_url = self.DISCOVERY.format(self.profile.company_id)
            returned = requests.get(discovery_url, headers=headers, cookies=self.cookie_str, verify=False)
            if returned.status_code == 200:
                obj = returned.json()
                self.profile.url = obj['applicationInfo'][0]['url'] + '/epc/amfbroker/amf'
                self.profile.subscription_id = obj['applicationInfo'][0]['reference']['subscriptionAssignmentId']
                self.save_url()

            # Re-issue the token cookie for the broker host so the AMF
            # posts are authenticated as well.
            parsed = urllib.parse.urlparse(self.profile.url, scheme='https')
            self.cookie_str.set('Authorization', cookie_obj['accessToken'], domain=parsed.hostname,
                                path='/')
            self.save_cookies()

    def load_cookie(self):
        """Restore the pickled cookie jar; leave an empty jar if the file is absent."""
        self.cookie_str = requests.cookies.RequestsCookieJar()
        if not os.path.exists(self.cookie_file):
            return
        with open(self.cookie_file, 'rb') as fi:
            # NOTE(review): pickle.load on a local conf file this crawler
            # wrote itself; never point this at untrusted data.
            self.cookie_str = pickle.load(fi)

    def get_part_detail(self, group, subgroup):
        """Query part details for (*group*, *subgroup*) and write the
        catalog and illustration payloads as JSON files under data_path.
        """
        envelope = self.query_part_detail(group, subgroup)
        if envelope is None:
            print('query_part_detail encounter issue')
            # Fix: bail out early; the original fell through and crashed
            # with AttributeError on envelope.bodies.
            return

        content = envelope.bodies[0][1]
        payload = content.body.body
        catalog = payload.get('catalogs', None)
        illstrations = payload.get('illustrations', None)
        if catalog is None:
            print('catalog is empty')
        if illstrations is None:
            print('illustrations is empty')

        catalog_json = json.dumps(catalog, ensure_ascii=False)
        oe_file = os.path.join(self.data_path, '{0}_{1}_oe.json'.format(group, subgroup))
        with open(oe_file, 'w', encoding='utf-8') as fo:
            fo.write(catalog_json)

        illstrations_json = json.dumps(illstrations, ensure_ascii=False)
        image_file = os.path.join(self.data_path, '{0}_{1}_image.json'.format(group, subgroup))
        with open(image_file, 'w', encoding='utf-8') as fo:
            fo.write(illstrations_json)

    def parse_model(self, file_name):
        """Parse a flat-sections JSON file into a list of leaf sections.

        Returns dicts with 'section' (uniqueId), 'label' and 'image'
        (first large image path) for every childless section whose
        uniqueId is longer than 5 characters.
        """
        # Fix: context manager; the original never closed the file handle.
        with open(file_name, encoding='utf-8') as fobj:
            catalog = json.load(fobj)
        sections = catalog['clientSectionList']['sections']

        result_list = []
        for sec in sections:
            if len(sec['uniqueId']) > 5 and len(sec['childrenIndex']) == 0:
                result_list.append({
                    'section': sec['uniqueId'],
                    'label': sec['label'],
                    'image': sec['largeImagePaths'][0]
                })
        return result_list

    def query_image(self, group, section, image):
        """Download the section illustration PNG to ford/<group>/<section>.png."""
        header = {
            'Accept-Encoding': 'gzip, deflate, sdch, br',
            'X-Requested-With': 'ShockwaveFlash/24.0.0.221',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/55.0.2883.87 Safari/537.36',
            'Referer': 'https://login.superservice.com/landingpage/en-au/app.html'
        }

        # NOTE(review): the date segment of this URL is hard-coded and
        # appears to change weekly — verify before each crawl run.
        image_url = 'https://static.superservice.com/images/20171219/FAP_Weekly/Week1/' + image + '.png'
        returned = requests.get(image_url, headers=header, cookies=self.cookie_str, verify=False,
                                allow_redirects=False)

        out_dir = 'ford/' + group
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        with open('ford/' + group + '/' + section + '.png', 'wb') as fo:
            fo.write(returned.content)


def amf_decode(content):
    """Decode raw AMF bytes into a remoting envelope and print it (debug helper)."""
    envelope = pyamf.remoting.decode(content)
    print(envelope)


if __name__ == '__main__':

    # Catalog groups to crawl; each has a pre-downloaded flat-sections file.
    plan = ['A6', 'A7', 'A8', 'B6', 'BT', 'CB8', 'CCG', 'CCK', 'CCN', 'CCY', 'CED', 'CEW', 'CF', 'CP', 'CZG', 'D2', 'EG', 'F1', 'FD', 'GT', 'NH', 'RF', 'TE4', 'TFC', 'TM1', 'TM7', 'TQ1', 'TUB', 'WW', 'KD']

    crawler = FordWebSource()

    for group in plan:
        sections = crawler.parse_model('source\\title\\' + group + '_flatsections.json')
        done = False
        while not done:
            try:
                for sec in sections:
                    section_id = sec['section']
                    print('======================================' + group + ' -> ' + section_id)
                    target = 'ford/' + group + '/' + section_id + '.png'
                    # Skip sections whose image was already fetched.
                    if not os.path.exists(target):
                        crawler.get_part_detail(group, section_id)
                        crawler.query_image(group, section_id, sec['image'])
                        time.sleep(2)
                done = True
            except Exception as ex:
                # Best-effort retry: report, wait a minute, redo the group.
                print(ex)
                time.sleep(60)

    print('end')