# -*- coding: utf-8 -*-
# author : li shi jie
# Email : yr785339493@qq.com

import requests
requests.urllib3.disable_warnings()
import pymysql
import json
import time

class TmallSpider:
    """Scraper for one page of a Tmall mobile shop's auction-search API.

    ``parse`` fetches the hard-coded page as JSON; ``collection`` extracts
    the item id and title of each listed entry.
    """

    # One fixed page of the shop's search endpoint: ``suid`` identifies the
    # shop, ``p`` the page number.
    base_url = 'https://fordearme.m.tmall.com/shop/shop_auction_search.do?&suid=3308044329&p=3'

    # Session cookie + mobile UA captured from a logged-in browser session;
    # the endpoint requires them to return data.
    headers = {
        'cookie': 'hng=CN%7Czh-CN%7CCNY%7C156; cna=8goBFBm4QAMCAX15JGtF6JcY; lid=evil5214; otherx=e%3D1%26p%3D*%26s%3D0%26c%3D0%26f%3D0%26g%3D0%26t%3D0; tk_trace=1; t=de00596301fff7361c428d2a1094c717; tracknick=evil5214; lgc=evil5214; _tb_token_=356b3f3de583b; cookie2=1e632e5f48227167b15ace0150f7d4b5; uc1=cookie16=Vq8l%2BKCLySLZMFWHxqs8fwqnEw%3D%3D&cookie21=WqG3DMC9FxUx&cookie15=URm48syIIVrSKA%3D%3D&existShop=false&pas=0&cookie14=UoTYNkL%2BQo1ttQ%3D%3D&tag=8&lng=zh_CN; uc3=vt3=F8dByRjKAHl2k6zIdmk%3D&id2=UoH8WASUrKk1qQ%3D%3D&nk2=BuUIbFdbzlY%3D&lg2=URm48syIIVrSKA%3D%3D; _l_g_=Ug%3D%3D; ck1=""; unb=1035577032; cookie1=VWZ4RTK4VhG5qTIKnriegU13Ch0%2BX1CyXE5NyFINxfM%3D; login=true; cookie17=UoH8WASUrKk1qQ%3D%3D; _nk_=evil5214; uss=""; csg=9167c301; skt=ebc7ada28b4fa02c; x=__ll%3D-1%26_ato%3D0; _m_h5_tk=d7d42a17ab1b52c5dbb6384963f322f1_1540540919242; _m_h5_tk_enc=b666a1287dd2ecf0fb8f378c442fb2a6; x5sec=7b227477736d3b32223a2231366133623064313765326232303761346230656239376234623262343065384349665879743446454f572f365961647a4b4c704b786f4d4d54417a4e5455334e7a417a4d6a7332227d; whl=-1%260%260%260; isg=BMXFP-j0MpaYRhYbjkb6Ijya1AE_Kni4LRzysscqh_wJXuXQj9bw5FP2bMINHpHM',
        'user-agent': 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Mobile Safari/537.36',
    }

    def parse(self):
        """Fetch ``base_url`` and return the decoded JSON payload.

        Sleeps briefly first as a crude rate limit.  TLS verification is
        disabled (hence the ``disable_warnings()`` call at import time) —
        presumably because the endpoint's certificate chain is not trusted
        locally; confirm before relying on this in production.
        """
        time.sleep(0.5)
        # timeout keeps a stalled server from hanging the script forever.
        response = requests.get(self.base_url, headers=self.headers,
                                verify=False, timeout=10)
        return response.json()

    def collection(self, html):
        """Yield one ``{'item_id', 'title'}`` dict per entry in ``html['items']``.

        Fixes the original bug where a single dict was reused across
        iterations, making every yielded reference alias the same (last)
        record.  A missing or null ``items`` key yields nothing instead of
        raising ``TypeError``.
        """
        for item in html.get('items') or []:
            yield {
                'item_id': item.get('item_id'),
                'title': item.get('title'),
            }

if __name__ == '__main__':

    # Fetch one page of the shop's listings and append each record as one
    # JSON line to item.csv.
    tmall = TmallSpider()
    tmall_json = tmall.parse()
    # Open the output file once (instead of re-opening per record) and
    # avoid shadowing the generator with the loop variable, as the
    # original's `for filed in filed` did.
    with open('item.csv', 'a', encoding='utf-8') as f:
        for record in tmall.collection(tmall_json):
            f.write(json.dumps(record, ensure_ascii=False) + '\n')
