# -*- coding: utf-8 -*-
# -*- author: <Zhan Xinjian> -*-
import requests
import json
from utils import SqlUtil


# 获取港口船只信息
def get_ships(token, portId):
    """Fetch the list of ships currently in the given port from myships.com.

    Args:
        token: the ``access_token`` cookie value of a logged-in session.
            The cookie changes per login, so the caller must supply a fresh one.
        portId: port identifier as used by the myships.com API.

    Returns:
        A list of ship dicts, or an empty list when the request fails or the
        response carries no ``data`` field.
    """
    reqUrl = "https://www.myships.com/ms/label/getShipsByPortId"
    cookie = "access_token=" + token
    data = {
        "portId": portId
    }
    header = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Content-Type": "application/json; charset=UTF-8",
        'User-Agent': 'Chrome/85.0.4183.102',
        "Cookie": cookie}
    # timeout keeps a dead/slow endpoint from hanging the whole crawl
    repose = requests.post(url=reqUrl, headers=header, data=json.dumps(data),
                           timeout=30)
    if repose.status_code != 200:
        print(repose.text)
        return []
    print("数据爬取中，请稍候。。。")
    # tolerate a payload without 'data' (or data: null) instead of raising
    return repose.json().get('data') or []


def get_ship_info(mmsi):
    """Fetch the detail record for one ship (by MMSI) from shipxy.com.

    Returns:
        The first entry of the response's ``data`` list, or ``None`` when the
        request fails or the payload is empty.  (Previously a failed request
        returned ``[]``, which passed the caller's ``is not None`` guard and
        later crashed ``save_data`` with a TypeError.)
    """
    reqUrl = "http://www.shipxy.com/ship/GetShip"
    reqParam = {
        "mmsi": mmsi
    }
    reqHeader = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Connection": "keep-alive",
        "User-Agent": "Chrome/96.0.4664.45",
        # NOTE: this session cookie expires — copy the latest one from the
        # browser before each run.  The literal is split into adjacent string
        # pieces (implicit concatenation) purely for line length.
        "Cookie": "FD857C2AF68165D4=pH5xgnKajBP+xgCBCLmnp2mQePqZ90aXuPpociWqJpttBx221YrosJyv12fTSaux; language=%7B%22name%22%3A%22language%22%2C%22path%22%3A%22//api.shipxy.com/h5/api/languages/%22%2C%22mode%22%3A%22both%22%2C%22language%22%3A%22cn%22%7D; _elane_maptype=MT_GOOGLE; tc_TC=; _elane_shipfilter_type=%u8D27%u8239%2C%u96C6%u88C5%u7BB1%u8239%2C%u6CB9%u8F6E%2C%u5F15%u822A%u8239%2C%u62D6%u8F6E%2C%u62D6%u5F15%2C%u6E14%u8239%2C%u6355%u635E%2C%u5BA2%u8239%2C%u641C%u6551%u8239%2C%u6E2F%u53E3%u4F9B%u5E94%u8239%2C%u88C5%u6709%u9632%u6C61%u88C5%u7F6E%u548C%u8BBE%u5907%u7684%u8239%u8236%2C%u6267%u6CD5%u8247%2C%u5907%u7528-%u7528%u4E8E%u5F53%u5730%u8239%u8236%u7684%u4EFB%u52A1%u5206%u914D%2C%u5907%u7528-%u7528%u4E8E%u5F53%u5730%u8239%u8236%u7684%u4EFB%u52A1%u5206%u914D%2C%u533B%u7597%u8239%2C%u7B26%u540818%u53F7%u51B3%u8BAE%28Mob-83%29%u7684%u8239%u8236%2C%u62D6%u5F15%u5E76%u4E14%u8239%u957F%3E200m%u6216%u8239%u5BBD%3E25m%2C%u758F%u6D5A%u6216%u6C34%u4E0B%u4F5C%u4E1A%2C%u6F5C%u6C34%u4F5C%u4E1A%2C%u53C2%u4E0E%u519B%u4E8B%u884C%u52A8%2C%u5E06%u8239%u822A%u884C%2C%u5A31%u4E50%u8239%2C%u5730%u6548%u5E94%u8239%2C%u9AD8%u901F%u8239%2C%u5176%u4ED6%u7C7B%u578B%u7684%u8239%u8236%2C%u5176%u4ED6; _filter_flag=-1; _elane_shipfilter_length=0%2C40%2C41%2C80%2C81%2C120%2C121%2C160%2C161%2C240%2C241%2C320%2C321%2C9999; _elane_shipfilter_sog=0%2C1; _elane_shipfilter_one=2; _elane_shipfilter_country=0%2C1%2C2; _elane_shipfilter_olength=; tc_QX=; Hm_lvt_adc1d4b64be85a31d37dd5e88526cc47=1638838024,1638838096,1638838381,1638839039; ASP.NET_SessionId=kkvkwzyudnqzj21hjwyub42b; gdxidpyhxdE=12rfn%2B7OsoK8TTvQsIbzuAcB1qfOg15hVL%5CQYxtK9EBnuxezPMDfsdNdMdm%2F1zh4MYi%2FY2CCaLK5T0Xlf3n%2FXIa0cNo4bm7T0%2BrqlLyLDBRt5NlwLOi6TW%5CqjhvMjzaSaWvu%2BrazZy0%2FpVP7X6wYaIcKXddTl%2Bq698eRHKUu8LoXEYAa%3A1638841796782; _9755xjdesxxd_=32; YD00803672648830%3AWM_NI=GckvSvsDrAmZCXS756zX%2FT6pDIAgn8zYHaOmHECPGQKqyYycpaPMTsAFD1QRhbW31GZHolqPeBmx0l7nq7iiLgliRS7R%2Ft7yRSJVmgL6vGtHgTTs06kPUPxCE6ObeJKqTUE%3D; "
                  "YD00803672648830%3AWM_NIKE=9ca17ae2e6ffcda170e2e6eea5f77eb1aebdd4f448b0ef8fa6c85a978f9aabf865f79383d1d62582ac8fabd72af0fea7c3b92a85bda597d27eb19cbea2f05281889dd8b83eba88e1b5b57c88978f88ed34ba878882ce678c9b8db1c65b89afb7bafc69b4bfa0aed779839a8cd2d45aacbe99d4b44d9a98a18df45ca79ea399b368f5b2bfd8ec6390b9a48ad159f58ee195b673928d86b7f06d90eabaa8f744a58c9c94b33ea9909bb1fb4987ea8183e84289af9cb6c837e2a3; YD00803672648830%3AWM_TID=iEITujE3aVFBFUFREANrssMckSrA5QIG; .UserAuth2=BD91149D348AA203A41B418036C833D135337C48F2BDE74675B28B4C321C15B5E5049F32D09676F886915DCA2676FEFD6E81E4A249318F2429DBCA50714EB6DE211688E3DE06D7BB5E2E84C151492EE1A82FA4047A58E4067A99EEE237F2EB3DDC34D0A3200206B781DB8E87877CAC4886396F37CE4DBB985D631BEC84499CC747C5663BDBD1C4FCE08ECD63A2EAC1B701F8015C; UserAuthUCenter=eyJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiIsIng1dCI6ImtfS1JMRk9TM1dPYk9ZcUN2ZEpKS2I3ZGo1TSIsImtpZCI6ImtfS1JMRk9TM1dPYk9ZcUN2ZEpKS2I3ZGo1TSJ9.eyJpc3MiOiJodHRwOi8vaWQyLmVsYW5lLmNvbS9jb3JlIiwiYXVkIjoiaHR0cDovL2lkMi5lbGFuZS5jb20vY29yZS9yZXNvdXJjZXMiLCJleHAiOjE2NDY2MTY5NTksIm5iZiI6MTYzODg0MDk1OSwiY2xpZW50X2lkIjoiSFlRX1Jlc19BbGxJbiIsInNjb3BlIjpbIm9wZW5pZCIsInByb2ZpbGUiLCJ1c2VyYXBpIl0sInN1YiI6IjYwMjFmMDdkNTQzODkxOWIiLCJhdXRoX3RpbWUiOjE2Mzg4NDA5NTksImlkcCI6Imlkc3J2IiwiYW1yIjpbInBhc3N3b3JkIl19.kGzduSNqknCKCnab9kczf6EBCoqeAgWAvUgwSQH-z3uD3PK5cZQRMfip9ZkDQ2laSB5vnpnuY7y3LtyMAag4iMOHMnX8Lqrj5S1pr2q6Pdj6bsyJL9tjMj6h2wT1l06VFkXFSroRCHIzrL3z_Nxu5WDJQkeIgkPCt_Yt2LRnimDOsITDOydn2zTreYvYeJQmwFGGO84GBOPTc0B_CbGr81gg0M0BPaTzLVneKRaHeh1oSA4s10CNdDJ-NC5K2jdehjNxuVK0qQw6iSs3LNv-3nbfO0dTSjXJPKROIAhH8pZEUZ1LFgVH5Clx2tPBsu2IeaHam1tF8xWwovdCcJLXew; "
                  "shipxy_v3_history_serch=s%u2606JIA%20XING%2069%u2606413555250%u2606100%u2606MMSI%uFF1A413555250%7Cs%u2606NS12%2090%u2606412000012%u2606%u2606MMSI%uFF1A412000012%7Cs%u2606BAOKEQI12%2090%u260641200012%u2606%u2606MMSI%uFF1A41200012%7Cs%u2606SHANGHAI%u2606209316000%u260670%u2606MMSI%uFF1A209316000%7Cp%u2606%u821F%u5C71%u260623224%u260630.007483%u2606122.099517%7Cp%u2606%u5E7F%u5DDE%u260627464%u260622.844311%u2606113.571136%7Cp%u2606%u4E0A%u6D77%u260627999%u260631.379783%u2606121.584133%7Cs%u2606SHANGHAI%u2606209316000%u260670%u2606IMO%uFF1A9220988; Hm_lpvt_adc1d4b64be85a31d37dd5e88526cc47=1638845354; SERVERID=9d5fe2e75816d586ec8202c8a7e5ae11|1638845353|1638837297"
    }
    # timeout keeps a dead/slow endpoint from hanging the whole crawl
    repose = requests.get(reqUrl, params=reqParam, headers=reqHeader, timeout=30)
    if repose.status_code != 200:
        print(repose.text)
        return None
    data = repose.json().get('data')
    if not data:
        # missing or empty 'data' — nothing useful to return
        return None
    return data[0]


def build_insert_sql(ship):
    """Build the INSERT statement for one ship record (module-internal helper).

    NOTE(security): values are still spliced directly into the SQL string,
    matching the original behavior, and quoted text fields are NOT escaped —
    a ship name containing a single quote will break the statement.  The real
    fix is a parameterized query through DBUtil, but DBUtil's API is not
    visible from this file, so the string form is kept.
    """
    columns = ("source", "mmsi", "shipid", "tradetype", "type", "imo", "name",
               "matchtype", "cnname", "callsign", "length", "width", "left",
               "trail", "draught", "dest", "eta", "laststa", "lon", "lat",
               "sog", "cog", "hdg", "rot", "navistatus", "lastdyn",
               "satelliteutc")
    # text columns that the original statement wrapped in single quotes
    quoted = {"shipid", "name", "cnname", "callsign", "dest", "eta"}
    # mmsi/imo arrive as numeric strings and were concatenated unquoted
    raw = {"mmsi", "imo"}
    parts = []
    for col in columns:
        value = ship[col]
        if col in quoted:
            parts.append("'" + value + "'")
        elif col in raw:
            parts.append(value)
        else:
            parts.append(str(value))
    col_list = ", ".join("`%s`" % c for c in columns)
    return ("INSERT INTO `ship_spider`.`ship_info`(" + col_list
            + ") VALUES ( " + ",".join(parts) + " )")


def save_data(ships):
    """Insert the given ship records into ``ship_spider.ship_info``.

    Args:
        ships: list of ship dicts as returned by ``get_ship_info``.
            (The parameter used to be called ``list``, shadowing the builtin;
            the only caller passes it positionally, so the rename is safe.)
    """
    dbUtil = SqlUtil.DBUtil()
    dbUtil.get_conn()
    try:
        for ship in ships:
            insertSql = build_insert_sql(ship)
            print("插入数据到数据库中", ship)
            dbUtil.insert(insertSql)
    finally:
        # release the connection even if an insert raises
        dbUtil.close_conn()


if __name__ == '__main__':
    # Crawl every port listed in port.json, fetch each ship's detail record,
    # and persist the results port by port.
    token = input("请输入https://www.myships.com/index.html 中的用户token：")
    # with-statement closes the file deterministically (the original leaked
    # the handle)
    with open('port.json', 'r', encoding='UTF-8') as f:
        ports = json.load(f)
    for port in ports:
        portId = port['portId']
        # detail records collected for this port
        ships = []
        for ship in get_ships(token, portId):
            shipInfo = None
            try:
                shipInfo = get_ship_info(ship['mmsi'])
            except Exception as e:
                # best-effort crawl: log the failure and continue with the
                # next ship instead of aborting the whole port
                print("网络爬虫异常:", str(e))
            if shipInfo is not None:
                ships.append(shipInfo)
        # persist this port's batch to the database
        save_data(ships)
