# -*- coding: utf-8 -*-
import socket
import socks
from urllib.parse import unquote, urlencode
import requests
from PIL import Image
from io import BytesIO
import sys, os
import json
import time
from multiprocessing import Pool, cpu_count
import urllib.parse
import itertools

from loguru import logger

requests.packages.urllib3.disable_warnings()


def jd(obj):
    """Dump *obj* as a JSON string, keeping non-ASCII characters unescaped."""
    text = json.dumps(obj, ensure_ascii=False)
    return text


class SP:
    """Small file-I/O helpers: read text files, parse JSON files, save text."""

    @staticmethod
    def read_file(file):
        """Return the full UTF-8 text content of *file*."""
        with open(file, 'r', encoding='utf-8') as f:
            return f.read()

    @staticmethod
    def trans_json_text_to_json(text_file_path):
        """Parse the UTF-8 JSON file at *text_file_path* and return the object.

        Uses json.load on the open handle instead of read() + json.loads.
        """
        with open(text_file_path, 'r', encoding='utf8') as f:
            return json.load(f)

    @staticmethod
    def save_text(file=None, text=None, mode='w'):
        """Write *text* to *file* (mode 'w' to overwrite, 'a' to append),
        creating any missing parent directories.

        Fixes: exist_ok=True removes the race between the old exists()
        check and makedirs(); the resolved absolute path is also the one
        opened (the original computed file_path but opened `file`).
        """
        file_path = os.path.abspath(file)
        os.makedirs(os.path.dirname(file_path), exist_ok=True)
        with open(file_path, mode=mode, encoding='utf8') as f:
            f.write(text)


# Number of camera headings sampled per coordinate (8 -> one shot every 45 degrees).
angle_num = 8
# Angular step in degrees between consecutive headings; used by Spider.angles.
ap = int(360 / angle_num)
# Default HTTP headers sent with the metadata requests.
headers = {
    'Accept': 'application/json, text/plain, */*',
    'Accept-Encoding': 'gzip, deflate',
    'Accept-Language': 'zh-CN,zh;q=0.9,en;q=0.8',
    'Connection': 'keep-alive',
    'Dnt': '1',
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/113.0.0.0 Safari/537.36'
}

# Each permanently failed download is appended here as one JSON line
# with keys img_url and pic_path (see Spider.download_pic).
dl_failed_file = 'dl_failed.txt'


class Spider:
    """Downloads Google Street View images for a list of coordinates.

    For every coordinate (lat, lon) it fetches one 640x640 image per
    heading in `angles`, saving to
    `Map_imgs/<distence>/<lat>_<lon>/<heading>.jpeg`.
    """

    pictures_dir = 'Map_imgs'
    api = 'https://maps.googleapis.com/maps/api/streetview?size=640x640&'
    google_map_api = 'https://maps.googleapis.com/maps/api/streetview'
    # NOTE(security): API key hard-coded in source — move it to an
    # environment variable or config file and rotate the exposed key.
    key = 'AIzaSyBlUJr1IlMi7AtbvJLkNG9r1m6awY8vguY'
    zbd_file = 'Taipei_0_150m.json'
    req_times_limit = 25000
    circle_angle = 360
    distence = '150m'
    # One heading every `ap` degrees around the full circle, as strings.
    angles = [str(int(i * ap)) for i in range(angle_num)]
    # Counters are only reliable in-process; across Pool workers the
    # totals are aggregated from return values (see run()).
    dl_succeed = 0
    dl_failed = 0

    @classmethod
    def get_zbds(cls, json_file):
        """Load coordinate points from *json_file*; each row's third
        field under 'data' is one (lat, lon) point."""
        data = SP.trans_json_text_to_json(json_file)['data']
        return [row[2] for row in data]

    @classmethod
    def dl_with_zbd(cls, zbd):
        """Download every configured heading for one coordinate.

        Returns (succeed, failed) counts. Bug fix: class-attribute
        counters incremented inside a Pool worker die with the worker,
        so the parent must aggregate these return values instead.
        """
        lat, lon = zbd
        succeed = failed = 0
        for heading in cls.angles:
            if cls.download_pic(lat, lon, heading=heading):
                succeed += 1
            else:
                failed += 1
        return succeed, failed

    @classmethod
    def download_pic(cls, lat, lon, heading):
        """Fetch one street-view image, retrying up to 3 times.

        Permanent failures are appended to `dl_failed_file` as a JSON
        line. Returns True on success, False on failure.
        """
        params = {
            'size': '640x640',
            'location': f'{lat},{lon}',
            'heading': f'{heading}',
            'key': cls.key
        }
        img_url = f'{cls.google_map_api}?{urlencode(params)}'
        pic_path = os.path.join(
            cls.pictures_dir,
            cls.distence,
            f"{lat}_{lon}",
            f"{heading}.jpeg"
        )
        # exist_ok avoids a race between an exists() check and makedirs().
        os.makedirs(os.path.dirname(pic_path), exist_ok=True)
        downloaded = any(cls.download(img_url, pic_path) for _ in range(3))
        if downloaded:
            cls.dl_succeed += 1
        else:
            dl_failed_str = jd(dict(
                img_url=img_url,
                pic_path=pic_path
            ))
            with open(dl_failed_file, 'a', encoding='utf-8') as f:
                f.write(dl_failed_str + '\n')
            cls.dl_failed += 1
        return downloaded

    @classmethod
    def download(cls, url, pic_path):
        """Download *url* to *pic_path*; True on success, False on error.

        Bug fix: the original wrote the body and reported success even
        for non-200 responses (saving an error page as the image), and
        never closed the streamed response. raise_for_status() plus a
        `with` block fixes both.
        """
        file_name = os.path.basename(pic_path)
        try:
            with requests.get(url, stream=True, verify=False, timeout=10) as r:
                r.raise_for_status()
                with open(pic_path, 'wb') as f:
                    f.write(r.content)
            logger.success(f'{file_name} 下载完成')
            return True
        except Exception as e:
            logger.warning(f'url: {unquote(url)} 下载报错：{e}')
            return False

    @classmethod
    def get_metadata(cls, lat, lon):
        """Query the street-view metadata endpoint; True if imagery exists.

        Bug fix: the original left `r.json()` commented out (so `data`
        was an undefined name) and called `exit()`, killing the whole
        process on every call.
        """
        metadata_api = 'https://maps.googleapis.com/maps/api/streetview/metadata'
        params = {
            'size': '640x640',
            'location': f'{lat},{lon}',
            'pitch': '015',
            'key': cls.key,
        }
        meta_url = f'{metadata_api}?{urlencode(params)}'
        logger.warning(f'meta_url: {meta_url}')
        r = requests.get(
            meta_url,
            headers=headers
        )
        data = r.json()
        return data.get('status') == 'OK'

    @classmethod
    def run(cls):
        """Download images for a batch of coordinates with a process pool."""
        zbds = cls.get_zbds(cls.zbd_file)
        # Day batches — switch the slice fed to the pool as needed:
        #   day 1: zbds[0:3000]   day 2: zbds[3000:6000]   day 3: zbds[6000:]
        batch = zbds[0:4]
        cc = 4  # or cpu_count()
        print(f"CPU内核数:{cc}")
        print(f'当前母进程: {os.getpid()}')
        pool = Pool(processes=cc)
        # Bug fix: counters bumped inside workers never reach the parent,
        # so the final report was always 0/0 — aggregate the (succeed,
        # failed) tuples each task returns instead.
        results = [pool.apply_async(func=cls.dl_with_zbd, args=(zbd,))
                   for zbd in batch]
        pool.close()
        pool.join()
        for res in results:
            succeed, failed = res.get()
            cls.dl_succeed += succeed
            cls.dl_failed += failed
        msg = rf'''
        ok,所有坐标点图片均已下载!
        成功 {cls.dl_succeed}个
        失败 {cls.dl_failed}个
        '''
        logger.success(msg)

    @classmethod
    def t(cls):
        """Smoke test: download all headings for the first coordinate only
        (runs in-process, so the class counters are accurate here)."""
        zbds = cls.get_zbds(cls.zbd_file)
        cls.dl_with_zbd(zbds[0])
        msg = rf'''
        ok,所有坐标点图片均已下载!
        成功 {cls.dl_succeed}个
        失败 {cls.dl_failed}个
        '''
        logger.success(msg)


if __name__ == '__main__':
    # Entry point: run the full batched download.
    # (Spider.t() is a single-coordinate smoke test.)
    Spider.run()
    # Spider.t()
