import random, re
import logging, datetime

logger = logging.getLogger(__name__)

# Date stamp (YYYYMMDD) frozen at import time — a long-running process keeps
# the value from process start; confirm that is intended for log rotation.
dt = datetime.date.today().strftime('%Y%m%d')

# Log path templates; the two '{}' slots are filled by the caller.
factory_log_path = 'Icrawler9/fileStores/logs/factory/{}_{}.log'
dataplatform_log_path = 'Icrawler9/fileStores/logs/dataplatform/{}_{}.log'

impersonates = ["chrome99", "chrome101", "chrome110", "edge99", "edge101", "chrome107"]

# 型号 key特征
part_numbers = [re.sub('\s+', ' ', p).strip().lower() for p in ['Type number', 'parts no', 'part number', 'Product name', 'Product', 'Catalog #', 'Part #', '型号']]
# 封装 key特征
packages = [re.sub('\s+', ' ', p).strip().lower() for p in ['Pkg. Type', 'package type', 'package', '封装']]
# 包装 key特征
Packagings = [re.sub('\s+', ' ', p).strip().lower() for p in ['Packing', 'Packaging Method', 'Carrier Type', '包装', '包装方式', 'packaging']]
# 描述 key特征
descriptions = [re.sub('\s+', ' ', p).strip().lower() for p in ['Part Description', 'description', '描述']]
# 产品状态 key特征
product_status = [re.sub('\s+', ' ', p).strip().lower() for p in ['status', 'product status', '产品状态']]
# 系列 key特征
series = [re.sub('\s+', ' ', p).strip().lower() for p in ['series', 'Product Range', '系列']]
# ROHS key特征
rohs = [re.sub('\s+', ' ', p).strip().lower() for p in ['rohs', 'RoHS-Compliant']]
# 是否无铅 key特征
pbfrees = [re.sub('\s+', ' ', p).strip().lower() for p in ['pbfree', 'Pb-free', 'Lead (Pb)-Free']]

# Cell values that indicate RoHS compliance.
rohs_descs = ['1', 'yes', 'details', 'compliant', 'true', 'ecopack', 'rohsy', '○']

# Cell values that indicate RoHS non-compliance.
rohs_descs0 = ['0', 'no', 'in transition', 'uncompliant', 'noncompliant', 'false', 'rohn', 'non-compliant']

# Cell values describing lead-free status.
pbfree_descs = ['yes', 'leaded', 'lead free', 'lead-free']

# Wiki-style section headings: overview / features / applications.
overviews_ = ['概述', '产品介绍', 'overview', 'overviews']
features_ = ['特点', '特性', '产品特点', 'feature', 'features', 'featuredlist', 'features&benefits']
applications_ = ['应用', '用途', '应用场景', 'application', 'applications']

# Placeholder/garbage values in list_json that should be replaced with ''.
# Deduplicated: the original repeated '-' (x3) and '--' (x2), which added
# nothing for membership tests or replacement passes.
REP_VUL = ['---', '--', '-', ' ', '—', '_', '  ', '–', '*', '**',
           '--------', '---------', '℃ to ℃', 'NON-PoE', '无', '/', '\\']

# Spiders that bypass the proxy / random user-agent middlewares (none yet).
dnot_use_proxy_spiders = []
dnot_use_randomuser_spiders = []

# Item class names allowed through the json.dumps pipeline.
RESOURCE_CLASSNAMES = [
    'DatasItem',
    'PartsItem',
    'SiemensPartsItem',
    'PinsformItem',
    'MouserDetailItem',
]

# Item class names allowed through the datafilter pipeline.
RESOURCE_PARTS_CLASSNAMES = ['PartsItem', 'SiemensPartsItem', 'DatasItem']
RESOURCE_PINS_CLASSNAMES = ['PinsformItem', 'Datasheet5PinsItem', 'SemieePinsItem']

# Rotating short-lived proxy endpoint (credentials currently blank).
_PROXY_ENDPOINT = "http://http-short.xiaoxiangdaili.com:10010"

proxy = {
    'PROXY_USER': "",
    'PROXY_PASS': "",
    'PROXY_SERVER': _PROXY_ENDPOINT,
    'PROXY_SERVERS': [_PROXY_ENDPOINT],
}

# Baseline Scrapy settings shared by every crawler profile below.
basic_setting = {
    'RETRY_ENABLED': True,
    'RETRY_TIMES': 5,
    'DOWNLOAD_TIMEOUT': 300,
    'RETRY_HTTP_CODES': [],
    # 404 is allowed through so spiders can handle "not found" pages themselves.
    'HTTPERROR_ALLOWED_CODES': [200, 404],
    'PROXY_USER': proxy['PROXY_USER'],
    'PROXY_PASS': proxy['PROXY_PASS'],
    'PROXY_SERVER': proxy['PROXY_SERVER'],
    'PROXY_SERVERS': proxy['PROXY_SERVERS'],
    'LOG_LEVEL': 'ERROR',
}

# factory profile: baseline settings + dedicated Redis DB and a higher retry budget.
# NOTE(review): dict() is a shallow copy — list values (RETRY_HTTP_CODES,
# PROXY_SERVERS) stay shared with basic_setting; mutate them with care.
factory_settings = dict(basic_setting)
# NOTE(review): credentials embedded in source — consider env vars / a secret store.
factory_settings['REDIS_URL'] = 'redis://root:aabb8899@127.0.0.1:6379/12'
factory_settings['RETRY_TIMES'] = 10
# All factory pipelines are currently disabled; candidates kept for reference:
factory_settings['ITEM_PIPELINES'] = {
    # 'Icrawler9.pipelines.S3Pipeline.S3FilesPipeline': 100,
    # 'Icrawler9.pipelines.RedisPipelines.DataplatformDupPipeline': 200,
    # 'Icrawler9.pipelines.dbPipelines.mongodbPipeline': 400,
}


# dataplatform profile: scrapy-redis scheduler/dupefilter with Mongo + Redis
# item pipelines and higher concurrency.
dataplatform_settings = dict(basic_setting)  # shallow copy of the baseline
dataplatform_settings['CONCURRENT_REQUESTS'] = 100
dataplatform_settings['DUPEFILTER_CLASS'] = "scrapy_redis.dupefilter.RFPDupeFilter"
dataplatform_settings['SCHEDULER'] = "scrapy_redis.scheduler.Scheduler"
# 12*60*60 = 43200 — reads like a duration in seconds despite the *_TIMES
# name; TODO confirm the intended unit against the consumer of this setting.
dataplatform_settings['REDIS_RETRY_TIMES'] = 12 * 60 * 60
# Keep the Redis request queue across runs.
dataplatform_settings['SCHEDULER_PERSIST'] = True
dataplatform_settings['ITEM_PIPELINES'] = {
    'Icrawler9.pipelines.dbPipelines.mongodbPipeline': 400,
    'scrapy_redis.pipelines.RedisPipeline': 500,
}


# pintopin profile: high-concurrency scrapy-redis crawl with aggressive
# timeouts and proxy/random-UA downloader middlewares.
pintopin_settings = dict(basic_setting)  # shallow copy of the baseline
pintopin_settings['DUPEFILTER_CLASS'] = "scrapy_redis.dupefilter.RFPDupeFilter"
pintopin_settings['SCHEDULER'] = "scrapy_redis.scheduler.Scheduler"
# Keep the Redis request queue across runs.
pintopin_settings['SCHEDULER_PERSIST'] = True
pintopin_settings['CONCURRENT_REQUESTS'] = 256
pintopin_settings['CONCURRENT_REQUESTS_PER_DOMAIN'] = 256
pintopin_settings['RETRY_TIMES'] = 3
# Fail fast at this concurrency level.
pintopin_settings['DOWNLOAD_TIMEOUT'] = 15
pintopin_settings['RETRY_HTTP_CODES'] = []
# Stricter than the baseline: only 200 responses reach the spider.
pintopin_settings['HTTPERROR_ALLOWED_CODES'] = [200]
pintopin_settings['ITEM_PIPELINES'] = {
    'Icrawler9.pipelines.dbPipelines.mongodbPipeline': 400,
}
pintopin_settings['DOWNLOADER_MIDDLEWARES'] = {
    'Icrawler9.middlewares.downloaderMiddlewares.RandomUserAgent': 666,
    'Icrawler9.middlewares.downloaderMiddlewares.ProxyMiddleware': 888,
}
# Fallback UA; RandomUserAgent above normally overrides it.
pintopin_settings['DEFAULT_REQUEST_HEADERS'] = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.0.0 Safari/537.36',
}

# scada profile: scrapy-redis scheduler on Redis DB 9 with Mongo + Redis
# item pipelines.
scada_settings = dict(basic_setting)  # shallow copy of the baseline
# NOTE(review): credentials embedded in source — consider env vars / a secret store.
scada_settings['REDIS_URL'] = 'redis://root:aabb8899@127.0.0.1:6379/9'
scada_settings['DUPEFILTER_CLASS'] = "scrapy_redis.dupefilter.RFPDupeFilter"
scada_settings['SCHEDULER'] = "scrapy_redis.scheduler.Scheduler"
# 12*60*60 = 43200 — reads like seconds despite the *_TIMES name; TODO confirm.
scada_settings['REDIS_RETRY_TIMES'] = 12 * 60 * 60
# Keep the Redis request queue across runs.
scada_settings['SCHEDULER_PERSIST'] = True
scada_settings['ITEM_PIPELINES'] = {
    'Icrawler9.pipelines.dbPipelines.mongodbPipeline': 400,
    'scrapy_redis.pipelines.RedisPipeline': 500,
}

# "Hot search" (热搜) profile: plain Scrapy (no redis scheduler), MySQL item
# pipeline, custom downloader middlewares explicitly disabled.
resou_settings = dict(basic_setting)  # shallow copy of the baseline
resou_settings['UABROWSER'] = 'chrome'
resou_settings['MYSQL_DATABASE'] = 'bd-crawler'
resou_settings['ITEM_PIPELINES'] = {
    'Icrawler9.pipelines.dbPipelines.MySQLPipeline': 400,
}
# Empty on purpose: no proxy / random-UA middleware for this profile.
resou_settings['DOWNLOADER_MIDDLEWARES'] = {}
resou_settings['DEFAULT_REQUEST_HEADERS'] = {
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36',
}


