import arrow
import datetime
from datetime import timedelta
import os
import sys
import pymongo
import yaml

# Datetime format strings shared across the project (two variants: dash- and
# slash-separated dates).
DATETIME_FMT = '%Y-%m-%d %H:%M:%S'
DATETIME_FMT_S = '%Y/%m/%d %H:%M:%S'

# Project root: two directory levels above this settings module.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Flask secret key; the fallback is a placeholder — set SECRET_KEY in the environment.
SECRET_KEY = os.environ.get("SECRET_KEY", 'x 刈刂')
# MONGO_URI = os.environ.get("MONGO_URI", 'mongodb://192.168.56.101:27018/cloudmonitor')
REDIS_URL = os.environ.get("REDIS_URL", 'redis://127.0.0.1:6379/0')
# Celery broker (Redis DB 10).
BROKER_URL = "redis://127.0.0.1:6379/10"

# Scanner API URL
SCANNER_SERVER = "http://200-chengdu-yjc-scan2.intra.jiasule.com:8082"
# NOTE(review): both entries are identical placeholders — confirm the real scanner IPs.
SCANNER_IP = ['1.2.3.4', '1.2.3.4']

# Setup the Flask-JWT-Extended extension
JWT_SECRET_KEY = os.environ.get("SECRET_KEY", 'x 刈刂')  # Change this!
JWT_ACCESS_TOKEN_EXPIRES = timedelta(days=7)
JWT_TOKEN_LOCATION = ["cookies"]
JWT_DECODE_LEEWAY = timedelta(hours=4)  # Allow 4 hours of leeway past token expiry
JWT_IDENTITY_CLAIM = 'id'
JWT_ACCESS_COOKIE_NAME = 'token'
JWT_COOKIE_CSRF_PROTECT = False

# Number of days treated as one month in billing/interval calculations.
DAYS_OF_ONE_MONTH = 30

# Yunaq cloud-shield domain validation endpoint; `credentials` is a
# base64-encoded basic-auth token.
YUNAQ_DOMAIN = {
    'api': 'https://vision.yunaq.com/api/inner/user_domain_valid/',
    'credentials': 'c2NhbnZtYXg6LzhwcWRYdHpMT3J1WnZIL1Bn'
}
SERVICE_API = ""

# Notification providers: SendCloud (email), Yunpian (SMS), WeChat.
# Empty values are expected to be filled in by settings.yaml (merged below).
SENDCLOUD = {'api_url': '', 'api_user': '', 'api_key': 'SZmBpyK6zrB2Enuj', 'fromuser': '', 'fromname': ''}
YUNPIAN = {'key': '', 'url': {'tpl': '', 'normal': ''}, 'host': 'yunpian.com', 'port': '80'}
WECHAT = {'api_url': '', 'auth_username': '', 'auth_password': ''}
SCANVHOST = "https://console.scanv.com"
# WeCom (WeChat Work) bot webhook used to notify when a risk is ignored.
IGNORE_RISK_WECOM = ""
# Whether to notify the WeChat group; usually not needed in test environments.
IGNORE_RISK_IS_NOTIFY = False

# IP geolocation database file paths (IPv4 / IPv6, .ipdb format).
IPV4_DATA = f'{BASE_DIR}/dataip/dataipv4.ipdb'
IPV6_DATA = f'{BASE_DIR}/dataip/dataipv6.ipdb'

# Seebug vulnerability-statistics API.
SEEBUG = {'url': "www.seebug.org/api/get_vul_statistics", 'token': ''}

# CAS single-sign-on configuration.
CAS_CONFIG = {
    "secret": "",
    "appid": "scanv",
    "base_url": "https://sso.yunaq.com",
    "is_verify_ssl": True
}

# Local UTC offset in hours; converted to a timedelta after the YAML merge below.
UTC_PREFIX = 8

DEBUG = False

# Directory where generated report files are written.
REPORT_FILE_DIR = "/var/log/report/"

# Default template for host (network) scans. The engine does not support
# templates yet, so 0 is always sent; may become user-selectable later.
NETWORK_SCAN_PROFILE = 0

# Asset task interval; presumably milliseconds (604800000 ms = 7 days) — TODO confirm.
ASSET_TASK_INTERVAL = 604800000

# Start date for reading rows from the celery_task table. Older rows have an
# incompatible format but may contain important data, so default to 2023-03-09.
CELERY_TASK_NEW_DATA_CREATED_AT = "2023-03-09T00:00:00.00000"

# Go-live time of the spider URL record feature (UTC).
SPIDER_URL_TIME_STR = "2023-11-16 03:11:12"
SPIDER_URL_TIME = datetime.datetime.strptime(SPIDER_URL_TIME_STR, '%Y-%m-%d %H:%M:%S')

# Output worker concurrency and the queue its tasks are read from.
OUTPUT_CONCURRENCY = 4
OUTPUT_QUEUE = "scanv_output_tasks"

# User IDs whose API-submitted tasks run at low priority.
API_USER_LIST = []

# Default User-Agent header for outgoing scan/crawl requests.
UA = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/27.0.1453.94 Safari/537.36"
# Package keys: the free tier and the package assigned to new users.
FREE_PKG_KEY = "scanv_free"
USER_INIT_PKG_KEY = "scanv_free"

# ClickHouse HTTP endpoint credentials (filled in by settings.yaml).
CLICKHOUSE_CONFIG = {
    "api_url": "",
    "user": "",
    "password": ""
}
# Site-portrait refresh interval; units not evident here — TODO confirm (hours?).
SITE_PORTRAIT_INTERVAL = 6

# Load the environment-specific settings.yaml (next to this module) and merge
# its keys into this module's namespace, overriding the defaults above.
try:
    _settings_path = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'settings.yaml')
    with open(_settings_path, encoding='utf-8') as f:
        # NOTE(review): FullLoader can construct Python-tagged objects; this is a
        # trusted local file, but prefer yaml.safe_load if no custom tags are used.
        config = yaml.load(f, Loader=yaml.FullLoader)
except FileNotFoundError:
    # Bug fix: the original caught ImportError, which open() never raises, so a
    # missing file crashed with an unhandled traceback instead of this message.
    print('You must have a conf/settings.yaml file', file=sys.stderr)
    sys.exit(1)

# yaml.load returns None for an empty file; guard so update() doesn't raise.
globals().update(config or {})

# Convert the configured UTC offset (hours) into a timedelta for arithmetic.
UTC_PREFIX = datetime.timedelta(hours=UTC_PREFIX)

# Queue on which availability-check jobs are executed.
AVAILABLE_JOB_QUEUE = 'job:cloud:1'

# Queue for host (network) vulnerability-scan jobs.
HOSTVUL_JOB_QUEUE = 'job.network:1'

