import json
import re
import time
from typing import Tuple

import simplejson

from api.base import *
from common.logger import logger
from common.models import FileList, PathList, TreeList
from common.retcode import RetCode
from common.types import FileInfo, PathInfo, FolderTree

class FolderAction(BaseAction):
	"""File/folder operations: listing, info lookup, folder-tree walking,
	mkdir/rename and batch tasks (delete/move/copy)."""

	def __init__(self, sess_wp: SessionWrapper):
		super(FolderAction, self).__init__(sess_wp)

	def _get_more_page(self, resp: dict, r_path=False) -> Tuple[list, bool]:
		"""Handle one page of a possibly paginated response.

		:param resp:   decoded JSON page containing pageNum/pageSize/recordCount/data (and path)
		:param r_path: also return the response's 'path' field alongside 'data'
		:return:       (data or [data, path], done) — done is True when this is the last page
		"""
		done = resp['pageNum'] * resp['pageSize'] >= resp['recordCount']
		if r_path:
			return [resp['data'], resp['path']], done
		return resp['data'], done

	def get_file_list(self, fid) -> Tuple[FileList, PathList]:
		"""Fetch the file list of folder *fid*, following pagination.

		:param fid: folder id
		:return:    (FileList, PathList) — both are left empty on any error
		"""
		file_list = FileList()
		path_list = PathList()
		page = 1
		pages = []  # raw [data, path] pairs, one entry per fetched page
		url = self._host_url + "/v2/listFiles.action"
		while True:
			params = {
				"fileId": str(fid),
				"inGroupSpace": "false",
				"orderBy": "1",
				"order": "ASC",
				"pageNum": page,
				"pageSize": 60
			}
			resp = self._get(url, params=params)
			if not resp:
				logger.error(f"File list: {fid} network error!")
				return file_list, path_list
			try:
				resp = resp.json()
			except (json.JSONDecodeError, simplejson.errors.JSONDecodeError):
				# If folder fid was deleted the server still answers 200,
				# but the body is not JSON.
				logger.error(f"File list: {fid} not exist")
				return file_list, path_list
			if 'errorCode' in resp:
				logger.error(f"Get file: {resp}")
				return file_list, path_list
			data_, done = self._get_more_page(resp, r_path=True)
			pages.append(data_)
			if done:
				break
			page += 1
			time.sleep(0.5)  # a burst of requests may be rate-limited

		data = []
		path = []
		for data_ in pages:
			data.extend(data_[0])
			if not path:
				path = data_[1]  # the path should be identical on every page
		for item in data:
			file_list.append(FileInfo(
				name=item['fileName'],
				id=int(item['fileId']),
				pid=int(item['parentId']),
				ctime=item['createTime'],
				optime=item['lastOpTime'],
				size=item.get('fileSize', ''),
				ftype=item['fileType'],
				durl=item.get('downloadUrl', ''),
				isFolder=item['isFolder'],
				isStarred=item['isStarred']))
		for item in path:
			path_list.append(PathInfo(name=item['fileName'], id=int(item['fileId']),
									  isCoShare=item['isCoShare']))

		return file_list, path_list

	def get_file_info_by_id(self, fid) -> Tuple[int, FileInfo]:
		"""Get detailed info about a file or folder.

		:param fid: file/folder id
		:return:    (RetCode, FileInfo) — FileInfo is empty unless RetCode.SUCCESS
		"""
		url = self._host_url + "/v2/getFileInfo.action"
		resp = self._get(url, params={'fileId': fid})
		if not resp:
			return RetCode.NETWORK_ERROR, FileInfo()

		try:
			resp = resp.json()
		except (json.JSONDecodeError, simplejson.errors.JSONDecodeError):
			# A deleted/invalid id may yield 200 with a non-JSON body,
			# exactly like in get_file_list.
			logger.error(f"File info: {fid} not exist")
			return RetCode.FAILED, FileInfo()
		logger.debug(resp)
		if 'errorCode' in resp:
			logger.error(f"File info: {resp}")
			return RetCode.FAILED, FileInfo()

		# Known response keys:
		# createAccount     # createTime
		# fileId            # fileIdDigest
		# fileName          # fileSize
		# fileType          # isFolder
		# lastOpTime        # parentId
		# subFileCount
		return RetCode.SUCCESS, FileInfo(
			name=resp['fileName'],
			id=int(resp['fileId']),
			pid=int(resp['parentId']),
			ctime=resp['createTime'],
			optime=resp['lastOpTime'],
			size=resp.get('fileSize', ''),
			ftype=resp.get('fileType', ''),
			isFolder=resp['isFolder'],
			account=resp['createAccount'],
			durl=resp.get('downloadUrl', ''),
			count=resp.get('subFileCount', ''))

	def get_folder_nodes(self, fid=None, max_deep=5) -> TreeList:
		"""Collect sub-folder nodes recursively.

		:param fid:      folder id whose children to fetch; None means all folders
		:param max_deep: maximum recursion depth
		:return:         TreeList instance
		"""
		tree = TreeList()
		url = self._host_url + "/getObjectFolderNodes.action"

		def _get_sub_folder(fid, deep):
			# Build the payload per call instead of mutating a shared dict.
			post_data = {"orderBy": '1', 'order': 'ASC'}
			if fid:
				post_data["id"] = str(fid)
			# Presumably no folder holds more than 500 sub-folders — TODO confirm
			params = {'pageNum': 1, 'pageSize': 500}
			resp = self._post(url, params=params, data=post_data)
			if not resp:
				return
			for folder in resp.json():
				name = folder['name']
				id_ = int(folder['id'])
				pid = int(folder['pId']) if 'pId' in folder else ''
				is_parent = folder['isParent']  # str
				tree.append(FolderTree(name=name, id=id_, pid=pid,
									   isParent=is_parent), repeat=False)
				logger.debug(
					f"Sub Folder: {name}, {id_}, {pid}, {is_parent}")
				if deep < max_deep:
					_get_sub_folder(id_, deep + 1)

		_get_sub_folder(fid, 1)
		logger.debug(f"Sub Folder Tree len: {len(tree)}")
		return tree

	def mkdir(self, parent_id, fname):
		"""Create a folder; if one with that name exists, its id is returned.

		:return: (RetCode.SUCCESS, fileId) on success, a bare RetCode on failure.
		         NOTE(review): asymmetric return shape kept for backward
		         compatibility — callers must check before unpacking.
		"""
		url = self._host_url + '/v2/createFolder.action'
		result = self._get(
			url, params={'parentId': str(parent_id), 'fileName': fname})
		if not result:
			logger.error("Mkdir: network error!")
			return RetCode.NETWORK_ERROR
		result = result.json()
		if 'fileId' in result:
			return RetCode.SUCCESS, result['fileId']
		logger.error(f"Mkdir: unknown error {result}")
		return RetCode.MKDIR_ERROR

	def rename(self, fid, fname):
		"""Rename a file or folder.

		:param fid:   file/folder id
		:param fname: new name
		:return:      RetCode status
		"""
		url = self._host_url + '/v2/renameFile.action'
		resp = self._get(url, params={'fileId': str(fid), 'fileName': fname})
		if not resp:
			logger.error("Rename: network error!")
			return RetCode.NETWORK_ERROR
		resp = resp.json()
		if 'success' in resp:
			return RetCode.SUCCESS
		logger.error(f"Rename:  unknown error {resp}, {fid}, {fname}")
		return RetCode.FAILED

	def _batch_task(self, file_info, action: str, target_id: str = '') -> int:
		"""Create a batch task and poll until it completes.

		:param file_info: FileInfo-like object (needs .id, .pid, .name, .isFolder)
		:param action:    RESTORE, DELETE, MOVE or COPY
		:param target_id: target folder id for MOVE/COPY
		:return:          RetCode status
		"""
		task_info = {
			"fileId": str(file_info.id),                # str
			"srcParentId": str(file_info.pid),          # str
			"fileName": file_info.name,                 # str
			"isFolder": 1 if file_info.isFolder else 0  # int
		}

		create_url = self._host_url + "/createBatchTask.action"
		post_data = {"type": action, "taskInfos": json.dumps([task_info, ])}
		if target_id:
			post_data.update({"targetFolderId": target_id})
		resp = self._post(create_url, data=post_data)
		if not resp:
			# Guard was missing: resp.text on a failed request raised AttributeError.
			logger.error("BatchTask: network error!")
			return RetCode.NETWORK_ERROR
		task_id = resp.text.strip('"').strip('\'')
		logger.debug(f"Text: {resp.text}, {task_id}, {action}, {target_id}")
		if not task_id:
			logger.debug(f"Batch_task: {resp.status_code}")
			return RetCode.FAILED

		def _check_task(task_id):
			# Poll the task state once. 4 means finished (see loop below);
			# 5 is a local sentinel meaning "state unknown, stop polling".
			check_url = self._host_url + '/checkBatchTask.action'
			post_data = {"type": action, "taskId": task_id}
			resp = self._post(check_url, data=post_data)
			if not resp:
				logger.debug("BatchTask[_check] Error!")
				return 5
			resp = resp.json()
			if 'taskStatus' in resp:
				return resp['taskStatus']
			logger.debug(f"BatchTask[_check]: {post_data},{task_id},{resp}")
			return 5

		task_status = 0
		while task_status != 4:  # 4 == task finished
			time.sleep(0.5)
			task_status = _check_task(task_id)
			if task_status == 5:
				# The sentinel never matched the `!= 4` exit condition before,
				# so a persistent check failure spun forever. Bail out instead.
				return RetCode.FAILED
		return RetCode.SUCCESS

	def delete_by_id(self, fid):
		"""Delete a file or folder by id."""
		code, infos = self.get_file_info_by_id(fid)
		if code != RetCode.SUCCESS:
			logger.error(f"Delete by id: get file's {fid} details failed!")
			return code

		return self._batch_task(infos, 'DELETE')

	def move_file(self, info, target_id):
		"""Move a file or folder into folder *target_id*."""
		return self._batch_task(info, 'MOVE', str(target_id))

	def copy_file(self, fid):
		"""Copy a file or folder by id."""
		code, infos = self.get_file_info_by_id(fid)
		if code != RetCode.SUCCESS:
			logger.error(f"Copy by id: get file's {fid} details failed!")
			return code

		return self._batch_task(infos, 'COPY')

	