import base64
import json
import time
from concurrent.futures import ThreadPoolExecutor, as_completed

import cv2
import numpy as np
import records
import requests
from fdfs_client.client import *



def readfile(imgId):
    """Download an image from FastDFS by its file id and decode it.

    Parameters:
        imgId: FastDFS file id (str) of the stored image.

    Returns:
        The decoded BGR image as a numpy ndarray, or None if OpenCV
        cannot decode the downloaded bytes.
    """
    # NOTE(review): config path is hard-coded; consider parameterizing it.
    tracker_path = get_tracker_conf('/home/bq/fdfs_client.conf')
    client = Fdfs_client(tracker_path)
    ret_read = client.download_to_buffer(imgId.encode())
    # np.fromstring is deprecated for binary input; frombuffer is the
    # supported (zero-copy) equivalent.
    nparr = np.frombuffer(ret_read['Content'], np.uint8)
    # The original built nparr and then discarded it; decode and return the
    # image so callers can actually use the downloaded content.
    return cv2.imdecode(nparr, cv2.IMREAD_COLOR)

def http_post(url, json_data):
    """POST *json_data* (serialized to JSON) to *url* and return the parsed
    JSON response body.

    Parameters:
        url: target endpoint URL.
        json_data: JSON-serializable payload.

    Returns:
        The response body parsed as JSON (dict/list).

    Side effects:
        Prints the parsed response; sends one extra throwaway request.
    """
    json_data = json.dumps(json_data)
    json_data_dummy = json.dumps({'a': 'b'})
    # requests.Session() is the documented spelling; session() is a legacy alias.
    with requests.Session() as s:
        # NOTE(review): Session has no `keep_alive` attribute — this line is a
        # no-op in requests; the Connection: close header below does the work.
        s.keep_alive = False
        headers = {'Content-Type': 'application/json'}
        headers_tail = {'Connection': 'close'}
        response = s.post(url=url, data=json_data, headers=headers)
        # HACK: brief pause, then a throwaway request carrying
        # 'Connection: close' — presumably to force the server to drop the
        # keep-alive connection. TODO confirm this is still needed.
        time.sleep(0.01)
        s.post(url=url, data=json_data_dummy, headers=headers_tail)
        json_res = response.json()
        print(json_res)
    return json_res

def readfrombase64(imgstr):
    """Decode a base64-encoded image string into a BGR image array.

    Parameters:
        imgstr: base64 text (str) of encoded image bytes (e.g. JPEG/PNG).

    Returns:
        The decoded image as a numpy ndarray, or None if OpenCV cannot
        decode the buffer.
    """
    imgData = base64.decodebytes(imgstr.encode())
    # np.fromstring is deprecated (removed for binary input in new NumPy);
    # frombuffer is the supported equivalent and avoids a copy.
    nparr = np.frombuffer(imgData, np.uint8)
    img_decode = cv2.imdecode(nparr, cv2.IMREAD_COLOR)
    return img_decode
def test():
    """Trivial worker used to exercise the thread pool; always returns "ok"."""
    return "ok"
def pool():
    """Fan out 20 `test` calls across a 20-worker thread pool and time them.

    Side effects:
        Prints each task's result and the total elapsed time.
    """
    start = time.time()
    # Context manager guarantees the pool's threads are shut down even if a
    # task raises — the original never called executor.shutdown().
    with ThreadPoolExecutor(max_workers=20) as executor:
        all_task = [executor.submit(test) for _ in range(20)]
        for future in as_completed(all_task):
            print(future.result())
    end = time.time()
    print(f'time:{end - start}')

if __name__ == '__main__':
    # Smoke test: connect, pull every row of t, show the first row's image id.
    database = records.Database('mysql+pymysql://root')
    result = database.query('select * from t')
    print(result[0].imageid)