import gzip
import os

import boto3
import pymssql

# Connection to the SQL Server instance holding the power-consumption data.
# NOTE(review): credentials are hard-coded in source — move host/user/password
# to environment variables or a secrets store before this is shared further.
conn = pymssql.connect(
    host='daikinivrvtest.cobn9rpg5ow9.rds.cn-north-1.amazonaws.com.cn',
    database='tankinivrvIT',
    user='sa',
    password='Dkivrv2017!',
    charset='utf8',
)

# Specify the bucket name for S3
AWS_BUCKET_NAME = 'inlm-system-data-archive-776133769069-it'

# Get an S3 resource object (uses the default boto3 credential chain)
s3 = boto3.resource('s3')


def upload_file_to_s3(file_path):
    """Upload a local file to the archive S3 bucket, keyed by its basename.

    Args:
        file_path: Path to the local file to upload.
    """
    file_basename = os.path.basename(file_path)
    # Use a context manager so the handle is closed even if the upload
    # raises — the original opened the file and never closed it.
    with open(os.path.normpath(file_path), 'rb') as data:
        s3.Bucket(AWS_BUCKET_NAME).put_object(Key=file_basename, Body=data)


# Get a cursor that yields rows as dicts so columns are accessed by name.
cursor = conn.cursor(as_dict=True)
sql = "select t.term_mac,p.power_data,p.occur_dt,p.collect_time from power_consumption_collection p left join terminal_mst t on p.terminal_id = t.terminal_id where p.collect_time< '2018-09-01 00:00:00'"
cursor.execute(sql)

# Stream rows straight from the cursor instead of fetchall() — the original
# materialized the entire result set in memory before writing anything.
with gzip.open("2019-before.gz", "wb") as f:
    for data in cursor:
        term_mac = data.get('term_mac')
        power_data = str(data.get('power_data'))
        # datetime supports strftime-style specs via __format__ directly;
        # format() already returns str, so the extra str(...) was redundant.
        occur_dt = format(data.get('occur_dt'), '%Y-%m-%d %H:%M:%S')
        collect_time = format(data.get('collect_time'), '%Y-%m-%d %H:%M:%S')
        line = f'{term_mac},{power_data},{occur_dt},{collect_time}\r\n'
        f.write(line.encode('utf-8'))

# Release DB resources — the original leaked both the cursor and the connection.
cursor.close()
conn.close()
