import pandas as pd
import csv
import json
from utils.common.logger import log


def separateBigCSV(infile: str, rowSize=5000, outTemplate='chunk{}.csv'):
    """Split a large CSV file into smaller CSV files of at most ``rowSize`` rows.

    Each output file keeps the header row. Output names are produced by
    formatting the chunk index into ``outTemplate`` (default: ``chunk0.csv``,
    ``chunk1.csv``, ... in the current working directory, preserving the
    original behavior).

    :param infile: path of the CSV file to split
    :param rowSize: maximum number of data rows per output chunk
    :param outTemplate: ``str.format`` template for output paths; ``{}`` is
        replaced with the zero-based chunk index
    :return: list of paths actually written (empty if reading failed)
    """
    written = []
    try:
        for i, chunk in enumerate(pd.read_csv(infile, chunksize=rowSize)):
            outPath = outTemplate.format(i)
            chunk.to_csv(outPath, index=False)
            written.append(outPath)
    except Exception as e:
        # best-effort: log and return whatever chunks were completed
        log.logger.error(e)
    return written


def csv_to_json(csvFilePath: str, jsonFilePath: str):
    """Convert a CSV file into a JSON file containing an array of row objects.

    Each CSV row becomes a dict keyed by the header row; all values are
    strings (csv.DictReader semantics). The JSON is written pretty-printed
    (indent=4) without ASCII-escaping non-ASCII characters.

    :param csvFilePath: path of the input CSV file (read as UTF-8)
    :param jsonFilePath: path of the output JSON file (written as UTF-8)

    Errors are logged rather than raised. If reading the CSV fails, the
    output file is NOT touched (previously an empty ``[]`` was written,
    clobbering any existing output).
    """
    jsonArray = []

    # read csv file
    try:
        with open(csvFilePath, encoding='utf-8') as csvf:
            # load csv file data using csv library's dictionary reader
            csvReader = csv.DictReader(csvf)
            # convert each csv row into a python dict
            for row in csvReader:
                jsonArray.append(row)
    except Exception as read_error:
        log.logger.error(read_error)
        # bail out: do not overwrite the destination with an empty array
        return

    # serialize the rows directly to the output file
    try:
        with open(jsonFilePath, 'w', encoding='utf-8') as jsonf:
            json.dump(jsonArray, jsonf, indent=4, ensure_ascii=False)
    except Exception as write_error:
        log.logger.error(write_error)


def saveJson(data: list, filepath: str):
    """Write ``data`` to ``filepath`` as pretty-printed (indent=4) UTF-8 JSON,
    leaving non-ASCII characters unescaped."""
    with open(filepath, "w", encoding="UTF-8") as fp:
        json.dump(data, fp, indent=4, ensure_ascii=False)
