import {RemoteStorage} from "./storage";
import {IParams} from "./params";
import * as step from '@ali/flow-step-toolkit'
import path from 'path';
import fs from "fs";
import {CACHE_FILE_MAX_SIZE} from "./cache";

/**
 * Archives the configured cache directories into a pigz-compressed tarball and
 * uploads it to remote storage, unless the archive exceeds the size limit.
 *
 * @param remoteStorage   destination storage client (OSS-style `put`)
 * @param params          step parameters; `archivePath` is a space-separated
 *                        list of candidate cache directories, `fileID` the
 *                        remote object key, `workspace` the workspace root
 * @param artifactFullName local path where the tarball is written
 * @throws Error when running on a platform other than linux/darwin
 *               (tar/pigz pipeline is POSIX-only — see TODO below)
 */
export async function upload(remoteStorage: RemoteStorage, params: IParams, artifactFullName: string): Promise<void> {
    // Directories that must never be cached wholesale.
    const reservedCacheDirs = ["/", "/root", "/root/workspace"];
    let cachesPath = "";

    const archivePaths = params.archivePath.split(" ");
    for (const subPath of archivePaths) {
        // Skip empty entries produced by repeated/leading/trailing spaces:
        // path.resolve("") resolves to the CWD, which would silently cache
        // the entire working directory.
        if (subPath.length === 0) {
            continue;
        }

        if (subPath.includes("..")) {
            step.warnV1(`cache dir ${subPath} should not contain '..', skip`);
            continue;
        }

        if (subPath.length > 256) {
            step.warnV1(`cache dir ${subPath} length should be limited to 256 characters, skip`);
            continue;
        }

        const fpath = path.resolve(subPath);
        step.infoV1(fpath);

        if (reservedCacheDirs.includes(fpath)) {
            step.warnV1("cannot use reserved dirs '/', '/root', '/root/workspace' as cache dirs, skip");
            continue;
        }

        if (fs.existsSync(fpath)) {
            step.infoV1(`prepare cache file ${fpath} -> ${params.fileID}`);
            cachesPath += ` ${fpath}`;
        } else {
            step.infoV1(`cache folder ${fpath}  not exist, skip.`);
        }
    }

    if (cachesPath.length === 0) {
        step.infoV1("could not find any folder to cache, skip");
        return;
    }

    // TODO command tar and pigz only support in linux/darwin , use other tool in windows
    const platform = step.platform.getPlatform();
    if (platform !== step.platform.PLATFORM_LINUX && platform !== step.platform.PLATFORM_DARWIN) {
        throw new Error(`cache upload in ${platform} not implemented now`);
    }

    // NOTE(review): cachesPath and artifactFullName are interpolated unquoted
    // into a shell command. Paths containing spaces or shell metacharacters
    // would break the pipeline (or be unsafe if attacker-controlled). The
    // '..' filter above limits traversal but not metacharacters — confirm
    // archivePath is trusted, or build the tar argv without a shell.
    await step.exec.call("/bin/bash", ["-c", `tar --exclude="${params.workspace}/params" -cPf - ${cachesPath} | pigz -p 4 -k > ${artifactFullName}`], {
        listener: {
            stdoutLine: (line: string) => step.infoV1(line),
            stderrLine: (line: string) => step.warnV1(line),
        }
    });

    const cacheFileSize = fs.statSync(artifactFullName).size;
    step.infoV1(`${artifactFullName} size ${cacheFileSize}`);

    if (cacheFileSize > CACHE_FILE_MAX_SIZE) {
        // Oversized archives are dropped rather than uploaded; this is a
        // soft abort (info-level), not an error.
        step.infoV1(`${artifactFullName} size ${cacheFileSize} exceeding max size limit ${CACHE_FILE_MAX_SIZE}, abort cache uploading.`);
    } else {
        step.infoV1(`try to upload ${artifactFullName}`);

        // TODO ossutil cp -r --jobs 80 -f $PACKAGE_DIR/ oss://$BUCKET_NAME/$FILE_PARENT_ID --tagging "caches=true"
        // Tag the object so it can be identified/expired as cache data.
        const headers = {
            'x-oss-tagging': 'caches=true',
        };
        const result = await remoteStorage.put(params.fileID, artifactFullName, {headers});
        if (result.res.status === 200) {
            step.infoV1("upload cache file successfully");
        } else {
            step.warnV1("failed to upload cache file");
        }
    }
}