package framework.storage;

import com.amazonaws.ClientConfiguration;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSCredentialsProvider;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3Client;
import com.amazonaws.services.s3.model.*;
import framework.config.S3Config;
import lombok.Getter;

import java.io.*;
import java.util.ArrayList;
import java.util.List;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;

/**
 * {@link FileStorage} implementation backed by Amazon S3 (or any S3-compatible
 * object store).
 *
 * <p>All objects live in the single bucket given by {@link S3Config#getBucket()}.
 * When compression is requested the content is gzip-compressed and the object
 * key gets a {@code .gz} suffix. Uploads whose size exceeds
 * {@link S3Config#getSaveMemoryMax()} bytes switch to S3 multipart upload so
 * the whole payload is never held in memory at once.
 */
public class FileStorageS3 implements FileStorage {
    /** Chunk size used when buffering streams in memory (1 MB). */
    private static final int BUFFER_SIZE = 1024 * 1024;
    /** S3 rejects multipart parts smaller than 5 MB (except the final part). */
    private static final int MIN_PART_SIZE = 5 * 1024 * 1024;
    /** Suffix appended to object keys holding gzip-compressed content. */
    private static final String GZIP_SUFFIX = ".gz";

    @Getter
    private final S3Config config;

    public FileStorageS3(S3Config config) {
        this.config = config;
    }

    /**
     * Returns the bucket all operations target.
     *
     * <p>Subclasses may override to redirect operations to another bucket;
     * every method in this class goes through this accessor.
     *
     * @return the configured bucket name
     */
    protected String getBucket() {
        return this.config.getBucket();
    }

    @Override
    public String[] compressionPathStarts() {
        return this.config.getCompressPathStarts();
    }

    /**
     * Builds an S3 client from the current configuration.
     *
     * <p>NOTE(review): a fresh client is constructed on every call; AWS SDK
     * clients are thread-safe and designed to be reused, so hot paths may want
     * a cached instance. Kept per-call here because this method is protected
     * and subclasses may override it.
     *
     * @return a configured {@link AmazonS3} client
     */
    protected AmazonS3 s3() {
        ClientConfiguration clientConfig = new ClientConfiguration();
        AwsClientBuilder.EndpointConfiguration endpointConfig =
                new AwsClientBuilder.EndpointConfiguration(this.config.getEndPoint(), this.config.getRegion());
        AWSCredentials credentials = new BasicAWSCredentials(this.config.getAccessKey(), this.config.getSecretKey());
        return AmazonS3Client.builder()
                .withEndpointConfiguration(endpointConfig)
                .withClientConfiguration(clientConfig)
                .withCredentials(new AWSStaticCredentialsProvider(credentials))
                .disableChunkedEncoding()
                .withPathStyleAccessEnabled(this.config.getPathStyle())
                .build();
    }

    /** Appends the gzip suffix to the object key when compression is in effect. */
    private static String resolveKey(String filePath, boolean compression) {
        return compression ? filePath + GZIP_SUFFIX : filePath;
    }

    /**
     * Reads the configured multipart part size and validates it against the
     * S3 minimum.
     *
     * @return the validated part size in bytes
     * @throws IOException if the configured part size is below 5 MB
     */
    private int requirePartSize() throws IOException {
        int partSize = this.config.getPartSize();
        if (partSize < MIN_PART_SIZE) {
            throw new IOException("config sys.s3.partSize must be at least 5MB");
        }
        return partSize;
    }

    /**
     * Uploads one multipart part built from {@code bytes} and returns its ETag.
     *
     * @throws IOException on upload failure
     */
    private static PartETag putPart(AmazonS3 s3Client, String bucketName, String key,
                                    String uploadId, int partNumber, byte[] bytes) throws IOException {
        try (ByteArrayInputStream body = new ByteArrayInputStream(bytes)) {
            UploadPartRequest request = new UploadPartRequest()
                    .withBucketName(bucketName)
                    .withKey(key)
                    .withUploadId(uploadId)
                    .withPartNumber(partNumber)
                    .withPartSize(bytes.length)
                    .withInputStream(body);
            return s3Client.uploadPart(request).getPartETag();
        }
    }

    /**
     * Stores the stream under {@code filePath}, optionally gzip-compressed.
     *
     * <p>Content is first buffered in memory; once more than
     * {@link S3Config#getSaveMemoryMax()} bytes have been read, the upload
     * switches to S3 multipart mode and the remainder is streamed.
     *
     * @param inputStream source data (not closed by this method)
     * @param filePath    object key; {@code .gz} is appended when compressing
     * @param compression whether to gzip the content before storing
     * @return bytes written for the in-memory path (the compressed size when
     *         {@code compression} is on), or the raw byte count for the
     *         multipart paths — NOTE(review): the two compression paths
     *         disagree on this; confirm which semantics callers rely on
     * @throws IOException on read or upload failure
     */
    @Override
    public long save(InputStream inputStream, String filePath, boolean compression) throws IOException {
        String key = resolveKey(filePath, compression);
        int maxMemory = this.config.getSaveMemoryMax();
        byte[] buffer = new byte[BUFFER_SIZE];
        try (ByteArrayOutputStream memory = new ByteArrayOutputStream(BUFFER_SIZE)) {
            int bytesRead;
            int total = 0;
            while ((bytesRead = inputStream.read(buffer)) != -1) {
                memory.write(buffer, 0, bytesRead);
                total += bytesRead;
                // Too large for memory: hand the buffered prefix plus the
                // rest of the stream over to multipart upload.
                if (total > maxMemory) {
                    return compression
                            ? this.multipartPutCompression(memory.toByteArray(), inputStream, key)
                            : this.multipartPut(memory.toByteArray(), inputStream, key);
                }
            }

            byte[] bytes = memory.toByteArray();
            if (compression) {
                memory.reset();
                try (GZIPOutputStream gzip = new GZIPOutputStream(memory)) {
                    gzip.write(bytes);
                    gzip.finish(); // flush the gzip trailer before snapshotting
                    bytes = memory.toByteArray();
                }
            }

            try (InputStream body = new ByteArrayInputStream(bytes)) {
                ObjectMetadata objectMetadata = new ObjectMetadata();
                objectMetadata.setContentLength(bytes.length);
                s3().putObject(this.getBucket(), key, body, objectMetadata);
            }
            return bytes.length;
        }
    }

    /**
     * Opens a read stream over the stored object, transparently decompressing
     * when {@code compression} is set.
     *
     * @throws FileNotFoundException if the object does not exist (HTTP 404)
     * @throws IOException           for any other S3 failure
     */
    @Override
    public InputStream getStream(String filePath, boolean compression) throws IOException {
        String key = resolveKey(filePath, compression);
        S3ObjectInputStream objectContent;
        try {
            S3Object object = s3().getObject(this.getBucket(), key);
            objectContent = object.getObjectContent();
        } catch (AmazonS3Exception exception) {
            if (exception.getStatusCode() == 404) {
                throw new FileNotFoundException("File not found (" + exception.getMessage() + ")");
            }
            // Previously reported "File not found" even for 403/500 etc.
            throw new IOException("S3 getObject failed for '" + key + "'", exception);
        }
        return compression ? new GZIPInputStream(objectContent) : objectContent;
    }

    /**
     * Deletes the stored object. S3 delete is idempotent: deleting a missing
     * key does not raise.
     */
    @Override
    public void delete(String filePath, boolean compression) throws IOException {
        s3().deleteObject(this.getBucket(), resolveKey(filePath, compression));
    }

    /**
     * Returns the stored object's size in bytes, or {@code -1} when the
     * object does not exist.
     *
     * @throws IOException for any S3 failure other than 404
     */
    @Override
    public long length(String filePath, boolean compression) throws IOException {
        String key = resolveKey(filePath, compression);
        try {
            // Was getConfig().getBucket(): that bypassed getBucket() and any
            // subclass override — now consistent with the other operations.
            ObjectMetadata metadata = s3().getObjectMetadata(this.getBucket(), key);
            return metadata.getContentLength();
        } catch (AmazonS3Exception exception) {
            if (exception.getStatusCode() == 404) {
                return -1L;
            }
            throw new IOException("S3 getObjectMetadata failed for '" + key + "'", exception);
        }
    }

    /**
     * Uncompressed multipart upload. {@code data} is the prefix already read
     * into memory by {@link #save}; the rest of {@code inputStream} is
     * streamed in parts of the configured size.
     *
     * @return total number of raw bytes consumed
     * @throws IOException on read/upload failure or part-size misconfiguration
     */
    private long multipartPut(byte[] data, InputStream inputStream, String filePath) throws IOException {
        // Validate before initiating so misconfiguration does not cost an
        // initiate + abort round trip.
        int partSize = requirePartSize();
        AmazonS3 s3Client = s3();
        String bucketName = this.getBucket();
        InitiateMultipartUploadResult initResponse = s3Client.initiateMultipartUpload(
                new InitiateMultipartUploadRequest(bucketName, filePath));
        List<PartETag> partETags = new ArrayList<>();

        int partNumber = 1;
        long total = 0;

        try {
            try (ByteArrayOutputStream part = new ByteArrayOutputStream(Math.max(partSize, data.length))) {
                // The first part begins with the prefix save() already read.
                total += data.length;
                part.write(data);

                byte[] buffer = new byte[partSize];
                boolean loop = true;
                while (loop) {
                    // Fill exactly one part's worth of bytes.
                    while (part.size() < partSize) {
                        int read = inputStream.read(buffer, 0, partSize - part.size());
                        if (read == -1) {
                            loop = false; // source exhausted; flush what we have
                            break;
                        }
                        part.write(buffer, 0, read);
                        total += read;
                    }

                    // Upload the part; a final empty part is skipped.
                    byte[] bytes = part.toByteArray();
                    if (bytes.length > 0) {
                        partETags.add(putPart(s3Client, bucketName, filePath,
                                initResponse.getUploadId(), partNumber++, bytes));
                    }
                    part.reset();
                }
            }

            s3Client.completeMultipartUpload(new CompleteMultipartUploadRequest(
                    bucketName, filePath, initResponse.getUploadId(), partETags));
        } catch (Exception exception) {
            // Abort so the bucket is not left holding orphaned parts.
            s3Client.abortMultipartUpload(
                    new AbortMultipartUploadRequest(bucketName, filePath, initResponse.getUploadId()));
            throw exception;
        }
        return total;
    }

    /**
     * Gzip-compressing multipart upload. {@code data} is the raw prefix
     * already read into memory by {@link #save}; the rest of
     * {@code inputStream} is compressed on the fly and uploaded whenever
     * roughly one part of compressed output has accumulated.
     *
     * @return total number of raw (uncompressed) bytes consumed
     * @throws IOException on read/upload failure or part-size misconfiguration
     */
    private long multipartPutCompression(byte[] data, InputStream inputStream, String filePath) throws IOException {
        int partSize = requirePartSize();
        AmazonS3 s3Client = s3();
        String bucketName = this.getBucket();
        InitiateMultipartUploadResult initResponse = s3Client.initiateMultipartUpload(
                new InitiateMultipartUploadRequest(bucketName, filePath));
        List<PartETag> partETags = new ArrayList<>();

        int partNumber = 1;
        long total = 0;

        try (ByteArrayOutputStream compressed = new ByteArrayOutputStream(Math.max(BUFFER_SIZE, data.length));
             GZIPOutputStream gzip = new GZIPOutputStream(compressed)) {

            // Compress the prefix save() already read.
            total += data.length;
            gzip.write(data);

            byte[] buffer = new byte[partSize];
            boolean loop = true;
            while (loop) {
                // Keep compressing until about one part of output exists.
                // (The deflater buffers internally, so size() lags slightly —
                // parts may run somewhat over partSize, which S3 accepts.)
                while (compressed.size() < partSize) {
                    int read = inputStream.read(buffer);
                    if (read == -1) {
                        loop = false;
                        gzip.finish(); // flush the trailer into the final part
                        break;
                    }
                    gzip.write(buffer, 0, read);
                    total += read;
                }

                byte[] bytes = compressed.toByteArray();
                if (bytes.length > 0) {
                    partETags.add(putPart(s3Client, bucketName, filePath,
                            initResponse.getUploadId(), partNumber++, bytes));
                }
                compressed.reset();
            }

            s3Client.completeMultipartUpload(new CompleteMultipartUploadRequest(
                    bucketName, filePath, initResponse.getUploadId(), partETags));
        } catch (Exception exception) {
            // Abort so the bucket is not left holding orphaned parts.
            s3Client.abortMultipartUpload(
                    new AbortMultipartUploadRequest(bucketName, filePath, initResponse.getUploadId()));
            throw exception;
        }
        return total;
    }
}
