package gridfs.dao;

import com.mongodb.client.MongoCollection;
import com.mongodb.client.model.UpdateOptions;
import gridfs.exception.NotFoundException;
import gridfs.exception.OutOfRangeException;
import java.util.Arrays;
import jnr.ffi.Pointer;
import org.bson.Document;
import org.bson.types.Binary;
import org.bson.types.ObjectId;

/**
 * Random-access writer for a GridFS file: copies bytes from a native buffer into
 * the fixed-size chunk documents of the chunks collection, overwriting full chunks
 * directly and merging into existing data for partial ones.
 *
 * <p>{@code final}: not designed for extension.
 */
final class GridFileWrite {

    private final MongoCollection<Document> filesCollection;
    private final MongoCollection<Document> chunkCollection;

    GridFileWrite(MongoCollection<Document> filesCollection, MongoCollection<Document> chunkCollection) {
        this.filesCollection = filesCollection;
        this.chunkCollection = chunkCollection;
    }

    /**
     * Writes {@code bufSize} bytes from {@code buffer} into the file {@code id},
     * starting at byte {@code offset} of the file.
     *
     * @param id      {@code _id} of the files-collection document
     * @param buffer  native buffer holding the bytes to write
     * @param offset  byte offset within the file at which the write starts
     * @param bufSize number of bytes to copy from {@code buffer}
     * @throws NotFoundException   if no files document exists for {@code id}
     * @throws OutOfRangeException if the write would run past the recorded file length
     */
    void exec(ObjectId id, Pointer buffer, long offset, long bufSize) {
        Document gridFile = filesCollection.find(new Document("_id", id)).first();
        if (null == gridFile) {
            throw new NotFoundException(String.format("file[id=%s] not found.", id));
        }

        long fileSize = gridFile.get("length", Number.class).longValue();
        int chunkSize = gridFile.get("chunkSize", Number.class).intValue();
        // length == 0 is treated as "size not fixed yet"; otherwise the write must
        // stay inside the recorded length.
        if (fileSize != 0 && offset + bufSize > fileSize) {
            throw new OutOfRangeException(String.format("file[id=%s] offset out of range, size=%s", id, fileSize));
        }
        if (bufSize <= 0) {
            return; // nothing to write
        }

        long uploadSize = offset + bufSize; // file position one past the last byte written

        // FIX: was "(int) uploadSize % chunkSize" — the cast bound to uploadSize alone,
        // truncating to int BEFORE the modulo and corrupting the result past 2 GiB.
        int lastChunkSize = (int) (uploadSize % chunkSize);
        // FIX: when uploadSize is an exact multiple of chunkSize the final chunk is a
        // FULL chunk; the old value of 0 made the loop write an empty byte[] and
        // silently drop the last chunk's data.
        if (lastChunkSize == 0) {
            lastChunkSize = chunkSize;
        }

        // ceil(uploadSize / chunkSize) without a branch
        int uploadChunkCount = (int) ((uploadSize + chunkSize - 1) / chunkSize);
        int lastChunkNr = uploadChunkCount - 1;

        int firstWriteNr = (int) (offset / chunkSize);     // chunk index the write starts in
        int firstWriteOffset = (int) (offset % chunkSize); // byte offset inside that chunk

        long position = 0; // bytes of the buffer consumed so far
        int currSize;      // bytes written into the current chunk
        for (int writeNr = firstWriteNr; position < bufSize; position += currSize, writeNr++) {
            // Only the first chunk can start mid-chunk; only the last can end short.
            int writeOffset = writeNr == firstWriteNr ? firstWriteOffset : 0;
            int chunkEnd = writeNr == lastChunkNr ? lastChunkSize : chunkSize;
            currSize = chunkEnd - writeOffset;

            byte[] bytesToWrite = new byte[currSize];
            buffer.get(position, bytesToWrite, 0, currSize);

            if (writeOffset == 0 && currSize == chunkSize) {
                // full chunk: overwrite without a read round-trip
                writeChunk(id, writeNr, currSize, bytesToWrite, chunkSize);
            } else {
                // partial chunk: merge into whatever is already stored
                writePartialChunk(id, writeNr, writeOffset, currSize, bytesToWrite, chunkSize);
            }
        }
    }

    /** Upserts chunk {@code nr} as a full {@code chunkSize}-byte document, zero-padded past {@code writeSize}. */
    private void writeChunk(ObjectId id, int nr, int writeSize, byte[] data, int chunkSize) {
        byte[] buf = new byte[chunkSize];
        System.arraycopy(data, 0, buf, 0, writeSize);
        upsertChunk(id, nr, buf);
    }

    /**
     * Read-modify-write of chunk {@code nr}: merges {@code writeSize} bytes at
     * {@code writeOffset} into the stored data, allocating a fresh zero-filled
     * chunk when none exists yet. Replaces the former duplicated
     * writePartialChunk/writeLastChunk pair.
     */
    private void writePartialChunk(ObjectId id, int nr, int writeOffset, int writeSize, byte[] data, int chunkSize) {
        Document chunk = chunkCollection.find(new Document("files_id", id).append("n", nr)).first();
        byte[] chunkData = null != chunk ? chunk.get("data", Binary.class).getData() : new byte[chunkSize];
        if (chunkData.length < writeOffset + writeSize) {
            // FIX: a previously-written final chunk may be shorter than the merge span;
            // the old code overran it (ArrayIndexOutOfBoundsException). Grow it instead.
            chunkData = Arrays.copyOf(chunkData, writeOffset + writeSize);
        }
        System.arraycopy(data, 0, chunkData, writeOffset, writeSize);
        upsertChunk(id, nr, chunkData);
    }

    /** Single upsert used by both write paths, keyed on (files_id, n). */
    private void upsertChunk(ObjectId id, int nr, byte[] data) {
        chunkCollection.updateOne(
                new Document("files_id", id).append("n", nr),
                new Document("$set", new Document("files_id", id).append("n", nr).append("data", new Binary(data))),
                new UpdateOptions().upsert(true)
        );
    }

}
