import 'dart:convert';
import 'dart:io';
import 'dart:typed_data';

import 'package:archive/archive.dart';
import 'package:dbio_utils/bam/record.dart';
import 'package:flutter/services.dart';

import '../generic_filehandle/generic_file_handle.dart';
import '../tabix/index_file.dart';

import 'bai.dart';
import '../tabix/csi.dart';
import '../tabix/chunk.dart';
import '../base/lru_cache.dart';
import '../tabix/util.dart';
import '../bgzf_filehandle/unzip.dart';
import 'sam.dart';

// BAM magic bytes "BAM\1" read as a little-endian int32 (0x014d4142).
// FIX: declared `const` — these are fixed format constants and were
// previously mutable `var`s. Names kept for existing callers.
const int BAM_MAGIC = 21840194;

// Maximum size of one BGZF block (64 KiB).
const int blockLen = 1 << 16;

/// Reader for BGZF-compressed BAM files, using a BAI/CSI index to locate
/// the byte ranges holding records for a genomic region.
class BamFile {
  // Optional callback used to normalize reference names from the header
  // (e.g. "chr1" -> "1"). May be left null; see _readRefSeqs.
  RenameRefSeq renameRefSeq;

  // Handle on the BGZF-compressed BAM data.
  GenericFileHandle bam;

  // Companion index (BAI or CSI) used for region queries.
  IndexFile index;

  // Guard rails against runaway fetches, in bytes.
  int chunkSizeLimit;
  int fetchSizeLimit;

  // Raw SAM header text; populated by getHeader().
  var header;

  // Cache of decompressed chunk slices, keyed by Chunk.toString().
  LruCache featureCache;

  // refName -> numeric BAM reference id, and the reverse lookup.
  Map chrToIndex;
  var indexToChr;

  BamFile({
    this.bam,
    this.index,
    this.fetchSizeLimit = 500000000,
    this.chunkSizeLimit = 300000000,
    int cacheSize = 50,
  }) {
    featureCache = LruCache(cacheSize);
  }

  /// Reads and parses the BAM header.
  ///
  /// Also populates [chrToIndex] / [indexToChr] from the reference-sequence
  /// dictionary that follows the header text. Returns the parsed SAM header.
  Future getHeader(Map params) async {
    Map opts = params;
    var indexData = await this.index.parse(params);
    // The index tells us where alignment data begins; add one full BGZF
    // block (65535) of slack so the whole header region is covered.
    var ret = indexData.firstDataLine != null
        ? indexData.firstDataLine.blockPosition + 65535
        : null;
    Uint8List bytes;
    if (ret != null) {
      bytes = await this.bam.read(0, ret + blockLen, 0);
      if (bytes == null || bytes.isEmpty) {
        throw Exception('Error reading header');
      }
    } else {
      // No hint available: fall back to reading the whole file.
      bytes = (await this.bam.readFile());
    }

    List<int> uncba = unzip(bytes);
    ByteData bd = ByteData.view(Uint8List.fromList(uncba).buffer);

    if (bd.getInt32(0, Endian.little) != BAM_MAGIC) {
      throw Exception('Not a BAM file');
    }
    int headLen = bd.getInt32(4, Endian.little);

    // FIX: the header text occupies bytes [8, 8 + headLen); the previous
    // `sublist(8, 9 + headLen)` read one byte too many (its own trailing
    // comment already said `8, 8 + headLen`).
    this.header = utf8.decode(uncba.sublist(8, 8 + headLen));
    Map map = await this._readRefSeqs(headLen + 8, 65535, opts);
    this.chrToIndex = map['chrToIndex'];
    this.indexToChr = map['indexToChr'];

    return parseHeaderText(this.header);
  }

  /// Reads the reference-sequence dictionary that follows the header text.
  ///
  /// The dictionary's total length is not stored in advance, so this reads a
  /// [refSeqBytes]-sized window and recursively doubles it until all `nRef`
  /// entries have been processed.
  _readRefSeqs(int start, int refSeqBytes, [Map opts]) async {
    if (start > refSeqBytes) {
      return this._readRefSeqs(start, refSeqBytes * 2, opts);
    }
    Uint8List bytes = await this.bam.read(0, refSeqBytes, 0);
    if (bytes == null || bytes.isEmpty) {
      throw Exception('Error reading header');
    }

    List<int> data = unzip(bytes);
    ByteData uncba = ByteData.view(Uint8List.fromList(data).buffer);
    int nRef = uncba.getInt32(start, Endian.little);
    int p = start + 4;
    Map chrToIndex = {};
    List<Map> indexToChr = [];
    for (int i = 0; i < nRef; i += 1) {
      abortBreakPoint(opts == null ? null : opts['signal']);
      int lName = uncba.getInt32(p, Endian.little);
      // l_name counts the trailing NUL terminator, hence the `- 1`.
      String refName = utf8.decode(data.sublist(p + 4, p + 4 + lName - 1));
      // FIX: renameRefSeq is never assigned in the constructor; calling it
      // unconditionally threw for every user that did not set it.
      if (this.renameRefSeq != null) {
        refName = this.renameRefSeq(refName);
      }
      int lRef = uncba.getInt32(p + lName + 4, Endian.little);

      chrToIndex[refName] = i;
      indexToChr.add({'refName': refName, 'length': lRef});

      p = p + 8 + lName;
      if (p > uncba.lengthInBytes) {
        // Ran past the buffer mid-dictionary: fetch a bigger window, retry.
        print('BAM header is very big.  Re-fetching ${refSeqBytes} bytes.');
        return this._readRefSeqs(start, refSeqBytes * 2, opts);
      }
    }
    return {'chrToIndex': chrToIndex, 'indexToChr': indexToChr};
  }

  /// Collects every record overlapping `chr:min-max` into a list.
  Future<List<BamRecord>> getRecordsForRange(
    String chr,
    int min,
    int max, [
    Map opts = const {
      'viewAsPairs': false,
      'pairAcrossChr': false,
      'maxInsertSize': 200000,
    },
  ]) async {
    List<BamRecord> records = [];
    await for (var record in this.streamRecordsForRange(chr, min, max, opts)) {
      records.add(record);
    }
    return records;
  }

  /// Streams the [BamRecord]s overlapping `chr:min-max` (min is 1-based).
  ///
  /// Throws if any single chunk exceeds [chunkSizeLimit] or the total
  /// exceeds [fetchSizeLimit].
  Stream<BamRecord> streamRecordsForRange(
    String chr,
    int min,
    int max, [
    Map opts = const {'viewAsPairs': false, 'pairAcrossChr': false, 'maxInsertSize': 200000},
  ]) async* {
    var chrId = this.chrToIndex != null ? this.chrToIndex[chr] : -1;
    List<Chunk> chunks;
    // FIX: chrId is null when chr is absent from the header dictionary; the
    // old `!(chrId >= 0)` crashed on the null comparison.
    if (chrId == null || chrId < 0) {
      chunks = [];
    } else {
      chunks = await this.index.blocksForRange(refName: chrId, start: min - 1, end: max, opts: opts);
      if (chunks == null) {
        throw Exception('Error in index fetch');
      }
    }

    for (int i = 0; i < chunks.length; i += 1) {
      abortBreakPoint(opts['signal']);
      int size = chunks[i].fetchedSize();
      if (size > this.chunkSizeLimit) {
        throw Exception('Too many BAM features. BAM chunk size ${size} bytes exceeds chunkSizeLimit of ${this.chunkSizeLimit}');
      }
    }

    // FIX: fold instead of reduce — reduce throws StateError when no chunks
    // matched (unknown refName / empty region).
    var totalSize = chunks.fold<num>(0, (a, s) => a + s.fetchedSize());
    if (totalSize > this.fetchSizeLimit) {
      throw Exception('data size of ${totalSize} bytes exceeded fetch size limit of ${this.fetchSizeLimit} bytes');
    }
    yield* this.fetchChunkFeatures(chunks, chrId, min, max, opts);
  }

  /// Decompresses [chunks] and yields the records inside chrId:min..max.
  /// When `opts['viewAsPairs']` is true, mate records outside the window are
  /// fetched and yielded afterwards.
  ///
  /// FIX: this was declared `async` (returning a Future) while its caller
  /// consumed it with `yield*`, so the filtered records were computed and
  /// then silently dropped — it is now a proper `async*` generator.
  Stream<BamRecord> fetchChunkFeatures(
    List<Chunk> chunks,
    int chrId,
    int min,
    int max, [
    Map opts,
  ]) async* {
    // Per-chunk in-range record lists, kept as futures so fetchPairs keeps
    // its original signature.
    List<Future<List<BamRecord>>> featPromises = [];
    var done = false;

    for (Chunk c in chunks) {
      ChunkSlice chunkSlice = await readChunk(c, opts);
      if (chunkSlice == null) {
        // readChunk returns null when the underlying read produced nothing.
        continue;
      }
      List<BamRecord> records = await this.readBamFeatures(
          chunkSlice.buffer, chunkSlice.cpositions, chunkSlice.dpositions, chunkSlice.chunk);
      List<BamRecord> recs = [];
      for (var feature in records) {
        if (feature.seq_id() == chrId) {
          if (feature.get('start') >= max) {
            // Records are coordinate-sorted: past the window end means done.
            done = true;
            break;
          } else if (feature.get('end') >= min) {
            // Overlaps the requested window.
            recs.add(feature);
          }
        }
      }
      featPromises.add(Future.value(recs));
      for (var feature in recs) {
        yield feature;
      }
      if (done) {
        break;
      }
    }

    checkAbortSignal(opts == null ? null : opts['signal']);
    // FIX: `if (opts['viewAsPairs'])` crashed on a null lookup; also the
    // fetched mates were previously discarded — now they are yielded.
    if (opts != null && opts['viewAsPairs'] == true) {
      var mates = await this.fetchPairs(chrId, featPromises, opts);
      for (var mate in mates) {
        yield mate;
      }
    }
  }

  /// Fetches mate records for paired-end reads whose mate lies outside the
  /// originally fetched chunks.
  ///
  /// [featPromises] are the per-chunk record lists already returned for the
  /// primary region. Returns only the newly fetched mate records.
  Future<List<BamRecord>> fetchPairs(int chrId, List<Future<List<BamRecord>>> featPromises, Map opts) async {
    Map<String, bool> unmatedPairs = {};
    Map<String, int> readIds = {};
    await Future.wait(
      featPromises.map((f) async {
        var ret = await f;
        Map<String, int> readNames = {};
        for (var i = 0; i < ret.length; i++) {
          var name = ret[i].name();
          var id = ret[i].id();
          // FIX: the old `if (readNames[name] != 0) readNames[name] = 0;`
          // reset the counter on EVERY occurrence (absent keys are null,
          // not 0), so every read always counted as 1 and looked unmated.
          readNames[name] = (readNames[name] ?? 0) + 1;
          readIds[id] = 1;
        }
        // A name seen exactly once in the window is missing its mate.
        readNames.forEach((k, v) {
          if (v == 1) {
            unmatedPairs[k] = true;
          }
        });
      }),
    );

    List<Future<List<Chunk>>> matePromises = [];
    await Future.wait(
      featPromises.map((f) async {
        var ret = await f;
        for (var i = 0; i < ret.length; i++) {
          var name = ret[i].name();
          // Fetch the mate's block when it is on this chromosome within
          // maxInsertSize, or when pairing across chromosomes is enabled.
          // FIX: `opts['pairAcrossChr']` may be null; compare `== true`.
          if (unmatedPairs[name] == true &&
              (opts['pairAcrossChr'] == true ||
                  (ret[i].get('_next_refid') == chrId &&
                      (ret[i].get('start') - ret[i].get('_next_pos')).abs() < (opts['maxInsertSize'] ?? 200000)))) {
            matePromises.add(
              this.index.blocksForRange(
                    refName: ret[i].get('_next_refid'),
                    start: ret[i].get('_next_pos'),
                    end: ret[i].get('_next_pos') + 1,
                    opts: opts,
                  ),
            );
          }
        }
      }),
    );

    var mateBlocks = await Future.wait(matePromises);
    List<Chunk> mateChunks = [];
    for (var i = 0; i < mateBlocks.length; i++) {
      mateChunks.addAll(mateBlocks[i]);
    }
    // De-duplicate: sort, then keep each chunk only when it differs from its
    // predecessor. FIX: the old `..where(...)` cascade built the filtered
    // lazy iterable and threw it away, so duplicates were never removed.
    mateChunks.sort();
    List<Chunk> uniqueChunks = [];
    for (var i = 0; i < mateChunks.length; i++) {
      if (i == 0 || mateChunks[i].toString() != mateChunks[i - 1].toString()) {
        uniqueChunks.add(mateChunks[i]);
      }
    }
    mateChunks = uniqueChunks;

    // FIX: fold instead of reduce — reduce throws StateError on empty input.
    var mateTotalSize = mateChunks.fold<num>(0, (a, s) => a + s.fetchedSize());
    if (mateTotalSize > this.fetchSizeLimit) {
      throw Exception('data size of ${mateTotalSize} bytes exceeded fetch size limit of ${this.fetchSizeLimit} bytes');
    }
    Iterable<Future<List<BamRecord>>> mateFeatPromises = mateChunks.map((c) async {
      // FIX: go through readChunk so a cache miss re-fetches the chunk
      // instead of dereferencing a null cache entry.
      ChunkSlice chunkslice = await this.readChunk(c, opts);
      var feats = await this.readBamFeatures(chunkslice.buffer, chunkslice.cpositions, chunkslice.dpositions, chunkslice.chunk);
      List<BamRecord> mateRecs = [];
      for (var i = 0; i < feats.length; i += 1) {
        var feature = feats[i];
        // Keep only mates of unmated reads not already returned from the
        // primary region. FIX: the old null map lookup crashed in a bool
        // context, and `readIds[...] != 0` was always true (values are
        // only ever 1), so already-seen records were duplicated.
        if (unmatedPairs[feature.get('name')] == true && readIds[feature.id()] == null) {
          mateRecs.add(feature);
        }
      }
      return mateRecs;
    });
    var newMateFeats = await Future.wait(mateFeatPromises);
    // FIX: flatten with addAll — Dart lists have no `concat` method, so the
    // old reduce threw NoSuchMethodError at runtime.
    List<BamRecord> featuresRet = [];
    for (var feats in newMateFeats) {
      featuresRet.addAll(feats);
    }
    return featuresRet;
  }

  /// Returns the decompressed slice for [chunk], serving from the LRU cache
  /// when possible; returns null when the underlying read yields no bytes.
  Future<ChunkSlice> readChunk(Chunk chunk, Map opts) async {
    ChunkSlice cached = featureCache.get(chunk.toString());
    if (cached != null) {
      return cached;
    }

    var bufsize = chunk.fetchedSize();
    var bytes = await this.bam.read(0, bufsize, chunk.minv.blockPosition);

    // NOTE(review): the rest of this class reads opts['signal'] — confirm
    // whether the 'abortSignal' key here is intentional.
    checkAbortSignal(opts == null ? null : opts['abortSignal']);
    if (bytes == null) {
      return null;
    }

    ChunkSlice slice = unzipChunkSlice(bytes, chunk);
    featureCache.save(chunk.toString(), slice);
    checkAbortSignal(opts == null ? null : opts['abortSignal']);

    return slice;
  }

  /// Parses raw BAM records out of a decompressed [buffer].
  ///
  /// [cpositions]/[dpositions] are the compressed/decompressed offsets of
  /// the BGZF block boundaries inside [chunk]; they are used to derive a
  /// stable file-offset-based unique id for each record when available.
  Future<List<BamRecord>> readBamFeatures(List<int> buffer, List<int> cpositions, List<int> dpositions, Chunk chunk) async {
    ByteData ba = ByteData.view(Uint8List.fromList(buffer).buffer);
    var blockStart = 0;
    List<BamRecord> sink = [];
    var pos = 0;
    var featsSinceLastTimeout = 0;

    while (blockStart + 4 < buffer.length) {
      int blockSize = ba.getInt32(blockStart, Endian.little);
      var blockEnd = blockStart + 4 + blockSize - 1;

      // Advance pos to the first block-boundary entry past the current
      // decompressed offset. FIX: bounds-checked — the JS original this was
      // ported from relied on out-of-range reads returning undefined; the
      // old Dart `dpositions[pos++]` threw RangeError at the end.
      if (dpositions != null) {
        while (pos < dpositions.length &&
            blockStart + chunk.minv.dataPosition >= dpositions[pos]) {
          pos++;
        }
      }

      // Only try to read the feature if we have all the bytes for it.
      if (blockEnd < ba.lengthInBytes) {
        var feature = BamRecord(
          bytes: {
            'byteArray': ba,
            'start': blockStart,
            'end': blockEnd,
          },
          // File-offset based unique id when block positions are known,
          // otherwise crc32 of the record bytes:
          //  - cpositions[pos] is the compressed file offset of the BGZF
          //    block boundary; multiplying by (1 << 8) gives each block a
          //    distinct address space so offsets can never overlap,
          //  - blockStart - dpositions[pos] is the uncompressed offset from
          //    that boundary (plus chunk.minv.dataPosition because
          //    blockStart counts from 0, not from the chunk start),
          //  - +1 avoids a zero id.
          fileOffset: cpositions != null
              ? cpositions[pos] * (1 << 8) + (blockStart - dpositions[pos]) + chunk.minv.dataPosition + 1
              : (Crc32()..add(buffer.sublist(blockStart, blockEnd))).hash,
        );

        sink.add(feature);
        featsSinceLastTimeout++;
        if (featsSinceLastTimeout > 500) {
          // Yield to the event loop periodically so huge chunks don't block
          // the UI. FIX: was Duration(seconds: 1) — a full one-second stall
          // every 500 records; a 1 ms hop is enough to release the loop.
          await Future.delayed(Duration(milliseconds: 1));
          featsSinceLastTimeout = 0;
        }
      }

      blockStart = blockEnd + 1;
    }
    return sink;
  }

  /// Whether the index contains data for [seqName].
  hasRefSeq(String seqName) async {
    // FIX: look the name up in the dictionary — the old
    // `this.chrToIndex ?? [seqName]` passed the whole map (or a one-element
    // list) as the reference id.
    var refId = this.chrToIndex == null ? null : this.chrToIndex[seqName];
    return this.index.hasRefSeq(seqId: refId);
  }

  /// Approximate number of alignments for [seqName], from the index.
  lineCount(String seqName) async {
    // FIX: parameter was mistyped `StringCodec`; it is a sequence name.
    // FIX: resolve the name to its reference id (see hasRefSeq).
    var refId = this.chrToIndex == null ? null : this.chrToIndex[seqName];
    return this.index.lineCount(refId, {});
  }

  /// Coverage estimate over `seqName:start-end` from the BAI index.
  indexCov(String seqName, int start, int end) async {
    await this.index.parse({});
    // FIX: resolve the name to its reference id (see hasRefSeq).
    var seqId = this.chrToIndex == null ? null : this.chrToIndex[seqName];
    // NOTE(review): cast assumes the index is a BAI; a CSI index will throw.
    return (this.index as BAI).indexCov(seqId, start, end, {});
  }
}
