import 'dart:convert';
import 'dart:typed_data';

import 'package:archive/archive.dart';
import 'package:dbio_utils/generic_filehandle/generic_file_handle.dart';
import 'package:dbio_utils/tabix/chunk.dart';
import 'package:dbio_utils/tabix/index_file.dart';
import 'package:dbio_utils/tabix/util.dart';
import 'dart:math' show pow;

import 'package:flutter/services.dart';

import 'virtual_offset.dart';

/// Magic numbers identifying CSI index files: the ASCII bytes 'CSI\x01'
/// (version 1) and 'CSI\x02' (version 2) read as a little-endian 32-bit
/// integer. Declared `const` — these are compile-time constants and must
/// never be reassigned.
const CSI1_MAGIC = 21582659; // CSI\1 == 0x01495343 little-endian
const CSI2_MAGIC = 38359875; // CSI\2 == 0x02495343 little-endian

/// Returns [value] shifted left by [bits], i.e. `value * 2^bits`.
///
/// Implemented with multiplication (not `<<`) so that shifts past the
/// 63-bit integer width still produce a (possibly inexact) numeric result,
/// mirroring the JavaScript library this port is based on.
num lshift(num value, num bits) {
  return value * pow(2, bits);
}

/// Returns [value] arithmetically shifted right by [bits], i.e.
/// `floor(value / 2^bits)`.
///
/// Uses division + [num.floor] rather than `>>` so inputs wider than the
/// native integer range degrade gracefully, mirroring the JavaScript
/// library this port is based on.
num rshift(num value, num bits) {
  return (value / pow(2, bits)).floor();
}

/// Reader for CSI (".csi") coordinate-sorted index files, version 1 or 2,
/// as written by htslib/tabix for BGZF-compressed data files.
///
/// The index is gzip-compressed on disk; [parseImpl] decompresses it and
/// decodes the fixed header, the optional tabix-style auxiliary section,
/// and the per-reference binning indexes of [Chunk] virtual-offset ranges.
class CSI extends IndexFile {
  // Highest legal bin number for the binning scheme read from the header;
  // bins greater than this are pseudo-bins carrying per-reference stats.
  num maxBinNumber;
  // Number of binning levels (header field `depth`).
  num depth;
  // Bit width of the smallest binning interval (header field `min_shift`).
  num minShift;

  CSI({
    GenericFileHandle fileHandle,
    Function renameRefSeq,
  }) : super(
          fileHandle: fileHandle,
          renameRefSeq: renameRefSeq,
        ) {
    // Placeholders only; the real values are decoded from the file header
    // in parseImpl().
    this.maxBinNumber = 0;
    this.depth = 0;
    this.minShift = 0;
  }

  /// Returns the record count stored for [refName] in the index's
  /// pseudo-bin statistics, or -1 when the index, the reference, or the
  /// stats section is absent.
  @override
  Future<int> lineCount(String refName, Map<dynamic, dynamic> options) async {
    IndexBean indexData = await this.parse(options);
    if (indexData == null) return -1;
    // NOTE(review): indexData is read with operator[] here but with
    // .refNameToId / .indices in blocksForRange below — confirm IndexBean
    // supports both access styles.
    var refId = indexData['refNameToId'][refName];
    if (refId == null) return -1;
    var idx = indexData['indices'][refId];
    if (idx == null) return -1;
    var stats = idx['stats'];
    if (stats != null) return stats['lineCount'];
    return -1;
  }

  /// CSI indexes carry no linear index, so coverage estimation is
  /// unsupported; always throws.
  indexCov() {
    throw Exception('CSI indexes do not support indexcov');
    // NOTE(review): unreachable — the throw above always fires.
    return [];
  }

  /// Decodes the tabix-style auxiliary section that may follow the CSI
  /// header, starting at [offset] within [bytes] and spanning [auxLength]
  /// bytes.
  ///
  /// Returns a map always containing 'refIdToName' and 'refNameToId',
  /// plus — when the section is long enough — the tabix preset fields:
  /// 'format', 'columnNumbers', 'metaChar', 'skipLines', 'coordinateType'.
  /// Throws when the preset format nibble is not 0, 1, or 2.
  Map parseAuxData(Uint8List bytes, num offset, num auxLength) {
    // Too short to hold the 28-byte fixed preset plus any names.
    if (auxLength < 30) {
      return {
        'refIdToName': [],
        'refNameToId': {},
      };
    }

    // NOTE(review): views from the start of bytes.buffer; only correct
    // while `bytes` itself begins at offset 0 of its buffer — confirm for
    // all callers.
    ByteData _bytes = ByteData.view(bytes.buffer);

    var formatFlags = _bytes.getInt32(offset, Endian.little);
    // Bit 0x10000 selects UCSC-style half-open coordinates.
    var coordinateType = formatFlags & 0x10000 != 0 ? 'zero-based-half-open' : '1-based-closed';
    // Low nibble selects the preset: 0=generic, 1=SAM, 2=VCF.
    var format = ({0: 'generic', 1: 'SAM', 2: 'VCF'})[formatFlags & 0xf];
    if (format == null) throw Exception('invalid Tabix preset format flags $formatFlags');
    // 1-based column numbers of the reference/start/end fields.
    var columnNumbers = {
      'ref': _bytes.getInt32(offset + 4, Endian.little),
      'start': _bytes.getInt32(offset + 8, Endian.little),
      'end': _bytes.getInt32(offset + 12, Endian.little),
    };
    // Character code introducing comment/meta lines (e.g. '#').
    var metaValue = _bytes.getInt32(offset + 16, Endian.little);
    // NOTE(review): getInt32 never returns null, so the '' branch is dead;
    // a zero metaValue yields '\x00' rather than an empty string.
    var metaChar = metaValue != null ? String.fromCharCode(metaValue) : '';
    var skipLines = _bytes.getInt32(offset + 20, Endian.little);
    // Byte length of the concatenated NUL-terminated reference-name block.
    var nameSectionLength = _bytes.getInt32(offset + 24, Endian.little);

    var _map = this._parseNameBytes(bytes.sublist(offset + 28, offset + 28 + nameSectionLength));

    return {
      'refIdToName': _map['refIdToName'],
      'refNameToId': _map['refNameToId'],
      'skipLines': skipLines,
      'metaChar': metaChar,
      'columnNumbers': columnNumbers,
      'format': format,
      'coordinateType': coordinateType,
    };
  }

  /// Splits [namesBytes] — a block of NUL-terminated UTF-8 reference
  /// names — into forward (id → name) and reverse (name → id) lookup
  /// tables, applying [renameRefSeq] to each decoded name.
  Map _parseNameBytes(Uint8List namesBytes) {
    var currRefId = 0;
    var currNameStart = 0;
    var refIdToName = [];
    var refNameToId = {};
    for (var i = 0; i < namesBytes.length; i += 1) {
      if (namesBytes[i] == 0) {
        // End of one name. Empty names (consecutive NULs) are skipped but
        // still advance the reference id, keeping ids aligned with the file.
        if (currNameStart < i) {
          var refName = utf8.decode(namesBytes.sublist(currNameStart, i)); //String.fromCharCodes(namesBytes.sublist(currNameStart, i));
          refName = this.renameRefSeq(refName);
          // refIdToName[currRefId] = refName;
          refIdToName.add(refName);
          refNameToId[refName] = currRefId;
        }
        currNameStart = i + 1;
        currRefId += 1;
      }
    }
    return {'refNameToId': refNameToId, 'refIdToName': refIdToName};
  }

  /// Reads, gunzips, and fully decodes the CSI file.
  ///
  /// Populates [minShift], [depth], and [maxBinNumber] from the header and
  /// returns a [CsiBean] carrying the aux data, per-reference binning
  /// indexes, first data line offset, and version. Throws when the magic
  /// number is neither CSI v1 nor v2.
  @override
  Future<CsiBean> parseImpl(Map opts) async {
    // var bytes = await unzip((await this.filehandle.readFile(opts)) as Buffer)

    Uint8List __bytes = await this.fileHandle.readFile();
    List<int> _bytes = GZipDecoder().decodeBytes(__bytes);

    ByteData bytes = ByteData.view(Uint8List.fromList(_bytes).buffer);
    // check TBI magic numbers
    var csiVersion;

    var _magic = bytes.getInt32(0, Endian.little);
    if (_magic == CSI1_MAGIC) {
      csiVersion = 1;
    } else if (_magic == CSI2_MAGIC) {
      csiVersion = 2;
    } else {
      throw Exception('Not a CSI file');
      // TODO: do we need to support big-endian CSI files?
    }

    // Binning-scheme parameters from the fixed 16-byte header.
    this.minShift = bytes.getInt32(4, Endian.little);
    this.depth = bytes.getInt32(8, Endian.little);
    // Total bins across `depth + 1` levels: sum of 8^l = (8^(depth+1)-1)/7.
    // NOTE(review): `/` produces a double in Dart; `~/ 7` would keep this
    // an int. Comparisons below still work numerically.
    this.maxBinNumber = ((1 << ((this.depth + 1) * 3)) - 1) / 7;
    var maxRefLength = pow(2, this.minShift + this.depth * 3);

    var auxLength = bytes.getInt32(12, Endian.little);
    Map aux = {
      'refIdToName': [],
      'refNameToId': {},
    };
    // NOTE(review): getInt32 never returns null, so only the > 0 check
    // is effective here.
    if (auxLength != null && auxLength > 0) {
      aux = this.parseAuxData(_bytes, 16, auxLength);
    }
    var refCount = bytes.getInt32(16 + auxLength, Endian.little);

    // read the indexes for each reference sequence
    VirtualOffset firstDataLine;
    var currOffset = 16 + auxLength + 4;
    // The filled closure is only a dummy element to drive refCount
    // iterations; the map callback does the real decoding and depends on
    // currOffset advancing across iterations, so the lazy map must be
    // consumed exactly once by .toList().
    var indices = List.filled(refCount, (e) => 0).map((e) {
      // the binning index
      var binCount = bytes.getInt32(currOffset, Endian.little);
      currOffset += 4;
      Map<int, List<Chunk>> binIndex = {};
      var stats; // < provided by parsing a pseudo-bin, if present
      for (var j = 0; j < binCount; j += 1) {
        var bin = bytes.getInt32(currOffset, Endian.little);
        if (bin > this.maxBinNumber) {
          // this is a fake bin that actually has stats information
          // about the reference sequence in it
          stats = this.parsePseudoBin(_bytes, currOffset + 4);
          // Skip the whole pseudo-bin record (bin id + loffset + count +
          // two 16-byte chunk-shaped payloads).
          currOffset += 4 + 8 + 4 + 16 + 16;
        } else {
          // Real bin: a loffset virtual offset followed by its chunk list.
          var loffset = VirtualOffset.fromBytes(bytes: _bytes, offset: currOffset + 4);
          firstDataLine = findFirstData(firstDataLine, loffset);
          var chunkCount = bytes.getInt32(currOffset + 12, Endian.little);
          currOffset += 16;
          // NOTE(review): List<Chunk>(n) is the removed pre-null-safety
          // fixed-length constructor — will not compile on Dart >= 2.12.
          var chunks = List<Chunk>(chunkCount);
          for (var k = 0; k < chunkCount; k += 1) {
            // Each chunk is a [start, end) pair of 64-bit virtual offsets.
            var u = VirtualOffset.fromBytes(bytes: _bytes, offset: currOffset);
            var v = VirtualOffset.fromBytes(bytes: _bytes, offset: currOffset + 8);

            currOffset += 16;
            // this._findFirstData(data, u)
            chunks[k] = Chunk(u, v, bin);
          }
          binIndex[bin] = chunks;
        }
      }

      return {'binIndex': binIndex, 'stats': stats};
    }).toList();

    var map = {
      ...aux,
      'csi': true,
      'refCount': refCount,
      'maxBlockSize': 1 << 16,
      'firstDataLine': firstDataLine,
      'csiVersion': csiVersion,
      'indices': indices,
      'depth': this.depth,
      'maxBinNumber': this.maxBinNumber,
      'maxRefLength': maxRefLength,
    };
    return CsiBean(map);
  }

  /// Extracts the record count from a pseudo-bin payload starting at
  /// [offset] in [bytes].
  ///
  /// NOTE(review): reads only the low 32 bits of the value at byte 28 of
  /// the payload — confirm this is sufficient for expected index sizes.
  Map parsePseudoBin(Uint8List bytes, num offset) {
    var lineCount = ByteData.view(bytes.buffer).getInt32(offset + 28, Endian.little);
    return {'lineCount': lineCount};
  }

  /// Returns the file [Chunk]s that may contain records overlapping
  /// [start]..[end] on [refName], with contained, overlapping, and
  /// adjacent chunks merged (the classic samtools coordinate-query
  /// reduction). Returns an empty list when the reference is unknown or
  /// no candidate bins have chunks.
  @override
  Future<List<Chunk>> blocksForRange({
    String refName,
    int start,
    int end,
    Map opts,
  }) async {
    if (start < 0) start = 0;

    var indexData = await this.parse(opts);
    if (indexData == null) return [];
    var refId = indexData.refNameToId[refName];
    var indexes = indexData.indices[refId];
    if (indexes == null) return [];

    var binIndex = indexes['binIndex'];

    // All bin numbers whose interval can intersect [start, end).
    var bins = this.reg2bins(start, end);

    var l;
    var numOffsets = 0;
    // First pass: count candidate chunks; bail out early if none.
    for (var i = 0; i < bins.length; i += 1) {
      if (binIndex[bins[i]] != null) numOffsets += binIndex[bins[i]].length;
    }

    if (numOffsets == 0) return [];

    // Collect private copies of every candidate chunk so the merge passes
    // below can mutate minv/maxv without touching the cached index.
    List<Chunk> off = [];
    numOffsets = 0;
    for (var i = 0; i < bins.length; i += 1) {
      List<Chunk> chunks = binIndex[bins[i]];
      if (chunks != null)
        for (var j = 0; j < chunks.length; j += 1) {
          off.add(Chunk(
            chunks[j].minv,
            chunks[j].maxv,
            chunks[j].bin,
          ));
          numOffsets += 1;
        }
    }

    if (off.length == 0) return [];

    // Sort by virtual offset so the in-place compaction passes can work
    // with a single forward scan each.
    off.sort((a, b) => a.compareTo(b));

    // resolve completely contained adjacent blocks
    l = 0;
    for (var i = 1; i < numOffsets; i += 1) {
      if (off[l].maxv.compareTo(off[i].maxv) < 0) {
        l += 1;
        off[l].minv = off[i].minv;
        off[l].maxv = off[i].maxv;
      }
    }
    numOffsets = l + 1;

    // resolve overlaps between adjacent blocks; this may happen due to the merge in indexing
    for (var i = 1; i < numOffsets; i += 1) {
      if (off[i - 1].maxv.compareTo(off[i].minv) >= 0) {
        off[i - 1].maxv = off[i].minv;
      }
    }

    // merge adjacent blocks
    l = 0;
    for (var i = 1; i < numOffsets; i += 1) {
      if (off[l].maxv.blockPosition == off[i].minv.blockPosition)
        off[l].maxv = off[i].maxv;
      else {
        l += 1;
        off[l].minv = off[i].minv;
        off[l].maxv = off[i].maxv;
      }
    }
    numOffsets = l + 1;

    // Only the first numOffsets entries survived the compaction passes.
    return off.sublist(0, numOffsets);
  }

  /// calculate the list of bins that may overlap with region [beg,end) (zero-based half-open)
  /// @returns {Array[number]}
  ///
  /// Throws when the candidate bin count would exceed [maxBinNumber]
  /// (query too large for the index's binning scheme).
  List reg2bins(num beg, num end) {
    beg -= 1; // < convert to 1-based closed
    if (beg < 1) beg = 1;
    if (end > pow(2, 50)) end = pow(2, 34); // 17 GiB ought to be enough for anybody
    end -= 1;
    var l = 0;
    var t = 0;
    // Start at the coarsest level, where one bin spans the whole reference.
    var s = this.minShift + this.depth * 3;
    var bins = [];
    // Per level: t is the index of the level's first bin, s the per-bin
    // shift; collect every bin index between beg>>s and end>>s inclusive.
    for (; l <= this.depth; s -= 3, t += lshift(1, l * 3), l += 1) {
      var b = t + rshift(beg, s);
      var e = t + rshift(end, s);
      if (e - b + bins.length > this.maxBinNumber)
        throw Exception(
          'query ${beg}-${end} is too large for current binning scheme (shift ${this.minShift}, depth ${this.depth}), try a smaller query or a coarser index binning scheme',
        );
      for (var i = b; i <= e; i += 1) bins.add(i);
    }
    return bins;
  }
}
