const { spawn, spawnSync } = require('child_process')
const path = require('path')
const fs = require('fs')

// Install a global `sleep(ttl)` helper (milliseconds) unless one already exists.
if (!global.sleep) {
  global.sleep = (ttl) => new Promise((resolve) => { setTimeout(resolve, ttl) })
}

// Low-level search primitives over fixed-width column buffers. Each column
// buffer is a 1-byte NUL prefix followed by rows of `col.len` bytes, where
// string cells are NUL-terminated. `reverse` selects the Buffer scan method
// ('indexOf' or 'lastIndexOf'); return values are byte offsets, -1 = no match.
const ops = {
  /**
   * Exact-match search: wraps `str` in NUL delimiters so it can only match a
   * whole cell (the leading NUL is the previous row's terminator, or the
   * buffer's 1-byte prefix for the first row).
   */
  eqIndex(buf, str, offset, col, reverse) {
    const needle = Buffer.concat([Buffer.alloc(1), Buffer.from(str, col.encoding), Buffer.alloc(1)])
    return buf[reverse](needle, offset)
  },
  // Exact match where the caller already encoded the needle as a Buffer.
  eqIndexBuf(buf, str, offset, col, reverse) {
    return buf[reverse](str, offset)
  },
  // Substring (LIKE) match: raw encoded bytes, no NUL delimiters.
  likeIndex(buf, str, offset, col, reverse) {
    return buf[reverse](Buffer.from(str, col.encoding), offset)
  },
  // Not-equal: walk row by row; return the offset of the first row that does
  // NOT contain `str`, or -1 once the buffer is exhausted.
  neIndex(buf, str, offset, col) {
    while (true) {
      if (offset >= buf.length - 1) return -1 // exhausted, every row matched
      const bi = buf.slice(offset + 1, offset + 1 + col.len).indexOf(str)
      if (bi === -1) return offset // found a row that differs
      offset += col.len
    }
  },
  // Not-in-array: walk row by row, decode each cell and return the offset of
  // the first row whose value is not contained in `arr`.
  neArrIndex(buf, arr, offset, col) {
    if (Acan.isStr(arr)) arr = [arr]
    while (true) {
      if (offset >= buf.length - 1) return -1 // exhausted
      if (buf.slice(offset + 1, offset + 2).equals(Buffer.alloc(1))) return -1 // reached the empty trailing row
      // Decode the fixed-width cell up to its NUL terminator.
      // (Fixed: was `this.types.string(buf.slice(..., col.encoding))` — `ops`
      // has no `types` property so the $nin path threw, and Buffer.slice
      // silently ignores an encoding argument anyway.)
      const val = buf.slice(offset + 1, offset + 1 + col.len).toString(col.encoding).split('\x00')[0]
      if (arr.includes(val)) offset += col.len // still matching, try next row
      else return offset
    }
  },
  // In-array: run `fn` once per candidate value and return the smallest
  // matching byte offset, or -1 when none match.
  arrIndex(buf, arr, offset, col, reverse, fn = ops.eqIndex) {
    if (Acan.isStr(arr)) arr = [arr]
    const mrr = []
    for (const val of arr) {
      const bi = fn(buf, val, offset, col, reverse)
      if (bi >= 0) mrr.push(bi)
    }
    // Fixed: numeric comparator — the default sort() compares as strings and
    // would order offset 10 before offset 2, returning the wrong "first" hit.
    if (mrr.length > 0) { mrr.sort((a, b) => a - b); return mrr[0] }
    return -1
  }
}

// Per-date bookkeeping for column shards loaded into memory (see loadFds):
// each entry tracks its last access time and schedules its own reclamation.
const fdCaches = {}

const mod = {
  // High-resolution timer: process uptime as whole microseconds.
  _t() {
    return parseInt(process.uptime() * 1000000)
  },
  // Convert a _t() microsecond delta to milliseconds with 3 decimals.
  _tf(t) { return parseFloat((t / 1000).toFixed(3)) },
  // Current unix time (seconds) rendered as a hex string.
  _t16() {
    return parseInt((new Date()).getTime() / 1000).toString(16)
  },
  // Build a 12-byte ObjectId-style identifier.
  _id() {
    // _id = [Acan.time().toString(16), ip, pid, inc ]
    //        4                         4   2    2
    const buf = Buffer.alloc(12)
    buf.write(Acan.time().toString(16), 0, 'hex')
    conf.realIp.split('.').map((v, i) => {
      // NOTE(review): octets < 16 produce a 1-char hex string, and a 'hex'
      // write drops the trailing odd character — confirm this is acceptable.
      buf.write(parseInt(v).toString(16), 4 + i, 'hex')
    })
    buf.write(process.pid.toString(16), 8, 'hex')
    const rand = _inc.toString(16)
    _inc++
    if (rand.length > 2) buf.write(rand, 10, 'hex')
    else buf.write(rand, 11, 'hex')
    return buf
  },
  // Decode a 12-byte _id (Buffer or hex string) into { time, ip, pid, inc }.
  _idParse(_id) {
    const buf = Buffer.isBuffer(_id) ? _id : Buffer.from(_id, 'hex')
    const rs = {}
    rs.time = parseInt(buf.slice(0, 4).toString('hex'), 16)
    const ipr = []
    for (let i = 0; i < 4; i++) {
      ipr.push(parseInt(buf.slice(4 + i, 5 + i).toString('hex'), 16))
    }
    rs.ip = ipr.join('.')
    rs.pid = parseInt(buf.slice(8, 10).toString('hex'), 16)
    rs.inc = parseInt(buf.slice(10, 12).toString('hex'), 16)
    return rs
  },
  // Column readers: decode a raw cell Buffer into a JS value, per type.
  types: {
    time(buf) { return parseInt(buf.toString('hex'), 16) },
    _id(buf) { return buf.toString('hex') },
    // NOTE(review): only byte value 1 reads as true; typesWrite.bool writes 2
    // (not 0) for false — asymmetric but apparently intentional.
    bool(buf) { return buf.readIntBE(0, 1) === 1 ? true : false },
    double(buf) { return buf.readDoubleBE() },
    float(buf) { return buf.readFloatBE() },
    int(buf) { return buf.readIntBE(0, buf.length) },
    // String cells are NUL-terminated inside a fixed-width slot.
    string(buf, encoding) { return buf.toString(encoding).split('\x00')[0] }
  },
  // Column writers: encode a JS value into `buf` at `offset`, per type.
  typesWrite: {
    time(buf, offset, col) { return buf.write(mod._t16(), offset, col.len - 1, 'hex') },
    _id(buf, offset, col, val) { const _id = val ? Buffer.from(val, 'hex') : mod._id(); _id.copy(buf, offset, 0); return _id },
    bool(buf, offset, col, val) { if (Acan.isStr(val)) val = JSON.parse(val); return buf.writeIntBE(val === true ? 1 : 2, offset, 1) },
    double(buf, offset, col, val) { return buf.writeDoubleBE(parseFloat(val), offset, 8) },
    float(buf, offset, col, val) { return buf.writeFloatBE(parseFloat(val), offset, 4) },
    int(buf, offset, col, val) { return buf.writeIntBE(parseInt(val), offset, col.len - 1) },
    string(buf, offset, col, val = '') { if (Acan.isInt(val)) val += ''; if (!Acan.isStr(val)) throw `Type Error: ${col.type},${col.len}, ${val}`; const nb = Buffer.alloc(col.len); nb.write(val, 0, col.encoding); return nb.copy(buf, offset, 0) }
  },
  // Sort `arr` in place by multiple keys in priority order.
  // `sorts` maps key -> direction: -1 descending, anything else ascending.
  _sort(arr, sorts = null) {
    if (!sorts) return arr
    const scall = (a, b, key, ad = 1) => {
      return ad === -1 ? (a[key] > b[key] ? -1 : 1) : (a[key] > b[key] ? 1 : -1) // descending : ascending
    }
    // Compare on the first key; recurse to the next key on ties.
    const call = (a, b, srr) => {
      const key = srr.shift()
      if(a[key] === b[key]) {
        if (srr.length === 0) return 0
        else return call(a, b, srr)
      } else return scall(a, b, key, sorts[key])
    }
    arr.sort(function(a, b) {
      return call(a, b, Object.keys(sorts))
    })
  },
  // Load one column file of a date/hour shard into memory under
  // table.fd[date + '/' + k]; prefers the .zst archive over the .raw file.
  async loadFd(table, date, k) {
    const key = date + '/' + k
    if (table.fd[key]) return // already loaded
    const rawName = `${conf.db}/${table.db}/${table.name}/${key}.raw`
    const zstName = rawName.substr(0, rawName.length - 4) + '.zst'
    const ifRaw = fs.existsSync(rawName)
    const ifZst = fs.existsSync(zstName)
    if (!ifRaw && !ifZst) return // no data for this time slot
    const zstSize = ifZst ? fs.lstatSync(zstName).size : 0
    const call = (buf) => {
      // Row count comes from the timeline column: 13 bytes/_id row, 5/_t row.
      if (k === '_id') table.fd.inc = buf.length / 13
      if (k === '_t') table.fd.inc = buf.length / 5
      // Prepend one NUL byte so delimiter-based searches can match the first row.
      table.fd[key] = { buf: Buffer.concat([Buffer.alloc(1), buf]), size: buf.length, zstSize }
    }
    return new Promise(res => {
      if (ifZst) {
        this.zstdToBuf(zstName).then(buf => {
          call(buf)
          res()
        })
      } else {
        fs.readFile(rawName, (err, buf) => {
          call(buf)
          res()
        })
      }
    })
    // NOTE(review): unreachable — the function returns above.
    if (!ifRaw) {
      // fs.writeFile(rawName, buf, () => {
      //   table.fd[key].fd = fs.openSync(rawName, 'r')
      // })
    }
  },
  // Load the selected columns of a date/hour shard from disk into memory, and
  // register a cache entry that reclaims the memory after ~10 minutes idle.
  async loadFds(table, date, select = []) {
    const _t = this._t()
    const prr = []
    if (table.cols._id && !select.includes('_id')) select.unshift('_id')
    if (table.cols._t && !select.includes('_t')) select.unshift('_t')
    if (select.includes('*')) select = Object.keys(table.cols)
    for (const k of select) {
      prr.push(this.loadFd(table, date, k))
    }
    if (fdCaches[date]) {
      fdCaches[date].last = Acan.time()
      this.log('clean fd update', table.name)
      return
    }
    fdCaches[date] = {
      last: Acan.time(),
      clean() {
        if (Acan.time() - this.last < 600) return this.timeout() // touched within 10 min — postpone
        for (const k in table.cols) {
          const key = date + '/' + k
          table.fd[key] = undefined
          // NOTE(review): logs table.fd[key] right after clearing it, so this
          // always prints undefined — probably meant to log before clearing.
          Adebug.log('clean fd ok', table.name, date, table.fd[key])
        }
        fdCaches[date] = undefined
      },
      timeout() { // re-check for memory reclamation after 1 minute
        setTimeout(() => { this.clean() }, 60 * 1000)
      }
    }
    fdCaches[date].timeout()
    this.log('clean fd init', table.name, date)
    return await Promise.all(prr).then(() => {
      this.log('load fds ok', date, this._tf(this._t() - _t), 'ms')
    })
  },
  // Decompress a .zst file: stream into `buf` when provided, otherwise collect
  // and return a new Buffer. Resolves when the zstd child process exits.
  async zstdToBuf(filename, buf) {
    if (!fs.existsSync(filename)) return // missing file
    const proc = spawn(conf.zstd || 'zstd', ['-d', '-c', filename], { maxBuffer: 500 * 1024 * 1024 })
    const arr = []
    let bi = 0
    proc.stdout.on('data', (data) => {
      if (buf) data.copy(buf, bi)
      else arr.push(data)
      bi += data.length
    })
    return new Promise(res => {
      proc.on('close', () => {
        res(buf || Buffer.concat(arr))
      })
    })
  },
  // Decompress a .zst file directly to another file (blocking).
  zstdToFile(input, output) {
    if (!fs.existsSync(input)) return // missing file
    return spawnSync(conf.zstd, ['-d', input, '-o', output])
  },
  // Compress a Buffer into `filename`; `cb` receives { hs, filename }.
  bufToZstd(buf, filename, cb) {
    const _t = this._t()
    if (!fs.existsSync(filename)) { fs.mkdirSync(path.dirname(filename), { recursive: true }) } // ensure directory exists
    const proc = spawn(conf.zstd, ['-f', '-o', filename])
    proc.on('close', (err) => { if (cb) cb({ hs: this._tf(this._t() - _t), filename }) })
    proc.stdin.end(buf)
  },
  // Compress `input` into `output` via zstd, then delete the input file.
  toZstd(input, output, cb) {
    const _t = this._t()
    const proc = spawn(conf.zstd, ['-f', input, '-o', output])
    proc.on('close', (err) => {
      if (cb) cb({ hs: this._tf(this._t() - _t), output })
      try {
        fs.unlinkSync(input)
      } catch (error) {}
    })
  },
  // Search a loaded column shard for `v`; returns the row index or -1.
  fdIndexOf(fd, v, col, i = 0, reverse) {
    if (!fd) return -1 // column not loaded / does not exist
    return this.bufIndexOne(fd.buf, v, col, i, reverse)
  },
  // Pre-encode the values of a query-condition object into cell Buffers so
  // repeated buffer scans need no per-row encoding.
  queryToBuf(con, cols) {
    const rs = Acan.clone(con)
    Acan.each(rs, (v, k) => {
      const col = cols[k]
      if (!col) return // not a known column
      const buf = Buffer.alloc(col.len + 1)
      if (Acan.isStr(v)) {
        // Plain equality: keep the leading NUL so eqIndexBuf matches whole cells.
        this.typesWrite[col.type](buf, 1, col, v)
        rs[k] = buf
      } else if (v.$ne) {
        this.typesWrite[col.type](buf, 1, col, v.$ne)
        v.$ne =buf.slice(1)
      }
    })
    return rs
  },
  // Find one matching row in a column buffer. `i` is a byte offset into buf;
  // returns the row index (byte offset / col.len) or -1 when nothing matches.
  bufIndexOne(buf, v, col, i = 0, reverse = 'indexOf') {
    let fi = -1
    if (v.$distinct) { // unique-field query: intentionally unhandled here, fi stays -1
    } else if (v.$in) { // value in array
      fi = ops.arrIndex(buf, v.$in, i, col, reverse, ops.eqIndex)
    } else if (v.$nin) { // value not in array
      fi = ops.neArrIndex(buf, v.$nin, i, col, reverse)
    } else if (v.$ne) { // not equal
      fi = ops.neIndex(buf, v.$ne, i, col, reverse)
    } else if (v.$like) { // substring match, supports arrays
      fi = ops.arrIndex(buf, v.$like, i, col, reverse, ops.likeIndex)
    } else { // exact equality
      fi = ops.eqIndexBuf(buf, v, i, col, reverse)
    }
    if (fi === -1) return fi // not found
    return parseInt(fi / col.len) // convert byte offset to row position
  },
  // Path of a column's hour-shard file: <db>/<table>/<Ymd/H>/<col>.<ext>.
  getZstdFilename(o, k, ext = 'zst', nt = undefined) {
    return `${conf.db}/${o.db}/${o.name}/${Acan.time('Ymd/H', nt)}/${k}.${ext}`
  },
  // Allocate a column's hot in-memory buffer and, on startup, restore its
  // snapshot (and the saved row counter) from disk.
  async snapShotLoad(o, k, col, opt, isInit) {
    const _t = this._t()
    const zstdName = `${conf.db}/snapShot/${o.ns}.${k}.zstd`
    // o._sab[k] = new ArrayBuffer(col.len * opt.hotLimit + 1)
    // o._buf[k] = Buffer.from(o._sab[k])
    o._buf[k] = Buffer.alloc(col.len * opt.hotLimit + 1)
    if (isInit && fs.existsSync(zstdName)) { // restore previous data at startup
      const incFile = `${conf.db}/snapShot/${o.ns}.inc`
      if (fs.existsSync(incFile)) {
        o.inc = JSON.parse(fs.readFileSync(incFile, { encoding: 'utf-8' }) || 0)
        fs.unlinkSync(incFile)
      }
      await this.zstdToBuf(zstdName, o._buf[k])
      fs.unlink(zstdName, () => {})
      this.log(`${zstdName} load ok ${this._tf(this._t() - _t)}ms`)
    }
  },
  // Snapshot every table's in-memory data to disk, one table at a time.
  async snapShot() {
    const _t = this._t()
    const prr = [] // NOTE(review): unused
    for (const name in tables) {
      await this.snapShotTable(tables[name])
    }
    return { hs: this._tf(this._t() - _t) }
  },
  // Compress one table's column buffers into snapshot files (plus its row counter).
  async snapShotTable(table) {
    Adebug.log(`dump zstd ${table.name}`)
    this.log(`dump zstd ${table.name}`)
    fs.writeFileSync(`${conf.db}/snapShot/${table.ns}.inc`, JSON.stringify(table.inc), { encoding: 'utf-8' })
    const prr = []
    for (const k in table.cols) {
      prr.push(new Promise(res => {
        this.bufToZstd(table._buf[k], `${conf.db}/snapShot/${table.ns}.${k}.zstd`, res)
      }))
    }

    await Promise.all(prr)
  },
  // Flush every table's hot data to hour-shard files.
  async toFile() {
    const _t = this._t()
    for (const name in tables) {
      await this.tableToFile(tables[name])
    }
    return { hs: this._tf(this._t() - _t) }
  },
  // Drain one table's hot buffer to disk until it is empty.
  async tableToFile(table) {
    this.log(`start toFile ${table.name}`)
    while(table.inc > 0) {
      await this.bufOverToFile(table, 1)
    }
  },
  // When the hot buffer reaches 95% capacity, move the earliest `moveRate`
  // share (default 10%) of rows into the current hour's .raw files, then
  // compact the in-memory buffers. Guarded by o.overMoveLock against re-entry.
  async bufOverToFile(o, moveRate = 0.1) {
    if (o.overMoveLock) return
    if (moveRate < 1 && o.inc < parseInt(o.opt.hotLimit * 0.95)) return // below threshold, nothing to do
    o.overMoveLock = true // re-entry guard
    const _t = this._t()
    let moveNum = parseInt(o.opt.hotLimit * moveRate)
    if (moveNum > o.inc) moveNum = o.inc
    if (o.oldInc) { // stale rows being deleted — not written to file
      if (moveNum > o.oldInc) moveNum = o.oldInc
    }
    const len = o._buf._id ? 13 : 5
    const tbuf = (o._buf._id ? o._buf._id : o._buf._t).slice(1)
    const firstTime = this.types.time(tbuf.slice(0, 4))
    const firstHour = Acan.time('H', firstTime)
    const lastStart = (moveNum - 1) * len
    const lastHour = Acan.time('H', this.types.time(tbuf.slice(lastStart, lastStart + 4)))
    if (firstHour !== lastHour) { // batch crosses an hour boundary — shrink it so all rows share one hour
      const maxTime = Acan.time('s', Acan.time('Y-m-d H:59:59', firstTime))
      let i = 0
      for (i = 0; i < moveNum; i++) {
        if (this.types.time(tbuf.slice(i * len, i * len + 4)) > maxTime) { break }
      }
      moveNum = i + 1
    }
    // if (moveRate === 1)  console.log(moveNum, firstHour, lastHour, tbuf.slice(lastStart, lastStart + 4))
    let moveBuf
    const prr = []
    if (!o.oldInc) {
      for (const k in o.cols) {
        const moveBufNum = moveNum * o.cols[k].len
        const filename = this.getZstdFilename(o, k, 'raw')
        if (!fs.existsSync(path.dirname(filename))) fs.mkdirSync(path.dirname(filename), { recursive: true }) // ensure shard folder exists
        prr.push(new Promise(res => { fs.appendFile(filename, o._buf[k].slice(1, moveBufNum + 1), res) }))
      }
    }
    return await Promise.all(prr).then(() => {
      // Shift the remaining rows down and zero the vacated tail.
      o.createLock = true
      const mt = this._t()
      for (const k in o.cols) {
        const moveBufNum = moveNum * o.cols[k].len
        o._buf[k].slice(moveBufNum + 1).copy(o._buf[k], 1)
        o._buf[k].slice(o._buf[k].length - moveBufNum).fill(0x0)
      }
      o.inc -= moveNum
      if (o.oldInc) { // stale-row deletion bookkeeping
        o.oldInc -= moveNum
        if (o.oldInc <= 0) o.oldInc = undefined
      }
      o.createLock = false
      o.overMoveLock = false
      const msg = `move to raw ${firstHour} - ${lastHour}, moveRate: ${moveRate}, inc: ${o.inc}, oldInc: ${o.oldInc || ''}, num: ${moveNum}, hs: ${this._tf(this._t() - _t)}ms`
      Adebug.log(msg)
      this.log(msg)
      return o
    }).catch(e => {
      // NOTE(review): the error is swallowed here; only the lock is released.
      o.overMoveLock = false
    })
  },
  // Append a line to today's run log. Errors are ignored. NOTE(review):
  // several callers pass multiple arguments, but only the first is written.
  log(str) {
    fs.appendFile(`${conf.db}/log/${Acan.time('Ymd')}/run.log`, `${Acan.time('m-d H:i:s')} ${str}\n`, { encoding: 'utf-8' }, () => {})
  },
  // Recompute and persist an hour shard's info.json: raw/compressed sizes per
  // column plus the row count (derived from the _id or _t column).
  async countHourInfo(ns, isId, hour) {
    const key = isId ? '_id' : '_t'
    const len = isId ? 13 : 5
    const hourPath = `${conf.db}/${ns.replace('.', '/')}/${hour}/`
    const infoPath = `${hourPath}info.json`
    const info = { hour, size: 0, zst: 0, inc: 0, sizes: {}, zsts: {} }
    if (fs.existsSync(infoPath)) try { Object.assign(info, JSON.parse(fs.readFileSync(infoPath, { encoding: 'utf8' })))} catch (error) {}
    info.zst = 0
    info.size = 0
    if (!info.zsts) info.zsts = {}
    for (const v of fs.readdirSync(hourPath)) {
      if (!['.raw', '.zst'].includes(v.substr(-4))) continue // only data files
      const k = v.substr(0, v.length - 4)
      let buf
      if (info.sizes[k] && info.zsts[k]) continue // both sizes already known
      if (v.substr(-4) === '.zst') {
        if (info.sizes[k] && !info.zsts[k]) { // only the compressed size is missing
          const o = fs.lstatSync(`${hourPath}${v}`)
          info.zsts[k] = o.size
          continue
        }
        buf = await this.zstdToBuf(`${hourPath}${v}`)
        if (k === key) info.inc = buf.byteLength / len
        info.sizes[k] = buf.byteLength
      } else {
        const o = fs.lstatSync(`${hourPath}${v}`)
        if (k === key) info.inc = o.size / len
        info.sizes[k] = o.size
      }
    }
    Acan.each(info.sizes, (v, k) => { info.size += v })
    Acan.each(info.zsts, (v, k) => { info.zst += v })
    fs.writeFileSync(`${hourPath}info.json`, JSON.stringify(info), { encoding: 'utf8' })
    return info
  },
  // Walk the database tree and compress .raw shard files from before the
  // previous hour into .zst, refreshing each hour's info.json and each
  // table's list.json along the way.
  zstdRaw() {
    const today = Acan.time('Ymd')
    const dh = Acan.time('H', Acan.time() - 7200) // cutoff: two hours ago
    const list = {}
    const fsOpt = { withFileTypes: true }
    let ns = ''
    let dirs = []
    const tables = {}
    const dirFn = (arr) => {
      try {
        dirs = fs.readdirSync(arr.join('/'), fsOpt)
      } catch (error) {
        // NOTE(review): on failure `dirs` keeps the previous directory's
        // entries (shared binding across recursive calls), so that level gets
        // walked again — confirm this is intended.
      }
      dirs.map(dir => {
        const nrr = Acan.clone(arr)
        nrr.push(dir.name)
        if (!dir.isDirectory() && arr.length < 5) return // files only matter at shard depth
        if (arr.length === 2) { // depth <db>/<database>: entries are tables
          ns = nrr.slice(1, 3).join('.')
          list[ns] = []
          const confPath = nrr.join('/') + '/conf.json'
          if (fs.existsSync(confPath)) try {
            tables[ns] = JSON.parse(fs.readFileSync(confPath, { encoding: 'utf-8' }))
          } catch (error) {}
        } else if (arr.length === 4) { // depth .../<Ymd>: entries are hours
          const dayHour = nrr.slice(3, 5).join('/')
          list[ns].push(dayHour)
          if (dir.name > dh && today === arr[arr.length - 1]) return // Adebug.log('no time', nrr) // too recent to process
          this.countHourInfo(ns, !!tables[ns].cols._id, dayHour)
        } else if (arr.length === 5) { // depth .../<Ymd>/<H>: entries are column files
          if (dir.name.substr(-4) !== '.raw') return // nothing to compress
          const input = nrr.join('/')
          const output = input.substr(0, input.length - 4) + '.zst'
          this.toZstd(input, output, (rs) => {
            this.log(`${input} zstd ok ${rs.hs}ms`)
            Adebug.log(`${input} zstd ok ${rs.hs}ms`)
          })
          return
        }
        dirFn(nrr)
      })
    }
    dirFn([conf.db])
    Acan.each(list, (arr, ns) => {
      fs.writeFileSync([conf.db, ns.replace('.', '/'), 'list.json'].join('/'), JSON.stringify(arr), { encoding: "utf-8" })
    })
    return list
  }
}

// Expose the storage-engine helpers as the module's public API.
module.exports = mod
