import utils from '../utils'
import ShpConverterText from '../shpconverter-text'
import { BinArray } from '../utils'

// Namespace object for the dBASE (.dbf) export helpers in this module.
const Dbf = {}
// Maximum byte length of a character ('C') field value in a dbf file.
Dbf.MAX_STRING_LEN = 254
// Reusable byte-buffer allocator for string encoding.
// Hands out Uint8Array views into a shared backing array so each encoded
// string does not allocate its own buffer. When the current pool is
// exhausted, a fresh one is created; views handed out earlier remain valid
// because they keep the old backing array alive.
function BufferPool() {
  const DEFAULT_SIZE = 5000
  let pool = null
  let size = 0
  let i = 0 // next free offset within pool
  newPool(DEFAULT_SIZE)

  function newPool(len) {
    pool = new Uint8Array(len)
    size = len
    i = 0
  }

  return {
    // Reserve `bytes` bytes and return a Uint8Array view over them.
    // Fix: grow the pool when a single reservation exceeds the default
    // capacity — previously `subarray()` silently clamped and returned a
    // view shorter than requested for strings longer than 5000 chars.
    reserve: function(bytes) {
      if (i + bytes > size) newPool(Math.max(DEFAULT_SIZE, bytes))
      i += bytes
      return pool.subarray(i - bytes, i)
    },
    // Release the trailing `bytes` bytes of the most recent reservation.
    putBack: function(bytes) {
      i -= bytes
    }
  }
}

Dbf.bufferPool = new BufferPool()
// Serialize an array of record objects into a dBASE III (.dbf) table.
// records: array of plain objects, one per row; encoding: text encoding for
// character fields (defaults to 'utf8'); fieldOrder: optional ordering hint
// forwarded to utils.findFieldNames. Returns an ArrayBuffer containing the
// complete file bytes.
Dbf.exportRecords = function(records, encoding, fieldOrder) {
  const rows = records.length
  const fields = utils.findFieldNames(records, fieldOrder)
  // dbf-safe versions of the field names (cleaned and deduplicated)
  const dbfFields = Dbf.convertFieldNames(fields)
  // per-field descriptors: type, size, decimals, plus a write(i, bin) function
  const fieldData = fields.map((name, i) => {
    const info = Dbf.getFieldInfo(records, name, encoding || 'utf8')
    const name2 = dbfFields[i]
    info.name = name2
    if (name != name2) {
      // message('Changed field name from "' + name + '" to "' + name2 + '"')
    }
    if (info.warning) {
      // message('[' + name + '] ' + info.warning)
    }
    return info
  })
  const headerBytes = Dbf.getHeaderSize(fieldData.length)
  const recordBytes = Dbf.getRecordSize(utils.pluck(fieldData, 'size'))
  // +1 for the trailing 0x1a end-of-file byte
  const fileBytes = headerBytes + rows * recordBytes + 1

  const buffer = new ArrayBuffer(fileBytes)
  const bin = new BinArray(buffer).littleEndian() // dbf is little-endian throughout
  const now = new Date()

  // write header
  bin.writeUint8(3) // version byte: dBASE III without memo file
  bin.writeUint8(now.getFullYear() - 1900) // last-update date: year offset from 1900
  bin.writeUint8(now.getMonth() + 1)
  bin.writeUint8(now.getDate())
  bin.writeUint32(rows)
  bin.writeUint16(headerBytes)
  bin.writeUint16(recordBytes)
  bin.skipBytes(17) // reserved bytes, left zeroed
  bin.writeUint8(0) // language flag; TODO: improve this
  bin.skipBytes(2)


  // field subrecords (one 32-byte descriptor per field); the reduce
  // accumulator is each field's byte offset within a record, starting at 1
  // to account for the per-record deletion-flag byte
  fieldData.reduce((recordOffset, obj) => {
    bin.writeCString(obj.name, 11)
    bin.writeUint8(obj.type.charCodeAt(0))
    bin.writeUint32(recordOffset)
    bin.writeUint8(obj.size)
    bin.writeUint8(obj.decimals)
    bin.skipBytes(14) // reserved portion of the descriptor
    return recordOffset + obj.size
  }, 1)

  bin.writeUint8(0x0d) // "field descriptor terminator"
  // sanity check: descriptors must exactly fill the declared header size
  if (bin.position() != headerBytes) {
    utils.error("Dbf#exportRecords() header size mismatch; expected:", headerBytes, "written:", bin.position());
  }

  records.forEach((rec, i) => {
    const start = bin.position()
    bin.writeUint8(0x20) // delete flag; 0x20 valid 0x2a deleted
    for (let j = 0, n = fieldData.length; j < n; j += 1) {
      fieldData[j].write(i, bin)
    }
    // every record must occupy exactly recordBytes
    if (bin.position() - start != recordBytes) {
      utils.error("#exportRecords() Error exporting record:", rec)
    }
  })

  bin.writeUint8(0x1a) // end-of-file

  if (bin.position() != fileBytes) {
    utils.error("Dbf#exportRecords() file size mismatch; expected:", fileBytes, "written:", bin.position())
  }
  return buffer
}
// Size in bytes of the .dbf header: a 32-byte fixed header, one 32-byte
// descriptor per field, and the one-byte field-descriptor terminator.
Dbf.getHeaderSize = function(numFields) {
  const FIXED_HEADER = 32
  const FIELD_DESCRIPTOR = 32
  const TERMINATOR = 1
  return FIXED_HEADER + TERMINATOR + numFields * FIELD_DESCRIPTOR
}
// Size in bytes of one data record: the deletion-flag byte followed by
// every field's fixed-width value.
Dbf.getRecordSize = function(fieldSizes) {
  const dataBytes = utils.sum(fieldSizes)
  return 1 + dataBytes // delete byte plus data bytes
}
// Configure `info` for exporting a numeric ('N') field.
// Scans the column to choose a decimal count and field width, then installs
// a write(i, bin) function that emits the fixed-width, space-padded value.
Dbf.initNumericField = function(info, arr, name) {
  const MAX_FIELD_SIZE = 18
  const data = this.getNumericFieldInfo(arr, name)
  info.decimals = data.decimals
  let size = Math.max(data.max.toFixed(info.decimals).length,
      data.min.toFixed(info.decimals).length)
  if (size > MAX_FIELD_SIZE) {
    // Fix: compute the overflow BEFORE clamping `size`. The old code set
    // size = MAX_FIELD_SIZE first, so `size - MAX_FIELD_SIZE` was always 0,
    // decimals were never actually reduced, and the out-of-range guard
    // below was unreachable.
    info.decimals -= size - MAX_FIELD_SIZE
    size = MAX_FIELD_SIZE
    if (info.decimals < 0) {
      utils.error("Dbf#getFieldInfo() Out-of-range error.")
    }
  }
  info.size = size

  const formatter = Dbf.getDecimalFormatter(size, info.decimals)
  info.write = function(i, bin) {
    const rec = arr[i]
    let str = formatter(rec[name])
    if (str.length < size) {
      str = utils.lpad(str, size, ' ')
    }
    bin.writeString(str, size)
  }
}
// Configure `info` for exporting a logical ('L') field.
// Each record is written as a single character: 'T', 'F', or '?' for
// values that are not strictly boolean.
Dbf.initBooleanField = function(info, arr, name) {
  info.size = 1
  info.write = function(recIdx, bin) {
    const value = arr[recIdx][name]
    let flag
    if (value === true) {
      flag = 'T'
    } else if (value === false) {
      flag = 'F'
    } else {
      flag = '?'
    }
    bin.writeString(flag)
  }
}
// Configure `info` for exporting a date ('D') field as YYYYMMDD (UTC).
// Values that are not Date instances are written as the all-zero
// placeholder '00000000'.
Dbf.initDateField = function(info, arr, name) {
  info.size = 8
  info.write = function(recIdx, bin) {
    const value = arr[recIdx][name]
    let formatted
    if (value instanceof Date) {
      const year = utils.lpad(value.getUTCFullYear(), 4, '0')
      const month = utils.lpad(value.getUTCMonth() + 1, 2, '0')
      const day = utils.lpad(value.getUTCDate(), 2, '0')
      formatted = year + month + day
    } else {
      formatted = '00000000'
    }
    bin.writeString(formatted)
  }
}
// Stringify a field value; null and undefined become the empty string.
Dbf.convertValueToString = function(s) {
  if (s === undefined || s === null) {
    return ''
  }
  return String(s)
}
// Configure `info` for exporting a character ('C') field.
// Pre-encodes every value so the widest encoded byte length can become the
// fixed field size; counts how many values had to be truncated to the
// 254-byte dbf limit and surfaces that count via info.warning.
Dbf.initStringField = function(info, arr, name, encoding) {
  const formatter = encoding == 'ascii' ? Dbf.encodeValueAsAscii : Dbf.getStringWriterEncoded(encoding)
  let size = 0
  let truncated = 0
  // encode all values up front; buffers[i] holds the bytes for record i
  const buffers = arr.map((rec) => {
    const strval = Dbf.convertValueToString(rec[name])
    let buf = formatter(strval)
    if (buf.length > Dbf.MAX_STRING_LEN) {
      if (encoding == 'ascii') {
        buf = buf.subarray(0, Dbf.MAX_STRING_LEN)
      } else {
        // multi-byte encodings: avoid cutting a character in half
        buf = Dbf.truncateEncodedString(buf, encoding, Dbf.MAX_STRING_LEN)
      }
      truncated++
    }
    size = Math.max(size, buf.length)
    return buf
  })
  info.size = size
  info.write = function(i, bin) {
    const buf = buffers[i]
    const n = Math.min(size, buf.length)
    // NOTE(review): copies bytes through BinArray's internal byte array;
    // values shorter than `size` are left zero-padded (the ArrayBuffer is
    // zero-initialized) rather than space-padded — confirm downstream
    // readers accept NUL padding
    const dest = bin._bytes
    const pos = bin.position()
    for (let j = 0; j < n; j += 1) {
      dest[j + pos] = buf[j]
    }
    bin.position(pos + size)
  }
  if (truncated > 0) {
    info.warning = 'Truncated ' + truncated + ' string' + (truncated == 1 ? '' : 's') + ' to fit the 254-byte limit'
  }
}
// Convert arbitrary field names into unique dbf-safe names: strip
// disallowed characters, then deduplicate within a 10-character limit.
Dbf.convertFieldNames = function(names) {
  const cleaned = names.map(Dbf.cleanFieldName)
  return utils.getUniqFieldNames(cleaned, 10)
}
// Collapse every run of characters outside [A-Za-z0-9] into a single '_'.
Dbf.cleanFieldName = function(name) {
  const disallowed = /[^A-Za-z0-9]+/g
  return name.replace(disallowed, '_')
}
// Build the export descriptor for one field: detect the dbf column type
// from the data, then delegate to the matching init* helper, which sets
// size, decimals, and a write(i, bin) function on the returned info object.
// Unexportable types degrade to a zero-size null 'N' column with a warning.
Dbf.getFieldInfo = function(arr, name, encoding) {
  // Consistency fix: reference the Dbf namespace directly instead of `this`,
  // matching the other calls below and keeping the method safe to detach
  // from the namespace object (as this module does with exportRecords).
  const type = Dbf.discoverFieldType(arr, name)
  const info = {
        type: type,
        decimals: 0
      }
  if (type == 'N') {
    Dbf.initNumericField(info, arr, name)
  } else if (type == 'C') {
    Dbf.initStringField(info, arr, name, encoding)
  } else if (type == 'L') {
    Dbf.initBooleanField(info, arr, name)
  } else if (type == 'D') {
    Dbf.initDateField(info, arr, name)
  } else {
    // unsupported type: emit nothing per record (size 0) but keep an
    // 'N'-typed column so the file structure stays valid
    info.size = 0
    info.type = 'N'
    if (type) {
      info.warning = 'Unable to export ' + type + '-type data, writing null values'
    }
    info.write = function() {}
  }
  return info
}
// Infer the dbf column type from the first classifiable value in the
// column: 'C' (string), 'N' (number), 'L' (boolean), 'D' (date). Any other
// truthy value yields its typeof string; an all-empty column yields null.
Dbf.discoverFieldType = function(arr, name) {
  for (const rec of arr) {
    const val = rec[name]
    if (utils.isString(val)) return "C"
    if (utils.isNumber(val)) return "N"
    if (utils.isBoolean(val)) return "L"
    if (val instanceof Date) return "D"
    if (val) return (typeof val)
  }
  return null
}
// Build a formatter that renders a number as a fixed-width decimal string,
// left-padded with spaces. Non-finite or missing values become a blank field.
// TODO: find better way to handle nulls
Dbf.getDecimalFormatter = function(size, decimals) {
  const nullValue = ' ' // ArcGIS may use 0
  return function(val) {
    // TODO: handle invalid values better
    let strval
    if (utils.isFiniteNumber(val)) {
      strval = val.toFixed(decimals)
    } else {
      strval = String(nullValue)
    }
    return utils.lpad(strval, size, ' ')
  }
}
// Scan a numeric column and report its value range plus the number of
// decimal places needed to represent every value exactly (capped at 15,
// the dbf limit). Non-finite values (null, NaN, Infinity) are skipped.
// Note: min and max start at 0, so the reported range always contains 0.
Dbf.getNumericFieldInfo = function(arr, name) {
  let min = 0
  let max = 0
  let k = 1        // 10^decimals; scales values to integers for the decimal test
  let power = 1    // smallest power of 10 above the largest magnitude seen
  let decimals = 0
  let eps = 1e-15  // tolerance, scaled with both magnitude and decimal count
  let val = null
  for (let i = 0, n = arr.length; i < n; i += 1) {
    val = arr[i][name]
    if (!utils.isFiniteNumber(val)) {
      continue
    }
    if (val < min || val > max) {
      if (val < min) min = val
      if (val > max) max = val
      // widen the tolerance as magnitude grows, since floating-point
      // representation error is relative to magnitude
      while (Math.abs(val) >= power) {
        power *= 10
        eps *= 10
      }
    }
    // raise the decimal count until val * 10^decimals is integral
    // (within the scaled tolerance)
    while (Math.abs(Math.round(val * k) - val * k) > eps) {
      if (decimals == 15) { // dbf limit
        // TODO: round overflowing values ?
        break
      }
      decimals++
      eps *= 10
      k *= 10
    }
  }
  return {
    decimals: decimals,
    min: min,
    max: max
  }
}
// Encode a value as ASCII bytes in a pooled Uint8Array view.
// Characters above 0x7f are replaced with '?', unless `strict` is true,
// in which case null is returned and every reserved byte is released
// back to the pool.
Dbf.encodeValueAsAscii = function(val, strict) {
  const str = String(val)
  const len = str.length
  let view = Dbf.bufferPool.reserve(len)
  let written = 0
  while (written < len) {
    let code = str.charCodeAt(written)
    if (code > 127) {
      if (strict) {
        view = null
        written = 0 // return all bytes to pool
        break
      }
      code = '?'.charCodeAt(0)
    }
    view[written] = code
    written += 1
  }
  Dbf.bufferPool.putBack(len - written)
  if (view === null) {
    return null
  }
  return view.subarray(0, written)
}
// Return a function that encodes string values for the given encoding.
Dbf.getStringWriterEncoded = function(encoding) {
  return function(val) {
    // optimization -- large majority of strings in real-world datasets are
    // ascii. Try (faster) ascii encoding first, fall back to text encoder.
    const asciiBuf = Dbf.encodeValueAsAscii(val, true)
    if (asciiBuf !== null) {
      return asciiBuf
    }
    return ShpConverterText.encodeString(String(val), encoding)
  }
}
// try to remove partial multi-byte characters from the end of an encoded string.
Dbf.truncateEncodedString = function(buf, encoding, maxLen) {
  let result = buf.slice(0, maxLen)
  // step back up to three bytes, keeping the first cut whose decoded form
  // does not end in the replacement character (U+FFFD); if none is found,
  // fall back to the plain maxLen cut
  for (let len = maxLen; len > 0 && len >= maxLen - 3; len--) {
    const candidate = len == maxLen ? result : buf.slice(0, len)
    const decoded = ShpConverterText.decodeString(candidate, encoding)
    if (decoded.charAt(decoded.length - 1) != '\ufffd') {
      result = candidate
      break
    }
  }
  return result
}

// Public API: only the top-level export entry point is exposed; the other
// Dbf.* functions above are internal helpers.
export default {
  exportRecords: Dbf.exportRecords,
}