import {dsvFormat} from 'd3-dsv'
import Data from '../data'
import {DataTable} from '../data'
import commonds from '../commands'
import utils from '../utils'
import cli from '../cli'
import io from '../io'
import {BufferReader} from '../io'
import ShpConverterText from './index.js'

// Import a delimited text file (CSV/TSV/etc.) and convert it to a DataTable.
// data: object with either a .content property (string, ArrayBuffer or Buffer)
//   or a .filename to read from.
// opts: optional import options (encoding, csv_filter, csv_fields,
//   field_types, string_fields).
// Returns {layers: [{data: <DataTable>}], info: {input_delimiter: <string>}}.
function importDelim2(data, opts) {
  // TODO: remove duplication with importJSON()
  let readFromFile = !data.content && data.content !== ''
  let content = data.content
  let reader = null
  let records = null
  let delimiter = null
  opts = opts || {}
  const filter = getImportFilterFunction(opts)

  // read content of all but very large files into a buffer
  if (readFromFile && cli.fileSize(data.filename) < 2e9) {
    content = cli.readFile(data.filename)
    readFromFile = false
  }
  if (readFromFile) {
    // try to read data incrementally from file, if content is missing
    // NOTE(review): FileReader is not imported in this module — confirm it is
    // provided globally or should be referenced via the io module
    reader = new FileReader(data.filename)
  } else if (content instanceof ArrayBuffer || content instanceof Buffer) {
    // Web API may import as ArrayBuffer, to support larger files
    reader = new BufferReader(content)
    content = null
  } else if (utils.isString(content)) {
    // import as string
  } else {
    utils.error("Unexpected object type")
  }
  if (reader && !ShpConverterText.encodingIsAsciiCompat(opts.encoding)) {
    // Currently, incremental reading assumes ascii-compatible data.
    // Incompatible encodings must be parsed as strings.
    content = reader.toString(opts.encoding)
    reader = null
  }
  if (reader) {
    delimiter = guessDelimiter(io.readFirstChars(reader, 2000))
    records = ShpConverterText.readDelimRecords(reader, delimiter, opts.encoding, filter)
  } else {
    delimiter = guessDelimiter(content)
    records = dsvFormat(delimiter).parse(content, filter)
    delete records.columns // added by d3-dsv
  }
  if (records.length === 0) {
    // message("Unable to read any data records")
  }
  adjustRecordTypes(records, opts)
  // Fix: `table` was assigned without a declaration, creating an implicit
  // global (and a ReferenceError under strict-mode ES modules)
  const table = new DataTable(records)
  // NOTE(review): `internal` is not defined in this module — confirm where
  // isInvalidFieldName should come from
  utils.deleteFields(table, internal.isInvalidFieldName)
  return {
    layers: [{data: table}],
    info: {input_delimiter: delimiter}
  }
}
const supportedDelimiters = ['|', '\t', ',', ';']
// Guess the delimiter of a delimited-text string by testing each supported
// delimiter against the first line; falls back to comma when none match.
function guessDelimiter(content) {
  // Native Array#find replaces the hand-rolled utils.find helper; both yield
  // a falsy value (undefined vs. null) when no delimiter matches, so the
  // `|| ','` fallback behaves identically.
  return supportedDelimiters.find((delim) => getDelimiterRxp(delim).test(content)) || ','
}
// Get RegExp to test for a delimiter before first line break of a string
// Assumes that the first line does not contain alternate delim chars (this will
// be true if the first line has field headers composed of word characters).
function getDelimiterRxp(delim) {
  // one or more non-newline chars, then the (regex-escaped) delimiter
  return new RegExp(`^[^\\n\\r]+${utils.regexEscape(delim)}`)
}
// Build a map of field name -> type name ('string'|'number') from import
// options. Supports opts.string_fields (force string type) and
// opts.field_types (entries like "FIPS:str" or d3-style "+FIPS").
function getFieldTypeHints(opts) {
  const hints = {}
  opts = opts || {}
  if (opts.string_fields) {
    opts.string_fields.forEach((f) => {
      hints[f] = 'string'
    })
  }
  if (opts.field_types) {
    opts.field_types.forEach((raw) => {
      let name = null
      let type = null
      if (raw.includes(':')) { // was: raw.indexOf(':') != -1 (loose equality)
        const parts = raw.split(':')
        name = parts[0]
        type = validateFieldType(parts[1])
      } else if (raw[0] === '+') { // d3-style type hint: unary plus
        name = raw.slice(1) // slice() replaces deprecated substr()
        type = 'number'
      }
      if (type) {
        hints[name] = type
      } else {
        // message("Invalid type hint (expected :str or :num) [" + raw + "]")
      }
    })
  }
  return hints
}
// Detect and convert data types of data from csv files.
// Replaces field values in-place (via updateFieldsInRecords) according to
// type hints in opts; fields without a hint are auto-detected as numeric
// when every value parses as a number.
// TODO: decide how to handle records with inconsistent properties. Mapshaper
//    currently assumes tabular data
function adjustRecordTypes(records, opts) {
  const typeIndex = getFieldTypeHints(opts)
  const singleType = typeIndex['*'] // support for setting all fields to a single type
  const fields = Object.keys(records[0] || [])
  const detectedNumFields = []
  const replacements = {}
  fields.forEach((key) => {
    const typeHint = typeIndex[key]
    let values = null
    if (typeHint === 'number' || singleType === 'number') { // strict equality (was ==)
      values = convertDataField(key, records, utils.parseNumber)
    } else if (typeHint === 'string' || singleType === 'string') {
      values = convertDataField(key, records, utils.parseString)
    } else {
      // no hint: auto-detect numeric fields
      values = tryNumericField(key, records)
      if (values) detectedNumFields.push(key)
    }
    if (values) replacements[key] = values
  })
  if (Object.keys(replacements).length > 0) {
    updateFieldsInRecords(fields, records, replacements)
  }
  if (detectedNumFields.length > 0) {
    // message(utils.format("Auto-detected number field%s: %s",
    //     detectedNumFields.length == 1 ? '' : 's', detectedNumFields.join(', ')))
  }
}
// Copy original data properties and replacements to a new set of records
// (Better performance in v8 than making in-place replacements)
function updateFieldsInRecords(fields, records, replacements) {
  for (let recId = 0; recId < records.length; recId++) {
    const source = records[recId]
    const updated = {}
    fields.forEach((field) => {
      // replaced fields take their value from the replacement column
      updated[field] = field in replacements ? replacements[field][recId] : source[field]
    })
    records[recId] = updated
  }
}
// Try to parse every value of a field as a number.
// Returns an array of parsed values (null for empty/NA entries) when at least
// one value parses as a number and none is unparseable; otherwise null.
function tryNumericField(key, records) {
  const arr = []
  let count = 0
  for (let i=0, n=records.length; i<n; i++) {
    const raw = records[i][key]
    const num = utils.parseNumber(raw)
    if (num === null) {
      const str = raw ? raw.trim() : ''
      // strict equality (was !=); ignore NA values ("NA" seen in R output)
      if (str.length > 0 && str !== 'NA' && str !== 'NaN') {
        return null // unparseable value -- fail
      }
    } else {
      count++
    }
    arr.push(num)
  }
  return count > 0 ? arr : null
}
// Apply converter function f to every value of the named field;
// returns the converted values as a new array (one entry per record).
function convertDataField(name, records, f) {
  return records.map((rec) => f(rec[name]))
}
// Accept a type hint from a header like "FIPS:str"
// Return standard type name (number|string) or null if hint is not recognized
function validateFieldType(hint) {
  const str = hint.toLowerCase()
  let type = null
  // startsWith replaces loose-equality char comparison (str[0] == 'n');
  // behaves identically for empty strings (returns null)
  if (str.startsWith('n')) {
    type = 'number'
  } else if (str.startsWith('s')) {
    type = 'string'
  }
  return type
}
// Returns a d3-dsv compatible function for filtering records and fields on import,
// or null when neither opts.csv_filter nor opts.csv_fields is set.
// The returned function maps a record to null (drop it) or to the
// (possibly field-filtered) record.
// TODO: look into using more code from standard expressions.
function getImportFilterFunction(opts) {
  const expression = opts.csv_filter || null
  const recordFilter = expression ? Data.compileExpressionToFunction(expression, {returns: true}) : null
  const fieldFilter = opts.csv_fields ? commonds.getRecordMapper(commonds.mapFieldNames(opts.csv_fields)) : null
  const ctx = Data.getBaseContext();
  if (!recordFilter && !fieldFilter) return null
  return function(rec) {
    let val = null
    try {
      val = recordFilter ? recordFilter.call(null, rec, ctx) : true
    } catch(e) {
      // Fix: previously interpolated an undefined `exp` variable, which threw a
      // ReferenceError instead of reporting which expression failed; also
      // restores the space after ':' that the original stop() call produced.
      utils.error(`${e.name} in expression [${expression}]: ${e.message}`)
    }
    if (val === false) {
      return null
    } else if (val !== true) {
      utils.error(`Filter expression must return true or false`)
    }
    return fieldFilter ? fieldFilter(rec) : rec
  };
}
// Module API: expose only the top-level delimited-text importer.
export default { importDelim2 }