import utils from '../utils'
import Dataset from '../dataset'
import shpConverterTopology from '../shpconverter-topology'
import Geojson from '../geojson'
import Geom from '../geom'
import Shapefile from '../shapefile'
import io from './index.js'

// @targets - non-empty output from Catalog#findCommandTargets()
// Converts each target into a dataset restricted to that target's layers,
// then runs the shared dataset-export pipeline.
function exportTargetLayers(targets, opts) {
  // convert target fmt to dataset fmt
  const datasets = targets.map((target) => utils.defaults({layers: target.layers}, target.dataset))
  return exportDatasets(datasets, opts)
}
// Export one or more datasets as an array of output file objects.
// svg and topojson are always treated as multi-layer formats; geojson only
// when opts.combine_layers is set (explicit parens below make the original
// &&-over-|| precedence visible).
function exportDatasets(datasets, opts) {
  const format = io.getOutputFormat(datasets[0], opts)
  const combine = format == 'svg' || format == 'topojson' ||
      (format == 'geojson' && opts.combine_layers)
  if (combine) {
    // multi-layer formats: combine multiple datasets into one
    if (datasets.length > 1) {
      datasets = [Dataset.mergeDatasetsForExport(datasets)]
      if (format == 'topojson') {
        // Build topology, in case user has loaded several
        // files derived from the same source, with matching coordinates
        // (Downsides: useless work if geometry is unrelated;
        // could create many small arcs if layers are partially related)
        shpConverterTopology.buildTopology(datasets[0])
      }
      // KLUDGE let exporter know that copying is not needed
      // (because shape data was deep-copied during merge)
      opts = utils.defaults({final: true}, opts)
    }
  } else {
    datasets = datasets.map(Dataset.copyDatasetForRenaming)
    assignUniqueLayerNames2(datasets)
  }
  const files = []
  for (const dataset of datasets) {
    utils.sortOn(dataset.layers, 'stack_id', true)
    files.push(...exportFileContent(dataset, opts))
  }
  // need unique names for multiple output files
  assignUniqueFileNames(files)
  return files
}
// const exporters = {
//   geojson: Geojson.exportGeoJSON,
//   // topojson: internal.exportTopoJSON,
//   shapefile: Shapefile.exportShapefile,
//   // dsv: internal.exportDelim,
//   dbf: Shapefile.exportDbf,
//   // json: internal.exportJSON,
//   // svg: internal.exportSVG
// }
// Return an array of objects with "filename" and "content" members.
function exportFileContent(dataset, opts) {
  // Resolve the output format and record it on opts so exporters can read it
  const outFmt = opts.format = io.getOutputFormat(dataset, opts)
  let exporter = null
  switch (outFmt) {
    case 'geojson':
      exporter = Geojson.exportGeoJSON
      break
    case 'shapefile':
      exporter = Shapefile.exportShapefile
      break
    case 'dbf':
      // restores dbf support, matching the exporters map documented above
      exporter = Shapefile.exportDbf
      break
  }
  let files = []

  // assumes utils.error() throws — TODO confirm
  if (!outFmt) {
    utils.error("Missing output format")
  } else if (!exporter) {
    // BUGFIX: message used to read "format:xyz " (no space after the colon,
    // stray trailing space)
    utils.error(`Unknown output format: ${outFmt}`)
  }

  // shallow-copy dataset and layers, so layers can be renamed for export
  dataset = utils.defaults({
    layers: dataset.layers.map(function(lyr) {return utils.extend({}, lyr)})
  }, dataset)

  // Adjust layer names, so they can be used as output file names
  // (except for multi-layer formats TopoJSON and SVG)
  if (opts.file && outFmt != 'topojson' && outFmt != 'svg') {
    dataset.layers.forEach(function(lyr) {
      lyr.name = utils.getFileBase(opts.file)
    })
  }
  assignUniqueLayerNames(dataset.layers)

  // apply coordinate precision, except:
  //   svg precision is applied by the SVG exporter, after rescaling
  //   GeoJSON precision is applied by the exporter, to handle default precision
  //   TopoJSON precision is applied to avoid redundant copying
  if (opts.precision && outFmt != 'svg' && outFmt != 'geojson' && outFmt != 'topojson') {
    dataset = Dataset.copyDatasetForExport(dataset)
    Geom.setCoordinatePrecision(dataset, opts.precision)
  }

  if (opts.cut_table) {
    files = exportDataTables(dataset.layers, opts).concat(files)
  }

  if (opts.extension) {
    opts.extension = fixFileExtension(opts.extension, outFmt)
  }

  validateLayerData(dataset.layers)

  files = exporter(dataset, opts).concat(files)
  // If rounding or quantization are applied during export, bounds may
  // change somewhat... consider adding a bounds property to each layer during
  // export when appropriate.
  if (opts.bbox_index) {
    files.push(createIndexFile(dataset))
  }

  validateFileNames(files)
  return files
}
// Generate json file with bounding boxes and names of each export layer
// TODO: consider making this a command, or at least make format settable
//
function createIndexFile(dataset) {
  const entries = []
  for (const lyr of dataset.layers) {
    const bounds = Dataset.getLayerBounds(lyr, dataset.arcs)
    entries.push({
      bbox: bounds.toArray(),
      name: lyr.name
    })
  }
  return {
    content: JSON.stringify(entries),
    filename: "bbox-index.json"
  }
}
// Throw errors for various error conditions
// (assumes utils.error() throws — TODO confirm)
function validateLayerData(layers) {
  const validTypes = ['polygon', 'polyline', 'point']
  layers.forEach(function(lyr) {
    if (!lyr.geometry_type) {
      // allowing data-only layers, but not layers that contain non-null
      // shape records without a declared geometry type
      if (lyr.shapes && lyr.shapes.some(function(o) {
        return !!o
      })) {
        utils.error("A layer contains shape records and a null geometry type")
      }
    } else {
      // idiom: native Array#includes / #some instead of utils.contains / utils.some
      if (!validTypes.includes(lyr.geometry_type)) {
        utils.error(`A layer has an invalid geometry type:${lyr.geometry_type}`)
      }
      if (!lyr.shapes) {
        utils.error("A layer is missing shape data")
      }
    }
  })
}
// Confirm that every output file has a unique, non-empty filename;
// reports an error otherwise (assumes utils.error() throws — TODO confirm).
function validateFileNames(files) {
  // BUGFIX: use a Set instead of `filename in obj` — the `in` operator also
  // matches inherited keys (e.g. "toString"), causing false duplicates
  const seen = new Set()
  files.forEach(function(file, i) {
    const filename = file.filename
    if (!filename) utils.error(`Missing a filename for file ${i}`)
    // BUGFIX: message used "$(unknown)" instead of interpolating the name
    if (seen.has(filename)) utils.error(`Duplicate filename ${filename}`)
    seen.add(filename)
  })
}
// Rename layers in place so every layer has a unique, non-empty name
// (unnamed layers fall back to "layer" before de-duplication).
function assignUniqueLayerNames(layers) {
  const proposed = []
  for (const lyr of layers) {
    proposed.push(lyr.name || "layer")
  }
  const unique = utils.uniqifyNames(proposed)
  layers.forEach(function(lyr, i) {
    lyr.name = unique[i]
  })
}
// Assign unique layer names across multiple datasets
function assignUniqueLayerNames2(datasets) {
  const allLayers = []
  for (const dataset of datasets) {
    allLayers.push(...dataset.layers)
  }
  assignUniqueLayerNames(allLayers)
}
// De-duplicate output filenames in place, using formatVersionedFileName
// to rename clashing entries.
function assignUniqueFileNames(output) {
  const currentNames = output.map((o) => o.filename)
  const renamed = utils.uniqifyNames(currentNames, formatVersionedFileName)
  output.forEach((o, i) => {
    o.filename = renamed[i]
  })
}
// TODO: remove this -- format=json creates the same output
//   (but need to make sure there's a way to prevent names of json data files
//    from colliding with names of GeoJSON or TopoJSON files)
function exportDataTables(layers, opts) {
  const tables = []
  for (const lyr of layers) {
    if (!lyr.data) continue
    const prefix = lyr.name ? lyr.name + '-' : ''
    tables.push({
      content: JSON.stringify(lyr.data),
      filename: prefix + 'table.json'
    })
  }
  return tables
}
// Build a versioned variant of a filename, inserting the version before
// the last file extension when one is present.
function formatVersionedFileName(filename, i) {
  const dot = filename.lastIndexOf('.')
  if (dot === -1) {
    // no extension: version the whole name
    return utils.formatVersionedName(filename, i)
  }
  const base = filename.substring(0, dot)
  const ext = filename.substring(dot + 1)
  return utils.formatVersionedName(base, i) + '.' + ext
}

// Normalize a user-supplied file extension by stripping any leading dots
// (e.g. ".json" -> "json").
function fixFileExtension(ext, fmt) {
  // TODO: use fmt to validate
  let start = 0
  while (start < ext.length && ext.charAt(start) === '.') {
    start++
  }
  return ext.substring(start)
}
// Public API: only the top-level entry point is exported; the other
// functions in this module are internal helpers.
export default {
  exportTargetLayers,
}