import JSZip from 'jszip'
import { EventEmitter } from 'events'
import domain from 'domain'
import utils from './utils'
import io from './io'
import Common from './common.js'
import {ShapefileTable} from './shapefile'
import { Model } from './dataset'

// Shared mutable state for one import/export run.
const commonVal = {}

/**
 * Reset the shared run state to its defaults: fresh model, empty file
 * queue and caches, zero in-flight reads, default import/export options.
 */
function initCommonVal() {
  Object.assign(commonVal, {
    model: new Model(),
    queuedFiles: [],
    importOpts: {
      no_repair: false,
      snap: false,
    },
    cachedFiles: {},
    readingCount: 0,
    exportOpts: {
      format: 'geojson',
      filename: 'export',
    },
  })
}
/**
 * Export the given input as a Shapefile.
 * @param {*} input a zip archive, shp-related files, a json file, or a plain object
 * @param {*} opts optional settings ({ exportFilename })
 * @returns {Promise} resolves with the written files; rejects with a
 *   message string when the input is not a FileList/Array/Object
 */
function exportShp(input, opts) {
  initCommonVal()
  if (opts) {
    const { exportFilename } = opts || {}
    commonVal.exportOpts.filename = exportFilename || 'export'
  }
  commonVal.exportOpts.format = 'shapefile'
  const files = getFormatInput(input)
  if (files) {
    return exportFile(files)
  }
  // invalid argument: keep the message string callers already expect
  return Promise.reject(`参数错误，请传入FileList或Object`)
}
/**
 * Export the given input as GeoJSON.
 * @param {*} input a zip archive, shp-related files, a json file, or a plain object
 * @param {*} opts optional settings ({ exportFilename })
 * @returns {Promise} resolves with the written files; rejects with a
 *   message string when the input is not a FileList/Array/Object
 */
function exportGeojson(input, opts) {
  initCommonVal()
  if (opts) {
    const {exportFilename} = opts || {}
    commonVal.exportOpts.filename = exportFilename || 'export'
  }
  commonVal.exportOpts.format = 'geojson'
  const files = getFormatInput(input)
  if (!files) {
    // getFormatInput returns false for unsupported input; previously this
    // fell through to exportFile(false) — validate like exportShp does
    return Promise.reject(`参数错误，请传入FileList或Object`)
  }
  return exportFile(files)
}
/**
 * Coerce the caller's input into an array of file-like objects.
 * - FileList / Array: returned unchanged
 * - any other object: serialized to a JSON Blob named after the export filename
 * - anything else: `false` (signals invalid input)
 * @param {*} input caller-supplied export source
 * @returns {FileList|Array|false}
 */
function getFormatInput(input) {
  if (input instanceof FileList || input instanceof Array) {
    return input
  }
  if (!(input instanceof Object)) {
    return false
  }
  const json = JSON.stringify(input)
  const blob = new Blob([json], {type:'text/plain;charset=utf-8'})
  // attach a name so downstream type detection treats it as a .json file
  blob.name = `${commonVal.exportOpts.filename}.json`
  return [blob]
}
/**
 * Core pipeline shared by exportShp/exportGeojson: read and import every
 * input file into the shared model, then write the target layers out in
 * the format configured in commonVal.exportOpts.
 * @param {FileList|Array} files input files (zip archive, shapefile parts, json, ...)
 * @returns {Promise} resolves with the result of io.writeFiles; rejects
 *   with an error message string on any failure along the way
 */
function exportFile(files) {
  return new Promise((resolve, reject) => {
    try {
      files = utils.toArray(files)
      // Errors raised asynchronously inside the callback pipeline are
      // emitted on errEmitter; the domain bound to it routes each one to
      // a single reject() call. NOTE(review): Node's `domain` module is
      // deprecated — consider replacing this error funnel.
      const domainInImportFiles = domain.create()
      const errEmitter = new EventEmitter()
      domainInImportFiles.on('error', err => reject(err.message))
      domainInImportFiles.add(errEmitter)
      importFiles(files)
        .then(newFiles => {
          try {
            // .txt files are never importable layers — drop them up front
            newFiles = newFiles.filter(f => !/\.txt$/i.test(f.name))
            commonVal.queuedFiles = addFilesToQueue(newFiles)
            // callback fires once the whole queue has been processed
            procNextQueuedFile((e) => {
              if (e) {
                errEmitter.emit('error',new Error(e.message))
              } else {
                const {model, exportOpts} = commonVal
                const targetLayers = getTargetLayers(model)
                const exportFiles = io.exportTargetLayers(targetLayers, exportOpts)
                io.writeFiles(exportFiles)
                  .then(result => {
                    // reset shared state so a later call starts clean
                    initCommonVal()
                    resolve(result)
                  })
                  .catch(e => errEmitter.emit('error',new Error(e.message)))
                }
            })
          } catch (e) {
            errEmitter.emit('error',new Error(e.message))
          }
        })
        .catch(e => errEmitter.emit('error',new Error(e.message)))
    } catch (e) {
      reject(e.message)
    }
  })
}
/**
 * Normalize the incoming file list: when a single zip archive is supplied,
 * extract every non-directory entry as a named Blob; otherwise pass the
 * files through untouched.
 * @param {Array} files array of File/Blob-like objects carrying a `name`
 * @returns {Promise<Array>} resolves with the (possibly extracted) files;
 *   rejects when the archive is empty or cannot be unpacked
 */
function importFiles(files) {
  return new Promise((resolve, reject) => {
    if (files.length === 1 && io.isZipFile(files[0].name)) {
      JSZip.loadAsync(files[0])
        .then(zip => {
          const promises = []
          zip.forEach((filename, file) => {
            if (!file.dir) {
              promises.push(
                zip.file(filename).async('blob')
                  .then(blob => {
                    // JSZip blobs carry no name; attach the entry path so
                    // later type detection can work from it
                    blob.name = filename
                    return blob
                  })
              )
            }
          })
          return Promise.all(promises)
        })
        .then(results => {
          // check after extraction so the flow cannot continue past the
          // rejection (previously reject() was followed by Promise.all)
          if (results.length === 0) {
            reject(new Error(`压缩包内无有效内容`))
          } else {
            resolve(results)
          }
        })
        .catch(e => reject(new Error(`解压失败：${e.message}`)))
    } else {
      resolve(files)
    }
  })
}
/**
 * Build the processing queue: keep only readable files, deduplicated by
 * exact filename. Reports an error when no files were supplied at all.
 * @param {Array} files candidate files
 * @returns {Array} readable, de-duplicated files in original order
 */
function addFilesToQueue(files) {
  if (files.length < 1) utils.error(`没有可以转换的文件`)
  const seen = {}
  const queued = []
  for (const f of files) {
    if (!isReadableFileType(f.name)) continue
    if (f.name in seen) continue
    seen[f.name] = true
    queued.push(f)
  }
  return queued
}
/**
 * Whether the importer can read a file of this name: a recognized input
 * type, a possible DSV table, or a zip archive.
 * @param {string} filename
 * @returns {boolean|*} truthy when readable
 */
function isReadableFileType(filename) {
  if (io.guessInputFileType(filename)) return true
  return io.couldBeDsvFile(filename) || io.isZipFile(filename)
}
/**
 * Process the next file in the shared queue.
 * @param {*} cb invoked with no argument once every queued file has been
 *   handled and no reads remain in flight, or with an error on failure
 */
function procNextQueuedFile(cb) {
  if (commonVal.queuedFiles.length === 0) {
    return
  } else {
    commonVal.queuedFiles = sortQueue(commonVal.queuedFiles)
    // cb is forwarded into readFile so importFileContent can recurse back
    // into procNextQueuedFile for the remaining queue entries
    readFile(commonVal.queuedFiles.shift(), cb)
      .then(isImported => {
        // true only when the queue is drained and no reads are pending
        if (isImported) cb()
      })
      .catch(e => cb(e))
  }
}
/**
 * If the next queued file is a shapefile part, pull every file sharing its
 * base name to the front of the queue, sorted by name descending so that
 * .shx is processed before .shp (with .prj/.dbf after).
 * @param {Array} queue current file queue (queue[0] is inspected)
 * @returns {Array} reordered queue
 */
function sortQueue(queue) {
  const nextFile = queue[0]
  if (!isShapefilePart(nextFile.name)) {
    return queue
  }
  const basename = utils.getFileBase(nextFile.name).toLowerCase()
  const parts = []
  const rest = []
  for (const file of queue) {
    if (utils.getFileBase(file.name).toLowerCase() === basename) {
      parts.push(file)
    } else {
      rest.push(file)
    }
  }
  parts.sort((a, b) => (a.name.toLowerCase() < b.name.toLowerCase() ? 1 : -1))
  return parts.concat(rest)
}
/**
 * Read a single file with FileReader and feed its content to
 * importFileContent. Shapefile parts, zip archives, json and text inputs
 * (per io's type detection) are read as ArrayBuffer, the rest as UTF-8.
 * @param {Blob|File} file file-like object carrying a `name`
 * @param {*} cb queue-continuation callback, forwarded to importFileContent
 * @returns {Promise<boolean>} resolves true once the last queued file has
 *   been read (queue empty, no reads in flight), false otherwise
 */
function readFile(file, cb) {
  return new Promise((resolve, reject) => {
    try {
      const { name } = file
      const reader = new FileReader()
      const useBinary = io.isSupportedBinaryInputType(name) ||
        io.isZipFile(name) ||
        io.guessInputFileType(name) === 'json' ||
        io.guessInputFileType(name) === 'text'
      reader.addEventListener('loadend', (e) => {
        try {
          let isImported = false
          if (!reader.result) {
            utils.error(`Unable to load the file ${name}`)
          } else {
            try {
              importFileContent(name, reader.result, cb)
              commonVal.readingCount -= 1
              // signal completion only when nothing is queued or in flight
              if (commonVal.queuedFiles.length === 0 && commonVal.readingCount === 0) {
                isImported = true
              }
            } catch (e) {
              utils.error(`文件 ${name} 导入失败：${e.message}`)
            }
          }
          resolve(isImported)
        } catch (e) {
          reject(e)
        }
      })
      if (useBinary) {
        reader.readAsArrayBuffer(file)
      } else {
        reader.readAsText(file, 'UTF-8')
      }
      // count this read as in-flight until its loadend handler runs
      commonVal.readingCount += 1
    } catch (e) {
      reject(e)
    }
  })
}
/**
 * Dispatch a file's raw content by detected type: .dbf/.prj sidecars are
 * attached to an already-imported matching .shp dataset, .shx files are
 * cached for the upcoming .shp import, and everything else starts a new
 * dataset import. Each branch continues the queue via procNextQueuedFile.
 * @param {string} fileName original filename (used for type detection)
 * @param {*} content ArrayBuffer or string produced by readFile
 * @param {*} cb queue-continuation callback
 */
function importFileContent(fileName, content, cb) {
  try {
    const fileType = io.guessInputType(fileName, content)
    const importOpts = utils.extend({}, commonVal.importOpts)
    const matches = findMatchingShp(fileName, commonVal.model)
    let dataset = {}
    let lyr = {}
    // Attach a .dbf attribute table to the first matching .shp dataset
    // that doesn't have one yet
    if (fileType === 'dbf' && matches.length > 0 ) {
      dataset = matches.reduce((memo, d) => {
        if (!d.layers[0].data) {
          memo = d
        }
        return memo
      }, null)
      if (dataset) {
        lyr = dataset.layers[0]
        lyr.data = new ShapefileTable(content, importOpts.encoding)
        if (lyr.shapes && lyr.data.size() != lyr.shapes.length) {
          utils.error('Different number of records in .shp and .dbf files')
        }
        procNextQueuedFile(cb)
        return
      }
    }
    if (fileType == 'shx') {
      // save .shx for use when importing .shp
      // (queue should be sorted so that .shx is processed before .shp)
      commonVal.cachedFiles[fileName.toLowerCase()] = {filename: fileName, content: content}
      procNextQueuedFile(cb)
      return
    }
    // Add .prj file to previously imported .shp file
    if (fileType == 'prj') {
      matches.forEach(function(d) {
        if (!d.info.prj) {
          d.info.prj = content
        }
      })
      procNextQueuedFile(cb)
      return
    }
    importNewDataset(fileType, fileName, content, importOpts, cb)
  } catch (e) {
    utils.error(e.message)
  }
}
/**
 * Import a file's content as a brand-new dataset and register it on the
 * model. For .shp files the matching cached .shx (if any) is supplied too.
 * @param {string} fileType detected input type ('shp', 'json', ...)
 * @param {string} fileName original filename
 * @param {*} content raw file content
 * @param {*} importOpts per-run import options snapshot
 * @param {*} cb queue-continuation callback
 */
function importNewDataset(fileType, fileName, content, importOpts, cb) {
  try {
    const input = {
      [fileType]: { filename: fileName, content: content },
    }
    if (fileType === 'shp') {
      // shx file should already be cached, if it was added together with
      // the shp (queue sorting processes .shx before .shp)
      const shxKey = fileName.replace(/shp$/i, 'shx').toLowerCase()
      input.shx = commonVal.cachedFiles[shxKey] || null
    }
    const dataset = io.importContent(input, importOpts)
    dataset.info.import_options = importOpts
    addDataset(dataset, cb)
  } catch (e) {
    utils.error(`importNewDataset-error,${e.message}`, null)
  }
}
/**
 * Find datasets already in the model that were imported from a .shp file
 * whose path base matches `filename` (case-insensitive).
 * @param {string} filename sidecar filename to match against
 * @param {*} model dataset model exposing getDatasets()
 * @returns {Array} matching datasets (possibly empty)
 */
function findMatchingShp(filename, model) {
  const wanted = utils.getPathBase(filename).toLowerCase()
  const matches = []
  for (const d of model.getDatasets()) {
    const firstInput = (d.info.input_files && d.info.input_files[0]) || ''
    const isShp = utils.getFileExtension(firstInput).toLowerCase() === 'shp'
    const sameBase = utils.getPathBase(firstInput).toLowerCase() === wanted
    if (sameBase && isShp) matches.push(d)
  }
  return matches
}
/**
 * Select the layers to export. The export always targets id '1' (the
 * default/first target in the model).
 * @param {*} model dataset model exposing findCommandTargets()
 * @returns {Array} matched target layers
 */
function getTargetLayers(model) {
  // The previous `ids ? ... : []` conditional on a constant non-empty
  // string was dead code — the else branch was unreachable.
  return model.findCommandTargets('1')
}
/**
 * Register a freshly imported dataset on the shared model — unless every
 * layer is featureless — then continue with the next queued file.
 * @param {*} dataset imported dataset
 * @param {*} cb queue-continuation callback
 */
function addDataset(dataset, cb) {
  const isEmpty = datasetIsEmpty(dataset)
  if (!isEmpty) {
    commonVal.model.addDataset(dataset)
  }
  procNextQueuedFile(cb)
}
/**
 * True when every layer of the dataset has a feature count of zero
 * (vacuously true for a dataset with no layers).
 * @param {*} dataset dataset with a `layers` array
 * @returns {boolean}
 */
function datasetIsEmpty(dataset) {
  for (const lyr of dataset.layers) {
    if (Common.getFeatureCount(lyr) !== 0) return false
  }
  return true
}
/**
 * True when a filename ends with one of the shapefile sidecar extensions
 * (.shp, .shx, .dbf or .prj), matched case-insensitively.
 * @param {string} name filename to inspect
 * @returns {boolean}
 */
function isShapefilePart(name) {
  const dot = name.lastIndexOf('.')
  if (dot === -1) return false
  const ext = name.slice(dot + 1).toLowerCase()
  return ['shp', 'shx', 'dbf', 'prj'].includes(ext)
}
// Public API: only the two export entry points are exposed
export default {
  exportGeojson,
  exportShp,
}