package com.jonas.sparkwork.answer2

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.spark.SparkContext

/**
 * @author Jonas Gao
 * @since 2022/4/16
 */
/**
 * Parsed job options.
 *
 * @param from source path to read from (hdfs:// — enforced by [[Options.parse]])
 * @param to   destination path to write to (hdfs:// — enforced by [[Options.parse]])
 * @param max  optional limit; -1 when not supplied (presumably "no limit" — confirm against the job that consumes it)
 */
case class Options(from: Path, to: Path, max: Int)

object Options {

  /**
   * Parses command-line arguments into an [[Options]].
   *
   * Expected layout: `args(0)` = from path, `args(1)` = to path,
   * optional `args(2)` = max (defaults to -1 when omitted).
   *
   * @param args raw program arguments
   * @return the parsed [[Options]]
   * @throws IllegalArgumentException      if fewer than two arguments are supplied
   * @throws NumberFormatException         if the optional max argument is not an integer
   * @throws UnsupportedOperationException if either path is not an hdfs:// URI
   */
  def parse(args: Array[String]): Options = {
    // Fail fast with a clear usage message instead of a bare ArrayIndexOutOfBoundsException.
    require(args.length >= 2, "usage: <from> <to> [max] — at least from and to are required")
    val from = args(0)
    val to = args(1)
    if (!from.startsWith("hdfs://") || !to.startsWith("hdfs://")) {
      // Only hdfs is supported, mainly to keep the job simple.
      throw new UnsupportedOperationException("仅支持 hdfs 文件系统")
    }
    // -1 is the sentinel for "max not supplied".
    val max = if (args.length > 2) args(2).toInt else -1
    // Case-class apply — no `new` needed.
    Options(new Path(from), new Path(to), max)
  }
}
