// Count how many distinct values appear in the first CSV field of the input.
// Each input line is a comma-separated record; field 0 is the key of interest.
val lines = sc.textFile("file:///usr/local/spark/sparksqldata/chapter5-data1.txt")

// Project every record down to its first comma-separated field.
val par = lines.map(_.split(",")(0))

// Drop duplicates across the whole RDD (shuffles to deduplicate).
val distinct_par = par.distinct()

// Action: trigger the job and return the number of distinct keys.
distinct_par.count()