# -*- coding:utf-8 -*-
from pyspark import SparkConf,SparkContext
import json
# Count records whose 'location' field is empty in the bus-stop
# longitude/latitude data file, using a local 2-core Spark context.
conf = SparkConf().setMaster("local[2]").setAppName("SparkDemo")
sc = SparkContext(conf=conf)

try:
    # Each input line is a Python-style dict literal that uses single quotes,
    # so swap them for double quotes before handing the line to json.loads.
    # NOTE(review): this quote swap corrupts any value that itself contains a
    # quote character — ast.literal_eval would parse Python-literal lines more
    # robustly; confirm the input format before switching.
    res = (
        sc.textFile("file:///D:/mldong/bigdata/公交站点经纬度")
        .map(lambda item: json.loads(item.replace('\'', '\"')))
        # Keep only records whose 'location' value is empty.
        .filter(lambda item: len(item['location']) == 0)
        .collect()
    )
    print(len(res))
finally:
    # Always release the Spark driver/executor resources, even on failure.
    sc.stop()