
'''将job_data.txt输入到hive表中，用python方式
需要这些库，但是再安装sasl时候会报错，处理起来比较麻烦，不建议
pip install sasl
pip install thrift
pip install thrift-sasl
pip install PyHive
抓取时候输出的 job_data.txt 已经用 '#' 分割字段、用 '\n' 换行了。
所以只需要手动在hive中创建表，并进行从本地的插入操作
-- 删除原来的表（Hive 注释用 --，不是 #）
DROP TABLE IF EXISTS job_bigdata;
CREATE TABLE job_bigdata(
  title string,
  job_city string,
  salary_min float,
  salary_max float,
  experience_year string,
  education_need string,
  job_advantage_tags string,
  publish_date string,
  position_info string,
  url string
)ROW FORMAT DELIMITED FIELDS TERMINATED BY '#' LINES TERMINATED BY '\n' STORED AS TEXTFILE;
-- 从本地加载数据
load data local inpath '/home/hduser/job_data.txt' into table job_bigdata;

'''
'''from pyhive import hive  # or import hive
conn = hive.connect('192.168.0.100')
cursor=conn.cursor()

cursor.execute("DROP TABLE IF EXISTS job_bigdata")
sql="CREATE TABLE job_bigdata(title string,job_city string,salary_min float," \
    "salary_max float,experience_year string,education_need string," \
    "job_advantage_tags string,publish_date string,position_info string,url string)" \
    "ROW FORMAT DELIMITED FIELDS TERMINATED BY '#' LINES TERMINATED BY '\\n' STORED AS TEXTFILE"
cursor.execute(sql)
cursor.execute("load data local inpath '/home/hduser/job_data.txt' into table job_bigdata")
conn.commit()
cursor.execute("select * from job_bigdata limit 5")
for result in cursor.fetchall():
     print(result)
cursor.close()
conn.close()
'''