#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymongo import MongoClient
from bs4 import BeautifulSoup
import requests
import csv
import json
import sys
# Python 2 only: site.py deletes sys.setdefaultencoding at startup, so the
# module must be reload()-ed to get it back.  Forcing the default encoding
# to UTF-8 makes implicit str<->unicode conversions use UTF-8 instead of
# ASCII (avoids UnicodeDecodeError on the Chinese job-listing text).
# NOTE(review): this is a well-known global hack; explicit
# .encode('utf-8')/.decode('utf-8') at the I/O boundary is the safer fix.
reload(sys)
sys.setdefaultencoding('utf-8')


def json2csv(inf='data/jobs.json', of='data/jobs.csv'):
    """Convert a JSON-lines job dump to a CSV file.

    Each line of *inf* is a JSON object with an ``_id`` key (the job URL)
    and a ``result`` key (the parsed job detail).  Embedded newlines in
    field values are replaced with spaces so every record stays on a
    single CSV row.

    :param inf: path of the input JSON-lines file
    :param of: path of the output CSV file (overwritten)
    """
    print('format json data to csv --------------------')
    print('start formating >>')
    fieldnames = ['position_name', 'salary', 'job_request', 'description',
                  'advantage', 'company_name', 'address', 'url', 'company_homepage']
    with open(of, 'w') as csvfile:
        spamwriter = csv.DictWriter(csvfile, fieldnames=fieldnames)
        # writeheader() terminates the header row properly; the previous
        # manual ','.join(...) write emitted no newline, so the first data
        # row was fused onto the header line.
        spamwriter.writeheader()
        with open(inf, 'r') as source:  # do not shadow the `inf` parameter
            for line in source:
                record = json.loads(line)  # parse once per line, not twice
                url = record['_id']
                row = record['result']
                spamwriter.writerow({
                    'url': url,
                    'position_name': row["positionName"].replace('\n', ' '),
                    'job_request': row["job_request"].replace('\n', ' '),
                    'salary': row["salary"].replace('\n', ' '),
                    'description': row["info"]["jobDescription"].replace('\n', ' '),
                    'advantage': row["info"]["jobAdvantage"].replace('\n', ' '),
                    'company_name': row["company"]["name"].replace('\n', ' '),
                    'company_homepage': row["company"]["lagou_homepage"].replace('\n', ' '),
                    'address': row['address'].replace('\n', ' ')
                })
    print('<< completed --------------------------------')


def write_json(josn_obj, of):
    """Serialize *josn_obj* as JSON and write it to file *of*, newline-terminated.

    :param josn_obj: any JSON-serializable object (parameter name kept,
        typo and all, for backward compatibility with keyword callers)
    :param of: path of the output file (truncated if it exists)
    """
    with open(of, 'w') as outfile:  # do not shadow the `of` parameter
        # The old code did json.dump(josn_obj + '\n', ...), which raises
        # TypeError for any non-string object and, for strings, embeds the
        # newline inside the JSON value.  Dump the object itself and append
        # the line terminator separately.
        json.dump(josn_obj, outfile)
        outfile.write('\n')


def lg_job_category():
    """Print job-category labels scraped from the lagou.com homepage.

    Fetches the homepage, then prints the text of every link inside the
    'menu_main job_hopping' divs, followed by every link inside the
    'menu_sub dn' (hidden sub-menu) divs.
    """
    response = requests.get('https://www.lagou.com/')
    page = BeautifulSoup(response.content, 'lxml')
    # The top-level menu and the hidden sub-menu are scraped the same
    # way, so walk both CSS classes in order instead of duplicating loops.
    for css_class in ('menu_main job_hopping', 'menu_sub dn'):
        for container in page.find_all('div', class_=css_class):
            for link in container.find_all('a'):
                print(link.get_text())


class ToMongo(object):
    """Dict-like persistence of crawled job pages in MongoDB.

    Records live in the ``lagou.pretty_jobs`` collection and are keyed by
    URL: ``{'_id': url, 'content': ...}``.  Supports ``store[url]``,
    ``store[url] = content`` and ``url in store``.
    """

    def __init__(self, client=None):
        # connect=False defers the actual connection until first use,
        # which keeps construction safe in forked/multiprocess crawlers.
        self.client = MongoClient(
            'localhost', 27017, maxPoolSize=200, connect=False,) if client is None else client
        self.db = self.client.lagou
        self.jobs = self.db.pretty_jobs

    def __contains__(self, url):
        """Return True if a record for *url* is stored."""
        try:
            self[url]
        except KeyError:
            return False
        else:
            return True

    def __getitem__(self, url):
        """Return the stored content for *url*.

        :raises KeyError: if no record exists for *url*
        """
        record = self.jobs.find_one({'_id': url})
        if record:
            return record['content']
        else:
            # Bug fix: the original message lacked the separating space,
            # producing e.g. "http://...comdoes not exist".
            raise KeyError(url + ' does not exist')

    def __setitem__(self, url, content):
        """Insert or overwrite (upsert) the record for *url*."""
        record = {'content': content}
        # NOTE(review): Collection.update is deprecated and removed in
        # pymongo 4; migrate to update_one once the pymongo version is pinned.
        self.jobs.update({'_id': url}, {'$set': record}, upsert=True)

    def clear(self):
        """Drop the whole pretty_jobs collection."""
        self.jobs.drop()


# def store2mongo(input, client=None):
#     print 'store data to mongodb -------------------'
#     print 'start storing >>'
#     tmongo = ToMongo(client)
#     with open(input, 'r') as inf:
#         text = inf.readlines()
#         for line in text:
#             url = json.loads(line)['url']
#             content = json.loads(line)
#             tmongo[url] = content
#         inf.close()
#     print 'OK << ------------------------------------'


if __name__ == '__main__':
    # Entry point intentionally idle; uncomment json2csv() to regenerate
    # data/jobs.csv from data/jobs.json.
    # json2csv()
    pass
