#!/usr/bin/env python
# coding=utf-8
# __author__ = 'Yunchao Ling'

import json
import urllib
import urllib2

import MySQLdb

"""
本脚本是将文献对应的descriptor在solr上建立索引，输入文件是json格式，每条数据一行。
元数据示例：

{"_id":{"$oid":"55fedc12a7c0a05e7aed5465"},"descriptor":["D001926","D003422","D006801","D010347","D014019"],"pmid":10641110}
{"_id":{"$oid":"55fedc12a7c0a05e7aed546b"},"descriptor":["D000818","D005121","D005734","D010939","D012756","D012757"],"pmid":1060005}

本脚本使用了MySQLdb模块连接数据库，请先确认python安装了此模块

/usr/local/python-3.5.0/bin/python3 ipaper_artilce_mesh_descriptor_solr.py /export/bigdata/literaturedata/mesh_dump/mongo_dump.json
"""

SOLR_URL = "http://10.10.31.13:8080/solr"
DB_CONNECTION = None  # 数据库连接全局变量


def get_db_connection():
    """Open the shared MySQL connection if it is not already open.

    Stores the connection object in the module-level ``DB_CONNECTION``;
    subsequent calls are no-ops while a connection is present.  On failure
    the error is printed and ``DB_CONNECTION`` stays ``None`` (best effort).
    """
    global DB_CONNECTION
    if DB_CONNECTION is not None:
        return
    database_name = 'pmc_meta'
    host = '10.10.31.17'
    user_name = 'pmc'
    password = 'pmc'
    char_set = 'utf8'
    try:
        DB_CONNECTION = MySQLdb.connect(host=host, user=user_name, passwd=password,
                                        db=database_name, charset=char_set)
    except MySQLdb.Error as e:
        # Was a bare ``except:`` that hid the actual failure reason;
        # catch only driver errors and show the detail.
        print("cannot connect database")
        print(e)


def close_db_connection():
    """Close the shared MySQL connection and mark it as closed.

    Resets ``DB_CONNECTION`` to ``None`` so a later ``get_db_connection()``
    reconnects; the original left the stale (closed) object in place, and
    ``get_db_connection()`` would have handed it out again.
    """
    global DB_CONNECTION
    if DB_CONNECTION is not None:
        DB_CONNECTION.close()
        DB_CONNECTION = None


def search_article_title(pmid):
    """Return the title of the article with the given ``pmid``.

    Queries ``tb_dds_article`` through the shared connection.  Returns the
    empty string when no row matches or the query fails (errors are printed,
    not raised).
    """
    get_db_connection()
    cursor = DB_CONNECTION.cursor()
    sql = "select title from tb_dds_article where pmid=%s"
    title = ""
    try:
        # Parameterized query: pmid is never interpolated into the SQL text.
        cursor.execute(sql, (pmid,))
        for row in cursor:
            title = row[0]
    except MySQLdb.Error as e:
        # The original clause ``except MySQLdb.connection.error(self)`` was
        # invalid (undefined ``self``, nonexistent attribute) and would itself
        # raise; MySQLdb.Error is the driver's real base exception class.
        print(e)
    finally:
        cursor.close()
    return title


def _add_fragment(doc_id, name):
    """Build one Solr ``"add"`` command fragment for an organization row."""
    # json.dumps escapes quotes/backslashes in ``name``; the original string
    # concatenation produced invalid JSON whenever a name contained '"'.
    # ``doc_id`` is passed through unquoted (a JSON number), matching the
    # original output format.
    doc = json.dumps({"id": doc_id, "name": name}, ensure_ascii=False)
    return '"add":{"overwrite":true, "doc": ' + doc + '}'


def _flush_batch(batch_docs, num_sent):
    """Send the accumulated fragments as one Solr update, empty the list,
    print and return the running total of submitted documents."""
    json_data = '{"commit": {}, ' + (','.join(batch_docs)) + '}'
    send_data2solr(json_data)
    num_sent += len(batch_docs)
    # del batch_docs[:] works on Python 2 as well; the original used
    # list.clear(), which does not exist in Python 2 (this file uses urllib2,
    # i.e. it runs under Python 2) and would crash on the first flush.
    del batch_docs[:]
    print(num_sent)
    return num_sent


def index_data():
    """Index all rows of ``tb_dds_organization`` into Solr in batches.

    Reads (id, name) pairs from MySQL and submits them to the organization
    core 500 documents at a time (the original comment said 100 but the code
    always batched by 500).  Errors are printed; the connection is always
    closed.
    """
    try:
        get_db_connection()
        cursor = DB_CONNECTION.cursor()
        cursor.execute("select id,name from tb_dds_organization")
        batch_docs = []
        num = 0
        for row in cursor.fetchall():
            batch_docs.append(_add_fragment(row[0], row[1]))
            # Submit every 500 rows.
            if len(batch_docs) == 500:
                num = _flush_batch(batch_docs, num)
        # Submit the final partial batch, if any.
        if batch_docs:
            num = _flush_batch(batch_docs, num)
    except Exception as e:
        print(e)
    finally:
        close_db_connection()


def send_data2solr(jdata):
    """POST one JSON update command to the ipaper_organization Solr core.

    Errors are printed and swallowed -- indexing is best effort.
    """
    try:
        # Python 2's urllib.urlencode(query, doseq=0) has no ``encoding``
        # keyword, so the original call raised TypeError on every invocation;
        # the trailing .encode() on a py2 str was also a no-op at best.
        params = urllib.urlencode({"commit": "true", "stream.body": str(jdata)})
        url = SOLR_URL + "/ipaper_organization/update/json?wt=json&commit=true"
        req = urllib2.Request(url, params)
        urllib2.urlopen(req)
    except Exception as e:
        print(e)


# Script entry point: run the full organization indexing job.
if __name__ == '__main__':
    index_data()
