# -*- coding: utf-8 -*-

# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://doc.scrapy.org/en/latest/topics/item-pipeline.html
import pymysql


class SpiderscriptPipeline(object):
    """Scrapy item pipeline that persists scraped area records to MySQL.

    Each item is expected to carry:
      - 'areas': list of dicts, each with 'area_code', 'tree_names', 'area_name'
      - 'parent_code', 'parent_codes', 'tree_level', 'tree_leaf', 'area_type'

    Every area in ``item['areas']`` becomes one row in ``SYS_AREA_CN``.
    """

    def process_item(self, item, spider):
        """Insert all areas of *item* into SYS_AREA_CN and return the item.

        Args:
            item: the scraped item (dict-like, schema described on the class).
            spider: the spider that produced the item (unused).

        Returns:
            The item, unchanged, so downstream pipelines keep running.

        Raises:
            pymysql.MySQLError: on connection or insert failure (connection
            is always closed via ``finally``).
        """
        areas = item['areas']
        tree_level = item['tree_level']
        # One leading '30,' plus one '30,' per tree level (same value the
        # original loop built).
        tree_sorts = '30,' * (tree_level + 1)

        # SECURITY NOTE(review): credentials are hard-coded; move them to
        # Scrapy settings or environment variables before deploying.
        conn = pymysql.connect(host='mysqlinstance.caykn1i6pbqc.us-east-2.rds.amazonaws.com',
                               port=3306, user='aws_mysql01', password='mysqlpassword',
                               db='spider', charset='utf8',
                               cursorclass=pymysql.cursors.DictCursor)
        try:
            # Parameterized query: scraped values are untrusted, so never
            # interpolate them into the SQL string (fixes injection/quoting).
            sql = ("insert into SYS_AREA_CN (AREA_CODE, PARENT_CODE, PARENT_CODES, "
                   "TREE_SORT, TREE_SORTS, TREE_LEAF, TREE_LEVEL, TREE_NAMES, "
                   "AREA_NAME, AREA_TYPE, STATUS, CREATE_BY, CREATE_DATE, "
                   "UPDATE_BY, UPDATE_DATE) "
                   "values (%s, %s, %s, 30, %s, %s, %s, %s, %s, %s, '0', "
                   "'system', sysdate(), 'system', sysdate())")
            with conn.cursor() as cursor:  # bug fix: cursor was never closed
                for area in areas:
                    # Bug fix: original called cursor.execut() (typo), which
                    # raised AttributeError and inserted nothing.
                    cursor.execute(sql, (
                        area['area_code'],
                        item['parent_code'],
                        item['parent_codes'] + ',',  # original appended a trailing comma
                        # NOTE(review): tree_sorts already ends with ',' and the
                        # original template appended another — double trailing
                        # comma preserved; confirm it is intended.
                        tree_sorts + ',',
                        item['tree_leaf'],
                        tree_level,
                        area['tree_names'],
                        area['area_name'],
                        item['area_type'],
                    ))
            # Bug fix: pymysql does not autocommit by default, so without this
            # the inserts were silently rolled back on close.
            conn.commit()
        finally:
            conn.close()
        return item
