# -*- coding: utf-8 -*-
from scrapy import log
import logging
from scrapy.log import ScrapyFileLogObserver
from scrapy.spider import BaseSpider
from scrapy.selector import HtmlXPathSelector
from stox.items import *
from scrapy.http import Request, Response
from scrapy.http.cookies import CookieJar


class MySpider(BaseSpider):
    """Spider for companyaz.stox.vn that scrapes quarterly financial
    statements (income statement, balance sheet, financial ratios) for
    each ticker listed in urls.txt and writes them to per-ticker CSV
    files under is/, bs/ and fi/ (with bank/ subfolders for banks).
    """
    name = "stox"
    allowed_domains = ["stox.vn"]    
    start_urls =["http://companyaz.stox.vn/Financial?cId=113&iId=217&iIdL=202&eId=0&tId=2&status=1&id=-1&cats=&ticker=FPT"        ]    
    
        
    def __init__(self, *args, **kwargs):
        """Initialise the spider and mirror scrapy log output to a file.

        Accepts and forwards arbitrary spider arguments so Scrapy can
        still instantiate the spider with a name or crawl arguments
        (the original signature silently broke that).
        """
        super(MySpider, self).__init__(*args, **kwargs)
        # Write all log messages (DEBUG and up) to testlog.log; the
        # observer takes ownership of the file handle, so it is not
        # closed here.
        logfile = open('testlog.log', 'w')
        log_observer = ScrapyFileLogObserver(logfile, level=logging.DEBUG)
        log_observer.start()  # start logging
    
    def start_requests(self):
        """Yield one Request per URL listed in urls.txt (one URL per line).

        Each request is parsed by :meth:`parse`.  The file is read with a
        context manager so the handle is closed even if request creation
        raises (the original leaked the handle on error).
        """
        with open("urls.txt") as f:
            urls = [line.strip() for line in f]
        for url in urls:
            yield Request(url, self.parse)
        
    def parse(self, response):
        """Dispatch the three detail-page requests for this ticker.

        Reads the ticker symbol from the page and the company type from
        the ``tId=`` URL parameter ('0' means bank, which uses different
        page layouts and callbacks), then yields one request each for
        the income statement, balance sheet and financial-ratio pages,
        manually carrying over this response's session cookies.
        """
        hxs = HtmlXPathSelector(response)
        ticker = "".join(hxs.select("//div[@class='stock-ticker-title']/label/text()").extract()).strip()

        # Single character right after 'tId=' in the URL selects the
        # company type.  NOTE(review): assumes 'tId=' is always present
        # in the start URLs -- if it is missing, find() returns -1 and a
        # wrong character is read, same as the original code.
        pos = response.url.find('tId=')
        s_type = response.url[pos + len("tId=")]
        is_bank = (s_type == "0")
        strType = "Bank" if is_bank else ""

        # Capture the session cookie of the start url so the detail
        # requests can present it themselves (dont_merge_cookies below).
        cookieJar = response.meta.setdefault('cookie_jar', CookieJar())
        cookieJar.extract_cookies(response, response.request)

        # (page name, filter value, non-bank callback, bank callback),
        # in the same order the original code issued the requests.
        pages = [
            ("PV_Index", 1, self.extractISItem, self.extractISBankItem),
            ("PV_BalanceSheet", 1, self.extractBSItem, self.extractBSBankItem),
            ("PV_FinancialRatio", 0, self.extractFIItem, self.extractFIBankItem),
        ]
        for page, filter_val, callback, bank_callback in pages:
            url = "http://companyaz.stox.vn/Financial/%s%s?filter=%s&unit=1000000&ticker=%s" % (
                page, strType, filter_val, ticker)
            request = Request(url,
                              callback=bank_callback if is_bank else callback,
                              meta={'dont_merge_cookies': True, 'cookie_jar': cookieJar})
            cookieJar.add_cookie_header(request)  # apply Set-Cookie ourselves
            yield request
        
    
    
    def extractISItem(self, response):
        """Parse a non-bank income-statement page.

        Builds one StoxISItem per quarter column, appends each as a CSV
        row to is/<ticker>.csv, and returns the list of items.

        Fixes over the original: the CSV file is closed even on error
        (``with``), the builtin ``str`` is no longer shadowed, and the
        per-field extraction boilerplate is table-driven.
        """
        # Ticker is everything after 'ticker=' in the request URL.
        pos = response.url.find('ticker=')
        ticker = response.url[pos + len('ticker='):]

        def cell(node, xpath):
            # Cell text converted from Vietnamese number format
            # ('1.234,5') to dotted decimal ('1234.5').
            return ''.join(node.select(xpath).extract()).strip().replace('.', '').replace(',', '.')

        # (item field, xpath relative to the quarter column), in CSV column order.
        fields = [
            ("doanh_thu_thuan", "./div[1]/p[1]/text()"),
            ("gia_von_hang_ban", "./div[1]/p[2]/text()"),
            ("lai_gop", "./div[2]/p[1]/text()"),
            ("thu_nhap_tai_chinh", "./div[2]/p[2]/text()"),
            ("chi_phi_tai_chinh", "./div[2]/p[3]/text()"),
            ("chi_phi_tien_lai_vay", "./div[2]/p[4]/text()"),
            ("chi_phi_ban_hang", "./div[2]/p[5]/text()"),
            ("chi_phi_quan_ly", "./div[2]/p[6]/text()"),
            ("lai_tu_hdkd", "./div[3]/p[1]/text()"),
            ("lai_cty_ld_lk", "./div[3]/p[2]/text()"),
            ("thu_nhap_khac", "./div[3]/p[3]/text()"),
            ("lai_truoc_thue", "./div[4]/p[1]/text()"),
            ("thue_TNDN_ht", "./div[4]/p[2]/text()"),
            ("thue_TNDN_hl", "./div[4]/p[3]/text()"),
            ("lai_sau_thue", "./div[5]/p[1]/text()"),
            ("loi_ich_CDTS", "./div[5]/p[2]/text()"),
            ("lai_co_dong_cty_me", "./div[6]/p[1]/text()"),
            ("chi_phi_khau_hao_TSCD", "./div[7]/p[1]/text()"),
        ]
        template = "\"%s\",%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n"

        items = []
        hxs = HtmlXPathSelector(response)
        with open("is/%s.csv" % ticker, 'w') as f:
            for title in hxs.select("//p[@data-time]/.."):
                item = StoxISItem()
                item["ticker"] = ticker
                # data-time is YYYYQ (year then quarter).
                qtime = "".join(title.select("./p/@data-time").extract())
                if qtime:
                    item["year"] = qtime[0:4]
                    item["quarter"] = qtime[4:]
                for field, xpath in fields:
                    item[field] = cell(title, xpath)
                items.append(item)
                row = template % tuple(
                    [item["ticker"], item["year"], item["quarter"]] +
                    [item[field] for field, _ in fields])
                f.write(row)
        return items
    
    def extractISBankItem(self, response):
        """Parse a bank income-statement page.

        Builds one StoxISBankItem per quarter column, appends each as a
        CSV row to is/bank/<ticker>.csv, and returns the list of items.

        Fixes over the original: the CSV file is closed even on error
        (``with``), the builtin ``str`` is no longer shadowed, and the
        per-field extraction boilerplate is table-driven.
        """
        # Ticker is everything after 'ticker=' in the request URL.
        pos = response.url.find('ticker=')
        ticker = response.url[pos + len('ticker='):]

        def cell(node, xpath):
            # Cell text converted from Vietnamese number format
            # ('1.234,5') to dotted decimal ('1234.5').
            return ''.join(node.select(xpath).extract()).strip().replace('.', '').replace(',', '.')

        # (item field, xpath relative to the quarter column), in CSV column order.
        fields = [
            ("thu_nhap_lai_va_khoan_tt", "./div[1]/p[1]/text()"),
            ("chi_phi_lai", "./div[1]/p[2]/text()"),
            ("thu_nhap_lai_thuan", "./div[2]/p[1]/text()"),
            ("thu_nhap_hd_dich_vu", "./div[2]/p[2]/text()"),
            ("chi_phi_hd_dich_vu", "./div[2]/p[3]/text()"),
            ("lai_thuan_hd_dich_vu", "./div[3]/p[1]/text()"),
            ("kd_ngoai_hoi_vang", "./div[3]/p[2]/text()"),
            ("chung_khoan_KD", "./div[3]/p[3]/text()"),
            ("chung_khoan_dau_tu", "./div[3]/p[4]/text()"),
            ("hoat_dong_khac", "./div[3]/p[5]/text()"),
            ("thu_nhap_gop_von_CP", "./div[3]/p[6]/text()"),
            ("tong_thu_nhap_HD", "./div[4]/p[1]/text()"),
            ("tong_chi_phi_HD", "./div[5]/p[1]/text()"),
            ("LN_HDKD_truoc_CF_du_phong", "./div[6]/p[1]/text()"),
            ("chi_phi_du_phong_RR", "./div[6]/p[2]/text()"),
            ("loi_nhuan_tt", "./div[7]/p[1]/text()"),
            ("thue_TNDN", "./div[7]/p[2]/text()"),
            ("co_dong_thieu_so", "./div[7]/p[3]/text()"),
            ("loi_nhuan_thuan", "./div[8]/p[1]/text()"),
        ]
        template = "\"%s\",%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n"

        items = []
        hxs = HtmlXPathSelector(response)
        with open("is/bank/%s.csv" % ticker, 'w') as f:
            for title in hxs.select("//p[@data-time]/.."):
                item = StoxISBankItem()
                item["ticker"] = ticker
                # data-time is YYYYQ (year then quarter).
                qtime = "".join(title.select("./p/@data-time").extract())
                if qtime:
                    item["year"] = qtime[0:4]
                    item["quarter"] = qtime[4:]
                for field, xpath in fields:
                    item[field] = cell(title, xpath)
                items.append(item)
                row = template % tuple(
                    [item["ticker"], item["year"], item["quarter"]] +
                    [item[field] for field, _ in fields])
                f.write(row)
        return items
    
    
    def extractBSItem(self, response):
        """Parse a non-bank balance-sheet page.

        Builds one StoxBSItem per quarter column, appends each as a CSV
        row to bs/<ticker>.csv, and returns the list of items.

        Fixes over the original: the CSV file is closed even on error
        (``with``), the builtin ``str`` is no longer shadowed, and the
        per-field extraction boilerplate is table-driven.
        """
        # Ticker is everything after 'ticker=' in the request URL.
        pos = response.url.find('ticker=')
        ticker = response.url[pos + len('ticker='):]

        def cell(node, xpath):
            # Cell text converted from Vietnamese number format
            # ('1.234,5') to dotted decimal ('1234.5').
            return ''.join(node.select(xpath).extract()).strip().replace('.', '').replace(',', '.')

        # (item field, xpath relative to the quarter column), in CSV column order.
        fields = [
            ("ts_ngan_han", "./div[1]/p[1]/text()"),
            ("tien_va_td_tien", "./div[1]/p[2]/text()"),
            ("gt_thuan_dau_tu_ngan_han", "./div[1]/p[3]/text()"),
            ("khoan_phai_thu", "./div[1]/p[4]/text()"),
            ("hang_ton_kho", "./div[1]/p[5]/text()"),
            ("ts_luu_dong_khac", "./div[1]/p[6]/text()"),
            ("TS_dai_han", "./div[2]/p[1]/text()"),
            ("phai_thu_dai_han", "./div[2]/p[2]/text()"),
            ("ts_co_dinh", "./div[2]/p[3]/text()"),
            ("ts_dau_tu", "./div[2]/p[4]/text()"),
            ("dau_tu_dai_han", "./div[2]/p[5]/text()"),
            ("loi_the_thuong_mai", "./div[2]/p[6]/text()"),
            ("ts_dai_han_khac", "./div[2]/p[7]/text()"),
            ("tong_ts", "./div[3]/p[1]/text()"),
            ("no_phai_tra", "./div[4]/p[1]/text()"),
            ("no_ngan_han", "./div[4]/p[2]/text()"),
            ("no_dai_han", "./div[4]/p[3]/text()"),
            ("von_CSH", "./div[5]/p[1]/text()"),
            ("von_va_cac_quy", "./div[5]/p[2]/text()"),
            ("quy_khac", "./div[5]/p[3]/text()"),
            ("lai_chua_phan_phoi", "./div[5]/p[4]/text()"),
            ("von_ngan_sach_NH_quy", "./div[5]/p[5]/text()"),
            ("loi_ich_CDTS", "./div[6]/p[1]/text()"),
            ("tong_cong_nguon_von", "./div[7]/p[1]/text()"),
        ]
        template = "\"%s\",%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n"

        items = []
        hxs = HtmlXPathSelector(response)
        with open("bs/%s.csv" % ticker, 'w') as f:
            for title in hxs.select("//p[@data-time]/.."):
                item = StoxBSItem()
                item["ticker"] = ticker
                # data-time is YYYYQ (year then quarter).
                qtime = "".join(title.select("./p/@data-time").extract())
                if qtime:
                    item["year"] = qtime[0:4]
                    item["quarter"] = qtime[4:]
                for field, xpath in fields:
                    item[field] = cell(title, xpath)
                items.append(item)
                row = template % tuple(
                    [item["ticker"], item["year"], item["quarter"]] +
                    [item[field] for field, _ in fields])
                f.write(row)
        return items
    
    def extractBSBankItem(self, response):
        """Parse a bank balance-sheet page.

        Builds one StoxBSBankItem per quarter column, appends each as a
        CSV row to bs/bank/<ticker>.csv, and returns the list of items.

        Fixes over the original: the CSV file is closed even on error
        (``with``), the builtin ``str`` is no longer shadowed, and the
        per-field extraction boilerplate is table-driven.
        """
        # Ticker is everything after 'ticker=' in the request URL.
        pos = response.url.find('ticker=')
        ticker = response.url[pos + len('ticker='):]

        def cell(node, xpath):
            # Cell text converted from Vietnamese number format
            # ('1.234,5') to dotted decimal ('1234.5').
            return ''.join(node.select(xpath).extract()).strip().replace('.', '').replace(',', '.')

        # (item field, xpath relative to the quarter column), in CSV column order.
        fields = [
            ("tong_TS", "./div[1]/p[1]/text()"),
            ("tien_mat_vang_bac_da_quy", "./div[2]/p[1]/text()"),
            ("tien_gui_NHNN", "./div[3]/p[1]/text()"),
            ("tien_gui_cac_tctd_khac", "./div[4]/p[1]/text()"),
            ("tong_ck_kinh_doanh", "./div[5]/p[1]/text()"),
            ("ck_kinh_doanh", "./div[5]/p[2]/text()"),
            ("du_phong_giam_gia_ck_kinh_doanh", "./div[5]/p[3]/text()"),
            ("tc_phai_sinh_va_no_tc_khac_ckkd", "./div[5]/p[4]/text()"),
            ("tong_cho_vay_khach_hang", "./div[6]/p[1]/text()"),
            ("cho_vay_khach_hang", "./div[6]/p[2]/text()"),
            ("du_phong_rr_cho_vay_KH", "./div[6]/p[3]/text()"),
            ("ck_dau_tu", "./div[7]/p[1]/text()"),
            ("ck_dau_tu_san_sang_de_ban", "./div[7]/p[2]/text()"),
            ("ck_dau_tu_giu_den_dao_han", "./div[7]/p[3]/text()"),
            ("du_phong_giam_gia_ck_dau_tu", "./div[7]/p[4]/text()"),
            ("gop_von_dau_tu_dai_han", "./div[8]/p[1]/text()"),
            ("dt_vao_cong_ty_con", "./div[8]/p[2]/text()"),
            ("dt_vao_cong_ty_lien_doanh", "./div[8]/p[3]/text()"),
            ("dt_dai_han_khac", "./div[8]/p[4]/text()"),
            ("du_phong_giam_gia_dt_dai_han", "./div[8]/p[5]/text()"),
            ("tai_san_co_dinh", "./div[9]/p[1]/text()"),
            ("tai_san_co_dinh_huu_hinh", "./div[9]/p[2]/text()"),
            ("tai_san_co_dinh_thue_tc", "./div[9]/p[3]/text()"),
            ("tai_san_co_dinh_vo_hinh", "./div[9]/p[4]/text()"),
            ("bat_dong_san_dau_tu", "./div[10]/p[1]/text()"),
            ("tai_san_co_khac", "./div[11]/p[1]/text()"),
            ("no_phai_tra_va_von_CSH", "./div[12]/p[1]/text()"),
            ("tong_no_phai_tra", "./div[13]/p[1]/text()"),
            ("cac_khoan_no_cp_va_NHNN", "./div[13]/p[2]/text()"),
            ("tien_gui_vay_tctd_khac", "./div[13]/p[3]/text()"),
            ("tien_gui_cua_KH", "./div[13]/p[4]/text()"),
            ("tc_phai_sinh_va_no_tc_khac_no", "./div[13]/p[5]/text()"),
            ("von_tai_tro_utdt_cp_tctd_khac", "./div[13]/p[6]/text()"),
            ("phat_hanh_giay_to_co_gia", "./div[13]/p[7]/text()"),
            ("khoan_no_khac", "./div[13]/p[8]/text()"),
            ("von_CSH", "./div[14]/p[1]/text()"),
            ("von_cua_tctd", "./div[14]/p[2]/text()"),
            ("quy_cua_tctd", "./div[14]/p[3]/text()"),
            ("chenh_lech_tg_hoi_doai", "./div[14]/p[4]/text()"),
            ("chenh_lech_danh_gia_lai_TS", "./div[14]/p[5]/text()"),
            ("loi_nhuan_chua_phan_phoi", "./div[14]/p[6]/text()"),
            ("loi_ich_cua_CDTS", "./div[15]/p[1]/text()"),
        ]
        template = "\"%s\",%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s\n"

        items = []
        hxs = HtmlXPathSelector(response)
        with open("bs/bank/%s.csv" % ticker, 'w') as f:
            for title in hxs.select("//p[@data-time]/.."):
                item = StoxBSBankItem()
                item["ticker"] = ticker
                # data-time is YYYYQ (year then quarter).
                qtime = "".join(title.select("./p/@data-time").extract())
                if qtime:
                    item["year"] = qtime[0:4]
                    item["quarter"] = qtime[4:]
                for field, xpath in fields:
                    item[field] = cell(title, xpath)
                items.append(item)
                row = template % tuple(
                    [item["ticker"], item["year"], item["quarter"]] +
                    [item[field] for field, _ in fields])
                f.write(row)
        return items
        
        
    def extractFIItem(self, response):
        items = [];
        #return items    # for testing, the url just die
                    
        #extract ticker from url
        pos =  response.url.find('ticker=')
        l = len("ticker=")
        ticker = response.url[pos+l:]
                
        # open file and write to csv
        f = open("fi/%s.csv" % ticker, 'w')
        
        #get the XPath        
        hxs = HtmlXPathSelector(response)        
        titles = hxs.select("//p[@data-time]/..")                
        for title in titles:
            item = StoxFIItem()
            item ["ticker"] = ticker;
            qtime = "".join(title.select("./p/@data-time").extract())
            if qtime :                
                item ["year"] = qtime[0:4]
                item ["quarter"] = qtime[4:] 
            #item ["thu_nhap_lai_va_khoan_tt"]= ''.join(title.select("./div[1]/p[1]/text()").extract()).strip().replace('.','').replace(',','.')
            
            item["gia"] = ''.join(title.select("./div[1]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["sl_CPLH"] = ''.join(title.select("./div[1]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            item["von_hoa"] = ''.join(title.select("./div[1]/p[4]/text()").extract()).strip().replace('.','').replace(',','.')
            item["co_tuc"] = ''.join(title.select("./div[1]/p[5]/text()").extract()).strip().replace('.','').replace(',','.')
            
            item["eps"] = ''.join(title.select("./div[2]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["pe"] = ''.join(title.select("./div[2]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            item["bvs"] = ''.join(title.select("./div[2]/p[4]/text()").extract()).strip().replace('.','').replace(',','.')
            item["pb"] = ''.join(title.select("./div[2]/p[5]/text()").extract()).strip().replace('.','').replace(',','.')
            
            item["ts_loi_nhuan_gop"] = ''.join(title.select("./div[3]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["ts_loi_nhuan_truoc_thue"] = ''.join(title.select("./div[3]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            item["ts_loi_nhuan_rong"] = ''.join(title.select("./div[3]/p[4]/text()").extract()).strip().replace('.','').replace(',','.')
            item["ebit"] = ''.join(title.select("./div[3]/p[5]/text()").extract()).strip().replace('.','').replace(',','.')
            item["ebitda"] = ''.join(title.select("./div[3]/p[6]/text()").extract()).strip().replace('.','').replace(',','.')
            item["roe"] = ''.join(title.select("./div[3]/p[7]/text()").extract()).strip().replace('.','').replace(',','.')
            item["roa"] = ''.join(title.select("./div[3]/p[8]/text()").extract()).strip().replace('.','').replace(',','.')
                        
            item["vong_quay_hang_ton_kho"] = ''.join(title.select("./div[4]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["vong_quay_khoan_phai_thu"] = ''.join(title.select("./div[4]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            item["vong_quay_khoan_phai_tra"] = ''.join(title.select("./div[4]/p[4]/text()").extract()).strip().replace('.','').replace(',','.')
            item["vong_quay_tong_tai_san"] = ''.join(title.select("./div[4]/p[5]/text()").extract()).strip().replace('.','').replace(',','.')
            item["kha_nang_tra_lai_vay"] = ''.join(title.select("./div[4]/p[6]/text()").extract()).strip().replace('.','').replace(',','.')
            
            item["no_t_tong_tai_san"] = ''.join(title.select("./div[5]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["no_t_von_CSH"] = ''.join(title.select("./div[5]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            item["vay_dai_han_t_tong_ts"] = ''.join(title.select("./div[5]/p[4]/text()").extract()).strip().replace('.','').replace(',','.')
            item["vay_dai_han_t_von_CSH"] = ''.join(title.select("./div[5]/p[5]/text()").extract()).strip().replace('.','').replace(',','.')
            item["no_ngan_han_t_tong_TS"] = ''.join(title.select("./div[5]/p[6]/text()").extract()).strip().replace('.','').replace(',','.')
            item["no_ngan_han_t_von_CSH"] = ''.join(title.select("./div[5]/p[7]/text()").extract()).strip().replace('.','').replace(',','.')
            
            item["tang_truong_doanh_thu"] = ''.join(title.select("./div[6]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["tang_truong_LN_rong"] = ''.join(title.select("./div[6]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            
            item["ts_thanh_toan_hien_hanh"] = ''.join(title.select("./div[7]/p[2]/text()").extract()).strip().replace('.','').replace(',','.')
            item["ts_thanh_toan_nhanh"] = ''.join(title.select("./div[7]/p[3]/text()").extract()).strip().replace('.','').replace(',','.')
            item["ts_thanh_toan_tien_mat"] = ''.join(title.select("./div[7]/p[4]/text()").extract()).strip().replace('.','').replace(',','.')
            
            items.append(item)            
            #write to file
            str = "\"%s\"" + ",%s"*33 + "\n"
            str = str % (   item ["ticker"],
                        item ["year"],
                        item ["quarter"],
                        
                        item["gia"],
                        item["sl_CPLH"],
                        item["von_hoa"],
                        item["co_tuc"],
                        
                        item["eps"],
                        item["pe"],
                        item["bvs"],
                        item["pb"],
                        
                        item["ts_loi_nhuan_gop"],
                        item["ts_loi_nhuan_truoc_thue"],
                        item["ts_loi_nhuan_rong"],
                        item["ebit"],
                        item["ebitda"],
                        item["roe"],
                        item["roa"],
                        
                        item["vong_quay_hang_ton_kho"],
                        item["vong_quay_khoan_phai_thu"],
                        item["vong_quay_khoan_phai_tra"],
                        item["vong_quay_tong_tai_san"],
                        item["kha_nang_tra_lai_vay"],
                        
                        item["no_t_tong_tai_san"],
                        item["no_t_von_CSH"],
                        item["vay_dai_han_t_tong_ts"],
                        item["vay_dai_han_t_von_CSH"],
                        item["no_ngan_han_t_tong_TS"],
                        item["no_ngan_han_t_von_CSH"],
                        
                        item["tang_truong_doanh_thu"],
                        item["tang_truong_LN_rong"],
                        
                        item["ts_thanh_toan_hien_hanh"],
                        item["ts_thanh_toan_nhanh"],
                        item["ts_thanh_toan_tien_mat"])
            f.write(str)
                    
        f.close()
        return items
    
    
    def extractFIBankItem(self, response):
        """Parse the financial-indicator (FI) page for a bank ticker.

        Builds one StoxFIBankItem per quarter block found on the page,
        appends each as a CSV row to fi/bank/<ticker>.csv, and returns
        the list of items.

        :param response: scrapy Response whose URL ends in "...&ticker=XYZ"
        :returns: list of StoxFIBankItem, one per quarter on the page
        """
        items = []

        # The ticker is everything after the trailing "ticker=" query param.
        pos = response.url.find('ticker=')
        ticker = response.url[pos + len("ticker="):]

        def _clean(node, xpath):
            # Join all matched text nodes, then convert Vietnamese number
            # formatting ("1.234,5") into a plain decimal string ("1234.5").
            raw = ''.join(node.select(xpath).extract()).strip()
            return raw.replace('.', '').replace(',', '.')

        # (item key, relative XPath) in the exact CSV column order that
        # follows the leading ticker/year/quarter columns.  Keeping the
        # order in one table removes the duplicated 33-line extract block
        # and the matching 33-argument format call of the original.
        fields = (
            ("gia", "./div[1]/p[2]/text()"),
            ("sl_CPLH", "./div[1]/p[3]/text()"),
            ("von_hoa", "./div[1]/p[4]/text()"),
            ("co_tuc", "./div[1]/p[5]/text()"),

            ("eps", "./div[2]/p[2]/text()"),
            ("pe", "./div[2]/p[3]/text()"),
            ("bvs", "./div[2]/p[4]/text()"),
            ("pb", "./div[2]/p[5]/text()"),

            ("ts_loi_nhuan_rong", "./div[3]/p[2]/text()"),
            ("ts_sinh_lai", "./div[3]/p[3]/text()"),
            ("no_chiu_lai", "./div[3]/p[4]/text()"),
            ("chi_phi_t_tren_TN_hoat_dong", "./div[3]/p[5]/text()"),
            ("ts_sinh_loi_cua_TS", "./div[3]/p[6]/text()"),
            ("chi_phi_von", "./div[3]/p[7]/text()"),
            ("NIS", "./div[3]/p[8]/text()"),
            ("NIM", "./div[3]/p[9]/text()"),

            ("roa", "./div[4]/p[2]/text()"),
            ("roe", "./div[4]/p[3]/text()"),
            ("roa_tdp", "./div[4]/p[4]/text()"),

            ("tang_truong_LN_rong", "./div[5]/p[2]/text()"),
            ("tang_truong_tong_TS", "./div[5]/p[3]/text()"),
            ("tang_truong_cho_vay", "./div[5]/p[4]/text()"),
            # NOTE(review): identical XPath to tang_truong_cho_vay above —
            # looks like a copy-paste slip (p[5] expected?).  Confirm
            # against the live page markup before changing: a fix would
            # also shift the four entries below by one.
            ("tang_truong_huy_dong", "./div[5]/p[4]/text()"),
            ("tang_truong_chi_phi_hoat_dong", "./div[5]/p[5]/text()"),
            ("tang_truong_LN_truoc_du_phong", "./div[5]/p[6]/text()"),
            ("tang_truong_TN_lai_thuan", "./div[5]/p[7]/text()"),
            ("tang_truong_TN_ngoai_lai", "./div[5]/p[8]/text()"),

            ("don_bay_tai_chinh", "./div[6]/p[2]/text()"),
            ("cho_vay_t_huy_dong", "./div[6]/p[3]/text()"),
            ("no_xau", "./div[6]/p[4]/text()"),
            ("no_xau_t_cho_vay", "./div[6]/p[5]/text()"),
            ("du_phong_t_no_xau", "./div[6]/p[6]/text()"),
            ("chi_phi_du_phong_t_cho_vay", "./div[6]/p[7]/text()"),
        )

        # One quoted ticker column followed by 35 unquoted columns
        # (year, quarter, then the 33 table fields) — same row format as
        # before.  Renamed from `str`, which shadowed the builtin.
        row_fmt = "\"%s\"" + ",%s" * 35 + "\n"

        hxs = HtmlXPathSelector(response)
        # Each quarter block is the parent of a <p data-time="..."> tag.
        quarters = hxs.select("//p[@data-time]/..")

        # `with` guarantees the CSV file is closed even if extraction
        # raises mid-loop (the original leaked the handle on error).
        with open("fi/bank/%s.csv" % ticker, 'w') as f:
            for quarter in quarters:
                item = StoxFIBankItem()
                item["ticker"] = ticker
                qtime = "".join(quarter.select("./p/@data-time").extract())
                if qtime:
                    # data-time is "YYYYQ..." — year first, quarter after.
                    item["year"] = qtime[0:4]
                    item["quarter"] = qtime[4:]

                for key, xpath in fields:
                    item[key] = _clean(quarter, xpath)

                items.append(item)
                # Same KeyError behavior as the original if year/quarter
                # were never set (empty data-time).
                row = row_fmt % (
                    (item["ticker"], item["year"], item["quarter"])
                    + tuple(item[key] for key, _ in fields))
                f.write(row)

        return items
