#!/usr/bin/env python

import sys

path = "/home/spider/atscript/lib/"
sys.path.append(path)

from atscript import *
from hadoop import *
from assert_func import *

# Test-suite wiring: load the YAML config and build the shared framework
# objects that every case in this file uses.
conf = config("rankget.yaml")
sample = base(conf.get("CONF"), conf.get("SCHEDULE"), conf.get("JOBLIST"))
# NOTE(review): this rebinding shadows the imported 'hadoop' callable with an
# instance — presumably intentional (the class below uses it as a client),
# but a second hadoop(...) construction in this module would now fail.
hadoop = hadoop(conf)

class Test_rg_getcslog(at_case):
    """Test case for the 'rg_getcslog' Hadoop job.

    Uploads a small GET/CHK crawler-log fixture to HDFS, runs the job,
    and verifies the merged output: judging by the expected file, only
    the CHK FAIL_NOTFIND records should be emitted, tagged CSLOG_FAIL,
    with any http:// / https:// scheme prefix stripped from the URL.
    """

    def __init__(self, casename, base, jobname, input, output):
        # All command/environment wiring is done by the framework base class.
        at_case.__init__(self, casename, base, jobname, input, output)

    def set_input(self):
        """Write the fixture log locally, then push it to the HDFS input dir.

        Raises IOError if the local file cannot be written.
        """
        # Fixture lines: GET records (lines 1-3) should be ignored by the
        # job; CHK FAIL_NOTFIND records (lines 6-8) are the positive cases,
        # including scheme-prefixed variants.
        log_lines = [
            "NOTICE: 06-30 17:27:09: GET OK 219.145.111.75 200 24 1293 2 127 0 1 0 www.snxunyi.gov.cn/contents/450/7927.html",
            "NOTICE: 06-30 17:23:46: GET FAIL_NOTFIND 78.140.142.57 404 27 849 1 127 0 1 0 www.tubebigcock.com/pigtails",
            "NOTICE: 06-30 17:01:37: GET FAIL_NORMAL 210.83.85.148 5 27 50605 0 127 0 1 0 www.lejutuan.com/",
            "NOTICE: 06-30 17:27:09: CHK OK 219.145.111.75 200 24 1293 2 127 0 1 0 www.snxunyi.gov.cn/contents/450/7927.html",
            "NOTICE: 06-30 17:23:46: CHK FAIL_NORMAL 78.140.142.57 404 27 849 1 127 0 1 0 www.tubebigcock.com/pigtails",
            "NOTICE: 06-30 17:01:37: CHK FAIL_NOTFIND 210.83.85.148 5 27 50605 0 127 0 1 0 test.com/chk_notfind",
            "NOTICE: 06-30 17:01:37: CHK FAIL_NOTFIND 210.83.85.148 5 27 50605 0 127 0 1 0 http://test.com/http_chk_notfind",
            "NOTICE: 06-30 17:01:37: CHK FAIL_NOTFIND 210.83.85.148 5 27 50605 0 127 0 1 0 https://test.com/https_chk_notfind",
        ]
        input_content = "\n".join(log_lines)

        temp_input_file = self.casename + ".input"
        # 'with' guarantees the handle is closed and lets any IOError
        # propagate with its original message.  (The old try/except
        # re-raised the bare IOError *class*, losing the cause, and its
        # 'finally' could NameError if open() itself failed.)
        with open(temp_input_file, 'w') as file_handle:
            file_handle.write(input_content)
            file_handle.write("\n")

        # Refresh the copy on HDFS: remove any stale file, then upload.
        hadoop.rmfile("/user/spider/at-script/rankget/input/" + temp_input_file)
        hadoop.put(temp_input_file, "/user/spider/at-script/rankget/input/")

    def run(self):
        """Run the job against a freshly re-created HDFS output directory."""
        hadoop.rmdir(self.output)
        hadoop.mkdir(self.output)

        # Parenthesized form prints identically under py2 and py3.
        print(self.run_cmd)
        self.execute_cmd(self.run_cmd, True)

    def check_result(self):
        """Diff the merged job output against the expected file.

        Exits the process with status 1 on mismatch (framework convention);
        prints a pass message otherwise.
        """
        expect_out = self.casename + ".expect_out"
        real_out = self.casename + ".real_out"

        # Remove any stale local copy before pulling the fresh output.
        self.execute_cmd("rm -f %s" % real_out)
        hadoop.getmerge(self.output, real_out)

        # Same fix as set_input(): 'with' replaces the broken
        # try/except-raise-class/finally pattern.
        with open(expect_out, 'w') as file_handle:
            file_handle.write("test.com/chk_notfind\tCSLOG_FAIL\n")
            file_handle.write("test.com/http_chk_notfind\tCSLOG_FAIL\n")
            file_handle.write("test.com/https_chk_notfind\tCSLOG_FAIL\n")

        try:
            assert_file_equal(expect_out, real_out)
        except AssertionError:
            # sys.stdout.write emits the same bytes as the old
            # 'print >>sys.stdout' statements, but is py2/py3 compatible.
            sys.stdout.write("case %s failed\n" % self.casename)
            sys.stdout.write("%s not equal to %s\n" % (real_out, expect_out))
            sys.exit(1)
        sys.stdout.write("case %s passed\n" % self.casename)

if __name__ == "__main__":
    # Only the result check is exercised; input setup and job execution
    # stay disabled here, as in the original workflow.
    test = Test_rg_getcslog(
        'Test_rg_getcslog',
        sample,
        'rg_getcslog',
        '/user/spider/rankget/input/',
        '/user/spider/rankget/data/cslog/',
    )
    #test.set_input()
    #test.run()
    test.check_result()
