#coding: utf-8
import re
# Map of shop hostname -> compiled regex that the URL's path must match.
# NOTE(review): several patterns look suspect -- e.g. ".catid+" matches any
# char, then "cati", then one-or-more "d" (probably meant something like
# r"catid=\d+"), and "[index.]+" is a character class, not the literal
# "index.".  They are kept byte-identical pending confirmation against
# real log data.
xtable = {
    "amazon.cn": re.compile("dp/[A-Z0-9]+"),
    "escentual.com": re.compile(".+"),
    "luisaviaroma.com": re.compile("index.aspx.*"),
    "wiggle.cn": re.compile(".+"),
    "jomashop.com": re.compile("\\.html"),
    "amazon.ca": re.compile("product/[A-Z0-9]+"),
    "6pm.com": re.compile("product/\\d+"),
    "1.macys.com": re.compile("product/.+"),
    "finishline.com": re.compile("product\\?.+"),
    "amazon.co.uk": re.compile("(?:dp|product)/[A-Z0-9]+"),
    "drugstore.com": re.compile(".catid+"),
    "carters.com": re.compile(".carters+"),
    "sears.com": re.compile(".+"),
    "amazon.co.jp": re.compile("(?:dp|product)/[A-Z0-9]+"),
    "store.nba.com": re.compile(".+"),
    "lookfantastic.com": re.compile(".list+|(?:[0-9]+.html)+"),
    "rei.com": re.compile("product/[0-9]+"),
    "newbalance.com.cn": re.compile("[index.]+"),
    "swarovski.com.cn": re.compile(".product/+"),
    "mdreams.com": re.compile(".+"),
    "converse.com.cn": re.compile(".item+"),
    "microsoftstore.com.cn": re.compile("/p/+"),
    "godiva.com": re.compile(".[0-9]+"),
    "kiehls.com.br": re.compile(".+"),
    "disneystore.com": re.compile("./[0-9]+"),
    "groupon.com": re.compile("deals/+"),
    "footlocker.com": re.compile("product/+"),
    "campsaver.com": re.compile(".+"),
    # BUG FIX: pattern was a bytes literal (b".?style+"); searching a str
    # path with a bytes pattern raises TypeError at match time.
    "joesnewbalanceoutlet.com": re.compile(".?style+"),
    "tiffany.cn": re.compile(".+"),
    "amazon.fr": re.compile("(?:dp|product)/[A-Z0-9]+"),
    "rakuten.com": re.compile("prod/+"),
    # BUG FIX: a stray triple-quoted string (an attempted comment-out of a
    # duplicate rakuten.com entry) implicitly concatenated with the key
    # below, corrupting the "wn.pos.baidu.com" entry's key.
    "wn.pos.baidu.com": re.compile(".+"),
}
def urlsplit(line):
    """Extract a [host, path] pair from one space-separated log line.

    The first five space-separated fields are metadata; the sixth field is
    the URL in "host/path..." form.  Returns [host, path]; path is "" when
    the URL contains no "/".

    BUG FIX: the original did str(url).split(' ') on the *list* of
    remaining fields, so the host came back wrapped in "['...']" and never
    matched a key in xtable.
    """
    fields = line.split(' ')
    # Field 6 onward is the URL; tolerate short/blank lines instead of
    # raising ValueError on unpacking.
    url = fields[5].strip() if len(fields) > 5 else ''
    host, _, path = url.partition('/')
    return [host, path]


def matchs(urls):
    """Match the URL's path against its host's pattern from xtable.

    Returns the re.Match (or None when the path does not match).

    BUG FIX: the original line fused `return url1.split('/',1)` with
    `def matchs(urls):` on one physical line -- a SyntaxError.  Also,
    unknown hosts previously raised an uncaught KeyError; now they
    return None.
    """
    host, path = urlsplit(urls)
    pattern = xtable.get(host)
    if pattern is None:
        return None
    return pattern.search(path)
def mains(path=r'C:\Users\john\Desktop\re\tcp-url-2016-01-05-12-test.log'):
    """Read log lines from *path* and print the match result for each.

    *path* defaults to the original hard-coded location, so the existing
    no-argument call still works.  Lines read before a decode error are
    kept (best-effort, matching the original's intent).
    """
    url_list = []
    # BUG FIX: the file handle was opened outside the try and never
    # closed; a context manager guarantees it is released.
    try:
        with open(path, 'r') as log_file:
            for line in log_file:
                url_list.append(line)
    except ValueError:
        # Best-effort: keep whatever lines were read before the error
        # (UnicodeDecodeError is a subclass of ValueError).
        pass
    print(url_list)
    for result in map(matchs, url_list):
        print(result)


# Guard the entry point so importing this module does not trigger file I/O.
if __name__ == '__main__':
    mains()