#coding:utf8
'''
Python super spider (web crawler)
by jadesoul
@ 2010-11-22
'''

from utils import *
import sys

# ---- configuration ----
dburl="mysql://root:gbsoft@localhost/super_spider_pku"

# crawl seed: ideally a portal / navigation site with many outgoing links
seed_host="www.pku.edu.cn"
seed_weight=1

# minimum rank, optionally overridden by the single command-line argument
min_rank=int(sys.argv[1]) if len(sys.argv)==2 else 3

# every log line is prefixed with the rank so parallel runs can be told apart
prefix="%d\t" % min_rank

# ---- shared state: database handle used by the crawl loop below ----
db=database.dbgen(dburl)
db.connect()

# seed the hosts table once, while it is still empty
#db.run("truncate table hosts")
if db.run("select * from hosts")==0:
	seed_weight=3
	seed_ip=network.get_ip_by_host(seed_host)
	db.run("insert into hosts (host_name, host_ip, num_links_from_outside) values ('%s', '%s', %d)" % (seed_host, seed_ip, seed_weight))

while 1:
	
	try:
		link_map={}
		
		row=db.one("select host_id, host_ip, host_name, num_links_from_outside from hosts where is_visited=0 order by num_links_from_outside desc limit 0, 1")
		if not row:
			print prefix, "没有host了，爬取任务结束"
			break
		
		host_id, host_ip, host_name, num=row["host_id"], row["host_ip"], row["host_name"], row["num_links_from_outside"]
		print prefix, "1, ready to visit: %s, host id=%d, rank=%d" % (host_name, host_id, num)
		print prefix, "2, host ip:", host_ip
		try:
			html, real_url, real_host=network.get_html_by_host(host_name)
		except:
			print prefix, "there are some error with the network: ", host_name
			db.run("update hosts set index_html='net error', is_visited=1 where host_id=%d" % host_id)
			print prefix, "now try the next host..."
			continue
		print prefix, "3, visited: ", real_host, real_url
		if real_host!=host_name:
			link_map[real_host]=set()
		
		print prefix, "4, ready to parse html, length of html: ", len(html) 
		
		try:
			dom=htmlparser.parse(html)
		except:
			print prefix, "html parse error", host_name
			db.run("update hosts set index_html='parse error', is_visited=1 where host_id=%d" % host_id)
			print prefix, "now try the next host..."
			continue
		
		#text=dom.text
		
		all_links=dom("a")
		print prefix, "5, links cout: ", len(all_links)
		
		num_links_inside=0
		num_links_to_outside=0
		access_time=datetime.datetime.now()
		
		
		for link in all_links:
			try:
				url=link["href"]
			except:
				continue
			url=get_nice_url(url, host_name)
			if url:
				host=network.get_host_by_url(url)
				if not host: continue
				
				if host==host_name:
					num_links_inside+=1
				else:
					num_links_to_outside+=1
				#print host, url
				if not host in link_map:
					link_map[host]=set()
				link_map[host].add(url)
				
		#print link_map
		db.run("update hosts set num_links_inside=num_links_inside+%d, num_links_to_outside=num_links_to_outside+%d, access_time='%s', is_visited=1 where host_id=%d" \
			% (num_links_inside, num_links_to_outside, access_time, host_id))
		print prefix, "accumulate num_links_inside by %d and accumulate num_links_to_outside by %d" % (num_links_inside, num_links_to_outside)
		
		for host in link_map:
			if host==host_name: continue
			s=link_map[host]
			#让种子host下的所有host具有高权值
			num_links_from_outside=len(s) * seed_weight
			#print host
			try:
				ip=network.get_ip_by_host(host)
			except socket.gaierror, e:
				print prefix, "ip not found: ", host
				ip=""
			
			if host.endswith("pku.edu.cn"):
				try:
					#可能有重复主机名而无法插入成功
					db.run("insert into hosts (host_name, host_ip, num_links_from_outside) values ('%s', '%s', %d)" % (host, ip, num_links_from_outside))
					print prefix, "added a new host: ", host, ip, num_links_from_outside
				except Exception, e:
					#改插入为更新
					#print "insert error: ", e.args
					db.run("update hosts set num_links_from_outside=num_links_from_outside+%d where host_name='%s'" % (num_links_from_outside, host))
					print prefix, "accumulated a exist host: ", host, ip, num_links_from_outside
	except:
		print prefix, "unknow error", host_name, host_ip
		db.run("update hosts set index_html='unknow error', is_visited=1 where host_id=%d" % host_id)
		continue
	
	seed_weight=1
	
	
	
	
	
	
	