from bs4 import BeautifulSoup
import requests
from queue import Queue
from time import sleep
import os
from urllib.parse import urljoin
import json
from _thread import start_new_thread
from sys import exit

# Seconds to wait between consecutive HTTP requests (politeness delay).
delay=0
# Extra HTTP headers sent with every request (e.g. User-Agent); filled by the user.
header={}

# Shared requests session reused for every page/image download.
sess=requests.session()
# Disable connection keep-alive between requests.
sess.keep_alive=False
# NOTE(review): module-level default retry count for newly created adapters --
# presumably to survive flaky hosts; confirm it affects the session created above.
requests.adapters.DEFAULT_RETRIES=15

# True while the worker thread should keep consuming the queue; toggled by
# the "start"/"resume"/"stop" commands in the __main__ loop.
running=False
# Root URL of the crawl: the start page and the base for relative links.
basic_url=""
# Substring an absolute URL must contain to stay in crawl scope.
match_url=""
# List of substrings; any URL containing one of them is skipped.
forbidden_url=[]

def restart_session():
	"""Discard the shared HTTP session and create a fresh one.

	Called after a download failure so subsequent requests start from a
	clean connection pool.
	"""
	global sess
	print("Session Restarting...")
	# Fix: the old session was never closed, leaking its pooled
	# connections; rebinding the global alone does not release them.
	try:
		sess.close()
	except Exception:
		pass  # best-effort cleanup; a broken session must not stop the restart
	sess=requests.session()
	sess.keep_alive=False
	print("Session Restarted!")
	return

def check_forbidden(a):
	"""Return True when *a* contains any blacklisted substring from forbidden_url."""
	# Read-only access to the module global needs no `global` declaration.
	return any(bad in a for bad in forbidden_url)

def save_to_path(url, img):
	"""Write *img* bytes to a local file path derived from *url*.

	Special URL characters are replaced with readable tokens so the
	result is a usable relative path; intermediate directories are
	created as needed.
	"""
	# Fix: str.replace returns a new string -- the original discarded
	# every result, so none of the substitutions ever took effect.
	for old, new in (("?", "ASK"), ("#", "POUND"), (".", "DOT"),
	                 ("=", "EQUAL"), ("&", "AND")):
		url = url.replace(old, new)
	target=url[url.index("//")+2:]  # strip the scheme, e.g. "http://"
	dirpath=target[:target.rindex("/")]
	print("Saved to",dirpath,"Target=",target)
	if not os.path.isdir(dirpath):
		os.makedirs(dirpath)
	# Fix: `with` guarantees the handle is closed even if the write fails.
	with open(target, "wb") as f:
		f.write(img)

def save_img(url):
	"""Download one image URL through the shared session and save it locally.

	URLs without a recognizable image extension and URLs matching a
	forbidden pattern are skipped. Failures are logged and the session
	is restarted (best-effort; never raises to the caller).
	"""
	global sess, delay
	# Fix: str.replace returns a new string; the original discarded the
	# results, so tabs/spaces were never stripped from the URL.
	url = url.replace("\t", "").replace(" ", "")
	print("Img Downloading... url=",url)
	# Crude extension filter: require one of the known image markers
	# anywhere in the URL (same semantics as the original find() chain).
	if not any(ext in url for ext in ("jpg", "bmp", "png", "gif")):
		print("Downloading Failed")
		return
	# Fix: the original tested the undefined name `now` here, which
	# raised NameError on every call (and outside the try, so it escaped).
	if check_forbidden(url):
		print("SKIP")
		return
	try:
		sleep(delay)
		resp=sess.get(url,headers=header,stream=False)
		img=resp.content
		resp.close()
		save_to_path(url, img)
	except Exception:
		# Best-effort: log, reset the session, and move on to the next URL.
		print("Downloading Failed")
		restart_session()
		return

def save_status():
	"""Persist crawler state (forbidden list, visited pages, visited images)
	to mission.stt as JSON; the counterpart of load_status()."""
	global forbidden_url, vst, vst_img
	state = {"fbd_url": forbidden_url, "vst": vst, "vst_img": vst_img}
	# json.dump builds the document directly instead of hand-concatenating
	# JSON fragments; `with` guarantees the file is flushed and closed.
	with open("mission.stt", "w") as f:
		json.dump(state, f)

def load_status():
	"""Restore crawler state from mission.stt and rebuild the work queue.

	Drains any stale queue entries, loads the saved forbidden list and
	visited maps, then re-queues every page recorded as not yet crawled.
	"""
	global forbidden_url, vst, vst_img, q
	# Drain whatever work items are left over from the previous run.
	while not q.empty():
		q.get()
	# Fix: the original leaked the file handle (open() without close()).
	with open("mission.stt", "r") as f:
		tmp = json.load(f)
	forbidden_url=tmp["fbd_url"]
	vst=tmp["vst"]
	vst_img=tmp["vst_img"]
	# Re-queue pages that were discovered but not finished (value 0).
	for page, done in vst.items():
		if done == 0:
			q.put(page)
def _resolve_page_url(s):
	"""Map an href/src attribute value to an absolute in-scope URL.

	Returns None when the URL is absolute but outside the crawl scope
	(does not contain match_url); relative URLs are joined to basic_url.
	"""
	if s.find("http") != -1:
		return s if s.find(match_url) != -1 else None
	return urljoin(basic_url, s)

def run_once():
	"""Fetch one queued page, enqueue its in-scope links, download its images.

	Marks the page as visited (vst[now] = 1) whether or not the fetch
	succeeded, except when it is skipped as forbidden.
	"""
	global q,delay,sess
	now=q.get()
	# Fix: str.replace returns a new string; the original discarded the
	# results, so tabs/spaces were never stripped from the URL.
	now = now.replace("\t", "").replace(" ", "")
	print("Page Downloading... url=",now)
	if check_forbidden(now):
		print("SKIP")
		return
	try:
		sleep(delay)
		resp=sess.get(now,headers=header,stream=False)
		r=resp.content.decode("utf-8")
		print("Page downloaded")
		resp.close()
		soup=BeautifulSoup(r,"html.parser")
		# Queue every in-scope hyperlink that has not been seen yet.
		for tag in soup.find_all('a'):
			if not tag.has_attr("href"):
				continue
			href=tag.attrs["href"]
			if href.find("mailto")!=-1:
				continue
			url=_resolve_page_url(href)
			if url is None or check_forbidden(url):
				continue
			if url not in vst:
				q.put(url)
				vst.update({url:0})
		# Download every in-scope image not already fetched.
		for tag in soup.find_all("img"):
			if not tag.has_attr("src"):
				continue
			url=_resolve_page_url(tag.attrs["src"])
			if url is None:
				continue
			if url not in vst_img:
				sleep(delay)
				save_img(url)
				vst_img.append(url)
	except Exception:
		# Best-effort: log, reset the session; the page is still marked
		# visited below (same as the original behavior).
		print("Downloading Failed")
		restart_session()
	vst[now]=1

def run_thread():
	"""Worker loop: consume the queue via run_once() while `running` is set.

	Exits when the queue is empty; while paused (`running` False) it
	sleeps briefly instead of spinning.
	"""
	global q,running
	while not q.empty():
		if not running:
			# Fix: the original busy-waited with a bare `continue`,
			# pinning a CPU core for the whole pause.
			sleep(0.05)
			continue
		run_once()
	print("Thread End.")
if __name__ == "__main__":
	# Interactive command loop driving the crawler. Commands are mutually
	# exclusive single words, so an elif chain dispatches them.
	while True:
		cmd=input()
		if cmd=="save":
			save_status()
		elif cmd=="load":
			load_status()
		elif cmd=="start":
			try:
				start_new_thread(run_thread, ())
				print("Thread started")
			except:
				print("Unable to start thread")
				exit(0)
			running=True
		elif cmd=="resume":
			running=True
		elif cmd=="stop":
			running=False
		elif cmd=="init":
			# Seed the queue and visited map with the configured root URL.
			q=Queue()
			q.put(basic_url)
			vst={basic_url:0}
			vst_img=[]
		elif cmd=="new":
			# Configure a fresh mission: root URL, scope match, blacklist.
			basic_url=input("Input Basic URL: ")
			match_url=input("Input Match URL: ")
			forbidden_url=[]
			print("Input Forbidden URL:")
			while True:
				entry=input()
				if entry=="":
					break
				forbidden_url.append(entry)
		elif cmd=="debug":
			print(running)
			print(vst)
			print(vst_img)