#coding=utf-8

import sys
reload(sys)
sys.setdefaultencoding("utf8")

import urllib2
from pyquery import PyQuery
import time
import os
from splinter import Browser

# Base URL of the site being scraped.
host="http://siph0n.net"
# Path of one specific exploit page; not referenced elsewhere in this file.
addr="/exploits.php?id=966"
# Path of the leaks index (listing) page, appended to `host`.
_leaks="/leaks.php"
# Local directory where each scraped leak is written as "<page title>.txt".
leaks_path="leaks/"

# Last listing URL visited; NOTE(review): getLeaks assigns a *local* `pages`,
# so this module-level value is never updated by it.
pages=""

# Progress counter used by getContent; getLeaks keeps its own local counter.
count=1

def writeFile(path, texts):
    """Write `texts` to the file at `path`.

    Returns True on success, False if the file could not be written.
    Never raises: any I/O failure is reported to stdout and swallowed.
    """
    try:
        print("----writing file----")
        # `with` guarantees the handle is closed even if write() raises
        # (the original leaked the handle on error and shadowed builtin `file`).
        with open(path, "w") as f:
            f.write(texts)
        return True
    except Exception as e:
        # Exception, not BaseException: don't swallow KeyboardInterrupt/SystemExit.
        # print(e) instead of e.message, which is deprecated and often empty.
        print(e)
        return False

def getContent(url, browser):
    """Open `url` in a new browser window, save its <pre> text to a file,
    then switch back and close the popup.

    The output file is `leaks_path` + page title + ".txt"; the module-level
    `count` is used as a running progress counter.
    """
    global count
    # Clicking the link opens the target page in a second window.
    browser.click_link_by_href(url)
    time.sleep(3)  # give the new window time to load
    browser.windows.current = browser.windows[1]
    page_text = browser.find_by_css("pre").first.text
    writeFile(leaks_path + browser.title + ".txt", page_text)
    # Switch focus back to the listing window before closing the popup.
    browser.windows.current = browser.windows[0]
    browser.windows[1].close()
    print("--- %d complete----" % count)
    count += 1

def getLeaks(browser):
    """Crawl every page of the leaks listing, saving each leak's <pre> text
    into `leaks_path` via writeFile.

    Returns True when the crawl finished (no more pages), False on a fatal
    error. Transient per-page failures are recovered by closing stray
    windows and reloading the last known listing URL.
    """
    try:
        count = 1
        if not os.path.exists(leaks_path):
            os.makedirs(leaks_path)
        pages = host + _leaks
        browser.visit(pages)
        while True:
            try:
                elements = browser.find_by_xpath('//tr[@class="submit"]')
                if len(elements) == 0:
                    break
                for ele in elements:
                    # Extract the leak link from the row's HTML.
                    query = PyQuery(ele.outer_html)
                    url = query("[target=_blank]").attr("href")
                    # The link opens in a new window; grab the <pre> text there.
                    browser.click_link_by_href(url)
                    time.sleep(3)
                    browser.windows.current = browser.windows[1]
                    text = browser.find_by_css("pre").first.text
                    writeFile(leaks_path + browser.title + ".txt", text)
                    browser.windows.current = browser.windows[0]
                    browser.windows[1].close()
                    print("--- %d complete----" % count)
                    count += 1
                # BUG FIX: on the last page the original clicked a missing
                # "Next »" link, which raised, was swallowed by the except
                # below, and retried the same page forever. Stop cleanly
                # when there is no next-page link.
                next_links = browser.find_by_text("Next »")
                if len(next_links) == 0:
                    break
                next_links.first.click()
                pages = browser.url  # remember the last good listing URL
                time.sleep(3)
            except Exception as e:
                # Recover from a flaky page: close every stray window
                # (iterate a copy, since closing mutates the collection)
                # and reload the last known listing page.
                for window in list(browser.windows):
                    window.close()
                browser.visit(pages)
        return True
    except Exception as e:
        print(e)
        return False

def crawl():
    """Launch a Chrome session, run the leaks crawler, then shut it down."""
    session = Browser("chrome")
    getLeaks(session)
    session.quit()

crawl()