#!/usr/bin/env python
# -*- coding: utf-8 -*-
##20090530

import UserDict; UserDict.UserDict = dict
import urllib2
import hashlib
import time
import string
import random
import hunnyb
import socket
import gzip, cStringIO

class btjump(object):
    def __init__(self, port=6881):
        #fake a useragent
        self.useragent = 'Deluge 1.1.8'
        # Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1)
        # Bram's bittorrent 5.2.2 client's user_agent: M5-2-2-- or None
        
        #fake a offical Bram's bittorrent 5.2.2 client:M5-2-2--
        #fake a Deluge1.18 bittorrent client:-DE1180-
        random.seed(id(self.useragent))
        self.peer_id = '-DE1180-' + ''.join(random.Random().sample(\
                    string.letters+string.digits, 12))
        self.port = port
        self.info_hash = int(hashlib.sha1(time.strftime('%Y%W', \
                time.gmtime())).hexdigest(), 16)
        
    def getjumpseed(self):
        '''from some bt tracker get the hotesd seed's info_hash'''
        httpscrape = ('http://bt.ali213.net:8000/scrape',
        'http://bt.hliang.com:2710/scrape')
        
        opener = urllib2.build_opener()
        #fake a webbrowser and change the deault Python-urllib mark
        opener.addheaders = [('Accept-encoding', 'gzip, deflate'), \
                ('user-agent', self.useragent)]

        #query bittorrent tracker servers for the nearest hot seed present
        jumpseed = []
        for url in httpscrape:
            try:
                f = opener.open(url, timeout=10)
                r = f.read()
            except:
                print url+' HTTPError'
                continue
            else:
                print url+' read ok'
            
            try:
                r = gzip.GzipFile(fileobj=cStringIO.StringIO(r)).read()
            except IOError:
                print url + ' unzip Error'
                continue
            else:
                print url+' unzip ok'
            
            try:
                alltorrents = hunnyb.decode(r)['files']
            except HunnyBError:
                print url + ' bdecodeError'
                continue
            
            for k, v in alltorrents.items():
                #max 200 peers for speed find firends
                number = v['complete'] + v['downloaded'] + v['incomplete']
                if number >= 80 and number <= 300:
                    alltorrents[k] = (int(k.encode('hex'), 16) ^ self.info_hash, number)
                else:
                    del alltorrents[k]
            print url+' xor ok'
            
            jumpkey = sorted(alltorrents, key=lambda i: alltorrents.get(i))[:5]
            url = url.replace('scrape','announce')
            for i in jumpkey:
                jumpseed.append([alltorrents[i][0], i, alltorrents[i][1], url])
                
        jumpseed.sort()
        jumpseed = jumpseed[:5]
        for i in jumpseed:
            del i[0]
        return jumpseed
    
    def getpeerlist(self, info_hash, url, event=None):
        #event in('started', 'stopped', None)
        opener = urllib2.build_opener()
        
        geturl=''.join([url, '?', 'info_hash=', urllib2.quote(\
        info_hash, safe='') , '&peer_id=', self.peer_id, \
        '&port=', str(self.port),\
        '&uploaded=0&downloaded=0&left=262144&compact=1'\
        '&supportcrypto=1&numwant=200'])
        if event is not None:
            geturl=''.join([geturl, '&event=', event])
        
        opener.addheaders = [('Accept-encoding', 'text/plain'), \
                        ('user-agent', self.useragent)]
        try:
            f = opener.open(geturl, timeout=10)
            r = f.read()
            now=time.strftime('%Y%m%d %H:%M:%S', time.gmtime())
        except:
            return -1
            
        try:
            Response = hunnyb.decode(r)
        except HunnyBError:
            return -2
        if 'failure reason' in Response:
            print Response
        else:
            interval = Response['interval']
            mininterval = Response['min interval']
            peerslist = Response['peers']
            print interval, mininterval, now, info_hash, url
            for addr in (peerslist[x*6:x*6+6] for x in xrange(len(peerslist)/6)):
                print socket.inet_ntoa(addr[0:4]), addr[0:4], \
                    int(addr[4:][::-1].encode('hex'), 16)
        