index       int64           0 to 95k
input       stringlengths   0 to 248k
gt          stringlengths   3 to 39.5k
hash        int64           -9,223,358,438,122,498,000 to 9,222,739,030B
full_line   stringlengths   6 to 256k
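To make the schema above concrete, here is a minimal sketch of reading one of the records listed below, assuming the rows are exposed as Python dicts keyed by the column names; the detokenize() helper is an illustrative assumption, not part of the dataset itself. The record copies the short sample at index 94,702.

```python
# Minimal sketch: turning one record's "full_line" cell back into readable
# source. Field names follow the schema above; the record dict and the
# detokenize() helper are illustrative assumptions, not part of the dump.

def detokenize(cell: str) -> str:
    """Split a tokenized cell on its <EOL> separators, drop the </s> end
    marker, and keep the <STR_LIT>/<NUM_LIT> literal placeholders as-is."""
    text = cell.replace("</s>", "")
    return "\n".join(part.strip() for part in text.split("<EOL>"))

record = {
    "index": 94702,
    "hash": -4916918878623626000,
    "full_line": (
        "class PyAsn1Error ( StandardError ) : pass <EOL> "
        "class ValueConstraintError ( PyAsn1Error ) : pass <EOL> "
        "class SubstrateUnderrunError ( PyAsn1Error ) : pass </s>"
    ),
}

print(detokenize(record["full_line"]))
# class PyAsn1Error ( StandardError ) : pass
# class ValueConstraintError ( PyAsn1Error ) : pass
# class SubstrateUnderrunError ( PyAsn1Error ) : pass
```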
94,700
"""<STR_LIT>""" <EOL> __docformat__ = "<STR_LIT>" <EOL> from . etree import ElementTree , ElementClass <EOL> from . exceptions import BadRequestProtocolError <EOL> from . stanza import Stanza <EOL> PRESENCE_TYPES = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:error>" ) <EOL> ACCEPT_RESPONSES = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> DENY_RESPONSES = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> class Presence ( Stanza ) : <EOL> """<STR_LIT>""" <EOL> element_name = "<STR_LIT>" <EOL> def __init__ ( self , element = None , from_jid = None , to_jid = None , <EOL> stanza_type = None , stanza_id = None , <EOL> error = None , error_cond = None , return_path = None , <EOL> language = None , <EOL> show = None , status = None , priority = None ) : <EOL> """<STR_LIT>""" <EOL> self . _show = None <EOL> self . _status = None <EOL> self . _priority = <NUM_LIT:0> <EOL> if element is None : <EOL> element = "<STR_LIT>" <EOL> elif not isinstance ( element , ElementClass ) : <EOL> raise TypeError ( "<STR_LIT>" + repr ( element ) ) <EOL> if stanza_type is not None and stanza_type not in PRESENCE_TYPES : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif stanza_type == '<STR_LIT>' : <EOL> stanza_type = None <EOL> Stanza . __init__ ( self , element , from_jid = from_jid , to_jid = to_jid , <EOL> stanza_type = stanza_type , stanza_id = stanza_id , <EOL> error = error , error_cond = error_cond ,
return_path = return_path , language = language )
4,859,812,007,261,156,000
"""<STR_LIT>""" <EOL> __docformat__ = "<STR_LIT>" <EOL> from . etree import ElementTree , ElementClass <EOL> from . exceptions import BadRequestProtocolError <EOL> from . stanza import Stanza <EOL> PRESENCE_TYPES = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:error>" ) <EOL> ACCEPT_RESPONSES = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> DENY_RESPONSES = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> class Presence ( Stanza ) : <EOL> """<STR_LIT>""" <EOL> element_name = "<STR_LIT>" <EOL> def __init__ ( self , element = None , from_jid = None , to_jid = None , <EOL> stanza_type = None , stanza_id = None , <EOL> error = None , error_cond = None , return_path = None , <EOL> language = None , <EOL> show = None , status = None , priority = None ) : <EOL> """<STR_LIT>""" <EOL> self . _show = None <EOL> self . _status = None <EOL> self . _priority = <NUM_LIT:0> <EOL> if element is None : <EOL> element = "<STR_LIT>" <EOL> elif not isinstance ( element , ElementClass ) : <EOL> raise TypeError ( "<STR_LIT>" + repr ( element ) ) <EOL> if stanza_type is not None and stanza_type not in PRESENCE_TYPES : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> elif stanza_type == '<STR_LIT>' : <EOL> stanza_type = None <EOL> Stanza . __init__ ( self , element , from_jid = from_jid , to_jid = to_jid , <EOL> stanza_type = stanza_type , stanza_id = stanza_id , <EOL> error = error , error_cond = error_cond , <EOL> return_path = return_path , language = language ) <EOL> if self . element_name != "<STR_LIT>" : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . _show_tag = self . _ns_prefix + "<STR_LIT>" <EOL> self . _status_tag = self . _ns_prefix + "<STR_LIT:status>" <EOL> self . _priority_tag = self . _ns_prefix + "<STR_LIT>" <EOL> if self . _element is not None : <EOL> self . _decode_subelements ( ) <EOL> if show is not None : <EOL> self . show = show <EOL> if status is not None : <EOL> self . status = status <EOL> if priority is not None : <EOL> self . priority = priority <EOL> def _decode_subelements ( self ) : <EOL> """<STR_LIT>""" <EOL> for child in self . _element : <EOL> if child . tag == self . _show_tag : <EOL> self . _show = child . text <EOL> elif child . tag == self . _status_tag : <EOL> self . _status = child . text <EOL> elif child . tag == self . _priority_tag : <EOL> try : <EOL> self . _priority = int ( child . text . strip ( ) ) <EOL> if self . _priority < - <NUM_LIT> or self . _priority > <NUM_LIT> : <EOL> raise ValueError <EOL> except ValueError : <EOL> raise BadRequestProtocolError ( <EOL> "<STR_LIT>" ) <EOL> def as_xml ( self ) : <EOL> """<STR_LIT>""" <EOL> result = Stanza . as_xml ( self ) <EOL> if self . _show : <EOL> child = ElementTree . SubElement ( result , self . _show_tag ) <EOL> child . text = self . _show <EOL> if self . _status : <EOL> child = ElementTree . SubElement ( result , self . _status_tag ) <EOL> child . text = self . _status <EOL> if self . _priority : <EOL> child = ElementTree . SubElement ( result , self . _priority_tag ) <EOL> child . text = str ( self . _priority ) <EOL> return result <EOL> def copy ( self ) : <EOL> """<STR_LIT>""" <EOL> result = Presence ( None , self . from_jid , self . to_jid , <EOL> self . stanza_type , self . stanza_id , self . error , <EOL> self . 
_return_path ( ) , <EOL> self . _show , self . _status , self . _priority ) <EOL> if self . _payload is None : <EOL> self . decode_payload ( ) <EOL> for payload in self . _payload : <EOL> result . add_payload ( payload . copy ( ) ) <EOL> return result <EOL> @ property <EOL> def show ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _show <EOL> @ show . setter <EOL> def show ( self , show ) : <EOL> self . _show = str ( show ) <EOL> self . _dirty = True <EOL> @ property <EOL> def status ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _status <EOL> @ status . setter <EOL> def status ( self , status ) : <EOL> self . _status = str ( status ) <EOL> self . _dirty = True <EOL> @ property <EOL> def priority ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . _priority <EOL> @ priority . setter <EOL> def priority ( self , priority ) : <EOL> priority = int ( priority ) <EOL> if priority < - <NUM_LIT> or priority > <NUM_LIT> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> self . _priority = priority <EOL> self . _dirty = True <EOL> def make_accept_response ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . stanza_type not in ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> stanza = Presence ( stanza_type = ACCEPT_RESPONSES [ self . stanza_type ] , <EOL> from_jid = self . to_jid , to_jid = self . from_jid , <EOL> stanza_id = self . stanza_id ) <EOL> return stanza <EOL> def make_deny_response ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . stanza_type not in ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> stanza = Presence ( stanza_type = DENY_RESPONSES [ self . stanza_type ] , <EOL> from_jid = self . to_jid , to_jid = self . from_jid , <EOL> stanza_id = self . stanza_id ) <EOL> return stanza <EOL> def make_error_response ( self , cond ) : <EOL> """<STR_LIT>""" <EOL> if self . stanza_type == "<STR_LIT:error>" : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> stanza = Presence ( stanza_type = "<STR_LIT:error>" , from_jid = self . from_jid , <EOL> to_jid = self . to_jid , stanza_id = self . stanza_id , <EOL> status = self . _status , show = self . _show , <EOL> priority = self . _priority , error_cond = cond ) <EOL> if self . _payload is None : <EOL> self . decode_payload ( ) <EOL> for payload in self . _payload : <EOL> stanza . add_payload ( payload ) <EOL> return stanza </s>
94,701
'''<STR_LIT>''' <EOL> import os . path <EOL> import srinath_path_utils <EOL> RESOURCE_TYPE_LAYOUT = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> RESOURCE_TYPE_DRAWABLE = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> RESOURCE_TYPE_DATA = [ "<STR_LIT:data>" , ] <EOL> def get_resource_pathname ( resource_filename , resource_type ) : <EOL> retpath = srinath_path_utils . get_exec_file_dir ( ) <EOL> for subdir in resource_type : <EOL> retpath = os . path . join ( retpath , subdir ) <EOL> return os . path . join ( retpath , resource_filename ) <EOL> def get_drawable_pathname ( drawable_filename ) : <EOL> return "<STR_LIT>" + get_resource_pathname ( drawable_filename , RESOURCE_TYPE_DRAWABLE ) <EOL> def get_layout_pathname ( layout_filename ) : <EOL> return get_resource_pathname ( layout_filename , RESOURCE_TYPE_LAYOUT ) <EOL> def get_data_pathname ( data_filename ) : <EOL> return get_resource_pathname ( data_filename , RESOURCE_TYPE_DATA ) <EOL> def read_layout_xml ( layout_filename ) : <EOL> fil = open ( get_layout_pathname ( layout_filename ) ) <EOL> xmldata = fil . read ( ) <EOL> fil . close ( )
return xmldata
7,407,808,899,966,079,000
'''<STR_LIT>''' <EOL> import os . path <EOL> import srinath_path_utils <EOL> RESOURCE_TYPE_LAYOUT = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> RESOURCE_TYPE_DRAWABLE = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> RESOURCE_TYPE_DATA = [ "<STR_LIT:data>" , ] <EOL> def get_resource_pathname ( resource_filename , resource_type ) : <EOL> retpath = srinath_path_utils . get_exec_file_dir ( ) <EOL> for subdir in resource_type : <EOL> retpath = os . path . join ( retpath , subdir ) <EOL> return os . path . join ( retpath , resource_filename ) <EOL> def get_drawable_pathname ( drawable_filename ) : <EOL> return "<STR_LIT>" + get_resource_pathname ( drawable_filename , RESOURCE_TYPE_DRAWABLE ) <EOL> def get_layout_pathname ( layout_filename ) : <EOL> return get_resource_pathname ( layout_filename , RESOURCE_TYPE_LAYOUT ) <EOL> def get_data_pathname ( data_filename ) : <EOL> return get_resource_pathname ( data_filename , RESOURCE_TYPE_DATA ) <EOL> def read_layout_xml ( layout_filename ) : <EOL> fil = open ( get_layout_pathname ( layout_filename ) ) <EOL> xmldata = fil . read ( ) <EOL> fil . close ( ) <EOL> return xmldata <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> print get_data_pathname ( "<STR_LIT>" ) </s>
94,702
<s> class PyAsn1Error ( StandardError ) : pass
-4,916,918,878,623,626,000
class PyAsn1Error ( StandardError ) : pass <EOL> class ValueConstraintError ( PyAsn1Error ) : pass <EOL> class SubstrateUnderrunError ( PyAsn1Error ) : pass </s>
94,703
from __future__ import absolute_import , unicode_literals <EOL> import datetime <EOL> timezonenames = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT:z>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : - <NUM_LIT:3> , '<STR_LIT>' : - <NUM_LIT:4> , '<STR_LIT>' : - <NUM_LIT:4> , <EOL> '<STR_LIT>' : - <NUM_LIT:4> , '<STR_LIT>' : - <NUM_LIT:5> , '<STR_LIT>' : - <NUM_LIT:5> , <EOL> '<STR_LIT>' : - <NUM_LIT:5> , '<STR_LIT>' : - <NUM_LIT:6> , '<STR_LIT>' : - <NUM_LIT:6> , <EOL> '<STR_LIT>' : - <NUM_LIT:6> , '<STR_LIT>' : - <NUM_LIT:7> , '<STR_LIT>' : - <NUM_LIT:7> , <EOL> '<STR_LIT>' : - <NUM_LIT:7> , '<STR_LIT>' : - <NUM_LIT:8> , '<STR_LIT>' : - <NUM_LIT:8> , <EOL> '<STR_LIT:a>' : - <NUM_LIT:1> , '<STR_LIT:n>' : <NUM_LIT:1> , <EOL> '<STR_LIT:m>' : - <NUM_LIT:12> , '<STR_LIT:y>' : <NUM_LIT:12> , <EOL> } <EOL> def _parse_date_w3dtf ( datestr ) : <EOL> if not datestr . strip ( ) : <EOL> return None <EOL> parts = datestr . lower ( ) . split ( '<STR_LIT:t>' ) <EOL> if len ( parts ) == <NUM_LIT:1> : <EOL> parts = parts [ <NUM_LIT:0> ] . split ( ) <EOL> if len ( parts ) == <NUM_LIT:1> : <EOL> parts . append ( '<STR_LIT>' ) <EOL> elif len ( parts ) > <NUM_LIT:2> : <EOL> return None <EOL> date = parts [ <NUM_LIT:0> ] . split ( '<STR_LIT:->' , <NUM_LIT:2> ) <EOL> if not date or len ( date [ <NUM_LIT:0> ] ) != <NUM_LIT:4> : <EOL> return None <EOL> date . extend ( [ '<STR_LIT:1>' ] * ( <NUM_LIT:3> - len ( date ) ) ) <EOL> try : <EOL> year , month , day = [ int ( i ) for i in date ] <EOL> except ValueError : <EOL> return None <EOL> if parts [ <NUM_LIT:1> ] . endswith ( '<STR_LIT:z>' ) : <EOL> parts [ <NUM_LIT:1> ] = parts [ <NUM_LIT:1> ] [ : - <NUM_LIT:1> ] <EOL> parts . append ( '<STR_LIT:z>' ) <EOL> loc = parts [ <NUM_LIT:1> ] . find ( '<STR_LIT:->' ) + <NUM_LIT:1> or parts [ <NUM_LIT:1> ] . find ( '<STR_LIT:+>' ) + <NUM_LIT:1> or len ( parts [ <NUM_LIT:1> ] ) + <NUM_LIT:1> <EOL> loc = loc - <NUM_LIT:1> <EOL> parts . append ( parts [ <NUM_LIT:1> ] [ loc : ] ) <EOL> parts [ <NUM_LIT:1> ] = parts [ <NUM_LIT:1> ] [ : loc ] <EOL> time = parts [ <NUM_LIT:1> ] . split ( '<STR_LIT::>' , <NUM_LIT:2> ) <EOL> time . extend ( [ '<STR_LIT:0>' ] * ( <NUM_LIT:3> - len ( time ) ) ) <EOL> tzhour = <NUM_LIT:0> <EOL> tzmin = <NUM_LIT:0> <EOL> if parts [ <NUM_LIT:2> ] [ : <NUM_LIT:1> ] in ( '<STR_LIT:->' , '<STR_LIT:+>' ) : <EOL> try : <EOL> tzhour = int ( parts [ <NUM_LIT:2> ] [ <NUM_LIT:1> : <NUM_LIT:3> ] ) <EOL> tzmin = int ( parts [ <NUM_LIT:2> ] [ <NUM_LIT:4> : ] ) <EOL> except ValueError : <EOL> return None <EOL> if parts [ <NUM_LIT:2> ] . startswith ( '<STR_LIT:->' ) : <EOL> tzhour = tzhour * - <NUM_LIT:1> <EOL> tzmin = tzmin * - <NUM_LIT:1> <EOL> else : <EOL> tzhour = timezonenames . get ( parts [ <NUM_LIT:2> ] , <NUM_LIT:0> ) <EOL> try : <EOL> hour , minute , second = [ int ( float ( i ) ) for i in time ] <EOL> except ValueError : <EOL> return None <EOL> try : <EOL> stamp = datetime . datetime ( year , month , day , hour , minute , second ) <EOL> except ValueError : <EOL> return None <EOL> delta = datetime . timedelta ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , tzmin , tzhour ) <EOL> try : <EOL> return ( stamp - delta ) . utctimetuple ( ) <EOL> except ( OverflowError , ValueError ) :
return None </s>
-433,070,456,835,644
from __future__ import absolute_import , unicode_literals <EOL> import datetime <EOL> timezonenames = { <EOL> '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT>' : <NUM_LIT:0> , '<STR_LIT:z>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : - <NUM_LIT:3> , '<STR_LIT>' : - <NUM_LIT:4> , '<STR_LIT>' : - <NUM_LIT:4> , <EOL> '<STR_LIT>' : - <NUM_LIT:4> , '<STR_LIT>' : - <NUM_LIT:5> , '<STR_LIT>' : - <NUM_LIT:5> , <EOL> '<STR_LIT>' : - <NUM_LIT:5> , '<STR_LIT>' : - <NUM_LIT:6> , '<STR_LIT>' : - <NUM_LIT:6> , <EOL> '<STR_LIT>' : - <NUM_LIT:6> , '<STR_LIT>' : - <NUM_LIT:7> , '<STR_LIT>' : - <NUM_LIT:7> , <EOL> '<STR_LIT>' : - <NUM_LIT:7> , '<STR_LIT>' : - <NUM_LIT:8> , '<STR_LIT>' : - <NUM_LIT:8> , <EOL> '<STR_LIT:a>' : - <NUM_LIT:1> , '<STR_LIT:n>' : <NUM_LIT:1> , <EOL> '<STR_LIT:m>' : - <NUM_LIT:12> , '<STR_LIT:y>' : <NUM_LIT:12> , <EOL> } <EOL> def _parse_date_w3dtf ( datestr ) : <EOL> if not datestr . strip ( ) : <EOL> return None <EOL> parts = datestr . lower ( ) . split ( '<STR_LIT:t>' ) <EOL> if len ( parts ) == <NUM_LIT:1> : <EOL> parts = parts [ <NUM_LIT:0> ] . split ( ) <EOL> if len ( parts ) == <NUM_LIT:1> : <EOL> parts . append ( '<STR_LIT>' ) <EOL> elif len ( parts ) > <NUM_LIT:2> : <EOL> return None <EOL> date = parts [ <NUM_LIT:0> ] . split ( '<STR_LIT:->' , <NUM_LIT:2> ) <EOL> if not date or len ( date [ <NUM_LIT:0> ] ) != <NUM_LIT:4> : <EOL> return None <EOL> date . extend ( [ '<STR_LIT:1>' ] * ( <NUM_LIT:3> - len ( date ) ) ) <EOL> try : <EOL> year , month , day = [ int ( i ) for i in date ] <EOL> except ValueError : <EOL> return None <EOL> if parts [ <NUM_LIT:1> ] . endswith ( '<STR_LIT:z>' ) : <EOL> parts [ <NUM_LIT:1> ] = parts [ <NUM_LIT:1> ] [ : - <NUM_LIT:1> ] <EOL> parts . append ( '<STR_LIT:z>' ) <EOL> loc = parts [ <NUM_LIT:1> ] . find ( '<STR_LIT:->' ) + <NUM_LIT:1> or parts [ <NUM_LIT:1> ] . find ( '<STR_LIT:+>' ) + <NUM_LIT:1> or len ( parts [ <NUM_LIT:1> ] ) + <NUM_LIT:1> <EOL> loc = loc - <NUM_LIT:1> <EOL> parts . append ( parts [ <NUM_LIT:1> ] [ loc : ] ) <EOL> parts [ <NUM_LIT:1> ] = parts [ <NUM_LIT:1> ] [ : loc ] <EOL> time = parts [ <NUM_LIT:1> ] . split ( '<STR_LIT::>' , <NUM_LIT:2> ) <EOL> time . extend ( [ '<STR_LIT:0>' ] * ( <NUM_LIT:3> - len ( time ) ) ) <EOL> tzhour = <NUM_LIT:0> <EOL> tzmin = <NUM_LIT:0> <EOL> if parts [ <NUM_LIT:2> ] [ : <NUM_LIT:1> ] in ( '<STR_LIT:->' , '<STR_LIT:+>' ) : <EOL> try : <EOL> tzhour = int ( parts [ <NUM_LIT:2> ] [ <NUM_LIT:1> : <NUM_LIT:3> ] ) <EOL> tzmin = int ( parts [ <NUM_LIT:2> ] [ <NUM_LIT:4> : ] ) <EOL> except ValueError : <EOL> return None <EOL> if parts [ <NUM_LIT:2> ] . startswith ( '<STR_LIT:->' ) : <EOL> tzhour = tzhour * - <NUM_LIT:1> <EOL> tzmin = tzmin * - <NUM_LIT:1> <EOL> else : <EOL> tzhour = timezonenames . get ( parts [ <NUM_LIT:2> ] , <NUM_LIT:0> ) <EOL> try : <EOL> hour , minute , second = [ int ( float ( i ) ) for i in time ] <EOL> except ValueError : <EOL> return None <EOL> try : <EOL> stamp = datetime . datetime ( year , month , day , hour , minute , second ) <EOL> except ValueError : <EOL> return None <EOL> delta = datetime . timedelta ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , tzmin , tzhour ) <EOL> try : <EOL> return ( stamp - delta ) . utctimetuple ( ) <EOL> except ( OverflowError , ValueError ) : <EOL> return None </s>
94,704
from retask import Task <EOL> from retask import Queue <EOL> queue = Queue ( '<STR_LIT>' ) <EOL> info1 = { '<STR_LIT:user>' : '<STR_LIT>' , '<STR_LIT:url>' : '<STR_LIT>' }
info2 = { '<STR_LIT:user>' : '<STR_LIT>' , '<STR_LIT:url>' : '<STR_LIT>' }
1,447,294,438,928,397,000
from retask import Task <EOL> from retask import Queue <EOL> queue = Queue ( '<STR_LIT>' ) <EOL> info1 = { '<STR_LIT:user>' : '<STR_LIT>' , '<STR_LIT:url>' : '<STR_LIT>' } <EOL> info2 = { '<STR_LIT:user>' : '<STR_LIT>' , '<STR_LIT:url>' : '<STR_LIT>' } <EOL> task1 = Task ( info1 ) <EOL> task2 = Task ( info2 ) <EOL> queue . connect ( ) <EOL> queue . enqueue ( task1 ) <EOL> queue . enqueue ( task2 ) </s>
94,705
from flask import render_template <EOL> from flask . views import MethodView <EOL> from . exceptions import ImproperlyConfigured <EOL> class BaseView ( MethodView ) :
"""<STR_LIT>"""
846,546,368,651,755,300
from flask import render_template <EOL> from flask . views import MethodView <EOL> from . exceptions import ImproperlyConfigured <EOL> class BaseView ( MethodView ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> for key , value in kwargs . iteritems ( ) : <EOL> setattr ( self , key , value ) <EOL> def dispatch_request ( self , * args , ** kwargs ) : <EOL> self . args = args <EOL> self . kwargs = kwargs <EOL> return super ( BaseView , self ) . dispatch_request ( * args , ** kwargs ) <EOL> class TemplateView ( BaseView ) : <EOL> """<STR_LIT>""" <EOL> methods = [ '<STR_LIT:GET>' ] <EOL> template = None <EOL> context = { } <EOL> def get_template ( self ) : <EOL> if not self . template : <EOL> raise ImproperlyConfigured ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> return self . template <EOL> def get_context ( self , ** kwargs ) : <EOL> context = { } <EOL> context . update ( self . context ) <EOL> context [ '<STR_LIT>' ] = kwargs <EOL> return context <EOL> def render ( self , ** kwargs ) : <EOL> template = self . get_template ( ) <EOL> context = self . get_context ( ** kwargs ) <EOL> return render_template ( template , ** context ) <EOL> def get ( self , ** kwargs ) : <EOL> return self . render ( ** kwargs ) </s>
94,706
import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> from validators import __version__ <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = __version__ <EOL> release = version
exclude_patterns = [ '<STR_LIT>' ]
-2,950,641,497,325,624,300
import sys <EOL> import os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . abspath ( '<STR_LIT:..>' ) ) <EOL> from validators import __version__ <EOL> extensions = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = __version__ <EOL> release = version <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT:default>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] <EOL> intersphinx_mapping = { '<STR_LIT>' : None } </s>
94,707
import six <EOL> from . utils import validator <EOL> @ validator <EOL> def truthy ( value ) : <EOL> """<STR_LIT>"""
return (
-1,590,758,048,162,473,700
import six <EOL> from . utils import validator <EOL> @ validator <EOL> def truthy ( value ) : <EOL> """<STR_LIT>""" <EOL> return ( <EOL> value and <EOL> ( not isinstance ( value , six . string_types ) or value . strip ( ) ) <EOL> ) </s>
94,708
import sqlalchemy as sa <EOL> from wtforms import Form <EOL> from wtforms . compat import text_type <EOL> from wtforms_alchemy import ( <EOL> GroupedQuerySelectField , <EOL> QuerySelectField , <EOL> QuerySelectMultipleField <EOL> ) <EOL> class DummyPostData ( dict ) : <EOL> def getlist ( self , key ) : <EOL> v = self [ key ] <EOL> if not isinstance ( v , ( list , tuple ) ) : <EOL> v = [ v ] <EOL> return v <EOL> class LazySelect ( object ) : <EOL> def __call__ ( self , field , ** kwargs ) : <EOL> return list ( <EOL> ( val , text_type ( label ) , selected ) <EOL> for val , label , selected in field . iter_choices ( ) <EOL> ) <EOL> class Base ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> for k , v in kwargs . items ( ) : <EOL> setattr ( self , k , v ) <EOL> class TestBase ( object ) : <EOL> def _do_tables ( self , mapper , engine ) : <EOL> metadata = sa . MetaData ( ) <EOL> test_table = sa . Table ( <EOL> '<STR_LIT:test>' , metadata , <EOL> sa . Column ( '<STR_LIT:id>' , sa . Integer , primary_key = True , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String , nullable = False ) , <EOL> ) <EOL> pk_test_table = sa . Table ( <EOL> '<STR_LIT>' , metadata , <EOL> sa . Column ( '<STR_LIT>' , sa . String , primary_key = True , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String , nullable = False ) , <EOL> ) <EOL> Test = type ( str ( '<STR_LIT>' ) , ( Base , ) , { } ) <EOL> PKTest = type ( str ( '<STR_LIT>' ) , ( Base , ) , { <EOL> '<STR_LIT>' : lambda x : x . baz , <EOL> '<STR_LIT>' : lambda x : x . baz , <EOL> } ) <EOL> mapper ( Test , test_table , order_by = [ test_table . c . name ] ) <EOL> mapper ( PKTest , pk_test_table , order_by = [ pk_test_table . c . baz ] ) <EOL> self . Test = Test <EOL> self . PKTest = PKTest <EOL> metadata . create_all ( bind = engine ) <EOL> def _fill ( self , sess ) : <EOL> for i , n in [ ( <NUM_LIT:1> , '<STR_LIT>' ) , ( <NUM_LIT:2> , '<STR_LIT>' ) ] : <EOL> s = self . Test ( id = i , name = n ) <EOL> p = self . PKTest ( foobar = '<STR_LIT>' % ( i , ) , baz = n ) <EOL> sess . add ( s ) <EOL> sess . add ( p ) <EOL> sess . flush ( ) <EOL> sess . commit ( ) <EOL> class TestQuerySelectField ( TestBase ) : <EOL> def setup_method ( self , method ) : <EOL> engine = sa . create_engine ( '<STR_LIT>' , echo = False ) <EOL> self . Session = sa . orm . session . sessionmaker ( bind = engine ) <EOL> from sqlalchemy . orm import mapper <EOL> self . _do_tables ( mapper , engine ) <EOL> def test_without_factory ( self ) : <EOL> sess = self . Session ( ) <EOL> self . _fill ( sess ) <EOL> class F ( Form ) : <EOL> a = QuerySelectField ( <EOL> get_label = '<STR_LIT:name>' , <EOL> widget = LazySelect ( ) , <EOL> get_pk = lambda x : x . id <EOL> ) <EOL> form = F ( DummyPostData ( a = [ '<STR_LIT:1>' ] ) ) <EOL> form . a . query = sess . query ( self . Test ) <EOL> assert form . a . data is not None <EOL> assert form . a . data . id , <NUM_LIT:1> <EOL> assert form . a ( ) , [ ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) ] <EOL> assert form . validate ( ) <EOL> form = F ( a = sess . query ( self . Test ) . filter_by ( name = '<STR_LIT>' ) . first ( ) ) <EOL> form . a . query = sess . query ( self . Test ) . filter ( self . Test . name != '<STR_LIT>' ) <EOL> assert not form . validate ( ) <EOL> assert form . a . errors , [ '<STR_LIT>' ] <EOL> def test_with_query_factory ( self ) : <EOL> sess = self . Session ( ) <EOL> self . 
_fill ( sess ) <EOL> class F ( Form ) : <EOL> a = QuerySelectField ( <EOL> get_label = ( lambda model : model . name ) , <EOL> query_factory = lambda : sess . query ( self . Test ) , <EOL> widget = LazySelect ( ) <EOL> ) <EOL> b = QuerySelectField ( <EOL> allow_blank = True , <EOL> query_factory = lambda : sess . query ( self . PKTest ) , <EOL> widget = LazySelect ( ) <EOL> )
form = F ( )
-1,012,651,103,507,892,200
import sqlalchemy as sa <EOL> from wtforms import Form <EOL> from wtforms . compat import text_type <EOL> from wtforms_alchemy import ( <EOL> GroupedQuerySelectField , <EOL> QuerySelectField , <EOL> QuerySelectMultipleField <EOL> ) <EOL> class DummyPostData ( dict ) : <EOL> def getlist ( self , key ) : <EOL> v = self [ key ] <EOL> if not isinstance ( v , ( list , tuple ) ) : <EOL> v = [ v ] <EOL> return v <EOL> class LazySelect ( object ) : <EOL> def __call__ ( self , field , ** kwargs ) : <EOL> return list ( <EOL> ( val , text_type ( label ) , selected ) <EOL> for val , label , selected in field . iter_choices ( ) <EOL> ) <EOL> class Base ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> for k , v in kwargs . items ( ) : <EOL> setattr ( self , k , v ) <EOL> class TestBase ( object ) : <EOL> def _do_tables ( self , mapper , engine ) : <EOL> metadata = sa . MetaData ( ) <EOL> test_table = sa . Table ( <EOL> '<STR_LIT:test>' , metadata , <EOL> sa . Column ( '<STR_LIT:id>' , sa . Integer , primary_key = True , nullable = False ) , <EOL> sa . Column ( '<STR_LIT:name>' , sa . String , nullable = False ) , <EOL> ) <EOL> pk_test_table = sa . Table ( <EOL> '<STR_LIT>' , metadata , <EOL> sa . Column ( '<STR_LIT>' , sa . String , primary_key = True , nullable = False ) , <EOL> sa . Column ( '<STR_LIT>' , sa . String , nullable = False ) , <EOL> ) <EOL> Test = type ( str ( '<STR_LIT>' ) , ( Base , ) , { } ) <EOL> PKTest = type ( str ( '<STR_LIT>' ) , ( Base , ) , { <EOL> '<STR_LIT>' : lambda x : x . baz , <EOL> '<STR_LIT>' : lambda x : x . baz , <EOL> } ) <EOL> mapper ( Test , test_table , order_by = [ test_table . c . name ] ) <EOL> mapper ( PKTest , pk_test_table , order_by = [ pk_test_table . c . baz ] ) <EOL> self . Test = Test <EOL> self . PKTest = PKTest <EOL> metadata . create_all ( bind = engine ) <EOL> def _fill ( self , sess ) : <EOL> for i , n in [ ( <NUM_LIT:1> , '<STR_LIT>' ) , ( <NUM_LIT:2> , '<STR_LIT>' ) ] : <EOL> s = self . Test ( id = i , name = n ) <EOL> p = self . PKTest ( foobar = '<STR_LIT>' % ( i , ) , baz = n ) <EOL> sess . add ( s ) <EOL> sess . add ( p ) <EOL> sess . flush ( ) <EOL> sess . commit ( ) <EOL> class TestQuerySelectField ( TestBase ) : <EOL> def setup_method ( self , method ) : <EOL> engine = sa . create_engine ( '<STR_LIT>' , echo = False ) <EOL> self . Session = sa . orm . session . sessionmaker ( bind = engine ) <EOL> from sqlalchemy . orm import mapper <EOL> self . _do_tables ( mapper , engine ) <EOL> def test_without_factory ( self ) : <EOL> sess = self . Session ( ) <EOL> self . _fill ( sess ) <EOL> class F ( Form ) : <EOL> a = QuerySelectField ( <EOL> get_label = '<STR_LIT:name>' , <EOL> widget = LazySelect ( ) , <EOL> get_pk = lambda x : x . id <EOL> ) <EOL> form = F ( DummyPostData ( a = [ '<STR_LIT:1>' ] ) ) <EOL> form . a . query = sess . query ( self . Test ) <EOL> assert form . a . data is not None <EOL> assert form . a . data . id , <NUM_LIT:1> <EOL> assert form . a ( ) , [ ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) ] <EOL> assert form . validate ( ) <EOL> form = F ( a = sess . query ( self . Test ) . filter_by ( name = '<STR_LIT>' ) . first ( ) ) <EOL> form . a . query = sess . query ( self . Test ) . filter ( self . Test . name != '<STR_LIT>' ) <EOL> assert not form . validate ( ) <EOL> assert form . a . errors , [ '<STR_LIT>' ] <EOL> def test_with_query_factory ( self ) : <EOL> sess = self . Session ( ) <EOL> self . 
_fill ( sess ) <EOL> class F ( Form ) : <EOL> a = QuerySelectField ( <EOL> get_label = ( lambda model : model . name ) , <EOL> query_factory = lambda : sess . query ( self . Test ) , <EOL> widget = LazySelect ( ) <EOL> ) <EOL> b = QuerySelectField ( <EOL> allow_blank = True , <EOL> query_factory = lambda : sess . query ( self . PKTest ) , <EOL> widget = LazySelect ( ) <EOL> ) <EOL> form = F ( ) <EOL> assert form . a . data is None <EOL> assert form . a ( ) == [ ( '<STR_LIT:1>' , '<STR_LIT>' , False ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) ] <EOL> assert form . b . data is None <EOL> assert form . b ( ) == [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , True ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , False ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , False ) <EOL> ] <EOL> assert not form . validate ( ) <EOL> form = F ( DummyPostData ( a = [ '<STR_LIT:1>' ] , b = [ '<STR_LIT>' ] ) ) <EOL> assert form . a . data . id == <NUM_LIT:1> <EOL> assert form . a ( ) == [ ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) ] <EOL> assert form . b . data . baz == '<STR_LIT>' <EOL> assert form . b ( ) == [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , False ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , False ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> ] <EOL> assert form . validate ( ) <EOL> sess . add ( self . Test ( id = <NUM_LIT:3> , name = '<STR_LIT>' ) ) <EOL> sess . flush ( ) <EOL> sess . commit ( ) <EOL> assert form . a ( ) == [ ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) ] <EOL> form . a . _object_list = None <EOL> assert form . a ( ) == [ <EOL> ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) , ( '<STR_LIT:3>' , '<STR_LIT>' , False ) <EOL> ] <EOL> form = F ( DummyPostData ( b = [ '<STR_LIT>' ] , a = [ '<STR_LIT>' ] ) ) <EOL> assert not form . validate ( ) <EOL> assert form . a . errors == [ '<STR_LIT>' ] <EOL> assert form . b . errors == [ ] <EOL> assert form . b . data is None <EOL> class TestQuerySelectMultipleField ( TestBase ) : <EOL> def setup_method ( self , method ) : <EOL> from sqlalchemy . orm import mapper <EOL> engine = sa . create_engine ( '<STR_LIT>' , echo = False ) <EOL> Session = sa . orm . session . sessionmaker ( bind = engine ) <EOL> self . _do_tables ( mapper , engine ) <EOL> self . sess = Session ( ) <EOL> self . _fill ( self . sess ) <EOL> class F ( Form ) : <EOL> a = QuerySelectMultipleField ( get_label = '<STR_LIT:name>' , widget = LazySelect ( ) ) <EOL> def test_unpopulated_default ( self ) : <EOL> form = self . F ( ) <EOL> assert [ ] == form . a . data <EOL> def test_single_value_without_factory ( self ) : <EOL> form = self . F ( DummyPostData ( a = [ '<STR_LIT:1>' ] ) ) <EOL> form . a . query = self . sess . query ( self . Test ) <EOL> assert [ <NUM_LIT:1> ] == [ v . id for v in form . a . data ] <EOL> assert form . a ( ) == [ ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , False ) ] <EOL> assert form . validate ( ) <EOL> def test_multiple_values_without_query_factory ( self ) : <EOL> form = self . F ( DummyPostData ( a = [ '<STR_LIT:1>' , '<STR_LIT:2>' ] ) ) <EOL> form . a . query = self . sess . query ( self . Test ) <EOL> assert [ <NUM_LIT:1> , <NUM_LIT:2> ] == [ v . id for v in form . a . data ] <EOL> assert form . a ( ) == [ ( '<STR_LIT:1>' , '<STR_LIT>' , True ) , ( '<STR_LIT:2>' , '<STR_LIT>' , True ) ] <EOL> assert form . validate ( ) <EOL> form = self . F ( DummyPostData ( a = [ '<STR_LIT:1>' , '<STR_LIT:3>' ] ) ) <EOL> form . a . query = self . sess . 
query ( self . Test ) <EOL> assert [ x . id for x in form . a . data ] , [ <NUM_LIT:1> ] <EOL> assert not form . validate ( ) <EOL> def test_single_default_value ( self ) : <EOL> first_test = self . sess . query ( self . Test ) . get ( <NUM_LIT:2> ) <EOL> class F ( Form ) : <EOL> a = QuerySelectMultipleField ( <EOL> get_label = '<STR_LIT:name>' , <EOL> default = [ first_test ] , <EOL> widget = LazySelect ( ) , <EOL> query_factory = lambda : self . sess . query ( self . Test ) <EOL> ) <EOL> form = F ( ) <EOL> assert [ v . id for v in form . a . data ] , [ <NUM_LIT:2> ] <EOL> assert form . a ( ) , [ ( '<STR_LIT:1>' , '<STR_LIT>' , False ) , ( '<STR_LIT:2>' , '<STR_LIT>' , True ) ] <EOL> assert form . validate ( ) <EOL> class DatabaseTestCase ( object ) : <EOL> def setup_method ( self , method ) : <EOL> self . engine = sa . create_engine ( '<STR_LIT>' ) <EOL> self . base = sa . ext . declarative . declarative_base ( ) <EOL> self . create_models ( ) <EOL> self . base . metadata . create_all ( self . engine ) <EOL> Session = sa . orm . session . sessionmaker ( bind = self . engine ) <EOL> self . session = Session ( ) <EOL> def teardown_method ( self , method ) : <EOL> self . session . close_all ( ) <EOL> self . base . metadata . drop_all ( self . engine ) <EOL> self . engine . dispose ( ) <EOL> class TestGroupedQuerySelectField ( DatabaseTestCase ) : <EOL> def create_models ( self ) : <EOL> class City ( self . base ) : <EOL> __tablename__ = '<STR_LIT>' <EOL> id = sa . Column ( sa . Integer , primary_key = True ) <EOL> name = sa . Column ( sa . String ) <EOL> country = sa . Column ( sa . String ) <EOL> self . City = City <EOL> def create_cities ( self ) : <EOL> self . session . add_all ( [ <EOL> self . City ( name = '<STR_LIT>' , country = '<STR_LIT>' ) , <EOL> self . City ( name = '<STR_LIT>' , country = '<STR_LIT>' ) , <EOL> self . City ( name = '<STR_LIT>' , country = '<STR_LIT>' ) , <EOL> self . City ( name = '<STR_LIT>' , country = '<STR_LIT>' ) , <EOL> self . City ( name = '<STR_LIT>' , country = '<STR_LIT>' ) , <EOL> ] ) <EOL> def create_form ( self , ** kwargs ) : <EOL> query = self . session . query ( self . City ) . order_by ( '<STR_LIT:name>' , '<STR_LIT>' ) <EOL> class MyForm ( Form ) : <EOL> city = GroupedQuerySelectField ( <EOL> label = kwargs . get ( '<STR_LIT:label>' , '<STR_LIT>' ) , <EOL> query_factory = kwargs . get ( '<STR_LIT>' , lambda : query ) , <EOL> get_label = kwargs . get ( '<STR_LIT>' , lambda c : c . name ) , <EOL> get_group = kwargs . get ( '<STR_LIT>' , lambda c : c . country ) , <EOL> allow_blank = kwargs . get ( '<STR_LIT>' , False ) , <EOL> blank_text = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> blank_value = kwargs . get ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) <EOL> return MyForm <EOL> def test_rendering ( self ) : <EOL> MyForm = self . create_form ( ) <EOL> self . create_cities ( ) <EOL> assert str ( MyForm ( ) . city ) . replace ( '<STR_LIT:\n>' , '<STR_LIT>' ) == ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> def test_custom_none_value ( self ) : <EOL> self . create_cities ( ) <EOL> MyForm = self . create_form ( <EOL> allow_blank = True , <EOL> blank_text = '<STR_LIT>' , <EOL> blank_value = '<STR_LIT>' <EOL> ) <EOL> form = MyForm ( DummyPostData ( { '<STR_LIT>' : '<STR_LIT>' } ) ) <EOL> assert form . validate ( ) , form . 
errors <EOL> assert '<STR_LIT>' in ( <EOL> str ( form . city ) <EOL> ) </s>
94,709
from wtforms import Form <EOL> from wtforms_test import FormTestCase <EOL> from tests import MultiDict <EOL> from wtforms_components import SelectMultipleField <EOL> class Dummy ( object ) : <EOL> fruits = [ ] <EOL> class TestSelectMultipleField ( FormTestCase ) : <EOL> choices = ( <EOL> ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) ) , <EOL> ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) ) <EOL> ) <EOL> def init_form ( self , ** kwargs ) : <EOL> class TestForm ( Form ) :
fruits = SelectMultipleField ( ** kwargs )
-5,134,724,509,133,651,000
from wtforms import Form <EOL> from wtforms_test import FormTestCase <EOL> from tests import MultiDict <EOL> from wtforms_components import SelectMultipleField <EOL> class Dummy ( object ) : <EOL> fruits = [ ] <EOL> class TestSelectMultipleField ( FormTestCase ) : <EOL> choices = ( <EOL> ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) ) , <EOL> ( '<STR_LIT>' , ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ) ) <EOL> ) <EOL> def init_form ( self , ** kwargs ) : <EOL> class TestForm ( Form ) : <EOL> fruits = SelectMultipleField ( ** kwargs ) <EOL> self . form_class = TestForm <EOL> return self . form_class <EOL> def test_understands_nested_choices ( self ) : <EOL> form_class = self . init_form ( choices = self . choices ) <EOL> form = form_class ( <EOL> MultiDict ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ] ) <EOL> ) <EOL> form . validate ( ) <EOL> assert form . errors == { <EOL> '<STR_LIT>' : [ u"<STR_LIT>" ] <EOL> } <EOL> def test_option_selected ( self ) : <EOL> form_class = self . init_form ( choices = self . choices ) <EOL> obj = Dummy ( ) <EOL> obj . fruits = [ '<STR_LIT>' ] <EOL> form = form_class ( <EOL> obj = obj <EOL> ) <EOL> assert ( <EOL> '<STR_LIT>' in <EOL> str ( form . fruits ) <EOL> ) </s>
94,710
<s> from wtforms import Form , IntegerField
6,283,029,391,515,920,000
from wtforms import Form , IntegerField <EOL> class MyForm ( Form ) : <EOL> a = IntegerField ( ) <EOL> def test_errors ( ) : <EOL> form = MyForm . from_json ( { '<STR_LIT:a>' : '<STR_LIT>' } ) <EOL> form . validate ( ) <EOL> assert form . errors == { '<STR_LIT:a>' : [ u'<STR_LIT>' ] } </s>
94,711
from ramp . store import Storable
class Result ( Storable ) :
5,803,022,259,312,432,000
from ramp . store import Storable <EOL> class Result ( Storable ) : <EOL> def __init__ ( self , x_train , x_test , y_train , y_test , y_preds , model_def , fitted_model , original_data ) : <EOL> """<STR_LIT>""" <EOL> self . x_train = x_train <EOL> self . x_test = x_test <EOL> self . y_train = y_train <EOL> self . y_test = y_test <EOL> self . y_preds = y_preds <EOL> self . model_def = model_def <EOL> self . fitted_model = fitted_model <EOL> self . original_data = original_data </s>
94,712
"""<STR_LIT>""" <EOL> import logging <EOL> import numpy as np <EOL> from phy . cluster . manual . gui_component import ManualClustering <EOL> from phy . cluster . manual . views import ( WaveformView , <EOL> TraceView , <EOL> FeatureView , <EOL> CorrelogramView , <EOL> select_traces , <EOL> extract_spikes , <EOL> ) <EOL> from phy . gui import GUI <EOL> from phy . io . array import _get_data_lim , concat_per_cluster <EOL> from phy . io import Context , Selector <EOL> from phy . plot . transform import _normalize <EOL> from phy . stats . clusters import ( mean , <EOL> get_waveform_amplitude ,
)
3,799,262,579,800,155,600
"""<STR_LIT>""" <EOL> import logging <EOL> import numpy as np <EOL> from phy . cluster . manual . gui_component import ManualClustering <EOL> from phy . cluster . manual . views import ( WaveformView , <EOL> TraceView , <EOL> FeatureView , <EOL> CorrelogramView , <EOL> select_traces , <EOL> extract_spikes , <EOL> ) <EOL> from phy . gui import GUI <EOL> from phy . io . array import _get_data_lim , concat_per_cluster <EOL> from phy . io import Context , Selector <EOL> from phy . plot . transform import _normalize <EOL> from phy . stats . clusters import ( mean , <EOL> get_waveform_amplitude , <EOL> ) <EOL> from phy . utils import Bunch , load_master_config , get_plugin , EventEmitter <EOL> logger = logging . getLogger ( __name__ ) <EOL> class Controller ( EventEmitter ) : <EOL> """<STR_LIT>""" <EOL> gui_name = '<STR_LIT>' <EOL> n_spikes_waveforms = <NUM_LIT:100> <EOL> n_spikes_waveforms_lim = <NUM_LIT:100> <EOL> n_spikes_masks = <NUM_LIT:100> <EOL> n_spikes_features = <NUM_LIT> <EOL> n_spikes_background_features = <NUM_LIT> <EOL> n_spikes_features_lim = <NUM_LIT:100> <EOL> n_spikes_close_clusters = <NUM_LIT:100> <EOL> def __init__ ( self , plugins = None , config_dir = None ) : <EOL> super ( Controller , self ) . __init__ ( ) <EOL> self . config_dir = config_dir <EOL> self . _init_data ( ) <EOL> self . _init_selector ( ) <EOL> self . _init_context ( ) <EOL> self . _set_manual_clustering ( ) <EOL> self . n_spikes = len ( self . spike_times ) <EOL> plugins = plugins or [ ] <EOL> config = load_master_config ( config_dir = config_dir ) <EOL> c = config . get ( self . gui_name or self . __class__ . __name__ ) <EOL> default_plugins = c . plugins if c else [ ] <EOL> if len ( default_plugins ) : <EOL> plugins = default_plugins + plugins <EOL> for plugin in plugins : <EOL> get_plugin ( plugin ) ( ) . attach_to_controller ( self ) <EOL> self . emit ( '<STR_LIT>' ) <EOL> def _init_data ( self ) : <EOL> self . cache_dir = None <EOL> self . spike_times = None <EOL> self . spike_clusters = None <EOL> self . cluster_groups = None <EOL> self . cluster_ids = None <EOL> self . channel_positions = None <EOL> self . n_samples_waveforms = None <EOL> self . n_channels = None <EOL> self . n_features_per_channel = None <EOL> self . sample_rate = None <EOL> self . duration = None <EOL> self . all_masks = None <EOL> self . all_waveforms = None <EOL> self . all_features = None <EOL> self . all_traces = None <EOL> def _init_selector ( self ) : <EOL> self . selector = Selector ( self . spikes_per_cluster ) <EOL> def _init_context ( self ) : <EOL> assert self . cache_dir <EOL> self . context = Context ( self . cache_dir ) <EOL> ctx = self . context <EOL> self . get_masks = concat_per_cluster ( ctx . cache ( self . get_masks ) ) <EOL> self . get_features = concat_per_cluster ( ctx . cache ( self . get_features ) ) <EOL> self . get_waveforms = concat_per_cluster ( ctx . cache ( self . get_waveforms ) ) <EOL> self . get_background_features = ctx . cache ( self . get_background_features ) <EOL> self . get_mean_masks = ctx . memcache ( self . get_mean_masks ) <EOL> self . get_mean_features = ctx . memcache ( self . get_mean_features ) <EOL> self . get_mean_waveforms = ctx . memcache ( self . get_mean_waveforms ) <EOL> self . get_waveforms_amplitude = ctx . memcache ( <EOL> self . get_waveforms_amplitude ) <EOL> self . get_waveform_lims = ctx . memcache ( self . get_waveform_lims ) <EOL> self . get_feature_lim = ctx . memcache ( self . get_feature_lim ) <EOL> self . get_close_clusters = ctx . memcache ( <EOL> self . 
get_close_clusters ) <EOL> self . get_probe_depth = ctx . memcache ( <EOL> self . get_probe_depth ) <EOL> self . spikes_per_cluster = ctx . memcache ( self . spikes_per_cluster ) <EOL> def _set_manual_clustering ( self ) : <EOL> new_cluster_id = self . context . load ( '<STR_LIT>' ) . get ( '<STR_LIT>' , None ) <EOL> mc = ManualClustering ( self . spike_clusters , <EOL> self . spikes_per_cluster , <EOL> best_channel = self . get_best_channel , <EOL> similarity = self . similarity , <EOL> cluster_groups = self . cluster_groups , <EOL> new_cluster_id = new_cluster_id , <EOL> ) <EOL> @ mc . clustering . connect <EOL> def on_cluster ( up ) : <EOL> new_cluster_id = mc . clustering . new_cluster_id ( ) <EOL> logger . debug ( "<STR_LIT>" , new_cluster_id ) <EOL> self . context . save ( '<STR_LIT>' , <EOL> dict ( new_cluster_id = new_cluster_id ) ) <EOL> self . manual_clustering = mc <EOL> mc . add_column ( self . get_probe_depth , name = '<STR_LIT>' ) <EOL> def _select_spikes ( self , cluster_id , n_max = None ) : <EOL> assert isinstance ( cluster_id , int ) <EOL> assert cluster_id >= <NUM_LIT:0> <EOL> return self . selector . select_spikes ( [ cluster_id ] , n_max ) <EOL> def _select_data ( self , cluster_id , arr , n_max = None ) : <EOL> spike_ids = self . _select_spikes ( cluster_id , n_max ) <EOL> b = Bunch ( ) <EOL> b . data = arr [ spike_ids ] <EOL> b . spike_ids = spike_ids <EOL> b . spike_clusters = self . spike_clusters [ spike_ids ] <EOL> b . masks = self . all_masks [ spike_ids ] <EOL> return b <EOL> def _data_lim ( self , arr , n_max = None ) : <EOL> return _get_data_lim ( arr , n_spikes = n_max ) <EOL> def get_masks ( self , cluster_id ) : <EOL> return self . _select_data ( cluster_id , <EOL> self . all_masks , <EOL> self . n_spikes_masks , <EOL> ) <EOL> def get_mean_masks ( self , cluster_id ) : <EOL> return mean ( self . get_masks ( cluster_id ) . data ) <EOL> def get_waveforms ( self , cluster_id ) : <EOL> data = self . _select_data ( cluster_id , <EOL> self . all_waveforms , <EOL> self . n_spikes_waveforms , <EOL> ) <EOL> m , M = self . get_waveform_lims ( ) <EOL> data . data = _normalize ( data . data , m , M ) <EOL> return [ data ] <EOL> def get_mean_waveforms ( self , cluster_id ) : <EOL> return mean ( self . get_waveforms ( cluster_id ) [ <NUM_LIT:0> ] . data ) <EOL> def get_waveform_lims ( self ) : <EOL> n_spikes = self . n_spikes_waveforms_lim <EOL> arr = self . all_waveforms <EOL> n = arr . shape [ <NUM_LIT:0> ] <EOL> k = max ( <NUM_LIT:1> , n // n_spikes ) <EOL> arr = arr [ : : k ] <EOL> masks = self . all_masks [ : : k ] . copy ( ) <EOL> arr = arr * masks [ : , np . newaxis , : ] <EOL> m = np . percentile ( arr , <NUM_LIT> ) <EOL> M = np . percentile ( arr , <NUM_LIT> ) <EOL> return m , M <EOL> def get_waveforms_amplitude ( self , cluster_id ) : <EOL> mm = self . get_mean_masks ( cluster_id ) <EOL> mw = self . get_mean_waveforms ( cluster_id ) <EOL> assert mw . ndim == <NUM_LIT:2> <EOL> return get_waveform_amplitude ( mm , mw ) <EOL> def get_features ( self , cluster_id , load_all = False ) : <EOL> data = self . _select_data ( cluster_id , <EOL> self . all_features , <EOL> ( self . n_spikes_features <EOL> if not load_all else None ) , <EOL> ) <EOL> m = self . get_feature_lim ( ) <EOL> data . data = _normalize ( data . data . copy ( ) , - m , + m ) <EOL> return data <EOL> def get_background_features ( self ) : <EOL> k = max ( <NUM_LIT:1> , int ( self . n_spikes // self . 
n_spikes_background_features ) ) <EOL> spike_ids = slice ( None , None , k ) <EOL> b = Bunch ( ) <EOL> b . data = self . all_features [ spike_ids ] <EOL> b . spike_ids = spike_ids <EOL> b . spike_clusters = self . spike_clusters [ spike_ids ] <EOL> b . masks = self . all_masks [ spike_ids ] <EOL> return b <EOL> def get_mean_features ( self , cluster_id ) : <EOL> return mean ( self . get_features ( cluster_id ) . data ) <EOL> def get_feature_lim ( self ) : <EOL> return self . _data_lim ( self . all_features , self . n_spikes_features_lim ) <EOL> def get_traces ( self , interval ) : <EOL> tr = select_traces ( self . all_traces , interval , <EOL> sample_rate = self . sample_rate , <EOL> ) <EOL> return [ Bunch ( traces = tr ) ] <EOL> def get_spikes_traces ( self , interval , traces ) : <EOL> traces = traces [ <NUM_LIT:0> ] . traces <EOL> b = extract_spikes ( traces , interval , <EOL> sample_rate = self . sample_rate , <EOL> spike_times = self . spike_times , <EOL> spike_clusters = self . spike_clusters , <EOL> cluster_groups = self . cluster_groups , <EOL> all_masks = self . all_masks , <EOL> n_samples_waveforms = self . n_samples_waveforms , <EOL> ) <EOL> return b <EOL> def get_best_channel ( self , cluster_id ) : <EOL> wa = self . get_waveforms_amplitude ( cluster_id ) <EOL> return int ( wa . argmax ( ) ) <EOL> def get_best_channels ( self , cluster_ids ) : <EOL> channels = [ self . get_best_channel ( cluster_id ) <EOL> for cluster_id in cluster_ids ] <EOL> return list ( set ( channels ) ) <EOL> def get_channels_by_amplitude ( self , cluster_ids ) : <EOL> wa = self . get_waveforms_amplitude ( cluster_ids [ <NUM_LIT:0> ] ) <EOL> return np . argsort ( wa ) [ : : - <NUM_LIT:1> ] . tolist ( ) <EOL> def get_best_channel_position ( self , cluster_id ) : <EOL> cha = self . get_best_channel ( cluster_id ) <EOL> return tuple ( self . channel_positions [ cha ] ) <EOL> def get_probe_depth ( self , cluster_id ) : <EOL> return self . get_best_channel_position ( cluster_id ) [ <NUM_LIT:1> ] <EOL> def get_close_clusters ( self , cluster_id ) : <EOL> assert isinstance ( cluster_id , int ) <EOL> pos0 = self . get_best_channel_position ( cluster_id ) <EOL> n = len ( pos0 ) <EOL> assert n in ( <NUM_LIT:2> , <NUM_LIT:3> ) <EOL> clusters = self . cluster_ids <EOL> pos = np . vstack ( [ self . get_best_channel_position ( int ( clu ) ) <EOL> for clu in clusters ] ) <EOL> assert pos . shape == ( len ( clusters ) , n ) <EOL> dist = ( pos - pos0 ) ** <NUM_LIT:2> <EOL> assert dist . shape == ( len ( clusters ) , n ) <EOL> dist = np . sum ( dist , axis = <NUM_LIT:1> ) ** <NUM_LIT> <EOL> assert dist . shape == ( len ( clusters ) , ) <EOL> ind = np . argsort ( dist ) <EOL> ind = ind [ : self . n_spikes_close_clusters ] <EOL> return [ ( int ( clusters [ i ] ) , float ( dist [ i ] ) ) for i in ind ] <EOL> def spikes_per_cluster ( self , cluster_id ) : <EOL> return np . nonzero ( self . spike_clusters == cluster_id ) [ <NUM_LIT:0> ] <EOL> def _add_view ( self , gui , view ) : <EOL> view . attach ( gui ) <EOL> self . emit ( '<STR_LIT>' , gui , view ) <EOL> return view <EOL> def add_waveform_view ( self , gui ) : <EOL> v = WaveformView ( waveforms = self . get_waveforms , <EOL> channel_positions = self . channel_positions , <EOL> best_channels = self . get_best_channels , <EOL> ) <EOL> return self . _add_view ( gui , v ) <EOL> def add_trace_view ( self , gui ) : <EOL> v = TraceView ( traces = self . get_traces , <EOL> spikes = self . get_spikes_traces , <EOL> sample_rate = self . sample_rate , <EOL> duration = self . 
duration , <EOL> n_channels = self . n_channels , <EOL> ) <EOL> return self . _add_view ( gui , v ) <EOL> def add_feature_view ( self , gui ) : <EOL> v = FeatureView ( features = self . get_features , <EOL> background_features = self . get_background_features ( ) , <EOL> spike_times = self . spike_times , <EOL> n_channels = self . n_channels , <EOL> n_features_per_channel = self . n_features_per_channel , <EOL> feature_lim = self . get_feature_lim ( ) , <EOL> best_channels = self . get_channels_by_amplitude , <EOL> ) <EOL> return self . _add_view ( gui , v ) <EOL> def add_correlogram_view ( self , gui ) : <EOL> v = CorrelogramView ( spike_times = self . spike_times , <EOL> spike_clusters = self . spike_clusters , <EOL> sample_rate = self . sample_rate , <EOL> ) <EOL> return self . _add_view ( gui , v ) <EOL> def similarity ( self , cluster_id ) : <EOL> return self . get_close_clusters ( cluster_id ) <EOL> def create_gui ( self , name = None , <EOL> subtitle = None , <EOL> config_dir = None , <EOL> add_default_views = True , <EOL> ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> config_dir = config_dir or self . config_dir <EOL> gui = GUI ( name = name or self . gui_name , <EOL> subtitle = subtitle , <EOL> config_dir = config_dir , ** kwargs ) <EOL> gui . controller = self <EOL> self . manual_clustering . attach ( gui ) <EOL> if add_default_views : <EOL> self . add_correlogram_view ( gui ) <EOL> if self . all_features is not None : <EOL> self . add_feature_view ( gui ) <EOL> if self . all_waveforms is not None : <EOL> self . add_waveform_view ( gui ) <EOL> if self . all_traces is not None : <EOL> self . add_trace_view ( gui ) <EOL> self . emit ( '<STR_LIT>' , gui ) <EOL> return gui </s>
94,713
"""<STR_LIT>""" <EOL> import numpy as np <EOL> import numpy . random as nr <EOL> def artificial_waveforms ( n_spikes = None , n_samples = None , n_channels = None ) : <EOL> return <NUM_LIT> * nr . normal ( size = ( n_spikes , n_samples , n_channels ) ) <EOL> def artificial_features ( * args ) : <EOL> return <NUM_LIT> * nr . normal ( size = args ) <EOL> def artificial_masks ( n_spikes = None , n_channels = None ) : <EOL> masks = nr . uniform ( size = ( n_spikes , n_channels ) )
masks [ masks < <NUM_LIT> ] = <NUM_LIT:0>
9,198,196,416,510,620,000
"""<STR_LIT>""" <EOL> import numpy as np <EOL> import numpy . random as nr <EOL> def artificial_waveforms ( n_spikes = None , n_samples = None , n_channels = None ) : <EOL> return <NUM_LIT> * nr . normal ( size = ( n_spikes , n_samples , n_channels ) ) <EOL> def artificial_features ( * args ) : <EOL> return <NUM_LIT> * nr . normal ( size = args ) <EOL> def artificial_masks ( n_spikes = None , n_channels = None ) : <EOL> masks = nr . uniform ( size = ( n_spikes , n_channels ) ) <EOL> masks [ masks < <NUM_LIT> ] = <NUM_LIT:0> <EOL> return masks <EOL> def artificial_traces ( n_samples , n_channels ) : <EOL> return <NUM_LIT> * nr . normal ( size = ( n_samples , n_channels ) ) <EOL> def artificial_spike_clusters ( n_spikes , n_clusters , low = <NUM_LIT:0> ) : <EOL> return nr . randint ( size = n_spikes , low = low , high = max ( <NUM_LIT:1> , n_clusters ) ) <EOL> def artificial_spike_samples ( n_spikes , max_isi = <NUM_LIT:50> ) : <EOL> return np . cumsum ( nr . randint ( low = <NUM_LIT:0> , high = max_isi , size = n_spikes ) ) <EOL> def artificial_correlograms ( n_clusters , n_samples ) : <EOL> return nr . uniform ( size = ( n_clusters , n_clusters , n_samples ) ) </s>
94,714
"""<STR_LIT>""" <EOL> import numpy as np <EOL> from scipy import signal <EOL> from . . utils . _types import _as_array <EOL> def bandpass_filter ( rate = None , low = None , high = None , order = None ) : <EOL> """<STR_LIT>""" <EOL> assert low < high <EOL> assert order >= <NUM_LIT:1> <EOL> return signal . butter ( order , <EOL> ( low / ( rate / <NUM_LIT> ) , high / ( rate / <NUM_LIT> ) ) , <EOL> '<STR_LIT>' ) <EOL> def apply_filter ( x , filter = None , axis = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> x = _as_array ( x ) <EOL> if x . shape [ axis ] == <NUM_LIT:0> : <EOL> return x <EOL> b , a = filter <EOL> return signal . filtfilt ( b , a , x , axis = axis ) <EOL> class Filter ( object ) : <EOL> """<STR_LIT>"""
def __init__ ( self , rate = None , low = None , high = None , order = None ) :
-6,352,829,267,671,747,000
"""<STR_LIT>""" <EOL> import numpy as np <EOL> from scipy import signal <EOL> from . . utils . _types import _as_array <EOL> def bandpass_filter ( rate = None , low = None , high = None , order = None ) : <EOL> """<STR_LIT>""" <EOL> assert low < high <EOL> assert order >= <NUM_LIT:1> <EOL> return signal . butter ( order , <EOL> ( low / ( rate / <NUM_LIT> ) , high / ( rate / <NUM_LIT> ) ) , <EOL> '<STR_LIT>' ) <EOL> def apply_filter ( x , filter = None , axis = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> x = _as_array ( x ) <EOL> if x . shape [ axis ] == <NUM_LIT:0> : <EOL> return x <EOL> b , a = filter <EOL> return signal . filtfilt ( b , a , x , axis = axis ) <EOL> class Filter ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , rate = None , low = None , high = None , order = None ) : <EOL> self . _filter = bandpass_filter ( rate = rate , <EOL> low = low , <EOL> high = high , <EOL> order = order , <EOL> ) <EOL> def __call__ ( self , data ) : <EOL> return apply_filter ( data , filter = self . _filter ) <EOL> class Whitening ( object ) : <EOL> """<STR_LIT>""" <EOL> def fit ( self , x , fudge = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> assert x . ndim == <NUM_LIT:2> <EOL> ns , nc = x . shape <EOL> x_cov = np . cov ( x , rowvar = <NUM_LIT:0> ) <EOL> assert x_cov . shape == ( nc , nc ) <EOL> d , v = np . linalg . eigh ( x_cov ) <EOL> d = np . diag ( <NUM_LIT:1.> / np . sqrt ( d + fudge ) ) <EOL> w = np . dot ( np . dot ( v , d ) , v . T ) <EOL> self . _matrix = w <EOL> return w <EOL> def transform ( self , x ) : <EOL> """<STR_LIT>""" <EOL> return np . dot ( x , self . _matrix ) </s>
94,715
"""<STR_LIT>""" <EOL> import contextlib <EOL> import hashlib <EOL> import os <EOL> import time <EOL> from dnsclient import exceptions <EOL> from dnsclient import utils <EOL> def getid ( obj ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return obj . id <EOL> except AttributeError : <EOL> return obj <EOL> class Manager ( utils . HookableMixin ) : <EOL> """<STR_LIT>""" <EOL> resource_class = None <EOL> def __init__ ( self , api ) : <EOL> self . api = api <EOL> def _list ( self , url , response_key , obj_class = None , body = None ) : <EOL> if body : <EOL> _resp , body = self . api . client . post ( url , body = body ) <EOL> else : <EOL> _resp , body = self . api . client . get ( url ) <EOL> if obj_class is None : <EOL> obj_class = self . resource_class <EOL> data = body [ response_key ] <EOL> if isinstance ( data , dict ) : <EOL> try : <EOL> data = data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> pass <EOL> with self . completion_cache ( '<STR_LIT>' , obj_class , mode = "<STR_LIT:w>" ) : <EOL> with self . completion_cache ( '<STR_LIT>' , obj_class , mode = "<STR_LIT:w>" ) : <EOL> return [ obj_class ( self , res , loaded = True )
for res in data if res ]
-6,023,504,353,624,509,000
"""<STR_LIT>""" <EOL> import contextlib <EOL> import hashlib <EOL> import os <EOL> import time <EOL> from dnsclient import exceptions <EOL> from dnsclient import utils <EOL> def getid ( obj ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return obj . id <EOL> except AttributeError : <EOL> return obj <EOL> class Manager ( utils . HookableMixin ) : <EOL> """<STR_LIT>""" <EOL> resource_class = None <EOL> def __init__ ( self , api ) : <EOL> self . api = api <EOL> def _list ( self , url , response_key , obj_class = None , body = None ) : <EOL> if body : <EOL> _resp , body = self . api . client . post ( url , body = body ) <EOL> else : <EOL> _resp , body = self . api . client . get ( url ) <EOL> if obj_class is None : <EOL> obj_class = self . resource_class <EOL> data = body [ response_key ] <EOL> if isinstance ( data , dict ) : <EOL> try : <EOL> data = data [ '<STR_LIT>' ] <EOL> except KeyError : <EOL> pass <EOL> with self . completion_cache ( '<STR_LIT>' , obj_class , mode = "<STR_LIT:w>" ) : <EOL> with self . completion_cache ( '<STR_LIT>' , obj_class , mode = "<STR_LIT:w>" ) : <EOL> return [ obj_class ( self , res , loaded = True ) <EOL> for res in data if res ] <EOL> @ contextlib . contextmanager <EOL> def completion_cache ( self , cache_type , obj_class , mode ) : <EOL> """<STR_LIT>""" <EOL> base_dir = utils . env ( '<STR_LIT>' , <EOL> default = "<STR_LIT>" ) <EOL> username = utils . env ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> url = utils . env ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> uniqifier = hashlib . md5 ( username + url ) . hexdigest ( ) <EOL> cache_dir = os . path . expanduser ( os . path . join ( base_dir , uniqifier ) ) <EOL> try : <EOL> os . makedirs ( cache_dir , <NUM_LIT:0> <NUM_LIT> ) <EOL> except OSError : <EOL> pass <EOL> resource = obj_class . __name__ . lower ( ) <EOL> filename = "<STR_LIT>" % ( resource , cache_type . replace ( '<STR_LIT:_>' , '<STR_LIT:->' ) ) <EOL> path = os . path . join ( cache_dir , filename ) <EOL> cache_attr = "<STR_LIT>" % cache_type <EOL> try : <EOL> setattr ( self , cache_attr , open ( path , mode ) ) <EOL> except IOError : <EOL> pass <EOL> try : <EOL> yield <EOL> finally : <EOL> cache = getattr ( self , cache_attr , None ) <EOL> if cache : <EOL> cache . close ( ) <EOL> delattr ( self , cache_attr ) <EOL> def write_to_completion_cache ( self , cache_type , val ) : <EOL> cache = getattr ( self , "<STR_LIT>" % cache_type , None ) <EOL> if cache : <EOL> cache . write ( "<STR_LIT>" % val ) <EOL> def _get_async ( self , url , response_key = None ) : <EOL> async_resp = self . _get ( url , "<STR_LIT>" ) <EOL> n = <NUM_LIT:0> <EOL> while async_resp . status == "<STR_LIT>" : <EOL> time . sleep ( <NUM_LIT:1> ) <EOL> async_resp = self . _get ( "<STR_LIT>" % async_resp . jobId , "<STR_LIT>" ) <EOL> n = n + <NUM_LIT:1> <EOL> if n > <NUM_LIT:10> : <EOL> break <EOL> if async_resp . status == "<STR_LIT>" : <EOL> return self . _get ( "<STR_LIT>" % async_resp . jobId , "<STR_LIT:error>" ) <EOL> else : <EOL> return self . _get ( "<STR_LIT>" % async_resp . jobId , "<STR_LIT>" ) <EOL> def _get ( self , url , response_key = None ) : <EOL> _resp , body = self . api . client . get ( url ) <EOL> if response_key : <EOL> return self . resource_class ( self , body [ response_key ] , loaded = True ) <EOL> else : <EOL> return self . resource_class ( self , body , loaded = True ) <EOL> def _create_async ( self , url , body , response_key , return_raw = False , ** kwargs ) : <EOL> async_resp = self . _create ( url , body , response_key , return_raw , ** kwargs ) <EOL> time . 
sleep ( <NUM_LIT:1> ) <EOL> return self . _get_async ( "<STR_LIT>" % async_resp . jobId , "<STR_LIT>" ) <EOL> def _create ( self , url , body , response_key , return_raw = False , ** kwargs ) : <EOL> self . run_hooks ( '<STR_LIT>' , body , ** kwargs ) <EOL> _resp , body = self . api . client . post ( url , body = body ) <EOL> if return_raw : <EOL> return body [ response_key ] <EOL> if response_key : <EOL> with self . completion_cache ( '<STR_LIT>' , self . resource_class , mode = "<STR_LIT:a>" ) : <EOL> with self . completion_cache ( '<STR_LIT>' , self . resource_class , mode = "<STR_LIT:a>" ) : <EOL> return self . resource_class ( self , body [ response_key ] ) <EOL> else : <EOL> return self . resource_class ( self , body , loaded = True ) <EOL> def _delete ( self , url ) : <EOL> _resp , _body = self . api . client . delete ( url ) <EOL> def _update ( self , url , body , ** kwargs ) : <EOL> self . run_hooks ( '<STR_LIT>' , body , ** kwargs ) <EOL> _resp , body = self . api . client . put ( url , body = body ) <EOL> return body <EOL> class ManagerWithFind ( Manager ) : <EOL> """<STR_LIT>""" <EOL> def find ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> matches = self . findall ( ** kwargs ) <EOL> num_matches = len ( matches ) <EOL> if num_matches == <NUM_LIT:0> : <EOL> msg = "<STR_LIT>" % ( self . resource_class . __name__ , kwargs ) <EOL> raise exceptions . NotFound ( <NUM_LIT> , msg ) <EOL> elif num_matches > <NUM_LIT:1> : <EOL> raise exceptions . NoUniqueMatch <EOL> else : <EOL> return matches [ <NUM_LIT:0> ] <EOL> def findall ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> found = [ ] <EOL> searches = kwargs . items ( ) <EOL> for obj in self . list ( ) : <EOL> try : <EOL> if all ( getattr ( obj , attr ) == value <EOL> for ( attr , value ) in searches ) : <EOL> found . append ( obj ) <EOL> except AttributeError : <EOL> continue <EOL> return found <EOL> def list ( self ) : <EOL> raise NotImplementedError <EOL> class Resource ( object ) : <EOL> """<STR_LIT>""" <EOL> HUMAN_ID = False <EOL> NAME_ATTR = '<STR_LIT:name>' <EOL> def __init__ ( self , manager , info , loaded = False ) : <EOL> self . manager = manager <EOL> self . _info = info <EOL> self . _add_details ( info ) <EOL> self . _loaded = loaded <EOL> if '<STR_LIT:id>' in self . __dict__ and len ( str ( self . id ) ) == <NUM_LIT> : <EOL> self . manager . write_to_completion_cache ( '<STR_LIT>' , self . id ) <EOL> human_id = self . human_id <EOL> if human_id : <EOL> self . manager . write_to_completion_cache ( '<STR_LIT>' , human_id ) <EOL> @ property <EOL> def human_id ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . NAME_ATTR in self . __dict__ and self . HUMAN_ID : <EOL> return utils . slugify ( getattr ( self , self . NAME_ATTR ) ) <EOL> return None <EOL> def _add_details ( self , info ) : <EOL> for ( k , v ) in info . iteritems ( ) : <EOL> try : <EOL> setattr ( self , k , v ) <EOL> except AttributeError : <EOL> pass <EOL> def __getattr__ ( self , k ) : <EOL> if k not in self . __dict__ : <EOL> if not self . is_loaded ( ) : <EOL> self . get ( ) <EOL> return self . __getattr__ ( k ) <EOL> raise AttributeError ( k ) <EOL> else : <EOL> return self . __dict__ [ k ] <EOL> def __repr__ ( self ) : <EOL> reprkeys = sorted ( k for k in self . __dict__ . keys ( ) if k [ <NUM_LIT:0> ] != '<STR_LIT:_>' and <EOL> k != '<STR_LIT>' ) <EOL> info = "<STR_LIT:U+002CU+0020>" . join ( "<STR_LIT>" % ( k , getattr ( self , k ) ) for k in reprkeys ) <EOL> return "<STR_LIT>" % ( self . __class__ . 
__name__ , info ) <EOL> def get ( self ) : <EOL> self . set_loaded ( True ) <EOL> if not hasattr ( self . manager , '<STR_LIT>' ) : <EOL> return <EOL> new = self . manager . get ( self . id ) <EOL> if new : <EOL> self . _add_details ( new . _info ) <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return False <EOL> if hasattr ( self , '<STR_LIT:id>' ) and hasattr ( other , '<STR_LIT:id>' ) : <EOL> return self . id == other . id <EOL> return self . _info == other . _info <EOL> def is_loaded ( self ) : <EOL> return self . _loaded <EOL> def set_loaded ( self , val ) : <EOL> self . _loaded = val </s>
94,716
import unittest , os , sys <EOL> from custom_test_case import CustomTestCase <EOL> PROJECT_ROOT = os . path . dirname ( __file__ ) <EOL> sys . path . append ( os . path . join ( PROJECT_ROOT , "<STR_LIT:..>" ) ) <EOL> from CodeConverter import CodeConverter <EOL> class TestBasic ( unittest . TestCase , CustomTestCase ) : <EOL> def test_initialize ( self ) : <EOL> self . assertSentence ( CodeConverter ( '<STR_LIT:foo>' ) . s , '<STR_LIT:foo>' )
if __name__ == '<STR_LIT:__main__>' :
-7,883,791,503,704,931,000
import unittest , os , sys <EOL> from custom_test_case import CustomTestCase <EOL> PROJECT_ROOT = os . path . dirname ( __file__ ) <EOL> sys . path . append ( os . path . join ( PROJECT_ROOT , "<STR_LIT:..>" ) ) <EOL> from CodeConverter import CodeConverter <EOL> class TestBasic ( unittest . TestCase , CustomTestCase ) : <EOL> def test_initialize ( self ) : <EOL> self . assertSentence ( CodeConverter ( '<STR_LIT:foo>' ) . s , '<STR_LIT:foo>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . main ( ) </s>
94,717
from django . conf . urls . defaults import * <EOL> from projects . models import Project <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , { <EOL> '<STR_LIT>' : Project . objects . all ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT:url>' : '<STR_LIT>' } , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) ,
) </s>
-5,393,078,284,083,090,000
from django . conf . urls . defaults import * <EOL> from projects . models import Project <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , { <EOL> '<STR_LIT>' : Project . objects . all ( ) , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , { '<STR_LIT:url>' : '<STR_LIT>' } , '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , '<STR_LIT>' , name = '<STR_LIT>' ) , <EOL> ) </s>
94,718
import datetime , time <EOL> from django . db . models . fields import DateTimeField <EOL> from django . http import Http404 <EOL> from django . views . generic import list_detail <EOL> def archive_index ( request , queryset , date_field , allow_future = False , ** kwargs ) : <EOL> if not allow_future : <EOL> queryset = queryset . filter ( ** { '<STR_LIT>' % date_field : datetime . datetime . now ( ) } ) <EOL> queryset = queryset . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_list ( request , queryset = queryset , ** kwargs )
def archive_year ( request , year , queryset , date_field , allow_future = False , ** kwargs ) :
4,221,766,212,304,064,500
import datetime , time <EOL> from django . db . models . fields import DateTimeField <EOL> from django . http import Http404 <EOL> from django . views . generic import list_detail <EOL> def archive_index ( request , queryset , date_field , allow_future = False , ** kwargs ) : <EOL> if not allow_future : <EOL> queryset = queryset . filter ( ** { '<STR_LIT>' % date_field : datetime . datetime . now ( ) } ) <EOL> queryset = queryset . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_list ( request , queryset = queryset , ** kwargs ) <EOL> def archive_year ( request , year , queryset , date_field , allow_future = False , ** kwargs ) : <EOL> now = datetime . datetime . now ( ) <EOL> lookup_kwargs = { '<STR_LIT>' % date_field : year } <EOL> if int ( year ) >= now . year and not allow_future : <EOL> lookup_kwargs [ '<STR_LIT>' % date_field ] = now <EOL> queryset = queryset . filter ( ** lookup_kwargs ) . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_list ( request , queryset = queryset , ** kwargs ) <EOL> def archive_month ( request , year , month , queryset , date_field , allow_future = False , month_format = '<STR_LIT>' , ** kwargs ) : <EOL> try : <EOL> date = datetime . date ( * time . strptime ( year + month , '<STR_LIT>' + month_format ) [ : <NUM_LIT:3> ] ) <EOL> except ValueError : <EOL> raise Http404 <EOL> now = datetime . datetime . now ( ) <EOL> first_day = date . replace ( day = <NUM_LIT:1> ) <EOL> if first_day . month == <NUM_LIT:12> : <EOL> last_day = first_day . replace ( year = first_day . year + <NUM_LIT:1> , month = <NUM_LIT:1> ) <EOL> else : <EOL> last_day = first_day . replace ( month = first_day . month + <NUM_LIT:1> ) <EOL> lookup_kwargs = { <EOL> '<STR_LIT>' % date_field : first_day , <EOL> '<STR_LIT>' % date_field : last_day , <EOL> } <EOL> if last_day >= now . date ( ) and not allow_future : <EOL> lookup_kwargs [ '<STR_LIT>' % date_field ] = now <EOL> queryset = queryset . filter ( ** lookup_kwargs ) . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_list ( request , queryset = queryset , ** kwargs ) <EOL> def archive_week ( request , year , week , queryset , date_field , allow_future = False , ** kwargs ) : <EOL> try : <EOL> date = datetime . date ( * time . strptime ( year + '<STR_LIT>' + week , '<STR_LIT>' ) [ : <NUM_LIT:3> ] ) <EOL> except ValueError : <EOL> raise Http404 <EOL> now = datetime . datetime . now ( ) <EOL> first_day = date <EOL> last_day = date + datetime . timedelta ( days = <NUM_LIT:7> ) <EOL> lookup_kwargs = { <EOL> '<STR_LIT>' % date_field : first_day , <EOL> '<STR_LIT>' % date_field : last_day , <EOL> } <EOL> if last_day >= now . date ( ) and not allow_future : <EOL> lookup_kwargs [ '<STR_LIT>' % date_field ] = now <EOL> queryset = queryset . filter ( ** lookup_kwargs ) . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_list ( request , queryset = queryset , ** kwargs ) <EOL> def archive_day ( request , year , month , day , queryset , date_field , allow_future = False , month_format = '<STR_LIT>' , day_format = '<STR_LIT>' , ** kwargs ) : <EOL> try : <EOL> date = datetime . date ( * time . strptime ( year + month + day , '<STR_LIT>' + month_format + day_format ) [ : <NUM_LIT:3> ] ) <EOL> except ValueError : <EOL> raise Http404 <EOL> model = queryset . model <EOL> now = datetime . datetime . now ( ) <EOL> if isinstance ( model . _meta . get_field ( date_field ) , DateTimeField ) : <EOL> lookup_kwargs = { '<STR_LIT>' % date_field : ( datetime . 
datetime . combine ( date , datetime . time . min ) , datetime . datetime . combine ( date , datetime . time . max ) ) } <EOL> else : <EOL> lookup_kwargs = { date_field : date } <EOL> if date >= now . date ( ) and not allow_future : <EOL> lookup_kwargs [ '<STR_LIT>' % date_field ] = now <EOL> queryset = queryset . filter ( ** lookup_kwargs ) . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_list ( request , queryset = queryset , ** kwargs ) <EOL> def archive_today ( request , ** kwargs ) : <EOL> today = datetime . date . today ( ) <EOL> kwargs . update ( { <EOL> '<STR_LIT>' : str ( today . year ) , <EOL> '<STR_LIT>' : today . strftime ( '<STR_LIT>' ) . lower ( ) , <EOL> '<STR_LIT>' : str ( today . day ) , <EOL> } ) <EOL> return archive_day ( request , ** kwargs ) <EOL> def object_detail ( request , year , month , day , queryset , date_field , allow_future = False , month_format = '<STR_LIT>' , day_format = '<STR_LIT>' , ** kwargs ) : <EOL> try : <EOL> date = datetime . date ( * time . strptime ( year + month + day , '<STR_LIT>' + month_format + day_format ) [ : <NUM_LIT:3> ] ) <EOL> except ValueError : <EOL> raise Http404 <EOL> model = queryset . model <EOL> now = datetime . datetime . now ( ) <EOL> if isinstance ( model . _meta . get_field ( date_field ) , DateTimeField ) : <EOL> lookup_kwargs = { '<STR_LIT>' % date_field : ( datetime . datetime . combine ( date , datetime . time . min ) , datetime . datetime . combine ( date , datetime . time . max ) ) } <EOL> else : <EOL> lookup_kwargs = { date_field : date } <EOL> if date >= now . date ( ) and not allow_future : <EOL> lookup_kwargs [ '<STR_LIT>' % date_field ] = now <EOL> queryset = queryset . filter ( ** lookup_kwargs ) . order_by ( '<STR_LIT:->' + date_field ) <EOL> return list_detail . object_detail ( request , queryset = queryset , ** kwargs ) </s>
94,719
import base64 <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordereddict import OrderedDict <EOL> import json <EOL> import re <EOL> import sys <EOL> import time <EOL> try : <EOL> from urllib . parse import unquote <EOL> except ImportError : <EOL> from urllib2 import unquote <EOL> try : <EOL> from urllib . parse import urljoin , urlparse , urlunparse <EOL> except ImportError : <EOL> from urlparse import urljoin , urlparse , urlunparse <EOL> from Crypto . PublicKey import RSA <EOL> from Crypto . Signature import PKCS1_v1_5 <EOL> from Crypto . Hash import SHA <EOL> from django . conf import settings <EOL> from django . utils . http import urlquote <EOL> from cuddlybuddly . storage . s3 import CallingFormat <EOL> from cuddlybuddly . storage . s3 . lib import QueryStringAuthGenerator <EOL> from cuddlybuddly . storage . s3 . middleware import request_is_secure <EOL> def create_signed_url ( file , expires = <NUM_LIT> , secure = False , private_cloudfront = False , expires_at = None ) : <EOL> if not private_cloudfront : <EOL> generator = QueryStringAuthGenerator ( <EOL> settings . AWS_ACCESS_KEY_ID , <EOL> settings . AWS_SECRET_ACCESS_KEY , <EOL> calling_format = getattr ( settings , '<STR_LIT>' , <EOL> CallingFormat . SUBDOMAIN ) , <EOL> is_secure = secure ) <EOL> generator . set_expires_in ( expires ) <EOL> return generator . generate_url (
'<STR_LIT:GET>' ,
-322,663,899,923,411,900
import base64 <EOL> try : <EOL> from collections import OrderedDict <EOL> except ImportError : <EOL> from ordereddict import OrderedDict <EOL> import json <EOL> import re <EOL> import sys <EOL> import time <EOL> try : <EOL> from urllib . parse import unquote <EOL> except ImportError : <EOL> from urllib2 import unquote <EOL> try : <EOL> from urllib . parse import urljoin , urlparse , urlunparse <EOL> except ImportError : <EOL> from urlparse import urljoin , urlparse , urlunparse <EOL> from Crypto . PublicKey import RSA <EOL> from Crypto . Signature import PKCS1_v1_5 <EOL> from Crypto . Hash import SHA <EOL> from django . conf import settings <EOL> from django . utils . http import urlquote <EOL> from cuddlybuddly . storage . s3 import CallingFormat <EOL> from cuddlybuddly . storage . s3 . lib import QueryStringAuthGenerator <EOL> from cuddlybuddly . storage . s3 . middleware import request_is_secure <EOL> def create_signed_url ( file , expires = <NUM_LIT> , secure = False , private_cloudfront = False , expires_at = None ) : <EOL> if not private_cloudfront : <EOL> generator = QueryStringAuthGenerator ( <EOL> settings . AWS_ACCESS_KEY_ID , <EOL> settings . AWS_SECRET_ACCESS_KEY , <EOL> calling_format = getattr ( settings , '<STR_LIT>' , <EOL> CallingFormat . SUBDOMAIN ) , <EOL> is_secure = secure ) <EOL> generator . set_expires_in ( expires ) <EOL> return generator . generate_url ( <EOL> '<STR_LIT:GET>' , <EOL> settings . AWS_STORAGE_BUCKET_NAME , <EOL> file <EOL> ) <EOL> url = settings . MEDIA_URL <EOL> if not isinstance ( settings . MEDIA_URL , CloudFrontURLs ) : <EOL> url = CloudFrontURLs ( settings . MEDIA_URL ) <EOL> url = url . get_url ( file , force_https = True if secure else False ) <EOL> if url . startswith ( '<STR_LIT>' ) : <EOL> if secure : <EOL> url = '<STR_LIT>' + url <EOL> else : <EOL> url = '<STR_LIT>' + url <EOL> if expires_at is None : <EOL> expires = int ( time . time ( ) + expires ) <EOL> else : <EOL> expires = expires_at <EOL> policy = OrderedDict ( ) <EOL> policy [ '<STR_LIT>' ] = url <EOL> policy [ '<STR_LIT>' ] = { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : expires <EOL> } <EOL> } <EOL> policy = { <EOL> '<STR_LIT>' : [ <EOL> policy <EOL> ] <EOL> } <EOL> policy = json . dumps ( policy , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT::>' ) ) <EOL> key = settings . CUDDLYBUDDLY_STORAGE_S3_KEY_PAIR <EOL> dig = SHA . new ( ) <EOL> dig . update ( policy . encode ( '<STR_LIT:utf-8>' ) ) <EOL> sig = PKCS1_v1_5 . new ( RSA . importKey ( key [ <NUM_LIT:1> ] ) ) <EOL> sig = sig . sign ( dig ) <EOL> sig = base64 . b64encode ( sig ) . decode ( '<STR_LIT:utf-8>' ) <EOL> sig = sig . replace ( '<STR_LIT:+>' , '<STR_LIT:->' ) . replace ( '<STR_LIT:=>' , '<STR_LIT:_>' ) . replace ( '<STR_LIT:/>' , '<STR_LIT>' ) <EOL> return '<STR_LIT>' % ( <EOL> url , <EOL> '<STR_LIT:&>' if '<STR_LIT:?>' in url else '<STR_LIT:?>' , <EOL> expires , <EOL> sig , <EOL> key [ <NUM_LIT:0> ] <EOL> ) <EOL> try : <EOL> extend = unicode <EOL> except NameError : <EOL> extend = str <EOL> class CloudFrontURLs ( extend ) : <EOL> def __new__ ( cls , default , patterns = { } , https = None ) : <EOL> obj = super ( CloudFrontURLs , cls ) . __new__ ( cls , default ) <EOL> obj . _patterns = [ ] <EOL> for key , value in patterns . items ( ) : <EOL> obj . _patterns . append ( ( re . compile ( key ) , '<STR_LIT:%s>' % value ) ) <EOL> obj . _https = https <EOL> return obj <EOL> def match ( self , name ) : <EOL> for pattern in self . _patterns : <EOL> if pattern [ <NUM_LIT:0> ] . 
match ( name ) : <EOL> return pattern [ <NUM_LIT:1> ] <EOL> return self <EOL> def https ( self ) : <EOL> if self . _https is not None : <EOL> return '<STR_LIT:%s>' % self . _https <EOL> return self . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def get_url ( self , path , force_https = False ) : <EOL> if force_https or request_is_secure ( ) : <EOL> url = self . https ( ) <EOL> else : <EOL> url = self . match ( path ) . replace ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> url = list ( urlparse ( urljoin ( url , path ) ) ) <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> url [ <NUM_LIT:2> ] = url [ <NUM_LIT:2> ] . encode ( '<STR_LIT:utf-8>' ) <EOL> url [ <NUM_LIT:2> ] = urlquote ( unquote ( url [ <NUM_LIT:2> ] ) ) <EOL> return urlunparse ( url ) </s>
94,720
from redwind import app , db , util <EOL> from redwind . models import Post <EOL> import itertools <EOL> db . engine . execute ( '<STR_LIT>' ) <EOL> db . engine . execute ( '<STR_LIT>' ) <EOL> for post in Post . query . all ( ) : <EOL> print ( post . historic_path )
if not post . slug :
-5,169,084,320,565,843,000
from redwind import app , db , util <EOL> from redwind . models import Post <EOL> import itertools <EOL> db . engine . execute ( '<STR_LIT>' ) <EOL> db . engine . execute ( '<STR_LIT>' ) <EOL> for post in Post . query . all ( ) : <EOL> print ( post . historic_path ) <EOL> if not post . slug : <EOL> post . slug = post . generate_slug ( ) <EOL> post . path = '<STR_LIT>' . format ( post . published . year , <EOL> post . published . month , <EOL> post . slug ) <EOL> db . session . commit ( ) </s>
94,721
from redwind import hooks , util <EOL> from redwind . tasks import get_queue , async_app_context <EOL> from redwind . models import Post , Context , Setting , get_settings <EOL> from redwind . extensions import db <EOL> from flask . ext . login import login_required <EOL> from flask import request , redirect , url_for , make_response , render_template <EOL> from flask import flash , abort , has_request_context , Blueprint , current_app <EOL> from flask import jsonify <EOL> import brevity <EOL> import collections <EOL> import requests <EOL> import re <EOL> import json <EOL> import datetime <EOL> from tempfile import mkstemp <EOL> from urllib . parse import urljoin <EOL> from bs4 import BeautifulSoup <EOL> from requests_oauthlib import OAuth1Session , OAuth1 <EOL> twitter = Blueprint ( '<STR_LIT>' , __name__ ) <EOL> REQUEST_TOKEN_URL = '<STR_LIT>' <EOL> AUTHORIZE_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_URL = '<STR_LIT>' <EOL> URL_CHAR_LENGTH = <NUM_LIT> <EOL> MEDIA_CHAR_LENGTH = <NUM_LIT> <EOL> TWEET_CHAR_LENGTH = <NUM_LIT> <EOL> PERMALINK_RE = util . TWITTER_RE <EOL> USERMENTION_RE = util . AT_USERNAME_RE <EOL> def register ( app ) : <EOL> app . register_blueprint ( twitter ) <EOL> hooks . register ( '<STR_LIT>' , create_context ) <EOL> hooks . register ( '<STR_LIT>' , send_to_twitter ) <EOL> @ twitter . context_processor <EOL> def inject_settings_variable ( ) : <EOL> return { <EOL> '<STR_LIT>' : get_settings ( ) <EOL> } <EOL> @ twitter . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def authorize_twitter ( ) : <EOL> """<STR_LIT>""" <EOL> callback_url = url_for ( '<STR_LIT>' , _external = True ) <EOL> try : <EOL> oauth = OAuth1Session ( <EOL> client_key = get_settings ( ) . twitter_api_key , <EOL> client_secret = get_settings ( ) . twitter_api_secret , <EOL> callback_uri = callback_url ) <EOL> oauth . fetch_request_token ( REQUEST_TOKEN_URL ) <EOL> return redirect ( oauth . authorization_url ( AUTHORIZE_URL ) ) <EOL> except requests . RequestException as e : <EOL> return make_response ( str ( e ) ) <EOL> @ twitter . route ( '<STR_LIT>' ) <EOL> def twitter_callback ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> oauth = OAuth1Session ( <EOL> client_key = get_settings ( ) . twitter_api_key , <EOL> client_secret = get_settings ( ) . twitter_api_secret ) <EOL> oauth . parse_authorization_response ( request . url ) <EOL> response = oauth . fetch_access_token ( ACCESS_TOKEN_URL ) <EOL> access_token = response . get ( '<STR_LIT>' ) <EOL> access_token_secret = response . get ( '<STR_LIT>' ) <EOL> Setting . query . get ( '<STR_LIT>' ) . value = access_token <EOL> Setting . query . get ( '<STR_LIT>' ) . value = access_token_secret <EOL> db . session . commit ( ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> except requests . RequestException as e : <EOL> return make_response ( str ( e ) ) <EOL> def collect_images ( post ) : <EOL> """<STR_LIT>""" <EOL> if type ( post ) == Post and post . attachments : <EOL> for photo in post . attachments : <EOL> yield photo . url <EOL> else : <EOL> if type ( post ) == Post : <EOL> html = util . markdown_filter ( <EOL> post . content , img_path = post . get_image_path ( ) ) <EOL> else : <EOL> html = post . content <EOL> if html : <EOL> soup = BeautifulSoup ( html ) <EOL> for img in soup . find_all ( '<STR_LIT>' ) : <EOL> if not img . find_parent ( class_ = '<STR_LIT>' ) : <EOL> src = img . get ( '<STR_LIT:src>' ) <EOL> if src : <EOL> yield urljoin ( get_settings ( ) . 
site_url , src ) <EOL> def send_to_twitter ( post , args ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in args . getlist ( '<STR_LIT>' ) : <EOL> if not is_twitter_authorized ( ) : <EOL> return False , '<STR_LIT>' <EOL> try : <EOL> current_app . logger . debug ( '<STR_LIT>' , post . id ) <EOL> get_queue ( ) . enqueue ( <EOL> do_send_to_twitter , post . id , current_app . config ) <EOL> return True , '<STR_LIT>' <EOL> except Exception as e : <EOL> current_app . logger . exception ( '<STR_LIT>' ) <EOL> return ( False , '<STR_LIT>' <EOL> . format ( e ) ) <EOL> def do_send_to_twitter ( post_id , app_config ) : <EOL> with async_app_context ( app_config ) : <EOL> current_app . logger . debug ( '<STR_LIT>' , post_id ) <EOL> post = Post . load_by_id ( post_id ) <EOL> in_reply_to , repost_of , like_of = util . posse_post_discovery ( <EOL> post , PERMALINK_RE ) <EOL> if post . in_reply_to and not in_reply_to : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , post . in_reply_to ) <EOL> return None <EOL> elif post . repost_of and not repost_of : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , post . repost_of ) <EOL> preview , img_url = guess_raw_share_tweet_content ( post ) <EOL> elif post . like_of and not like_of : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , post . like_of ) <EOL> return None <EOL> else : <EOL> preview , img_url = guess_tweet_content ( post , in_reply_to ) <EOL> response = do_tweet ( post_id , preview , img_url , in_reply_to , repost_of , <EOL> like_of ) <EOL> return str ( response ) <EOL> @ twitter . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def share_on_twitter ( ) : <EOL> if request . method == '<STR_LIT:GET>' : <EOL> id = request . args . get ( '<STR_LIT:id>' ) <EOL> if not id : <EOL> abort ( <NUM_LIT> ) <EOL> post = Post . load_by_id ( id ) <EOL> if not post : <EOL> abort ( <NUM_LIT> ) <EOL> current_app . logger . debug ( '<STR_LIT>' , post ) <EOL> in_reply_to , repost_of , like_of = util . posse_post_discovery ( post , PERMALINK_RE ) <EOL> current_app . logger . debug ( <EOL> '<STR_LIT>' , <EOL> in_reply_to , repost_of , like_of ) <EOL> if post . repost_of and not repost_of : <EOL> preview , _ = guess_raw_share_tweet_content ( post ) <EOL> imgs = list ( collect_images ( post . repost_contexts [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> preview , _ = guess_tweet_content ( post , in_reply_to ) <EOL> imgs = list ( collect_images ( post ) ) <EOL> current_app . logger . debug ( '<STR_LIT>' , imgs ) <EOL> return render_template ( '<STR_LIT>' , <EOL> preview = preview , <EOL> post = post , in_reply_to = in_reply_to , <EOL> repost_of = repost_of , like_of = like_of , imgs = imgs ) <EOL> post_id = request . form . get ( '<STR_LIT>' ) <EOL> preview = request . form . get ( '<STR_LIT>' ) <EOL> img_url = request . form . get ( '<STR_LIT>' ) <EOL> in_reply_to = request . form . get ( '<STR_LIT>' ) <EOL> repost_of = request . form . get ( '<STR_LIT>' ) <EOL> like_of = request . form . get ( '<STR_LIT>' ) <EOL> return do_tweet ( post_id , preview , img_url , in_reply_to , repost_of , <EOL> like_of ) <EOL> def format_markdown_as_tweet ( data ) : <EOL> def to_twitter_handle ( contact , nick ) : <EOL> """<STR_LIT>""" <EOL> if contact : <EOL> for url in contact . social : <EOL> m = util . TWITTER_PROFILE_RE . match ( url ) <EOL> if m : <EOL> nick = m . group ( <NUM_LIT:1> ) <EOL> break <EOL> return '<STR_LIT:@>' + nick <EOL> html = util . markdown_filter ( data ) <EOL> html = util . 
process_people ( to_twitter_handle , html ) <EOL> return util . format_as_text ( html ) <EOL> def get_auth ( ) : <EOL> return OAuth1 ( <EOL> client_key = get_settings ( ) . twitter_api_key , <EOL> client_secret = get_settings ( ) . twitter_api_secret , <EOL> resource_owner_key = get_settings ( ) . twitter_oauth_token , <EOL> resource_owner_secret = get_settings ( ) . twitter_oauth_token_secret ) <EOL> def repost_preview ( url ) : <EOL> if not is_twitter_authorized ( ) : <EOL> current_app . logger . warn ( '<STR_LIT>' ) <EOL> return <EOL> match = PERMALINK_RE . match ( url ) <EOL> if match : <EOL> tweet_id = match . group ( <NUM_LIT:2> ) <EOL> embed_response = requests . get ( <EOL> '<STR_LIT>' , <EOL> params = { '<STR_LIT:id>' : tweet_id } , <EOL> auth = get_auth ( ) ) <EOL> if embed_response . status_code // <NUM_LIT:2> == <NUM_LIT:100> : <EOL> return embed_response . json ( ) . get ( '<STR_LIT:html>' ) <EOL> def create_context ( url ) : <EOL> match = PERMALINK_RE . match ( url ) <EOL> if not match : <EOL> current_app . logger . debug ( '<STR_LIT>' , url ) <EOL> return <EOL> current_app . logger . debug ( '<STR_LIT>' ) <EOL> tweet_id = match . group ( <NUM_LIT:2> ) <EOL> status_response = requests . get ( <EOL> '<STR_LIT>' . format ( tweet_id ) , <EOL> auth = get_auth ( ) ) <EOL> if status_response . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , status_response , <EOL> status_response . content ) <EOL> return <EOL> status_data = status_response . json ( ) <EOL> current_app . logger . debug ( '<STR_LIT>' , status_data ) <EOL> pub_date = datetime . datetime . strptime ( status_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> real_name = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT:name>' ] <EOL> screen_name = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] <EOL> author_name = real_name <EOL> author_url = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT:url>' ] <EOL> if author_url : <EOL> author_url = expand_link ( author_url ) <EOL> else : <EOL> author_url = '<STR_LIT>' . format ( screen_name ) <EOL> author_image = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] <EOL> tweet_content = expand_links ( status_data ) <EOL> tweet_plain = expand_links ( status_data , as_html = False ) <EOL> author_image = re . sub ( '<STR_LIT>' , '<STR_LIT>' , author_image ) <EOL> for media in status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) : <EOL> if media . get ( '<STR_LIT:type>' ) == '<STR_LIT>' : <EOL> media_url = media . get ( '<STR_LIT>' ) <EOL> if media_url : <EOL> tweet_content += '<STR_LIT>' . format ( media_url ) <EOL> tweet_plain += media_url <EOL> context = Context ( ) <EOL> context . url = context . permalink = url <EOL> context . author_name = author_name <EOL> context . author_image = author_image <EOL> context . author_url = author_url <EOL> context . published = pub_date <EOL> context . title = None <EOL> context . content = tweet_content <EOL> context . content_plain = tweet_plain <EOL> return context <EOL> def expand_links ( status_data , as_html = True ) : <EOL> text = status_data [ '<STR_LIT:text>' ] <EOL> urls = status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) <EOL> for um in status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) : <EOL> um = um . copy ( ) <EOL> um . update ( { <EOL> '<STR_LIT>' : '<STR_LIT:@>' + um . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( <EOL> um . get ( '<STR_LIT>' ) ) , <EOL> } ) <EOL> urls . 
append ( um ) <EOL> urls = sorted ( <EOL> urls , key = lambda url_data : url_data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , reverse = True ) <EOL> for url_data in urls : <EOL> current_app . logger . debug ( '<STR_LIT>' , url_data ) <EOL> start_idx = url_data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> end_idx = url_data [ '<STR_LIT>' ] [ <NUM_LIT:1> ] <EOL> if as_html : <EOL> link_text = '<STR_LIT>' . format ( <EOL> url_data [ '<STR_LIT>' ] , url_data [ '<STR_LIT>' ] ) <EOL> else : <EOL> link_text = url_data [ '<STR_LIT>' ] <EOL> text = text [ : start_idx ] + link_text + text [ end_idx : ] <EOL> return text <EOL> def expand_link ( url ) : <EOL> current_app . logger . debug ( '<STR_LIT>' , url ) <EOL> try : <EOL> r = requests . head ( url , allow_redirects = True , timeout = <NUM_LIT:30> ) <EOL> if r and r . status_code // <NUM_LIT:100> == <NUM_LIT:2> : <EOL> current_app . logger . debug ( '<STR_LIT>' , r . url ) <EOL> url = r . url <EOL> except Exception as e : <EOL> current_app . logger . debug ( '<STR_LIT>' , url , e ) <EOL> return url <EOL> def get_authed_twitter_account ( ) : <EOL> """<STR_LIT>""" <EOL> if not is_twitter_authorized ( ) : <EOL> return None <EOL> user_response = requests . get ( <EOL> '<STR_LIT>' , <EOL> auth = get_auth ( ) ) <EOL> if user_response . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> current_app . logger . warn ( '<STR_LIT>' , <EOL> user_response , user_response . content ) <EOL> return None <EOL> current_app . logger . debug ( '<STR_LIT>' , user_response ) <EOL> return user_response . json ( ) <EOL> def prepend_twitter_name ( name , tweet , exclude_me = True ) : <EOL> my_screen_name = get_authed_twitter_account ( ) . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) <EOL> if ( ( exclude_me and name . lower ( ) == my_screen_name ) <EOL> or ( name . lower ( ) in tweet . lower ( ) ) ) : <EOL> return tweet <EOL> return '<STR_LIT>' . format ( name , tweet ) <EOL> def guess_tweet_content ( post , in_reply_to ) : <EOL> """<STR_LIT>""" <EOL> preview = '<STR_LIT>' <EOL> if post . title : <EOL> preview += post . title <EOL> elif post . post_type == '<STR_LIT>' and post . venue : <EOL> preview = '<STR_LIT>' + post . venue . name <EOL> text_content = format_markdown_as_tweet ( post . content ) <EOL> if text_content : <EOL> preview += ( '<STR_LIT>' if preview else '<STR_LIT>' ) + text_content <EOL> if in_reply_to : <EOL> reply_match = PERMALINK_RE . match ( in_reply_to ) <EOL> if reply_match : <EOL> status_response = requests . get ( <EOL> '<STR_LIT>' . format ( <EOL> reply_match . group ( <NUM_LIT:2> ) ) , <EOL> auth = get_auth ( ) ) <EOL> if status_response . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , <EOL> status_response , status_response . content ) <EOL> status_data = { } <EOL> else : <EOL> status_data = status_response . json ( ) <EOL> mentioned_users = [ ] <EOL> my_screen_name = get_authed_twitter_account ( ) . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> for user in status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) : <EOL> screen_name = user . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if screen_name and screen_name . lower ( ) != my_screen_name . lower ( ) : <EOL> mentioned_users . append ( screen_name ) <EOL> mentioned_users . append ( reply_match . group ( <NUM_LIT:1> ) ) <EOL> current_app . logger . debug ( '<STR_LIT>' , mentioned_users ) <EOL> mention_match = USERMENTION_RE . 
findall ( preview ) <EOL> for match in mention_match : <EOL> if match [ <NUM_LIT:0> ] in mentioned_users : <EOL> break <EOL> else :
for user in mentioned_users :
1,915,345,133,592,148,700
from redwind import hooks , util <EOL> from redwind . tasks import get_queue , async_app_context <EOL> from redwind . models import Post , Context , Setting , get_settings <EOL> from redwind . extensions import db <EOL> from flask . ext . login import login_required <EOL> from flask import request , redirect , url_for , make_response , render_template <EOL> from flask import flash , abort , has_request_context , Blueprint , current_app <EOL> from flask import jsonify <EOL> import brevity <EOL> import collections <EOL> import requests <EOL> import re <EOL> import json <EOL> import datetime <EOL> from tempfile import mkstemp <EOL> from urllib . parse import urljoin <EOL> from bs4 import BeautifulSoup <EOL> from requests_oauthlib import OAuth1Session , OAuth1 <EOL> twitter = Blueprint ( '<STR_LIT>' , __name__ ) <EOL> REQUEST_TOKEN_URL = '<STR_LIT>' <EOL> AUTHORIZE_URL = '<STR_LIT>' <EOL> ACCESS_TOKEN_URL = '<STR_LIT>' <EOL> URL_CHAR_LENGTH = <NUM_LIT> <EOL> MEDIA_CHAR_LENGTH = <NUM_LIT> <EOL> TWEET_CHAR_LENGTH = <NUM_LIT> <EOL> PERMALINK_RE = util . TWITTER_RE <EOL> USERMENTION_RE = util . AT_USERNAME_RE <EOL> def register ( app ) : <EOL> app . register_blueprint ( twitter ) <EOL> hooks . register ( '<STR_LIT>' , create_context ) <EOL> hooks . register ( '<STR_LIT>' , send_to_twitter ) <EOL> @ twitter . context_processor <EOL> def inject_settings_variable ( ) : <EOL> return { <EOL> '<STR_LIT>' : get_settings ( ) <EOL> } <EOL> @ twitter . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def authorize_twitter ( ) : <EOL> """<STR_LIT>""" <EOL> callback_url = url_for ( '<STR_LIT>' , _external = True ) <EOL> try : <EOL> oauth = OAuth1Session ( <EOL> client_key = get_settings ( ) . twitter_api_key , <EOL> client_secret = get_settings ( ) . twitter_api_secret , <EOL> callback_uri = callback_url ) <EOL> oauth . fetch_request_token ( REQUEST_TOKEN_URL ) <EOL> return redirect ( oauth . authorization_url ( AUTHORIZE_URL ) ) <EOL> except requests . RequestException as e : <EOL> return make_response ( str ( e ) ) <EOL> @ twitter . route ( '<STR_LIT>' ) <EOL> def twitter_callback ( ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> oauth = OAuth1Session ( <EOL> client_key = get_settings ( ) . twitter_api_key , <EOL> client_secret = get_settings ( ) . twitter_api_secret ) <EOL> oauth . parse_authorization_response ( request . url ) <EOL> response = oauth . fetch_access_token ( ACCESS_TOKEN_URL ) <EOL> access_token = response . get ( '<STR_LIT>' ) <EOL> access_token_secret = response . get ( '<STR_LIT>' ) <EOL> Setting . query . get ( '<STR_LIT>' ) . value = access_token <EOL> Setting . query . get ( '<STR_LIT>' ) . value = access_token_secret <EOL> db . session . commit ( ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> except requests . RequestException as e : <EOL> return make_response ( str ( e ) ) <EOL> def collect_images ( post ) : <EOL> """<STR_LIT>""" <EOL> if type ( post ) == Post and post . attachments : <EOL> for photo in post . attachments : <EOL> yield photo . url <EOL> else : <EOL> if type ( post ) == Post : <EOL> html = util . markdown_filter ( <EOL> post . content , img_path = post . get_image_path ( ) ) <EOL> else : <EOL> html = post . content <EOL> if html : <EOL> soup = BeautifulSoup ( html ) <EOL> for img in soup . find_all ( '<STR_LIT>' ) : <EOL> if not img . find_parent ( class_ = '<STR_LIT>' ) : <EOL> src = img . get ( '<STR_LIT:src>' ) <EOL> if src : <EOL> yield urljoin ( get_settings ( ) . 
site_url , src ) <EOL> def send_to_twitter ( post , args ) : <EOL> """<STR_LIT>""" <EOL> if '<STR_LIT>' in args . getlist ( '<STR_LIT>' ) : <EOL> if not is_twitter_authorized ( ) : <EOL> return False , '<STR_LIT>' <EOL> try : <EOL> current_app . logger . debug ( '<STR_LIT>' , post . id ) <EOL> get_queue ( ) . enqueue ( <EOL> do_send_to_twitter , post . id , current_app . config ) <EOL> return True , '<STR_LIT>' <EOL> except Exception as e : <EOL> current_app . logger . exception ( '<STR_LIT>' ) <EOL> return ( False , '<STR_LIT>' <EOL> . format ( e ) ) <EOL> def do_send_to_twitter ( post_id , app_config ) : <EOL> with async_app_context ( app_config ) : <EOL> current_app . logger . debug ( '<STR_LIT>' , post_id ) <EOL> post = Post . load_by_id ( post_id ) <EOL> in_reply_to , repost_of , like_of = util . posse_post_discovery ( <EOL> post , PERMALINK_RE ) <EOL> if post . in_reply_to and not in_reply_to : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , post . in_reply_to ) <EOL> return None <EOL> elif post . repost_of and not repost_of : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , post . repost_of ) <EOL> preview , img_url = guess_raw_share_tweet_content ( post ) <EOL> elif post . like_of and not like_of : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , post . like_of ) <EOL> return None <EOL> else : <EOL> preview , img_url = guess_tweet_content ( post , in_reply_to ) <EOL> response = do_tweet ( post_id , preview , img_url , in_reply_to , repost_of , <EOL> like_of ) <EOL> return str ( response ) <EOL> @ twitter . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def share_on_twitter ( ) : <EOL> if request . method == '<STR_LIT:GET>' : <EOL> id = request . args . get ( '<STR_LIT:id>' ) <EOL> if not id : <EOL> abort ( <NUM_LIT> ) <EOL> post = Post . load_by_id ( id ) <EOL> if not post : <EOL> abort ( <NUM_LIT> ) <EOL> current_app . logger . debug ( '<STR_LIT>' , post ) <EOL> in_reply_to , repost_of , like_of = util . posse_post_discovery ( post , PERMALINK_RE ) <EOL> current_app . logger . debug ( <EOL> '<STR_LIT>' , <EOL> in_reply_to , repost_of , like_of ) <EOL> if post . repost_of and not repost_of : <EOL> preview , _ = guess_raw_share_tweet_content ( post ) <EOL> imgs = list ( collect_images ( post . repost_contexts [ <NUM_LIT:0> ] ) ) <EOL> else : <EOL> preview , _ = guess_tweet_content ( post , in_reply_to ) <EOL> imgs = list ( collect_images ( post ) ) <EOL> current_app . logger . debug ( '<STR_LIT>' , imgs ) <EOL> return render_template ( '<STR_LIT>' , <EOL> preview = preview , <EOL> post = post , in_reply_to = in_reply_to , <EOL> repost_of = repost_of , like_of = like_of , imgs = imgs ) <EOL> post_id = request . form . get ( '<STR_LIT>' ) <EOL> preview = request . form . get ( '<STR_LIT>' ) <EOL> img_url = request . form . get ( '<STR_LIT>' ) <EOL> in_reply_to = request . form . get ( '<STR_LIT>' ) <EOL> repost_of = request . form . get ( '<STR_LIT>' ) <EOL> like_of = request . form . get ( '<STR_LIT>' ) <EOL> return do_tweet ( post_id , preview , img_url , in_reply_to , repost_of , <EOL> like_of ) <EOL> def format_markdown_as_tweet ( data ) : <EOL> def to_twitter_handle ( contact , nick ) : <EOL> """<STR_LIT>""" <EOL> if contact : <EOL> for url in contact . social : <EOL> m = util . TWITTER_PROFILE_RE . match ( url ) <EOL> if m : <EOL> nick = m . group ( <NUM_LIT:1> ) <EOL> break <EOL> return '<STR_LIT:@>' + nick <EOL> html = util . markdown_filter ( data ) <EOL> html = util . 
process_people ( to_twitter_handle , html ) <EOL> return util . format_as_text ( html ) <EOL> def get_auth ( ) : <EOL> return OAuth1 ( <EOL> client_key = get_settings ( ) . twitter_api_key , <EOL> client_secret = get_settings ( ) . twitter_api_secret , <EOL> resource_owner_key = get_settings ( ) . twitter_oauth_token , <EOL> resource_owner_secret = get_settings ( ) . twitter_oauth_token_secret ) <EOL> def repost_preview ( url ) : <EOL> if not is_twitter_authorized ( ) : <EOL> current_app . logger . warn ( '<STR_LIT>' ) <EOL> return <EOL> match = PERMALINK_RE . match ( url ) <EOL> if match : <EOL> tweet_id = match . group ( <NUM_LIT:2> ) <EOL> embed_response = requests . get ( <EOL> '<STR_LIT>' , <EOL> params = { '<STR_LIT:id>' : tweet_id } , <EOL> auth = get_auth ( ) ) <EOL> if embed_response . status_code // <NUM_LIT:2> == <NUM_LIT:100> : <EOL> return embed_response . json ( ) . get ( '<STR_LIT:html>' ) <EOL> def create_context ( url ) : <EOL> match = PERMALINK_RE . match ( url ) <EOL> if not match : <EOL> current_app . logger . debug ( '<STR_LIT>' , url ) <EOL> return <EOL> current_app . logger . debug ( '<STR_LIT>' ) <EOL> tweet_id = match . group ( <NUM_LIT:2> ) <EOL> status_response = requests . get ( <EOL> '<STR_LIT>' . format ( tweet_id ) , <EOL> auth = get_auth ( ) ) <EOL> if status_response . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , status_response , <EOL> status_response . content ) <EOL> return <EOL> status_data = status_response . json ( ) <EOL> current_app . logger . debug ( '<STR_LIT>' , status_data ) <EOL> pub_date = datetime . datetime . strptime ( status_data [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' ) <EOL> real_name = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT:name>' ] <EOL> screen_name = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] <EOL> author_name = real_name <EOL> author_url = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT:url>' ] <EOL> if author_url : <EOL> author_url = expand_link ( author_url ) <EOL> else : <EOL> author_url = '<STR_LIT>' . format ( screen_name ) <EOL> author_image = status_data [ '<STR_LIT:user>' ] [ '<STR_LIT>' ] <EOL> tweet_content = expand_links ( status_data ) <EOL> tweet_plain = expand_links ( status_data , as_html = False ) <EOL> author_image = re . sub ( '<STR_LIT>' , '<STR_LIT>' , author_image ) <EOL> for media in status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) : <EOL> if media . get ( '<STR_LIT:type>' ) == '<STR_LIT>' : <EOL> media_url = media . get ( '<STR_LIT>' ) <EOL> if media_url : <EOL> tweet_content += '<STR_LIT>' . format ( media_url ) <EOL> tweet_plain += media_url <EOL> context = Context ( ) <EOL> context . url = context . permalink = url <EOL> context . author_name = author_name <EOL> context . author_image = author_image <EOL> context . author_url = author_url <EOL> context . published = pub_date <EOL> context . title = None <EOL> context . content = tweet_content <EOL> context . content_plain = tweet_plain <EOL> return context <EOL> def expand_links ( status_data , as_html = True ) : <EOL> text = status_data [ '<STR_LIT:text>' ] <EOL> urls = status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) <EOL> for um in status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) : <EOL> um = um . copy ( ) <EOL> um . update ( { <EOL> '<STR_LIT>' : '<STR_LIT:@>' + um . get ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : '<STR_LIT>' . format ( <EOL> um . get ( '<STR_LIT>' ) ) , <EOL> } ) <EOL> urls . 
append ( um ) <EOL> urls = sorted ( <EOL> urls , key = lambda url_data : url_data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] , reverse = True ) <EOL> for url_data in urls : <EOL> current_app . logger . debug ( '<STR_LIT>' , url_data ) <EOL> start_idx = url_data [ '<STR_LIT>' ] [ <NUM_LIT:0> ] <EOL> end_idx = url_data [ '<STR_LIT>' ] [ <NUM_LIT:1> ] <EOL> if as_html : <EOL> link_text = '<STR_LIT>' . format ( <EOL> url_data [ '<STR_LIT>' ] , url_data [ '<STR_LIT>' ] ) <EOL> else : <EOL> link_text = url_data [ '<STR_LIT>' ] <EOL> text = text [ : start_idx ] + link_text + text [ end_idx : ] <EOL> return text <EOL> def expand_link ( url ) : <EOL> current_app . logger . debug ( '<STR_LIT>' , url ) <EOL> try : <EOL> r = requests . head ( url , allow_redirects = True , timeout = <NUM_LIT:30> ) <EOL> if r and r . status_code // <NUM_LIT:100> == <NUM_LIT:2> : <EOL> current_app . logger . debug ( '<STR_LIT>' , r . url ) <EOL> url = r . url <EOL> except Exception as e : <EOL> current_app . logger . debug ( '<STR_LIT>' , url , e ) <EOL> return url <EOL> def get_authed_twitter_account ( ) : <EOL> """<STR_LIT>""" <EOL> if not is_twitter_authorized ( ) : <EOL> return None <EOL> user_response = requests . get ( <EOL> '<STR_LIT>' , <EOL> auth = get_auth ( ) ) <EOL> if user_response . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> current_app . logger . warn ( '<STR_LIT>' , <EOL> user_response , user_response . content ) <EOL> return None <EOL> current_app . logger . debug ( '<STR_LIT>' , user_response ) <EOL> return user_response . json ( ) <EOL> def prepend_twitter_name ( name , tweet , exclude_me = True ) : <EOL> my_screen_name = get_authed_twitter_account ( ) . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) . lower ( ) <EOL> if ( ( exclude_me and name . lower ( ) == my_screen_name ) <EOL> or ( name . lower ( ) in tweet . lower ( ) ) ) : <EOL> return tweet <EOL> return '<STR_LIT>' . format ( name , tweet ) <EOL> def guess_tweet_content ( post , in_reply_to ) : <EOL> """<STR_LIT>""" <EOL> preview = '<STR_LIT>' <EOL> if post . title : <EOL> preview += post . title <EOL> elif post . post_type == '<STR_LIT>' and post . venue : <EOL> preview = '<STR_LIT>' + post . venue . name <EOL> text_content = format_markdown_as_tweet ( post . content ) <EOL> if text_content : <EOL> preview += ( '<STR_LIT>' if preview else '<STR_LIT>' ) + text_content <EOL> if in_reply_to : <EOL> reply_match = PERMALINK_RE . match ( in_reply_to ) <EOL> if reply_match : <EOL> status_response = requests . get ( <EOL> '<STR_LIT>' . format ( <EOL> reply_match . group ( <NUM_LIT:2> ) ) , <EOL> auth = get_auth ( ) ) <EOL> if status_response . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> current_app . logger . warn ( <EOL> '<STR_LIT>' , <EOL> status_response , status_response . content ) <EOL> status_data = { } <EOL> else : <EOL> status_data = status_response . json ( ) <EOL> mentioned_users = [ ] <EOL> my_screen_name = get_authed_twitter_account ( ) . get ( <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> for user in status_data . get ( '<STR_LIT>' , { } ) . get ( '<STR_LIT>' , [ ] ) : <EOL> screen_name = user . get ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> if screen_name and screen_name . lower ( ) != my_screen_name . lower ( ) : <EOL> mentioned_users . append ( screen_name ) <EOL> mentioned_users . append ( reply_match . group ( <NUM_LIT:1> ) ) <EOL> current_app . logger . debug ( '<STR_LIT>' , mentioned_users ) <EOL> mention_match = USERMENTION_RE . 
findall ( preview ) <EOL> for match in mention_match : <EOL> if match [ <NUM_LIT:0> ] in mentioned_users : <EOL> break <EOL> else : <EOL> for user in mentioned_users : <EOL> preview = prepend_twitter_name ( user , preview ) <EOL> target_length = TWEET_CHAR_LENGTH <EOL> img_url = None <EOL> if post . post_type == '<STR_LIT>' and post . attachments : <EOL> img_url = post . attachments [ <NUM_LIT:0> ] . url <EOL> target_length -= MEDIA_CHAR_LENGTH <EOL> preview = brevity . shorten ( preview , permalink = post . permalink , <EOL> target_length = target_length ) <EOL> return preview , img_url <EOL> def guess_raw_share_tweet_content ( post ) : <EOL> preview = '<STR_LIT>' <EOL> if not post . repost_contexts : <EOL> current_app . logger . debug ( <EOL> '<STR_LIT>' , post . id ) <EOL> return None <EOL> context = post . repost_contexts [ <NUM_LIT:0> ] <EOL> if context . title : <EOL> preview += context . title <EOL> if context . author_name : <EOL> preview += '<STR_LIT>' + context . author_name <EOL> elif context . content : <EOL> if context . author_name : <EOL> preview += context . author_name + '<STR_LIT>' <EOL> preview += context . content_plain <EOL> preview += ( '<STR_LIT:U+0020>' if preview else '<STR_LIT>' ) + context . permalink <EOL> target_length = TWEET_CHAR_LENGTH <EOL> imgs = list ( collect_images ( context ) ) <EOL> img_url = imgs [ <NUM_LIT:0> ] if imgs else None <EOL> preview = brevity . shorten ( preview , permalink = context . permalink , <EOL> target_length = target_length ) <EOL> return preview , img_url <EOL> def do_tweet ( post_id , preview , img_url , in_reply_to , <EOL> repost_of , like_of ) : <EOL> try : <EOL> post = Post . load_by_id ( post_id ) <EOL> twitter_url = handle_new_or_edit ( <EOL> post , preview , img_url , in_reply_to , repost_of , like_of ) <EOL> db . session . commit ( ) <EOL> if has_request_context ( ) : <EOL> flash ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> . format ( post . permalink , twitter_url ) ) <EOL> return redirect ( post . permalink ) <EOL> except Exception as e : <EOL> current_app . logger . exception ( '<STR_LIT>' ) <EOL> if has_request_context ( ) : <EOL> flash ( '<STR_LIT>' . format ( e ) ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> def handle_new_or_edit ( post , preview , img , in_reply_to , <EOL> repost_of , like_of ) : <EOL> if not is_twitter_authorized ( ) : <EOL> current_app . logger . warn ( '<STR_LIT>' ) <EOL> return <EOL> is_retweet = False <EOL> if repost_of : <EOL> repost_match = PERMALINK_RE . match ( repost_of ) <EOL> if repost_match : <EOL> is_retweet = True <EOL> tweet_id = repost_match . group ( <NUM_LIT:2> ) <EOL> result = requests . post ( <EOL> '<STR_LIT>' <EOL> . format ( tweet_id ) , <EOL> auth = get_auth ( ) ) <EOL> if result . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> raise RuntimeError ( "<STR_LIT>" . format ( result , <EOL> result . content ) ) <EOL> is_favorite = False <EOL> if like_of : <EOL> like_match = PERMALINK_RE . match ( like_of ) <EOL> if like_match : <EOL> is_favorite = True <EOL> tweet_id = like_match . group ( <NUM_LIT:2> ) <EOL> result = requests . post ( <EOL> '<STR_LIT>' , <EOL> data = { '<STR_LIT:id>' : tweet_id } , <EOL> auth = get_auth ( ) ) <EOL> if result . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> raise RuntimeError ( "<STR_LIT>" . format ( <EOL> result , result . content ) ) <EOL> if not is_retweet and not is_favorite : <EOL> data = { } <EOL> data [ '<STR_LIT:status>' ] = preview <EOL> loc = ( post . venue and post . venue . location ) or post . 
location <EOL> if loc : <EOL> data [ '<STR_LIT>' ] = str ( loc . get ( '<STR_LIT>' ) ) <EOL> data [ '<STR_LIT>' ] = str ( loc . get ( '<STR_LIT>' ) ) <EOL> if in_reply_to : <EOL> reply_match = PERMALINK_RE . match ( in_reply_to ) <EOL> if reply_match : <EOL> data [ '<STR_LIT>' ] = reply_match . group ( <NUM_LIT:2> ) <EOL> current_app . logger . debug ( '<STR_LIT>' , json . dumps ( data ) ) <EOL> if img : <EOL> tempfile = download_image_to_temp ( img ) <EOL> result = requests . post ( <EOL> '<STR_LIT>' , <EOL> data = data , <EOL> files = { '<STR_LIT>' : open ( tempfile , '<STR_LIT:rb>' ) } , <EOL> auth = get_auth ( ) ) <EOL> else : <EOL> result = requests . post ( <EOL> '<STR_LIT>' , <EOL> data = data , auth = get_auth ( ) ) <EOL> if result . status_code // <NUM_LIT:2> != <NUM_LIT:100> : <EOL> raise RuntimeError ( "<STR_LIT>" <EOL> . format ( result . status_code , result . headers , <EOL> result . content ) ) <EOL> result_json = result . json ( ) <EOL> current_app . logger . debug ( "<STR_LIT>" . format ( <EOL> json . dumps ( result_json , indent = True ) ) ) <EOL> twitter_url = '<STR_LIT>' . format ( <EOL> result_json . get ( '<STR_LIT:user>' , { } ) . get ( '<STR_LIT>' ) , <EOL> result_json . get ( '<STR_LIT>' ) ) <EOL> if not is_favorite : <EOL> post . add_syndication_url ( twitter_url ) <EOL> return twitter_url <EOL> def download_image_to_temp ( url ) : <EOL> _ , tempfile = mkstemp ( ) <EOL> util . download_resource ( url , tempfile ) <EOL> return tempfile <EOL> def is_twitter_authorized ( ) : <EOL> return ( get_settings ( ) . twitter_oauth_token <EOL> and get_settings ( ) . twitter_oauth_token_secret ) </s>
94,722
import six <EOL> from . import Utility <EOL> from . import Color <EOL> class Alignment ( object ) : <EOL> def __init__ ( self , horizontal = '<STR_LIT:left>' , vertical = '<STR_LIT>' , rotation = <NUM_LIT:0> , wrap_text = False ) : <EOL> self . _horizontal = horizontal <EOL> self . _vertical = vertical <EOL> self . _rotation = rotation <EOL> self . _wrap_text = wrap_text <EOL> @ property <EOL> def wrap_text ( self ) : <EOL> return self . _wrap_text <EOL> @ wrap_text . setter <EOL> def wrap_text ( self , value ) : <EOL> if value not in ( True , False ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> self . _wrap_text = value <EOL> @ property <EOL> def horizontal ( self ) : <EOL> return self . _horizontal <EOL> @ horizontal . setter <EOL> def horizontal ( self , value ) : <EOL> if value not in ( '<STR_LIT:left>' , '<STR_LIT>' , '<STR_LIT:right>' ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _horizontal = value <EOL> @ property <EOL> def vertical ( self ) : <EOL> return self . _vertical <EOL> @ vertical . setter <EOL> def vertical ( self , value ) : <EOL> if value not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _vertical = value <EOL> @ property <EOL> def rotation ( self ) : <EOL> return self . _rotation <EOL> @ rotation . setter <EOL> def rotation ( self , value ) : <EOL> self . _rotation = ( value % <NUM_LIT> ) <EOL> @ property <EOL> def is_default ( self ) : <EOL> return self . _horizontal == '<STR_LIT:left>' and self . _vertical == '<STR_LIT>' and self . _rotation == <NUM_LIT:0> and not self . _wrap_text <EOL> def get_xml_string ( self ) : <EOL> return "<STR_LIT>" % ( self . _horizontal , self . _vertical , self . _rotation , <NUM_LIT:1> if self . _wrap_text else <NUM_LIT:0> ) <EOL> def __or__ ( self , other ) : <EOL> return self . _binary_operation ( other , Utility . nonboolean_or ) <EOL> def __and__ ( self , other ) : <EOL> return self . _binary_operation ( other , Utility . nonboolean_and ) <EOL> def __xor__ ( self , other ) : <EOL> return self . _binary_operation ( other , Utility . nonboolean_xor ) <EOL> def _binary_operation ( self , other , operation ) : <EOL> return Alignment ( horizontal = operation ( self . _horizontal , other . _horizontal , '<STR_LIT:left>' ) , vertical = operation ( self . _vertical , other . _vertical , '<STR_LIT>' ) , rotation = operation ( self . _rotation , other . _rotation , <NUM_LIT:0> ) , wrap_text = operation ( self . _wrap_text , other . _wrap_text , False ) <EOL> ) <EOL> def __eq__ ( self , other ) : <EOL> if other is None : <EOL> return self . is_default <EOL> elif Utility . YOLO : <EOL> return self . _vertical == other . _vertical and self . _rotation == other . _rotation
else :
2,578,924,359,673,506,000
import six <EOL> from . import Utility <EOL> from . import Color <EOL> class Alignment ( object ) : <EOL> def __init__ ( self , horizontal = '<STR_LIT:left>' , vertical = '<STR_LIT>' , rotation = <NUM_LIT:0> , wrap_text = False ) : <EOL> self . _horizontal = horizontal <EOL> self . _vertical = vertical <EOL> self . _rotation = rotation <EOL> self . _wrap_text = wrap_text <EOL> @ property <EOL> def wrap_text ( self ) : <EOL> return self . _wrap_text <EOL> @ wrap_text . setter <EOL> def wrap_text ( self , value ) : <EOL> if value not in ( True , False ) : <EOL> raise TypeError ( '<STR_LIT>' ) <EOL> self . _wrap_text = value <EOL> @ property <EOL> def horizontal ( self ) : <EOL> return self . _horizontal <EOL> @ horizontal . setter <EOL> def horizontal ( self , value ) : <EOL> if value not in ( '<STR_LIT:left>' , '<STR_LIT>' , '<STR_LIT:right>' ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _horizontal = value <EOL> @ property <EOL> def vertical ( self ) : <EOL> return self . _vertical <EOL> @ vertical . setter <EOL> def vertical ( self , value ) : <EOL> if value not in ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . _vertical = value <EOL> @ property <EOL> def rotation ( self ) : <EOL> return self . _rotation <EOL> @ rotation . setter <EOL> def rotation ( self , value ) : <EOL> self . _rotation = ( value % <NUM_LIT> ) <EOL> @ property <EOL> def is_default ( self ) : <EOL> return self . _horizontal == '<STR_LIT:left>' and self . _vertical == '<STR_LIT>' and self . _rotation == <NUM_LIT:0> and not self . _wrap_text <EOL> def get_xml_string ( self ) : <EOL> return "<STR_LIT>" % ( self . _horizontal , self . _vertical , self . _rotation , <NUM_LIT:1> if self . _wrap_text else <NUM_LIT:0> ) <EOL> def __or__ ( self , other ) : <EOL> return self . _binary_operation ( other , Utility . nonboolean_or ) <EOL> def __and__ ( self , other ) : <EOL> return self . _binary_operation ( other , Utility . nonboolean_and ) <EOL> def __xor__ ( self , other ) : <EOL> return self . _binary_operation ( other , Utility . nonboolean_xor ) <EOL> def _binary_operation ( self , other , operation ) : <EOL> return Alignment ( horizontal = operation ( self . _horizontal , other . _horizontal , '<STR_LIT:left>' ) , vertical = operation ( self . _vertical , other . _vertical , '<STR_LIT>' ) , rotation = operation ( self . _rotation , other . _rotation , <NUM_LIT:0> ) , wrap_text = operation ( self . _wrap_text , other . _wrap_text , False ) <EOL> ) <EOL> def __eq__ ( self , other ) : <EOL> if other is None : <EOL> return self . is_default <EOL> elif Utility . YOLO : <EOL> return self . _vertical == other . _vertical and self . _rotation == other . _rotation <EOL> else : <EOL> return self . _vertical == other . _vertical and self . _rotation == other . _rotation and self . _horizontal == other . _horizontal and self . _wrap_text == other . _wrap_text <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . _horizontal , self . _wrap_text ) ) <EOL> def __str__ ( self ) : <EOL> return "<STR_LIT>" % ( self . _horizontal , self . _vertical , self . _rotation ) </s>
94,723
from werkzeug . contrib . fixers import ProxyFix
from main import app
3,522,827,376,442,030,600
from werkzeug . contrib . fixers import ProxyFix <EOL> from main import app <EOL> app . wsgi_app = ProxyFix ( app . wsgi_app ) </s>
94,724
import empty <EOL> from opcodes import * <EOL> from memory_map import mem_map <EOL> from branching_flags import * <EOL> bench_dir = "<STR_LIT>" <EOL> bench_file = "<STR_LIT>" <EOL> bench_name = bench_dir + "<STR_LIT:/>" + bench_file <EOL> SIMD_bench_name = bench_dir + "<STR_LIT:/>" + "<STR_LIT>" + bench_file <EOL> empty = empty . assemble_all ( ) <EOL> def assemble_PC ( ) : <EOL> PC = empty [ "<STR_LIT>" ] <EOL> PC . file_name = bench_name <EOL> return PC <EOL> def assemble_A ( ) : <EOL> A = empty [ "<STR_LIT:A>" ] <EOL> A . file_name = bench_name <EOL> A . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:A>" ] [ "<STR_LIT>" ] ) <EOL> A . A ( <NUM_LIT:0> ) <EOL> A . L ( <NUM_LIT:0> ) <EOL> A . L ( <NUM_LIT:1> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:3> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:2> ** ( A . width - <NUM_LIT:1> ) ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> return A <EOL> def assemble_B ( ) : <EOL> B = empty [ "<STR_LIT:B>" ] <EOL> B . file_name = bench_name <EOL> B . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] ) <EOL> B . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] , write_addr = mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] ) <EOL> B . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] + <NUM_LIT:1> , write_addr = mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] + <NUM_LIT:1> ) <EOL> B . A ( <NUM_LIT:0> ) <EOL> B . L ( <NUM_LIT:0> ) <EOL> B . L ( <NUM_LIT:0> ) , B . N ( "<STR_LIT>" ) <EOL> B . L ( <NUM_LIT> ) , B . N ( "<STR_LIT>" ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( - <NUM_LIT:1> ) <EOL> B . L ( <NUM_LIT:0> ) , B . N ( "<STR_LIT>" ) <EOL> B . L ( <NUM_LIT:0> ) , B . N ( "<STR_LIT>" ) <EOL> return B <EOL> def assemble_I ( PC , A , B ) : <EOL> I = empty [ "<STR_LIT:I>" ] <EOL> I . file_name = bench_name <EOL> I . A ( <NUM_LIT:1> ) <EOL> base_addr = mem_map [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> depth = mem_map [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> I . I ( ADD , base_addr , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> I . I ( ADD , base_addr + depth , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> I . I ( ADD , base_addr + ( depth * <NUM_LIT:2> ) , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> I . I ( ADD , base_addr + ( depth * <NUM_LIT:3> ) , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> base_addr = mem_map [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> I . I ( ADD , base_addr , <NUM_LIT:0> , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) <EOL> I . I ( ADD , base_addr + <NUM_LIT:1> , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> I . I ( ADD , "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) <EOL> I . I ( MLS , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , I . JEV ( "<STR_LIT>" , False , "<STR_LIT>" ) , I . JNE ( "<STR_LIT>" , False , "<STR_LIT>" ) <EOL> I . I ( ADD , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , I . JMP ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> I . I ( MHU , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) <EOL> I . I ( ADD , "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) , I . JMP ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> I . resolve_forward_jumps ( ) <EOL> read_PO = ( mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] - mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] + B . R ( "<STR_LIT>" ) ) & <NUM_LIT> <EOL> write_PO = ( mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] + mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] - mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] + B . W ( "<STR_LIT>" ) ) & <NUM_LIT> <EOL> PO = ( <NUM_LIT:1> << <NUM_LIT> ) | ( <NUM_LIT:1> << <NUM_LIT:32> ) | ( write_PO << <NUM_LIT:20> ) | read_PO <EOL> B . A ( B . R ( "<STR_LIT>" ) ) <EOL> B . L ( PO ) <EOL> read_PO -= <NUM_LIT:1> <EOL> write_PO -= <NUM_LIT:1> <EOL> PO = ( <NUM_LIT:1> << <NUM_LIT> ) | ( <NUM_LIT:1> << <NUM_LIT:32> ) | ( write_PO << <NUM_LIT:20> ) | read_PO <EOL> B . A ( B . R ( "<STR_LIT>" ) ) <EOL> B . L ( PO ) <EOL> return I
def assemble_XDO ( ) :
5,818,091,651,070,726,000
import empty <EOL> from opcodes import * <EOL> from memory_map import mem_map <EOL> from branching_flags import * <EOL> bench_dir = "<STR_LIT>" <EOL> bench_file = "<STR_LIT>" <EOL> bench_name = bench_dir + "<STR_LIT:/>" + bench_file <EOL> SIMD_bench_name = bench_dir + "<STR_LIT:/>" + "<STR_LIT>" + bench_file <EOL> empty = empty . assemble_all ( ) <EOL> def assemble_PC ( ) : <EOL> PC = empty [ "<STR_LIT>" ] <EOL> PC . file_name = bench_name <EOL> return PC <EOL> def assemble_A ( ) : <EOL> A = empty [ "<STR_LIT:A>" ] <EOL> A . file_name = bench_name <EOL> A . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:A>" ] [ "<STR_LIT>" ] ) <EOL> A . A ( <NUM_LIT:0> ) <EOL> A . L ( <NUM_LIT:0> ) <EOL> A . L ( <NUM_LIT:1> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:3> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:2> ** ( A . width - <NUM_LIT:1> ) ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> A . L ( <NUM_LIT:0> ) , A . N ( "<STR_LIT>" ) <EOL> return A <EOL> def assemble_B ( ) : <EOL> B = empty [ "<STR_LIT:B>" ] <EOL> B . file_name = bench_name <EOL> B . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] ) <EOL> B . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] , write_addr = mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] ) <EOL> B . P ( "<STR_LIT>" , mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] + <NUM_LIT:1> , write_addr = mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] + <NUM_LIT:1> ) <EOL> B . A ( <NUM_LIT:0> ) <EOL> B . L ( <NUM_LIT:0> ) <EOL> B . L ( <NUM_LIT:0> ) , B . N ( "<STR_LIT>" ) <EOL> B . L ( <NUM_LIT> ) , B . N ( "<STR_LIT>" ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( <NUM_LIT> ) <EOL> B . L ( - <NUM_LIT:1> ) <EOL> B . L ( <NUM_LIT:0> ) , B . N ( "<STR_LIT>" ) <EOL> B . L ( <NUM_LIT:0> ) , B . N ( "<STR_LIT>" ) <EOL> return B <EOL> def assemble_I ( PC , A , B ) : <EOL> I = empty [ "<STR_LIT:I>" ] <EOL> I . file_name = bench_name <EOL> I . A ( <NUM_LIT:1> ) <EOL> base_addr = mem_map [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> depth = mem_map [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> I . I ( ADD , base_addr , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> I . I ( ADD , base_addr + depth , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> I . I ( ADD , base_addr + ( depth * <NUM_LIT:2> ) , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> I . I ( ADD , base_addr + ( depth * <NUM_LIT:3> ) , "<STR_LIT>" , <NUM_LIT:0> ) <EOL> base_addr = mem_map [ "<STR_LIT>" ] [ "<STR_LIT>" ] <EOL> I . I ( ADD , base_addr , <NUM_LIT:0> , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) <EOL> I . I ( ADD , base_addr + <NUM_LIT:1> , <NUM_LIT:0> , "<STR_LIT>" ) <EOL> I . I ( ADD , "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) <EOL> I . I ( MLS , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , I . JEV ( "<STR_LIT>" , False , "<STR_LIT>" ) , I . JNE ( "<STR_LIT>" , False , "<STR_LIT>" ) <EOL> I . I ( ADD , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , I . JMP ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> I . I ( MHU , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) <EOL> I . I ( ADD , "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" ) , I . N ( "<STR_LIT>" ) , I . JMP ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> I . resolve_forward_jumps ( ) <EOL> read_PO = ( mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] - mem_map [ "<STR_LIT:B>" ] [ "<STR_LIT>" ] + B . R ( "<STR_LIT>" ) ) & <NUM_LIT> <EOL> write_PO = ( mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] + mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] - mem_map [ "<STR_LIT:H>" ] [ "<STR_LIT>" ] + B . W ( "<STR_LIT>" ) ) & <NUM_LIT> <EOL> PO = ( <NUM_LIT:1> << <NUM_LIT> ) | ( <NUM_LIT:1> << <NUM_LIT:32> ) | ( write_PO << <NUM_LIT:20> ) | read_PO <EOL> B . A ( B . R ( "<STR_LIT>" ) ) <EOL> B . L ( PO ) <EOL> read_PO -= <NUM_LIT:1> <EOL> write_PO -= <NUM_LIT:1> <EOL> PO = ( <NUM_LIT:1> << <NUM_LIT> ) | ( <NUM_LIT:1> << <NUM_LIT:32> ) | ( write_PO << <NUM_LIT:20> ) | read_PO <EOL> B . A ( B . R ( "<STR_LIT>" ) ) <EOL> B . L ( PO ) <EOL> return I <EOL> def assemble_XDO ( ) : <EOL> ADO , BDO , DDO = empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] <EOL> ADO . file_name = bench_name <EOL> BDO . file_name = bench_name <EOL> DDO . file_name = bench_name <EOL> return ADO , BDO , DDO <EOL> def assemble_XPO ( ) : <EOL> APO , BPO , DPO = empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] <EOL> APO . file_name = bench_name <EOL> BPO . file_name = bench_name <EOL> DPO . file_name = bench_name <EOL> return APO , BPO , DPO <EOL> def assemble_XIN ( ) : <EOL> AIN , BIN , DIN = empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] <EOL> AIN . file_name = bench_name <EOL> BIN . file_name = bench_name <EOL> DIN . file_name = bench_name <EOL> return AIN , BIN , DIN <EOL> def assemble_branches ( ) : <EOL> BO , BD , BC , BP , BPE = empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] , empty [ "<STR_LIT>" ] <EOL> BO . file_name = bench_name <EOL> BD . file_name = bench_name <EOL> BC . file_name = bench_name <EOL> BP . file_name = bench_name <EOL> BPE . file_name = bench_name <EOL> return BO , BD , BC , BP , BPE <EOL> def assemble_all ( ) : <EOL> PC = assemble_PC ( ) <EOL> A = assemble_A ( ) <EOL> B = assemble_B ( ) <EOL> I = assemble_I ( PC , A , B ) <EOL> ADO , BDO , DDO = assemble_XDO ( ) <EOL> APO , BPO , DPO = assemble_XPO ( ) <EOL> AIN , BIN , DIN = assemble_XIN ( ) <EOL> BO , BD , BC , BP , BPE = assemble_branches ( ) <EOL> hailstone = { "<STR_LIT>" : PC , "<STR_LIT:A>" : A , "<STR_LIT:B>" : B , "<STR_LIT:I>" : I , <EOL> "<STR_LIT>" : ADO , "<STR_LIT>" : BDO , "<STR_LIT>" : DDO , <EOL> "<STR_LIT>" : APO , "<STR_LIT>" : BPO , "<STR_LIT>" : DPO , <EOL> "<STR_LIT>" : AIN , "<STR_LIT>" : BIN , "<STR_LIT>" : DIN , <EOL> "<STR_LIT>" : BO , "<STR_LIT>" : BD , "<STR_LIT>" : BC , "<STR_LIT>" : BP , "<STR_LIT>" : BPE } <EOL> return hailstone <EOL> def dump_all ( hailstone ) : <EOL> for memory in hailstone . values ( ) : <EOL> memory . file_dump ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> hailstone = assemble_all ( ) <EOL> dump_all ( hailstone ) </s>
94,725
import logging <EOL> from sqlalchemy import sql
from acoustid import tables as schema
7,464,434,591,927,983,000
import logging <EOL> from sqlalchemy import sql <EOL> from acoustid import tables as schema <EOL> logger = logging . getLogger ( __name__ ) <EOL> def find_or_insert_source ( conn , application_id , account_id , version = None ) : <EOL> """<STR_LIT>""" <EOL> with conn . begin ( ) : <EOL> query = sql . select ( [ schema . source . c . id ] , <EOL> sql . and_ ( schema . source . c . account_id == account_id , <EOL> schema . source . c . application_id == application_id , <EOL> schema . source . c . version == version ) ) <EOL> id = conn . execute ( query ) . scalar ( ) <EOL> if id is None : <EOL> insert_stmt = schema . source . insert ( ) . values ( account_id = account_id , application_id = application_id , version = version ) <EOL> id = conn . execute ( insert_stmt ) . inserted_primary_key [ <NUM_LIT:0> ] <EOL> logger . info ( "<STR_LIT>" , id , account_id , application_id , version ) <EOL> return id </s>
94,726
from __future__ import with_statement <EOL> import os <EOL> from alembic import context <EOL> from sqlalchemy import engine_from_config , pool <EOL> from logging . config import fileConfig <EOL> config = context . config <EOL> fileConfig ( config . config_file_name ) <EOL> import acoustid . tables <EOL> target_metadata = acoustid . tables . metadata <EOL> import acoustid . config <EOL> acoustid_config = acoustid . config . Config ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' , '<STR_LIT>' ) ) <EOL> def include_object ( obj , name , type , reflected , compare_to ) : <EOL> if type == "<STR_LIT>" and obj . schema == "<STR_LIT>" : <EOL> return False <EOL> if type == "<STR_LIT>" and not obj . table . schema == "<STR_LIT>" : <EOL> return False <EOL> return True <EOL> def run_migrations_offline ( ) : <EOL> """<STR_LIT>""" <EOL> url = acoustid_config . database . create_url ( ) <EOL> context . configure ( <EOL> url = url , target_metadata = target_metadata , literal_binds = True , <EOL> include_object = include_object ) <EOL> with context . begin_transaction ( ) : <EOL> context . run_migrations ( ) <EOL> def run_migrations_online ( ) : <EOL> """<STR_LIT>""" <EOL> connectable = acoustid_config . database . create_engine ( poolclass = pool . NullPool ) <EOL> with connectable . connect ( ) as connection : <EOL> context . configure ( <EOL> connection = connection , <EOL> target_metadata = target_metadata , <EOL> include_object = include_object , <EOL> ) <EOL> with context . begin_transaction ( ) : <EOL> context . run_migrations ( ) <EOL> if context . is_offline_mode ( ) : <EOL> run_migrations_offline ( ) <EOL> else :
run_migrations_online ( ) </s>
6,764,663,943,183,926,000
from __future__ import with_statement <EOL> import os <EOL> from alembic import context <EOL> from sqlalchemy import engine_from_config , pool <EOL> from logging . config import fileConfig <EOL> config = context . config <EOL> fileConfig ( config . config_file_name ) <EOL> import acoustid . tables <EOL> target_metadata = acoustid . tables . metadata <EOL> import acoustid . config <EOL> acoustid_config = acoustid . config . Config ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:..>' , '<STR_LIT>' ) ) <EOL> def include_object ( obj , name , type , reflected , compare_to ) : <EOL> if type == "<STR_LIT>" and obj . schema == "<STR_LIT>" : <EOL> return False <EOL> if type == "<STR_LIT>" and not obj . table . schema == "<STR_LIT>" : <EOL> return False <EOL> return True <EOL> def run_migrations_offline ( ) : <EOL> """<STR_LIT>""" <EOL> url = acoustid_config . database . create_url ( ) <EOL> context . configure ( <EOL> url = url , target_metadata = target_metadata , literal_binds = True , <EOL> include_object = include_object ) <EOL> with context . begin_transaction ( ) : <EOL> context . run_migrations ( ) <EOL> def run_migrations_online ( ) : <EOL> """<STR_LIT>""" <EOL> connectable = acoustid_config . database . create_engine ( poolclass = pool . NullPool ) <EOL> with connectable . connect ( ) as connection : <EOL> context . configure ( <EOL> connection = connection , <EOL> target_metadata = target_metadata , <EOL> include_object = include_object , <EOL> ) <EOL> with context . begin_transaction ( ) : <EOL> context . run_migrations ( ) <EOL> if context . is_offline_mode ( ) : <EOL> run_migrations_offline ( ) <EOL> else : <EOL> run_migrations_online ( ) </s>
94,727
from nose . tools import * <EOL> from tests import prepare_database , with_database <EOL> from acoustid . data . meta import insert_meta <EOL> @ with_database <EOL> def test_insert_meta ( conn ) : <EOL> id = insert_meta ( conn , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' ,
'<STR_LIT>' : '<STR_LIT>' ,
-6,595,620,554,020,367,000
from nose . tools import * <EOL> from tests import prepare_database , with_database <EOL> from acoustid . data . meta import insert_meta <EOL> @ with_database <EOL> def test_insert_meta ( conn ) : <EOL> id = insert_meta ( conn , { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } ) <EOL> assert_equals ( <NUM_LIT:1> , id ) <EOL> row = conn . execute ( "<STR_LIT>" ) . fetchone ( ) <EOL> expected = { <EOL> '<STR_LIT:id>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } <EOL> assert_equals ( expected , dict ( row ) ) </s>
94,728
<s> """<STR_LIT>"""
-9,038,529,813,824,490,000
"""<STR_LIT>""" <EOL> from django . conf import settings <EOL> from django . core . exceptions import ImproperlyConfigured <EOL> from appconf import AppConf <EOL> from permission . handlers import LogicalPermissionHandler <EOL> __all__ = ( '<STR_LIT>' , ) <EOL> class PermissionConf ( AppConf ) : <EOL> DEFAULT_PERMISSION_HANDLER = '<STR_LIT>' <EOL> """<STR_LIT>""" <EOL> CHECK_PERMISSION_PRESENCE = settings . DEBUG <EOL> """<STR_LIT>""" <EOL> REPLACE_BUILTIN_IF = True <EOL> """<STR_LIT>""" <EOL> DEFAULT_APL_FIELD_NAME = '<STR_LIT>' <EOL> DEFAULT_APL_ANY_PERMISSION = False <EOL> DEFAULT_APL_CHANGE_PERMISSION = True <EOL> DEFAULT_APL_DELETE_PERMISSION = True <EOL> DEFAULT_CPL_FIELD_NAME = '<STR_LIT>' <EOL> DEFAULT_CPL_ANY_PERMISSION = False <EOL> DEFAULT_CPL_CHANGE_PERMISSION = True <EOL> DEFAULT_CPL_DELETE_PERMISSION = False <EOL> DEFAULT_GIPL_ANY_PERMISSION = False <EOL> DEFAULT_GIPL_ADD_PERMISSION = True <EOL> DEFAULT_GIPL_CHANGE_PERMISSION = True <EOL> DEFAULT_GIPL_DELETE_PERMISSION = False <EOL> DEFAULT_OSPL_ANY_PERMISSION = False <EOL> DEFAULT_OSPL_CHANGE_PERMISSION = True <EOL> DEFAULT_OSPL_DELETE_PERMISSION = True <EOL> DEFAULT_SPL_ANY_PERMISSION = False <EOL> DEFAULT_SPL_ADD_PERMISSION = True <EOL> DEFAULT_SPL_CHANGE_PERMISSION = True <EOL> DEFAULT_SPL_DELETE_PERMISSION = False <EOL> AUTODISCOVER_MODULE_NAME = '<STR_LIT>' <EOL> AUTODISCOVER_VARIABLE_NAME = '<STR_LIT>' <EOL> CHECK_AUTHENTICATION_BACKENDS = True <EOL> """<STR_LIT>""" <EOL> CHECK_TEMPLATES_OPTIONS_BUILTINS = True <EOL> """<STR_LIT>""" </s>
94,729
import django <EOL> if django . VERSION < ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> from permission . tests . test_logics . test_base import *
from permission . tests . test_logics . test_author import *
9,095,841,170,549,299,000
import django <EOL> if django . VERSION < ( <NUM_LIT:1> , <NUM_LIT:6> ) : <EOL> from permission . tests . test_logics . test_base import * <EOL> from permission . tests . test_logics . test_author import * <EOL> from permission . tests . test_logics . test_collaborators import * <EOL> from permission . tests . test_logics . test_groupin import * <EOL> from permission . tests . test_logics . test_staff import * <EOL> from permission . tests . test_logics . test_oneself import * </s>
94,730
<s> from django . conf . urls . defaults import *
-9,137,781,415,563,600,000
from django . conf . urls . defaults import * <EOL> urlpatterns = patterns ( '<STR_LIT>' , <EOL> ( r'<STR_LIT>' , include ( '<STR_LIT>' ) ) , <EOL> ) </s>
94,731
import pickle <EOL> import base64
import sys
2,183,958,328,900,240,100
import pickle <EOL> import base64 <EOL> import sys <EOL> PRINT_ALL_COMMANDS = False <EOL> PRINT_STDOUT_ALWAYS = False <EOL> PRINT_STDERR_ALWAYS = False <EOL> COLORAMA_ENABLED = True <EOL> def dumps ( ) : <EOL> config_tuple = ( PRINT_ALL_COMMANDS , PRINT_STDOUT_ALWAYS , PRINT_STDERR_ALWAYS , COLORAMA_ENABLED ) <EOL> serialized_config = pickle . dumps ( config_tuple ) <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> return base64 . b64encode ( serialized_config ) <EOL> else : <EOL> return str ( base64 . b64encode ( serialized_config ) , '<STR_LIT:utf-8>' ) <EOL> def loads ( data ) : <EOL> global PRINT_ALL_COMMANDS , PRINT_STDOUT_ALWAYS , PRINT_STDERR_ALWAYS , COLORAMA_ENABLED <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:2> : <EOL> serialized_config = base64 . b64decode ( data ) <EOL> else : <EOL> serialized_config = base64 . b64decode ( bytes ( data , '<STR_LIT:utf-8>' ) ) <EOL> config_tuple = pickle . loads ( serialized_config ) <EOL> PRINT_ALL_COMMANDS , PRINT_STDOUT_ALWAYS , PRINT_STDERR_ALWAYS , COLORAMA_ENABLED = config_tuple </s>
94,732
import sys <EOL> import os <EOL> import pprint <EOL> import datetime <EOL> import logging <EOL> import mtlutils <EOL> CSV_FIELDS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:path>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ,
'<STR_LIT>' ,
-4,985,962,253,325,202,000
import sys <EOL> import os <EOL> import pprint <EOL> import datetime <EOL> import logging <EOL> import mtlutils <EOL> CSV_FIELDS = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:path>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> def make_scene_line ( scene_dict ) : <EOL> line = '<STR_LIT>' <EOL> for fieldname in CSV_FIELDS : <EOL> value = str ( scene_dict . get ( fieldname , '<STR_LIT>' ) ) <EOL> line += value + '<STR_LIT:U+002C>' <EOL> line = line [ : - <NUM_LIT:1> ] <EOL> return line <EOL> def append_scene_line ( filename , scene_dict ) : <EOL> if not os . path . exists ( filename ) : <EOL> init_list_file ( filename ) <EOL> open ( filename , '<STR_LIT:a>' ) . write ( make_scene_line ( scene_dict ) + '<STR_LIT:\n>' ) <EOL> def init_list_file ( filename ) : <EOL> open ( filename , '<STR_LIT:w>' ) . write ( ( '<STR_LIT:U+002C>' . join ( CSV_FIELDS ) ) + '<STR_LIT:\n>' ) <EOL> def split_scene_line ( line ) : <EOL> fields = line . strip ( ) . split ( '<STR_LIT:U+002C>' ) <EOL> scene_dict = { } <EOL> for i in range ( len ( fields ) ) : <EOL> scene_dict [ CSV_FIELDS [ i ] ] = fields [ i ] <EOL> return scene_dict <EOL> def add_mtl_info ( scene_dict , scene_root , scene_dir ) : <EOL> mtl_filename = '<STR_LIT>' % ( scene_dir , scene_root ) <EOL> mtl_dict = mtlutils . parsemeta ( mtl_filename ) <EOL> mtl_dict = mtl_dict [ '<STR_LIT>' ] <EOL> scene_dict [ '<STR_LIT>' ] = scene_root <EOL> acq_datetime = datetime . datetime . combine ( <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> scene_dict [ '<STR_LIT>' ] = str ( acq_datetime ) <EOL> scene_dict [ '<STR_LIT>' ] = mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> scene_dict [ '<STR_LIT>' ] = mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> scene_dict [ '<STR_LIT:path>' ] = mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> scene_dict [ '<STR_LIT>' ] = mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> lats = [ mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] <EOL> lons = [ mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <EOL> mtl_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] ] <EOL> scene_dict [ '<STR_LIT>' ] = min ( lats ) <EOL> scene_dict [ '<STR_LIT>' ] = max ( lats ) <EOL> scene_dict [ '<STR_LIT>' ] = min ( lons ) <EOL> scene_dict [ '<STR_LIT>' ] = max ( lons ) <EOL> return scene_dict <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> if len ( sys . argv ) < <NUM_LIT:3> : <EOL> print '<STR_LIT>' <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> scene_dict = { } <EOL> add_mtl_info ( scene_dict , sys . argv [ <NUM_LIT:1> ] , sys . argv [ <NUM_LIT:2> ] ) <EOL> pprint . pprint ( scene_dict ) <EOL> print make_scene_line ( scene_dict ) </s>
94,733
import bh_plugin <EOL> import sublime <EOL> DEFAULT_TAGS = [ "<STR_LIT>" , "<STR_LIT:html>" , "<STR_LIT>" ] <EOL> class SelectBracket ( bh_plugin . BracketPluginCommand ) : <EOL> def run ( self , edit , name , select = '<STR_LIT>' , tags = DEFAULT_TAGS , always_include_brackets = False ) : <EOL> """<STR_LIT>""" <EOL> current_left , current_right = self . selection [ <NUM_LIT:0> ] . begin ( ) , self . selection [ <NUM_LIT:0> ] . end ( ) <EOL> left , right = self . left , self . right <EOL> first , last = left . end , right . begin <EOL> if select == '<STR_LIT:left>' : <EOL> if name in tags and left . size ( ) > <NUM_LIT:1> : <EOL> first , last = left . begin + <NUM_LIT:1> , left . begin + <NUM_LIT:1> <EOL> if first == current_left and last == current_right : <EOL> first , last = left . begin , left . begin <EOL> else : <EOL> first , last = left . end , left . end <EOL> if first == current_left and last == current_right : <EOL> first , last = left . begin , left . begin <EOL> elif select == '<STR_LIT:right>' :
if left . end != right . end :
-1,697,903,422,862,329,900
import bh_plugin <EOL> import sublime <EOL> DEFAULT_TAGS = [ "<STR_LIT>" , "<STR_LIT:html>" , "<STR_LIT>" ] <EOL> class SelectBracket ( bh_plugin . BracketPluginCommand ) : <EOL> def run ( self , edit , name , select = '<STR_LIT>' , tags = DEFAULT_TAGS , always_include_brackets = False ) : <EOL> """<STR_LIT>""" <EOL> current_left , current_right = self . selection [ <NUM_LIT:0> ] . begin ( ) , self . selection [ <NUM_LIT:0> ] . end ( ) <EOL> left , right = self . left , self . right <EOL> first , last = left . end , right . begin <EOL> if select == '<STR_LIT:left>' : <EOL> if name in tags and left . size ( ) > <NUM_LIT:1> : <EOL> first , last = left . begin + <NUM_LIT:1> , left . begin + <NUM_LIT:1> <EOL> if first == current_left and last == current_right : <EOL> first , last = left . begin , left . begin <EOL> else : <EOL> first , last = left . end , left . end <EOL> if first == current_left and last == current_right : <EOL> first , last = left . begin , left . begin <EOL> elif select == '<STR_LIT:right>' : <EOL> if left . end != right . end : <EOL> if name in tags and left . size ( ) > <NUM_LIT:1> : <EOL> first , last = right . begin + <NUM_LIT:1> , right . begin + <NUM_LIT:1> <EOL> if first == current_left and last == current_right : <EOL> first , last = right . end , right . end <EOL> else : <EOL> first , last = right . begin , right . begin <EOL> if first == current_left and last == current_right : <EOL> first , last = right . end , right . end <EOL> else : <EOL> if name in tags and left . size ( ) > <NUM_LIT:1> : <EOL> first , last = left . begin + <NUM_LIT:1> , left . begin + <NUM_LIT:1> <EOL> else : <EOL> first , last = right . end , right . end <EOL> if first == current_left and last == current_right : <EOL> first , last = right . end , right . end <EOL> elif first == current_left and last == current_right or always_include_brackets : <EOL> first , last = left . begin , right . end <EOL> self . selection = [ sublime . Region ( first , last ) ] <EOL> def plugin ( ) : <EOL> return SelectBracket </s>
94,734
import sublime , sublime_plugin <EOL> def fold_region_from_indent ( view , r ) : <EOL> if r . b == view . size ( ) : <EOL> return sublime . Region ( r . a - <NUM_LIT:1> , r . b ) <EOL> else : <EOL> return sublime . Region ( r . a - <NUM_LIT:1> , r . b - <NUM_LIT:1> )
class FoldUnfoldCommand ( sublime_plugin . TextCommand ) :
-4,542,034,762,413,826,600
import sublime , sublime_plugin <EOL> def fold_region_from_indent ( view , r ) : <EOL> if r . b == view . size ( ) : <EOL> return sublime . Region ( r . a - <NUM_LIT:1> , r . b ) <EOL> else : <EOL> return sublime . Region ( r . a - <NUM_LIT:1> , r . b - <NUM_LIT:1> ) <EOL> class FoldUnfoldCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> new_sel = [ ] <EOL> for s in self . view . sel ( ) : <EOL> r = s <EOL> empty_region = r . empty ( ) <EOL> if empty_region : <EOL> r = sublime . Region ( r . a - <NUM_LIT:1> , r . a + <NUM_LIT:1> ) <EOL> unfolded = self . view . unfold ( r ) <EOL> if len ( unfolded ) == <NUM_LIT:0> : <EOL> self . view . fold ( s ) <EOL> elif empty_region : <EOL> for r in unfolded : <EOL> new_sel . append ( r ) <EOL> if len ( new_sel ) > <NUM_LIT:0> : <EOL> self . view . sel ( ) . clear ( ) <EOL> for r in new_sel : <EOL> self . view . sel ( ) . add ( r ) <EOL> class FoldCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> new_sel = [ ] <EOL> for s in self . view . sel ( ) : <EOL> if s . empty ( ) : <EOL> r = self . view . indented_region ( s . a ) <EOL> if not r . empty ( ) : <EOL> r = fold_region_from_indent ( self . view , r ) <EOL> self . view . fold ( r ) <EOL> new_sel . append ( r ) <EOL> else : <EOL> new_sel . append ( s ) <EOL> else : <EOL> if self . view . fold ( s ) : <EOL> new_sel . append ( s ) <EOL> else : <EOL> r = self . view . indented_region ( s . a ) <EOL> if not r . empty ( ) : <EOL> r = fold_region_from_indent ( self . view , r ) <EOL> self . view . fold ( r ) <EOL> new_sel . append ( r ) <EOL> else : <EOL> new_sel . append ( s ) <EOL> self . view . sel ( ) . clear ( ) <EOL> for r in new_sel : <EOL> self . view . sel ( ) . add ( r ) <EOL> class FoldAllCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> folds = [ ] <EOL> tp = <NUM_LIT:0> <EOL> size = self . view . size ( ) <EOL> while tp < size : <EOL> s = self . view . indented_region ( tp ) <EOL> if not s . empty ( ) : <EOL> r = fold_region_from_indent ( self . view , s ) <EOL> folds . append ( r ) <EOL> tp = s . b <EOL> else : <EOL> tp = self . view . full_line ( tp ) . b <EOL> self . view . fold ( folds ) <EOL> self . view . show ( self . view . sel ( ) ) <EOL> sublime . status_message ( "<STR_LIT>" + str ( len ( folds ) ) + "<STR_LIT>" ) <EOL> class FoldByLevelCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit , level ) : <EOL> level = int ( level ) <EOL> folds = [ ] <EOL> tp = <NUM_LIT:0> <EOL> size = self . view . size ( ) <EOL> while tp < size : <EOL> if self . view . indentation_level ( tp ) == level : <EOL> s = self . view . indented_region ( tp ) <EOL> if not s . empty ( ) : <EOL> r = fold_region_from_indent ( self . view , s ) <EOL> folds . append ( r ) <EOL> tp = s . b <EOL> continue ; <EOL> tp = self . view . full_line ( tp ) . b <EOL> self . view . fold ( folds ) <EOL> self . view . show ( self . view . sel ( ) ) <EOL> sublime . status_message ( "<STR_LIT>" + str ( len ( folds ) ) + "<STR_LIT>" ) <EOL> class UnfoldCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> new_sel = [ ] <EOL> for s in self . view . sel ( ) : <EOL> unfold = s <EOL> if s . empty ( ) : <EOL> unfold = sublime . Region ( s . a - <NUM_LIT:1> , s . a + <NUM_LIT:1> ) <EOL> unfolded = self . view . unfold ( unfold ) <EOL> if len ( unfolded ) == <NUM_LIT:0> and s . empty ( ) : <EOL> unfolded = self . view . unfold ( self . view . full_line ( s . b ) ) <EOL> if len ( unfolded ) == <NUM_LIT:0> : <EOL> new_sel . append ( s ) <EOL> else : <EOL> for r in unfolded : <EOL> new_sel . append ( r ) <EOL> if len ( new_sel ) > <NUM_LIT:0> : <EOL> self . view . sel ( ) . clear ( ) <EOL> for r in new_sel : <EOL> self . view . sel ( ) . add ( r ) <EOL> class UnfoldAllCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> self . view . unfold ( sublime . Region ( <NUM_LIT:0> , self . view . size ( ) ) ) <EOL> self . view . show ( self . view . sel ( ) ) </s>
94,735
from __future__ import unicode_literals <EOL> import codecs <EOL> import inspect <EOL> import sys <EOL> class Runtime ( object ) :
@ staticmethod
-7,386,171,866,468,888,000
from __future__ import unicode_literals <EOL> import codecs <EOL> import inspect <EOL> import sys <EOL> class Runtime ( object ) : <EOL> @ staticmethod <EOL> def getCaller ( up = <NUM_LIT:0> ) : <EOL> return inspect . stack ( ) [ <NUM_LIT:2> + up ] [ <NUM_LIT:3> ] <EOL> class Types ( object ) : <EOL> if sys . version < '<STR_LIT:3>' : <EOL> text = unicode <EOL> binary = str <EOL> else : <EOL> text = str <EOL> binary = bytes <EOL> @ staticmethod <EOL> def u ( string ) : <EOL> if sys . version < '<STR_LIT:3>' : <EOL> return codecs . unicode_escape_decode ( string ) [ <NUM_LIT:0> ] <EOL> else : <EOL> return string </s>
94,736
import sublime <EOL> def fix_cloned_view ( f ) : <EOL> """<STR_LIT>"""
def _f ( self , view , * args , ** kwargs ) :
-4,149,590,908,157,036,000
import sublime <EOL> def fix_cloned_view ( f ) : <EOL> """<STR_LIT>""" <EOL> def _f ( self , view , * args , ** kwargs ) : <EOL> window = view . window ( ) <EOL> if window is None : <EOL> window = sublime . active_window ( ) <EOL> if window is not None : <EOL> if window . active_view ( ) is not None : <EOL> view = window . active_view ( ) <EOL> f ( self , view , * args , ** kwargs ) <EOL> return _f </s>
94,737
import re <EOL> import sublime_plugin <EOL> class GatherMissingLinkMarkersCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> markers = [ ]
self . view . find_all ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , markers )
-3,583,716,060,961,501,700
import re <EOL> import sublime_plugin <EOL> class GatherMissingLinkMarkersCommand ( sublime_plugin . TextCommand ) : <EOL> def run ( self , edit ) : <EOL> markers = [ ] <EOL> self . view . find_all ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , markers ) <EOL> self . view . find_all ( "<STR_LIT>" , <NUM_LIT:0> , "<STR_LIT>" , markers ) <EOL> missinglinks = [ link for link in set ( markers ) if not self . view . find_all ( "<STR_LIT>" % re . escape ( link ) ) ] <EOL> if len ( missinglinks ) : <EOL> whitespace_at_end = self . view . find ( r'<STR_LIT>' , <NUM_LIT:0> ) <EOL> self . view . replace ( edit , whitespace_at_end , "<STR_LIT:\n>" ) <EOL> if not self . view . find ( r'<STR_LIT>' , <NUM_LIT:0> ) : <EOL> self . view . insert ( edit , self . view . size ( ) , "<STR_LIT:\n>" ) <EOL> for link in missinglinks : <EOL> self . view . insert ( edit , self . view . size ( ) , '<STR_LIT>' % link ) <EOL> def is_enabled ( self ) : <EOL> return bool ( self . view . score_selector ( self . view . sel ( ) [ <NUM_LIT:0> ] . a , "<STR_LIT>" ) ) </s>
94,738
import re <EOL> import os
import base64
-1,924,416,924,946,604,800
import re <EOL> import os <EOL> import base64 <EOL> try : <EOL> from urllib . parse import urlencode <EOL> except ( ImportError ) : <EOL> from urllib import urlencode <EOL> from . json_api_client import JSONApiClient <EOL> from . . downloaders . downloader_exception import DownloaderException <EOL> _readme_formats = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } <EOL> class ReadmeClient ( JSONApiClient ) : <EOL> def readme_info ( self , url ) : <EOL> """<STR_LIT>""" <EOL> contents = None <EOL> github_match = re . match ( '<STR_LIT>' , url , re . I ) <EOL> if github_match : <EOL> user_repo = github_match . group ( <NUM_LIT:1> ) <EOL> branch = github_match . group ( <NUM_LIT:2> ) <EOL> query_string = urlencode ( { '<STR_LIT>' : branch } ) <EOL> readme_json_url = '<STR_LIT>' % ( user_repo , query_string ) <EOL> try : <EOL> info = self . fetch_json ( readme_json_url , prefer_cached = True ) <EOL> contents = base64 . b64decode ( info [ '<STR_LIT:content>' ] ) <EOL> except ( ValueError ) as e : <EOL> pass <EOL> if not contents : <EOL> contents = self . fetch ( url ) <EOL> basename , ext = os . path . splitext ( url ) <EOL> format = '<STR_LIT>' <EOL> ext = ext . lower ( ) <EOL> if ext in _readme_formats : <EOL> format = _readme_formats [ ext ] <EOL> try : <EOL> contents = contents . decode ( '<STR_LIT:utf-8>' ) <EOL> except ( UnicodeDecodeError ) as e : <EOL> contents = contents . decode ( '<STR_LIT>' , errors = '<STR_LIT:replace>' ) <EOL> return { <EOL> '<STR_LIT:filename>' : os . path . basename ( url ) , <EOL> '<STR_LIT>' : format , <EOL> '<STR_LIT>' : contents <EOL> } </s>
94,739
import os <EOL> import subprocess <EOL> from . . console_write import console_write <EOL> from . . cmd import create_cmd <EOL> from . non_clean_exit_error import NonCleanExitError <EOL> from . binary_not_found_error import BinaryNotFoundError <EOL> class CliDownloader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , settings ) : <EOL> self . settings = settings <EOL> def clean_tmp_file ( self ) : <EOL> if os . path . exists ( self . tmp_file ) : <EOL> os . remove ( self . tmp_file ) <EOL> def find_binary ( self , name ) : <EOL> """<STR_LIT>""" <EOL> dirs = os . environ [ '<STR_LIT>' ] . split ( os . pathsep ) <EOL> if os . name != '<STR_LIT>' : <EOL> dirs . append ( '<STR_LIT>' ) <EOL> for dir_ in dirs : <EOL> path = os . path . join ( dir_ , name ) <EOL> if os . path . exists ( path ) : <EOL> return path <EOL> raise BinaryNotFoundError ( '<STR_LIT>' % name ) <EOL> def execute ( self , args ) : <EOL> """<STR_LIT>""" <EOL> if self . settings . get ( '<STR_LIT>' ) : <EOL> console_write ( u"<STR_LIT>" % create_cmd ( args ) , True ) <EOL> proc = subprocess . Popen ( args , stdin = subprocess . PIPE , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> output = proc . stdout . read ( ) <EOL> self . stderr = proc . stderr . read ( ) <EOL> returncode = proc . wait ( ) <EOL> if returncode != <NUM_LIT:0> :
error = NonCleanExitError ( returncode )
-1,885,995,203,671,018,500
import os <EOL> import subprocess <EOL> from . . console_write import console_write <EOL> from . . cmd import create_cmd <EOL> from . non_clean_exit_error import NonCleanExitError <EOL> from . binary_not_found_error import BinaryNotFoundError <EOL> class CliDownloader ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , settings ) : <EOL> self . settings = settings <EOL> def clean_tmp_file ( self ) : <EOL> if os . path . exists ( self . tmp_file ) : <EOL> os . remove ( self . tmp_file ) <EOL> def find_binary ( self , name ) : <EOL> """<STR_LIT>""" <EOL> dirs = os . environ [ '<STR_LIT>' ] . split ( os . pathsep ) <EOL> if os . name != '<STR_LIT>' : <EOL> dirs . append ( '<STR_LIT>' ) <EOL> for dir_ in dirs : <EOL> path = os . path . join ( dir_ , name ) <EOL> if os . path . exists ( path ) : <EOL> return path <EOL> raise BinaryNotFoundError ( '<STR_LIT>' % name ) <EOL> def execute ( self , args ) : <EOL> """<STR_LIT>""" <EOL> if self . settings . get ( '<STR_LIT>' ) : <EOL> console_write ( u"<STR_LIT>" % create_cmd ( args ) , True ) <EOL> proc = subprocess . Popen ( args , stdin = subprocess . PIPE , <EOL> stdout = subprocess . PIPE , stderr = subprocess . PIPE ) <EOL> output = proc . stdout . read ( ) <EOL> self . stderr = proc . stderr . read ( ) <EOL> returncode = proc . wait ( ) <EOL> if returncode != <NUM_LIT:0> : <EOL> error = NonCleanExitError ( returncode ) <EOL> error . stderr = self . stderr <EOL> error . stdout = output <EOL> raise error <EOL> return output </s>
94,740
import os <EOL> import sys <EOL> from . file_not_found_error import FileNotFoundError <EOL> def open_compat ( path , mode = '<STR_LIT:r>' ) : <EOL> if mode in [ '<STR_LIT:r>' , '<STR_LIT:rb>' ] and not os . path . exists ( path ) : <EOL> raise FileNotFoundError ( u"<STR_LIT>" % path ) <EOL> if sys . version_info >= ( <NUM_LIT:3> , ) : <EOL> encoding = '<STR_LIT:utf-8>' <EOL> errors = '<STR_LIT:replace>' <EOL> if mode in [ '<STR_LIT:rb>' , '<STR_LIT:wb>' , '<STR_LIT>' ] : <EOL> encoding = None <EOL> errors = None <EOL> return open ( path , mode , encoding = encoding , errors = errors ) <EOL> else : <EOL> return open ( path , mode ) <EOL> def read_compat ( file_obj ) :
if sys . version_info >= ( <NUM_LIT:3> , ) :
-5,479,076,588,097,582,000
import os <EOL> import sys <EOL> from . file_not_found_error import FileNotFoundError <EOL> def open_compat ( path , mode = '<STR_LIT:r>' ) : <EOL> if mode in [ '<STR_LIT:r>' , '<STR_LIT:rb>' ] and not os . path . exists ( path ) : <EOL> raise FileNotFoundError ( u"<STR_LIT>" % path ) <EOL> if sys . version_info >= ( <NUM_LIT:3> , ) : <EOL> encoding = '<STR_LIT:utf-8>' <EOL> errors = '<STR_LIT:replace>' <EOL> if mode in [ '<STR_LIT:rb>' , '<STR_LIT:wb>' , '<STR_LIT>' ] : <EOL> encoding = None <EOL> errors = None <EOL> return open ( path , mode , encoding = encoding , errors = errors ) <EOL> else : <EOL> return open ( path , mode ) <EOL> def read_compat ( file_obj ) : <EOL> if sys . version_info >= ( <NUM_LIT:3> , ) : <EOL> return file_obj . read ( ) <EOL> else : <EOL> return unicode ( file_obj . read ( ) , '<STR_LIT:utf-8>' , errors = '<STR_LIT:replace>' ) </s>
94,741
import os <EOL> import locale <EOL> import sys <EOL> _encoding = '<STR_LIT:utf-8>' if sys . platform == '<STR_LIT>' else locale . getpreferredencoding ( )
_fallback_encodings = [ '<STR_LIT:utf-8>' , '<STR_LIT>' ]
364,815,555,926,189,700
import os <EOL> import locale <EOL> import sys <EOL> _encoding = '<STR_LIT:utf-8>' if sys . platform == '<STR_LIT>' else locale . getpreferredencoding ( ) <EOL> _fallback_encodings = [ '<STR_LIT:utf-8>' , '<STR_LIT>' ] <EOL> def unicode_from_os ( e ) : <EOL> """<STR_LIT>""" <EOL> if sys . version_info >= ( <NUM_LIT:3> , ) : <EOL> return str ( e ) <EOL> try : <EOL> if isinstance ( e , Exception ) : <EOL> e = e . args [ <NUM_LIT:0> ] <EOL> if isinstance ( e , unicode ) : <EOL> return e <EOL> if isinstance ( e , int ) : <EOL> e = str ( e ) <EOL> return unicode ( e , _encoding ) <EOL> except UnicodeDecodeError : <EOL> for encoding in _fallback_encodings : <EOL> try : <EOL> return unicode ( e , encoding , errors = '<STR_LIT:strict>' ) <EOL> except : <EOL> pass <EOL> return unicode ( e , errors = '<STR_LIT:replace>' ) </s>
94,742
"""<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> from os import path <EOL> import logging <EOL> from gensim import interfaces , utils <EOL> from gensim . corpora import IndexedCorpus <EOL> from six . moves import xrange <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class BleiCorpus ( IndexedCorpus ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , fname , fname_vocab = None ) : <EOL> """<STR_LIT>""" <EOL> IndexedCorpus . __init__ ( self , fname ) <EOL> logger . info ( "<STR_LIT>" % fname ) <EOL> if fname_vocab is None : <EOL> fname_base , _ = path . splitext ( fname ) <EOL> fname_dir = path . dirname ( fname ) <EOL> for fname_vocab in [ <EOL> fname + '<STR_LIT>' , <EOL> fname + '<STR_LIT>' , <EOL> fname_base + '<STR_LIT>' , <EOL> fname_dir + '<STR_LIT>' , <EOL> ] : <EOL> if path . exists ( fname_vocab ) : <EOL> break <EOL> else : <EOL> raise IOError ( '<STR_LIT>' ) <EOL> self . fname = fname <EOL> with utils . smart_open ( fname_vocab ) as fin : <EOL> words = [ utils . to_unicode ( word ) . rstrip ( ) for word in fin ] <EOL> self . id2word = dict ( enumerate ( words ) ) <EOL> self . length = <NUM_LIT:0> <EOL> def __iter__ ( self ) :
"""<STR_LIT>"""
-4,887,645,001,585,062,000
"""<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> from os import path <EOL> import logging <EOL> from gensim import interfaces , utils <EOL> from gensim . corpora import IndexedCorpus <EOL> from six . moves import xrange <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> class BleiCorpus ( IndexedCorpus ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , fname , fname_vocab = None ) : <EOL> """<STR_LIT>""" <EOL> IndexedCorpus . __init__ ( self , fname ) <EOL> logger . info ( "<STR_LIT>" % fname ) <EOL> if fname_vocab is None : <EOL> fname_base , _ = path . splitext ( fname ) <EOL> fname_dir = path . dirname ( fname ) <EOL> for fname_vocab in [ <EOL> fname + '<STR_LIT>' , <EOL> fname + '<STR_LIT>' , <EOL> fname_base + '<STR_LIT>' , <EOL> fname_dir + '<STR_LIT>' , <EOL> ] : <EOL> if path . exists ( fname_vocab ) : <EOL> break <EOL> else : <EOL> raise IOError ( '<STR_LIT>' ) <EOL> self . fname = fname <EOL> with utils . smart_open ( fname_vocab ) as fin : <EOL> words = [ utils . to_unicode ( word ) . rstrip ( ) for word in fin ] <EOL> self . id2word = dict ( enumerate ( words ) ) <EOL> self . length = <NUM_LIT:0> <EOL> def __iter__ ( self ) : <EOL> """<STR_LIT>""" <EOL> lineno = - <NUM_LIT:1> <EOL> with utils . smart_open ( self . fname ) as fin : <EOL> for lineno , line in enumerate ( fin ) : <EOL> yield self . line2doc ( line ) <EOL> self . length = lineno + <NUM_LIT:1> <EOL> def line2doc ( self , line ) : <EOL> parts = utils . to_unicode ( line ) . split ( ) <EOL> if int ( parts [ <NUM_LIT:0> ] ) != len ( parts ) - <NUM_LIT:1> : <EOL> raise ValueError ( "<STR_LIT>" % ( self . fname , repr ( line ) ) ) <EOL> doc = [ part . rsplit ( '<STR_LIT::>' , <NUM_LIT:1> ) for part in parts [ <NUM_LIT:1> : ] ] <EOL> doc = [ ( int ( p1 ) , float ( p2 ) ) for p1 , p2 in doc ] <EOL> return doc <EOL> @ staticmethod <EOL> def save_corpus ( fname , corpus , id2word = None , metadata = False ) : <EOL> """<STR_LIT>""" <EOL> if id2word is None : <EOL> logger . info ( "<STR_LIT>" ) <EOL> id2word = utils . dict_from_corpus ( corpus ) <EOL> num_terms = len ( id2word ) <EOL> else : <EOL> num_terms = <NUM_LIT:1> + max ( [ - <NUM_LIT:1> ] + id2word . keys ( ) ) <EOL> logger . info ( "<STR_LIT>" % fname ) <EOL> with utils . smart_open ( fname , '<STR_LIT:wb>' ) as fout : <EOL> offsets = [ ] <EOL> for doc in corpus : <EOL> doc = list ( doc ) <EOL> offsets . append ( fout . tell ( ) ) <EOL> parts = [ "<STR_LIT>" % p for p in doc if abs ( p [ <NUM_LIT:1> ] ) > <NUM_LIT> ] <EOL> fout . write ( utils . to_utf8 ( "<STR_LIT>" % ( len ( doc ) , '<STR_LIT:U+0020>' . join ( parts ) ) ) ) <EOL> fname_vocab = fname + '<STR_LIT>' <EOL> logger . info ( "<STR_LIT>" % ( num_terms , fname_vocab ) ) <EOL> with utils . smart_open ( fname_vocab , '<STR_LIT:wb>' ) as fout : <EOL> for featureid in xrange ( num_terms ) : <EOL> fout . write ( utils . to_utf8 ( "<STR_LIT>" % id2word . get ( featureid , '<STR_LIT>' ) ) ) <EOL> return offsets <EOL> def docbyoffset ( self , offset ) : <EOL> """<STR_LIT>""" <EOL> with utils . smart_open ( self . fname ) as f : <EOL> f . seek ( offset ) <EOL> return self . line2doc ( f . readline ( ) ) </s>
94,743
"""<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> import os , sys , logging <EOL> import threading <EOL> import tempfile <EOL> import Queue <EOL> from gensim . models import lsimodel <EOL> from gensim import utils <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> SAVE_DEBUG = <NUM_LIT:0> <EOL> class Worker ( object ) : <EOL> def __init__ ( self ) : <EOL> self . model = None <EOL> def initialize ( self , myid , dispatcher , ** model_params ) : <EOL> self . lock_update = threading . Lock ( ) <EOL> self . jobsdone = <NUM_LIT:0> <EOL> self . myid = myid <EOL> self . dispatcher = dispatcher <EOL> self . finished = False <EOL> logger . info ( "<STR_LIT>" % myid ) <EOL> self . model = lsimodel . LsiModel ( ** model_params ) <EOL> def requestjob ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . model is None : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> job = None <EOL> while job is None and not self . finished : <EOL> try : <EOL> job = self . dispatcher . getjob ( self . myid ) <EOL> except Queue . Empty : <EOL> continue <EOL> if job is not None : <EOL> logger . info ( "<STR_LIT>" % ( self . myid , self . jobsdone ) ) <EOL> self . processjob ( job ) <EOL> self . dispatcher . jobdone ( self . myid ) <EOL> else : <EOL> logger . info ( "<STR_LIT>" % self . myid ) <EOL> @ utils . synchronous ( '<STR_LIT>' ) <EOL> def processjob ( self , job ) : <EOL> self . model . add_documents ( job ) <EOL> self . jobsdone += <NUM_LIT:1> <EOL> if SAVE_DEBUG and self . jobsdone % SAVE_DEBUG == <NUM_LIT:0> : <EOL> fname = os . path . join ( tempfile . gettempdir ( ) , '<STR_LIT>' ) <EOL> self . model . save ( fname ) <EOL> @ utils . synchronous ( '<STR_LIT>' ) <EOL> def getstate ( self ) : <EOL> logger . info ( "<STR_LIT>" % <EOL> ( self . myid , self . jobsdone ) ) <EOL> assert isinstance ( self . model . projection , lsimodel . Projection ) <EOL> self . finished = True <EOL> return self . model . projection <EOL> @ utils . synchronous ( '<STR_LIT>' ) <EOL> def reset ( self ) : <EOL> logger . info ( "<STR_LIT>" % self . myid ) <EOL> self . model . projection = self . model . projection . empty_like ( ) <EOL> self . finished = False <EOL> def exit ( self ) : <EOL> logger . info ( "<STR_LIT>" % self . myid ) <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> def main ( ) : <EOL> logging . basicConfig ( format = '<STR_LIT>' , level = logging . INFO ) <EOL> logger . info ( "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( sys . argv ) ) <EOL> program = os . path . basename ( sys . argv [ <NUM_LIT:0> ] )
if len ( sys . argv ) < <NUM_LIT:1> :
-8,526,574,115,946,171,000
"""<STR_LIT>""" <EOL> from __future__ import with_statement <EOL> import os , sys , logging <EOL> import threading <EOL> import tempfile <EOL> import Queue <EOL> from gensim . models import lsimodel <EOL> from gensim import utils <EOL> logger = logging . getLogger ( '<STR_LIT>' ) <EOL> SAVE_DEBUG = <NUM_LIT:0> <EOL> class Worker ( object ) : <EOL> def __init__ ( self ) : <EOL> self . model = None <EOL> def initialize ( self , myid , dispatcher , ** model_params ) : <EOL> self . lock_update = threading . Lock ( ) <EOL> self . jobsdone = <NUM_LIT:0> <EOL> self . myid = myid <EOL> self . dispatcher = dispatcher <EOL> self . finished = False <EOL> logger . info ( "<STR_LIT>" % myid ) <EOL> self . model = lsimodel . LsiModel ( ** model_params ) <EOL> def requestjob ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . model is None : <EOL> raise RuntimeError ( "<STR_LIT>" ) <EOL> job = None <EOL> while job is None and not self . finished : <EOL> try : <EOL> job = self . dispatcher . getjob ( self . myid ) <EOL> except Queue . Empty : <EOL> continue <EOL> if job is not None : <EOL> logger . info ( "<STR_LIT>" % ( self . myid , self . jobsdone ) ) <EOL> self . processjob ( job ) <EOL> self . dispatcher . jobdone ( self . myid ) <EOL> else : <EOL> logger . info ( "<STR_LIT>" % self . myid ) <EOL> @ utils . synchronous ( '<STR_LIT>' ) <EOL> def processjob ( self , job ) : <EOL> self . model . add_documents ( job ) <EOL> self . jobsdone += <NUM_LIT:1> <EOL> if SAVE_DEBUG and self . jobsdone % SAVE_DEBUG == <NUM_LIT:0> : <EOL> fname = os . path . join ( tempfile . gettempdir ( ) , '<STR_LIT>' ) <EOL> self . model . save ( fname ) <EOL> @ utils . synchronous ( '<STR_LIT>' ) <EOL> def getstate ( self ) : <EOL> logger . info ( "<STR_LIT>" % <EOL> ( self . myid , self . jobsdone ) ) <EOL> assert isinstance ( self . model . projection , lsimodel . Projection ) <EOL> self . finished = True <EOL> return self . model . projection <EOL> @ utils . synchronous ( '<STR_LIT>' ) <EOL> def reset ( self ) : <EOL> logger . info ( "<STR_LIT>" % self . myid ) <EOL> self . model . projection = self . model . projection . empty_like ( ) <EOL> self . finished = False <EOL> def exit ( self ) : <EOL> logger . info ( "<STR_LIT>" % self . myid ) <EOL> os . _exit ( <NUM_LIT:0> ) <EOL> def main ( ) : <EOL> logging . basicConfig ( format = '<STR_LIT>' , level = logging . INFO ) <EOL> logger . info ( "<STR_LIT>" % "<STR_LIT:U+0020>" . join ( sys . argv ) ) <EOL> program = os . path . basename ( sys . argv [ <NUM_LIT:0> ] ) <EOL> if len ( sys . argv ) < <NUM_LIT:1> : <EOL> print ( globals ( ) [ "<STR_LIT>" ] % locals ( ) ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> utils . pyro_daemon ( '<STR_LIT>' , Worker ( ) , random_suffix = True ) <EOL> logger . info ( "<STR_LIT>" % program ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
94,744
def load_id2word ( wordmap ) : <EOL> id2word = { } <EOL> with open ( wordmap ) as f : <EOL> f . readline ( ) <EOL> for l in f : <EOL> try : <EOL> word , id = l . strip ( ) . split ( ) <EOL> except : <EOL> print l <EOL> id = int ( id ) <EOL> id2word [ id ] = word <EOL> return id2word <EOL> def load_sentences ( topic_assign , id2word ) : <EOL> sentence_word = [ ] <EOL> sentence_topic = [ ] <EOL> topic_file = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> word_file = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> with open ( topic_assign ) as f : <EOL> for l in f : <EOL> words = l . strip ( ) . split ( )
tmp_sentence_word = [ ]
6,783,722,443,437,573,000
def load_id2word ( wordmap ) : <EOL> id2word = { } <EOL> with open ( wordmap ) as f : <EOL> f . readline ( ) <EOL> for l in f : <EOL> try : <EOL> word , id = l . strip ( ) . split ( ) <EOL> except : <EOL> print l <EOL> id = int ( id ) <EOL> id2word [ id ] = word <EOL> return id2word <EOL> def load_sentences ( topic_assign , id2word ) : <EOL> sentence_word = [ ] <EOL> sentence_topic = [ ] <EOL> topic_file = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> word_file = open ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> with open ( topic_assign ) as f : <EOL> for l in f : <EOL> words = l . strip ( ) . split ( ) <EOL> tmp_sentence_word = [ ] <EOL> tmp_sentence_topic = [ ] <EOL> for word in words : <EOL> id = <NUM_LIT:0> <EOL> try : <EOL> id , topic = word . split ( '<STR_LIT::>' ) <EOL> except : <EOL> print word <EOL> id = int ( id ) <EOL> topic = int ( topic ) <EOL> if id not in id2word : <EOL> continue <EOL> word = id2word [ id ] <EOL> print >> topic_file , topic , <EOL> print >> word_file , word , <EOL> print >> topic_file <EOL> print >> word_file <EOL> topic_file . close ( ) <EOL> word_file . close ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> id2word = load_id2word ( "<STR_LIT>" ) <EOL> sen_word , sen_topic = load_sentences ( "<STR_LIT>" , id2word ) </s>
94,745
from os . path import join , exists , abspath <EOL> from os import makedirs , listdir <EOL> import time <EOL> import datetime <EOL> import uuid <EOL> def get_data_path ( dataset = "<STR_LIT>" ) : <EOL> full_path = abspath ( '<STR_LIT:.>' ) <EOL> path = join ( full_path , '<STR_LIT:data>' , dataset ) <EOL> return path_exists ( path ) <EOL> def get_output_path ( ) : <EOL> full_path = abspath ( '<STR_LIT:.>' ) <EOL> path = join ( full_path , '<STR_LIT>' ) <EOL> return path_exists ( path ) <EOL> def get_training_evaluation_path ( root_path ) : <EOL> return path_exists ( join ( root_path , '<STR_LIT>' ) ) <EOL> def get_plot_evaluation_path_for_model ( root_path , extension ) : <EOL> return join ( get_training_evaluation_path ( root_path ) , '<STR_LIT>' % ( extension ) ) <EOL> def get_custom_eval_path ( i , root_path ) : <EOL> r_path = path_exists ( join ( root_path , '<STR_LIT>' ) ) <EOL> return join ( r_path , '<STR_LIT>' % str ( i ) ) <EOL> def get_plot_evaluation_path ( ) : <EOL> return join ( get_output_path ( ) , '<STR_LIT>' ) <EOL> def get_pickle_path ( root_path ) : <EOL> return path_exists ( join ( root_path , '<STR_LIT>' ) ) <EOL> def get_model_path ( root_path , type , n_in , n_hidden , n_out ) : <EOL> return join ( get_pickle_path ( root_path ) , '<STR_LIT>' % ( type , str ( n_in ) , str ( n_hidden ) , str ( n_out ) ) ) <EOL> def get_logging_path ( root_path ) : <EOL> t = time . time ( ) <EOL> n = "<STR_LIT>" % datetime . datetime . fromtimestamp ( t ) . strftime ( '<STR_LIT>' ) <EOL> return join ( root_path , n ) <EOL> def find_logging_path ( id ) : <EOL> out = get_output_path ( ) <EOL> dirs = listdir ( out ) <EOL> path = '<STR_LIT>' <EOL> for d in dirs : <EOL> if str ( id ) in d : <EOL> path = join ( out , d ) <EOL> if path == '<STR_LIT>' : <EOL> raise '<STR_LIT>' <EOL> for f in listdir ( path ) : <EOL> if '<STR_LIT>' in f : <EOL> return join ( path , f ) <EOL> def get_root_output_path ( type , n_in , n_hidden , n_out , id ) : <EOL> root = '<STR_LIT>' % ( str ( id ) , type , str ( n_in ) , str ( n_hidden ) , str ( n_out ) ) <EOL> path = join ( get_output_path ( ) , root ) <EOL> return path <EOL> def create_root_output_path ( type , n_in , n_hidden , n_out ) : <EOL> t = time . time ( ) <EOL> d = datetime . datetime . fromtimestamp ( t ) . strftime ( '<STR_LIT>' ) <EOL> root = '<STR_LIT>' % ( str ( d ) , type , str ( n_in ) , str ( n_hidden ) , str ( n_out ) ) <EOL> path = join ( get_output_path ( ) , root ) <EOL> if exists ( path ) : path += "<STR_LIT>" % str ( uuid . uuid4 ( ) ) <EOL> return path_exists ( path ) <EOL> def path_exists ( path ) : <EOL> if not exists ( path ) :
makedirs ( path )
2,530,063,441,910,558,000
from os . path import join , exists , abspath <EOL> from os import makedirs , listdir <EOL> import time <EOL> import datetime <EOL> import uuid <EOL> def get_data_path ( dataset = "<STR_LIT>" ) : <EOL> full_path = abspath ( '<STR_LIT:.>' ) <EOL> path = join ( full_path , '<STR_LIT:data>' , dataset ) <EOL> return path_exists ( path ) <EOL> def get_output_path ( ) : <EOL> full_path = abspath ( '<STR_LIT:.>' ) <EOL> path = join ( full_path , '<STR_LIT>' ) <EOL> return path_exists ( path ) <EOL> def get_training_evaluation_path ( root_path ) : <EOL> return path_exists ( join ( root_path , '<STR_LIT>' ) ) <EOL> def get_plot_evaluation_path_for_model ( root_path , extension ) : <EOL> return join ( get_training_evaluation_path ( root_path ) , '<STR_LIT>' % ( extension ) ) <EOL> def get_custom_eval_path ( i , root_path ) : <EOL> r_path = path_exists ( join ( root_path , '<STR_LIT>' ) ) <EOL> return join ( r_path , '<STR_LIT>' % str ( i ) ) <EOL> def get_plot_evaluation_path ( ) : <EOL> return join ( get_output_path ( ) , '<STR_LIT>' ) <EOL> def get_pickle_path ( root_path ) : <EOL> return path_exists ( join ( root_path , '<STR_LIT>' ) ) <EOL> def get_model_path ( root_path , type , n_in , n_hidden , n_out ) : <EOL> return join ( get_pickle_path ( root_path ) , '<STR_LIT>' % ( type , str ( n_in ) , str ( n_hidden ) , str ( n_out ) ) ) <EOL> def get_logging_path ( root_path ) : <EOL> t = time . time ( ) <EOL> n = "<STR_LIT>" % datetime . datetime . fromtimestamp ( t ) . strftime ( '<STR_LIT>' ) <EOL> return join ( root_path , n ) <EOL> def find_logging_path ( id ) : <EOL> out = get_output_path ( ) <EOL> dirs = listdir ( out ) <EOL> path = '<STR_LIT>' <EOL> for d in dirs : <EOL> if str ( id ) in d : <EOL> path = join ( out , d ) <EOL> if path == '<STR_LIT>' : <EOL> raise '<STR_LIT>' <EOL> for f in listdir ( path ) : <EOL> if '<STR_LIT>' in f : <EOL> return join ( path , f ) <EOL> def get_root_output_path ( type , n_in , n_hidden , n_out , id ) : <EOL> root = '<STR_LIT>' % ( str ( id ) , type , str ( n_in ) , str ( n_hidden ) , str ( n_out ) ) <EOL> path = join ( get_output_path ( ) , root ) <EOL> return path <EOL> def create_root_output_path ( type , n_in , n_hidden , n_out ) : <EOL> t = time . time ( ) <EOL> d = datetime . datetime . fromtimestamp ( t ) . strftime ( '<STR_LIT>' ) <EOL> root = '<STR_LIT>' % ( str ( d ) , type , str ( n_in ) , str ( n_hidden ) , str ( n_out ) ) <EOL> path = join ( get_output_path ( ) , root ) <EOL> if exists ( path ) : path += "<STR_LIT>" % str ( uuid . uuid4 ( ) ) <EOL> return path_exists ( path ) <EOL> def path_exists ( path ) : <EOL> if not exists ( path ) : <EOL> makedirs ( path ) <EOL> return path </s>
94,746
"""<STR_LIT>""" <EOL> import unittest <EOL> from anytop import common <EOL> class ZoomTestCase ( unittest . TestCase ) : <EOL> def test_zoom ( self ) :
self . assertEqual ( common . get_zoom ( <NUM_LIT:0> , <NUM_LIT:100> ) , <NUM_LIT:1> )
-5,262,530,024,504,352,000
"""<STR_LIT>""" <EOL> import unittest <EOL> from anytop import common <EOL> class ZoomTestCase ( unittest . TestCase ) : <EOL> def test_zoom ( self ) : <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT:0> , <NUM_LIT:100> ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT:100> , <NUM_LIT:100> ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:2> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:3> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:5> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:10> ) <EOL> self . assertEqual ( common . get_zoom ( <NUM_LIT> , <NUM_LIT:100> ) , <NUM_LIT:20> ) <EOL> def suite ( ) : <EOL> return unittest . TestSuite ( ( <EOL> unittest . makeSuite ( ZoomTestCase ) , <EOL> ) ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> unittest . TextTestRunner ( verbosity = <NUM_LIT:1> ) . run ( suite ( ) ) </s>
94,747
from theano import tensor as T <EOL> from theano import function <EOL> def evaluate ( x , y , expr , x_value , y_value ) : <EOL> """<STR_LIT>""" <EOL> return function ( [ x , y ] , expr ) ( x_value , y_value ) <EOL> if __name__ == "<STR_LIT:__main__>" :
x = T . iscalar ( )
-2,023,488,659,435,539,500
from theano import tensor as T <EOL> from theano import function <EOL> def evaluate ( x , y , expr , x_value , y_value ) : <EOL> """<STR_LIT>""" <EOL> return function ( [ x , y ] , expr ) ( x_value , y_value ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> x = T . iscalar ( ) <EOL> y = T . iscalar ( ) <EOL> z = x + y <EOL> assert evaluate ( x , y , z , <NUM_LIT:1> , <NUM_LIT:2> ) == <NUM_LIT:3> <EOL> print "<STR_LIT>" </s>
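The masked record above compiles a Theano expression and evaluates it on concrete scalars. A minimal unmasked sketch of the same compile-then-call pattern (variable names and test values are chosen here for illustration, not taken from the record):

from theano import tensor as T
from theano import function

# Build a symbolic sum of two integer scalars, compile it, and call the
# compiled function on concrete values -- the pattern the evaluate() helper wraps.
x = T.iscalar()
y = T.iscalar()
f = function([x, y], x + y)
assert f(1, 2) == 3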
94,748
import numpy <EOL> from fuel . streams import DataStream <EOL> from fuel . transformers import Mapping <EOL> from fuel . schemes import SequentialScheme <EOL> def build_mean_covariance ( dataset , batch_size ) : <EOL> """<STR_LIT>""" <EOL> data_stream = Mapping ( <EOL> data_stream = DataStream ( <EOL> dataset , <EOL> iteration_scheme = SequentialScheme (
examples = dataset . num_examples ,
6,440,752,729,000,646,000
import numpy <EOL> from fuel . streams import DataStream <EOL> from fuel . transformers import Mapping <EOL> from fuel . schemes import SequentialScheme <EOL> def build_mean_covariance ( dataset , batch_size ) : <EOL> """<STR_LIT>""" <EOL> data_stream = Mapping ( <EOL> data_stream = DataStream ( <EOL> dataset , <EOL> iteration_scheme = SequentialScheme ( <EOL> examples = dataset . num_examples , <EOL> batch_size = batch_size <EOL> ) <EOL> ) , <EOL> mapping = lambda x : x [ dataset . sources . index ( '<STR_LIT>' ) ] , <EOL> ) <EOL> dataset_iterator = data_stream . get_epoch_iterator ( ) <EOL> unnormalized_mean = <NUM_LIT:0.> <EOL> unnormalized_dot = <NUM_LIT:0.> <EOL> for data in dataset_iterator : <EOL> unnormalized_dot += data . T . dot ( data ) / batch_size <EOL> unnormalized_mean += data . sum ( axis = <NUM_LIT:0> ) / batch_size <EOL> del data <EOL> X_mean = unnormalized_mean * ( float ( batch_size ) / dataset . num_examples ) <EOL> X_cov = unnormalized_dot * ( float ( batch_size ) / dataset . num_examples ) - numpy . outer ( X_mean , X_mean ) <EOL> return X_mean , X_cov </s>
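The record above streams a Fuel dataset in minibatches, accumulating an unnormalized sum and Gram matrix before forming the mean and covariance. The same statistic can be sketched with plain NumPy (the function name, batch interface, and array sizes below are assumptions for illustration):

import numpy as np

def batched_mean_cov(batches):
    # Accumulate sum(x) and x^T.x over minibatches, then form
    # E[x x^T] - outer(mean, mean), the quantity build_mean_covariance returns.
    n, s, ss = 0, None, None
    for x in batches:
        x = np.asarray(x, dtype=np.float64)
        n += x.shape[0]
        s = x.sum(axis=0) if s is None else s + x.sum(axis=0)
        ss = x.T.dot(x) if ss is None else ss + x.T.dot(x)
    mean = s / n
    cov = ss / n - np.outer(mean, mean)
    return mean, cov

data = np.random.rand(100, 5)
mean, cov = batched_mean_cov(np.array_split(data, 10))
assert np.allclose(mean, data.mean(axis=0))
assert np.allclose(cov, np.cov(data, rowvar=False, bias=True))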
94,749
from __future__ import print_function <EOL> from emails . transformer import HTMLParser <EOL> def test_parser_inputs ( ) : <EOL> def _cleaned_body ( s ) : <EOL> for el in ( '<STR_LIT:html>' , '<STR_LIT:body>' ) :
s = s . replace ( '<STR_LIT>' % el , '<STR_LIT>' ) . replace ( '<STR_LIT>' % el , '<STR_LIT>' )
-3,356,134,940,045,620,700
from __future__ import print_function <EOL> from emails . transformer import HTMLParser <EOL> def test_parser_inputs ( ) : <EOL> def _cleaned_body ( s ) : <EOL> for el in ( '<STR_LIT:html>' , '<STR_LIT:body>' ) : <EOL> s = s . replace ( '<STR_LIT>' % el , '<STR_LIT>' ) . replace ( '<STR_LIT>' % el , '<STR_LIT>' ) <EOL> return s <EOL> for html , result in ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) : <EOL> r = HTMLParser ( html = html ) . to_string ( ) <EOL> print ( "<STR_LIT>" , html . __repr__ ( ) , "<STR_LIT>" , r . __repr__ ( ) , sep = '<STR_LIT>' ) <EOL> assert _cleaned_body ( r ) == result <EOL> def test_breaking_title ( ) : <EOL> assert '<STR_LIT>' not in HTMLParser ( html = "<STR_LIT>" ) . to_string ( ) </s>
94,750
class DownloadOptionChoice ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , short_description , long_description , bool_options_description = None ) : <EOL> self . value = value <EOL> self . short_description = short_description <EOL> self . long_description = long_description <EOL> self . bool_options_description = bool_options_description <EOL> class DownloadOption ( object ) : <EOL> """<STR_LIT>"""
def __init__ ( self , option_types , long_description , short_description , default_value ,
-2,564,298,794,928,070,700
class DownloadOptionChoice ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value , short_description , long_description , bool_options_description = None ) : <EOL> self . value = value <EOL> self . short_description = short_description <EOL> self . long_description = long_description <EOL> self . bool_options_description = bool_options_description <EOL> class DownloadOption ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , option_types , long_description , short_description , default_value , <EOL> default_value_description ) : <EOL> self . option_types = option_types <EOL> self . long_description = long_description <EOL> self . short_description = short_description <EOL> self . default_value = default_value <EOL> self . default_value_description = default_value_description </s>
94,751
import json <EOL> import logging <EOL> from twisted . internet import interfaces , defer <EOL> from zope . interface import implements <EOL> from lbrynet . interfaces import IRequestHandler <EOL> log = logging . getLogger ( __name__ ) <EOL> class ServerRequestHandler ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( interfaces . IPushProducer , interfaces . IConsumer , IRequestHandler ) <EOL> def __init__ ( self , consumer ) : <EOL> self . consumer = consumer <EOL> self . production_paused = False <EOL> self . request_buff = '<STR_LIT>' <EOL> self . response_buff = '<STR_LIT>' <EOL> self . producer = None <EOL> self . request_received = False <EOL> self . CHUNK_SIZE = <NUM_LIT:2> ** <NUM_LIT> <EOL> self . query_handlers = { } <EOL> self . blob_sender = None <EOL> self . consumer . registerProducer ( self , True ) <EOL> def pauseProducing ( self ) : <EOL> self . production_paused = True <EOL> def stopProducing ( self ) : <EOL> if self . producer is not None : <EOL> self . producer . stopProducing ( ) <EOL> self . producer = None <EOL> self . production_paused = True <EOL> self . consumer . unregisterProducer ( ) <EOL> def resumeProducing ( self ) : <EOL> from twisted . internet import reactor <EOL> self . production_paused = False <EOL> self . _produce_more ( ) <EOL> if self . producer is not None : <EOL> reactor . callLater ( <NUM_LIT:0> , self . producer . resumeProducing ) <EOL> def _produce_more ( self ) : <EOL> from twisted . internet import reactor <EOL> if self . production_paused is False : <EOL> chunk = self . response_buff [ : self . CHUNK_SIZE ] <EOL> self . response_buff = self . response_buff [ self . CHUNK_SIZE : ] <EOL> if chunk != '<STR_LIT>' : <EOL> log . debug ( "<STR_LIT>" , str ( len ( chunk ) ) ) <EOL> self . consumer . write ( chunk ) <EOL> reactor . callLater ( <NUM_LIT:0> , self . _produce_more ) <EOL> def registerProducer ( self , producer , streaming ) : <EOL> self . producer = producer <EOL> assert streaming is False <EOL> producer . resumeProducing ( ) <EOL> def unregisterProducer ( self ) : <EOL> self . producer = None <EOL> def write ( self , data ) : <EOL> from twisted . internet import reactor <EOL> self . response_buff = self . response_buff + data <EOL> self . _produce_more ( ) <EOL> def get_more_data ( ) : <EOL> if self . producer is not None : <EOL> log . debug ( "<STR_LIT>" ) <EOL> self . producer . resumeProducing ( ) <EOL> reactor . callLater ( <NUM_LIT:0> , get_more_data ) <EOL> def data_received ( self , data ) : <EOL> log . debug ( "<STR_LIT>" ) <EOL> log . debug ( "<STR_LIT:%s>" , str ( data ) ) <EOL> if self . request_received is False : <EOL> self . request_buff = self . request_buff + data <EOL> msg = self . try_to_parse_request ( self . request_buff ) <EOL> if msg is not None : <EOL> self . request_buff = '<STR_LIT>' <EOL> d = self . handle_request ( msg ) <EOL> if self . blob_sender is not None : <EOL> d . addCallback ( lambda _ : self . blob_sender . send_blob_if_requested ( self ) ) <EOL> d . addCallbacks ( lambda _ : self . finished_response ( ) , self . request_failure_handler ) <EOL> else : <EOL> log . info ( "<STR_LIT>" ) <EOL> log . info ( "<STR_LIT>" , str ( self . request_buff ) ) <EOL> else : <EOL> log . warning ( "<STR_LIT>" ) <EOL> def register_query_handler ( self , query_handler , query_identifiers ) : <EOL> self . query_handlers [ query_handler ] = query_identifiers <EOL> def register_blob_sender ( self , blob_sender ) : <EOL> self . blob_sender = blob_sender <EOL> def request_failure_handler ( self , err ) : <EOL> log . 
warning ( "<STR_LIT>" , err . getErrorMessage ( ) ) <EOL> self . stopProducing ( )
return err
5,735,818,177,610,942,000
import json <EOL> import logging <EOL> from twisted . internet import interfaces , defer <EOL> from zope . interface import implements <EOL> from lbrynet . interfaces import IRequestHandler <EOL> log = logging . getLogger ( __name__ ) <EOL> class ServerRequestHandler ( object ) : <EOL> """<STR_LIT>""" <EOL> implements ( interfaces . IPushProducer , interfaces . IConsumer , IRequestHandler ) <EOL> def __init__ ( self , consumer ) : <EOL> self . consumer = consumer <EOL> self . production_paused = False <EOL> self . request_buff = '<STR_LIT>' <EOL> self . response_buff = '<STR_LIT>' <EOL> self . producer = None <EOL> self . request_received = False <EOL> self . CHUNK_SIZE = <NUM_LIT:2> ** <NUM_LIT> <EOL> self . query_handlers = { } <EOL> self . blob_sender = None <EOL> self . consumer . registerProducer ( self , True ) <EOL> def pauseProducing ( self ) : <EOL> self . production_paused = True <EOL> def stopProducing ( self ) : <EOL> if self . producer is not None : <EOL> self . producer . stopProducing ( ) <EOL> self . producer = None <EOL> self . production_paused = True <EOL> self . consumer . unregisterProducer ( ) <EOL> def resumeProducing ( self ) : <EOL> from twisted . internet import reactor <EOL> self . production_paused = False <EOL> self . _produce_more ( ) <EOL> if self . producer is not None : <EOL> reactor . callLater ( <NUM_LIT:0> , self . producer . resumeProducing ) <EOL> def _produce_more ( self ) : <EOL> from twisted . internet import reactor <EOL> if self . production_paused is False : <EOL> chunk = self . response_buff [ : self . CHUNK_SIZE ] <EOL> self . response_buff = self . response_buff [ self . CHUNK_SIZE : ] <EOL> if chunk != '<STR_LIT>' : <EOL> log . debug ( "<STR_LIT>" , str ( len ( chunk ) ) ) <EOL> self . consumer . write ( chunk ) <EOL> reactor . callLater ( <NUM_LIT:0> , self . _produce_more ) <EOL> def registerProducer ( self , producer , streaming ) : <EOL> self . producer = producer <EOL> assert streaming is False <EOL> producer . resumeProducing ( ) <EOL> def unregisterProducer ( self ) : <EOL> self . producer = None <EOL> def write ( self , data ) : <EOL> from twisted . internet import reactor <EOL> self . response_buff = self . response_buff + data <EOL> self . _produce_more ( ) <EOL> def get_more_data ( ) : <EOL> if self . producer is not None : <EOL> log . debug ( "<STR_LIT>" ) <EOL> self . producer . resumeProducing ( ) <EOL> reactor . callLater ( <NUM_LIT:0> , get_more_data ) <EOL> def data_received ( self , data ) : <EOL> log . debug ( "<STR_LIT>" ) <EOL> log . debug ( "<STR_LIT:%s>" , str ( data ) ) <EOL> if self . request_received is False : <EOL> self . request_buff = self . request_buff + data <EOL> msg = self . try_to_parse_request ( self . request_buff ) <EOL> if msg is not None : <EOL> self . request_buff = '<STR_LIT>' <EOL> d = self . handle_request ( msg ) <EOL> if self . blob_sender is not None : <EOL> d . addCallback ( lambda _ : self . blob_sender . send_blob_if_requested ( self ) ) <EOL> d . addCallbacks ( lambda _ : self . finished_response ( ) , self . request_failure_handler ) <EOL> else : <EOL> log . info ( "<STR_LIT>" ) <EOL> log . info ( "<STR_LIT>" , str ( self . request_buff ) ) <EOL> else : <EOL> log . warning ( "<STR_LIT>" ) <EOL> def register_query_handler ( self , query_handler , query_identifiers ) : <EOL> self . query_handlers [ query_handler ] = query_identifiers <EOL> def register_blob_sender ( self , blob_sender ) : <EOL> self . blob_sender = blob_sender <EOL> def request_failure_handler ( self , err ) : <EOL> log . 
warning ( "<STR_LIT>" , err . getErrorMessage ( ) ) <EOL> self . stopProducing ( ) <EOL> return err <EOL> def finished_response ( self ) : <EOL> self . request_received = False <EOL> self . _produce_more ( ) <EOL> def send_response ( self , msg ) : <EOL> m = json . dumps ( msg ) <EOL> log . info ( "<STR_LIT>" , str ( len ( m ) ) ) <EOL> log . debug ( "<STR_LIT>" , str ( m ) ) <EOL> self . response_buff = self . response_buff + m <EOL> self . _produce_more ( ) <EOL> return True <EOL> def handle_request ( self , msg ) : <EOL> log . debug ( "<STR_LIT>" ) <EOL> log . debug ( str ( msg ) ) <EOL> def create_response_message ( results ) : <EOL> response = { } <EOL> for success , result in results : <EOL> if success is True : <EOL> response . update ( result ) <EOL> else : <EOL> return result <EOL> log . debug ( "<STR_LIT>" , str ( response ) ) <EOL> return response <EOL> def log_errors ( err ) : <EOL> log . warning ( "<STR_LIT>" , err . getErrorMessage ( ) ) <EOL> return err <EOL> def send_response ( response ) : <EOL> self . send_response ( response ) <EOL> return True <EOL> ds = [ ] <EOL> for query_handler , query_identifiers in self . query_handlers . iteritems ( ) : <EOL> queries = { q_i : msg [ q_i ] for q_i in query_identifiers if q_i in msg } <EOL> d = query_handler . handle_queries ( queries ) <EOL> d . addErrback ( log_errors ) <EOL> ds . append ( d ) <EOL> dl = defer . DeferredList ( ds ) <EOL> dl . addCallback ( create_response_message ) <EOL> dl . addCallback ( send_response ) <EOL> return dl <EOL> def try_to_parse_request ( self , request_buff ) : <EOL> try : <EOL> msg = json . loads ( request_buff ) <EOL> return msg <EOL> except ValueError : <EOL> return None </s>
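The request handler in the record above accumulates incoming data in a buffer and keeps retrying json.loads until a complete JSON request has arrived. A minimal, transport-free sketch of that framing idea (the class name, method name, and sample payload are illustrative, not part of the original):

import json

class JsonFrameBuffer(object):
    # Accumulate raw text until it parses as one JSON document, mirroring
    # the data_received / try_to_parse_request pair in the record above.
    def __init__(self):
        self.buff = ''

    def feed(self, data):
        self.buff += data
        try:
            msg = json.loads(self.buff)
        except ValueError:
            return None  # incomplete message: wait for more data
        self.buff = ''
        return msg

buf = JsonFrameBuffer()
assert buf.feed('{"que') is None
assert buf.feed('ry": 1}') == {"query": 1}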
94,752
import logging <EOL> from zope . interface import implements <EOL> from lbrynet . cryptstream . CryptBlob import CryptBlobInfo <EOL> from lbrynet . interfaces import IMetadataHandler <EOL> log = logging . getLogger ( __name__ ) <EOL> class LBRYFileMetadataHandler ( object ) : <EOL> implements ( IMetadataHandler ) <EOL> def __init__ ( self , stream_hash , stream_info_manager , download_manager ) : <EOL> self . stream_hash = stream_hash <EOL> self . stream_info_manager = stream_info_manager <EOL> self . download_manager = download_manager
self . _final_blob_num = None
-4,627,571,335,263,252,000
import logging <EOL> from zope . interface import implements <EOL> from lbrynet . cryptstream . CryptBlob import CryptBlobInfo <EOL> from lbrynet . interfaces import IMetadataHandler <EOL> log = logging . getLogger ( __name__ ) <EOL> class LBRYFileMetadataHandler ( object ) : <EOL> implements ( IMetadataHandler ) <EOL> def __init__ ( self , stream_hash , stream_info_manager , download_manager ) : <EOL> self . stream_hash = stream_hash <EOL> self . stream_info_manager = stream_info_manager <EOL> self . download_manager = download_manager <EOL> self . _final_blob_num = None <EOL> def get_initial_blobs ( self ) : <EOL> d = self . stream_info_manager . get_blobs_for_stream ( self . stream_hash ) <EOL> d . addCallback ( self . _format_initial_blobs_for_download_manager ) <EOL> return d <EOL> def final_blob_num ( self ) : <EOL> return self . _final_blob_num <EOL> def _format_initial_blobs_for_download_manager ( self , blob_infos ) : <EOL> infos = [ ] <EOL> for blob_hash , blob_num , iv , length in blob_infos : <EOL> if blob_hash is not None : <EOL> infos . append ( CryptBlobInfo ( blob_hash , blob_num , length , iv ) ) <EOL> else : <EOL> log . debug ( "<STR_LIT>" , str ( blob_num - <NUM_LIT:1> ) ) <EOL> self . _final_blob_num = blob_num - <NUM_LIT:1> <EOL> return infos </s>
94,753
from zope . interface import implements <EOL> from lbrynet . interfaces import IMetadataHandler , IRequestCreator <EOL> from lbrynet . core . client . ClientRequest import ClientRequest , ClientPaidRequest <EOL> from lbrynet . core . Error import InsufficientFundsError , InvalidResponseError , RequestCanceledError <EOL> from lbrynet . core . Error import NoResponseError , ConnectionClosedBeforeResponseError <EOL> from ValuableBlobInfo import ValuableBlobInfo <EOL> import datetime <EOL> import logging <EOL> import random <EOL> from twisted . internet import defer <EOL> from twisted . python . failure import Failure <EOL> from collections import defaultdict <EOL> log = logging . getLogger ( __name__ ) <EOL> class BlindMetadataHandler ( object ) : <EOL> implements ( IMetadataHandler , IRequestCreator ) <EOL> def __init__ ( self , info_manager , peers , peer_finder , approved_peers , payment_rate_manager , wallet , <EOL> download_manager ) : <EOL> self . info_manager = info_manager <EOL> self . payment_rate_manager = payment_rate_manager <EOL> self . wallet = wallet <EOL> self . download_manager = download_manager <EOL> self . _peers = peers <EOL> self . peer_finder = peer_finder <EOL> self . approved_peers = approved_peers <EOL> self . _valuable_protocol_prices = { } <EOL> self . _info_protocol_prices = { } <EOL> self . _price_disagreements = [ ] <EOL> self . _incompatible_peers = [ ] <EOL> self . _last_blob_hashes_from_peers = { } <EOL> self . _valuable_hashes = { } <EOL> self . _blob_infos = { } <EOL> self . _peer_search_results = defaultdict ( list ) <EOL> def get_initial_blobs ( self ) : <EOL> d = self . info_manager . get_all_blob_infos ( ) <EOL> return d <EOL> def final_blob_num ( self ) : <EOL> return None <EOL> def send_next_request ( self , peer , protocol ) : <EOL> sent_request = False <EOL> if self . _should_send_request_to ( peer ) : <EOL> v_r = self . _get_valuable_blob_request ( peer ) <EOL> if v_r is not None : <EOL> v_p_r = self . _get_valuable_price_request ( peer , protocol ) <EOL> reserved_points = self . _reserve_points_valuable ( peer , protocol , v_r . max_pay_units ) <EOL> if reserved_points is not None : <EOL> d1 = protocol . add_request ( v_r ) <EOL> d1 . addCallback ( self . _handle_valuable_blob_response , peer , v_r ) <EOL> d1 . addBoth ( self . _pay_or_cancel_payment , protocol , reserved_points , <EOL> self . _info_protocol_prices ) <EOL> d1 . addErrback ( self . _request_failed , "<STR_LIT>" , peer ) <EOL> sent_request = True <EOL> if v_p_r is not None : <EOL> d2 = protocol . add_request ( v_p_r ) <EOL> d2 . addCallback ( self . _handle_valuable_price_response , peer , v_p_r , protocol ) <EOL> d2 . addErrback ( self . _request_failed , "<STR_LIT>" , peer ) <EOL> else : <EOL> return defer . fail ( InsufficientFundsError ( ) ) <EOL> i_r = self . _get_info_request ( peer ) <EOL> if i_r is not None : <EOL> i_p_r = self . _get_info_price_request ( peer , protocol ) <EOL> reserved_points = self . _reserve_points_info ( peer , protocol , i_r . max_pay_units ) <EOL> if reserved_points is not None : <EOL> d3 = protocol . add_request ( i_r ) <EOL> d3 . addCallback ( self . _handle_info_response , peer , i_r , protocol , reserved_points ) <EOL> d3 . addBoth ( self . _pay_or_cancel_payment , protocol , reserved_points , <EOL> self . _valuable_protocol_prices ) <EOL> d3 . addErrback ( self . _request_failed , "<STR_LIT>" , peer , reserved_points ) <EOL> sent_request = True <EOL> if i_p_r is not None : <EOL> d4 = protocol . add_request ( i_p_r ) <EOL> d4 . 
addCallback ( self . _handle_info_price_response , peer , i_p_r , protocol ) <EOL> d4 . addErrback ( self . _request_failed , "<STR_LIT>" , peer ) <EOL> else : <EOL> return defer . fail ( InsufficientFundsError ( ) ) <EOL> return defer . succeed ( sent_request ) <EOL> def get_new_peers ( self ) : <EOL> peers = None <EOL> if self . _peer_search_results : <EOL> peers = self . _peer_search_results . keys ( ) <EOL> elif len ( self . approved_peers ) != <NUM_LIT:0> : <EOL> peers = random . sample ( self . approved_peers , len ( self . approved_peers ) ) <EOL> return defer . succeed ( peers ) <EOL> def _should_send_request_to ( self , peer ) : <EOL> if peer in self . _incompatible_peers : <EOL> return False <EOL> if self . _peers [ peer ] >= <NUM_LIT:0> : <EOL> return True <EOL> return False <EOL> def _get_valuable_blob_request ( self , peer ) : <EOL> blob_hash = None <EOL> if peer in self . _last_blob_hashes_from_peers : <EOL> h , expire_time = self . _last_blob_hashes_from_peers [ peer ] <EOL> if datetime . datetime . now ( ) > expire_time : <EOL> del self . _last_blob_hashes_from_peers [ peer ] <EOL> else : <EOL> blob_hash = h <EOL> r_dict = { '<STR_LIT>' : { '<STR_LIT>' : blob_hash , '<STR_LIT>' : <NUM_LIT:20> } } <EOL> response_identifier = '<STR_LIT>' <EOL> request = ClientPaidRequest ( r_dict , response_identifier , <NUM_LIT:20> ) <EOL> return request <EOL> def _get_valuable_price_request ( self , peer , protocol ) : <EOL> request = None <EOL> if not protocol in self . _valuable_protocol_prices : <EOL> self . _valuable_protocol_prices [ protocol ] = self . payment_rate_manager . get_rate_valuable_blob_hash ( peer )
request_dict = { '<STR_LIT>' : self . _valuable_protocol_prices [ protocol ] }
-609,959,210,638,543,100
from zope . interface import implements <EOL> from lbrynet . interfaces import IMetadataHandler , IRequestCreator <EOL> from lbrynet . core . client . ClientRequest import ClientRequest , ClientPaidRequest <EOL> from lbrynet . core . Error import InsufficientFundsError , InvalidResponseError , RequestCanceledError <EOL> from lbrynet . core . Error import NoResponseError , ConnectionClosedBeforeResponseError <EOL> from ValuableBlobInfo import ValuableBlobInfo <EOL> import datetime <EOL> import logging <EOL> import random <EOL> from twisted . internet import defer <EOL> from twisted . python . failure import Failure <EOL> from collections import defaultdict <EOL> log = logging . getLogger ( __name__ ) <EOL> class BlindMetadataHandler ( object ) : <EOL> implements ( IMetadataHandler , IRequestCreator ) <EOL> def __init__ ( self , info_manager , peers , peer_finder , approved_peers , payment_rate_manager , wallet , <EOL> download_manager ) : <EOL> self . info_manager = info_manager <EOL> self . payment_rate_manager = payment_rate_manager <EOL> self . wallet = wallet <EOL> self . download_manager = download_manager <EOL> self . _peers = peers <EOL> self . peer_finder = peer_finder <EOL> self . approved_peers = approved_peers <EOL> self . _valuable_protocol_prices = { } <EOL> self . _info_protocol_prices = { } <EOL> self . _price_disagreements = [ ] <EOL> self . _incompatible_peers = [ ] <EOL> self . _last_blob_hashes_from_peers = { } <EOL> self . _valuable_hashes = { } <EOL> self . _blob_infos = { } <EOL> self . _peer_search_results = defaultdict ( list ) <EOL> def get_initial_blobs ( self ) : <EOL> d = self . info_manager . get_all_blob_infos ( ) <EOL> return d <EOL> def final_blob_num ( self ) : <EOL> return None <EOL> def send_next_request ( self , peer , protocol ) : <EOL> sent_request = False <EOL> if self . _should_send_request_to ( peer ) : <EOL> v_r = self . _get_valuable_blob_request ( peer ) <EOL> if v_r is not None : <EOL> v_p_r = self . _get_valuable_price_request ( peer , protocol ) <EOL> reserved_points = self . _reserve_points_valuable ( peer , protocol , v_r . max_pay_units ) <EOL> if reserved_points is not None : <EOL> d1 = protocol . add_request ( v_r ) <EOL> d1 . addCallback ( self . _handle_valuable_blob_response , peer , v_r ) <EOL> d1 . addBoth ( self . _pay_or_cancel_payment , protocol , reserved_points , <EOL> self . _info_protocol_prices ) <EOL> d1 . addErrback ( self . _request_failed , "<STR_LIT>" , peer ) <EOL> sent_request = True <EOL> if v_p_r is not None : <EOL> d2 = protocol . add_request ( v_p_r ) <EOL> d2 . addCallback ( self . _handle_valuable_price_response , peer , v_p_r , protocol ) <EOL> d2 . addErrback ( self . _request_failed , "<STR_LIT>" , peer ) <EOL> else : <EOL> return defer . fail ( InsufficientFundsError ( ) ) <EOL> i_r = self . _get_info_request ( peer ) <EOL> if i_r is not None : <EOL> i_p_r = self . _get_info_price_request ( peer , protocol ) <EOL> reserved_points = self . _reserve_points_info ( peer , protocol , i_r . max_pay_units ) <EOL> if reserved_points is not None : <EOL> d3 = protocol . add_request ( i_r ) <EOL> d3 . addCallback ( self . _handle_info_response , peer , i_r , protocol , reserved_points ) <EOL> d3 . addBoth ( self . _pay_or_cancel_payment , protocol , reserved_points , <EOL> self . _valuable_protocol_prices ) <EOL> d3 . addErrback ( self . _request_failed , "<STR_LIT>" , peer , reserved_points ) <EOL> sent_request = True <EOL> if i_p_r is not None : <EOL> d4 = protocol . add_request ( i_p_r ) <EOL> d4 . 
addCallback ( self . _handle_info_price_response , peer , i_p_r , protocol ) <EOL> d4 . addErrback ( self . _request_failed , "<STR_LIT>" , peer ) <EOL> else : <EOL> return defer . fail ( InsufficientFundsError ( ) ) <EOL> return defer . succeed ( sent_request ) <EOL> def get_new_peers ( self ) : <EOL> peers = None <EOL> if self . _peer_search_results : <EOL> peers = self . _peer_search_results . keys ( ) <EOL> elif len ( self . approved_peers ) != <NUM_LIT:0> : <EOL> peers = random . sample ( self . approved_peers , len ( self . approved_peers ) ) <EOL> return defer . succeed ( peers ) <EOL> def _should_send_request_to ( self , peer ) : <EOL> if peer in self . _incompatible_peers : <EOL> return False <EOL> if self . _peers [ peer ] >= <NUM_LIT:0> : <EOL> return True <EOL> return False <EOL> def _get_valuable_blob_request ( self , peer ) : <EOL> blob_hash = None <EOL> if peer in self . _last_blob_hashes_from_peers : <EOL> h , expire_time = self . _last_blob_hashes_from_peers [ peer ] <EOL> if datetime . datetime . now ( ) > expire_time : <EOL> del self . _last_blob_hashes_from_peers [ peer ] <EOL> else : <EOL> blob_hash = h <EOL> r_dict = { '<STR_LIT>' : { '<STR_LIT>' : blob_hash , '<STR_LIT>' : <NUM_LIT:20> } } <EOL> response_identifier = '<STR_LIT>' <EOL> request = ClientPaidRequest ( r_dict , response_identifier , <NUM_LIT:20> ) <EOL> return request <EOL> def _get_valuable_price_request ( self , peer , protocol ) : <EOL> request = None <EOL> if not protocol in self . _valuable_protocol_prices : <EOL> self . _valuable_protocol_prices [ protocol ] = self . payment_rate_manager . get_rate_valuable_blob_hash ( peer ) <EOL> request_dict = { '<STR_LIT>' : self . _valuable_protocol_prices [ protocol ] } <EOL> request = ClientRequest ( request_dict , '<STR_LIT>' ) <EOL> return request <EOL> def _get_info_request ( self , peer ) : <EOL> if peer in self . _peer_search_results : <EOL> blob_hashes = self . _peer_search_results [ peer ] <EOL> del self . _peer_search_results [ peer ] <EOL> references = [ ] <EOL> for blob_hash in blob_hashes : <EOL> if blob_hash in self . _valuable_hashes : <EOL> references . append ( self . _valuable_hashes [ blob_hash ] [ <NUM_LIT:1> ] ) <EOL> hashes_to_search = [ h for h , ( s , r , p ) in self . _valuable_hashes . iteritems ( ) if r in references ] <EOL> if hashes_to_search : <EOL> r_dict = { '<STR_LIT>' : { '<STR_LIT>' : hashes_to_search } } <EOL> response_identifier = '<STR_LIT>' <EOL> request = ClientPaidRequest ( r_dict , response_identifier , len ( hashes_to_search ) ) <EOL> return request <EOL> if not self . _peer_search_results : <EOL> self . _search_for_peers ( ) <EOL> return None <EOL> def _get_info_price_request ( self , peer , protocol ) : <EOL> request = None <EOL> if not protocol in self . _info_protocol_prices : <EOL> self . _info_protocol_prices [ protocol ] = self . payment_rate_manager . get_rate_valuable_blob_info ( peer ) <EOL> request_dict = { '<STR_LIT>' : self . _info_protocol_prices [ protocol ] } <EOL> request = ClientRequest ( request_dict , '<STR_LIT>' ) <EOL> return request <EOL> def _update_local_score ( self , peer , amount ) : <EOL> self . _peers [ peer ] += amount <EOL> def _reserve_points_valuable ( self , peer , protocol , max_units ) : <EOL> return self . _reserve_points ( peer , protocol , max_units , self . _valuable_protocol_prices ) <EOL> def _reserve_points_info ( self , peer , protocol , max_units ) : <EOL> return self . _reserve_points ( peer , protocol , max_units , self . 
_info_protocol_prices ) <EOL> def _reserve_points ( self , peer , protocol , max_units , prices ) : <EOL> assert protocol in prices <EOL> points_to_reserve = <NUM_LIT:1.0> * max_units * prices [ protocol ] / <NUM_LIT> <EOL> return self . wallet . reserve_points ( peer , points_to_reserve ) <EOL> def _pay_or_cancel_payment ( self , arg , protocol , reserved_points , protocol_prices ) : <EOL> if isinstance ( arg , Failure ) or arg == <NUM_LIT:0> : <EOL> self . _cancel_points ( reserved_points ) <EOL> else : <EOL> self . _pay_peer ( protocol , arg , reserved_points , protocol_prices ) <EOL> return arg <EOL> def _pay_peer ( self , protocol , num_units , reserved_points , prices ) : <EOL> assert num_units != <NUM_LIT:0> <EOL> assert protocol in prices <EOL> point_amount = <NUM_LIT:1.0> * num_units * prices [ protocol ] / <NUM_LIT> <EOL> self . wallet . send_points ( reserved_points , point_amount ) <EOL> def _cancel_points ( self , reserved_points ) : <EOL> self . wallet . cancel_point_reservation ( reserved_points ) <EOL> def _handle_valuable_blob_response ( self , response_dict , peer , request ) : <EOL> if not request . response_identifier in response_dict : <EOL> return InvalidResponseError ( "<STR_LIT>" ) <EOL> response = response_dict [ request . response_identifier ] <EOL> if '<STR_LIT:error>' in response : <EOL> if response [ '<STR_LIT:error>' ] == "<STR_LIT>" : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return InvalidResponseError ( "<STR_LIT>" % <EOL> ( response [ '<STR_LIT:error>' ] , ) ) <EOL> if not '<STR_LIT>' in response : <EOL> return InvalidResponseError ( "<STR_LIT>" ) <EOL> hashes = response [ '<STR_LIT>' ] <EOL> log . info ( "<STR_LIT>" , str ( len ( hashes ) ) , str ( peer ) ) <EOL> expire_time = datetime . datetime . now ( ) + datetime . timedelta ( minutes = <NUM_LIT:10> ) <EOL> reference = None <EOL> unique_hashes = set ( ) <EOL> if '<STR_LIT>' in response : <EOL> reference = response [ '<STR_LIT>' ] <EOL> for blob_hash , peer_score in hashes : <EOL> if reference is None : <EOL> reference = blob_hash <EOL> self . _last_blob_hashes_from_peers [ peer ] = ( blob_hash , expire_time ) <EOL> if not ( blob_hash in self . _valuable_hashes or blob_hash in self . _blob_infos ) : <EOL> self . _valuable_hashes [ blob_hash ] = ( peer_score , reference , peer ) <EOL> unique_hashes . add ( blob_hash ) <EOL> if len ( unique_hashes ) : <EOL> self . _update_local_score ( peer , len ( unique_hashes ) ) <EOL> peer . update_stats ( '<STR_LIT>' , len ( unique_hashes ) ) <EOL> peer . update_score ( len ( unique_hashes ) ) <EOL> else : <EOL> self . _update_local_score ( peer , - <NUM_LIT> ) <EOL> return len ( unique_hashes ) <EOL> def _handle_info_response ( self , response_dict , peer , request ) : <EOL> if not request . response_identifier in response_dict : <EOL> return InvalidResponseError ( "<STR_LIT>" ) <EOL> response = response_dict [ request . response_identifier ] <EOL> if '<STR_LIT:error>' in response : <EOL> if response [ '<STR_LIT:error>' ] == '<STR_LIT>' : <EOL> return <NUM_LIT:0> <EOL> else : <EOL> return InvalidResponseError ( "<STR_LIT>" % <EOL> ( response [ '<STR_LIT:error>' ] , ) ) <EOL> if not '<STR_LIT>' in response : <EOL> return InvalidResponseError ( "<STR_LIT>" ) <EOL> raw_blob_lengths = response [ '<STR_LIT>' ] <EOL> log . info ( "<STR_LIT>" , str ( len ( raw_blob_lengths ) ) , str ( peer ) ) <EOL> log . 
debug ( "<STR_LIT>" , str ( raw_blob_lengths ) ) <EOL> infos = [ ] <EOL> unique_hashes = set ( ) <EOL> for blob_hash , length in raw_blob_lengths : <EOL> if blob_hash in self . _valuable_hashes : <EOL> peer_score , reference , peer = self . _valuable_hashes [ blob_hash ] <EOL> del self . _valuable_hashes [ blob_hash ] <EOL> infos . append ( ValuableBlobInfo ( blob_hash , length , reference , peer , peer_score ) ) <EOL> unique_hashes . add ( blob_hash ) <EOL> elif blob_hash in request . request_dict [ '<STR_LIT>' ] [ '<STR_LIT>' ] : <EOL> unique_hashes . add ( blob_hash ) <EOL> d = self . info_manager . save_blob_infos ( infos ) <EOL> d . addCallback ( lambda _ : self . download_manager . add_blobs_to_download ( infos ) ) <EOL> def pay_or_penalize_peer ( ) : <EOL> if len ( unique_hashes ) : <EOL> self . _update_local_score ( peer , len ( unique_hashes ) ) <EOL> peer . update_stats ( '<STR_LIT>' , len ( unique_hashes ) ) <EOL> peer . update_score ( len ( unique_hashes ) ) <EOL> else : <EOL> self . _update_local_score ( peer , - <NUM_LIT> ) <EOL> return len ( unique_hashes ) <EOL> d . addCallback ( lambda _ : pay_or_penalize_peer ( ) ) <EOL> return d <EOL> def _handle_valuable_price_response ( self , response_dict , peer , request , protocol ) : <EOL> if not request . response_identifier in response_dict : <EOL> return InvalidResponseError ( "<STR_LIT>" ) <EOL> assert protocol in self . _valuable_protocol_prices <EOL> response = response_dict [ request . response_identifier ] <EOL> if response == "<STR_LIT>" : <EOL> return True <EOL> else : <EOL> del self . _valuable_protocol_prices [ protocol ] <EOL> self . _price_disagreements . append ( peer ) <EOL> return True <EOL> def _handle_info_price_response ( self , response_dict , peer , request , protocol ) : <EOL> if not request . response_identifier in response_dict : <EOL> return InvalidResponseError ( "<STR_LIT>" ) <EOL> assert protocol in self . _info_protocol_prices <EOL> response = response_dict [ request . response_identifier ] <EOL> if response == "<STR_LIT>" : <EOL> return True <EOL> else : <EOL> del self . _info_protocol_prices [ protocol ] <EOL> self . _price_disagreements . append ( peer ) <EOL> return True <EOL> def _request_failed ( self , reason , request_type , peer ) : <EOL> if reason . check ( RequestCanceledError ) : <EOL> return <EOL> if reason . check ( NoResponseError ) : <EOL> self . _incompatible_peers . append ( peer ) <EOL> log . warning ( "<STR_LIT>" , <EOL> str ( request_type ) , str ( reason . getErrorMessage ( ) ) ) <EOL> self . _update_local_score ( peer , - <NUM_LIT> ) <EOL> peer . update_score ( - <NUM_LIT> ) <EOL> if reason . check ( ConnectionClosedBeforeResponseError ) : <EOL> return <EOL> return reason <EOL> def _search_for_peers ( self ) : <EOL> references_with_sources = set ( ) <EOL> for h_list in self . _peer_search_results . itervalues ( ) : <EOL> for h in h_list : <EOL> if h in self . _valuable_hashes : <EOL> references_with_sources . add ( self . _valuable_hashes [ h ] [ <NUM_LIT:1> ] ) <EOL> hash_to_search = None <EOL> used_references = [ ] <EOL> for h , ( s , r , p ) in self . _valuable_hashes . iteritems ( ) : <EOL> if not r in used_references : <EOL> used_references . append ( r ) <EOL> hash_to_search = h <EOL> if not r in references_with_sources : <EOL> break <EOL> if hash_to_search : <EOL> d = self . peer_finder . find_peers_for_blob ( hash_to_search ) <EOL> d . addCallback ( self . 
_set_peer_search_results , hash_to_search ) <EOL> def _set_peer_search_results ( self , peers , searched_hash ) : <EOL> for peer in peers : <EOL> self . _peer_search_results [ peer ] . append ( searched_hash ) </s>
94,754
from setuptools import setup <EOL> import os <EOL> APP = [ os . path . join ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> DATA_FILES = [ ] <EOL> OPTIONS = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> ] <EOL> } <EOL> } <EOL> setup ( <EOL> app = APP , <EOL> data_files = DATA_FILES ,
options = { '<STR_LIT>' : OPTIONS } ,
6,636,726,067,863,529,000
from setuptools import setup <EOL> import os <EOL> APP = [ os . path . join ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ] <EOL> DATA_FILES = [ ] <EOL> OPTIONS = { '<STR_LIT>' : True , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : True , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] <EOL> } <EOL> ] <EOL> } <EOL> } <EOL> setup ( <EOL> app = APP , <EOL> data_files = DATA_FILES , <EOL> options = { '<STR_LIT>' : OPTIONS } , <EOL> setup_requires = [ '<STR_LIT>' ] , <EOL> ) </s>
94,755
from scio . client import Client , Fault , Method , NotSOAP
__all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
4,901,909,815,951,055,000
from scio . client import Client , Fault , Method , NotSOAP <EOL> __all__ = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] </s>
94,756
from textkit . utils import read_tokens
def test_read_tokens ( ) :
2,140,283,708,566,164,700
from textkit . utils import read_tokens <EOL> def test_read_tokens ( ) : <EOL> f = open ( '<STR_LIT>' , '<STR_LIT:r>' ) <EOL> tokens = read_tokens ( f ) <EOL> assert len ( tokens ) == <NUM_LIT:6> <EOL> f . close ( ) </s>
94,757
"""<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import pycuda . autoinit <EOL> import pycuda . gpuarray as gpuarray
import numpy as np
-4,713,739,939,196,893,000
"""<STR_LIT>""" <EOL> from __future__ import print_function <EOL> import pycuda . autoinit <EOL> import pycuda . gpuarray as gpuarray <EOL> import numpy as np <EOL> import skcuda . fft as cu_fft <EOL> print ( '<STR_LIT>' ) <EOL> N = <NUM_LIT> <EOL> M = N // <NUM_LIT:2> <EOL> x = np . asarray ( np . random . rand ( N , M ) , np . float32 ) <EOL> xf = np . fft . fft2 ( x ) <EOL> y = np . real ( np . fft . ifft2 ( xf ) ) <EOL> x_gpu = gpuarray . to_gpu ( x ) <EOL> xf_gpu = gpuarray . empty ( ( x . shape [ <NUM_LIT:0> ] , x . shape [ <NUM_LIT:1> ] // <NUM_LIT:2> + <NUM_LIT:1> ) , np . complex64 ) <EOL> plan_forward = cu_fft . Plan ( x_gpu . shape , np . float32 , np . complex64 ) <EOL> cu_fft . fft ( x_gpu , xf_gpu , plan_forward ) <EOL> y_gpu = gpuarray . empty_like ( x_gpu ) <EOL> plan_inverse = cu_fft . Plan ( x_gpu . shape , np . complex64 , np . float32 ) <EOL> cu_fft . ifft ( xf_gpu , y_gpu , plan_inverse , True ) <EOL> print ( '<STR_LIT>' , np . allclose ( y , y_gpu . get ( ) , atol = <NUM_LIT> ) ) <EOL> print ( '<STR_LIT>' ) <EOL> x = np . asarray ( np . random . rand ( N , M ) + <NUM_LIT> * np . random . rand ( N , M ) , np . complex64 ) <EOL> x_gpu = gpuarray . to_gpu ( x ) <EOL> plan = cu_fft . Plan ( x_gpu . shape , np . complex64 , np . complex64 ) <EOL> cu_fft . fft ( x_gpu , x_gpu , plan ) <EOL> cu_fft . ifft ( x_gpu , x_gpu , plan , True ) <EOL> print ( '<STR_LIT>' , np . allclose ( x , x_gpu . get ( ) , atol = <NUM_LIT> ) ) </s>
94,758
<s> """<STR_LIT>"""
-7,276,128,957,384,905,000
"""<STR_LIT>""" <EOL> import pycuda . driver as drv <EOL> import pycuda . gpuarray as gpuarray <EOL> import pycuda . elementwise as el <EOL> import pycuda . tools as tools <EOL> import numpy as np <EOL> from . import cufft <EOL> from . cufft import CUFFT_COMPATIBILITY_NATIVE , CUFFT_COMPATIBILITY_FFTW_PADDING , CUFFT_COMPATIBILITY_FFTW_ASYMMETRIC , CUFFT_COMPATIBILITY_FFTW_ALL <EOL> from . import misc <EOL> class Plan : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , shape , in_dtype , out_dtype , batch = <NUM_LIT:1> , stream = None , <EOL> mode = <NUM_LIT> , inembed = None , istride = <NUM_LIT:1> , idist = <NUM_LIT:0> , onembed = None , <EOL> ostride = <NUM_LIT:1> , odist = <NUM_LIT:0> , auto_allocate = True ) : <EOL> if np . isscalar ( shape ) : <EOL> self . shape = ( shape , ) <EOL> else : <EOL> self . shape = shape <EOL> self . in_dtype = in_dtype <EOL> self . out_dtype = out_dtype <EOL> if batch <= <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> self . batch = batch <EOL> if in_dtype == np . float32 and out_dtype == np . complex64 : <EOL> self . fft_type = cufft . CUFFT_R2C <EOL> self . fft_func = cufft . cufftExecR2C <EOL> elif in_dtype == np . complex64 and out_dtype == np . float32 : <EOL> self . fft_type = cufft . CUFFT_C2R <EOL> self . fft_func = cufft . cufftExecC2R <EOL> elif in_dtype == np . complex64 and out_dtype == np . complex64 : <EOL> self . fft_type = cufft . CUFFT_C2C <EOL> self . fft_func = cufft . cufftExecC2C <EOL> elif in_dtype == np . float64 and out_dtype == np . complex128 : <EOL> self . fft_type = cufft . CUFFT_D2Z <EOL> self . fft_func = cufft . cufftExecD2Z <EOL> elif in_dtype == np . complex128 and out_dtype == np . float64 : <EOL> self . fft_type = cufft . CUFFT_Z2D <EOL> self . fft_func = cufft . cufftExecZ2D <EOL> elif in_dtype == np . complex128 and out_dtype == np . complex128 : <EOL> self . fft_type = cufft . CUFFT_Z2Z <EOL> self . fft_func = cufft . cufftExecZ2Z <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> capability = misc . get_compute_capability ( misc . get_current_device ( ) ) <EOL> if capability < <NUM_LIT> and ( misc . isdoubletype ( in_dtype ) or misc . isdoubletype ( out_dtype ) ) : <EOL> raise RuntimeError ( '<STR_LIT>' <EOL> '<STR_LIT>' % capability ) <EOL> if inembed is not None : <EOL> inembed = inembed . ctypes . data <EOL> if onembed is not None : <EOL> onembed = onembed . ctypes . data <EOL> if len ( self . shape ) <= <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> n = np . asarray ( self . shape , np . int32 ) <EOL> self . handle = cufft . cufftCreate ( ) <EOL> cufft . cufftSetCompatibilityMode ( self . handle , mode ) <EOL> cufft . cufftSetAutoAllocation ( self . handle , auto_allocate ) <EOL> self . worksize = cufft . cufftMakePlanMany ( <EOL> self . handle , len ( self . shape ) , n . ctypes . data , inembed , istride , idist , <EOL> onembed , ostride , odist , self . fft_type , self . batch ) <EOL> if stream != None : <EOL> cufft . cufftSetStream ( self . handle , stream . handle ) <EOL> def set_work_area ( self , work_area ) : <EOL> """<STR_LIT>""" <EOL> cufft . cufftSetWorkArea ( self . handle , int ( work_area . gpudata ) ) <EOL> def __del__ ( self ) : <EOL> try : <EOL> cufft . cufftDestroy ( self . handle ) <EOL> except : <EOL> pass <EOL> def _scale_inplace ( a , x_gpu ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> func = _scale_inplace . cache [ ( a , x_gpu . dtype ) ] <EOL> except KeyError : <EOL> ctype = tools . dtype_to_ctype ( x_gpu . dtype ) <EOL> func = el . 
ElementwiseKernel ( <EOL> "<STR_LIT>" . format ( ctype = ctype ) , <EOL> "<STR_LIT>" ) <EOL> _scale_inplace . cache [ ( a , x_gpu . dtype ) ] = func <EOL> func ( x_gpu . dtype . type ( a ) , x_gpu ) <EOL> _scale_inplace . cache = { } <EOL> def _fft ( x_gpu , y_gpu , plan , direction , scale = None ) : <EOL> """<STR_LIT>""" <EOL> if ( x_gpu . gpudata == y_gpu . gpudata ) and plan . fft_type not in [ cufft . CUFFT_C2C , cufft . CUFFT_Z2Z ] : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if direction == cufft . CUFFT_FORWARD and plan . in_dtype in np . sctypes [ '<STR_LIT>' ] and plan . out_dtype in np . sctypes [ '<STR_LIT:float>' ] : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if direction == cufft . CUFFT_INVERSE and plan . in_dtype in np . sctypes [ '<STR_LIT:float>' ] and plan . out_dtype in np . sctypes [ '<STR_LIT>' ] : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> if plan . fft_type in [ cufft . CUFFT_C2C , cufft . CUFFT_Z2Z ] : <EOL> plan . fft_func ( plan . handle , int ( x_gpu . gpudata ) , int ( y_gpu . gpudata ) , <EOL> direction ) <EOL> else : <EOL> plan . fft_func ( plan . handle , int ( x_gpu . gpudata ) , <EOL> int ( y_gpu . gpudata ) ) <EOL> if scale != None : <EOL> _scale_inplace ( scale , y_gpu ) <EOL> def fft ( x_gpu , y_gpu , plan , scale = False ) : <EOL> """<STR_LIT>""" <EOL> if scale == True : <EOL> return _fft ( x_gpu , y_gpu , plan , cufft . CUFFT_FORWARD , x_gpu . size / plan . batch ) <EOL> else : <EOL> return _fft ( x_gpu , y_gpu , plan , cufft . CUFFT_FORWARD ) <EOL> def ifft ( x_gpu , y_gpu , plan , scale = False ) : <EOL> """<STR_LIT>""" <EOL> if scale == True : <EOL> return _fft ( x_gpu , y_gpu , plan , cufft . CUFFT_INVERSE , y_gpu . size / plan . batch ) <EOL> else : <EOL> return _fft ( x_gpu , y_gpu , plan , cufft . CUFFT_INVERSE ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import doctest <EOL> doctest . testmod ( ) </s>
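The cuFFT demo and wrapper in the two records above check that a real-to-complex forward transform followed by the scaled inverse recovers the original array. The same round-trip property can be verified on the CPU with NumPy alone (the array shape below is chosen only for illustration):

import numpy as np

# Forward real 2-D FFT, then the inverse (which applies the 1/N scaling),
# and confirm the input is recovered -- the check the GPU demo performs.
x = np.random.rand(128, 64)
xf = np.fft.rfft2(x)              # shape (128, 64 // 2 + 1)
y = np.fft.irfft2(xf, s=x.shape)
assert np.allclose(x, y)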
94,759
"""<STR_LIT>""" <EOL> from kestrel . config import load_config
from kestrel . worker import Worker
-696,333,514,340,272,600
"""<STR_LIT>""" <EOL> from kestrel . config import load_config <EOL> from kestrel . worker import Worker <EOL> from kestrel . manager import Manager <EOL> from kestrel . user import Client <EOL> __version__ = '<STR_LIT>' </s>
94,760
<s> __author__ = '<STR_LIT>'
5,858,988,771,707,775,000
__author__ = '<STR_LIT>' <EOL> BING_API_KEY = '<STR_LIT>' </s>
94,761
import os <EOL> import json <EOL> import errno <EOL> from collections import OrderedDict <EOL> from inifile import IniFile <EOL> from lektor . context import get_ctx <EOL> from lektor . utils import iter_dotted_path_prefixes , resolve_dotted_value , merge , decode_flat_data <EOL> def load_databag ( filename ) : <EOL> try : <EOL> if filename . endswith ( '<STR_LIT>' ) : <EOL> with open ( filename , '<STR_LIT:r>' ) as f : <EOL> return json . load ( f , object_pairs_hook = OrderedDict ) <EOL> elif filename . endswith ( '<STR_LIT>' ) : <EOL> return decode_flat_data ( IniFile ( filename ) . items ( ) , <EOL> dict_cls = OrderedDict ) <EOL> except ( OSError , IOError ) as e : <EOL> if e . errno != errno . ENOENT : <EOL> raise <EOL> class Databags ( object ) : <EOL> def __init__ ( self , env ) : <EOL> self . env = env <EOL> self . root_path = os . path . join ( self . env . root_path , '<STR_LIT>' ) <EOL> self . _known_bags = { } <EOL> self . _bags = { }
try :
8,022,183,779,938,219,000
import os <EOL> import json <EOL> import errno <EOL> from collections import OrderedDict <EOL> from inifile import IniFile <EOL> from lektor . context import get_ctx <EOL> from lektor . utils import iter_dotted_path_prefixes , resolve_dotted_value , merge , decode_flat_data <EOL> def load_databag ( filename ) : <EOL> try : <EOL> if filename . endswith ( '<STR_LIT>' ) : <EOL> with open ( filename , '<STR_LIT:r>' ) as f : <EOL> return json . load ( f , object_pairs_hook = OrderedDict ) <EOL> elif filename . endswith ( '<STR_LIT>' ) : <EOL> return decode_flat_data ( IniFile ( filename ) . items ( ) , <EOL> dict_cls = OrderedDict ) <EOL> except ( OSError , IOError ) as e : <EOL> if e . errno != errno . ENOENT : <EOL> raise <EOL> class Databags ( object ) : <EOL> def __init__ ( self , env ) : <EOL> self . env = env <EOL> self . root_path = os . path . join ( self . env . root_path , '<STR_LIT>' ) <EOL> self . _known_bags = { } <EOL> self . _bags = { } <EOL> try : <EOL> for filename in os . listdir ( self . root_path ) : <EOL> if filename . endswith ( ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> self . _known_bags . setdefault ( <EOL> filename . rsplit ( '<STR_LIT:.>' , - <NUM_LIT:1> ) [ <NUM_LIT:0> ] , [ ] ) . append ( filename ) <EOL> except OSError : <EOL> pass <EOL> def get_bag ( self , name ) : <EOL> sources = self . _known_bags . get ( name ) <EOL> if not sources : <EOL> return None <EOL> rv = self . _bags . get ( name ) <EOL> if rv is None : <EOL> filenames = [ ] <EOL> rv = OrderedDict ( ) <EOL> for filename in sources : <EOL> filename = os . path . join ( self . root_path , filename ) <EOL> rv = merge ( rv , load_databag ( filename ) ) <EOL> filenames . append ( filename ) <EOL> self . _bags [ name ] = ( rv , filenames ) <EOL> else : <EOL> rv , filenames = rv <EOL> ctx = get_ctx ( ) <EOL> if ctx is not None : <EOL> for filename in filenames : <EOL> ctx . record_dependency ( filename ) <EOL> return rv <EOL> def lookup ( self , key ) : <EOL> for prefix , local_key in iter_dotted_path_prefixes ( key ) : <EOL> bag = self . get_bag ( prefix ) <EOL> if bag is not None : <EOL> if local_key is None : <EOL> return bag <EOL> return resolve_dotted_value ( bag , local_key ) </s>
94,762
import os <EOL> import re <EOL> import shutil <EOL> import subprocess <EOL> import tempfile <EOL> import uuid <EOL> from contextlib import contextmanager <EOL> from datetime import datetime <EOL> from functools import partial <EOL> import click <EOL> from jinja2 import Environment , PackageLoader <EOL> from . utils import fs_enc , slugify <EOL> from lektor . _compat import text_type <EOL> _var_re = re . compile ( r'<STR_LIT>' ) <EOL> class Generator ( object ) : <EOL> def __init__ ( self , base ) : <EOL> self . question = <NUM_LIT:0> <EOL> self . jinja_env = Environment ( <EOL> loader = PackageLoader ( '<STR_LIT>' , '<STR_LIT>' % base ) , <EOL> line_statement_prefix = '<STR_LIT>' , <EOL> line_comment_prefix = '<STR_LIT>' , <EOL> variable_start_string = '<STR_LIT>' , <EOL> variable_end_string = '<STR_LIT:}>' , <EOL> block_start_string = '<STR_LIT>' , <EOL> block_end_string = '<STR_LIT>' , <EOL> comment_start_string = '<STR_LIT>' , <EOL> comment_end_string = '<STR_LIT>' , <EOL> ) <EOL> self . options = { } <EOL> self . term_width = min ( click . get_terminal_size ( ) [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . e = click . secho <EOL> self . w = partial ( click . wrap_text , width = self . term_width ) <EOL> def abort ( self , message ) : <EOL> click . echo ( '<STR_LIT>' % message , err = True ) <EOL> raise click . Abort ( ) <EOL> def prompt ( self , text , default = None , info = None ) : <EOL> self . question += <NUM_LIT:1> <EOL> self . e ( '<STR_LIT>' ) <EOL> self . e ( '<STR_LIT>' % self . question , fg = '<STR_LIT>' ) <EOL> if info is not None : <EOL> self . e ( click . wrap_text ( info , self . term_width - <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> text = '<STR_LIT>' + click . style ( text , fg = '<STR_LIT>' ) <EOL> if default is True or default is False : <EOL> rv = click . confirm ( text , default = default ) <EOL> else : <EOL> rv = click . prompt ( text , default = default , show_default = True ) <EOL> return rv <EOL> def title ( self , title ) : <EOL> self . e ( title , fg = '<STR_LIT>' ) <EOL> self . e ( '<STR_LIT:=>' * len ( title ) , fg = '<STR_LIT>' ) <EOL> self . e ( '<STR_LIT>' ) <EOL> def text ( self , text ) : <EOL> self . e ( self . w ( text ) ) <EOL> def confirm ( self , prompt ) : <EOL> self . e ( '<STR_LIT>' ) <EOL> click . confirm ( prompt , default = True , abort = True , prompt_suffix = '<STR_LIT:U+0020>' ) <EOL> @ contextmanager <EOL> def make_target_directory ( self , path ) : <EOL> here = os . path . abspath ( os . getcwd ( ) ) <EOL> path = os . path . abspath ( path ) <EOL> if here != path : <EOL> try : <EOL> os . makedirs ( path ) <EOL> except OSError as e : <EOL> self . abort ( '<STR_LIT>' % e ) <EOL> if os . path . isdir ( path ) : <EOL> try : <EOL> if len ( os . listdir ( path ) ) != <NUM_LIT:0> : <EOL> raise OSError ( '<STR_LIT>' ) <EOL> except OSError as e : <EOL> self . abort ( '<STR_LIT>' % e ) <EOL> scratch = os . path . join ( tempfile . gettempdir ( ) , uuid . uuid4 ( ) . hex ) <EOL> os . makedirs ( scratch ) <EOL> try : <EOL> yield scratch <EOL> except : <EOL> shutil . rmtree ( scratch ) <EOL> raise <EOL> else : <EOL> for filename in os . listdir ( scratch ) : <EOL> if isinstance ( path , text_type ) : <EOL> filename = filename . decode ( fs_enc ) <EOL> shutil . move ( os . path . join ( scratch , filename ) , <EOL> os . path . join ( path , filename ) ) <EOL> os . rmdir ( scratch ) <EOL> def expand_filename ( self , base , ctx , template_filename ) : <EOL> def _repl ( match ) : <EOL> return ctx [ match . 
group ( <NUM_LIT:1> ) ] <EOL> return os . path . join ( base , _var_re . sub ( _repl , template_filename ) ) [ : - <NUM_LIT:3> ] <EOL> def run ( self , ctx , path ) : <EOL> with self . make_target_directory ( path ) as scratch : <EOL> for template in self . jinja_env . list_templates ( ) : <EOL> if not template . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> fn = self . expand_filename ( scratch , ctx , template ) <EOL> tmpl = self . jinja_env . get_template ( template ) <EOL> rv = tmpl . render ( ctx ) . strip ( '<STR_LIT:\r\n>' ) <EOL> if rv : <EOL> directory = os . path . dirname ( fn ) <EOL> try : <EOL> os . makedirs ( directory ) <EOL> except OSError : <EOL> pass <EOL> with open ( fn , '<STR_LIT:w>' ) as f : <EOL> f . write ( rv . encode ( '<STR_LIT:utf-8>' ) + '<STR_LIT:\n>' ) <EOL> def get_default_author ( ) : <EOL> import getpass <EOL> if os . name == '<STR_LIT>' : <EOL> return getpass . getuser ( ) . decode ( '<STR_LIT>' ) <EOL> import pwd <EOL> ent = pwd . getpwuid ( os . getuid ( ) ) <EOL> if ent and ent . pw_gecos : <EOL> return ent . pw_gecos . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> return getpass . getuser ( ) . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> def get_default_author_email ( ) : <EOL> try : <EOL> return subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) . communicate ( ) [ <NUM_LIT:0> ] . strip ( ) <EOL> except Exception : <EOL> return None <EOL> def project_quickstart ( defaults = None ) : <EOL> if not defaults : <EOL> defaults = { } <EOL> g = Generator ( '<STR_LIT>' ) <EOL> g . title ( '<STR_LIT>' ) <EOL> g . text ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> name = defaults . get ( '<STR_LIT:name>' ) <EOL> if name is None : <EOL> name = g . prompt ( '<STR_LIT>' , None , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> author_name = g . prompt ( '<STR_LIT>' , get_default_author ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> path = defaults . get ( '<STR_LIT:path>' ) <EOL> if path is None : <EOL> here = os . path . abspath ( os . getcwd ( ) ) <EOL> default_project_path = None <EOL> try : <EOL> if len ( os . listdir ( here ) ) == [ ] : <EOL> default_project_path = here <EOL> except OSError : <EOL> pass <EOL> if default_project_path is None : <EOL> default_project_path = os . path . join ( os . getcwd ( ) , name ) <EOL> path = g . prompt ( '<STR_LIT>' , default_project_path , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> path = os . path . expanduser ( path ) <EOL> with_blog = g . prompt ( '<STR_LIT>' , True , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' )
g . confirm ( '<STR_LIT>' )
7,183,740,524,408,389,000
import os <EOL> import re <EOL> import shutil <EOL> import subprocess <EOL> import tempfile <EOL> import uuid <EOL> from contextlib import contextmanager <EOL> from datetime import datetime <EOL> from functools import partial <EOL> import click <EOL> from jinja2 import Environment , PackageLoader <EOL> from . utils import fs_enc , slugify <EOL> from lektor . _compat import text_type <EOL> _var_re = re . compile ( r'<STR_LIT>' ) <EOL> class Generator ( object ) : <EOL> def __init__ ( self , base ) : <EOL> self . question = <NUM_LIT:0> <EOL> self . jinja_env = Environment ( <EOL> loader = PackageLoader ( '<STR_LIT>' , '<STR_LIT>' % base ) , <EOL> line_statement_prefix = '<STR_LIT>' , <EOL> line_comment_prefix = '<STR_LIT>' , <EOL> variable_start_string = '<STR_LIT>' , <EOL> variable_end_string = '<STR_LIT:}>' , <EOL> block_start_string = '<STR_LIT>' , <EOL> block_end_string = '<STR_LIT>' , <EOL> comment_start_string = '<STR_LIT>' , <EOL> comment_end_string = '<STR_LIT>' , <EOL> ) <EOL> self . options = { } <EOL> self . term_width = min ( click . get_terminal_size ( ) [ <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . e = click . secho <EOL> self . w = partial ( click . wrap_text , width = self . term_width ) <EOL> def abort ( self , message ) : <EOL> click . echo ( '<STR_LIT>' % message , err = True ) <EOL> raise click . Abort ( ) <EOL> def prompt ( self , text , default = None , info = None ) : <EOL> self . question += <NUM_LIT:1> <EOL> self . e ( '<STR_LIT>' ) <EOL> self . e ( '<STR_LIT>' % self . question , fg = '<STR_LIT>' ) <EOL> if info is not None : <EOL> self . e ( click . wrap_text ( info , self . term_width - <NUM_LIT:2> , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> text = '<STR_LIT>' + click . style ( text , fg = '<STR_LIT>' ) <EOL> if default is True or default is False : <EOL> rv = click . confirm ( text , default = default ) <EOL> else : <EOL> rv = click . prompt ( text , default = default , show_default = True ) <EOL> return rv <EOL> def title ( self , title ) : <EOL> self . e ( title , fg = '<STR_LIT>' ) <EOL> self . e ( '<STR_LIT:=>' * len ( title ) , fg = '<STR_LIT>' ) <EOL> self . e ( '<STR_LIT>' ) <EOL> def text ( self , text ) : <EOL> self . e ( self . w ( text ) ) <EOL> def confirm ( self , prompt ) : <EOL> self . e ( '<STR_LIT>' ) <EOL> click . confirm ( prompt , default = True , abort = True , prompt_suffix = '<STR_LIT:U+0020>' ) <EOL> @ contextmanager <EOL> def make_target_directory ( self , path ) : <EOL> here = os . path . abspath ( os . getcwd ( ) ) <EOL> path = os . path . abspath ( path ) <EOL> if here != path : <EOL> try : <EOL> os . makedirs ( path ) <EOL> except OSError as e : <EOL> self . abort ( '<STR_LIT>' % e ) <EOL> if os . path . isdir ( path ) : <EOL> try : <EOL> if len ( os . listdir ( path ) ) != <NUM_LIT:0> : <EOL> raise OSError ( '<STR_LIT>' ) <EOL> except OSError as e : <EOL> self . abort ( '<STR_LIT>' % e ) <EOL> scratch = os . path . join ( tempfile . gettempdir ( ) , uuid . uuid4 ( ) . hex ) <EOL> os . makedirs ( scratch ) <EOL> try : <EOL> yield scratch <EOL> except : <EOL> shutil . rmtree ( scratch ) <EOL> raise <EOL> else : <EOL> for filename in os . listdir ( scratch ) : <EOL> if isinstance ( path , text_type ) : <EOL> filename = filename . decode ( fs_enc ) <EOL> shutil . move ( os . path . join ( scratch , filename ) , <EOL> os . path . join ( path , filename ) ) <EOL> os . rmdir ( scratch ) <EOL> def expand_filename ( self , base , ctx , template_filename ) : <EOL> def _repl ( match ) : <EOL> return ctx [ match . 
group ( <NUM_LIT:1> ) ] <EOL> return os . path . join ( base , _var_re . sub ( _repl , template_filename ) ) [ : - <NUM_LIT:3> ] <EOL> def run ( self , ctx , path ) : <EOL> with self . make_target_directory ( path ) as scratch : <EOL> for template in self . jinja_env . list_templates ( ) : <EOL> if not template . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> fn = self . expand_filename ( scratch , ctx , template ) <EOL> tmpl = self . jinja_env . get_template ( template ) <EOL> rv = tmpl . render ( ctx ) . strip ( '<STR_LIT:\r\n>' ) <EOL> if rv : <EOL> directory = os . path . dirname ( fn ) <EOL> try : <EOL> os . makedirs ( directory ) <EOL> except OSError : <EOL> pass <EOL> with open ( fn , '<STR_LIT:w>' ) as f : <EOL> f . write ( rv . encode ( '<STR_LIT:utf-8>' ) + '<STR_LIT:\n>' ) <EOL> def get_default_author ( ) : <EOL> import getpass <EOL> if os . name == '<STR_LIT>' : <EOL> return getpass . getuser ( ) . decode ( '<STR_LIT>' ) <EOL> import pwd <EOL> ent = pwd . getpwuid ( os . getuid ( ) ) <EOL> if ent and ent . pw_gecos : <EOL> return ent . pw_gecos . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> return getpass . getuser ( ) . decode ( '<STR_LIT:utf-8>' , '<STR_LIT:replace>' ) <EOL> def get_default_author_email ( ) : <EOL> try : <EOL> return subprocess . Popen ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . PIPE ) . communicate ( ) [ <NUM_LIT:0> ] . strip ( ) <EOL> except Exception : <EOL> return None <EOL> def project_quickstart ( defaults = None ) : <EOL> if not defaults : <EOL> defaults = { } <EOL> g = Generator ( '<STR_LIT>' ) <EOL> g . title ( '<STR_LIT>' ) <EOL> g . text ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> name = defaults . get ( '<STR_LIT:name>' ) <EOL> if name is None : <EOL> name = g . prompt ( '<STR_LIT>' , None , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> author_name = g . prompt ( '<STR_LIT>' , get_default_author ( ) , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> path = defaults . get ( '<STR_LIT:path>' ) <EOL> if path is None : <EOL> here = os . path . abspath ( os . getcwd ( ) ) <EOL> default_project_path = None <EOL> try : <EOL> if len ( os . listdir ( here ) ) == [ ] : <EOL> default_project_path = here <EOL> except OSError : <EOL> pass <EOL> if default_project_path is None : <EOL> default_project_path = os . path . join ( os . getcwd ( ) , name ) <EOL> path = g . prompt ( '<STR_LIT>' , default_project_path , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> path = os . path . expanduser ( path ) <EOL> with_blog = g . prompt ( '<STR_LIT>' , True , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> g . confirm ( '<STR_LIT>' ) <EOL> g . run ( { <EOL> '<STR_LIT>' : name , <EOL> '<STR_LIT>' : slugify ( name ) , <EOL> '<STR_LIT>' : path , <EOL> '<STR_LIT>' : with_blog , <EOL> '<STR_LIT>' : datetime . utcnow ( ) . year , <EOL> '<STR_LIT>' : datetime . utcnow ( ) . strftime ( '<STR_LIT>' ) , <EOL> '<STR_LIT>' : author_name , <EOL> } , path ) <EOL> def plugin_quickstart ( defaults = None , project = None ) : <EOL> if defaults is None : <EOL> defaults = { } <EOL> g = Generator ( '<STR_LIT>' ) <EOL> plugin_name = defaults . get ( '<STR_LIT>' ) <EOL> if plugin_name is None : <EOL> plugin_name = g . prompt ( '<STR_LIT>' , None , <EOL> '<STR_LIT>' ) <EOL> plugin_id = plugin_name . lower ( ) <EOL> if plugin_id . startswith ( '<STR_LIT>' ) : <EOL> plugin_id = plugin_id [ <NUM_LIT:6> : ] <EOL> if plugin_id . 
endswith ( '<STR_LIT>' ) : <EOL> plugin_id = plugin_id [ : - <NUM_LIT:6> ] <EOL> plugin_id = slugify ( plugin_id ) <EOL> path = defaults . get ( '<STR_LIT:path>' ) <EOL> if path is None : <EOL> if project is not None : <EOL> default_path = os . path . join ( project . tree , '<STR_LIT>' , <EOL> plugin_id ) <EOL> else : <EOL> if len ( os . listdir ( '<STR_LIT:.>' ) ) == <NUM_LIT:0> : <EOL> default_path = os . getcwd ( ) <EOL> else : <EOL> default_path = os . path . join ( os . getcwd ( ) , plugin_id ) <EOL> path = g . prompt ( '<STR_LIT>' , default_path , <EOL> '<STR_LIT>' ) <EOL> author_name = g . prompt ( '<STR_LIT>' , get_default_author ( ) , <EOL> '<STR_LIT>' ) <EOL> author_email = g . prompt ( '<STR_LIT>' , get_default_author_email ( ) , <EOL> '<STR_LIT>' ) <EOL> g . confirm ( '<STR_LIT>' ) <EOL> g . run ( { <EOL> '<STR_LIT>' : plugin_name , <EOL> '<STR_LIT>' : plugin_id , <EOL> '<STR_LIT>' : plugin_id . title ( ) . replace ( '<STR_LIT:->' , '<STR_LIT>' ) + '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' + plugin_id . replace ( '<STR_LIT:->' , '<STR_LIT:_>' ) , <EOL> '<STR_LIT>' : author_name , <EOL> '<STR_LIT>' : author_email , <EOL> } , path ) </s>
94,763
import sys <EOL> import time <EOL> import os <EOL> import json <EOL> import requests <EOL> import boto . s3 <EOL> import boto . s3 . connection <EOL> from boto . s3 . key import Key <EOL> from colorama import Fore , Back , Style <EOL> class Connector : <EOL> def __init__ ( self ) : <EOL> self . connection = { } <EOL> self . identity = None <EOL> self . protocol = "<STR_LIT>" <EOL> def get_identity ( self , api_host , api_access ) : <EOL> path = '<STR_LIT>'
headers = {
7,217,793,893,006,956,000
import sys <EOL> import time <EOL> import os <EOL> import json <EOL> import requests <EOL> import boto . s3 <EOL> import boto . s3 . connection <EOL> from boto . s3 . key import Key <EOL> from colorama import Fore , Back , Style <EOL> class Connector : <EOL> def __init__ ( self ) : <EOL> self . connection = { } <EOL> self . identity = None <EOL> self . protocol = "<STR_LIT>" <EOL> def get_identity ( self , api_host , api_access ) : <EOL> path = '<STR_LIT>' <EOL> headers = { <EOL> '<STR_LIT:Content-Type>' : '<STR_LIT:application/json>' , <EOL> '<STR_LIT>' : '<STR_LIT>' + api_access <EOL> } <EOL> try : <EOL> r = requests . post ( api_host + path , headers = headers , allow_redirects = False , verify = False ) <EOL> except : <EOL> sys . exit ( Fore . RED + "<STR_LIT>" + Style . RESET_ALL ) <EOL> if r . status_code != <NUM_LIT:200> : <EOL> sys . exit ( Fore . RED + '<STR_LIT>' + Style . RESET_ALL ) <EOL> response = r . json ( ) <EOL> return response <EOL> def s3_connection ( self , identity ) : <EOL> connection = { } <EOL> try : <EOL> connection [ "<STR_LIT>" ] = boto . s3 . connection . S3Connection ( aws_access_key_id = identity [ '<STR_LIT:data>' ] [ '<STR_LIT:key>' ] , aws_secret_access_key = identity [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] , security_token = identity [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] ) <EOL> connection [ "<STR_LIT>" ] = connection [ "<STR_LIT>" ] . get_bucket ( identity [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] , validate = False ) <EOL> connection [ "<STR_LIT:store>" ] = identity [ '<STR_LIT:data>' ] [ '<STR_LIT:store>' ] <EOL> connection [ "<STR_LIT>" ] = identity [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> connection [ "<STR_LIT>" ] = identity [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] <EOL> except : <EOL> sys . exit ( Fore . RED + '<STR_LIT>' + Style . RESET_ALL ) <EOL> if not connection [ "<STR_LIT>" ] : <EOL> sys . exit ( Fore . RED + '<STR_LIT>' + Style . RESET_ALL ) <EOL> return connection </s>
94,764
<s> '''<STR_LIT>'''
import random
-5,027,341,108,220,451,000
'''<STR_LIT>''' <EOL> import random <EOL> def enabled ( ) : <EOL> '''<STR_LIT>''' <EOL> return True <EOL> def applicable_types ( ) : <EOL> '''<STR_LIT>''' <EOL> return [ '<STR_LIT:text>' ] <EOL> def commands ( ) : <EOL> '''<STR_LIT>''' <EOL> return [ '<STR_LIT>' ] <EOL> def should_reply ( ) : <EOL> '''<STR_LIT>''' <EOL> return True <EOL> def reply_type ( ) : <EOL> '''<STR_LIT>''' <EOL> return '<STR_LIT:text>' <EOL> def run ( message ) : <EOL> '''<STR_LIT>''' <EOL> insults = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> return random . choice ( insults ) </s>
94,765
from flask import Flask , url_for , session , jsonify <EOL> from flask . ext . oauthlib . contrib . client import OAuth <EOL> class DefaultConfig ( object ) : <EOL> DEBUG = True <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TWITTER_CONSUMER_KEY = '<STR_LIT>' <EOL> TWITTER_CONSUMER_SECRET = '<STR_LIT>' <EOL> app = Flask ( __name__ ) <EOL> app . config . from_object ( DefaultConfig ) <EOL> app . config . from_pyfile ( '<STR_LIT>' , silent = True ) <EOL> oauth = OAuth ( app ) <EOL> twitter = oauth . remote_app ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT:1>' , <EOL> endpoint_url = '<STR_LIT>' , <EOL> request_token_url = '<STR_LIT>' , <EOL> access_token_url = '<STR_LIT>' , <EOL> authorization_url = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def home ( ) :
if oauth_twitter_token ( ) :
3,842,439,058,116,992,000
from flask import Flask , url_for , session , jsonify <EOL> from flask . ext . oauthlib . contrib . client import OAuth <EOL> class DefaultConfig ( object ) : <EOL> DEBUG = True <EOL> SECRET_KEY = '<STR_LIT>' <EOL> TWITTER_CONSUMER_KEY = '<STR_LIT>' <EOL> TWITTER_CONSUMER_SECRET = '<STR_LIT>' <EOL> app = Flask ( __name__ ) <EOL> app . config . from_object ( DefaultConfig ) <EOL> app . config . from_pyfile ( '<STR_LIT>' , silent = True ) <EOL> oauth = OAuth ( app ) <EOL> twitter = oauth . remote_app ( <EOL> name = '<STR_LIT>' , <EOL> version = '<STR_LIT:1>' , <EOL> endpoint_url = '<STR_LIT>' , <EOL> request_token_url = '<STR_LIT>' , <EOL> access_token_url = '<STR_LIT>' , <EOL> authorization_url = '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT:/>' ) <EOL> def home ( ) : <EOL> if oauth_twitter_token ( ) : <EOL> response = twitter . get ( '<STR_LIT>' ) <EOL> return jsonify ( response = response . json ( ) ) <EOL> return '<STR_LIT>' % url_for ( '<STR_LIT>' ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def oauth_twitter ( ) : <EOL> callback_uri = url_for ( '<STR_LIT>' , _external = True ) <EOL> return twitter . authorize ( callback_uri ) <EOL> @ app . route ( '<STR_LIT>' ) <EOL> def oauth_twitter_callback ( ) : <EOL> response = twitter . authorized_response ( ) <EOL> if response : <EOL> session [ '<STR_LIT>' ] = ( response . token , response . token_secret ) <EOL> return repr ( dict ( response ) ) <EOL> else : <EOL> return '<STR_LIT>' % ( url_for ( '<STR_LIT>' ) ) <EOL> @ twitter . tokengetter <EOL> def oauth_twitter_token ( ) : <EOL> return session . get ( '<STR_LIT>' ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> app . run ( ) </s>
94,766
import os <EOL> import sys <EOL> import tempfile <EOL> import unittest <EOL> from flask_oauthlib . client import prepare_request <EOL> try : <EOL> from urlparse import urlparse <EOL> except ImportError : <EOL> from urllib . parse import urlparse <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:3> : <EOL> python_version = <NUM_LIT:3> <EOL> string_type = str <EOL> else : <EOL> python_version = <NUM_LIT:2> <EOL> string_type = unicode <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> class BaseSuite ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> app = self . create_app ( ) <EOL> self . db_fd , self . db_file = tempfile . mkstemp ( ) <EOL> config = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : ( <NUM_LIT:3> , <NUM_LIT:30> ) , <EOL> '<STR_LIT>' : [ '<STR_LIT:email>' , '<STR_LIT:address>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' % self . db_file <EOL> } <EOL> app . config . update ( config ) <EOL> self . setup_app ( app ) <EOL> self . app = app <EOL> self . client = app . test_client ( ) <EOL> return app <EOL> def tearDown ( self ) : <EOL> self . database . session . remove ( ) <EOL> self . database . drop_all ( ) <EOL> os . close ( self . db_fd ) <EOL> os . unlink ( self . db_file ) <EOL> @ property <EOL> def database ( self ) : <EOL> raise NotImplementedError <EOL> def create_app ( self ) : <EOL> raise NotImplementedError <EOL> def setup_app ( self , app ) : <EOL> raise NotImplementedError <EOL> def patch_request ( self , app ) : <EOL> test_client = app . test_client ( ) <EOL> def make_request ( uri , headers = None , data = None , method = None ) : <EOL> uri , headers , data , method = prepare_request ( <EOL> uri , headers , data , method <EOL> ) <EOL> parsed = urlparse ( uri ) <EOL> uri = '<STR_LIT>' % ( parsed . path , parsed . query ) <EOL> resp = test_client . open ( <EOL> uri , headers = headers , data = data , method = method <EOL> ) <EOL> resp . code = resp . status_code <EOL> return resp , resp . data <EOL> return make_request <EOL> def to_unicode ( text ) : <EOL> if not isinstance ( text , string_type ) :
text = text . decode ( '<STR_LIT:utf-8>' )
9,023,000,271,428,150,000
import os <EOL> import sys <EOL> import tempfile <EOL> import unittest <EOL> from flask_oauthlib . client import prepare_request <EOL> try : <EOL> from urlparse import urlparse <EOL> except ImportError : <EOL> from urllib . parse import urlparse <EOL> if sys . version_info [ <NUM_LIT:0> ] == <NUM_LIT:3> : <EOL> python_version = <NUM_LIT:3> <EOL> string_type = str <EOL> else : <EOL> python_version = <NUM_LIT:2> <EOL> string_type = unicode <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:true>' <EOL> class BaseSuite ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> app = self . create_app ( ) <EOL> self . db_fd , self . db_file = tempfile . mkstemp ( ) <EOL> config = { <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT>' : ( <NUM_LIT:3> , <NUM_LIT:30> ) , <EOL> '<STR_LIT>' : [ '<STR_LIT:email>' , '<STR_LIT:address>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' % self . db_file <EOL> } <EOL> app . config . update ( config ) <EOL> self . setup_app ( app ) <EOL> self . app = app <EOL> self . client = app . test_client ( ) <EOL> return app <EOL> def tearDown ( self ) : <EOL> self . database . session . remove ( ) <EOL> self . database . drop_all ( ) <EOL> os . close ( self . db_fd ) <EOL> os . unlink ( self . db_file ) <EOL> @ property <EOL> def database ( self ) : <EOL> raise NotImplementedError <EOL> def create_app ( self ) : <EOL> raise NotImplementedError <EOL> def setup_app ( self , app ) : <EOL> raise NotImplementedError <EOL> def patch_request ( self , app ) : <EOL> test_client = app . test_client ( ) <EOL> def make_request ( uri , headers = None , data = None , method = None ) : <EOL> uri , headers , data , method = prepare_request ( <EOL> uri , headers , data , method <EOL> ) <EOL> parsed = urlparse ( uri ) <EOL> uri = '<STR_LIT>' % ( parsed . path , parsed . query ) <EOL> resp = test_client . open ( <EOL> uri , headers = headers , data = data , method = method <EOL> ) <EOL> resp . code = resp . status_code <EOL> return resp , resp . data <EOL> return make_request <EOL> def to_unicode ( text ) : <EOL> if not isinstance ( text , string_type ) : <EOL> text = text . decode ( '<STR_LIT:utf-8>' ) <EOL> return text <EOL> def to_bytes ( text ) : <EOL> if isinstance ( text , string_type ) : <EOL> text = text . encode ( '<STR_LIT:utf-8>' ) <EOL> return text <EOL> def clean_url ( location ) : <EOL> location = to_unicode ( location ) <EOL> ret = urlparse ( location ) <EOL> return '<STR_LIT>' % ( ret . path , ret . query ) </s>
94,767
"""<STR_LIT>""" <EOL> import os <EOL> import base64 <EOL> from . _base import BaseStorage <EOL> from . _compat import http , urljoin <EOL> from . _utils import ConfigItem <EOL> _missing = object ( ) <EOL> def http_request ( uri , headers = None , data = None , method = None ) : <EOL> if headers is None : <EOL> headers = { } <EOL> if data and not method : <EOL> method = '<STR_LIT:POST>'
elif not method :
3,589,376,274,163,491,000
"""<STR_LIT>""" <EOL> import os <EOL> import base64 <EOL> from . _base import BaseStorage <EOL> from . _compat import http , urljoin <EOL> from . _utils import ConfigItem <EOL> _missing = object ( ) <EOL> def http_request ( uri , headers = None , data = None , method = None ) : <EOL> if headers is None : <EOL> headers = { } <EOL> if data and not method : <EOL> method = '<STR_LIT:POST>' <EOL> elif not method : <EOL> method = '<STR_LIT:GET>' <EOL> req = http . Request ( uri , headers = headers , data = data ) <EOL> req . get_method = lambda : method . upper ( ) <EOL> try : <EOL> resp = http . urlopen ( req ) <EOL> except http . HTTPError as resp : <EOL> pass <EOL> content = resp . read ( ) <EOL> resp . close ( ) <EOL> return resp , content <EOL> class UpyunStorage ( BaseStorage ) : <EOL> bucket = ConfigItem ( '<STR_LIT>' ) <EOL> base_dir = ConfigItem ( '<STR_LIT>' ) <EOL> base_url = ConfigItem ( '<STR_LIT>' , default = _missing ) <EOL> username = ConfigItem ( '<STR_LIT:username>' ) <EOL> password = ConfigItem ( '<STR_LIT:password>' ) <EOL> @ property <EOL> def root ( self ) : <EOL> uri = '<STR_LIT>' % self . bucket <EOL> if self . base_dir : <EOL> uri = urljoin ( uri , self . base_dir ) <EOL> return uri <EOL> def request ( self , uri , data = None , method = None , headers = None ) : <EOL> """<STR_LIT>""" <EOL> auth = base64 . b64encode ( '<STR_LIT>' % ( self . username , self . password ) ) <EOL> if not headers : <EOL> headers = { } <EOL> headers [ '<STR_LIT>' ] = '<STR_LIT>' % auth <EOL> return http_request ( uri , headers = headers , data = data , method = method ) <EOL> def url ( self , filename ) : <EOL> """<STR_LIT>""" <EOL> if self . base_url is _missing : <EOL> base_url = '<STR_LIT>' % self . bucket <EOL> else : <EOL> base_url = self . base_url <EOL> if self . base_dir : <EOL> urlbase = urljoin ( base_url , self . base_dir ) <EOL> return urljoin ( urlbase , filename ) <EOL> def usage ( self ) : <EOL> """<STR_LIT>""" <EOL> uri = '<STR_LIT>' % self . root <EOL> resp , content = self . request ( uri ) <EOL> return content <EOL> def save ( self , storage , filename ) : <EOL> """<STR_LIT>""" <EOL> self . check ( storage ) <EOL> uri = urljoin ( self . root , filename ) <EOL> headers = { '<STR_LIT>' : '<STR_LIT:true>' } <EOL> stream = storage . stream <EOL> if isinstance ( stream , file ) : <EOL> length = os . fstat ( stream . fileno ( ) ) . st_size <EOL> headers [ '<STR_LIT>' ] = length <EOL> self . request ( uri , stream , '<STR_LIT>' , headers ) <EOL> return self . url ( filename ) </s>
94,768
from __future__ import with_statement <EOL> import re <EOL> from flask import Blueprint <EOL> from flask import render_template <EOL> from flask_wtf . csrf import CsrfProtect <EOL> from flask_wtf . csrf import validate_csrf , generate_csrf <EOL> from . base import TestCase , MyForm , to_unicode <EOL> csrf_token_input = re . compile ( <EOL> r'<STR_LIT>' <EOL> ) <EOL> def get_csrf_token ( data ) : <EOL> match = csrf_token_input . search ( to_unicode ( data ) ) <EOL> assert match <EOL> return match . groups ( ) [ <NUM_LIT:0> ] <EOL> class TestCSRF ( TestCase ) : <EOL> def setUp ( self ) : <EOL> app = self . create_app ( ) <EOL> app . config [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> csrf = CsrfProtect ( app ) <EOL> self . csrf = csrf <EOL> @ csrf . exempt <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def csrf_exempt ( ) : <EOL> form = MyForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> name = form . name . data . upper ( ) <EOL> else : <EOL> name = '<STR_LIT>' <EOL> return render_template ( <EOL> "<STR_LIT>" , form = form , name = name <EOL> ) <EOL> @ csrf . exempt <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def csrf_protect_method ( ) : <EOL> csrf . protect ( ) <EOL> return '<STR_LIT>' <EOL> bp = Blueprint ( '<STR_LIT>' , __name__ ) <EOL> @ bp . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def foo ( ) : <EOL> return '<STR_LIT:foo>' <EOL> app . register_blueprint ( bp , url_prefix = '<STR_LIT>' ) <EOL> self . bp = bp <EOL> self . app = app <EOL> self . client = self . app . test_client ( ) <EOL> def test_invalid_csrf ( self ) : <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> @ self . csrf . error_handler <EOL> def invalid ( reason ) : <EOL> return reason <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_invalid_csrf2 ( self ) : <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_invalid_secure_csrf3 ( self ) : <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_valid_csrf ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : csrf_token <EOL> } ) <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_prefixed_csrf ( self ) : <EOL> response = self . client . get ( '<STR_LIT:/>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( '<STR_LIT:/>' , data = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : csrf_token , <EOL> } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_invalid_secure_csrf ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . 
post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { '<STR_LIT>' : csrf_token } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_valid_secure_csrf ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_valid_csrf_method ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( "<STR_LIT>" , data = { <EOL> "<STR_LIT>" : csrf_token <EOL> } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_invalid_csrf_method ( self ) : <EOL> response = self . client . post ( "<STR_LIT>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> @ self . csrf . error_handler <EOL> def invalid ( reason ) : <EOL> return reason <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_empty_csrf_headers ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> self . app . config [ '<STR_LIT>' ] = list ( ) <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_custom_csrf_headers ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> self . app . config [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> response = self . client . 
post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_not_endpoint ( self ) : <EOL> response = self . client . post ( '<STR_LIT>' ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_testing ( self ) : <EOL> self . app . testing = True <EOL> self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> def test_csrf_exempt ( self ) : <EOL> response = self . client . get ( "<STR_LIT>" ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( "<STR_LIT>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : csrf_token <EOL> } ) <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_validate_csrf ( self ) : <EOL> with self . app . test_request_context ( ) : <EOL> assert not validate_csrf ( '<STR_LIT>' ) <EOL> csrf_token = generate_csrf ( ) <EOL> assert validate_csrf ( csrf_token ) <EOL> def test_validate_not_expiring_csrf ( self ) : <EOL> with self . app . test_request_context ( ) : <EOL> csrf_token = generate_csrf ( time_limit = False ) <EOL> assert validate_csrf ( csrf_token , time_limit = False ) <EOL> def test_csrf_token_helper ( self ) : <EOL> @ self . app . route ( "<STR_LIT>" ) <EOL> def withtoken ( ) : <EOL> return render_template ( "<STR_LIT>" ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> assert b'<STR_LIT:#>' in response . data <EOL> def test_csrf_blueprint ( self ) : <EOL> response = self . client . post ( '<STR_LIT>' ) <EOL> assert response . status_code == <NUM_LIT>
self . csrf . exempt ( self . bp )
-1,731,987,693,029,976,600
from __future__ import with_statement <EOL> import re <EOL> from flask import Blueprint <EOL> from flask import render_template <EOL> from flask_wtf . csrf import CsrfProtect <EOL> from flask_wtf . csrf import validate_csrf , generate_csrf <EOL> from . base import TestCase , MyForm , to_unicode <EOL> csrf_token_input = re . compile ( <EOL> r'<STR_LIT>' <EOL> ) <EOL> def get_csrf_token ( data ) : <EOL> match = csrf_token_input . search ( to_unicode ( data ) ) <EOL> assert match <EOL> return match . groups ( ) [ <NUM_LIT:0> ] <EOL> class TestCSRF ( TestCase ) : <EOL> def setUp ( self ) : <EOL> app = self . create_app ( ) <EOL> app . config [ '<STR_LIT>' ] = "<STR_LIT>" <EOL> csrf = CsrfProtect ( app ) <EOL> self . csrf = csrf <EOL> @ csrf . exempt <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def csrf_exempt ( ) : <EOL> form = MyForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> name = form . name . data . upper ( ) <EOL> else : <EOL> name = '<STR_LIT>' <EOL> return render_template ( <EOL> "<STR_LIT>" , form = form , name = name <EOL> ) <EOL> @ csrf . exempt <EOL> @ app . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def csrf_protect_method ( ) : <EOL> csrf . protect ( ) <EOL> return '<STR_LIT>' <EOL> bp = Blueprint ( '<STR_LIT>' , __name__ ) <EOL> @ bp . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def foo ( ) : <EOL> return '<STR_LIT:foo>' <EOL> app . register_blueprint ( bp , url_prefix = '<STR_LIT>' ) <EOL> self . bp = bp <EOL> self . app = app <EOL> self . client = self . app . test_client ( ) <EOL> def test_invalid_csrf ( self ) : <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> @ self . csrf . error_handler <EOL> def invalid ( reason ) : <EOL> return reason <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_invalid_csrf2 ( self ) : <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_invalid_secure_csrf3 ( self ) : <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_valid_csrf ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : csrf_token <EOL> } ) <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_prefixed_csrf ( self ) : <EOL> response = self . client . get ( '<STR_LIT:/>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( '<STR_LIT:/>' , data = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : csrf_token , <EOL> } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_invalid_secure_csrf ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . 
post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { '<STR_LIT>' : csrf_token } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_valid_secure_csrf ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_valid_csrf_method ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( "<STR_LIT>" , data = { <EOL> "<STR_LIT>" : csrf_token <EOL> } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_invalid_csrf_method ( self ) : <EOL> response = self . client . post ( "<STR_LIT>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT> <EOL> @ self . csrf . error_handler <EOL> def invalid ( reason ) : <EOL> return reason <EOL> response = self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_empty_csrf_headers ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> self . app . config [ '<STR_LIT>' ] = list ( ) <EOL> response = self . client . post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_custom_csrf_headers ( self ) : <EOL> response = self . client . get ( "<STR_LIT:/>" , base_url = '<STR_LIT>' ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> self . app . config [ '<STR_LIT>' ] = [ '<STR_LIT>' ] <EOL> response = self . client . 
post ( <EOL> "<STR_LIT:/>" , <EOL> data = { "<STR_LIT:name>" : "<STR_LIT>" } , <EOL> headers = { <EOL> '<STR_LIT>' : csrf_token , <EOL> } , <EOL> environ_base = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> base_url = '<STR_LIT>' , <EOL> ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_not_endpoint ( self ) : <EOL> response = self . client . post ( '<STR_LIT>' ) <EOL> assert response . status_code == <NUM_LIT> <EOL> def test_testing ( self ) : <EOL> self . app . testing = True <EOL> self . client . post ( "<STR_LIT:/>" , data = { "<STR_LIT:name>" : "<STR_LIT>" } ) <EOL> def test_csrf_exempt ( self ) : <EOL> response = self . client . get ( "<STR_LIT>" ) <EOL> csrf_token = get_csrf_token ( response . data ) <EOL> response = self . client . post ( "<STR_LIT>" , data = { <EOL> "<STR_LIT:name>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : csrf_token <EOL> } ) <EOL> assert b'<STR_LIT>' in response . data <EOL> def test_validate_csrf ( self ) : <EOL> with self . app . test_request_context ( ) : <EOL> assert not validate_csrf ( '<STR_LIT>' ) <EOL> csrf_token = generate_csrf ( ) <EOL> assert validate_csrf ( csrf_token ) <EOL> def test_validate_not_expiring_csrf ( self ) : <EOL> with self . app . test_request_context ( ) : <EOL> csrf_token = generate_csrf ( time_limit = False ) <EOL> assert validate_csrf ( csrf_token , time_limit = False ) <EOL> def test_csrf_token_helper ( self ) : <EOL> @ self . app . route ( "<STR_LIT>" ) <EOL> def withtoken ( ) : <EOL> return render_template ( "<STR_LIT>" ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> assert b'<STR_LIT:#>' in response . data <EOL> def test_csrf_blueprint ( self ) : <EOL> response = self . client . post ( '<STR_LIT>' ) <EOL> assert response . status_code == <NUM_LIT> <EOL> self . csrf . exempt ( self . bp ) <EOL> response = self . client . post ( '<STR_LIT>' ) <EOL> assert response . status_code == <NUM_LIT:200> <EOL> def test_csrf_token_macro ( self ) : <EOL> @ self . app . route ( "<STR_LIT>" ) <EOL> def withtoken ( ) : <EOL> return render_template ( "<STR_LIT>" ) <EOL> response = self . client . get ( '<STR_LIT>' ) <EOL> assert b'<STR_LIT:#>' in response . data <EOL> def test_csrf_custom_token_key ( self ) : <EOL> with self . app . test_request_context ( ) : <EOL> default_csrf_token = generate_csrf ( ) <EOL> custom_csrf_token = generate_csrf ( token_key = '<STR_LIT>' ) <EOL> assert default_csrf_token != custom_csrf_token <EOL> assert validate_csrf ( custom_csrf_token , token_key = '<STR_LIT>' ) <EOL> def test_csrf_url_safe ( self ) : <EOL> with self . app . test_request_context ( ) : <EOL> default_csrf_token = generate_csrf ( ) <EOL> url_safe_csrf_token = generate_csrf ( url_safe = True ) <EOL> assert default_csrf_token != url_safe_csrf_token <EOL> assert '<STR_LIT:#>' not in url_safe_csrf_token <EOL> assert re . match ( r'<STR_LIT>' , url_safe_csrf_token ) <EOL> assert validate_csrf ( url_safe_csrf_token , url_safe = True ) </s>
94,769
import terminal <EOL> class MyCommand ( terminal . Command ) : <EOL> def print_title ( self , title ) : <EOL> if '<STR_LIT>' in title : <EOL> print ( terminal . magenta ( title ) ) <EOL> elif '<STR_LIT>' in title : <EOL> print ( terminal . green ( title ) ) <EOL> return self <EOL> program = MyCommand ( '<STR_LIT>' ) <EOL> subcommand = terminal . Command ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> program . action ( subcommand ) <EOL> @ program . action <EOL> def log ( verbose = False ) : <EOL> """<STR_LIT>""" <EOL> terminal . log . config ( verbose = verbose ) <EOL> terminal . log . info ( '<STR_LIT>' ) <EOL> terminal . log . verbose . info ( '<STR_LIT>' ) <EOL> program . option ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> program . option ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> program . parse ( ) <EOL> if program . output :
print ( '<STR_LIT>' % program . output ) </s>
172,572,413,680,678,180
import terminal <EOL> class MyCommand ( terminal . Command ) : <EOL> def print_title ( self , title ) : <EOL> if '<STR_LIT>' in title : <EOL> print ( terminal . magenta ( title ) ) <EOL> elif '<STR_LIT>' in title : <EOL> print ( terminal . green ( title ) ) <EOL> return self <EOL> program = MyCommand ( '<STR_LIT>' ) <EOL> subcommand = terminal . Command ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> program . action ( subcommand ) <EOL> @ program . action <EOL> def log ( verbose = False ) : <EOL> """<STR_LIT>""" <EOL> terminal . log . config ( verbose = verbose ) <EOL> terminal . log . info ( '<STR_LIT>' ) <EOL> terminal . log . verbose . info ( '<STR_LIT>' ) <EOL> program . option ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> program . option ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> program . parse ( ) <EOL> if program . output : <EOL> print ( '<STR_LIT>' % program . output ) </s>
94,770
__license__ = """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __description__ = "<STR_LIT>" <EOL> __api__ = """<STR_LIT>""" <EOL> __todo__ = """<STR_LIT>""" <EOL> import random <EOL> class BaseMathCaptcha ( object ) : <EOL> def __init__ ( self , qty = <NUM_LIT:2> , min = <NUM_LIT:10> , max = <NUM_LIT:30> , str_ops = False ) : <EOL> self . _str_ops = str_ops <EOL> self . _answer = None <EOL> self . _question = None <EOL> self . numbers = [ ] <EOL> for i in xrange ( qty ) : <EOL> num = random . randint ( min , max ) <EOL> self . numbers . append ( num ) <EOL> def check ( self , answer ) : <EOL> if self . _answer is None : <EOL> self . _calculate_answer ( ) <EOL> if int ( answer ) == self . _answer : <EOL> return True <EOL> else : <EOL> return False <EOL> def _calculate_answer ( self ) : <EOL> op = self . _operation ( ) <EOL> self . _answer = reduce ( op , self . numbers ) <EOL> def question ( self ) : <EOL> if self . _question is None : <EOL> str_numbers = [ ] <EOL> for number in self . numbers : <EOL> str_numbers . append ( str ( number ) ) <EOL> op_string = self . _op_string ( ) <EOL> self . _question = op_string . join ( str_numbers ) <EOL> return self . _question <EOL> class AdditionCaptcha ( BaseMathCaptcha ) : <EOL> '<STR_LIT>' <EOL> def _operation ( self ) : <EOL> return lambda a , b : a + b <EOL> def _op_string ( self ) : <EOL> if self . _str_ops is True : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class SubtractionCaptcha ( BaseMathCaptcha ) : <EOL> '<STR_LIT>' <EOL> def _operation ( self ) : <EOL> return lambda a , b : a - b <EOL> def _op_string ( self ) : <EOL> if self . _str_ops is True : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class MultiplicationCaptcha ( BaseMathCaptcha ) : <EOL> '<STR_LIT>' <EOL> def _operation ( self ) : <EOL> return lambda a , b : a * b <EOL> def _op_string ( self ) : <EOL> if self . _str_ops is True : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class MissingNumberCaptcha ( object ) : <EOL> def __init__ ( self , min = <NUM_LIT:1> , max = <NUM_LIT:4> ) : <EOL> if min == max : <EOL> self . _question = "<STR_LIT>" <EOL> self . missing = min <EOL> else :
self . missing = random . randint ( min , max )
-2,032,624,419,083,199,700
__license__ = """<STR_LIT>""" <EOL> __author__ = "<STR_LIT>" <EOL> __email__ = "<STR_LIT>" <EOL> __description__ = "<STR_LIT>" <EOL> __api__ = """<STR_LIT>""" <EOL> __todo__ = """<STR_LIT>""" <EOL> import random <EOL> class BaseMathCaptcha ( object ) : <EOL> def __init__ ( self , qty = <NUM_LIT:2> , min = <NUM_LIT:10> , max = <NUM_LIT:30> , str_ops = False ) : <EOL> self . _str_ops = str_ops <EOL> self . _answer = None <EOL> self . _question = None <EOL> self . numbers = [ ] <EOL> for i in xrange ( qty ) : <EOL> num = random . randint ( min , max ) <EOL> self . numbers . append ( num ) <EOL> def check ( self , answer ) : <EOL> if self . _answer is None : <EOL> self . _calculate_answer ( ) <EOL> if int ( answer ) == self . _answer : <EOL> return True <EOL> else : <EOL> return False <EOL> def _calculate_answer ( self ) : <EOL> op = self . _operation ( ) <EOL> self . _answer = reduce ( op , self . numbers ) <EOL> def question ( self ) : <EOL> if self . _question is None : <EOL> str_numbers = [ ] <EOL> for number in self . numbers : <EOL> str_numbers . append ( str ( number ) ) <EOL> op_string = self . _op_string ( ) <EOL> self . _question = op_string . join ( str_numbers ) <EOL> return self . _question <EOL> class AdditionCaptcha ( BaseMathCaptcha ) : <EOL> '<STR_LIT>' <EOL> def _operation ( self ) : <EOL> return lambda a , b : a + b <EOL> def _op_string ( self ) : <EOL> if self . _str_ops is True : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class SubtractionCaptcha ( BaseMathCaptcha ) : <EOL> '<STR_LIT>' <EOL> def _operation ( self ) : <EOL> return lambda a , b : a - b <EOL> def _op_string ( self ) : <EOL> if self . _str_ops is True : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class MultiplicationCaptcha ( BaseMathCaptcha ) : <EOL> '<STR_LIT>' <EOL> def _operation ( self ) : <EOL> return lambda a , b : a * b <EOL> def _op_string ( self ) : <EOL> if self . _str_ops is True : <EOL> return "<STR_LIT>" <EOL> else : <EOL> return "<STR_LIT>" <EOL> class MissingNumberCaptcha ( object ) : <EOL> def __init__ ( self , min = <NUM_LIT:1> , max = <NUM_LIT:4> ) : <EOL> if min == max : <EOL> self . _question = "<STR_LIT>" <EOL> self . missing = min <EOL> else : <EOL> self . missing = random . randint ( min , max ) <EOL> numbers = range ( min - <NUM_LIT:1> , max + <NUM_LIT:2> ) <EOL> if len ( numbers ) > <NUM_LIT:0> : <EOL> numbers . remove ( self . missing ) <EOL> numbers = map ( lambda x : str ( x ) , numbers ) <EOL> self . _question = "<STR_LIT:U+0020>" . join ( numbers ) <EOL> else : <EOL> self . _question = "<STR_LIT>" <EOL> def check ( self , answer ) : <EOL> if int ( answer ) == self . missing : <EOL> return True <EOL> else : <EOL> return False <EOL> def question ( self ) : <EOL> return self . _question <EOL> def __str__ ( self ) : <EOL> return self . question ( ) </s>
94,771
<s> """<STR_LIT>"""
import tornado . options
-7,207,313,985,924,909,000
"""<STR_LIT>""" <EOL> import tornado . options <EOL> from tornado . options import define , options <EOL> import os <EOL> define ( "<STR_LIT:port>" , default = <NUM_LIT> , help = "<STR_LIT>" , type = int ) <EOL> define ( "<STR_LIT>" , default = <NUM_LIT> , help = "<STR_LIT>" , type = int ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = '<STR_LIT>' , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = True , help = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = [ ] , help = "<STR_LIT>" , multiple = True ) <EOL> define ( "<STR_LIT:host>" , default = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = <NUM_LIT> ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" ) <EOL> define ( "<STR_LIT>" , default = "<STR_LIT>" ) <EOL> class Settings ( object ) : <EOL> def __init__ ( self , ** entries ) : <EOL> self . __dict__ . update ( entries ) <EOL> def __getattribute__ ( self , name ) : <EOL> return object . __getattribute__ ( self , name . lower ( ) ) <EOL> settings = Settings ( ) <EOL> def _ensure_defaults ( dsettings , options , project_root , config_filename ) : <EOL> if options . template_path == '<STR_LIT>' : <EOL> dsettings [ '<STR_LIT>' ] = os . path . join ( project_root , '<STR_LIT>' ) <EOL> if options . static_path == '<STR_LIT>' : <EOL> dsettings [ '<STR_LIT>' ] = os . path . join ( project_root , '<STR_LIT>' ) <EOL> if options . flash_policy_file == '<STR_LIT>' : <EOL> dsettings [ '<STR_LIT>' ] = os . path . join ( project_root , '<STR_LIT>' ) <EOL> dsettings [ '<STR_LIT>' ] = ( ) <EOL> dsettings [ '<STR_LIT>' ] = True <EOL> dsettings [ '<STR_LIT>' ] = True <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT:utf-8>' <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT:localhost>' <EOL> dsettings [ '<STR_LIT>' ] = <NUM_LIT> <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dsettings [ '<STR_LIT>' ] = False <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dsettings [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> dsettings [ '<STR_LIT>' ] = project_root <EOL> dsettings [ '<STR_LIT>' ] = config_filename . replace ( '<STR_LIT:/>' , '<STR_LIT:.>' ) [ : - <NUM_LIT:3> ] . strip ( '<STR_LIT:.>' ) <EOL> dsettings [ '<STR_LIT>' ] = getattr ( options , '<STR_LIT>' , '<STR_LIT>' ) <EOL> dsettings [ '<STR_LIT>' ] = getattr ( options , '<STR_LIT>' , '<STR_LIT:P>' ) <EOL> dsettings [ '<STR_LIT>' ] = getattr ( options , '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> return dsettings <EOL> def _preconfigure ( project_root , config_filename = '<STR_LIT>' , argv = [ ] ) : <EOL> "<STR_LIT>" <EOL> tornado . options . parse_command_line ( argv ) <EOL> tornado . options . parse_config_file ( os . path . 
join ( project_root , config_filename ) ) <EOL> dsettings = { } <EOL> parsed_options = getattr ( options , '<STR_LIT>' , options ) <EOL> for key in parsed_options : <EOL> dsettings [ key ] = getattr ( options , key ) <EOL> _ensure_defaults ( dsettings , options , project_root , config_filename ) <EOL> config = __import__ ( '<STR_LIT>' ) <EOL> for name in dir ( config ) : <EOL> if not name . startswith ( '<STR_LIT:_>' ) : <EOL> dsettings [ name ] = getattr ( config , name ) <EOL> settings . __dict__ . update ( ** dsettings ) <EOL> settings . socket_io_port = settings . port <EOL> return dsettings </s>
94,772
import platform <EOL> import re <EOL> import urllib2 <EOL> import urlparse <EOL> from airy . core . exceptions import ValidationError <EOL> from airy . utils . translation import ugettext_lazy as _ <EOL> from airy . utils . encoding import smart_unicode <EOL> EMPTY_VALUES = ( None , '<STR_LIT>' , [ ] , ( ) , { } ) <EOL> try : <EOL> from airy . conf import settings <EOL> URL_VALIDATOR_USER_AGENT = settings . URL_VALIDATOR_USER_AGENT <EOL> except ImportError : <EOL> URL_VALIDATOR_USER_AGENT = '<STR_LIT>' <EOL> class RegexValidator ( object ) : <EOL> regex = '<STR_LIT>' <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , regex = None , message = None , code = None ) : <EOL> if regex is not None : <EOL> self . regex = regex <EOL> if message is not None : <EOL> self . message = message <EOL> if code is not None : <EOL> self . code = code <EOL> if isinstance ( self . regex , basestring ) : <EOL> self . regex = re . compile ( regex ) <EOL> def __call__ ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if not self . regex . search ( smart_unicode ( value ) ) : <EOL> raise ValidationError ( self . message , code = self . code ) <EOL> class URLValidator ( RegexValidator ) : <EOL> regex = re . compile ( <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> def __init__ ( self , verify_exists = False , <EOL> validator_user_agent = URL_VALIDATOR_USER_AGENT ) : <EOL> super ( URLValidator , self ) . __init__ ( ) <EOL> self . verify_exists = verify_exists <EOL> self . user_agent = validator_user_agent <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( URLValidator , self ) . __call__ ( value ) <EOL> except ValidationError , e : <EOL> if value : <EOL> value = smart_unicode ( value ) <EOL> scheme , netloc , path , query , fragment = urlparse . urlsplit ( value ) <EOL> try : <EOL> netloc = netloc . encode ( '<STR_LIT>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> url = urlparse . urlunsplit ( ( scheme , netloc , path , query , fragment ) ) <EOL> super ( URLValidator , self ) . __call__ ( url ) <EOL> else : <EOL> raise <EOL> else : <EOL> url = value <EOL> if self . verify_exists : <EOL> headers = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . user_agent , <EOL> } <EOL> url = url . encode ( '<STR_LIT:utf-8>' ) <EOL> broken_error = ValidationError ( <EOL> _ ( u'<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> try : <EOL> req = urllib2 . Request ( url , None , headers ) <EOL> req . get_method = lambda : '<STR_LIT>' <EOL> opener = urllib2 . OpenerDirector ( ) <EOL> error_nop = lambda * args , ** kwargs : True <EOL> http_error_processor = urllib2 . HTTPErrorProcessor ( ) <EOL> http_error_processor . http_error_301 = error_nop <EOL> http_error_processor . http_error_302 = error_nop <EOL> http_error_processor . http_error_307 = error_nop <EOL> handlers = [ urllib2 . UnknownHandler ( ) , <EOL> urllib2 . HTTPHandler ( ) , <EOL> urllib2 . HTTPDefaultErrorHandler ( ) , <EOL> urllib2 . FTPHandler ( ) , <EOL> http_error_processor ] <EOL> try : <EOL> import ssl <EOL> handlers . append ( urllib2 . HTTPSHandler ( ) ) <EOL> except : <EOL> pass <EOL> map ( opener . add_handler , handlers ) <EOL> if platform . python_version_tuple ( ) >= ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> opener . open ( req , timeout = <NUM_LIT:10> ) <EOL> else : <EOL> opener . 
open ( req ) <EOL> except ValueError : <EOL> raise ValidationError ( _ ( u'<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> except : <EOL> raise broken_error <EOL> def validate_integer ( value ) : <EOL> try : <EOL> int ( value ) <EOL> except ( ValueError , TypeError ) , e : <EOL> raise ValidationError ( '<STR_LIT>' ) <EOL> class EmailValidator ( RegexValidator ) : <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( EmailValidator , self ) . __call__ ( value ) <EOL> except ValidationError , e : <EOL> if value and u'<STR_LIT:@>' in value : <EOL> parts = value . split ( u'<STR_LIT:@>' ) <EOL> domain_part = parts [ - <NUM_LIT:1> ] <EOL> try : <EOL> parts [ - <NUM_LIT:1> ] = parts [ - <NUM_LIT:1> ] . encode ( '<STR_LIT>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> super ( EmailValidator , self ) . __call__ ( u'<STR_LIT:@>' . join ( parts ) ) <EOL> else : <EOL> raise <EOL> email_re = re . compile ( <EOL> r"<STR_LIT>" <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> validate_email = EmailValidator ( email_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> slug_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_slug = RegexValidator ( slug_re , _ ( u"<STR_LIT>" ) , '<STR_LIT>' ) <EOL> ipv4_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_ipv4_address = RegexValidator ( ipv4_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> comma_separated_int_list_re = re . compile ( '<STR_LIT>' ) <EOL> validate_comma_separated_integer_list = RegexValidator ( comma_separated_int_list_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> class BaseValidator ( object ) : <EOL> compare = lambda self , a , b : a is not b <EOL> clean = lambda self , x : x <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , limit_value ) : <EOL> self . limit_value = limit_value <EOL> def __call__ ( self , value ) : <EOL> cleaned = self . clean ( value ) <EOL> params = { '<STR_LIT>' : self . limit_value , '<STR_LIT>' : cleaned } <EOL> if self . compare ( cleaned , self . limit_value ) : <EOL> raise ValidationError ( <EOL> self . message % params , <EOL> code = self . code , <EOL> params = params , <EOL> ) <EOL> class MaxValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b
message = _ ( u'<STR_LIT>' )
7,076,605,815,008,195,000
import platform <EOL> import re <EOL> import urllib2 <EOL> import urlparse <EOL> from airy . core . exceptions import ValidationError <EOL> from airy . utils . translation import ugettext_lazy as _ <EOL> from airy . utils . encoding import smart_unicode <EOL> EMPTY_VALUES = ( None , '<STR_LIT>' , [ ] , ( ) , { } ) <EOL> try : <EOL> from airy . conf import settings <EOL> URL_VALIDATOR_USER_AGENT = settings . URL_VALIDATOR_USER_AGENT <EOL> except ImportError : <EOL> URL_VALIDATOR_USER_AGENT = '<STR_LIT>' <EOL> class RegexValidator ( object ) : <EOL> regex = '<STR_LIT>' <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , regex = None , message = None , code = None ) : <EOL> if regex is not None : <EOL> self . regex = regex <EOL> if message is not None : <EOL> self . message = message <EOL> if code is not None : <EOL> self . code = code <EOL> if isinstance ( self . regex , basestring ) : <EOL> self . regex = re . compile ( regex ) <EOL> def __call__ ( self , value ) : <EOL> """<STR_LIT>""" <EOL> if not self . regex . search ( smart_unicode ( value ) ) : <EOL> raise ValidationError ( self . message , code = self . code ) <EOL> class URLValidator ( RegexValidator ) : <EOL> regex = re . compile ( <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> def __init__ ( self , verify_exists = False , <EOL> validator_user_agent = URL_VALIDATOR_USER_AGENT ) : <EOL> super ( URLValidator , self ) . __init__ ( ) <EOL> self . verify_exists = verify_exists <EOL> self . user_agent = validator_user_agent <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( URLValidator , self ) . __call__ ( value ) <EOL> except ValidationError , e : <EOL> if value : <EOL> value = smart_unicode ( value ) <EOL> scheme , netloc , path , query , fragment = urlparse . urlsplit ( value ) <EOL> try : <EOL> netloc = netloc . encode ( '<STR_LIT>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> url = urlparse . urlunsplit ( ( scheme , netloc , path , query , fragment ) ) <EOL> super ( URLValidator , self ) . __call__ ( url ) <EOL> else : <EOL> raise <EOL> else : <EOL> url = value <EOL> if self . verify_exists : <EOL> headers = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : self . user_agent , <EOL> } <EOL> url = url . encode ( '<STR_LIT:utf-8>' ) <EOL> broken_error = ValidationError ( <EOL> _ ( u'<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> try : <EOL> req = urllib2 . Request ( url , None , headers ) <EOL> req . get_method = lambda : '<STR_LIT>' <EOL> opener = urllib2 . OpenerDirector ( ) <EOL> error_nop = lambda * args , ** kwargs : True <EOL> http_error_processor = urllib2 . HTTPErrorProcessor ( ) <EOL> http_error_processor . http_error_301 = error_nop <EOL> http_error_processor . http_error_302 = error_nop <EOL> http_error_processor . http_error_307 = error_nop <EOL> handlers = [ urllib2 . UnknownHandler ( ) , <EOL> urllib2 . HTTPHandler ( ) , <EOL> urllib2 . HTTPDefaultErrorHandler ( ) , <EOL> urllib2 . FTPHandler ( ) , <EOL> http_error_processor ] <EOL> try : <EOL> import ssl <EOL> handlers . append ( urllib2 . HTTPSHandler ( ) ) <EOL> except : <EOL> pass <EOL> map ( opener . add_handler , handlers ) <EOL> if platform . python_version_tuple ( ) >= ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> opener . open ( req , timeout = <NUM_LIT:10> ) <EOL> else : <EOL> opener . 
open ( req ) <EOL> except ValueError : <EOL> raise ValidationError ( _ ( u'<STR_LIT>' ) , code = '<STR_LIT>' ) <EOL> except : <EOL> raise broken_error <EOL> def validate_integer ( value ) : <EOL> try : <EOL> int ( value ) <EOL> except ( ValueError , TypeError ) , e : <EOL> raise ValidationError ( '<STR_LIT>' ) <EOL> class EmailValidator ( RegexValidator ) : <EOL> def __call__ ( self , value ) : <EOL> try : <EOL> super ( EmailValidator , self ) . __call__ ( value ) <EOL> except ValidationError , e : <EOL> if value and u'<STR_LIT:@>' in value : <EOL> parts = value . split ( u'<STR_LIT:@>' ) <EOL> domain_part = parts [ - <NUM_LIT:1> ] <EOL> try : <EOL> parts [ - <NUM_LIT:1> ] = parts [ - <NUM_LIT:1> ] . encode ( '<STR_LIT>' ) <EOL> except UnicodeError : <EOL> raise e <EOL> super ( EmailValidator , self ) . __call__ ( u'<STR_LIT:@>' . join ( parts ) ) <EOL> else : <EOL> raise <EOL> email_re = re . compile ( <EOL> r"<STR_LIT>" <EOL> r'<STR_LIT>' <EOL> r'<STR_LIT>' , re . IGNORECASE ) <EOL> validate_email = EmailValidator ( email_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> slug_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_slug = RegexValidator ( slug_re , _ ( u"<STR_LIT>" ) , '<STR_LIT>' ) <EOL> ipv4_re = re . compile ( r'<STR_LIT>' ) <EOL> validate_ipv4_address = RegexValidator ( ipv4_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> comma_separated_int_list_re = re . compile ( '<STR_LIT>' ) <EOL> validate_comma_separated_integer_list = RegexValidator ( comma_separated_int_list_re , _ ( u'<STR_LIT>' ) , '<STR_LIT>' ) <EOL> class BaseValidator ( object ) : <EOL> compare = lambda self , a , b : a is not b <EOL> clean = lambda self , x : x <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> def __init__ ( self , limit_value ) : <EOL> self . limit_value = limit_value <EOL> def __call__ ( self , value ) : <EOL> cleaned = self . clean ( value ) <EOL> params = { '<STR_LIT>' : self . limit_value , '<STR_LIT>' : cleaned } <EOL> if self . compare ( cleaned , self . limit_value ) : <EOL> raise ValidationError ( <EOL> self . message % params , <EOL> code = self . code , <EOL> params = params , <EOL> ) <EOL> class MaxValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MinValueValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a < b <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MinLengthValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a < b <EOL> clean = lambda self , x : len ( x ) <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT>' <EOL> class MaxLengthValidator ( BaseValidator ) : <EOL> compare = lambda self , a , b : a > b <EOL> clean = lambda self , x : len ( x ) <EOL> message = _ ( u'<STR_LIT>' ) <EOL> code = '<STR_LIT:max_length>' </s>
94,773
import types <EOL> import urllib <EOL> import locale <EOL> import datetime <EOL> import codecs <EOL> from decimal import Decimal <EOL> from airy . utils . functional import Promise <EOL> class DjangoUnicodeDecodeError ( UnicodeDecodeError ) : <EOL> def __init__ ( self , obj , * args ) : <EOL> self . obj = obj <EOL> UnicodeDecodeError . __init__ ( self , * args ) <EOL> def __str__ ( self ) : <EOL> original = UnicodeDecodeError . __str__ ( self ) <EOL> return '<STR_LIT>' % ( original , self . obj , <EOL> type ( self . obj ) ) <EOL> class StrAndUnicode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return self . __unicode__ ( ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def smart_unicode ( s , encoding = '<STR_LIT:utf-8>' , strings_only = False , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( s , Promise ) : <EOL> return s <EOL> return force_unicode ( s , encoding , strings_only , errors ) <EOL> def is_protected_type ( obj ) : <EOL> """<STR_LIT>""" <EOL> return isinstance ( obj , ( <EOL> types . NoneType , <EOL> int , long , <EOL> datetime . datetime , datetime . date , datetime . time , <EOL> float , Decimal ) <EOL> ) <EOL> def force_unicode ( s , encoding = '<STR_LIT:utf-8>' , strings_only = False , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( s , unicode ) : <EOL> return s <EOL> if strings_only and is_protected_type ( s ) : <EOL> return s <EOL> try : <EOL> if not isinstance ( s , basestring , ) : <EOL> if hasattr ( s , '<STR_LIT>' ) : <EOL> s = unicode ( s ) <EOL> else : <EOL> try : <EOL> s = unicode ( str ( s ) , encoding , errors ) <EOL> except UnicodeEncodeError : <EOL> if not isinstance ( s , Exception ) : <EOL> raise <EOL> s = '<STR_LIT:U+0020>' . join ( [ force_unicode ( arg , encoding , strings_only , <EOL> errors ) for arg in s ] ) <EOL> elif not isinstance ( s , unicode ) : <EOL> s = s . decode ( encoding , errors ) <EOL> except UnicodeDecodeError , e : <EOL> if not isinstance ( s , Exception ) : <EOL> raise DjangoUnicodeDecodeError ( s , * e . args ) <EOL> else : <EOL> s = '<STR_LIT:U+0020>' . join ( [ force_unicode ( arg , encoding , strings_only , <EOL> errors ) for arg in s ] ) <EOL> return s <EOL> def smart_str ( s , encoding = '<STR_LIT:utf-8>' , strings_only = False , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> if strings_only and isinstance ( s , ( types . NoneType , int ) ) : <EOL> return s <EOL> if isinstance ( s , Promise ) : <EOL> return unicode ( s ) . encode ( encoding , errors ) <EOL> elif not isinstance ( s , basestring ) : <EOL> try : <EOL> return str ( s ) <EOL> except UnicodeEncodeError : <EOL> if isinstance ( s , Exception ) : <EOL> return '<STR_LIT:U+0020>' . join ( [ smart_str ( arg , encoding , strings_only , <EOL> errors ) for arg in s ] ) <EOL> return unicode ( s ) . encode ( encoding , errors ) <EOL> elif isinstance ( s , unicode ) : <EOL> return s . encode ( encoding , errors ) <EOL> elif s and encoding != '<STR_LIT:utf-8>' : <EOL> return s . decode ( '<STR_LIT:utf-8>' , errors ) . encode ( encoding , errors ) <EOL> else :
return s
342,704,063,180,547,600
import types <EOL> import urllib <EOL> import locale <EOL> import datetime <EOL> import codecs <EOL> from decimal import Decimal <EOL> from airy . utils . functional import Promise <EOL> class DjangoUnicodeDecodeError ( UnicodeDecodeError ) : <EOL> def __init__ ( self , obj , * args ) : <EOL> self . obj = obj <EOL> UnicodeDecodeError . __init__ ( self , * args ) <EOL> def __str__ ( self ) : <EOL> original = UnicodeDecodeError . __str__ ( self ) <EOL> return '<STR_LIT>' % ( original , self . obj , <EOL> type ( self . obj ) ) <EOL> class StrAndUnicode ( object ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self ) : <EOL> return self . __unicode__ ( ) . encode ( '<STR_LIT:utf-8>' ) <EOL> def smart_unicode ( s , encoding = '<STR_LIT:utf-8>' , strings_only = False , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( s , Promise ) : <EOL> return s <EOL> return force_unicode ( s , encoding , strings_only , errors ) <EOL> def is_protected_type ( obj ) : <EOL> """<STR_LIT>""" <EOL> return isinstance ( obj , ( <EOL> types . NoneType , <EOL> int , long , <EOL> datetime . datetime , datetime . date , datetime . time , <EOL> float , Decimal ) <EOL> ) <EOL> def force_unicode ( s , encoding = '<STR_LIT:utf-8>' , strings_only = False , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( s , unicode ) : <EOL> return s <EOL> if strings_only and is_protected_type ( s ) : <EOL> return s <EOL> try : <EOL> if not isinstance ( s , basestring , ) : <EOL> if hasattr ( s , '<STR_LIT>' ) : <EOL> s = unicode ( s ) <EOL> else : <EOL> try : <EOL> s = unicode ( str ( s ) , encoding , errors ) <EOL> except UnicodeEncodeError : <EOL> if not isinstance ( s , Exception ) : <EOL> raise <EOL> s = '<STR_LIT:U+0020>' . join ( [ force_unicode ( arg , encoding , strings_only , <EOL> errors ) for arg in s ] ) <EOL> elif not isinstance ( s , unicode ) : <EOL> s = s . decode ( encoding , errors ) <EOL> except UnicodeDecodeError , e : <EOL> if not isinstance ( s , Exception ) : <EOL> raise DjangoUnicodeDecodeError ( s , * e . args ) <EOL> else : <EOL> s = '<STR_LIT:U+0020>' . join ( [ force_unicode ( arg , encoding , strings_only , <EOL> errors ) for arg in s ] ) <EOL> return s <EOL> def smart_str ( s , encoding = '<STR_LIT:utf-8>' , strings_only = False , errors = '<STR_LIT:strict>' ) : <EOL> """<STR_LIT>""" <EOL> if strings_only and isinstance ( s , ( types . NoneType , int ) ) : <EOL> return s <EOL> if isinstance ( s , Promise ) : <EOL> return unicode ( s ) . encode ( encoding , errors ) <EOL> elif not isinstance ( s , basestring ) : <EOL> try : <EOL> return str ( s ) <EOL> except UnicodeEncodeError : <EOL> if isinstance ( s , Exception ) : <EOL> return '<STR_LIT:U+0020>' . join ( [ smart_str ( arg , encoding , strings_only , <EOL> errors ) for arg in s ] ) <EOL> return unicode ( s ) . encode ( encoding , errors ) <EOL> elif isinstance ( s , unicode ) : <EOL> return s . encode ( encoding , errors ) <EOL> elif s and encoding != '<STR_LIT:utf-8>' : <EOL> return s . decode ( '<STR_LIT:utf-8>' , errors ) . encode ( encoding , errors ) <EOL> else : <EOL> return s <EOL> def iri_to_uri ( iri ) : <EOL> """<STR_LIT>""" <EOL> if iri is None : <EOL> return iri <EOL> return urllib . quote ( smart_str ( iri ) , safe = "<STR_LIT>" ) <EOL> def filepath_to_uri ( path ) : <EOL> """<STR_LIT>""" <EOL> if path is None : <EOL> return path <EOL> return urllib . quote ( smart_str ( path ) . 
replace ( "<STR_LIT:\\>" , "<STR_LIT:/>" ) , safe = "<STR_LIT>" ) <EOL> try : <EOL> DEFAULT_LOCALE_ENCODING = locale . getdefaultlocale ( ) [ <NUM_LIT:1> ] or '<STR_LIT:ascii>' <EOL> codecs . lookup ( DEFAULT_LOCALE_ENCODING ) <EOL> except : <EOL> DEFAULT_LOCALE_ENCODING = '<STR_LIT:ascii>' </s>
94,774
import sys , os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . abspath ( os . path . dirname ( __file__ ) ) , '<STR_LIT:..>' ) ) <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ]
source_suffix = '<STR_LIT>'
-8,187,605,045,508,893,000
import sys , os <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( os . path . abspath ( os . path . dirname ( __file__ ) ) , '<STR_LIT:..>' ) ) <EOL> extensions = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> templates_path = [ '<STR_LIT>' ] <EOL> source_suffix = '<STR_LIT>' <EOL> master_doc = '<STR_LIT:index>' <EOL> project = u'<STR_LIT>' <EOL> copyright = u'<STR_LIT>' <EOL> version = '<STR_LIT>' <EOL> release = '<STR_LIT>' <EOL> exclude_patterns = [ '<STR_LIT>' ] <EOL> pygments_style = '<STR_LIT>' <EOL> html_theme = '<STR_LIT>' <EOL> html_static_path = [ '<STR_LIT>' ] <EOL> htmlhelp_basename = '<STR_LIT>' <EOL> latex_elements = { <EOL> } <EOL> latex_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] <EOL> man_pages = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> [ u'<STR_LIT>' ] , <NUM_LIT:1> ) <EOL> ] <EOL> texinfo_documents = [ <EOL> ( '<STR_LIT:index>' , '<STR_LIT>' , u'<STR_LIT>' , <EOL> u'<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ) , <EOL> ] </s>
94,775
"""<STR_LIT>""" <EOL> import collections <EOL> from cryptography . hazmat . primitives . asymmetric import rsa <EOL> import OpenSSL <EOL> import six <EOL> class abstractclassmethod ( classmethod ) : <EOL> """<STR_LIT>""" <EOL> __isabstractmethod__ = True <EOL> def __init__ ( self , target ) : <EOL> target . __isabstractmethod__ = True <EOL> super ( abstractclassmethod , self ) . __init__ ( target ) <EOL> class ComparableX509 ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wrapped ) : <EOL> assert isinstance ( wrapped , OpenSSL . crypto . X509 ) or isinstance ( <EOL> wrapped , OpenSSL . crypto . X509Req ) <EOL> self . wrapped = wrapped <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . wrapped , name ) <EOL> def _dump ( self , filetype = OpenSSL . crypto . FILETYPE_ASN1 ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( self . wrapped , OpenSSL . crypto . X509 ) : <EOL> func = OpenSSL . crypto . dump_certificate <EOL> else : <EOL> func = OpenSSL . crypto . dump_certificate_request <EOL> return func ( filetype , self . wrapped )
def __eq__ ( self , other ) :
6,815,979,420,099,838,000
"""<STR_LIT>""" <EOL> import collections <EOL> from cryptography . hazmat . primitives . asymmetric import rsa <EOL> import OpenSSL <EOL> import six <EOL> class abstractclassmethod ( classmethod ) : <EOL> """<STR_LIT>""" <EOL> __isabstractmethod__ = True <EOL> def __init__ ( self , target ) : <EOL> target . __isabstractmethod__ = True <EOL> super ( abstractclassmethod , self ) . __init__ ( target ) <EOL> class ComparableX509 ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , wrapped ) : <EOL> assert isinstance ( wrapped , OpenSSL . crypto . X509 ) or isinstance ( <EOL> wrapped , OpenSSL . crypto . X509Req ) <EOL> self . wrapped = wrapped <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . wrapped , name ) <EOL> def _dump ( self , filetype = OpenSSL . crypto . FILETYPE_ASN1 ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( self . wrapped , OpenSSL . crypto . X509 ) : <EOL> func = OpenSSL . crypto . dump_certificate <EOL> else : <EOL> func = OpenSSL . crypto . dump_certificate_request <EOL> return func ( filetype , self . wrapped ) <EOL> def __eq__ ( self , other ) : <EOL> if not isinstance ( other , self . __class__ ) : <EOL> return NotImplemented <EOL> return self . _dump ( ) == other . _dump ( ) <EOL> def __hash__ ( self ) : <EOL> return hash ( ( self . __class__ , self . _dump ( ) ) ) <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . __class__ . __name__ , self . wrapped ) <EOL> class ComparableKey ( object ) : <EOL> """<STR_LIT>""" <EOL> __hash__ = NotImplemented <EOL> def __init__ ( self , wrapped ) : <EOL> self . _wrapped = wrapped <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . _wrapped , name ) <EOL> def __eq__ ( self , other ) : <EOL> if ( not isinstance ( other , self . __class__ ) or <EOL> self . _wrapped . __class__ is not other . _wrapped . __class__ ) : <EOL> return NotImplemented <EOL> elif hasattr ( self . _wrapped , '<STR_LIT>' ) : <EOL> return self . private_numbers ( ) == other . private_numbers ( ) <EOL> elif hasattr ( self . _wrapped , '<STR_LIT>' ) : <EOL> return self . public_numbers ( ) == other . public_numbers ( ) <EOL> else : <EOL> return NotImplemented <EOL> def __ne__ ( self , other ) : <EOL> return not self == other <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . __class__ . __name__ , self . _wrapped ) <EOL> def public_key ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . __class__ ( self . _wrapped . public_key ( ) ) <EOL> class ComparableRSAKey ( ComparableKey ) : <EOL> """<STR_LIT>""" <EOL> def __hash__ ( self ) : <EOL> if isinstance ( self . _wrapped , rsa . RSAPrivateKeyWithSerialization ) : <EOL> priv = self . private_numbers ( ) <EOL> pub = priv . public_numbers <EOL> return hash ( ( self . __class__ , priv . p , priv . q , priv . dmp1 , <EOL> priv . dmq1 , priv . iqmp , pub . n , pub . e ) ) <EOL> elif isinstance ( self . _wrapped , rsa . RSAPublicKeyWithSerialization ) : <EOL> pub = self . public_numbers ( ) <EOL> return hash ( ( self . __class__ , pub . n , pub . e ) ) <EOL> class ImmutableMap ( collections . Mapping , collections . Hashable ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( ) <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , ** kwargs ) : <EOL> if set ( kwargs ) != set ( self . __slots__ ) : <EOL> raise TypeError ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( self . __slots__ ) , <EOL> '<STR_LIT:U+002CU+0020>' . 
join ( kwargs ) if kwargs else '<STR_LIT:none>' ) ) <EOL> for slot in self . __slots__ : <EOL> object . __setattr__ ( self , slot , kwargs . pop ( slot ) ) <EOL> def update ( self , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> items = dict ( self ) <EOL> items . update ( kwargs ) <EOL> return type ( self ) ( ** items ) <EOL> def __getitem__ ( self , key ) : <EOL> try : <EOL> return getattr ( self , key ) <EOL> except AttributeError : <EOL> raise KeyError ( key ) <EOL> def __iter__ ( self ) : <EOL> return iter ( self . __slots__ ) <EOL> def __len__ ( self ) : <EOL> return len ( self . __slots__ ) <EOL> def __hash__ ( self ) : <EOL> return hash ( tuple ( getattr ( self , slot ) for slot in self . __slots__ ) ) <EOL> def __setattr__ ( self , name , value ) : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( self . __class__ . __name__ , '<STR_LIT:U+002CU+0020>' . join ( <EOL> '<STR_LIT>' . format ( key , value ) <EOL> for key , value in six . iteritems ( self ) ) ) <EOL> class frozendict ( collections . Mapping , collections . Hashable ) : <EOL> """<STR_LIT>""" <EOL> __slots__ = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> if kwargs and not args : <EOL> items = dict ( kwargs ) <EOL> elif len ( args ) == <NUM_LIT:1> and isinstance ( args [ <NUM_LIT:0> ] , collections . Mapping ) : <EOL> items = args [ <NUM_LIT:0> ] <EOL> else : <EOL> raise TypeError ( ) <EOL> object . __setattr__ ( self , '<STR_LIT>' , items ) <EOL> object . __setattr__ ( self , '<STR_LIT>' , tuple ( sorted ( six . iterkeys ( items ) ) ) ) <EOL> def __getitem__ ( self , key ) : <EOL> return self . _items [ key ] <EOL> def __iter__ ( self ) : <EOL> return iter ( self . _keys ) <EOL> def __len__ ( self ) : <EOL> return len ( self . _items ) <EOL> def _sorted_items ( self ) : <EOL> return tuple ( ( key , self [ key ] ) for key in self . _keys ) <EOL> def __hash__ ( self ) : <EOL> return hash ( self . _sorted_items ( ) ) <EOL> def __getattr__ ( self , name ) : <EOL> try : <EOL> return self . _items [ name ] <EOL> except KeyError : <EOL> raise AttributeError ( name ) <EOL> def __setattr__ ( self , name , value ) : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> def __repr__ ( self ) : <EOL> return '<STR_LIT>' . format ( '<STR_LIT:U+002CU+0020>' . join ( '<STR_LIT>' . format ( <EOL> key , value ) for key , value in self . _sorted_items ( ) ) ) </s>
94,776
"""<STR_LIT>""" <EOL> import os <EOL> import shutil <EOL> import unittest <EOL> import augeas <EOL> import mock <EOL> from certbot import errors <EOL> from certbot_apache . tests import util <EOL> class BasicParserTest ( util . ParserTest ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( BasicParserTest , self ) . setUp ( ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . temp_dir ) <EOL> shutil . rmtree ( self . config_dir ) <EOL> shutil . rmtree ( self . work_dir ) <EOL> def test_find_config_root_no_root ( self ) : <EOL> os . remove ( self . parser . loc [ "<STR_LIT:root>" ] ) <EOL> self . assertRaises ( <EOL> errors . NoInstallationError , self . parser . _find_config_root ) <EOL> def test_parse_file ( self ) : <EOL> """<STR_LIT>""" <EOL> file_path = os . path . join ( <EOL> self . config_path , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . parser . _parse_file ( file_path ) <EOL> matches = self . parser . aug . match ( <EOL> "<STR_LIT>" % file_path ) <EOL> self . assertTrue ( matches ) <EOL> def test_find_dir ( self ) : <EOL> test = self . parser . find_dir ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> test2 = self . parser . find_dir ( "<STR_LIT>" ) <EOL> self . assertEqual ( len ( test ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( test2 ) , <NUM_LIT:4> ) <EOL> def test_add_dir ( self ) : <EOL> aug_default = "<STR_LIT>" + self . parser . loc [ "<STR_LIT:default>" ] <EOL> self . parser . add_dir ( aug_default , "<STR_LIT>" , "<STR_LIT:test>" ) <EOL> self . assertTrue ( <EOL> self . parser . find_dir ( "<STR_LIT>" , "<STR_LIT:test>" , aug_default ) ) <EOL> self . parser . add_dir ( aug_default , "<STR_LIT>" , [ "<STR_LIT:1>" , "<STR_LIT:2>" , "<STR_LIT:3>" , "<STR_LIT:4>" ] ) <EOL> matches = self . parser . find_dir ( "<STR_LIT>" , None , aug_default ) <EOL> for i , match in enumerate ( matches ) : <EOL> self . assertEqual ( self . parser . aug . get ( match ) , str ( i + <NUM_LIT:1> ) ) <EOL> def test_add_dir_to_ifmodssl ( self ) : <EOL> """<STR_LIT>""" <EOL> from certbot_apache . parser import get_aug_path <EOL> self . parser . modules . add ( "<STR_LIT>" ) <EOL> self . parser . add_dir_to_ifmodssl ( <EOL> get_aug_path ( self . parser . loc [ "<STR_LIT:default>" ] ) , <EOL> "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> matches = self . parser . find_dir ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( matches ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( "<STR_LIT>" in matches [ <NUM_LIT:0> ] ) <EOL> def test_add_dir_to_ifmodssl_multiple ( self ) : <EOL> from certbot_apache . parser import get_aug_path <EOL> self . parser . modules . add ( "<STR_LIT>" ) <EOL> self . parser . add_dir_to_ifmodssl ( <EOL> get_aug_path ( self . parser . loc [ "<STR_LIT:default>" ] ) , <EOL> "<STR_LIT>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> matches = self . parser . find_dir ( "<STR_LIT>" ) <EOL> self . assertEqual ( len ( matches ) , <NUM_LIT:3> ) <EOL> self . assertTrue ( "<STR_LIT>" in matches [ <NUM_LIT:0> ] ) <EOL> def test_get_aug_path ( self ) : <EOL> from certbot_apache . parser import get_aug_path <EOL> self . assertEqual ( "<STR_LIT>" , get_aug_path ( "<STR_LIT>" ) ) <EOL> def test_set_locations ( self ) : <EOL> with mock . patch ( "<STR_LIT>" ) as mock_path : <EOL> mock_path . isfile . side_effect = [ False , False ] <EOL> results = self . parser . _set_locations ( ) <EOL> self . assertEqual ( results [ "<STR_LIT:default>" ] , results [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( results [ "<STR_LIT:default>" ] , results [ "<STR_LIT:name>" ] ) <EOL> @ mock . 
patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_variables ( self , mock_cfg ) : <EOL> mock_cfg . return_value = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> expected_vars = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> self . parser . update_runtime_variables ( ) <EOL> self . assertEqual ( self . parser . variables , expected_vars ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_vars_bad_output ( self , mock_cfg ) : <EOL> mock_cfg . return_value = "<STR_LIT>" <EOL> self . parser . update_runtime_variables ( ) <EOL> mock_cfg . return_value = "<STR_LIT>" <EOL> self . assertRaises ( <EOL> errors . PluginError , self . parser . update_runtime_variables ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_vars_bad_ctl ( self , mock_popen , mock_const ) : <EOL> mock_popen . side_effect = OSError <EOL> mock_const . return_value = "<STR_LIT>" <EOL> self . assertRaises ( <EOL> errors . MisconfigurationError , <EOL> self . parser . update_runtime_variables ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_vars_bad_exit ( self , mock_popen ) : <EOL> mock_popen ( ) . communicate . return_value = ( "<STR_LIT>" , "<STR_LIT>" )
mock_popen . returncode = - <NUM_LIT:1>
-4,652,303,591,504,808,000
"""<STR_LIT>""" <EOL> import os <EOL> import shutil <EOL> import unittest <EOL> import augeas <EOL> import mock <EOL> from certbot import errors <EOL> from certbot_apache . tests import util <EOL> class BasicParserTest ( util . ParserTest ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( BasicParserTest , self ) . setUp ( ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . temp_dir ) <EOL> shutil . rmtree ( self . config_dir ) <EOL> shutil . rmtree ( self . work_dir ) <EOL> def test_find_config_root_no_root ( self ) : <EOL> os . remove ( self . parser . loc [ "<STR_LIT:root>" ] ) <EOL> self . assertRaises ( <EOL> errors . NoInstallationError , self . parser . _find_config_root ) <EOL> def test_parse_file ( self ) : <EOL> """<STR_LIT>""" <EOL> file_path = os . path . join ( <EOL> self . config_path , "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . parser . _parse_file ( file_path ) <EOL> matches = self . parser . aug . match ( <EOL> "<STR_LIT>" % file_path ) <EOL> self . assertTrue ( matches ) <EOL> def test_find_dir ( self ) : <EOL> test = self . parser . find_dir ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> test2 = self . parser . find_dir ( "<STR_LIT>" ) <EOL> self . assertEqual ( len ( test ) , <NUM_LIT:1> ) <EOL> self . assertEqual ( len ( test2 ) , <NUM_LIT:4> ) <EOL> def test_add_dir ( self ) : <EOL> aug_default = "<STR_LIT>" + self . parser . loc [ "<STR_LIT:default>" ] <EOL> self . parser . add_dir ( aug_default , "<STR_LIT>" , "<STR_LIT:test>" ) <EOL> self . assertTrue ( <EOL> self . parser . find_dir ( "<STR_LIT>" , "<STR_LIT:test>" , aug_default ) ) <EOL> self . parser . add_dir ( aug_default , "<STR_LIT>" , [ "<STR_LIT:1>" , "<STR_LIT:2>" , "<STR_LIT:3>" , "<STR_LIT:4>" ] ) <EOL> matches = self . parser . find_dir ( "<STR_LIT>" , None , aug_default ) <EOL> for i , match in enumerate ( matches ) : <EOL> self . assertEqual ( self . parser . aug . get ( match ) , str ( i + <NUM_LIT:1> ) ) <EOL> def test_add_dir_to_ifmodssl ( self ) : <EOL> """<STR_LIT>""" <EOL> from certbot_apache . parser import get_aug_path <EOL> self . parser . modules . add ( "<STR_LIT>" ) <EOL> self . parser . add_dir_to_ifmodssl ( <EOL> get_aug_path ( self . parser . loc [ "<STR_LIT:default>" ] ) , <EOL> "<STR_LIT>" , [ "<STR_LIT>" ] ) <EOL> matches = self . parser . find_dir ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . assertEqual ( len ( matches ) , <NUM_LIT:1> ) <EOL> self . assertTrue ( "<STR_LIT>" in matches [ <NUM_LIT:0> ] ) <EOL> def test_add_dir_to_ifmodssl_multiple ( self ) : <EOL> from certbot_apache . parser import get_aug_path <EOL> self . parser . modules . add ( "<STR_LIT>" ) <EOL> self . parser . add_dir_to_ifmodssl ( <EOL> get_aug_path ( self . parser . loc [ "<STR_LIT:default>" ] ) , <EOL> "<STR_LIT>" , [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> matches = self . parser . find_dir ( "<STR_LIT>" ) <EOL> self . assertEqual ( len ( matches ) , <NUM_LIT:3> ) <EOL> self . assertTrue ( "<STR_LIT>" in matches [ <NUM_LIT:0> ] ) <EOL> def test_get_aug_path ( self ) : <EOL> from certbot_apache . parser import get_aug_path <EOL> self . assertEqual ( "<STR_LIT>" , get_aug_path ( "<STR_LIT>" ) ) <EOL> def test_set_locations ( self ) : <EOL> with mock . patch ( "<STR_LIT>" ) as mock_path : <EOL> mock_path . isfile . side_effect = [ False , False ] <EOL> results = self . parser . _set_locations ( ) <EOL> self . assertEqual ( results [ "<STR_LIT:default>" ] , results [ "<STR_LIT>" ] ) <EOL> self . assertEqual ( results [ "<STR_LIT:default>" ] , results [ "<STR_LIT:name>" ] ) <EOL> @ mock . 
patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_variables ( self , mock_cfg ) : <EOL> mock_cfg . return_value = ( <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> ) <EOL> expected_vars = { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" } <EOL> self . parser . update_runtime_variables ( ) <EOL> self . assertEqual ( self . parser . variables , expected_vars ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_vars_bad_output ( self , mock_cfg ) : <EOL> mock_cfg . return_value = "<STR_LIT>" <EOL> self . parser . update_runtime_variables ( ) <EOL> mock_cfg . return_value = "<STR_LIT>" <EOL> self . assertRaises ( <EOL> errors . PluginError , self . parser . update_runtime_variables ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_vars_bad_ctl ( self , mock_popen , mock_const ) : <EOL> mock_popen . side_effect = OSError <EOL> mock_const . return_value = "<STR_LIT>" <EOL> self . assertRaises ( <EOL> errors . MisconfigurationError , <EOL> self . parser . update_runtime_variables ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_update_runtime_vars_bad_exit ( self , mock_popen ) : <EOL> mock_popen ( ) . communicate . return_value = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> mock_popen . returncode = - <NUM_LIT:1> <EOL> self . assertRaises ( <EOL> errors . MisconfigurationError , <EOL> self . parser . update_runtime_variables ) <EOL> class ParserInitTest ( util . ApacheTest ) : <EOL> def setUp ( self ) : <EOL> super ( ParserInitTest , self ) . setUp ( ) <EOL> self . aug = augeas . Augeas ( <EOL> flags = augeas . Augeas . NONE | augeas . Augeas . NO_MODL_AUTOLOAD ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . temp_dir ) <EOL> shutil . rmtree ( self . config_dir ) <EOL> shutil . rmtree ( self . work_dir ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_unparsable ( self , mock_cfg ) : <EOL> from certbot_apache . parser import ApacheParser <EOL> mock_cfg . return_value = ( '<STR_LIT>' ) <EOL> self . assertRaises ( <EOL> errors . PluginError , <EOL> ApacheParser , self . aug , os . path . relpath ( self . config_path ) , <EOL> "<STR_LIT>" , version = ( <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT> ) ) <EOL> def test_root_normalized ( self ) : <EOL> from certbot_apache . parser import ApacheParser <EOL> with mock . patch ( "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> path = os . path . join ( <EOL> self . temp_dir , <EOL> "<STR_LIT>" ) <EOL> parser = ApacheParser ( self . aug , path , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( parser . root , self . config_path ) <EOL> def test_root_absolute ( self ) : <EOL> from certbot_apache . parser import ApacheParser <EOL> with mock . patch ( "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> parser = ApacheParser ( <EOL> self . aug , os . path . relpath ( self . config_path ) , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( parser . root , self . config_path ) <EOL> def test_root_no_trailing_slash ( self ) : <EOL> from certbot_apache . parser import ApacheParser <EOL> with mock . patch ( "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> parser = ApacheParser ( <EOL> self . aug , self . config_path + os . path . sep , <EOL> "<STR_LIT>" ) <EOL> self . assertEqual ( parser . root , self . 
config_path ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
94,777
"""<STR_LIT>""" <EOL> import os <EOL> import shutil <EOL> import unittest <EOL> import mock <EOL> import OpenSSL <EOL> from acme import challenges <EOL> from acme import messages <EOL> from certbot import achallenges <EOL> from certbot import errors <EOL> from certbot_nginx . tests import util <EOL> class NginxConfiguratorTest ( util . NginxTest ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( NginxConfiguratorTest , self ) . setUp ( ) <EOL> self . config = util . get_nginx_configurator ( <EOL> self . config_path , self . config_dir , self . work_dir ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . temp_dir ) <EOL> shutil . rmtree ( self . config_dir ) <EOL> shutil . rmtree ( self . work_dir ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_prepare_no_install ( self , mock_exe_exists ) : <EOL> mock_exe_exists . return_value = False <EOL> self . assertRaises ( <EOL> errors . NoInstallationError , self . config . prepare ) <EOL> def test_prepare ( self ) : <EOL> self . assertEquals ( ( <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:2> ) , self . config . version ) <EOL> self . assertEquals ( <NUM_LIT:5> , len ( self . config . parser . parsed ) ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_prepare_initializes_version ( self , mock_popen , mock_exe_exists ) : <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] ) ) <EOL> mock_exe_exists . return_value = True <EOL> self . config . version = None <EOL> self . config . config_test = mock . Mock ( ) <EOL> self . config . prepare ( ) <EOL> self . assertEquals ( ( <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:2> ) , self . config . version ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_get_all_names ( self , mock_gethostbyaddr ) : <EOL> mock_gethostbyaddr . return_value = ( '<STR_LIT>' , [ ] , [ ] ) <EOL> names = self . config . get_all_names ( ) <EOL> self . assertEqual ( names , set ( <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:localhost>" , "<STR_LIT>" , r"<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> def test_supported_enhancements ( self ) : <EOL> self . assertEqual ( [ '<STR_LIT>' ] , self . config . supported_enhancements ( ) ) <EOL> def test_enhance ( self ) : <EOL> self . assertRaises ( <EOL> errors . PluginError , self . config . enhance , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_get_chall_pref ( self ) : <EOL> self . assertEqual ( [ challenges . TLSSNI01 ] , <EOL> self . config . get_chall_pref ( '<STR_LIT>' ) ) <EOL> def test_save ( self ) : <EOL> filep = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . parser . add_server_directives ( <EOL> filep , set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ [ '<STR_LIT>' , '<STR_LIT>' ] ] , <EOL> replace = False ) <EOL> self . config . save ( ) <EOL> parsed = self . config . parser . _parse_files ( filep , override = True ) <EOL> self . 
assertEqual ( [ [ [ '<STR_LIT>' ] , [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ] ] ] , <EOL> parsed [ <NUM_LIT:0> ] ) <EOL> def test_choose_vhost ( self ) : <EOL> localhost_conf = set ( [ '<STR_LIT:localhost>' , r'<STR_LIT>' ] ) <EOL> server_conf = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> example_conf = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> foo_conf = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> results = { '<STR_LIT:localhost>' : localhost_conf , <EOL> '<STR_LIT>' : server_conf , <EOL> '<STR_LIT>' : example_conf , <EOL> '<STR_LIT>' : example_conf , <EOL> '<STR_LIT>' : example_conf , <EOL> '<STR_LIT>' : foo_conf , <EOL> '<STR_LIT>' : foo_conf , <EOL> '<STR_LIT>' : localhost_conf } <EOL> conf_path = { '<STR_LIT:localhost>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" } <EOL> bad_results = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for name in results : <EOL> vhost = self . config . choose_vhost ( name ) <EOL> path = os . path . relpath ( vhost . filep , self . temp_dir ) <EOL> self . assertEqual ( results [ name ] , vhost . names ) <EOL> self . assertEqual ( conf_path [ name ] , path ) <EOL> for name in bad_results : <EOL> self . assertEqual ( set ( [ name ] ) , self . config . choose_vhost ( name ) . names ) <EOL> def test_more_info ( self ) : <EOL> self . assertTrue ( '<STR_LIT>' in self . config . more_info ( ) ) <EOL> def test_deploy_cert_stapling ( self ) : <EOL> self . config . version = ( <NUM_LIT:1> , <NUM_LIT:9> , <NUM_LIT:6> ) <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . save ( ) <EOL> self . config . parser . load ( ) <EOL> generated_conf = self . config . parser . parsed [ example_conf ] <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <NUM_LIT:2> ) ) <EOL> def test_deploy_cert_stapling_requires_chain_path ( self ) : <EOL> self . config . version = ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:7> ) <EOL> self . assertRaises ( errors . PluginError , self . config . deploy_cert , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> None , <EOL> "<STR_LIT>" ) <EOL> def test_deploy_cert_requires_fullchain_path ( self ) : <EOL> self . config . version = ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> self . assertRaises ( errors . PluginError , self . config . deploy_cert , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> None ) <EOL> def test_deploy_cert ( self ) : <EOL> server_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> nginx_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . 
version = ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . save ( ) <EOL> self . config . parser . load ( ) <EOL> parsed_example_conf = util . filter_comments ( self . config . parser . parsed [ example_conf ] ) <EOL> parsed_server_conf = util . filter_comments ( self . config . parser . parsed [ server_conf ] ) <EOL> parsed_nginx_conf = util . filter_comments ( self . config . parser . parsed [ nginx_conf ] ) <EOL> self . assertEqual ( [ [ [ '<STR_LIT>' ] , <EOL> [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , self . config . parser . loc [ "<STR_LIT>" ] ] <EOL> ] ] ] , <EOL> parsed_example_conf ) <EOL> self . assertEqual ( [ [ '<STR_LIT>' , '<STR_LIT>' ] ] , <EOL> parsed_server_conf ) <EOL> self . assertTrue ( util . contains_at_depth ( <EOL> parsed_nginx_conf , <EOL> [ [ '<STR_LIT>' ] , <EOL> [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ [ '<STR_LIT:location>' , '<STR_LIT:/>' ] , <EOL> [ [ '<STR_LIT:root>' , '<STR_LIT:html>' ] , <EOL> [ '<STR_LIT:index>' , '<STR_LIT>' ] ] ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , self . config . parser . loc [ "<STR_LIT>" ] ] ] ] , <EOL> <NUM_LIT:2> ) ) <EOL> def test_get_all_certs_keys ( self ) : <EOL> nginx_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . save ( ) <EOL> self . config . parser . load ( ) <EOL> self . assertEqual ( set ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , example_conf ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , nginx_conf ) , <EOL> ] ) , self . config . get_all_certs_keys ( ) )
@ mock . patch ( "<STR_LIT>" )
757,120,737,524,549,100
"""<STR_LIT>""" <EOL> import os <EOL> import shutil <EOL> import unittest <EOL> import mock <EOL> import OpenSSL <EOL> from acme import challenges <EOL> from acme import messages <EOL> from certbot import achallenges <EOL> from certbot import errors <EOL> from certbot_nginx . tests import util <EOL> class NginxConfiguratorTest ( util . NginxTest ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> super ( NginxConfiguratorTest , self ) . setUp ( ) <EOL> self . config = util . get_nginx_configurator ( <EOL> self . config_path , self . config_dir , self . work_dir ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . temp_dir ) <EOL> shutil . rmtree ( self . config_dir ) <EOL> shutil . rmtree ( self . work_dir ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_prepare_no_install ( self , mock_exe_exists ) : <EOL> mock_exe_exists . return_value = False <EOL> self . assertRaises ( <EOL> errors . NoInstallationError , self . config . prepare ) <EOL> def test_prepare ( self ) : <EOL> self . assertEquals ( ( <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:2> ) , self . config . version ) <EOL> self . assertEquals ( <NUM_LIT:5> , len ( self . config . parser . parsed ) ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_prepare_initializes_version ( self , mock_popen , mock_exe_exists ) : <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] ) ) <EOL> mock_exe_exists . return_value = True <EOL> self . config . version = None <EOL> self . config . config_test = mock . Mock ( ) <EOL> self . config . prepare ( ) <EOL> self . assertEquals ( ( <NUM_LIT:1> , <NUM_LIT:6> , <NUM_LIT:2> ) , self . config . version ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_get_all_names ( self , mock_gethostbyaddr ) : <EOL> mock_gethostbyaddr . return_value = ( '<STR_LIT>' , [ ] , [ ] ) <EOL> names = self . config . get_all_names ( ) <EOL> self . assertEqual ( names , set ( <EOL> [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:localhost>" , "<STR_LIT>" , r"<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] ) ) <EOL> def test_supported_enhancements ( self ) : <EOL> self . assertEqual ( [ '<STR_LIT>' ] , self . config . supported_enhancements ( ) ) <EOL> def test_enhance ( self ) : <EOL> self . assertRaises ( <EOL> errors . PluginError , self . config . enhance , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_get_chall_pref ( self ) : <EOL> self . assertEqual ( [ challenges . TLSSNI01 ] , <EOL> self . config . get_chall_pref ( '<STR_LIT>' ) ) <EOL> def test_save ( self ) : <EOL> filep = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . parser . add_server_directives ( <EOL> filep , set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) , <EOL> [ [ '<STR_LIT>' , '<STR_LIT>' ] ] , <EOL> replace = False ) <EOL> self . config . save ( ) <EOL> parsed = self . config . parser . _parse_files ( filep , override = True ) <EOL> self . 
assertEqual ( [ [ [ '<STR_LIT>' ] , [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> ] ] ] , <EOL> parsed [ <NUM_LIT:0> ] ) <EOL> def test_choose_vhost ( self ) : <EOL> localhost_conf = set ( [ '<STR_LIT:localhost>' , r'<STR_LIT>' ] ) <EOL> server_conf = set ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> example_conf = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> foo_conf = set ( [ '<STR_LIT>' , '<STR_LIT>' ] ) <EOL> results = { '<STR_LIT:localhost>' : localhost_conf , <EOL> '<STR_LIT>' : server_conf , <EOL> '<STR_LIT>' : example_conf , <EOL> '<STR_LIT>' : example_conf , <EOL> '<STR_LIT>' : example_conf , <EOL> '<STR_LIT>' : foo_conf , <EOL> '<STR_LIT>' : foo_conf , <EOL> '<STR_LIT>' : localhost_conf } <EOL> conf_path = { '<STR_LIT:localhost>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" } <EOL> bad_results = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> for name in results : <EOL> vhost = self . config . choose_vhost ( name ) <EOL> path = os . path . relpath ( vhost . filep , self . temp_dir ) <EOL> self . assertEqual ( results [ name ] , vhost . names ) <EOL> self . assertEqual ( conf_path [ name ] , path ) <EOL> for name in bad_results : <EOL> self . assertEqual ( set ( [ name ] ) , self . config . choose_vhost ( name ) . names ) <EOL> def test_more_info ( self ) : <EOL> self . assertTrue ( '<STR_LIT>' in self . config . more_info ( ) ) <EOL> def test_deploy_cert_stapling ( self ) : <EOL> self . config . version = ( <NUM_LIT:1> , <NUM_LIT:9> , <NUM_LIT:6> ) <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . save ( ) <EOL> self . config . parser . load ( ) <EOL> generated_conf = self . config . parser . parsed [ example_conf ] <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <NUM_LIT:2> ) ) <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <NUM_LIT:2> ) ) <EOL> def test_deploy_cert_stapling_requires_chain_path ( self ) : <EOL> self . config . version = ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:7> ) <EOL> self . assertRaises ( errors . PluginError , self . config . deploy_cert , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> None , <EOL> "<STR_LIT>" ) <EOL> def test_deploy_cert_requires_fullchain_path ( self ) : <EOL> self . config . version = ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> self . assertRaises ( errors . PluginError , self . config . deploy_cert , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> None ) <EOL> def test_deploy_cert ( self ) : <EOL> server_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> nginx_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . 
version = ( <NUM_LIT:1> , <NUM_LIT:3> , <NUM_LIT:1> ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . save ( ) <EOL> self . config . parser . load ( ) <EOL> parsed_example_conf = util . filter_comments ( self . config . parser . parsed [ example_conf ] ) <EOL> parsed_server_conf = util . filter_comments ( self . config . parser . parsed [ server_conf ] ) <EOL> parsed_nginx_conf = util . filter_comments ( self . config . parser . parsed [ nginx_conf ] ) <EOL> self . assertEqual ( [ [ [ '<STR_LIT>' ] , <EOL> [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT:127.0.0.1>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , self . config . parser . loc [ "<STR_LIT>" ] ] <EOL> ] ] ] , <EOL> parsed_example_conf ) <EOL> self . assertEqual ( [ [ '<STR_LIT>' , '<STR_LIT>' ] ] , <EOL> parsed_server_conf ) <EOL> self . assertTrue ( util . contains_at_depth ( <EOL> parsed_nginx_conf , <EOL> [ [ '<STR_LIT>' ] , <EOL> [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ [ '<STR_LIT:location>' , '<STR_LIT:/>' ] , <EOL> [ [ '<STR_LIT:root>' , '<STR_LIT:html>' ] , <EOL> [ '<STR_LIT:index>' , '<STR_LIT>' ] ] ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ '<STR_LIT>' , self . config . parser . loc [ "<STR_LIT>" ] ] ] ] , <EOL> <NUM_LIT:2> ) ) <EOL> def test_get_all_certs_keys ( self ) : <EOL> nginx_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . deploy_cert ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> self . config . save ( ) <EOL> self . config . parser . load ( ) <EOL> self . assertEqual ( set ( [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' , example_conf ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , nginx_conf ) , <EOL> ] ) , self . config . get_all_certs_keys ( ) ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_perform ( self , mock_restart , mock_perform ) : <EOL> achall1 = achallenges . KeyAuthorizationAnnotatedChallenge ( <EOL> challb = messages . ChallengeBody ( <EOL> chall = challenges . TLSSNI01 ( token = "<STR_LIT>" ) , <EOL> uri = "<STR_LIT>" , <EOL> status = messages . Status ( "<STR_LIT>" ) , <EOL> ) , domain = "<STR_LIT:localhost>" , account_key = self . rsa512jwk ) <EOL> achall2 = achallenges . KeyAuthorizationAnnotatedChallenge ( <EOL> challb = messages . ChallengeBody ( <EOL> chall = challenges . TLSSNI01 ( token = "<STR_LIT>" ) , <EOL> uri = "<STR_LIT>" , <EOL> status = messages . Status ( "<STR_LIT>" ) , <EOL> ) , domain = "<STR_LIT>" , account_key = self . rsa512jwk ) <EOL> expected = [ <EOL> achall1 . response ( self . rsa512jwk ) , <EOL> achall2 . response ( self . rsa512jwk ) , <EOL> ] <EOL> mock_perform . return_value = expected <EOL> responses = self . config . 
perform ( [ achall1 , achall2 ] ) <EOL> self . assertEqual ( mock_perform . call_count , <NUM_LIT:1> ) <EOL> self . assertEqual ( responses , expected ) <EOL> self . assertEqual ( mock_restart . call_count , <NUM_LIT:1> ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_get_version ( self , mock_popen ) : <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ] ) ) <EOL> self . assertEqual ( self . config . get_version ( ) , ( <NUM_LIT:1> , <NUM_LIT:4> , <NUM_LIT:2> ) ) <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) ) <EOL> self . assertEqual ( self . config . get_version ( ) , ( <NUM_LIT:0> , <NUM_LIT:9> ) ) <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) ) <EOL> self . assertRaises ( errors . PluginError , self . config . get_version ) <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) ) <EOL> self . assertRaises ( errors . PluginError , self . config . get_version ) <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) ) <EOL> self . assertRaises ( errors . PluginError , self . config . get_version ) <EOL> mock_popen ( ) . communicate . return_value = ( <EOL> "<STR_LIT>" , "<STR_LIT:\n>" . join ( [ "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) ) <EOL> self . assertRaises ( errors . NotSupportedError , self . config . get_version ) <EOL> mock_popen . side_effect = OSError ( "<STR_LIT>" ) <EOL> self . assertRaises ( errors . PluginError , self . config . get_version ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_nginx_restart ( self , mock_popen ) : <EOL> mocked = mock_popen ( ) <EOL> mocked . communicate . return_value = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mocked . returncode = <NUM_LIT:0> <EOL> self . config . restart ( ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_nginx_restart_fail ( self , mock_popen ) : <EOL> mocked = mock_popen ( ) <EOL> mocked . communicate . return_value = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> mocked . returncode = <NUM_LIT:1> <EOL> self . assertRaises ( errors . MisconfigurationError , self . config . restart ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_no_nginx_start ( self , mock_popen ) : <EOL> mock_popen . side_effect = OSError ( "<STR_LIT>" ) <EOL> self . assertRaises ( errors . MisconfigurationError , self . config . restart ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_config_test ( self , _ ) : <EOL> self . config . config_test ( ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_config_test_bad_process ( self , mock_run_script ) : <EOL> mock_run_script . side_effect = errors . SubprocessError <EOL> self . assertRaises ( errors . MisconfigurationError , self . config . config_test ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_recovery_routine_throws_error_from_reverter ( self , mock_recovery_routine ) : <EOL> mock_recovery_routine . side_effect = errors . ReverterError ( "<STR_LIT:foo>" ) <EOL> self . assertRaises ( errors . 
PluginError , self . config . recovery_routine ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_view_config_changes_throws_error_from_reverter ( self , mock_view_config_changes ) : <EOL> mock_view_config_changes . side_effect = errors . ReverterError ( "<STR_LIT:foo>" ) <EOL> self . assertRaises ( errors . PluginError , self . config . view_config_changes ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_rollback_checkpoints_throws_error_from_reverter ( self , mock_rollback_checkpoints ) : <EOL> mock_rollback_checkpoints . side_effect = errors . ReverterError ( "<STR_LIT:foo>" ) <EOL> self . assertRaises ( errors . PluginError , self . config . rollback_checkpoints ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_revert_challenge_config_throws_error_from_reverter ( self , mock_revert_temporary_config ) : <EOL> mock_revert_temporary_config . side_effect = errors . ReverterError ( "<STR_LIT:foo>" ) <EOL> self . assertRaises ( errors . PluginError , self . config . revert_challenge_config ) <EOL> @ mock . patch ( "<STR_LIT>" ) <EOL> def test_save_throws_error_from_reverter ( self , mock_add_to_checkpoint ) : <EOL> mock_add_to_checkpoint . side_effect = errors . ReverterError ( "<STR_LIT:foo>" ) <EOL> self . assertRaises ( errors . PluginError , self . config . save ) <EOL> def test_get_snakeoil_paths ( self ) : <EOL> cert , key = self . config . _get_snakeoil_paths ( ) <EOL> self . assertTrue ( os . path . exists ( cert ) ) <EOL> self . assertTrue ( os . path . exists ( key ) ) <EOL> with open ( cert ) as cert_file : <EOL> OpenSSL . crypto . load_certificate ( <EOL> OpenSSL . crypto . FILETYPE_PEM , cert_file . read ( ) ) <EOL> with open ( key ) as key_file : <EOL> OpenSSL . crypto . load_privatekey ( <EOL> OpenSSL . crypto . FILETYPE_PEM , key_file . read ( ) ) <EOL> def test_redirect_enhance ( self ) : <EOL> expected = [ <EOL> [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> [ [ '<STR_LIT>' , '<STR_LIT>' ] ] <EOL> ] <EOL> example_conf = self . config . parser . abs_path ( '<STR_LIT>' ) <EOL> self . config . enhance ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> generated_conf = self . config . parser . parsed [ example_conf ] <EOL> self . assertTrue ( util . contains_at_depth ( generated_conf , expected , <NUM_LIT:2> ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
94,778
"""<STR_LIT>""" <EOL> import abc <EOL> import zope . interface <EOL> class AccountStorage ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = abc . ABCMeta <EOL> @ abc . abstractmethod <EOL> def find_all ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractmethod <EOL> def load ( self , account_id ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractmethod <EOL> def save ( self , account ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( )
class IPluginFactory ( zope . interface . Interface ) :
6,701,782,341,187,506,000
"""<STR_LIT>""" <EOL> import abc <EOL> import zope . interface <EOL> class AccountStorage ( object ) : <EOL> """<STR_LIT>""" <EOL> __metaclass__ = abc . ABCMeta <EOL> @ abc . abstractmethod <EOL> def find_all ( self ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractmethod <EOL> def load ( self , account_id ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> @ abc . abstractmethod <EOL> def save ( self , account ) : <EOL> """<STR_LIT>""" <EOL> raise NotImplementedError ( ) <EOL> class IPluginFactory ( zope . interface . Interface ) : <EOL> """<STR_LIT>""" <EOL> description = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> def __call__ ( config , name ) : <EOL> """<STR_LIT>""" <EOL> def inject_parser_options ( parser , name ) : <EOL> """<STR_LIT>""" <EOL> class IPlugin ( zope . interface . Interface ) : <EOL> """<STR_LIT>""" <EOL> def prepare ( ) : <EOL> """<STR_LIT>""" <EOL> def more_info ( ) : <EOL> """<STR_LIT>""" <EOL> class IAuthenticator ( IPlugin ) : <EOL> """<STR_LIT>""" <EOL> def get_chall_pref ( domain ) : <EOL> """<STR_LIT>""" <EOL> def perform ( achalls ) : <EOL> """<STR_LIT>""" <EOL> def cleanup ( achalls ) : <EOL> """<STR_LIT>""" <EOL> class IConfig ( zope . interface . Interface ) : <EOL> """<STR_LIT>""" <EOL> server = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> email = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> rsa_key_size = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> config_dir = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> work_dir = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> accounts_dir = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> backup_dir = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> csr_dir = zope . interface . Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> in_progress_dir = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> key_dir = zope . interface . Attribute ( "<STR_LIT>" ) <EOL> temp_checkpoint_dir = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> renewer_config_file = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> no_verify_ssl = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> tls_sni_01_port = zope . interface . Attribute ( <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> http01_port = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> class IInstaller ( IPlugin ) : <EOL> """<STR_LIT>""" <EOL> def get_all_names ( ) : <EOL> """<STR_LIT>""" <EOL> def deploy_cert ( domain , cert_path , key_path , chain_path , fullchain_path ) : <EOL> """<STR_LIT>""" <EOL> def enhance ( domain , enhancement , options = None ) : <EOL> """<STR_LIT>""" <EOL> def supported_enhancements ( ) : <EOL> """<STR_LIT>""" <EOL> def get_all_certs_keys ( ) : <EOL> """<STR_LIT>""" <EOL> def save ( title = None , temporary = False ) : <EOL> """<STR_LIT>""" <EOL> def rollback_checkpoints ( rollback = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> def recovery_routine ( ) : <EOL> """<STR_LIT>""" <EOL> def view_config_changes ( ) : <EOL> """<STR_LIT>""" <EOL> def config_test ( ) : <EOL> """<STR_LIT>""" <EOL> def restart ( ) : <EOL> """<STR_LIT>""" <EOL> class IDisplay ( zope . interface . 
Interface ) : <EOL> """<STR_LIT>""" <EOL> def notification ( message , height , pause ) : <EOL> """<STR_LIT>""" <EOL> def menu ( message , choices , ok_label = "<STR_LIT:OK>" , <EOL> cancel_label = "<STR_LIT>" , help_label = "<STR_LIT>" , default = None , cli_flag = None ) : <EOL> """<STR_LIT>""" <EOL> def input ( message , default = None , cli_args = None ) : <EOL> """<STR_LIT>""" <EOL> def yesno ( message , yes_label = "<STR_LIT>" , no_label = "<STR_LIT>" , default = None , <EOL> cli_args = None ) : <EOL> """<STR_LIT>""" <EOL> def checklist ( message , tags , default_state , default = None , cli_args = None ) : <EOL> """<STR_LIT>""" <EOL> def directory_select ( self , message , default = None , cli_flag = None ) : <EOL> """<STR_LIT>""" <EOL> class IValidator ( zope . interface . Interface ) : <EOL> """<STR_LIT>""" <EOL> def certificate ( cert , name , alt_host = None , port = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> def redirect ( name , port = <NUM_LIT> , headers = None ) : <EOL> """<STR_LIT>""" <EOL> def hsts ( name ) : <EOL> """<STR_LIT>""" <EOL> def ocsp_stapling ( name ) : <EOL> """<STR_LIT>""" <EOL> class IReporter ( zope . interface . Interface ) : <EOL> """<STR_LIT>""" <EOL> HIGH_PRIORITY = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> MEDIUM_PRIORITY = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> LOW_PRIORITY = zope . interface . Attribute ( <EOL> "<STR_LIT>" ) <EOL> def add_message ( self , msg , priority , on_crash = True ) : <EOL> """<STR_LIT>""" <EOL> def print_messages ( self ) : <EOL> """<STR_LIT>""" </s>
94,779
"""<STR_LIT>""" <EOL> import datetime <EOL> import itertools <EOL> from acme import challenges <EOL> from acme import jose <EOL> from acme import messages <EOL> from certbot . tests import test_util <EOL> KEY = test_util . load_rsa_private_key ( '<STR_LIT>' ) <EOL> HTTP01 = challenges . HTTP01 ( <EOL> token = "<STR_LIT>" ) <EOL> TLSSNI01 = challenges . TLSSNI01 ( <EOL> token = jose . b64decode ( b"<STR_LIT>" ) ) <EOL> DNS = challenges . DNS ( token = "<STR_LIT>" ) <EOL> CHALLENGES = [ HTTP01 , TLSSNI01 , DNS ] <EOL> def gen_combos ( challbs ) : <EOL> """<STR_LIT>""" <EOL> return tuple ( ( i , ) for i , _ in enumerate ( challbs ) ) <EOL> def chall_to_challb ( chall , status ) : <EOL> """<STR_LIT>""" <EOL> kwargs = { <EOL> "<STR_LIT>" : chall , <EOL> "<STR_LIT>" : chall . typ + "<STR_LIT>" , <EOL> "<STR_LIT:status>" : status , <EOL> } <EOL> if status == messages . STATUS_VALID : <EOL> kwargs . update ( { "<STR_LIT>" : datetime . datetime . now ( ) } ) <EOL> return messages . ChallengeBody ( ** kwargs ) <EOL> TLSSNI01_P = chall_to_challb ( TLSSNI01 , messages . STATUS_PENDING ) <EOL> HTTP01_P = chall_to_challb ( HTTP01 , messages . STATUS_PENDING ) <EOL> DNS_P = chall_to_challb ( DNS , messages . STATUS_PENDING ) <EOL> CHALLENGES_P = [ HTTP01_P , TLSSNI01_P , DNS_P ] <EOL> def gen_authzr ( authz_status , domain , challs , statuses , combos = True ) : <EOL> """<STR_LIT>""" <EOL> challbs = tuple ( <EOL> chall_to_challb ( chall , status ) <EOL> for chall , status in itertools . izip ( challs , statuses ) <EOL> ) <EOL> authz_kwargs = { <EOL> "<STR_LIT>" : messages . Identifier ( <EOL> typ = messages . IDENTIFIER_FQDN , value = domain ) , <EOL> "<STR_LIT>" : challbs , <EOL> } <EOL> if combos : <EOL> authz_kwargs . update ( { "<STR_LIT>" : gen_combos ( challbs ) } ) <EOL> if authz_status == messages . STATUS_VALID :
authz_kwargs . update ( {
-3,866,717,824,105,581,000
"""<STR_LIT>""" <EOL> import datetime <EOL> import itertools <EOL> from acme import challenges <EOL> from acme import jose <EOL> from acme import messages <EOL> from certbot . tests import test_util <EOL> KEY = test_util . load_rsa_private_key ( '<STR_LIT>' ) <EOL> HTTP01 = challenges . HTTP01 ( <EOL> token = "<STR_LIT>" ) <EOL> TLSSNI01 = challenges . TLSSNI01 ( <EOL> token = jose . b64decode ( b"<STR_LIT>" ) ) <EOL> DNS = challenges . DNS ( token = "<STR_LIT>" ) <EOL> CHALLENGES = [ HTTP01 , TLSSNI01 , DNS ] <EOL> def gen_combos ( challbs ) : <EOL> """<STR_LIT>""" <EOL> return tuple ( ( i , ) for i , _ in enumerate ( challbs ) ) <EOL> def chall_to_challb ( chall , status ) : <EOL> """<STR_LIT>""" <EOL> kwargs = { <EOL> "<STR_LIT>" : chall , <EOL> "<STR_LIT>" : chall . typ + "<STR_LIT>" , <EOL> "<STR_LIT:status>" : status , <EOL> } <EOL> if status == messages . STATUS_VALID : <EOL> kwargs . update ( { "<STR_LIT>" : datetime . datetime . now ( ) } ) <EOL> return messages . ChallengeBody ( ** kwargs ) <EOL> TLSSNI01_P = chall_to_challb ( TLSSNI01 , messages . STATUS_PENDING ) <EOL> HTTP01_P = chall_to_challb ( HTTP01 , messages . STATUS_PENDING ) <EOL> DNS_P = chall_to_challb ( DNS , messages . STATUS_PENDING ) <EOL> CHALLENGES_P = [ HTTP01_P , TLSSNI01_P , DNS_P ] <EOL> def gen_authzr ( authz_status , domain , challs , statuses , combos = True ) : <EOL> """<STR_LIT>""" <EOL> challbs = tuple ( <EOL> chall_to_challb ( chall , status ) <EOL> for chall , status in itertools . izip ( challs , statuses ) <EOL> ) <EOL> authz_kwargs = { <EOL> "<STR_LIT>" : messages . Identifier ( <EOL> typ = messages . IDENTIFIER_FQDN , value = domain ) , <EOL> "<STR_LIT>" : challbs , <EOL> } <EOL> if combos : <EOL> authz_kwargs . update ( { "<STR_LIT>" : gen_combos ( challbs ) } ) <EOL> if authz_status == messages . STATUS_VALID : <EOL> authz_kwargs . update ( { <EOL> "<STR_LIT:status>" : authz_status , <EOL> "<STR_LIT>" : datetime . datetime . now ( ) + datetime . timedelta ( days = <NUM_LIT> ) , <EOL> } ) <EOL> else : <EOL> authz_kwargs . update ( { <EOL> "<STR_LIT:status>" : authz_status , <EOL> } ) <EOL> return messages . AuthorizationResource ( <EOL> uri = "<STR_LIT>" , <EOL> new_cert_uri = "<STR_LIT>" , <EOL> body = messages . Authorization ( ** authz_kwargs ) <EOL> ) </s>
94,780
"""<STR_LIT>""" <EOL> from distutils . version import LooseVersion <EOL> from json import loads <EOL> from os import devnull , environ <EOL> from os . path import dirname , join <EOL> import re <EOL> from subprocess import check_call , CalledProcessError <EOL> from sys import argv , exit <EOL> from urllib2 import build_opener , HTTPHandler , HTTPSHandler , HTTPError <EOL> PUBLIC_KEY = environ . get ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> class ExpectedError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class HttpsGetter ( object ) : <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _opener = build_opener ( HTTPSHandler ( ) ) <EOL> for handler in self . _opener . handlers : <EOL> if isinstance ( handler , HTTPHandler ) : <EOL> self . _opener . handlers . remove ( handler ) <EOL> def get ( self , url ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _opener . open ( url ) . read ( ) <EOL> except ( HTTPError , IOError ) as exc : <EOL> raise ExpectedError ( "<STR_LIT>" % url , exc ) <EOL> def write ( contents , dir , filename ) : <EOL> """<STR_LIT>""" <EOL> with open ( join ( dir , filename ) , '<STR_LIT:w>' ) as file : <EOL> file . write ( contents ) <EOL> def latest_stable_version ( get ) : <EOL> """<STR_LIT>""" <EOL> metadata = loads ( get ( <EOL> environ . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) ) <EOL> return str ( max ( LooseVersion ( r ) for r <EOL> in metadata [ '<STR_LIT>' ] . iterkeys ( ) <EOL> if re . match ( '<STR_LIT>' , r ) ) ) <EOL> def verified_new_le_auto ( get , tag , temp_dir ) : <EOL> """<STR_LIT>""" <EOL> le_auto_dir = environ . get ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % tag <EOL> write ( get ( le_auto_dir + '<STR_LIT>' ) , temp_dir , '<STR_LIT>' ) <EOL> write ( get ( le_auto_dir + '<STR_LIT>' ) , temp_dir , '<STR_LIT>' ) <EOL> write ( PUBLIC_KEY , temp_dir , '<STR_LIT>' ) <EOL> try : <EOL> with open ( devnull , '<STR_LIT:w>' ) as dev_null :
check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ,
2,654,158,417,131,985,000
"""<STR_LIT>""" <EOL> from distutils . version import LooseVersion <EOL> from json import loads <EOL> from os import devnull , environ <EOL> from os . path import dirname , join <EOL> import re <EOL> from subprocess import check_call , CalledProcessError <EOL> from sys import argv , exit <EOL> from urllib2 import build_opener , HTTPHandler , HTTPSHandler , HTTPError <EOL> PUBLIC_KEY = environ . get ( '<STR_LIT>' , """<STR_LIT>""" ) <EOL> class ExpectedError ( Exception ) : <EOL> """<STR_LIT>""" <EOL> class HttpsGetter ( object ) : <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _opener = build_opener ( HTTPSHandler ( ) ) <EOL> for handler in self . _opener . handlers : <EOL> if isinstance ( handler , HTTPHandler ) : <EOL> self . _opener . handlers . remove ( handler ) <EOL> def get ( self , url ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return self . _opener . open ( url ) . read ( ) <EOL> except ( HTTPError , IOError ) as exc : <EOL> raise ExpectedError ( "<STR_LIT>" % url , exc ) <EOL> def write ( contents , dir , filename ) : <EOL> """<STR_LIT>""" <EOL> with open ( join ( dir , filename ) , '<STR_LIT:w>' ) as file : <EOL> file . write ( contents ) <EOL> def latest_stable_version ( get ) : <EOL> """<STR_LIT>""" <EOL> metadata = loads ( get ( <EOL> environ . get ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) ) ) <EOL> return str ( max ( LooseVersion ( r ) for r <EOL> in metadata [ '<STR_LIT>' ] . iterkeys ( ) <EOL> if re . match ( '<STR_LIT>' , r ) ) ) <EOL> def verified_new_le_auto ( get , tag , temp_dir ) : <EOL> """<STR_LIT>""" <EOL> le_auto_dir = environ . get ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) % tag <EOL> write ( get ( le_auto_dir + '<STR_LIT>' ) , temp_dir , '<STR_LIT>' ) <EOL> write ( get ( le_auto_dir + '<STR_LIT>' ) , temp_dir , '<STR_LIT>' ) <EOL> write ( PUBLIC_KEY , temp_dir , '<STR_LIT>' ) <EOL> try : <EOL> with open ( devnull , '<STR_LIT:w>' ) as dev_null : <EOL> check_call ( [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> join ( temp_dir , '<STR_LIT>' ) , <EOL> '<STR_LIT>' , <EOL> join ( temp_dir , '<STR_LIT>' ) , <EOL> join ( temp_dir , '<STR_LIT>' ) ] , <EOL> stdout = dev_null , <EOL> stderr = dev_null ) <EOL> except CalledProcessError as exc : <EOL> raise ExpectedError ( "<STR_LIT>" <EOL> "<STR_LIT>" , exc ) <EOL> def main ( ) : <EOL> get = HttpsGetter ( ) . get <EOL> flag = argv [ <NUM_LIT:1> ] <EOL> try : <EOL> if flag == '<STR_LIT>' : <EOL> print latest_stable_version ( get ) <EOL> elif flag == '<STR_LIT>' : <EOL> tag = argv [ <NUM_LIT:2> ] <EOL> verified_new_le_auto ( get , tag , dirname ( argv [ <NUM_LIT:0> ] ) ) <EOL> except ExpectedError as exc : <EOL> print exc . args [ <NUM_LIT:0> ] , exc . args [ <NUM_LIT:1> ] <EOL> return <NUM_LIT:1> <EOL> else : <EOL> return <NUM_LIT:0> <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> exit ( main ( ) ) </s>
94,781
from flask import render_template , redirect , request , url_for , flash , session , jsonify <EOL> from flask . ext . login import login_user , logout_user , login_required , current_user <EOL> from . import auth <EOL> from app import db <EOL> from . . models import User , Notebook <EOL> from . . email import send_email <EOL> from . forms import LoginForm , RegistrationForm , ChangePasswordForm , PasswordResetRequestForm , PasswordResetForm , ChangeEmailForm , ChangeUserNameForm <EOL> @ auth . before_app_request <EOL> def before_request ( ) : <EOL> if current_user . is_authenticated ( ) and not current_user . confirmed and request . endpoint [ : <NUM_LIT:5> ] != '<STR_LIT>' and request . endpoint != '<STR_LIT>' : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def login ( ) : <EOL> form = LoginForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data . lower ( ) . strip ( ) ) . first ( ) <EOL> if user is not None and user . verify_password ( form . password . data ) : <EOL> login_user ( user , form . remember_me . data ) <EOL> return redirect ( request . args . get ( '<STR_LIT>' ) or url_for ( '<STR_LIT>' ) ) <EOL> flash ( '<STR_LIT>' ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def logout ( ) : <EOL> logout_user ( ) <EOL> if '<STR_LIT>' in session : <EOL> session . pop ( '<STR_LIT>' , None ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def register ( ) : <EOL> form = RegistrationForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User ( <EOL> email = form . email . data . lower ( ) . strip ( ) , <EOL> username = form . username . data , <EOL> password = form . password . data ) <EOL> db . session . add ( user ) <EOL> db . session . commit ( ) <EOL> default_notebook = Notebook ( <EOL> title = '<STR_LIT>' , author_id = user . id ) <EOL> db . session . add ( default_notebook ) <EOL> db . session . commit ( ) <EOL> token = user . generate_confirmation_token ( ) <EOL> send_email ( <EOL> user . email , '<STR_LIT>' , <EOL> '<STR_LIT>' , user = user , token = token ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def resend_confirmation ( ) : <EOL> token = current_user . generate_confirmation_token ( ) <EOL> send_email ( <EOL> current_user . email , '<STR_LIT>' , <EOL> '<STR_LIT>' , user = current_user , token = token ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def confirm ( token ) : <EOL> if current_user . confirmed : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> if current_user . confirm ( token ) : <EOL> flash ( '<STR_LIT>' ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> def unconfirmed ( ) : <EOL> if current_user . is_anonymous ( ) or current_user . confirmed : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ auth . 
route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def change_password ( ) : <EOL> form = ChangePasswordForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> if current_user . verify_password ( form . old_password . data ) : <EOL> current_user . password = form . password . data <EOL> db . session . add ( current_user ) <EOL> db . session . commit ( ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return render_template ( "<STR_LIT>" , form = form ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def password_reset_request ( ) : <EOL> if not current_user . is_anonymous : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> form = PasswordResetRequestForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data . lower ( ) . strip ( ) ) . first ( ) <EOL> if user : <EOL> token = user . generate_reset_token ( ) <EOL> send_email ( user . email , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> user = user , token = token , <EOL> next = request . args . get ( '<STR_LIT>' ) ) <EOL> flash ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def password_reset ( token ) : <EOL> if not current_user . is_anonymous : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> form = PasswordResetForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data . lower ( ) . strip ( ) ) . first ( ) <EOL> if user is None : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> if user . reset_password ( token , form . password . data ) : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> else : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def change_email_request ( ) : <EOL> form = ChangeEmailForm ( ) <EOL> if form . validate_on_submit ( ) :
if current_user . verify_password ( form . password . data ) :
8,676,589,363,319,635,000
from flask import render_template , redirect , request , url_for , flash , session , jsonify <EOL> from flask . ext . login import login_user , logout_user , login_required , current_user <EOL> from . import auth <EOL> from app import db <EOL> from . . models import User , Notebook <EOL> from . . email import send_email <EOL> from . forms import LoginForm , RegistrationForm , ChangePasswordForm , PasswordResetRequestForm , PasswordResetForm , ChangeEmailForm , ChangeUserNameForm <EOL> @ auth . before_app_request <EOL> def before_request ( ) : <EOL> if current_user . is_authenticated ( ) and not current_user . confirmed and request . endpoint [ : <NUM_LIT:5> ] != '<STR_LIT>' and request . endpoint != '<STR_LIT>' : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def login ( ) : <EOL> form = LoginForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data . lower ( ) . strip ( ) ) . first ( ) <EOL> if user is not None and user . verify_password ( form . password . data ) : <EOL> login_user ( user , form . remember_me . data ) <EOL> return redirect ( request . args . get ( '<STR_LIT>' ) or url_for ( '<STR_LIT>' ) ) <EOL> flash ( '<STR_LIT>' ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def logout ( ) : <EOL> logout_user ( ) <EOL> if '<STR_LIT>' in session : <EOL> session . pop ( '<STR_LIT>' , None ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def register ( ) : <EOL> form = RegistrationForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User ( <EOL> email = form . email . data . lower ( ) . strip ( ) , <EOL> username = form . username . data , <EOL> password = form . password . data ) <EOL> db . session . add ( user ) <EOL> db . session . commit ( ) <EOL> default_notebook = Notebook ( <EOL> title = '<STR_LIT>' , author_id = user . id ) <EOL> db . session . add ( default_notebook ) <EOL> db . session . commit ( ) <EOL> token = user . generate_confirmation_token ( ) <EOL> send_email ( <EOL> user . email , '<STR_LIT>' , <EOL> '<STR_LIT>' , user = user , token = token ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def resend_confirmation ( ) : <EOL> token = current_user . generate_confirmation_token ( ) <EOL> send_email ( <EOL> current_user . email , '<STR_LIT>' , <EOL> '<STR_LIT>' , user = current_user , token = token ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def confirm ( token ) : <EOL> if current_user . confirmed : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> if current_user . confirm ( token ) : <EOL> flash ( '<STR_LIT>' ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> def unconfirmed ( ) : <EOL> if current_user . is_anonymous ( ) or current_user . confirmed : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' ) <EOL> @ auth . 
route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def change_password ( ) : <EOL> form = ChangePasswordForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> if current_user . verify_password ( form . old_password . data ) : <EOL> current_user . password = form . password . data <EOL> db . session . add ( current_user ) <EOL> db . session . commit ( ) <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return render_template ( "<STR_LIT>" , form = form ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def password_reset_request ( ) : <EOL> if not current_user . is_anonymous : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> form = PasswordResetRequestForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data . lower ( ) . strip ( ) ) . first ( ) <EOL> if user : <EOL> token = user . generate_reset_token ( ) <EOL> send_email ( user . email , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> user = user , token = token , <EOL> next = request . args . get ( '<STR_LIT>' ) ) <EOL> flash ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> def password_reset ( token ) : <EOL> if not current_user . is_anonymous : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> form = PasswordResetForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( email = form . email . data . lower ( ) . strip ( ) ) . first ( ) <EOL> if user is None : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> if user . reset_password ( token , form . password . data ) : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> else : <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( '<STR_LIT>' , form = form ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def change_email_request ( ) : <EOL> form = ChangeEmailForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> if current_user . verify_password ( form . password . data ) : <EOL> new_email = form . email . data . lower ( ) . strip ( ) <EOL> token = current_user . generate_email_change_token ( new_email ) <EOL> send_email ( new_email , '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> user = current_user , token = token ) <EOL> flash ( '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return render_template ( "<STR_LIT>" , form = form ) <EOL> @ auth . route ( '<STR_LIT>' ) <EOL> @ login_required <EOL> def change_email ( token ) : <EOL> if current_user . change_email ( token ) : <EOL> flash ( '<STR_LIT>' ) <EOL> else : <EOL> flash ( '<STR_LIT>' ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> @ auth . route ( '<STR_LIT>' , methods = [ '<STR_LIT:GET>' , '<STR_LIT:POST>' ] ) <EOL> @ login_required <EOL> def change_username ( ) : <EOL> form = ChangeUserNameForm ( ) <EOL> if form . validate_on_submit ( ) : <EOL> user = User . query . filter_by ( id = current_user . id ) . first_or_404 ( ) <EOL> new_username = form . username . data <EOL> user . username = new_username <EOL> db . session . add ( user ) <EOL> db . session . 
commit ( ) <EOL> flash ( '<STR_LIT>' . format ( new_username ) ) <EOL> return redirect ( url_for ( '<STR_LIT>' ) ) <EOL> return render_template ( "<STR_LIT>" , form = form ) </s>
94,782
from Rerequester import Rerequester <EOL> from urllib import quote <EOL> from threading import Event <EOL> from random import randrange <EOL> from string import lower <EOL> import sys <EOL> import __init__ <EOL> try : <EOL> True <EOL> except : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> DEBUG = True <EOL> def excfunc ( x ) : <EOL> print x <EOL> class T2TConnection : <EOL> def __init__ ( self , myid , tracker , hash , interval , peers , timeout , <EOL> rawserver , disallow , isdisallowed ) : <EOL> self . tracker = tracker <EOL> self . interval = interval <EOL> self . hash = hash <EOL> self . operatinginterval = interval <EOL> self . peers = peers <EOL> self . rawserver = rawserver <EOL> self . disallow = disallow <EOL> self . isdisallowed = isdisallowed <EOL> self . active = True <EOL> self . busy = False <EOL> self . errors = <NUM_LIT:0> <EOL> self . rejected = <NUM_LIT:0> <EOL> self . trackererror = False <EOL> self . peerlists = [ ] <EOL> self . rerequester = Rerequester ( [ [ tracker ] ] , interval , <EOL> rawserver . add_task , lambda : <NUM_LIT:0> , peers , self . addtolist , <EOL> rawserver . add_task , lambda : <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT>' , <EOL> myid , hash , timeout , self . errorfunc , excfunc , peers , Event ( ) , <EOL> lambda : <NUM_LIT:0> , lambda : <NUM_LIT:0> ) <EOL> if self . isactive ( ) : <EOL> rawserver . add_task ( self . refresh , randrange ( int ( self . interval / <NUM_LIT:10> ) , self . interval ) ) <EOL> def isactive ( self ) : <EOL> if self . isdisallowed ( self . tracker ) : <EOL> self . deactivate ( ) <EOL> return self . active <EOL> def deactivate ( self ) : <EOL> self . active = False <EOL> def refresh ( self ) : <EOL> if not self . isactive ( ) : <EOL> return <EOL> self . lastsuccessful = True <EOL> self . newpeerdata = [ ] <EOL> if DEBUG : <EOL> print '<STR_LIT>' % ( self . tracker , quote ( self . hash ) ) <EOL> self . rerequester . snoop ( self . peers , self . callback ) <EOL> def callback ( self ) : <EOL> self . busy = False <EOL> if self . lastsuccessful : <EOL> self . errors = <NUM_LIT:0> <EOL> self . rejected = <NUM_LIT:0> <EOL> if self . rerequester . announce_interval > ( <NUM_LIT:3> * self . interval ) : <EOL> self . peers = int ( self . peers * ( self . rerequester . announce_interval / self . interval ) ) <EOL> self . operatinginterval = self . rerequester . announce_interval <EOL> if DEBUG : <EOL> print ( "<STR_LIT>" % <EOL> ( self . tracker , quote ( self . hash ) , len ( self . newpeerdata ) ) ) <EOL> self . peerlists . append ( self . newpeerdata ) <EOL> self . peerlists = self . peerlists [ - <NUM_LIT:10> : ] <EOL> if self . isactive ( ) : <EOL> self . rawserver . add_task ( self . refresh , self . operatinginterval ) <EOL> def addtolist ( self , peers ) : <EOL> for peer in peers : <EOL> self . newpeerdata . append ( ( peer [ <NUM_LIT:1> ] , peer [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , peer [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) ) <EOL> def errorfunc ( self , r ) : <EOL> self . lastsuccessful = False <EOL> if DEBUG : <EOL> print "<STR_LIT>" % ( self . tracker , quote ( self . hash ) , r ) <EOL> if r == self . rerequester . rejectedmessage + '<STR_LIT>' : <EOL> if DEBUG : <EOL> print '<STR_LIT>' <EOL> self . deactivate ( ) <EOL> self . disallow ( self . tracker ) <EOL> return <EOL> if lower ( r [ : <NUM_LIT:8> ] ) == '<STR_LIT>' : <EOL> self . rejected += <NUM_LIT:1> <EOL> if self . rejected == <NUM_LIT:3> : <EOL> if DEBUG :
print '<STR_LIT>'
6,322,746,583,788,871,000
from Rerequester import Rerequester <EOL> from urllib import quote <EOL> from threading import Event <EOL> from random import randrange <EOL> from string import lower <EOL> import sys <EOL> import __init__ <EOL> try : <EOL> True <EOL> except : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> DEBUG = True <EOL> def excfunc ( x ) : <EOL> print x <EOL> class T2TConnection : <EOL> def __init__ ( self , myid , tracker , hash , interval , peers , timeout , <EOL> rawserver , disallow , isdisallowed ) : <EOL> self . tracker = tracker <EOL> self . interval = interval <EOL> self . hash = hash <EOL> self . operatinginterval = interval <EOL> self . peers = peers <EOL> self . rawserver = rawserver <EOL> self . disallow = disallow <EOL> self . isdisallowed = isdisallowed <EOL> self . active = True <EOL> self . busy = False <EOL> self . errors = <NUM_LIT:0> <EOL> self . rejected = <NUM_LIT:0> <EOL> self . trackererror = False <EOL> self . peerlists = [ ] <EOL> self . rerequester = Rerequester ( [ [ tracker ] ] , interval , <EOL> rawserver . add_task , lambda : <NUM_LIT:0> , peers , self . addtolist , <EOL> rawserver . add_task , lambda : <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , '<STR_LIT>' , <EOL> myid , hash , timeout , self . errorfunc , excfunc , peers , Event ( ) , <EOL> lambda : <NUM_LIT:0> , lambda : <NUM_LIT:0> ) <EOL> if self . isactive ( ) : <EOL> rawserver . add_task ( self . refresh , randrange ( int ( self . interval / <NUM_LIT:10> ) , self . interval ) ) <EOL> def isactive ( self ) : <EOL> if self . isdisallowed ( self . tracker ) : <EOL> self . deactivate ( ) <EOL> return self . active <EOL> def deactivate ( self ) : <EOL> self . active = False <EOL> def refresh ( self ) : <EOL> if not self . isactive ( ) : <EOL> return <EOL> self . lastsuccessful = True <EOL> self . newpeerdata = [ ] <EOL> if DEBUG : <EOL> print '<STR_LIT>' % ( self . tracker , quote ( self . hash ) ) <EOL> self . rerequester . snoop ( self . peers , self . callback ) <EOL> def callback ( self ) : <EOL> self . busy = False <EOL> if self . lastsuccessful : <EOL> self . errors = <NUM_LIT:0> <EOL> self . rejected = <NUM_LIT:0> <EOL> if self . rerequester . announce_interval > ( <NUM_LIT:3> * self . interval ) : <EOL> self . peers = int ( self . peers * ( self . rerequester . announce_interval / self . interval ) ) <EOL> self . operatinginterval = self . rerequester . announce_interval <EOL> if DEBUG : <EOL> print ( "<STR_LIT>" % <EOL> ( self . tracker , quote ( self . hash ) , len ( self . newpeerdata ) ) ) <EOL> self . peerlists . append ( self . newpeerdata ) <EOL> self . peerlists = self . peerlists [ - <NUM_LIT:10> : ] <EOL> if self . isactive ( ) : <EOL> self . rawserver . add_task ( self . refresh , self . operatinginterval ) <EOL> def addtolist ( self , peers ) : <EOL> for peer in peers : <EOL> self . newpeerdata . append ( ( peer [ <NUM_LIT:1> ] , peer [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] , peer [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) ) <EOL> def errorfunc ( self , r ) : <EOL> self . lastsuccessful = False <EOL> if DEBUG : <EOL> print "<STR_LIT>" % ( self . tracker , quote ( self . hash ) , r ) <EOL> if r == self . rerequester . rejectedmessage + '<STR_LIT>' : <EOL> if DEBUG : <EOL> print '<STR_LIT>' <EOL> self . deactivate ( ) <EOL> self . disallow ( self . tracker ) <EOL> return <EOL> if lower ( r [ : <NUM_LIT:8> ] ) == '<STR_LIT>' : <EOL> self . rejected += <NUM_LIT:1> <EOL> if self . rejected == <NUM_LIT:3> : <EOL> if DEBUG : <EOL> print '<STR_LIT>' <EOL> self . deactivate ( ) <EOL> return <EOL> self . 
errors += <NUM_LIT:1> <EOL> if self . errors >= <NUM_LIT:3> : <EOL> self . operatinginterval += self . interval <EOL> if DEBUG : <EOL> print '<STR_LIT>' + str ( self . operatinginterval ) + '<STR_LIT>' <EOL> def harvest ( self ) : <EOL> x = [ ] <EOL> for list in self . peerlists : <EOL> x += list <EOL> self . peerlists = [ ] <EOL> return x <EOL> class T2TList : <EOL> def __init__ ( self , enabled , trackerid , interval , maxpeers , timeout , rawserver ) : <EOL> self . enabled = enabled <EOL> self . trackerid = trackerid <EOL> self . interval = interval <EOL> self . maxpeers = maxpeers <EOL> self . timeout = timeout <EOL> self . rawserver = rawserver <EOL> self . list = { } <EOL> self . torrents = { } <EOL> self . disallowed = { } <EOL> self . oldtorrents = [ ] <EOL> def parse ( self , allowed_list ) : <EOL> if not self . enabled : <EOL> return <EOL> newlist = { } <EOL> for hash , data in allowed_list . items ( ) : <EOL> if data . has_key ( '<STR_LIT>' ) : <EOL> for tier in data [ '<STR_LIT>' ] : <EOL> for tracker in tier : <EOL> self . disallowed . setdefault ( tracker , False ) <EOL> newlist . setdefault ( tracker , { } ) <EOL> newlist [ tracker ] [ hash ] = None <EOL> for tracker , hashdata in self . list . items ( ) : <EOL> for hash , t2t in hashdata . items ( ) : <EOL> if not newlist . has_key ( tracker ) or not newlist [ tracker ] . has_key ( hash ) : <EOL> t2t . deactivate ( ) <EOL> self . oldtorrents += [ t2t ] <EOL> else : <EOL> newlist [ tracker ] [ hash ] = t2t <EOL> if not newlist . has_key ( tracker ) : <EOL> self . disallowed [ tracker ] = False <EOL> self . list = newlist <EOL> newtorrents = { } <EOL> for tracker , hashdata in newlist . items ( ) : <EOL> for hash , t2t in hashdata . items ( ) : <EOL> if t2t is None : <EOL> hashdata [ hash ] = T2TConnection ( self . trackerid , tracker , hash , <EOL> self . interval , self . maxpeers , self . timeout , <EOL> self . rawserver , self . _disallow , self . _isdisallowed ) <EOL> newtorrents . setdefault ( hash , [ ] ) <EOL> newtorrents [ hash ] += [ hashdata [ hash ] ] <EOL> self . torrents = newtorrents <EOL> def _disallow ( self , tracker ) : <EOL> self . disallowed [ tracker ] = True <EOL> def _isdisallowed ( self , tracker ) : <EOL> return self . disallowed [ tracker ] <EOL> def harvest ( self , hash ) : <EOL> harvest = [ ] <EOL> if self . enabled : <EOL> for t2t in self . torrents [ hash ] : <EOL> harvest += t2t . harvest ( ) <EOL> return harvest </s>
94,783
<s>
import threading
7,246,533,104,317,256,000
import threading <EOL> from time import sleep <EOL> class Cron ( object ) : <EOL> def __init__ ( self , website ) : <EOL> self . website = website <EOL> self . conn = None <EOL> self . has_lock = False <EOL> self . exclusive_jobs = [ ] <EOL> def __call__ ( self , period , func , exclusive = False ) : <EOL> if period <= <NUM_LIT:0> : <EOL> return <EOL> if exclusive and not self . has_lock : <EOL> self . exclusive_jobs . append ( ( period , func ) ) <EOL> self . _wait_for_lock ( ) <EOL> return <EOL> def f ( ) : <EOL> while True : <EOL> try : <EOL> func ( ) <EOL> except Exception as e : <EOL> self . website . tell_sentry ( e , { } , allow_reraise = True ) <EOL> sleep ( period ) <EOL> t = threading . Thread ( target = f ) <EOL> t . daemon = True <EOL> t . start ( ) <EOL> def _wait_for_lock ( self ) : <EOL> if self . conn : <EOL> return <EOL> self . conn = self . website . db . get_connection ( ) . __enter__ ( ) <EOL> def f ( ) : <EOL> cursor = self . conn . cursor ( ) <EOL> while True : <EOL> if cursor . one ( "<STR_LIT>" ) : <EOL> self . has_lock = True <EOL> break <EOL> sleep ( <NUM_LIT> ) <EOL> for job in self . exclusive_jobs : <EOL> self ( * job , exclusive = True ) <EOL> t = threading . Thread ( target = f ) <EOL> t . daemon = True <EOL> t . start ( ) </s>
94,784
from __future__ import absolute_import , division , print_function , unicode_literals <EOL> import re <EOL> from . import jinja2_htmlescaped as base <EOL> whitespace_re = re . compile ( r'<STR_LIT>' ) <EOL> class Renderer ( base . Factory . Renderer ) :
def render_content ( self , context ) :
8,569,721,569,368,698,000
from __future__ import absolute_import , division , print_function , unicode_literals <EOL> import re <EOL> from . import jinja2_htmlescaped as base <EOL> whitespace_re = re . compile ( r'<STR_LIT>' ) <EOL> class Renderer ( base . Factory . Renderer ) : <EOL> def render_content ( self , context ) : <EOL> xml = super ( Renderer , self ) . render_content ( context ) <EOL> return whitespace_re . sub ( '<STR_LIT>' , xml ) <EOL> class Factory ( base . Factory ) : <EOL> Renderer = Renderer </s>
94,785
from __future__ import print_function , unicode_literals <EOL> from decimal import Decimal <EOL> from liberapay . testing import Harness <EOL> class Tests ( Harness ) : <EOL> def setUp ( self ) : <EOL> self . alice = self . make_participant ( '<STR_LIT>' )
def change_goal ( self , goal , goal_custom = "<STR_LIT>" , auth_as = "<STR_LIT>" ) :
-2,061,222,223,855,400,200
from __future__ import print_function , unicode_literals <EOL> from decimal import Decimal <EOL> from liberapay . testing import Harness <EOL> class Tests ( Harness ) : <EOL> def setUp ( self ) : <EOL> self . alice = self . make_participant ( '<STR_LIT>' ) <EOL> def change_goal ( self , goal , goal_custom = "<STR_LIT>" , auth_as = "<STR_LIT>" ) : <EOL> return self . client . PxST ( <EOL> "<STR_LIT>" , <EOL> { '<STR_LIT>' : goal , '<STR_LIT>' : goal_custom } , <EOL> auth_as = self . alice if auth_as == '<STR_LIT>' else auth_as <EOL> ) <EOL> def test_changing_to_minus_1_asks_confirmation ( self ) : <EOL> r = self . client . PxST ( '<STR_LIT>' , { '<STR_LIT>' : '<STR_LIT>' } , auth_as = self . alice ) <EOL> assert "<STR_LIT>" in r . text <EOL> def test_wonky_custom_amounts_are_standardized ( self ) : <EOL> self . change_goal ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> alice = self . alice . from_id ( self . alice . id ) <EOL> assert alice . goal == <NUM_LIT> <EOL> def test_anonymous_gets_403 ( self ) : <EOL> response = self . change_goal ( "<STR_LIT>" , auth_as = None ) <EOL> assert response . code == <NUM_LIT> , response . code <EOL> def test_invalid_is_400 ( self ) : <EOL> response = self . change_goal ( "<STR_LIT>" ) <EOL> assert response . code == <NUM_LIT> , response . code <EOL> def test_invalid_custom_amount_is_400 ( self ) : <EOL> response = self . change_goal ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> assert response . code == <NUM_LIT> , response . code <EOL> def test_change_goal ( self ) : <EOL> self . change_goal ( "<STR_LIT>" , "<STR_LIT:100>" ) <EOL> self . change_goal ( "<STR_LIT:0>" ) <EOL> self . change_goal ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> self . change_goal ( "<STR_LIT:null>" , "<STR_LIT>" ) <EOL> self . change_goal ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> actual = self . db . one ( "<STR_LIT>" ) <EOL> assert actual == Decimal ( "<STR_LIT>" ) <EOL> actual = self . db . all ( """<STR_LIT>""" ) <EOL> assert actual == [ '<STR_LIT>' , None , '<STR_LIT>' , '<STR_LIT:0>' , '<STR_LIT:100>' ] <EOL> def test_team_member_can_change_team_goal ( self ) : <EOL> team = self . make_participant ( '<STR_LIT>' , kind = '<STR_LIT>' ) <EOL> team . add_member ( self . alice ) <EOL> r = self . client . PxST ( <EOL> '<STR_LIT>' , <EOL> { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> auth_as = self . alice <EOL> ) <EOL> assert r . code == <NUM_LIT> <EOL> assert team . refetch ( ) . goal == Decimal ( '<STR_LIT>' ) </s>
94,786
import json <EOL> import datetime <EOL> from django . db . models import FieldDoesNotExist <EOL> from django . db . models . fields . related import ManyToManyField <EOL> class EsSerializer ( object ) : <EOL> def serialize ( self , instance ) : <EOL> raise NotImplementedError ( ) <EOL> def deserialize ( self , source ) : <EOL> raise NotImplementedError ( ) <EOL> class EsDbMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def deserialize ( self , source ) : <EOL> pk_field = self . model . _meta . auto_field <EOL> ids = [ e [ pk_field . name ] for e in source ] <EOL> return self . model . objects . filter ( ** { pk_field . name + '<STR_LIT>' : ids } ) <EOL> class EsJsonToModelMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def instanciate ( self , attrs ) : <EOL> instance = self . model ( ** attrs ) <EOL> instance . _is_es_deserialized = True <EOL> return instance <EOL> def nested_deserialize ( self , field , source ) : <EOL> if source : <EOL> if hasattr ( field . rel . to , '<STR_LIT>' ) : <EOL> serializer = field . rel . to . es . get_serializer ( ) <EOL> obj = serializer . deserialize ( source ) <EOL> return obj <EOL> elif '<STR_LIT:id>' in source and '<STR_LIT:value>' in source : <EOL> return field . rel . to . objects . get ( pk = source . get ( '<STR_LIT:id>' ) ) <EOL> def deserialize_field ( self , source , field_name ) : <EOL> method_name = '<STR_LIT>' . format ( field_name ) <EOL> if hasattr ( self , method_name ) : <EOL> return getattr ( self , method_name ) ( source , field_name ) <EOL> field = self . model . _meta . get_field ( field_name ) <EOL> field_type_method_name = '<STR_LIT>' . format ( <EOL> field . __class__ . __name__ . lower ( ) ) <EOL> if hasattr ( self , field_type_method_name ) : <EOL> return getattr ( self , field_type_method_name ) ( source , field_name )
val = source . get ( field_name )
8,667,484,079,252,960,000
import json <EOL> import datetime <EOL> from django . db . models import FieldDoesNotExist <EOL> from django . db . models . fields . related import ManyToManyField <EOL> class EsSerializer ( object ) : <EOL> def serialize ( self , instance ) : <EOL> raise NotImplementedError ( ) <EOL> def deserialize ( self , source ) : <EOL> raise NotImplementedError ( ) <EOL> class EsDbMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def deserialize ( self , source ) : <EOL> pk_field = self . model . _meta . auto_field <EOL> ids = [ e [ pk_field . name ] for e in source ] <EOL> return self . model . objects . filter ( ** { pk_field . name + '<STR_LIT>' : ids } ) <EOL> class EsJsonToModelMixin ( object ) : <EOL> """<STR_LIT>""" <EOL> def instanciate ( self , attrs ) : <EOL> instance = self . model ( ** attrs ) <EOL> instance . _is_es_deserialized = True <EOL> return instance <EOL> def nested_deserialize ( self , field , source ) : <EOL> if source : <EOL> if hasattr ( field . rel . to , '<STR_LIT>' ) : <EOL> serializer = field . rel . to . es . get_serializer ( ) <EOL> obj = serializer . deserialize ( source ) <EOL> return obj <EOL> elif '<STR_LIT:id>' in source and '<STR_LIT:value>' in source : <EOL> return field . rel . to . objects . get ( pk = source . get ( '<STR_LIT:id>' ) ) <EOL> def deserialize_field ( self , source , field_name ) : <EOL> method_name = '<STR_LIT>' . format ( field_name ) <EOL> if hasattr ( self , method_name ) : <EOL> return getattr ( self , method_name ) ( source , field_name ) <EOL> field = self . model . _meta . get_field ( field_name ) <EOL> field_type_method_name = '<STR_LIT>' . format ( <EOL> field . __class__ . __name__ . lower ( ) ) <EOL> if hasattr ( self , field_type_method_name ) : <EOL> return getattr ( self , field_type_method_name ) ( source , field_name ) <EOL> val = source . get ( field_name ) <EOL> typ = field . get_internal_type ( ) <EOL> if val and typ in ( '<STR_LIT>' , '<STR_LIT>' ) : <EOL> return datetime . datetime . strptime ( val , '<STR_LIT>' ) <EOL> if field . rel : <EOL> if isinstance ( field , ManyToManyField ) : <EOL> raise AttributeError <EOL> return self . nested_deserialize ( field , source . get ( field_name ) ) <EOL> return source . get ( field_name ) <EOL> def deserialize ( self , source ) : <EOL> """<STR_LIT>""" <EOL> attrs = { } <EOL> for k , v in source . iteritems ( ) : <EOL> try : <EOL> attrs [ k ] = self . deserialize_field ( source , k ) <EOL> except ( AttributeError , FieldDoesNotExist ) : <EOL> pass <EOL> return self . instanciate ( attrs ) <EOL> class EsModelToJsonMixin ( object ) : <EOL> def __init__ ( self , model , max_depth = <NUM_LIT:2> , cur_depth = <NUM_LIT:1> ) : <EOL> self . model = model <EOL> self . cur_depth = cur_depth <EOL> self . max_depth = max_depth <EOL> def serialize_field ( self , instance , field_name ) : <EOL> method_name = '<STR_LIT>' . format ( field_name ) <EOL> if hasattr ( self , method_name ) : <EOL> return getattr ( self , method_name ) ( instance , field_name ) <EOL> try : <EOL> field = self . model . _meta . get_field ( field_name ) <EOL> except FieldDoesNotExist : <EOL> pass <EOL> else : <EOL> field_type_method_name = '<STR_LIT>' . format ( <EOL> field . __class__ . __name__ . lower ( ) ) <EOL> if hasattr ( self , field_type_method_name ) : <EOL> return getattr ( self , field_type_method_name ) ( instance , field_name ) <EOL> if field . rel : <EOL> if isinstance ( field , ManyToManyField ) : <EOL> return [ self . nested_serialize ( r ) <EOL> for r in getattr ( instance , field . name ) . 
all ( ) ] <EOL> rel = getattr ( instance , field . name ) <EOL> if rel : <EOL> if self . cur_depth >= self . max_depth : <EOL> return <EOL> return self . nested_serialize ( rel ) <EOL> try : <EOL> return getattr ( instance , field_name ) <EOL> except AttributeError : <EOL> raise AttributeError ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" . format ( field_name , method_name ) ) <EOL> def nested_serialize ( self , rel ) : <EOL> if hasattr ( rel , '<STR_LIT>' ) : <EOL> serializer = rel . es . get_serializer ( max_depth = self . max_depth , <EOL> cur_depth = self . cur_depth + <NUM_LIT:1> ) <EOL> obj = serializer . format ( rel ) <EOL> return obj <EOL> return dict ( id = rel . pk , value = unicode ( rel ) ) <EOL> def format ( self , instance ) : <EOL> fields = self . model . es . get_fields ( ) <EOL> obj = dict ( [ ( field , self . serialize_field ( instance , field ) ) <EOL> for field in fields ] ) <EOL> completion_fields = instance . Elasticsearch . completion_fields <EOL> for field_name in completion_fields or [ ] : <EOL> suggest_name = "<STR_LIT>" . format ( field_name ) <EOL> obj [ suggest_name ] = self . serialize_field ( instance , field_name ) <EOL> return obj <EOL> def serialize ( self , instance ) : <EOL> return json . dumps ( self . format ( instance ) , <EOL> default = lambda d : ( <EOL> d . isoformat ( ) if isinstance ( d , datetime . datetime ) <EOL> or isinstance ( d , datetime . date ) else None ) ) <EOL> class EsJsonSerializer ( EsModelToJsonMixin , EsJsonToModelMixin , EsSerializer ) : <EOL> """<STR_LIT>""" <EOL> pass <EOL> class EsSimpleJsonSerializer ( EsModelToJsonMixin , EsDbMixin , EsSerializer ) : <EOL> pass </s>
94,787
__version__ = "<STR_LIT>" <EOL> def main ( ) :
from lice . core import main
-4,457,169,340,035,310,000
__version__ = "<STR_LIT>" <EOL> def main ( ) : <EOL> from lice . core import main <EOL> main ( ) </s>
94,788
<s>
from documents import * </s>
-127,618,251,350,086,620
from documents import * </s>
94,789
<s>
from __future__ import with_statement , division , absolute_import </s>
-5,286,168,372,051,726,000
from __future__ import with_statement , division , absolute_import </s>
94,790
<s>
from . classes import * </s>
37,619,367,567,635,704
from . classes import * </s>
94,791
import sys , os <EOL> path = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> if path not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , path ) <EOL> from uliweb . manage import make_simple_application
application = make_simple_application ( project_dir = path ) </s>
-2,793,708,651,232,110,000
import sys , os <EOL> path = os . path . dirname ( os . path . abspath ( __file__ ) ) <EOL> if path not in sys . path : <EOL> sys . path . insert ( <NUM_LIT:0> , path ) <EOL> from uliweb . manage import make_simple_application <EOL> application = make_simple_application ( project_dir = path ) </s>
94,792
__author__ = '<STR_LIT>' <EOL> __author_email__ = '<STR_LIT>' <EOL> __url__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> version = __version__ = '<STR_LIT>' <EOL> import os , sys <EOL> workpath = os . path . dirname ( __file__ ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( workpath , '<STR_LIT>' ) ) <EOL> class UliwebError ( Exception ) : pass <EOL> from . core . SimpleFrame import ( Request , Response , redirect , Redirect , error , json , jsonp , <EOL> POST , GET , url_for , expose , get_app_dir , get_apps , function , Finder , decorators , <EOL> functions , response , request , settings , application , NotFound , HTTPException , <EOL> is_in_web , CONTENT_TYPE_JSON , CONTENT_TYPE_TEXT <EOL> ) <EOL> from . core . js import json_dumps <EOL> from . utils . storage import Storage <EOL> from . core . rules import get_endpoint <EOL> class Middleware ( object ) : <EOL> ORDER = <NUM_LIT> <EOL> def __init__ ( self , application , settings ) : <EOL> self . application = application <EOL> self . settings = settings <EOL> def load_ipython_extension ( ipython ) : <EOL> from uliweb . utils . ipython_extension import make_shell_env , patch_ipython
patch_ipython ( )
3,648,083,913,931,485,000
__author__ = '<STR_LIT>' <EOL> __author_email__ = '<STR_LIT>' <EOL> __url__ = '<STR_LIT>' <EOL> __license__ = '<STR_LIT>' <EOL> version = __version__ = '<STR_LIT>' <EOL> import os , sys <EOL> workpath = os . path . dirname ( __file__ ) <EOL> sys . path . insert ( <NUM_LIT:0> , os . path . join ( workpath , '<STR_LIT>' ) ) <EOL> class UliwebError ( Exception ) : pass <EOL> from . core . SimpleFrame import ( Request , Response , redirect , Redirect , error , json , jsonp , <EOL> POST , GET , url_for , expose , get_app_dir , get_apps , function , Finder , decorators , <EOL> functions , response , request , settings , application , NotFound , HTTPException , <EOL> is_in_web , CONTENT_TYPE_JSON , CONTENT_TYPE_TEXT <EOL> ) <EOL> from . core . js import json_dumps <EOL> from . utils . storage import Storage <EOL> from . core . rules import get_endpoint <EOL> class Middleware ( object ) : <EOL> ORDER = <NUM_LIT> <EOL> def __init__ ( self , application , settings ) : <EOL> self . application = application <EOL> self . settings = settings <EOL> def load_ipython_extension ( ipython ) : <EOL> from uliweb . utils . ipython_extension import make_shell_env , patch_ipython <EOL> patch_ipython ( ) <EOL> ipython . push ( make_shell_env ( ) ) </s>
94,793
import os <EOL> from uliweb import functions <EOL> from optparse import make_option <EOL> from uliweb . core . commands import Command , get_input , get_answer <EOL> class MigrateModelConfigCommand ( Command ) : <EOL> name = '<STR_LIT>' <EOL> help = '<STR_LIT>' <EOL> args = '<STR_LIT>' <EOL> check_apps_dirs = True <EOL> check_apps = False <EOL> option_list = ( <EOL> make_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) , <EOL> ) <EOL> def reset_model ( self , model_name , reset ) : <EOL> M = functions . get_model ( model_name , signal = False ) <EOL> engine = M . get_engine ( ) . engine <EOL> if not M . table . exists ( engine ) : <EOL> print "<STR_LIT>" % ( model_name , M . tablename )
M . table . create ( engine )
5,839,761,848,085,545,000
import os <EOL> from uliweb import functions <EOL> from optparse import make_option <EOL> from uliweb . core . commands import Command , get_input , get_answer <EOL> class MigrateModelConfigCommand ( Command ) : <EOL> name = '<STR_LIT>' <EOL> help = '<STR_LIT>' <EOL> args = '<STR_LIT>' <EOL> check_apps_dirs = True <EOL> check_apps = False <EOL> option_list = ( <EOL> make_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) , <EOL> ) <EOL> def reset_model ( self , model_name , reset ) : <EOL> M = functions . get_model ( model_name , signal = False ) <EOL> engine = M . get_engine ( ) . engine <EOL> if not M . table . exists ( engine ) : <EOL> print "<STR_LIT>" % ( model_name , M . tablename ) <EOL> M . table . create ( engine ) <EOL> else : <EOL> if reset : <EOL> print "<STR_LIT>" % ( model_name , M . tablename ) <EOL> M . table . drop ( engine ) <EOL> M . table . create ( engine ) <EOL> else : <EOL> print "<STR_LIT>" % ( model_name , M . tablename ) <EOL> M . migrate ( ) <EOL> def handle ( self , options , global_options , * args ) : <EOL> self . get_application ( global_options ) <EOL> self . reset_model ( '<STR_LIT>' , options . reset ) <EOL> self . reset_model ( '<STR_LIT>' , options . reset ) </s>
94,794
from uliweb import Middleware , settings <EOL> from uliweb . utils . common import import_attr , application_path <EOL> from uliweb . core . SimpleFrame import RedirectException <EOL> from weto . session import Session , SessionCookie <EOL> class SessionMiddle ( Middleware ) : <EOL> def __init__ ( self , application , settings ) : <EOL> from datetime import timedelta <EOL> self . options = dict ( settings . get ( '<STR_LIT>' , { } ) ) <EOL> self . options [ '<STR_LIT>' ] = application_path ( self . options [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT:url>' not in self . options : <EOL> _url = ( settings . get_var ( '<STR_LIT>' , '<STR_LIT>' ) or <EOL> settings . get_var ( '<STR_LIT>' , { } ) . get ( '<STR_LIT:default>' , { } ) . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if _url : <EOL> self . options [ '<STR_LIT:url>' ] = _url <EOL> self . remember_me_timeout = settings . SESSION . remember_me_timeout <EOL> self . session_storage_type = settings . SESSION . type <EOL> self . timeout = settings . SESSION . timeout <EOL> Session . force = settings . SESSION . force <EOL> SessionCookie . default_domain = settings . SESSION_COOKIE . domain <EOL> SessionCookie . default_path = settings . SESSION_COOKIE . path <EOL> SessionCookie . default_secure = settings . SESSION_COOKIE . secure <EOL> SessionCookie . default_cookie_id = settings . SESSION_COOKIE . cookie_id <EOL> if isinstance ( settings . SESSION_COOKIE . timeout , int ) : <EOL> timeout = timedelta ( seconds = settings . SESSION_COOKIE . timeout ) <EOL> else : <EOL> timeout = settings . SESSION_COOKIE . timeout <EOL> SessionCookie . default_expiry_time = timeout <EOL> def process_request ( self , request ) : <EOL> key = request . cookies . get ( SessionCookie . default_cookie_id ) <EOL> if not key : <EOL> key = request . values . get ( SessionCookie . default_cookie_id ) <EOL> serial_cls_path = settings . SESSION . serial_cls <EOL> if serial_cls_path : <EOL> serial_cls = import_attr ( serial_cls_path ) <EOL> else :
serial_cls = None
-2,312,592,884,806,247,400
from uliweb import Middleware , settings <EOL> from uliweb . utils . common import import_attr , application_path <EOL> from uliweb . core . SimpleFrame import RedirectException <EOL> from weto . session import Session , SessionCookie <EOL> class SessionMiddle ( Middleware ) : <EOL> def __init__ ( self , application , settings ) : <EOL> from datetime import timedelta <EOL> self . options = dict ( settings . get ( '<STR_LIT>' , { } ) ) <EOL> self . options [ '<STR_LIT>' ] = application_path ( self . options [ '<STR_LIT>' ] ) <EOL> if '<STR_LIT:url>' not in self . options : <EOL> _url = ( settings . get_var ( '<STR_LIT>' , '<STR_LIT>' ) or <EOL> settings . get_var ( '<STR_LIT>' , { } ) . get ( '<STR_LIT:default>' , { } ) . get ( '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> if _url : <EOL> self . options [ '<STR_LIT:url>' ] = _url <EOL> self . remember_me_timeout = settings . SESSION . remember_me_timeout <EOL> self . session_storage_type = settings . SESSION . type <EOL> self . timeout = settings . SESSION . timeout <EOL> Session . force = settings . SESSION . force <EOL> SessionCookie . default_domain = settings . SESSION_COOKIE . domain <EOL> SessionCookie . default_path = settings . SESSION_COOKIE . path <EOL> SessionCookie . default_secure = settings . SESSION_COOKIE . secure <EOL> SessionCookie . default_cookie_id = settings . SESSION_COOKIE . cookie_id <EOL> if isinstance ( settings . SESSION_COOKIE . timeout , int ) : <EOL> timeout = timedelta ( seconds = settings . SESSION_COOKIE . timeout ) <EOL> else : <EOL> timeout = settings . SESSION_COOKIE . timeout <EOL> SessionCookie . default_expiry_time = timeout <EOL> def process_request ( self , request ) : <EOL> key = request . cookies . get ( SessionCookie . default_cookie_id ) <EOL> if not key : <EOL> key = request . values . get ( SessionCookie . default_cookie_id ) <EOL> serial_cls_path = settings . SESSION . serial_cls <EOL> if serial_cls_path : <EOL> serial_cls = import_attr ( serial_cls_path ) <EOL> else : <EOL> serial_cls = None <EOL> session = Session ( key , storage_type = self . session_storage_type , <EOL> options = self . options , expiry_time = self . timeout , serial_cls = serial_cls ) <EOL> request . session = session <EOL> def process_response ( self , request , response ) : <EOL> session = request . session <EOL> if session . deleted : <EOL> response . delete_cookie ( session . cookie . cookie_id ) <EOL> else : <EOL> cookie_max_age = None <EOL> c = session . cookie <EOL> if session . remember : <EOL> session . set_expiry ( self . remember_me_timeout ) <EOL> cookie_max_age = self . remember_me_timeout <EOL> else : <EOL> cookie_max_age = c . expiry_time <EOL> flag = session . save ( ) <EOL> if flag : <EOL> response . set_cookie ( c . cookie_id , <EOL> session . key , max_age = cookie_max_age , <EOL> expires = None , domain = c . domain , <EOL> path = c . path , secure = c . secure ) <EOL> return response <EOL> def process_exception ( self , request , e ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( e , RedirectException ) : <EOL> response = e . get_response ( ) <EOL> self . process_response ( request , response ) </s>
94,795
from __future__ import with_statement <EOL> from layout import Layout <EOL> from uliweb . core import uaml <EOL> from uliweb . core . html import Tag , begin_tag , end_tag , u_str <EOL> class FormWriter ( uaml . Writer ) : <EOL> field_classes = { <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self , form ) : <EOL> self . form = form <EOL> def get_class ( self , f ) : <EOL> name = f . build . __name__ <EOL> _class = '<STR_LIT>' <EOL> for k , v in self . field_classes . items ( ) : <EOL> if name in k : <EOL> _class = v <EOL> break <EOL> return _class <EOL> def get_widget_name ( self , f ) : <EOL> return f . build . __name__ <EOL> def is_hidden ( self , f ) : <EOL> return self . get_widget_name ( f ) == '<STR_LIT>' <EOL> def begin_form ( self , indent , value , ** kwargs ) : <EOL> if kwargs . get ( '<STR_LIT:class>' , None ) : <EOL> self . form . html_attrs [ '<STR_LIT:class>' ] = kwargs [ '<STR_LIT:class>' ] <EOL> return indent * '<STR_LIT:U+0020>' + self . form . form_begin <EOL> def close_form ( self , indent ) : <EOL> return indent * '<STR_LIT:U+0020>' + self . form . form_end <EOL> def begin_buttons ( self , indent , value , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return indent * '<STR_LIT:U+0020>' + begin_tag ( '<STR_LIT>' , ** kwargs ) <EOL> def close_buttons ( self , indent ) : <EOL> return indent * '<STR_LIT:U+0020>' + end_tag ( '<STR_LIT>' ) <EOL> def do_button ( self , indent , value , ** kwargs ) : <EOL> v = { '<STR_LIT:value>' : value , '<STR_LIT:type>' : '<STR_LIT>' } <EOL> v . update ( kwargs ) <EOL> return indent * '<STR_LIT:U+0020>' + str ( Tag ( '<STR_LIT:input>' , None , ** v ) ) <EOL> def do_field ( self , indent , value , ** kwargs ) : <EOL> field_name = kwargs [ '<STR_LIT:name>' ]
field = getattr ( self . form , field_name )
-3,106,717,466,618,451,000
from __future__ import with_statement <EOL> from layout import Layout <EOL> from uliweb . core import uaml <EOL> from uliweb . core . html import Tag , begin_tag , end_tag , u_str <EOL> class FormWriter ( uaml . Writer ) : <EOL> field_classes = { <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) : '<STR_LIT>' , <EOL> } <EOL> def __init__ ( self , form ) : <EOL> self . form = form <EOL> def get_class ( self , f ) : <EOL> name = f . build . __name__ <EOL> _class = '<STR_LIT>' <EOL> for k , v in self . field_classes . items ( ) : <EOL> if name in k : <EOL> _class = v <EOL> break <EOL> return _class <EOL> def get_widget_name ( self , f ) : <EOL> return f . build . __name__ <EOL> def is_hidden ( self , f ) : <EOL> return self . get_widget_name ( f ) == '<STR_LIT>' <EOL> def begin_form ( self , indent , value , ** kwargs ) : <EOL> if kwargs . get ( '<STR_LIT:class>' , None ) : <EOL> self . form . html_attrs [ '<STR_LIT:class>' ] = kwargs [ '<STR_LIT:class>' ] <EOL> return indent * '<STR_LIT:U+0020>' + self . form . form_begin <EOL> def close_form ( self , indent ) : <EOL> return indent * '<STR_LIT:U+0020>' + self . form . form_end <EOL> def begin_buttons ( self , indent , value , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return indent * '<STR_LIT:U+0020>' + begin_tag ( '<STR_LIT>' , ** kwargs ) <EOL> def close_buttons ( self , indent ) : <EOL> return indent * '<STR_LIT:U+0020>' + end_tag ( '<STR_LIT>' ) <EOL> def do_button ( self , indent , value , ** kwargs ) : <EOL> v = { '<STR_LIT:value>' : value , '<STR_LIT:type>' : '<STR_LIT>' } <EOL> v . update ( kwargs ) <EOL> return indent * '<STR_LIT:U+0020>' + str ( Tag ( '<STR_LIT:input>' , None , ** v ) ) <EOL> def do_field ( self , indent , value , ** kwargs ) : <EOL> field_name = kwargs [ '<STR_LIT:name>' ] <EOL> field = getattr ( self . form , field_name ) <EOL> error = field . error <EOL> obj = self . form . fields [ field_name ] <EOL> help_string = kwargs . get ( '<STR_LIT>' , None ) or field . help_string <EOL> if '<STR_LIT:label>' in kwargs : <EOL> label = kwargs [ '<STR_LIT:label>' ] <EOL> else : <EOL> label = obj . label <EOL> if label : <EOL> obj . label = label <EOL> label_text = obj . get_label ( _class = '<STR_LIT>' ) <EOL> else : <EOL> label_text = '<STR_LIT>' <EOL> _class = self . get_class ( obj ) <EOL> if error : <EOL> _class = _class + '<STR_LIT>' <EOL> if self . is_hidden ( obj ) : <EOL> return str ( field ) <EOL> div = Tag ( '<STR_LIT>' , _class = _class ) <EOL> with div : <EOL> if error : <EOL> div . strong ( error , _class = "<STR_LIT:message>" ) <EOL> if self . get_widget_name ( obj ) == '<STR_LIT>' : <EOL> div << '<STR_LIT>' + str ( field ) + label + '<STR_LIT>' <EOL> div << help_string <EOL> else : <EOL> div << label_text <EOL> div << help_string <EOL> div << field <EOL> return indent * '<STR_LIT:U+0020>' + str ( div ) <EOL> def do_td_field ( self , indent , value , ** kwargs ) : <EOL> field_name = kwargs . pop ( '<STR_LIT:name>' , None ) <EOL> field = getattr ( self . form , field_name ) <EOL> obj = self . form . fields [ field_name ] <EOL> if '<STR_LIT:label>' in kwargs : <EOL> label = kwargs . pop ( '<STR_LIT:label>' ) <EOL> else : <EOL> label = obj . label <EOL> if label : <EOL> obj . label = label <EOL> label_text = obj . get_label ( _class = '<STR_LIT>' ) <EOL> else : <EOL> label_text = '<STR_LIT>' <EOL> display = field . 
data or '<STR_LIT>' <EOL> if '<STR_LIT:width>' not in kwargs : <EOL> kwargs [ '<STR_LIT:width>' ] = <NUM_LIT:200> <EOL> td = begin_tag ( '<STR_LIT>' , ** kwargs ) + u_str ( display ) + end_tag ( '<STR_LIT>' ) <EOL> return indent * '<STR_LIT:U+0020>' + '<STR_LIT>' % ( label_text , td ) <EOL> def do_static ( self , indent , value , ** kwargs ) : <EOL> field_name = kwargs . get ( '<STR_LIT:name>' , None ) <EOL> field = getattr ( self . form , field_name ) <EOL> label = kwargs . get ( '<STR_LIT:label>' , None ) <EOL> obj = self . form . fields [ field_name ] <EOL> if label : <EOL> obj . label = label <EOL> label = obj . get_label ( _class = '<STR_LIT>' ) <EOL> display = field . data or '<STR_LIT>' <EOL> return indent * '<STR_LIT:U+0020>' + '<STR_LIT>' % ( label , u_str ( display ) ) <EOL> class TemplateLayout ( Layout ) : <EOL> def __init__ ( self , form , layout = None , writer = None ) : <EOL> self . form = form <EOL> self . layout = layout <EOL> self . writer = writer or FormWriter ( form ) <EOL> def html ( self ) : <EOL> from uliweb import application <EOL> f = file ( application . get_file ( self . layout , dir = '<STR_LIT>' ) , '<STR_LIT:rb>' ) <EOL> text = f . read ( ) <EOL> f . close ( ) <EOL> return str ( uaml . Parser ( text , self . writer ) ) <EOL> class TemplateFileLayout ( Layout ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , form , layout = None , vars = None ) : <EOL> self . form = form <EOL> self . layout = layout <EOL> self . vars = vars or { } <EOL> self . vars [ '<STR_LIT>' ] = form <EOL> def html ( self ) : <EOL> from uliweb import application <EOL> return application . template ( self . layout , self . vars ) <EOL> class BootstrapFormWriter ( FormWriter ) : <EOL> def begin_form ( self , indent , value , ** kwargs ) : <EOL> if kwargs . get ( '<STR_LIT:class>' , None ) : <EOL> self . form . html_attrs [ '<STR_LIT:class>' ] = kwargs [ '<STR_LIT:class>' ] <EOL> if not self . form . html_attrs [ '<STR_LIT:class>' ] : <EOL> self . form . html_attrs [ '<STR_LIT:class>' ] = '<STR_LIT>' <EOL> return indent * '<STR_LIT:U+0020>' + self . form . form_begin <EOL> def begin_buttons ( self , indent , value , ** kwargs ) : <EOL> kwargs [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> return indent * '<STR_LIT:U+0020>' + begin_tag ( '<STR_LIT>' , ** kwargs ) <EOL> def do_button ( self , indent , value , ** kwargs ) : <EOL> v = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:class>' : '<STR_LIT>' } <EOL> v . update ( kwargs ) <EOL> return indent * '<STR_LIT:U+0020>' + str ( Tag ( '<STR_LIT>' , value , ** v ) ) <EOL> def do_field ( self , indent , value , ** kwargs ) : <EOL> field_name = kwargs . pop ( '<STR_LIT:name>' ) <EOL> field = getattr ( self . form , field_name ) <EOL> error = field . error <EOL> obj = self . form . fields [ field_name ] <EOL> help_string = kwargs . pop ( '<STR_LIT>' , None ) or field . help_string <EOL> if '<STR_LIT:label>' in kwargs : <EOL> label = kwargs . pop ( '<STR_LIT:label>' ) <EOL> else : <EOL> label = obj . label <EOL> if label : <EOL> obj . label = label <EOL> label_text = obj . get_label ( _class = '<STR_LIT>' ) <EOL> else : <EOL> label_text = '<STR_LIT>' <EOL> _class = self . get_class ( obj ) + "<STR_LIT>" <EOL> if label_text == '<STR_LIT>' : <EOL> _class = _class + "<STR_LIT>" <EOL> if error : <EOL> _class = _class + '<STR_LIT>' <EOL> field . field . html_attrs . update ( kwargs ) <EOL> if self . is_hidden ( obj ) : <EOL> return str ( field ) <EOL> div_group = Tag ( '<STR_LIT>' , _class = _class , id = '<STR_LIT>' + obj . 
id ) <EOL> with div_group : <EOL> if self . get_widget_name ( obj ) == '<STR_LIT>' : <EOL> div_group << "<STR_LIT>" <EOL> else : <EOL> div_group << label_text <EOL> div = Tag ( '<STR_LIT>' , _class = '<STR_LIT>' ) <EOL> with div : <EOL> if self . get_widget_name ( obj ) == '<STR_LIT>' : <EOL> div << '<STR_LIT>' + str ( field ) + label + '<STR_LIT>' <EOL> else : <EOL> div << field <EOL> div << Tag ( '<STR_LIT>' , _class = "<STR_LIT>" , _value = help_string ) <EOL> if error : <EOL> div << Tag ( '<STR_LIT>' , _class = "<STR_LIT>" , _value = error ) <EOL> div_group << str ( div ) <EOL> return indent * '<STR_LIT:U+0020>' + str ( div_group ) <EOL> class BootstrapTemplateLayout ( TemplateLayout ) : <EOL> def __init__ ( self , form , layout = None , writer = None ) : <EOL> self . form = form <EOL> self . layout = layout <EOL> self . writer = BootstrapFormWriter ( form ) </s>
94,796
"""<STR_LIT>""" <EOL> import os <EOL> import hmac <EOL> import hashlib <EOL> import posixpath <EOL> import codecs <EOL> from struct import Struct <EOL> from random import SystemRandom <EOL> from operator import xor <EOL> from itertools import starmap <EOL> from werkzeug . _compat import range_type , PY2 , text_type , izip , to_bytes , string_types , to_native <EOL> SALT_CHARS = '<STR_LIT>' <EOL> DEFAULT_PBKDF2_ITERATIONS = <NUM_LIT:1000> <EOL> _pack_int = Struct ( '<STR_LIT>' ) . pack <EOL> _builtin_safe_str_cmp = getattr ( hmac , '<STR_LIT>' , None ) <EOL> _sys_rng = SystemRandom ( ) <EOL> _os_alt_seps = list ( sep for sep in [ os . path . sep , os . path . altsep ] <EOL> if sep not in ( None , '<STR_LIT:/>' ) ) <EOL> def _find_hashlib_algorithms ( ) : <EOL> algos = getattr ( hashlib , '<STR_LIT>' , None ) <EOL> if algos is None : <EOL> algos = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> rv = { } <EOL> for algo in algos : <EOL> func = getattr ( hashlib , algo , None ) <EOL> if func is not None : <EOL> rv [ algo ] = func <EOL> return rv <EOL> _hash_funcs = _find_hashlib_algorithms ( ) <EOL> def pbkdf2_hex ( data , salt , iterations = DEFAULT_PBKDF2_ITERATIONS , <EOL> keylen = None , hashfunc = None ) : <EOL> """<STR_LIT>""" <EOL> rv = pbkdf2_bin ( data , salt , iterations , keylen , hashfunc ) <EOL> return to_native ( codecs . encode ( rv , '<STR_LIT>' ) ) <EOL> def pbkdf2_bin ( data , salt , iterations = DEFAULT_PBKDF2_ITERATIONS , <EOL> keylen = None , hashfunc = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( hashfunc , string_types ) : <EOL> hashfunc = _hash_funcs [ hashfunc ] <EOL> elif not hashfunc : <EOL> hashfunc = hashlib . sha1 <EOL> salt = to_bytes ( salt ) <EOL> mac = hmac . HMAC ( to_bytes ( data ) , None , hashfunc ) <EOL> if not keylen : <EOL> keylen = mac . digest_size <EOL> def _pseudorandom ( x , mac = mac ) : <EOL> h = mac . copy ( ) <EOL> h . update ( x ) <EOL> return bytearray ( h . digest ( ) ) <EOL> buf = bytearray ( ) <EOL> for block in range_type ( <NUM_LIT:1> , - ( - keylen // mac . digest_size ) + <NUM_LIT:1> ) : <EOL> rv = u = _pseudorandom ( salt + _pack_int ( block ) ) <EOL> for i in range_type ( iterations - <NUM_LIT:1> ) : <EOL> u = _pseudorandom ( bytes ( u ) ) <EOL> rv = bytearray ( starmap ( xor , izip ( rv , u ) ) ) <EOL> buf . extend ( rv ) <EOL> return bytes ( buf [ : keylen ] ) <EOL> def safe_str_cmp ( a , b ) : <EOL> """<STR_LIT>""" <EOL> if _builtin_safe_str_cmp is not None : <EOL> return _builtin_safe_str_cmp ( a , b ) <EOL> if len ( a ) != len ( b ) : <EOL> return False <EOL> rv = <NUM_LIT:0> <EOL> if isinstance ( a , bytes ) and isinstance ( b , bytes ) and not PY2 : <EOL> for x , y in izip ( a , b ) : <EOL> rv |= x ^ y <EOL> else : <EOL> for x , y in izip ( a , b ) : <EOL> rv |= ord ( x ) ^ ord ( y ) <EOL> return rv == <NUM_LIT:0> <EOL> def gen_salt ( length ) : <EOL> """<STR_LIT>""" <EOL> if length <= <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' . join ( _sys_rng . choice ( SALT_CHARS ) for _ in range_type ( length ) )
def _hash_internal ( method , salt , password ) :
7,248,343,448,341,948,000
"""<STR_LIT>""" <EOL> import os <EOL> import hmac <EOL> import hashlib <EOL> import posixpath <EOL> import codecs <EOL> from struct import Struct <EOL> from random import SystemRandom <EOL> from operator import xor <EOL> from itertools import starmap <EOL> from werkzeug . _compat import range_type , PY2 , text_type , izip , to_bytes , string_types , to_native <EOL> SALT_CHARS = '<STR_LIT>' <EOL> DEFAULT_PBKDF2_ITERATIONS = <NUM_LIT:1000> <EOL> _pack_int = Struct ( '<STR_LIT>' ) . pack <EOL> _builtin_safe_str_cmp = getattr ( hmac , '<STR_LIT>' , None ) <EOL> _sys_rng = SystemRandom ( ) <EOL> _os_alt_seps = list ( sep for sep in [ os . path . sep , os . path . altsep ] <EOL> if sep not in ( None , '<STR_LIT:/>' ) ) <EOL> def _find_hashlib_algorithms ( ) : <EOL> algos = getattr ( hashlib , '<STR_LIT>' , None ) <EOL> if algos is None : <EOL> algos = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> rv = { } <EOL> for algo in algos : <EOL> func = getattr ( hashlib , algo , None ) <EOL> if func is not None : <EOL> rv [ algo ] = func <EOL> return rv <EOL> _hash_funcs = _find_hashlib_algorithms ( ) <EOL> def pbkdf2_hex ( data , salt , iterations = DEFAULT_PBKDF2_ITERATIONS , <EOL> keylen = None , hashfunc = None ) : <EOL> """<STR_LIT>""" <EOL> rv = pbkdf2_bin ( data , salt , iterations , keylen , hashfunc ) <EOL> return to_native ( codecs . encode ( rv , '<STR_LIT>' ) ) <EOL> def pbkdf2_bin ( data , salt , iterations = DEFAULT_PBKDF2_ITERATIONS , <EOL> keylen = None , hashfunc = None ) : <EOL> """<STR_LIT>""" <EOL> if isinstance ( hashfunc , string_types ) : <EOL> hashfunc = _hash_funcs [ hashfunc ] <EOL> elif not hashfunc : <EOL> hashfunc = hashlib . sha1 <EOL> salt = to_bytes ( salt ) <EOL> mac = hmac . HMAC ( to_bytes ( data ) , None , hashfunc ) <EOL> if not keylen : <EOL> keylen = mac . digest_size <EOL> def _pseudorandom ( x , mac = mac ) : <EOL> h = mac . copy ( ) <EOL> h . update ( x ) <EOL> return bytearray ( h . digest ( ) ) <EOL> buf = bytearray ( ) <EOL> for block in range_type ( <NUM_LIT:1> , - ( - keylen // mac . digest_size ) + <NUM_LIT:1> ) : <EOL> rv = u = _pseudorandom ( salt + _pack_int ( block ) ) <EOL> for i in range_type ( iterations - <NUM_LIT:1> ) : <EOL> u = _pseudorandom ( bytes ( u ) ) <EOL> rv = bytearray ( starmap ( xor , izip ( rv , u ) ) ) <EOL> buf . extend ( rv ) <EOL> return bytes ( buf [ : keylen ] ) <EOL> def safe_str_cmp ( a , b ) : <EOL> """<STR_LIT>""" <EOL> if _builtin_safe_str_cmp is not None : <EOL> return _builtin_safe_str_cmp ( a , b ) <EOL> if len ( a ) != len ( b ) : <EOL> return False <EOL> rv = <NUM_LIT:0> <EOL> if isinstance ( a , bytes ) and isinstance ( b , bytes ) and not PY2 : <EOL> for x , y in izip ( a , b ) : <EOL> rv |= x ^ y <EOL> else : <EOL> for x , y in izip ( a , b ) : <EOL> rv |= ord ( x ) ^ ord ( y ) <EOL> return rv == <NUM_LIT:0> <EOL> def gen_salt ( length ) : <EOL> """<STR_LIT>""" <EOL> if length <= <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> return '<STR_LIT>' . join ( _sys_rng . choice ( SALT_CHARS ) for _ in range_type ( length ) ) <EOL> def _hash_internal ( method , salt , password ) : <EOL> """<STR_LIT>""" <EOL> if method == '<STR_LIT>' : <EOL> return password , method <EOL> if isinstance ( password , text_type ) : <EOL> password = password . encode ( '<STR_LIT:utf-8>' ) <EOL> if method . startswith ( '<STR_LIT>' ) : <EOL> args = method [ <NUM_LIT:7> : ] . 
split ( '<STR_LIT::>' ) <EOL> if len ( args ) not in ( <NUM_LIT:1> , <NUM_LIT:2> ) : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> method = args . pop ( <NUM_LIT:0> ) <EOL> iterations = args and int ( args [ <NUM_LIT:0> ] or <NUM_LIT:0> ) or DEFAULT_PBKDF2_ITERATIONS <EOL> is_pbkdf2 = True <EOL> actual_method = '<STR_LIT>' % ( method , iterations ) <EOL> else : <EOL> is_pbkdf2 = False <EOL> actual_method = method <EOL> hash_func = _hash_funcs . get ( method ) <EOL> if hash_func is None : <EOL> raise TypeError ( '<STR_LIT>' % method ) <EOL> if is_pbkdf2 : <EOL> if not salt : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> rv = pbkdf2_hex ( password , salt , iterations , <EOL> hashfunc = hash_func ) <EOL> elif salt : <EOL> if isinstance ( salt , text_type ) : <EOL> salt = salt . encode ( '<STR_LIT:utf-8>' ) <EOL> rv = hmac . HMAC ( salt , password , hash_func ) . hexdigest ( ) <EOL> else : <EOL> h = hash_func ( ) <EOL> h . update ( password ) <EOL> rv = h . hexdigest ( ) <EOL> return rv , actual_method <EOL> def generate_password_hash ( password , method = '<STR_LIT>' , salt_length = <NUM_LIT:8> ) : <EOL> """<STR_LIT>""" <EOL> salt = method != '<STR_LIT>' and gen_salt ( salt_length ) or '<STR_LIT>' <EOL> h , actual_method = _hash_internal ( method , salt , password ) <EOL> return '<STR_LIT>' % ( actual_method , salt , h ) <EOL> def check_password_hash ( pwhash , password ) : <EOL> """<STR_LIT>""" <EOL> if pwhash . count ( '<STR_LIT:$>' ) < <NUM_LIT:2> : <EOL> return False <EOL> method , salt , hashval = pwhash . split ( '<STR_LIT:$>' , <NUM_LIT:2> ) <EOL> return safe_str_cmp ( _hash_internal ( method , salt , password ) [ <NUM_LIT:0> ] , hashval ) <EOL> def safe_join ( directory , filename ) : <EOL> """<STR_LIT>""" <EOL> filename = posixpath . normpath ( filename ) <EOL> for sep in _os_alt_seps : <EOL> if sep in filename : <EOL> return None <EOL> if os . path . isabs ( filename ) or filename . startswith ( '<STR_LIT>' ) : <EOL> return None <EOL> return os . path . join ( directory , filename ) </s>
94,797
import os , sys <EOL> import re <EOL> import logging <EOL> import cPickle <EOL> log = logging <EOL> class _Default ( object ) : pass <EOL> def safe_import ( path ) : <EOL> module = path . split ( '<STR_LIT:.>' ) <EOL> g = __import__ ( module [ <NUM_LIT:0> ] , fromlist = [ '<STR_LIT:*>' ] ) <EOL> s = [ module [ <NUM_LIT:0> ] ] <EOL> for i in module [ <NUM_LIT:1> : ] : <EOL> mod = g <EOL> if hasattr ( mod , i ) : <EOL> g = getattr ( mod , i ) <EOL> else : <EOL> s . append ( i ) <EOL> g = __import__ ( '<STR_LIT:.>' . join ( s ) , fromlist = [ '<STR_LIT:*>' ] ) <EOL> return mod , g <EOL> def import_mod_attr ( path ) : <EOL> """<STR_LIT>""" <EOL> import inspect <EOL> if isinstance ( path , ( str , unicode ) ) : <EOL> v = path . split ( '<STR_LIT::>' ) <EOL> if len ( v ) == <NUM_LIT:1> : <EOL> module , func = path . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> else : <EOL> module , func = v <EOL> mod = __import__ ( module , fromlist = [ '<STR_LIT:*>' ] ) <EOL> f = mod <EOL> for x in func . split ( '<STR_LIT:.>' ) : <EOL> try : <EOL> f = getattr ( f , x )
except :
-3,433,169,458,346,215,400
import os , sys <EOL> import re <EOL> import logging <EOL> import cPickle <EOL> log = logging <EOL> class _Default ( object ) : pass <EOL> def safe_import ( path ) : <EOL> module = path . split ( '<STR_LIT:.>' ) <EOL> g = __import__ ( module [ <NUM_LIT:0> ] , fromlist = [ '<STR_LIT:*>' ] ) <EOL> s = [ module [ <NUM_LIT:0> ] ] <EOL> for i in module [ <NUM_LIT:1> : ] : <EOL> mod = g <EOL> if hasattr ( mod , i ) : <EOL> g = getattr ( mod , i ) <EOL> else : <EOL> s . append ( i ) <EOL> g = __import__ ( '<STR_LIT:.>' . join ( s ) , fromlist = [ '<STR_LIT:*>' ] ) <EOL> return mod , g <EOL> def import_mod_attr ( path ) : <EOL> """<STR_LIT>""" <EOL> import inspect <EOL> if isinstance ( path , ( str , unicode ) ) : <EOL> v = path . split ( '<STR_LIT::>' ) <EOL> if len ( v ) == <NUM_LIT:1> : <EOL> module , func = path . rsplit ( '<STR_LIT:.>' , <NUM_LIT:1> ) <EOL> else : <EOL> module , func = v <EOL> mod = __import__ ( module , fromlist = [ '<STR_LIT:*>' ] ) <EOL> f = mod <EOL> for x in func . split ( '<STR_LIT:.>' ) : <EOL> try : <EOL> f = getattr ( f , x ) <EOL> except : <EOL> raise AttributeError ( "<STR_LIT>" % ( x , path ) ) <EOL> else : <EOL> f = path <EOL> mod = inspect . getmodule ( path ) <EOL> return mod , f <EOL> def import_attr ( func ) : <EOL> mod , f = import_mod_attr ( func ) <EOL> return f <EOL> def myimport ( module ) : <EOL> mod = __import__ ( module , fromlist = [ '<STR_LIT:*>' ] ) <EOL> return mod <EOL> def install ( packages ) : <EOL> from pkg_resources import load_entry_point <EOL> load = load_entry_point ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> load ( packages ) <EOL> class MyPkg ( object ) : <EOL> @ staticmethod <EOL> def resource_filename ( module , path ) : <EOL> mod = myimport ( module ) <EOL> p = os . path . dirname ( mod . __file__ ) <EOL> if path : <EOL> return os . path . join ( p , path ) <EOL> else : <EOL> return p <EOL> @ staticmethod <EOL> def resource_listdir ( module , path ) : <EOL> d = MyPkg . resource_filename ( module , path ) <EOL> return os . listdir ( d ) <EOL> @ staticmethod <EOL> def resource_isdir ( module , path ) : <EOL> d = MyPkg . resource_filename ( module , path ) <EOL> return os . path . isdir ( d ) <EOL> try : <EOL> import pkg_resources as pkg <EOL> except : <EOL> pkg = MyPkg <EOL> def extract_file ( module , path , dist , verbose = False , replace = True ) : <EOL> outf = os . path . join ( dist , os . path . basename ( path ) ) <EOL> import shutil <EOL> inf = pkg . resource_filename ( module , path ) <EOL> sfile = os . path . basename ( inf ) <EOL> if os . path . isdir ( dist ) : <EOL> dfile = os . path . join ( dist , sfile ) <EOL> else : <EOL> dfile = dist <EOL> f = os . path . exists ( dfile ) <EOL> if replace or not f : <EOL> shutil . copy2 ( inf , dfile ) <EOL> if verbose : <EOL> print '<STR_LIT>' % ( inf , dfile ) <EOL> def extract_dirs ( mod , path , dst , verbose = False , exclude = None , exclude_ext = None , recursion = True , replace = True ) : <EOL> """<STR_LIT>""" <EOL> default_exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> default_exclude_ext = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> exclude = exclude or [ ] <EOL> exclude_ext = exclude_ext or [ ] <EOL> if not os . path . exists ( dst ) : <EOL> os . makedirs ( dst ) <EOL> if verbose : <EOL> print '<STR_LIT>' % dst <EOL> for r in pkg . resource_listdir ( mod , path ) : <EOL> if r in exclude or r in default_exclude : <EOL> continue <EOL> fpath = os . path . join ( path , r ) <EOL> if pkg . 
resource_isdir ( mod , fpath ) : <EOL> if recursion : <EOL> extract_dirs ( mod , fpath , os . path . join ( dst , r ) , verbose , exclude , exclude_ext , recursion , replace ) <EOL> else : <EOL> ext = os . path . splitext ( fpath ) [ <NUM_LIT:1> ] <EOL> if ext in exclude_ext or ext in default_exclude_ext : <EOL> continue <EOL> extract_file ( mod , fpath , dst , verbose , replace ) <EOL> def match ( f , patterns ) : <EOL> from fnmatch import fnmatch <EOL> flag = False <EOL> for x in patterns : <EOL> if fnmatch ( f , x ) : <EOL> return True <EOL> def walk_dirs ( path , include = None , include_ext = None , exclude = None , <EOL> exclude_ext = None , recursion = True , file_only = False , use_default_pattern = True ) : <EOL> """<STR_LIT>""" <EOL> default_exclude = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> default_exclude_ext = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> exclude = exclude or [ ] <EOL> exclude_ext = exclude_ext or [ ] <EOL> include_ext = include_ext or [ ] <EOL> include = include or [ ] <EOL> if not os . path . exists ( path ) : <EOL> raise StopIteration <EOL> for r in os . listdir ( path ) : <EOL> if match ( r , exclude ) or ( use_default_pattern and r in default_exclude ) : <EOL> continue <EOL> if include and r not in include : <EOL> continue <EOL> fpath = os . path . join ( path , r ) <EOL> if os . path . isdir ( fpath ) : <EOL> if not file_only : <EOL> yield os . path . normpath ( fpath ) . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> if recursion : <EOL> for f in walk_dirs ( fpath , include , include_ext , exclude , <EOL> exclude_ext , recursion , file_only ) : <EOL> yield os . path . normpath ( f ) . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> else : <EOL> ext = os . path . splitext ( fpath ) [ <NUM_LIT:1> ] <EOL> if ext in exclude_ext or ( use_default_pattern and ext in default_exclude_ext ) : <EOL> continue <EOL> if include_ext and ext not in include_ext : <EOL> continue <EOL> yield os . path . normpath ( fpath ) . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) <EOL> def copy_dir ( src , dst , verbose = False , check = False , processor = None ) : <EOL> import shutil <EOL> def _md5 ( filename ) : <EOL> try : <EOL> import hashlib <EOL> a = hashlib . md5 ( ) <EOL> except ImportError : <EOL> import md5 <EOL> a = md5 . new ( ) <EOL> a . update ( file ( filename , '<STR_LIT:rb>' ) . read ( ) ) <EOL> return a . digest ( ) <EOL> if not os . path . exists ( dst ) : <EOL> os . makedirs ( dst ) <EOL> if verbose : <EOL> print "<STR_LIT>" % src <EOL> for r in os . listdir ( src ) : <EOL> if r in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> continue <EOL> fpath = os . path . join ( src , r ) <EOL> if os . path . isdir ( fpath ) : <EOL> if os . path . abspath ( fpath ) != os . path . abspath ( dst ) : <EOL> copy_dir ( fpath , os . path . join ( dst , r ) , verbose , check , processor ) <EOL> else : <EOL> continue <EOL> else : <EOL> ext = os . path . splitext ( fpath ) [ <NUM_LIT:1> ] <EOL> if ext in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> continue <EOL> df = os . path . join ( dst , r ) <EOL> if check : <EOL> if os . path . exists ( df ) : <EOL> a = _md5 ( fpath ) <EOL> b = _md5 ( df ) <EOL> if a != b : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( fpath , dst ) ) <EOL> else : <EOL> if processor : <EOL> if processor ( fpath , dst , df ) : <EOL> continue <EOL> shutil . 
copy2 ( fpath , dst ) <EOL> if verbose : <EOL> print "<STR_LIT>" % ( fpath , dst ) <EOL> else : <EOL> if processor : <EOL> if processor ( fpath , dst , df ) : <EOL> continue <EOL> shutil . copy2 ( fpath , dst ) <EOL> if verbose : <EOL> print "<STR_LIT>" % ( fpath , dst ) <EOL> def copy_dir_with_check ( dirs , dst , verbose = False , check = True , processor = None ) : <EOL> for d in dirs : <EOL> if not os . path . exists ( d ) : <EOL> continue <EOL> copy_dir ( d , dst , verbose , check , processor ) <EOL> def check_apps_dir ( apps_dir ) : <EOL> log = logging <EOL> if not os . path . exists ( apps_dir ) : <EOL> print >> sys . stderr , "<STR_LIT>" % apps_dir <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> def is_pyfile_exist ( dir , pymodule ) : <EOL> path = os . path . join ( dir , '<STR_LIT>' % pymodule ) <EOL> if not os . path . exists ( path ) : <EOL> path = os . path . join ( dir , '<STR_LIT>' % pymodule ) <EOL> if not os . path . exists ( path ) : <EOL> path = os . path . join ( dir , '<STR_LIT>' % pymodule ) <EOL> if not os . path . exists ( path ) : <EOL> return False <EOL> return True <EOL> def wraps ( src ) : <EOL> def _f ( des ) : <EOL> def f ( * args , ** kwargs ) : <EOL> from uliweb import application <EOL> if application : <EOL> env = application . get_view_env ( ) <EOL> for k , v in env . iteritems ( ) : <EOL> src . func_globals [ k ] = v <EOL> src . func_globals [ '<STR_LIT>' ] = env <EOL> return des ( * args , ** kwargs ) <EOL> f . __name__ = src . __name__ <EOL> f . func_globals . update ( src . func_globals ) <EOL> f . __doc__ = src . __doc__ <EOL> f . __module__ = src . __module__ <EOL> f . __dict__ . update ( src . __dict__ ) <EOL> return f <EOL> return _f <EOL> def timeit ( func ) : <EOL> log = logging . getLogger ( '<STR_LIT>' ) <EOL> import time <EOL> @ wraps ( func ) <EOL> def f ( * args , ** kwargs ) : <EOL> begin = time . time ( ) <EOL> ret = func ( * args , ** kwargs ) <EOL> end = time . time ( ) <EOL> print ( "<STR_LIT>" % ( func . __module__ , func . __name__ , end - begin ) ) <EOL> return ret <EOL> return f <EOL> def safe_unicode ( s , encoding = '<STR_LIT:utf-8>' ) : <EOL> from uliweb . i18n . lazystr import LazyString <EOL> if isinstance ( s , unicode ) : <EOL> return s <EOL> elif isinstance ( s , LazyString ) : <EOL> return unicode ( s ) <EOL> else : <EOL> return unicode ( str ( s ) , encoding ) <EOL> def safe_str ( s , encoding = '<STR_LIT:utf-8>' ) : <EOL> from uliweb . i18n . lazystr import LazyString <EOL> if isinstance ( s , unicode ) : <EOL> return s . encode ( encoding ) <EOL> elif isinstance ( s , LazyString ) : <EOL> return unicode ( s ) . encode ( encoding ) <EOL> else : <EOL> return str ( s ) <EOL> def get_var ( key ) : <EOL> def f ( ) : <EOL> from uliweb import settings <EOL> return settings . get_var ( key ) <EOL> return f <EOL> def get_choice ( choices , value , default = None ) : <EOL> if callable ( choices ) : <EOL> choices = choices ( ) <EOL> return dict ( choices ) . get ( value , default ) <EOL> def simple_value ( v , encoding = '<STR_LIT:utf-8>' , none = False ) : <EOL> import datetime <EOL> import decimal <EOL> if callable ( v ) : <EOL> v = v ( ) <EOL> if isinstance ( v , datetime . datetime ) : <EOL> return v . strftime ( '<STR_LIT>' ) <EOL> elif isinstance ( v , datetime . date ) : <EOL> return v . strftime ( '<STR_LIT>' ) <EOL> elif isinstance ( v , datetime . time ) : <EOL> return v . strftime ( '<STR_LIT>' ) <EOL> elif isinstance ( v , decimal . Decimal ) : <EOL> return str ( v ) <EOL> elif isinstance ( v , unicode ) : <EOL> return v . 
encode ( encoding ) <EOL> elif isinstance ( v , ( tuple , list ) ) : <EOL> s = [ ] <EOL> for x in v : <EOL> s . append ( simple_value ( x , encoding , none ) ) <EOL> return s <EOL> elif isinstance ( v , dict ) : <EOL> d = { } <EOL> for k , v in v . iteritems ( ) : <EOL> d [ simple_value ( k ) ] = simple_value ( v , encoding , none ) <EOL> return d <EOL> elif v is None : <EOL> if none : <EOL> return v <EOL> else : <EOL> return '<STR_LIT>' <EOL> else : <EOL> return v <EOL> re_newline = re . compile ( r'<STR_LIT>' ) <EOL> def str_value ( v , encoding = '<STR_LIT:utf-8>' , bool_int = True , none = '<STR_LIT>' , newline_escape = False ) : <EOL> import datetime <EOL> import decimal <EOL> if callable ( v ) : <EOL> v = v ( ) <EOL> if isinstance ( v , datetime . datetime ) : <EOL> return v . strftime ( '<STR_LIT>' ) <EOL> elif isinstance ( v , datetime . date ) : <EOL> return v . strftime ( '<STR_LIT>' ) <EOL> elif isinstance ( v , datetime . time ) : <EOL> return v . strftime ( '<STR_LIT>' ) <EOL> elif isinstance ( v , decimal . Decimal ) : <EOL> return str ( v ) <EOL> elif isinstance ( v , ( str , unicode ) ) : <EOL> if isinstance ( v , unicode ) : <EOL> v = v . encode ( encoding ) <EOL> if newline_escape : <EOL> v = re_newline . sub ( r'<STR_LIT>' , v ) <EOL> return v <EOL> elif v is None : <EOL> return none <EOL> elif isinstance ( v , bool ) : <EOL> if bool_int : <EOL> if v : <EOL> return '<STR_LIT:1>' <EOL> else : <EOL> return '<STR_LIT:0>' <EOL> else : <EOL> return str ( v ) <EOL> else : <EOL> return str ( v ) <EOL> def norm_path ( path ) : <EOL> return os . path . normpath ( os . path . abspath ( path ) ) <EOL> r_expand_path = re . compile ( '<STR_LIT>' ) <EOL> def expand_path ( path ) : <EOL> """<STR_LIT>""" <EOL> from uliweb import application <EOL> def replace ( m ) : <EOL> txt = m . groups ( ) [ <NUM_LIT:0> ] <EOL> if txt == '<STR_LIT>' : <EOL> return application . apps_dir <EOL> else : <EOL> return pkg . resource_filename ( txt , '<STR_LIT>' ) <EOL> p = re . sub ( r_expand_path , replace , path ) <EOL> return os . path . expandvars ( os . path . expanduser ( path ) ) <EOL> def date_in ( d , dates ) : <EOL> """<STR_LIT>""" <EOL> if not d : <EOL> return False <EOL> return dates [ <NUM_LIT:0> ] <= d <= dates [ <NUM_LIT:1> ] <EOL> class Serial ( object ) : <EOL> """<STR_LIT>""" <EOL> @ classmethod <EOL> def load ( cls , s , protocal = None ) : <EOL> import json <EOL> if not protocal : <EOL> return cPickle . loads ( s ) <EOL> elif protocal == '<STR_LIT>' : <EOL> return json . loads ( s ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % protocal ) <EOL> @ classmethod <EOL> def dump ( cls , v , protocal = None ) : <EOL> from uliweb import json_dumps <EOL> if not protocal : <EOL> return cPickle . dumps ( v , cPickle . HIGHEST_PROTOCOL ) <EOL> elif protocal == '<STR_LIT>' : <EOL> return json_dumps ( v ) <EOL> else : <EOL> raise Exception ( "<STR_LIT>" % protocal ) <EOL> import urlparse <EOL> class QueryString ( object ) : <EOL> def __init__ ( self , url ) : <EOL> self . url = str ( url ) <EOL> self . scheme , self . netloc , self . script_root , qs , self . anchor = self . parse ( ) <EOL> self . qs = urlparse . parse_qs ( qs , True ) <EOL> def parse ( self ) : <EOL> return urlparse . urlsplit ( self . url ) <EOL> def __getitem__ ( self , name ) : <EOL> return self . qs . get ( name , [ ] ) <EOL> def __setitem__ ( self , name , value ) : <EOL> self . qs [ name ] = [ value ] <EOL> def set ( self , name , value , replace = False ) : <EOL> v = self . qs . 
setdefault ( name , [ ] ) <EOL> if replace : <EOL> self . qs [ name ] = [ value ] <EOL> else : <EOL> v . append ( value ) <EOL> return self <EOL> def __str__ ( self ) : <EOL> import urllib <EOL> qs = urllib . urlencode ( self . qs , True ) <EOL> return urlparse . urlunsplit ( ( self . scheme , self . netloc , self . script_root , qs , self . anchor ) ) <EOL> def query_string ( url , replace = True , ** kwargs ) : <EOL> q = QueryString ( url ) <EOL> for k , v in kwargs . items ( ) : <EOL> q . set ( k , v , replace ) <EOL> return str ( q ) <EOL> def camel_to_ ( s ) : <EOL> """<STR_LIT>""" <EOL> s1 = re . sub ( '<STR_LIT>' , r'<STR_LIT>' , s ) <EOL> return re . sub ( '<STR_LIT>' , r'<STR_LIT>' , s1 ) . lower ( ) <EOL> def application_path ( path ) : <EOL> """<STR_LIT>""" <EOL> from uliweb import application <EOL> return os . path . join ( application . project_dir , path ) <EOL> def get_uuid ( type = <NUM_LIT:4> ) : <EOL> """<STR_LIT>""" <EOL> import uuid <EOL> name = '<STR_LIT>' + str ( type ) <EOL> u = getattr ( uuid , name ) <EOL> return u ( ) . hex <EOL> def pretty_dict ( d , leading = '<STR_LIT:U+0020>' , newline = '<STR_LIT:\n>' , indent = <NUM_LIT:0> , tabstop = <NUM_LIT:4> , process = None ) : <EOL> """<STR_LIT>""" <EOL> for k , v in d . items ( ) : <EOL> if process : <EOL> k , v = process ( k , v ) <EOL> if isinstance ( v , dict ) : <EOL> yield '<STR_LIT>' % ( indent * tabstop * leading , simple_value ( k ) , newline ) <EOL> for x in pretty_dict ( v , leading = leading , newline = newline , indent = indent + <NUM_LIT:1> , tabstop = tabstop ) : <EOL> yield x <EOL> continue <EOL> yield '<STR_LIT>' % ( indent * tabstop * leading , simple_value ( k ) , simple_value ( v ) , newline ) <EOL> def request_url ( req = None ) : <EOL> """<STR_LIT>""" <EOL> from uliweb import request <EOL> r = req or request <EOL> if request : <EOL> if r . query_string : <EOL> return r . path + '<STR_LIT:?>' + r . query_string <EOL> else : <EOL> return r . path <EOL> else : <EOL> return '<STR_LIT>' <EOL> def flat_list ( * alist ) : <EOL> """<STR_LIT>""" <EOL> a = [ ] <EOL> for x in alist : <EOL> if x is None : <EOL> continue <EOL> if isinstance ( x , ( tuple , list ) ) : <EOL> a . extend ( [ i for i in x if i is not None ] ) <EOL> else : <EOL> a . append ( x ) <EOL> return a <EOL> def compare_dict ( da , db ) : <EOL> """<STR_LIT>""" <EOL> sa = set ( da . items ( ) ) <EOL> sb = set ( db . items ( ) ) <EOL> diff = sa & sb <EOL> return dict ( sa - diff ) , dict ( sb - diff ) <EOL> def get_caller ( skip = None ) : <EOL> """<STR_LIT>""" <EOL> import inspect <EOL> from fnmatch import fnmatch <EOL> try : <EOL> stack = inspect . stack ( ) <EOL> except : <EOL> stack = [ None , inspect . currentframe ( ) ] <EOL> if len ( stack ) > <NUM_LIT:1> : <EOL> stack . pop ( <NUM_LIT:0> ) <EOL> if skip and not isinstance ( skip , ( list , tuple ) ) : <EOL> skip = [ skip ] <EOL> else : <EOL> skip = [ ] <EOL> ptn = [ os . path . splitext ( s . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) ) [ <NUM_LIT:0> ] for s in skip ] <EOL> for frame in stack : <EOL> if isinstance ( frame , tuple ) : <EOL> filename , funcname , lineno = frame [ <NUM_LIT:1> ] , frame [ <NUM_LIT:3> ] , frame [ <NUM_LIT:2> ] <EOL> else : <EOL> filename , funcname , lineno = frame . f_code . co_filename , frame . f_code . co_name , frame . f_lineno <EOL> del frame <EOL> found = False <EOL> for k in ptn : <EOL> filename = os . path . splitext ( filename . 
replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) ) [ <NUM_LIT:0> ] <EOL> if fnmatch ( filename , k ) : <EOL> found = True <EOL> break <EOL> if not found : <EOL> return filename , lineno , funcname <EOL> class classonlymethod ( classmethod ) : <EOL> """<STR_LIT>""" <EOL> def __get__ ( self , instance , owner ) : <EOL> if instance is not None : <EOL> raise AttributeError ( "<STR_LIT>" ) <EOL> return super ( classonlymethod , self ) . __get__ ( instance , owner ) <EOL> def trim_path ( path , length = <NUM_LIT:30> ) : <EOL> """<STR_LIT>""" <EOL> s = path . replace ( '<STR_LIT:\\>' , '<STR_LIT:/>' ) . split ( '<STR_LIT:/>' ) <EOL> t = - <NUM_LIT:1> <EOL> for i in range ( len ( s ) - <NUM_LIT:1> , - <NUM_LIT:1> , - <NUM_LIT:1> ) : <EOL> t = len ( s [ i ] ) + t + <NUM_LIT:1> <EOL> if t > length - <NUM_LIT:4> : <EOL> break <EOL> return '<STR_LIT>' + '<STR_LIT:/>' . join ( s [ i + <NUM_LIT:1> : ] ) <EOL> class cached_property ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , func ) : <EOL> self . value = _Default <EOL> self . func = func <EOL> def __get__ ( self , obj , type = None ) : <EOL> value = self . value <EOL> if self . value is _Default : <EOL> value = self . func ( type ) <EOL> self . value = value <EOL> return value <EOL> def get_temppath ( prefix , suffix = '<STR_LIT>' , dir = '<STR_LIT>' ) : <EOL> import tempfile <EOL> return tempfile . mkdtemp ( suffix = suffix , prefix = prefix , dir = dir ) <EOL> def get_tempfilename2 ( prefix , suffix = '<STR_LIT>' , dir = '<STR_LIT>' ) : <EOL> import tempfile <EOL> return tempfile . mkstemp ( suffix = suffix , prefix = prefix , dir = dir ) <EOL> def get_tempfilename ( prefix , suffix = '<STR_LIT>' , dir = '<STR_LIT>' ) : <EOL> return get_tempfilename2 ( prefix , suffix , dir ) [ <NUM_LIT:1> ] <EOL> def get_configrable_object ( key , section , cls = None ) : <EOL> """<STR_LIT>""" <EOL> from uliweb import UliwebError , settings <EOL> import inspect <EOL> if inspect . isclass ( key ) and cls and issubclass ( key , cls ) : <EOL> return key <EOL> elif isinstance ( key , ( str , unicode ) ) : <EOL> path = settings [ section ] . get ( key ) <EOL> if path : <EOL> _cls = import_attr ( path ) <EOL> return _cls <EOL> else : <EOL> raise UliwebError ( "<STR_LIT>" % section ) <EOL> else : <EOL> raise UliwebError ( "<STR_LIT>" % ( key , cls ) ) <EOL> def format_size ( size ) : <EOL> """<STR_LIT>""" <EOL> units = [ '<STR_LIT:B>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> unit = '<STR_LIT>' <EOL> n = size <EOL> old_n = n <EOL> value = size <EOL> for i in units : <EOL> old_n = n <EOL> x , y = divmod ( n , <NUM_LIT> ) <EOL> if x == <NUM_LIT:0> : <EOL> unit = i <EOL> value = y <EOL> break <EOL> n = x <EOL> unit = i <EOL> value = old_n <EOL> return str ( value ) + unit <EOL> def convert_bytes ( n ) : <EOL> """<STR_LIT>""" <EOL> symbols = ( '<STR_LIT>' , '<STR_LIT:M>' , '<STR_LIT>' , '<STR_LIT:T>' , '<STR_LIT:P>' , '<STR_LIT:E>' , '<STR_LIT>' , '<STR_LIT:Y>' ) <EOL> prefix = { } <EOL> for i , s in enumerate ( symbols ) : <EOL> prefix [ s ] = <NUM_LIT:1> << ( i + <NUM_LIT:1> ) * <NUM_LIT:10> <EOL> for s in reversed ( symbols ) : <EOL> if n >= prefix [ s ] : <EOL> value = float ( n ) / prefix [ s ] <EOL> return '<STR_LIT>' % ( value , s ) <EOL> return "<STR_LIT>" % n </s>
94,798
VERSION = ( <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:0> , "<STR_LIT:a>" , <NUM_LIT:1> ) <EOL> DEV_N = <NUM_LIT:1> <EOL> def get_version ( short = False ) : <EOL> version = "<STR_LIT>" % ( VERSION [ <NUM_LIT:0> ] , VERSION [ <NUM_LIT:1> ] ) <EOL> if short : <EOL> return version <EOL> if VERSION [ <NUM_LIT:2> ] :
version = "<STR_LIT>" % ( version , VERSION [ <NUM_LIT:2> ] )
6,698,038,780,196,176,000
VERSION = ( <NUM_LIT:0> , <NUM_LIT:5> , <NUM_LIT:0> , "<STR_LIT:a>" , <NUM_LIT:1> ) <EOL> DEV_N = <NUM_LIT:1> <EOL> def get_version ( short = False ) : <EOL> version = "<STR_LIT>" % ( VERSION [ <NUM_LIT:0> ] , VERSION [ <NUM_LIT:1> ] ) <EOL> if short : <EOL> return version <EOL> if VERSION [ <NUM_LIT:2> ] : <EOL> version = "<STR_LIT>" % ( version , VERSION [ <NUM_LIT:2> ] ) <EOL> if VERSION [ <NUM_LIT:3> ] != "<STR_LIT:f>" : <EOL> version = "<STR_LIT>" % ( version , VERSION [ <NUM_LIT:3> ] , VERSION [ <NUM_LIT:4> ] ) <EOL> if DEV_N : <EOL> version = "<STR_LIT>" % ( version , DEV_N ) <EOL> return version <EOL> __version__ = get_version ( ) </s>
94,799
from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __import__ ( '<STR_LIT>' ) . __version__ , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' ] ) ,
include_package_data = True ,
2,950,377,968,105,358,300
from setuptools import setup , find_packages <EOL> setup ( <EOL> name = '<STR_LIT>' , <EOL> version = __import__ ( '<STR_LIT>' ) . __version__ , <EOL> description = '<STR_LIT>' , <EOL> long_description = open ( '<STR_LIT>' ) . read ( ) , <EOL> author = '<STR_LIT>' , <EOL> author_email = '<STR_LIT>' , <EOL> url = '<STR_LIT>' , <EOL> download_url = '<STR_LIT>' , <EOL> license = '<STR_LIT>' , <EOL> packages = find_packages ( exclude = [ '<STR_LIT>' ] ) , <EOL> include_package_data = True , <EOL> zip_safe = False , <EOL> install_requires = [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> classifiers = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] , <EOL> ) </s>