code
stringlengths
9
256k
<s> import os <EOL> import shutil <EOL> import sys <EOL> import argparse <EOL> import time <EOL> import re <EOL> import random <EOL> from decimal import Decimal <EOL> from operator import itemgetter <EOL> from PIL import Image , ImageDraw , ImageFont <EOL> sys . path . append ( os . environ . get ( '<STR_LIT>' ) ) <EOL> CAFFE_HOME = os . environ . get ( "<STR_LIT>" ) <EOL> sys . path . append ( CAFFE_HOME ) <EOL> os . environ [ '<STR_LIT>' ] = '<STR_LIT:1>' <EOL> from selective_search import * <EOL> import features <EOL> from skimage . transform import resize <EOL> import caffe <EOL> import numpy as np <EOL> import simplejson as json <EOL> def parse_command_line ( ) : <EOL> parser = argparse . ArgumentParser ( <EOL> description = """<STR_LIT>""" ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:3> ) <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> type = int , <EOL> help = "<STR_LIT>" , <EOL> default = <NUM_LIT:16> <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT:-c>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> default = "<STR_LIT>" <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> type = int , <EOL> default = <NUM_LIT:3> <EOL> ) <EOL> parser . 
add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> type = float , <EOL> default = <NUM_LIT> <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> default = True <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> help = "<STR_LIT>" , <EOL> type = int , <EOL> default = <NUM_LIT:100> <EOL> ) <EOL> parser . add_argument ( <EOL> "<STR_LIT>" , <EOL> help = """<STR_LIT>""" , <EOL> type = int , <EOL> default = None <EOL> ) <EOL> args = parser . parse_args ( ) <EOL> if os . environ . get ( "<STR_LIT>" ) == None : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> exit ( <NUM_LIT:1> ) <EOL> if os . environ . get ( "<STR_LIT>" ) == None : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> exit ( <NUM_LIT:1> ) <EOL> return args <EOL> def gen_regions ( image , dims , pad , ks ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> assert ( len ( dims ) == <NUM_LIT:3> ) <EOL> regions = selective_search ( image , ks = [ ks ] , feature_masks = [ features . SimilarityMask ( <EOL> size = <NUM_LIT:1> , <EOL> color = <NUM_LIT:1> , <EOL> texture = <NUM_LIT:1> , <EOL> fill = <NUM_LIT:1> , <EOL> ) ] ) <EOL> crops = [ ] <EOL> for conf , ( y0 , x0 , y1 , x1 ) in regions : <EOL> if x0 - pad >= <NUM_LIT:0> : <EOL> x0 = x0 - pad <EOL> if y0 - pad >= <NUM_LIT:0> : <EOL> y0 = y0 - pad <EOL> if x1 + pad <= dims [ <NUM_LIT:0> ] : <EOL> x1 = x1 + pad <EOL> if y1 + pad <= dims [ <NUM_LIT:0> ] : <EOL> y1 = y1 + pad <EOL> region = image [ y0 : y1 , x0 : x1 , : ] <EOL> candidate = resize ( region , dims ) <EOL> crops . append ( ( conf , candidate , region , ( x0 , y0 , x1 , y1 ) ) ) <EOL> print "<STR_LIT>" . format ( len ( crops ) ) <EOL> return crops <EOL> def get_region_filename ( idx ) : <EOL> """<STR_LIT>""" <EOL> return "<STR_LIT>" % idx <EOL> def dump_regions ( crops ) : <EOL> """<STR_LIT>""" <EOL> shutil . 
rmtree ( "<STR_LIT>" , ignore_errors = True ) <EOL> os . makedirs ( "<STR_LIT>" ) <EOL> for idx , img in enumerate ( crops ) : <EOL> fname = get_region_filename ( idx ) <EOL> skimage . io . imsave ( fname , img [ <NUM_LIT:2> ] ) <EOL> print "<STR_LIT>" <EOL> def classify ( images , config , weights ) : <EOL> """<STR_LIT>""" <EOL> print ( "<STR_LIT>" % len ( images ) ) <EOL> assert ( os . path . isfile ( config ) and os . path . isfile ( weights ) ) <EOL> channel_swap = [ <NUM_LIT:2> , <NUM_LIT:1> , <NUM_LIT:0> ] <EOL> classifier = caffe . Classifier ( config , <EOL> weights , <EOL> raw_scale = <NUM_LIT:255> , <EOL> channel_swap = channel_swap , <EOL> ) <EOL> return classifier . predict ( images , oversample = False ) <EOL> def load_classes ( class_file ) : <EOL> classes = { } <EOL> if os . path . isfile ( class_file ) : <EOL> f = open ( class_file , '<STR_LIT:r>' ) <EOL> for line in f : <EOL> key = int ( line . split ( "<STR_LIT:U+0020>" ) [ <NUM_LIT:0> ] ) <EOL> value = line . split ( "<STR_LIT:U+0020>" , <NUM_LIT:1> ) [ <NUM_LIT:1> ] . strip ( '<STR_LIT:\n>' ) <EOL> classes [ key ] = value <EOL> return classes <EOL> def sort_predictions ( classes , predictions , bboxes ) : <EOL> """<STR_LIT>""" <EOL> results = [ ] <EOL> for idx , pred in enumerate ( predictions ) : <EOL> results . append ( { <EOL> "<STR_LIT>" : np . argmax ( pred ) , <EOL> "<STR_LIT:class>" : classes [ np . argmax ( pred ) ] , <EOL> "<STR_LIT>" : pred [ np . argmax ( pred ) ] , <EOL> "<STR_LIT>" : get_region_filename ( idx ) , <EOL> "<STR_LIT>" : bboxes [ idx ] , <EOL> } ) <EOL> results . 
sort ( key = itemgetter ( "<STR_LIT>" ) , reverse = True ) <EOL> return results <EOL> def filter_predictions ( predictions , max_regions , threshold ) : <EOL> """<STR_LIT>""" <EOL> results = [ entry for entry in predictions if entry [ "<STR_LIT>" ] >= threshold ] <EOL> results = results [ <NUM_LIT:0> : max_regions ] <EOL> return results <EOL> def print_predictions ( classes , predictions ) : <EOL> """<STR_LIT>""" <EOL> print "<STR_LIT>" <EOL> for idx , pred in enumerate ( predictions ) : <EOL> print ( "<STR_LIT>" . format ( <EOL> predictions [ idx ] [ "<STR_LIT>" ] , <EOL> predictions [ idx ] [ "<STR_LIT:class>" ] , <EOL> predictions [ idx ] [ "<STR_LIT>" ] , <EOL> predictions [ idx ] [ "<STR_LIT>" ] , <EOL> ) ) <EOL> def draw_bounding_boxes ( image_path , image , classes , predictions , only_for_class = None ) : <EOL> image = Image . fromarray ( numpy . uint8 ( image ) ) <EOL> dr = ImageDraw . Draw ( image , "<STR_LIT>" ) <EOL> colors = { } <EOL> for idx , pred in enumerate ( predictions ) : <EOL> x0 , y0 , x1 , y1 = pred [ "<STR_LIT>" ] <EOL> color = ( <NUM_LIT:255> , <NUM_LIT:255> , <NUM_LIT:0> , <NUM_LIT> ) <EOL> if not only_for_class : <EOL> class_idx = pred [ "<STR_LIT>" ] <EOL> color = ( random . randint ( <NUM_LIT:0> , <NUM_LIT:255> ) , random . randint ( <NUM_LIT:0> , <NUM_LIT:255> ) , random . randint ( <NUM_LIT:0> , <NUM_LIT:255> ) ) <EOL> if class_idx in colors : <EOL> color = colors [ class_idx ] <EOL> colors [ class_idx ] = color <EOL> dr . rectangle ( ( ( x0 , y0 ) , ( x1 , y1 ) ) , fill = color ) <EOL> if not only_for_class : <EOL> dr . text ( ( x0 , y0 ) , pred [ "<STR_LIT:class>" ] , fill = color ) <EOL> filename = os . path . splitext ( image_path ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> image . save ( filename ) <EOL> print "<STR_LIT>" % filename <EOL> def dump_bounding_box_info ( image_path , predictions ) : <EOL> """<STR_LIT>""" <EOL> filename = os . path . 
splitext ( image_path ) [ <NUM_LIT:0> ] + "<STR_LIT>" <EOL> for entry in predictions : <EOL> entry [ "<STR_LIT>" ] = Decimal ( "<STR_LIT>" % entry [ "<STR_LIT>" ] ) <EOL> with open ( filename , "<STR_LIT:w>" ) as f : <EOL> f . write ( json . dumps ( predictions , use_decimal = True , indent = <NUM_LIT:4> , separators = ( '<STR_LIT:U+002C>' , '<STR_LIT>' ) ) ) <EOL> print "<STR_LIT>" % filename <EOL> def main ( argv ) : <EOL> args = parse_command_line ( ) <EOL> image_path = os . path . abspath ( args . image ) <EOL> image = skimage . io . imread ( image_path ) <EOL> crops = gen_regions ( image , args . dimension , args . pad , args . ks ) <EOL> if args . dump_regions : <EOL> dump_regions ( crops ) <EOL> images = [ entry [ <NUM_LIT:1> ] for entry in crops ] <EOL> classes = load_classes ( args . classes ) <EOL> config = os . path . abspath ( args . config ) <EOL> weights = os . path . abspath ( args . weights ) <EOL> predictions = classify ( images , config , weights ) <EOL> bboxes = [ entry [ <NUM_LIT:3> ] for entry in crops ] <EOL> predictions = sort_predictions ( classes , predictions , bboxes ) <EOL> predictions = filter_predictions ( predictions , args . max_regions , args . threshold ) <EOL> print_predictions ( classes , predictions ) <EOL> draw_bounding_boxes ( image_path , image , classes , predictions , args . only_for_class ) <EOL> dump_bounding_box_info ( image_path , predictions ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( sys . argv ) </s>
<s> import os <EOL> import sys <EOL> import optparse <EOL> import configobj <EOL> import traceback <EOL> import tempfile <EOL> sys . path . append ( os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT:src>' ) ) ) <EOL> def getIncludePaths ( path ) : <EOL> for f in os . listdir ( path ) : <EOL> cPath = os . path . abspath ( os . path . join ( path , f ) ) <EOL> if os . path . isfile ( cPath ) and len ( f ) > <NUM_LIT:3> and f . endswith ( '<STR_LIT>' ) : <EOL> sys . path . append ( os . path . dirname ( cPath ) ) <EOL> elif os . path . isdir ( cPath ) : <EOL> getIncludePaths ( cPath ) <EOL> collectors = { } <EOL> def getCollectors ( path ) : <EOL> for f in os . listdir ( path ) : <EOL> cPath = os . path . abspath ( os . path . join ( path , f ) ) <EOL> if os . path . isfile ( cPath ) and len ( f ) > <NUM_LIT:3> and f . endswith ( '<STR_LIT>' ) : <EOL> modname = f [ : - <NUM_LIT:3> ] <EOL> if modname . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> if modname . startswith ( '<STR_LIT:test>' ) : <EOL> continue <EOL> try : <EOL> module = __import__ ( modname , globals ( ) , locals ( ) , [ '<STR_LIT:*>' ] ) <EOL> for attr in dir ( module ) : <EOL> if not attr . endswith ( '<STR_LIT>' ) : <EOL> continue <EOL> cls = getattr ( module , attr ) <EOL> if cls . __name__ not in collectors : <EOL> collectors [ cls . __name__ ] = module <EOL> except Exception : <EOL> print "<STR_LIT>" % ( <EOL> modname , traceback . format_exc ( ) ) <EOL> collectors [ modname ] = False <EOL> elif os . path . isdir ( cPath ) : <EOL> getCollectors ( cPath ) <EOL> handlers = { } <EOL> def getHandlers ( path ) : <EOL> for f in os . listdir ( path ) : <EOL> cPath = os . path . abspath ( os . path . join ( path , f ) ) <EOL> if os . path . isfile ( cPath ) and len ( f ) > <NUM_LIT:3> and f . 
endswith ( '<STR_LIT>' ) : <EOL> modname = f [ : - <NUM_LIT:3> ] <EOL> try : <EOL> module = __import__ ( modname , globals ( ) , locals ( ) , [ '<STR_LIT:*>' ] ) <EOL> for attr in dir ( module ) : <EOL> if ( not attr . endswith ( '<STR_LIT>' ) <EOL> or attr . startswith ( '<STR_LIT>' ) ) : <EOL> continue <EOL> cls = getattr ( module , attr ) <EOL> if cls . __name__ not in handlers : <EOL> handlers [ cls . __name__ ] = module <EOL> except Exception : <EOL> print "<STR_LIT>" % ( <EOL> modname , traceback . format_exc ( ) ) <EOL> handlers [ modname ] = False <EOL> elif os . path . isdir ( cPath ) : <EOL> getHandlers ( cPath ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> parser = optparse . OptionParser ( ) <EOL> parser . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = None , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> dest = "<STR_LIT>" , <EOL> default = False , <EOL> help = "<STR_LIT>" ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> if os . path . exists ( options . configfile ) : <EOL> config = configobj . ConfigObj ( os . path . abspath ( options . configfile ) ) <EOL> else : <EOL> print >> sys . stderr , "<STR_LIT>" % ( <EOL> options . configfile ) <EOL> print >> sys . stderr , ( "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> parser . print_help ( sys . stderr ) <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> collector_path = config [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> docs_path = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> handler_path = os . path . abspath ( os . path . join ( os . path . dirname ( __file__ ) , <EOL> '<STR_LIT:src>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> getIncludePaths ( collector_path ) <EOL> getCollectors ( os . path . 
join ( collector_path , '<STR_LIT>' ) ) <EOL> getCollectors ( collector_path ) <EOL> collectorIndexFile = open ( os . path . join ( docs_path , "<STR_LIT>" ) , '<STR_LIT:w>' ) <EOL> collectorIndexFile . write ( "<STR_LIT>" ) <EOL> collectorIndexFile . write ( "<STR_LIT:\n>" ) <EOL> for collector in sorted ( collectors . iterkeys ( ) ) : <EOL> if collector == "<STR_LIT>" : <EOL> continue <EOL> if collector . startswith ( '<STR_LIT>' ) : <EOL> continue <EOL> print "<STR_LIT>" % ( collector ) <EOL> if not hasattr ( collectors [ collector ] , collector ) : <EOL> continue <EOL> cls = getattr ( collectors [ collector ] , collector ) <EOL> obj = cls ( config = config , handlers = { } ) <EOL> options = obj . get_default_config_help ( ) <EOL> defaultOptions = obj . get_default_config ( ) <EOL> docFile = open ( os . path . join ( docs_path , <EOL> "<STR_LIT>" + collector + "<STR_LIT>" ) , '<STR_LIT:w>' ) <EOL> enabled = '<STR_LIT>' <EOL> collectorIndexFile . write ( "<STR_LIT>" % ( collector , <EOL> collector , <EOL> enabled ) ) <EOL> docFile . write ( "<STR_LIT>" % ( collector ) ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> if collectors [ collector ] . __doc__ is None : <EOL> print "<STR_LIT>" <EOL> docFile . write ( "<STR_LIT>" % ( collectors [ collector ] . __doc__ ) ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> for option in sorted ( options . keys ( ) ) : <EOL> defaultOption = '<STR_LIT>' <EOL> defaultOptionType = '<STR_LIT>' <EOL> if option in defaultOptions : <EOL> defaultOptionType = defaultOptions [ option ] . __class__ . __name__ <EOL> if isinstance ( defaultOptions [ option ] , list ) : <EOL> defaultOption = '<STR_LIT:U+002CU+0020>' . 
join ( map ( str , defaultOptions [ option ] ) ) <EOL> defaultOption += '<STR_LIT:U+002C>' <EOL> else : <EOL> defaultOption = str ( defaultOptions [ option ] ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" % ( option ) ) <EOL> docFile . write ( "<STR_LIT>" % ( defaultOption ) ) <EOL> docFile . write ( "<STR_LIT>" % ( options [ option ] . replace ( <EOL> "<STR_LIT:\n>" , '<STR_LIT>' ) ) ) <EOL> docFile . write ( "<STR_LIT>" % ( defaultOptionType ) ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . close ( ) <EOL> collectorIndexFile . close ( ) <EOL> getIncludePaths ( handler_path ) <EOL> getHandlers ( handler_path ) <EOL> handlerIndexFile = open ( os . path . join ( docs_path , "<STR_LIT>" ) , '<STR_LIT:w>' ) <EOL> handlerIndexFile . write ( "<STR_LIT>" ) <EOL> handlerIndexFile . write ( "<STR_LIT:\n>" ) <EOL> for handler in sorted ( handlers . iterkeys ( ) ) : <EOL> if handler == "<STR_LIT>" : <EOL> continue <EOL> if handler [ <NUM_LIT:0> : <NUM_LIT:4> ] == "<STR_LIT>" : <EOL> continue <EOL> print "<STR_LIT>" % ( handler ) <EOL> if not hasattr ( handlers [ handler ] , handler ) : <EOL> continue <EOL> cls = getattr ( handlers [ handler ] , handler ) <EOL> tmpfile = tempfile . mkstemp ( ) <EOL> options = None <EOL> defaultOptions = None <EOL> try : <EOL> obj = cls ( { <EOL> '<STR_LIT>' : tmpfile [ <NUM_LIT:1> ] , <EOL> } ) <EOL> options = obj . get_default_config_help ( ) <EOL> defaultOptions = obj . get_default_config ( ) <EOL> except Exception , e : <EOL> print "<STR_LIT>" % e <EOL> os . 
remove ( tmpfile [ <NUM_LIT:1> ] ) <EOL> docFile = open ( os . path . join ( docs_path , <EOL> "<STR_LIT>" + handler + "<STR_LIT>" ) , '<STR_LIT:w>' ) <EOL> handlerIndexFile . write ( "<STR_LIT>" % ( handler , handler ) ) <EOL> docFile . write ( "<STR_LIT>" % ( handler ) ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:%s>" % ( handlers [ handler ] . __doc__ ) ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> if options : <EOL> for option in sorted ( options . keys ( ) ) : <EOL> defaultOption = '<STR_LIT>' <EOL> defaultOptionType = '<STR_LIT>' <EOL> if option in defaultOptions : <EOL> defaultOptionType = defaultOptions [ <EOL> option ] . __class__ . __name__ <EOL> if isinstance ( defaultOptions [ option ] , list ) : <EOL> defaultOption = '<STR_LIT:U+002CU+0020>' . join ( map ( str , <EOL> defaultOptions [ option ] ) ) <EOL> defaultOption += '<STR_LIT:U+002C>' <EOL> else : <EOL> defaultOption = str ( defaultOptions [ option ] ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" % ( option ) ) <EOL> docFile . write ( "<STR_LIT>" % ( defaultOption ) ) <EOL> docFile . write ( "<STR_LIT>" % ( options [ option ] . replace ( <EOL> "<STR_LIT:\n>" , '<STR_LIT>' ) ) ) <EOL> docFile . write ( "<STR_LIT>" % ( defaultOptionType ) ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT>" ) <EOL> docFile . write ( "<STR_LIT:\n>" ) <EOL> docFile . close ( ) <EOL> handlerIndexFile . close ( ) </s>
<s> """<STR_LIT>""" <EOL> import diamond . collector <EOL> import diamond . convertor <EOL> import time <EOL> import os <EOL> import re <EOL> try : <EOL> import psutil <EOL> except ImportError : <EOL> psutil = None <EOL> class DiskUsageCollector ( diamond . collector . Collector ) : <EOL> MAX_VALUES = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> <EOL> } <EOL> LastCollectTime = None <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( DiskUsageCollector , self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT>' : "<STR_LIT>" <EOL> + "<STR_LIT>" , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( DiskUsageCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : ( '<STR_LIT>' <EOL> + '<STR_LIT>' <EOL> + '<STR_LIT>' <EOL> + '<STR_LIT>' <EOL> + '<STR_LIT>' <EOL> + '<STR_LIT>' ) , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : False , <EOL> } ) <EOL> return config <EOL> def get_disk_statistics ( self ) : <EOL> """<STR_LIT>""" <EOL> result = { } <EOL> if os . access ( '<STR_LIT>' , os . R_OK ) : <EOL> self . proc_diskstats = True <EOL> fp = open ( '<STR_LIT>' ) <EOL> try : <EOL> for line in fp : <EOL> try : <EOL> columns = line . split ( ) <EOL> if len ( columns ) < <NUM_LIT> : <EOL> continue <EOL> major = int ( columns [ <NUM_LIT:0> ] ) <EOL> minor = int ( columns [ <NUM_LIT:1> ] ) <EOL> device = columns [ <NUM_LIT:2> ] <EOL> if ( device . startswith ( '<STR_LIT>' ) <EOL> or device . 
startswith ( '<STR_LIT>' ) ) : <EOL> continue <EOL> result [ ( major , minor ) ] = { <EOL> '<STR_LIT>' : device , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:3> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:4> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:5> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:6> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:7> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:8> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:9> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:10> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:11> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT:12> ] ) , <EOL> '<STR_LIT>' : float ( columns [ <NUM_LIT> ] ) <EOL> } <EOL> except ValueError : <EOL> continue <EOL> finally : <EOL> fp . close ( ) <EOL> else : <EOL> self . proc_diskstats = False <EOL> if not psutil : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> return None <EOL> disks = psutil . disk_io_counters ( True ) <EOL> for disk in disks : <EOL> result [ ( <NUM_LIT:0> , len ( result ) ) ] = { <EOL> '<STR_LIT>' : disk , <EOL> '<STR_LIT>' : disks [ disk ] . read_count , <EOL> '<STR_LIT>' : ( disks [ disk ] . read_bytes <EOL> / int ( self . config [ '<STR_LIT>' ] ) ) , <EOL> '<STR_LIT>' : disks [ disk ] . read_time , <EOL> '<STR_LIT>' : disks [ disk ] . write_count , <EOL> '<STR_LIT>' : ( disks [ disk ] . write_bytes <EOL> / int ( self . config [ '<STR_LIT>' ] ) ) , <EOL> '<STR_LIT>' : disks [ disk ] . write_time , <EOL> '<STR_LIT>' : <EOL> disks [ disk ] . read_time + disks [ disk ] . write_time , <EOL> '<STR_LIT>' : <EOL> disks [ disk ] . read_time + disks [ disk ] . write_time <EOL> } <EOL> return result <EOL> def collect ( self ) : <EOL> CollectTime = time . time ( ) <EOL> time_delta = float ( self . config [ '<STR_LIT>' ] ) <EOL> if self . LastCollectTime : <EOL> time_delta = CollectTime - self . LastCollectTime <EOL> if not time_delta : <EOL> time_delta = float ( self . 
config [ '<STR_LIT>' ] ) <EOL> self . LastCollectTime = CollectTime <EOL> exp = self . config [ '<STR_LIT>' ] <EOL> reg = re . compile ( exp ) <EOL> results = self . get_disk_statistics ( ) <EOL> if not results : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> return None <EOL> for key , info in results . iteritems ( ) : <EOL> metrics = { } <EOL> name = info [ '<STR_LIT>' ] <EOL> if not reg . match ( name ) : <EOL> continue <EOL> for key , value in info . iteritems ( ) : <EOL> if key == '<STR_LIT>' : <EOL> continue <EOL> oldkey = key <EOL> for unit in self . config [ '<STR_LIT>' ] : <EOL> key = oldkey <EOL> if key . endswith ( '<STR_LIT>' ) : <EOL> key = key . replace ( '<STR_LIT>' , unit ) <EOL> value /= ( <NUM_LIT> / int ( self . config [ '<STR_LIT>' ] ) ) <EOL> value = diamond . convertor . binary . convert ( value = value , <EOL> oldUnit = '<STR_LIT>' , <EOL> newUnit = unit ) <EOL> self . MAX_VALUES [ key ] = diamond . convertor . binary . convert ( <EOL> value = diamond . collector . MAX_COUNTER , <EOL> oldUnit = '<STR_LIT>' , <EOL> newUnit = unit ) <EOL> metric_name = '<STR_LIT:.>' . join ( [ info [ '<STR_LIT>' ] , key ] ) <EOL> if key != '<STR_LIT>' : <EOL> metric_value = self . derivative ( <EOL> metric_name , <EOL> value , <EOL> self . MAX_VALUES [ key ] , <EOL> time_delta = False ) <EOL> else : <EOL> metric_value = value <EOL> metrics [ key ] = metric_value <EOL> if self . proc_diskstats : <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] / time_delta ) <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] / time_delta ) <EOL> metrics [ '<STR_LIT>' ] = metrics [ '<STR_LIT>' ] / time_delta <EOL> metrics [ '<STR_LIT>' ] = metrics [ '<STR_LIT>' ] / time_delta <EOL> for unit in self . 
config [ '<STR_LIT>' ] : <EOL> metric_name = '<STR_LIT>' % unit <EOL> key = '<STR_LIT>' % unit <EOL> metrics [ metric_name ] = metrics [ key ] / time_delta <EOL> metric_name = '<STR_LIT>' % unit <EOL> key = '<STR_LIT>' % unit <EOL> metrics [ metric_name ] = metrics [ key ] / time_delta <EOL> metric_name = '<STR_LIT>' % unit <EOL> metrics [ metric_name ] = <NUM_LIT:0> <EOL> metrics [ '<STR_LIT>' ] = metrics [ '<STR_LIT>' ] + metrics [ '<STR_LIT>' ] <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] <EOL> / time_delta <EOL> / <NUM_LIT> ) <EOL> metrics [ '<STR_LIT>' ] = ( metrics [ '<STR_LIT>' ] <EOL> / time_delta <EOL> / <NUM_LIT> ) <EOL> if metrics [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] / metrics [ '<STR_LIT>' ] ) <EOL> else : <EOL> metrics [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> if metrics [ '<STR_LIT>' ] > <NUM_LIT:0> : <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] / metrics [ '<STR_LIT>' ] ) <EOL> else : <EOL> metrics [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> for unit in self . 
config [ '<STR_LIT>' ] : <EOL> rkey = '<STR_LIT>' % unit <EOL> wkey = '<STR_LIT>' % unit <EOL> metric_name = '<STR_LIT>' % unit <EOL> if ( metrics [ '<STR_LIT>' ] > <NUM_LIT:0> ) : <EOL> metrics [ metric_name ] = ( <EOL> metrics [ rkey ] + metrics [ wkey ] ) / metrics [ '<STR_LIT>' ] <EOL> else : <EOL> metrics [ metric_name ] = <NUM_LIT:0> <EOL> metrics [ '<STR_LIT>' ] = metrics [ '<STR_LIT>' ] / time_delta <EOL> if ( metrics [ '<STR_LIT>' ] > <NUM_LIT:0> ) : <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] / metrics [ '<STR_LIT>' ] ) <EOL> metrics [ '<STR_LIT>' ] = ( <EOL> metrics [ '<STR_LIT>' ] <EOL> + metrics [ '<STR_LIT>' ] ) / metrics [ '<STR_LIT>' ] <EOL> else : <EOL> metrics [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> metrics [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> metrics [ '<STR_LIT>' ] = ( metrics [ '<STR_LIT>' ] <EOL> + metrics [ '<STR_LIT>' ] <EOL> ) * ( metrics [ '<STR_LIT>' ] <EOL> / <NUM_LIT> ) <EOL> if ( metrics [ '<STR_LIT>' ] > <NUM_LIT:0> or self . config [ '<STR_LIT>' ] ) : <EOL> for key in metrics : <EOL> metric_name = '<STR_LIT:.>' . join ( [ info [ '<STR_LIT>' ] , key ] ) . replace ( <EOL> '<STR_LIT:/>' , '<STR_LIT:_>' ) <EOL> self . publish ( metric_name , metrics [ key ] ) </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from mock import patch <EOL> import os <EOL> from diamond . collector import Collector <EOL> from gridengine import GridEngineCollector <EOL> class TestGridEngineCollector ( CollectorTestCase ) : <EOL> """<STR_LIT>""" <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { } ) <EOL> self . collector = GridEngineCollector ( config , None ) <EOL> self . fixtures_dir = os . path . abspath ( os . path . join ( <EOL> os . path . dirname ( __file__ ) , '<STR_LIT>' ) ) <EOL> def test_import ( self ) : <EOL> """<STR_LIT>""" <EOL> self . assertTrue ( GridEngineCollector ) <EOL> @ patch . object ( GridEngineCollector , '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_queue_stats_should_work_with_real_data ( <EOL> self , publish_mock , xml_mock ) : <EOL> """<STR_LIT>""" <EOL> xml_mock . return_value = self . getFixture ( '<STR_LIT>' ) . getvalue ( ) <EOL> self . collector . _collect_queue_stats ( ) <EOL> published_metrics = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:0> <EOL> } <EOL> self . assertPublishedMany ( publish_mock , published_metrics ) <EOL> @ patch . object ( GridEngineCollector , '<STR_LIT>' ) <EOL> @ patch . 
object ( Collector , '<STR_LIT>' ) <EOL> def test_707 ( <EOL> self , publish_mock , xml_mock ) : <EOL> """<STR_LIT>""" <EOL> xml_mock . return_value = self . getFixture ( '<STR_LIT>' ) . getvalue ( ) <EOL> self . collector . _collect_queue_stats ( ) </s>
<s> """<STR_LIT>""" <EOL> import threading <EOL> import re <EOL> import Queue <EOL> import diamond . collector <EOL> import diamond . metric <EOL> import collectd_network <EOL> ALIVE = True <EOL> class JCollectdCollector ( diamond . collector . Collector ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( JCollectdCollector , self ) . __init__ ( * args , ** kwargs ) <EOL> self . listener_thread = None <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( JCollectdCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT:127.0.0.1>' , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> } ) <EOL> return config <EOL> def collect ( self ) : <EOL> if not self . listener_thread : <EOL> self . start_listener ( ) <EOL> q = self . listener_thread . queue <EOL> while True : <EOL> try : <EOL> dp = q . get ( False ) <EOL> metric = self . make_metric ( dp ) <EOL> except Queue . Empty : <EOL> break <EOL> self . publish_metric ( metric ) <EOL> def start_listener ( self ) : <EOL> self . listener_thread = ListenerThread ( self . config [ '<STR_LIT>' ] , <EOL> self . config [ '<STR_LIT>' ] , <EOL> self . log ) <EOL> self . listener_thread . start ( ) <EOL> def stop_listener ( self ) : <EOL> global ALIVE <EOL> ALIVE = False <EOL> self . listener_thread . join ( ) <EOL> self . log . error ( '<STR_LIT>' ) <EOL> def make_metric ( self , dp ) : <EOL> path = "<STR_LIT:.>" . join ( ( dp . host , self . config [ '<STR_LIT:path>' ] , dp . name ) ) <EOL> if '<STR_LIT>' in self . config : <EOL> prefix = self . config [ '<STR_LIT>' ] <EOL> if prefix : <EOL> path = "<STR_LIT:.>" . join ( ( prefix , path ) ) <EOL> if '<STR_LIT>' in self . config : <EOL> suffix = self . config [ '<STR_LIT>' ] <EOL> if suffix : <EOL> path = "<STR_LIT:.>" . join ( ( path , suffix ) ) <EOL> if dp . 
is_counter : <EOL> metric_type = "<STR_LIT>" <EOL> else : <EOL> metric_type = "<STR_LIT>" <EOL> metric = diamond . metric . Metric ( path , dp . value , dp . time , <EOL> metric_type = metric_type ) <EOL> return metric <EOL> def __del__ ( self ) : <EOL> if self . listener_thread : <EOL> self . stop_listener ( ) <EOL> class ListenerThread ( threading . Thread ) : <EOL> def __init__ ( self , host , port , log , poll_interval = <NUM_LIT> ) : <EOL> super ( ListenerThread , self ) . __init__ ( ) <EOL> self . name = '<STR_LIT>' <EOL> self . host = host <EOL> self . port = port <EOL> self . log = log <EOL> self . poll_interval = poll_interval <EOL> self . queue = Queue . Queue ( ) <EOL> def run ( self ) : <EOL> self . log . info ( '<STR_LIT>' . format ( <EOL> self . host , self . port ) ) <EOL> rdr = collectd_network . Reader ( self . host , self . port ) <EOL> try : <EOL> while ALIVE : <EOL> try : <EOL> items = rdr . interpret ( poll_interval = self . poll_interval ) <EOL> self . send_to_collector ( items ) <EOL> except ValueError , e : <EOL> self . log . warn ( '<STR_LIT>' . format ( e ) ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' . format ( type ( e ) , <EOL> e ) ) <EOL> self . log . info ( '<STR_LIT>' ) <EOL> def send_to_collector ( self , items ) : <EOL> if items is None : <EOL> return <EOL> for item in items : <EOL> try : <EOL> metric = self . transform ( item ) <EOL> self . queue . put ( metric ) <EOL> except Queue . Full : <EOL> self . log . error ( '<STR_LIT>' ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' . format ( type ( e ) , <EOL> e ) ) <EOL> def transform ( self , item ) : <EOL> parts = [ ] <EOL> path = item . plugininstance <EOL> if '<STR_LIT:->' in path : <EOL> ( jvm , tail ) = path . split ( '<STR_LIT:->' , <NUM_LIT:1> ) <EOL> path = tail <EOL> else : <EOL> jvm = '<STR_LIT>' <EOL> parts . append ( jvm ) <EOL> parts . append ( item . 
plugin ) <EOL> if '<STR_LIT:U+0020>' in path : <EOL> ( mb_type , mb_name ) = path . split ( '<STR_LIT:U+0020>' , <NUM_LIT:1> ) <EOL> parts . append ( mb_type ) <EOL> parts . append ( mb_name ) <EOL> else : <EOL> parts . append ( path ) <EOL> parts . append ( item . typeinstance ) <EOL> name = '<STR_LIT:.>' . join ( [ sanitize_word ( part ) for part in parts ] ) <EOL> if item [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] == <NUM_LIT:0> : <EOL> is_counter = True <EOL> else : <EOL> is_counter = False <EOL> dp = Datapoint ( item . host , item . time , name , item [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] , is_counter ) <EOL> return dp <EOL> def sanitize_word ( s ) : <EOL> """<STR_LIT>""" <EOL> s = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , s ) <EOL> s = re . sub ( '<STR_LIT>' , '<STR_LIT:_>' , s ) <EOL> return s . strip ( '<STR_LIT:_>' ) <EOL> class Datapoint ( object ) : <EOL> def __init__ ( self , host , time , name , value , is_counter ) : <EOL> self . host = host <EOL> self . time = time <EOL> self . name = name <EOL> self . value = value <EOL> self . is_counter = is_counter </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import unittest <EOL> from test import run_only <EOL> from mock import MagicMock <EOL> from mock import patch <EOL> from mock import call <EOL> from diamond . collector import Collector <EOL> from mongodb import MongoDBCollector <EOL> def run_only_if_pymongo_is_available ( func ) : <EOL> try : <EOL> import pymongo <EOL> except ImportError : <EOL> pymongo = None <EOL> pred = lambda : pymongo is not None <EOL> return run_only ( func , pred ) <EOL> class TestMongoDBCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> self . collector = MongoDBCollector ( config , None ) <EOL> self . connection = MagicMock ( ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( MongoDBCollector ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_publish_nested_keys_for_server_stats ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } , '<STR_LIT:key>' : <NUM_LIT:2> , '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . _annotate_connection ( connector_mock , data ) <EOL> self . collector . collect ( ) <EOL> self . connection . db . command . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:key>' : <NUM_LIT:2> <EOL> } ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_publish_nested_keys_for_db_stats ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : '<STR_LIT:str>' } <EOL> self . 
_annotate_connection ( connector_mock , data ) <EOL> self . collector . collect ( ) <EOL> self . connection [ '<STR_LIT>' ] . command . assert_called_once_with ( '<STR_LIT>' ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } <EOL> self . setDocExample ( collector = self . collector . __class__ . __name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_publish_stats_with_long_type ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : long ( <NUM_LIT:1> ) , '<STR_LIT:key>' : <NUM_LIT:2> , '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . _annotate_connection ( connector_mock , data ) <EOL> self . collector . collect ( ) <EOL> self . connection . db . command . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT:key>' : <NUM_LIT:2> <EOL> } ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_ignore_unneeded_databases ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> self . _annotate_connection ( connector_mock , { } ) <EOL> self . collector . collect ( ) <EOL> assert call ( '<STR_LIT>' ) not in self . connection . __getitem__ . call_args_list <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_ignore_unneeded_collections ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : long ( <NUM_LIT:1> ) , '<STR_LIT:key>' : <NUM_LIT:2> , '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . 
_annotate_connection ( connector_mock , data ) <EOL> self . connection [ '<STR_LIT>' ] . collection_names . return_value = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . connection [ '<STR_LIT>' ] . command . return_value = { '<STR_LIT:key>' : <NUM_LIT:2> , <EOL> '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . collector . collect ( ) <EOL> self . connection . db . command . assert_called_once_with ( '<STR_LIT>' ) <EOL> self . connection [ '<STR_LIT>' ] . collection_names . assert_called_once_with ( ) <EOL> self . connection [ '<STR_LIT>' ] . command . assert_any_call ( '<STR_LIT>' ) <EOL> self . connection [ '<STR_LIT>' ] . command . assert_any_call ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> assert call ( '<STR_LIT>' , '<STR_LIT>' ) not in self . connection [ '<STR_LIT>' ] . command . call_args_list <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> } <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> def _annotate_connection ( self , connector_mock , data ) : <EOL> connector_mock . return_value = self . connection <EOL> self . connection . db . command . return_value = data <EOL> self . connection . database_names . return_value = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> class TestMongoMultiHostDBCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { <EOL> '<STR_LIT>' : [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> self . collector = MongoDBCollector ( config , None ) <EOL> self . connection = MagicMock ( ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( MongoDBCollector ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . 
object ( Collector , '<STR_LIT>' ) <EOL> def test_should_publish_nested_keys_for_server_stats ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } , '<STR_LIT:key>' : <NUM_LIT:2> , '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . _annotate_connection ( connector_mock , data ) <EOL> self . collector . collect ( ) <EOL> self . connection . db . command . assert_called_with ( '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_publish_nested_keys_for_db_stats ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : { '<STR_LIT>' : <NUM_LIT:1> } , '<STR_LIT>' : <NUM_LIT:2> , '<STR_LIT>' : '<STR_LIT:str>' } <EOL> self . _annotate_connection ( connector_mock , data ) <EOL> self . collector . collect ( ) <EOL> self . connection [ '<STR_LIT>' ] . command . assert_called_with ( '<STR_LIT>' ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } <EOL> self . setDocExample ( collector = self . collector . __class__ . __name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_publish_stats_with_long_type ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : long ( <NUM_LIT:1> ) , '<STR_LIT:key>' : <NUM_LIT:2> , '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . 
_annotate_connection ( connector_mock , data ) <EOL> self . collector . collect ( ) <EOL> self . connection . db . command . assert_called_with ( '<STR_LIT>' ) <EOL> self . assertPublishedMany ( publish_mock , { <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:2> <EOL> } ) <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_ignore_unneeded_databases ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> self . _annotate_connection ( connector_mock , { } ) <EOL> self . collector . collect ( ) <EOL> assert call ( '<STR_LIT>' ) not in self . connection . __getitem__ . call_args_list <EOL> @ run_only_if_pymongo_is_available <EOL> @ patch ( '<STR_LIT>' ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_ignore_unneeded_collections ( self , <EOL> publish_mock , <EOL> connector_mock ) : <EOL> data = { '<STR_LIT>' : long ( <NUM_LIT:1> ) , '<STR_LIT:key>' : <NUM_LIT:2> , '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . _annotate_connection ( connector_mock , data ) <EOL> self . connection [ '<STR_LIT>' ] . collection_names . return_value = [ '<STR_LIT>' , <EOL> '<STR_LIT>' ] <EOL> self . connection [ '<STR_LIT>' ] . command . return_value = { '<STR_LIT:key>' : <NUM_LIT:2> , <EOL> '<STR_LIT:string>' : '<STR_LIT:str>' } <EOL> self . collector . collect ( ) <EOL> self . connection . db . command . assert_called_with ( '<STR_LIT>' ) <EOL> self . connection [ '<STR_LIT>' ] . collection_names . assert_called_with ( ) <EOL> self . connection [ '<STR_LIT>' ] . command . assert_any_call ( '<STR_LIT>' ) <EOL> self . connection [ '<STR_LIT>' ] . command . assert_any_call ( '<STR_LIT>' , <EOL> '<STR_LIT>' ) <EOL> assert call ( '<STR_LIT>' , '<STR_LIT>' ) not in self . connection [ '<STR_LIT>' ] . command . 
call_args_list <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> } <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> def _annotate_connection ( self , connector_mock , data ) : <EOL> connector_mock . return_value = self . connection <EOL> self . connection . db . command . return_value = data <EOL> self . connection . database_names . return_value = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> """<STR_LIT>""" <EOL> import diamond . collector <EOL> import os <EOL> class NfsdCollector ( diamond . collector . Collector ) : <EOL> PROC = '<STR_LIT>' <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( NfsdCollector , self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( NfsdCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' <EOL> } ) <EOL> return config <EOL> def collect ( self ) : <EOL> """<STR_LIT>""" <EOL> if os . access ( self . PROC , os . R_OK ) : <EOL> results = { } <EOL> file = open ( self . PROC ) <EOL> for line in file : <EOL> line = line . split ( ) <EOL> if line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:5> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:5> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:6> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:7> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:8> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:9> ] <EOL> 
results [ '<STR_LIT>' ] = line [ <NUM_LIT:10> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:11> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:12> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:5> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:6> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:7> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:8> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:9> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:10> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:11> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:12> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:5> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:6> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:7> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:8> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:9> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:10> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:11> ] 
<EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:12> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:15> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:16> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:5> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:6> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:7> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:8> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:9> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:10> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:11> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:12> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:15> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:16> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:20> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> elif line [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:1> ] <EOL> results 
[ '<STR_LIT>' ] = line [ <NUM_LIT:2> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:3> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:4> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:5> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:6> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:7> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:8> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:9> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:10> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:11> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:12> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:15> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:16> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:20> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:30> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT:32> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = 
line [ <NUM_LIT> ] <EOL> results [ '<STR_LIT>' ] = line [ <NUM_LIT> ] <EOL> file . close ( ) <EOL> for stat in results . keys ( ) : <EOL> metric_name = '<STR_LIT:.>' + stat <EOL> metric_value = long ( float ( results [ stat ] ) ) <EOL> metric_value = self . derivative ( metric_name , metric_value ) <EOL> self . publish ( metric_name , metric_value ) <EOL> return True <EOL> return False </s>
<s> """<STR_LIT>""" <EOL> import socket <EOL> import sys <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> import diamond . collector <EOL> from diamond . collector import str_to_bool <EOL> if sys . version_info < ( <NUM_LIT:2> , <NUM_LIT:6> ) : <EOL> from string import maketrans <EOL> DOTS_TO_UNDERS = maketrans ( '<STR_LIT:.>' , '<STR_LIT:_>' ) <EOL> else : <EOL> DOTS_TO_UNDERS = { ord ( u'<STR_LIT:.>' ) : u'<STR_LIT:_>' } <EOL> class PostfixCollector ( diamond . collector . Collector ) : <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( PostfixCollector , <EOL> self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT:port>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( PostfixCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:path>' : '<STR_LIT>' , <EOL> '<STR_LIT:host>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT:port>' : <NUM_LIT> , <EOL> '<STR_LIT>' : True , <EOL> } ) <EOL> return config <EOL> def get_json ( self ) : <EOL> json_string = '<STR_LIT>' <EOL> address = ( self . config [ '<STR_LIT:host>' ] , int ( self . config [ '<STR_LIT:port>' ] ) ) <EOL> s = None <EOL> try : <EOL> try : <EOL> s = socket . create_connection ( address , timeout = <NUM_LIT:1> ) <EOL> s . sendall ( '<STR_LIT>' ) <EOL> while <NUM_LIT:1> : <EOL> data = s . recv ( <NUM_LIT> ) <EOL> if not data : <EOL> break <EOL> json_string += data <EOL> except socket . error : <EOL> self . log . exception ( "<STR_LIT>" ) <EOL> return '<STR_LIT:{}>' <EOL> finally : <EOL> if s : <EOL> s . close ( ) <EOL> return json_string or '<STR_LIT:{}>' <EOL> def get_data ( self ) : <EOL> json_string = self . get_json ( ) <EOL> try : <EOL> data = json . loads ( json_string ) <EOL> except ( ValueError , TypeError ) : <EOL> self . 
log . exception ( "<STR_LIT>" ) <EOL> return None <EOL> return data <EOL> def collect ( self ) : <EOL> data = self . get_data ( ) <EOL> if not data : <EOL> return <EOL> if str_to_bool ( self . config [ '<STR_LIT>' ] ) and u'<STR_LIT>' in data : <EOL> for client , value in data [ '<STR_LIT>' ] . iteritems ( ) : <EOL> metric = u'<STR_LIT:.>' . join ( [ '<STR_LIT>' , <EOL> client . translate ( DOTS_TO_UNDERS ) ] ) <EOL> dvalue = self . derivative ( metric , value ) <EOL> self . publish ( metric , dvalue ) <EOL> for action in ( u'<STR_LIT>' , u'<STR_LIT>' , u'<STR_LIT>' ) : <EOL> if action not in data : <EOL> continue <EOL> for sect , stats in data [ action ] . iteritems ( ) : <EOL> for status , value in stats . iteritems ( ) : <EOL> metric = '<STR_LIT:.>' . join ( [ action , <EOL> sect , <EOL> status . translate ( DOTS_TO_UNDERS ) ] ) <EOL> dvalue = self . derivative ( metric , value ) <EOL> self . publish ( metric , dvalue ) <EOL> if u'<STR_LIT>' in data : <EOL> for key , value in data [ u'<STR_LIT>' ] . iteritems ( ) : <EOL> metric = '<STR_LIT:.>' . join ( [ '<STR_LIT>' , key ] ) <EOL> dvalue = self . derivative ( metric , value ) <EOL> self . publish ( metric , dvalue ) </s>
<s> """<STR_LIT>""" <EOL> try : <EOL> import json <EOL> except ImportError : <EOL> import simplejson as json <EOL> import urllib2 <EOL> import diamond . collector <EOL> class SidekiqWebCollector ( diamond . collector . Collector ) : <EOL> def get_default_config_help ( self ) : <EOL> config_help = super ( SidekiqWebCollector , self ) . get_default_config_help ( ) <EOL> config_help . update ( { <EOL> } ) <EOL> return config_help <EOL> def get_default_config ( self ) : <EOL> """<STR_LIT>""" <EOL> config = super ( SidekiqWebCollector , self ) . get_default_config ( ) <EOL> config . update ( { <EOL> '<STR_LIT:host>' : '<STR_LIT:localhost>' , <EOL> '<STR_LIT:port>' : <NUM_LIT> , <EOL> '<STR_LIT>' : [ '<STR_LIT>' ] , <EOL> } ) <EOL> return config <EOL> def collect ( self ) : <EOL> try : <EOL> response = urllib2 . urlopen ( "<STR_LIT>" % ( <EOL> self . config [ '<STR_LIT:host>' ] , int ( self . config [ '<STR_LIT:port>' ] ) ) ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' , e ) <EOL> return { } <EOL> try : <EOL> j = json . loads ( response . read ( ) ) <EOL> except Exception , e : <EOL> self . log . error ( '<STR_LIT>' , e ) <EOL> return { } <EOL> for k in j : <EOL> for item , value in j [ k ] . items ( ) : <EOL> if isinstance ( value , ( str , unicode ) ) and '<STR_LIT:M>' in value : <EOL> value = float ( value . replace ( '<STR_LIT:M>' , '<STR_LIT>' ) ) <EOL> for unit in self . config [ '<STR_LIT>' ] : <EOL> unit_value = diamond . convertor . binary . convert ( <EOL> value = value , <EOL> oldUnit = '<STR_LIT>' , <EOL> newUnit = unit ) <EOL> self . publish ( "<STR_LIT>" % ( k , item , unit ) , unit_value ) <EOL> else : <EOL> self . publish ( "<STR_LIT>" % ( k , item ) , value ) </s>
<s> from test import CollectorTestCase <EOL> from test import get_collector_config <EOL> from test import unittest <EOL> from mock import Mock <EOL> from mock import patch <EOL> from diamond . collector import Collector <EOL> from unbound import UnboundCollector <EOL> class TestUnboundCollector ( CollectorTestCase ) : <EOL> def setUp ( self ) : <EOL> config = get_collector_config ( '<STR_LIT>' , { } ) <EOL> self . collector = UnboundCollector ( config , None ) <EOL> def test_import ( self ) : <EOL> self . assertTrue ( UnboundCollector ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_work_wtih_real_data ( self , publish_mock ) : <EOL> fixture_data = self . getFixture ( '<STR_LIT>' ) . getvalue ( ) <EOL> collector_mock = patch . object ( UnboundCollector , <EOL> '<STR_LIT>' , <EOL> Mock ( return_value = [ fixture_data , '<STR_LIT>' ] ) ) <EOL> collector_mock . start ( ) <EOL> self . collector . collect ( ) <EOL> collector_mock . stop ( ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:9> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:16> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : 
<NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> } <EOL> self . setDocExample ( collector = self . collector . __class__ . __name__ , <EOL> metrics = metrics , <EOL> defaultpath = self . collector . config [ '<STR_LIT:path>' ] ) <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_should_fail_gracefully ( self , publish_mock ) : <EOL> collector_mock = patch . 
object ( UnboundCollector , <EOL> '<STR_LIT>' , <EOL> Mock ( return_value = None ) ) <EOL> collector_mock . start ( ) <EOL> self . collector . collect ( ) <EOL> collector_mock . stop ( ) <EOL> self . assertPublishedMany ( publish_mock , { } ) <EOL> @ patch . object ( Collector , '<STR_LIT>' ) <EOL> def test_exclude_histogram ( self , publish_mock ) : <EOL> self . collector . config [ '<STR_LIT>' ] = False <EOL> fixture_data = self . getFixture ( '<STR_LIT>' ) . getvalue ( ) <EOL> collector_mock = patch . object ( UnboundCollector , <EOL> '<STR_LIT>' , <EOL> Mock ( return_value = [ fixture_data , '<STR_LIT>' ] ) ) <EOL> collector_mock . start ( ) <EOL> self . collector . collect ( ) <EOL> collector_mock . stop ( ) <EOL> metrics = { <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:7> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:9> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:16> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:10> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> 
'<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } <EOL> histogram = { <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:1.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> } <EOL> self . assertPublishedMany ( publish_mock , metrics ) <EOL> self . assertUnpublishedMany ( publish_mock , histogram ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> from xdrlib import Packer , Unpacker <EOL> import socket <EOL> slope_str2int = { '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:1> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:3> , <EOL> '<STR_LIT>' : <NUM_LIT:4> } <EOL> slope_int2str = { <NUM_LIT:0> : '<STR_LIT>' , <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:3> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' } <EOL> class Gmetric : <EOL> """<STR_LIT>""" <EOL> type = ( '<STR_LIT>' , '<STR_LIT:string>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:float>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) <EOL> protocol = ( '<STR_LIT>' , '<STR_LIT>' ) <EOL> def __init__ ( self , host , port , protocol ) : <EOL> if protocol not in self . protocol : <EOL> raise ValueError ( "<STR_LIT>" + str ( self . protocol ) ) <EOL> self . socket = socket . socket ( socket . AF_INET , socket . SOCK_DGRAM ) <EOL> if protocol == '<STR_LIT>' : <EOL> self . socket . setsockopt ( socket . IPPROTO_IP , <EOL> socket . IP_MULTICAST_TTL , <NUM_LIT:20> ) <EOL> self . hostport = ( host , int ( port ) ) <EOL> def send ( self , NAME , VAL , TYPE = '<STR_LIT>' , UNITS = '<STR_LIT>' , SLOPE = '<STR_LIT>' , <EOL> TMAX = <NUM_LIT> , DMAX = <NUM_LIT:0> , GROUP = "<STR_LIT>" ) : <EOL> if SLOPE not in slope_str2int : <EOL> raise ValueError ( "<STR_LIT>" + str ( self . slope . keys ( ) ) ) <EOL> if TYPE not in self . type : <EOL> raise ValueError ( "<STR_LIT>" + str ( self . type ) ) <EOL> if len ( NAME ) == <NUM_LIT:0> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> ( meta_msg , data_msg ) = gmetric_write ( NAME , <EOL> VAL , <EOL> TYPE , <EOL> UNITS , <EOL> SLOPE , <EOL> TMAX , <EOL> DMAX , <EOL> GROUP ) <EOL> self . socket . sendto ( meta_msg , self . hostport ) <EOL> self . socket . sendto ( data_msg , self . 
hostport ) <EOL> def gmetric_write ( NAME , VAL , TYPE , UNITS , SLOPE , TMAX , DMAX , GROUP ) : <EOL> """<STR_LIT>""" <EOL> packer = Packer ( ) <EOL> HOSTNAME = "<STR_LIT:test>" <EOL> SPOOF = <NUM_LIT:0> <EOL> packer . pack_int ( <NUM_LIT> ) <EOL> packer . pack_string ( HOSTNAME ) <EOL> packer . pack_string ( NAME ) <EOL> packer . pack_int ( SPOOF ) <EOL> packer . pack_string ( TYPE ) <EOL> packer . pack_string ( NAME ) <EOL> packer . pack_string ( UNITS ) <EOL> packer . pack_int ( slope_str2int [ SLOPE ] ) <EOL> packer . pack_uint ( int ( TMAX ) ) <EOL> packer . pack_uint ( int ( DMAX ) ) <EOL> if GROUP == "<STR_LIT>" : <EOL> packer . pack_int ( <NUM_LIT:0> ) <EOL> else : <EOL> packer . pack_int ( <NUM_LIT:1> ) <EOL> packer . pack_string ( "<STR_LIT>" ) <EOL> packer . pack_string ( GROUP ) <EOL> data = Packer ( ) <EOL> data . pack_int ( <NUM_LIT> + <NUM_LIT:5> ) <EOL> data . pack_string ( HOSTNAME ) <EOL> data . pack_string ( NAME ) <EOL> data . pack_int ( SPOOF ) <EOL> data . pack_string ( "<STR_LIT:%s>" ) <EOL> data . pack_string ( str ( VAL ) ) <EOL> return packer . get_buffer ( ) , data . get_buffer ( ) <EOL> def gmetric_read ( msg ) : <EOL> unpacker = Unpacker ( msg ) <EOL> values = dict ( ) <EOL> unpacker . unpack_int ( ) <EOL> values [ '<STR_LIT>' ] = unpacker . unpack_string ( ) <EOL> values [ '<STR_LIT>' ] = unpacker . unpack_string ( ) <EOL> values [ '<STR_LIT>' ] = unpacker . unpack_string ( ) <EOL> values [ '<STR_LIT>' ] = unpacker . unpack_string ( ) <EOL> values [ '<STR_LIT>' ] = slope_int2str [ unpacker . unpack_int ( ) ] <EOL> values [ '<STR_LIT>' ] = unpacker . unpack_uint ( ) <EOL> values [ '<STR_LIT>' ] = unpacker . unpack_uint ( ) <EOL> unpacker . done ( ) <EOL> return values <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import optparse <EOL> parser = optparse . OptionParser ( ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:host>" , default = "<STR_LIT:127.0.0.1>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:port>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:name>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:value>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT:type>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT:0>" , <EOL> help = "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> + "<STR_LIT>" ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> g = Gmetric ( options . host , options . port , options . protocol ) <EOL> g . send ( options . name , options . value , options . type , options . units , <EOL> options . slope , options . tmax , options . dmax , options . group ) </s>
<s> from test import unittest <EOL> from test import run_only <EOL> import configobj <EOL> from diamond . handler . riemann import RiemannHandler <EOL> from diamond . metric import Metric <EOL> def run_only_if_bernhard_is_available ( func ) : <EOL> try : <EOL> import bernhard <EOL> except ImportError : <EOL> bernhard = None <EOL> pred = lambda : bernhard is not None <EOL> return run_only ( func , pred ) <EOL> class TestRiemannHandler ( unittest . TestCase ) : <EOL> @ run_only_if_bernhard_is_available <EOL> def test_metric_to_riemann_event ( self ) : <EOL> config = configobj . ConfigObj ( ) <EOL> config [ '<STR_LIT:host>' ] = '<STR_LIT:localhost>' <EOL> config [ '<STR_LIT:port>' ] = <NUM_LIT> <EOL> handler = RiemannHandler ( config ) <EOL> metric = Metric ( '<STR_LIT>' , <EOL> <NUM_LIT:0> , <EOL> timestamp = <NUM_LIT> , <EOL> host = '<STR_LIT>' ) <EOL> event = handler . _metric_to_riemann_event ( metric ) <EOL> self . assertEqual ( event , { <EOL> '<STR_LIT:host>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:time>' : <NUM_LIT> , <EOL> '<STR_LIT>' : <NUM_LIT:0.0> , <EOL> '<STR_LIT>' : None <EOL> } ) </s>
<s> from django . core . management . base import AppCommand , CommandError <EOL> from drf_generators . generators import * <EOL> from optparse import make_option <EOL> import django <EOL> class Command ( AppCommand ) : <EOL> help = '<STR_LIT>' <EOL> args = "<STR_LIT>" <EOL> base_options = ( <EOL> make_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , default = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , default = <NUM_LIT:0> , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) , <EOL> make_option ( '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , <EOL> help = '<STR_LIT>' ) , <EOL> ) <EOL> option_list = AppCommand . option_list + base_options <EOL> def handle_app_config ( self , app_config , ** options ) : <EOL> if app_config . models_module is None : <EOL> raise CommandError ( '<STR_LIT>' ) <EOL> if django . VERSION [ <NUM_LIT:1> ] == <NUM_LIT:7> : <EOL> force = options [ '<STR_LIT>' ] if '<STR_LIT>' in options else False <EOL> format = options [ '<STR_LIT>' ] if '<STR_LIT>' in options else None <EOL> depth = options [ '<STR_LIT>' ] if '<STR_LIT>' in format else <NUM_LIT:0> <EOL> if '<STR_LIT>' in options : <EOL> serializers = options [ '<STR_LIT>' ] <EOL> else : <EOL> serializers = False <EOL> views = options [ '<STR_LIT>' ] if '<STR_LIT>' in options else False <EOL> urls = options [ '<STR_LIT>' ] if '<STR_LIT>' in options else False <EOL> elif django . 
VERSION [ <NUM_LIT:1> ] >= <NUM_LIT:8> : <EOL> force = options [ '<STR_LIT>' ] <EOL> format = options [ '<STR_LIT>' ] <EOL> depth = options [ '<STR_LIT>' ] <EOL> serializers = options [ '<STR_LIT>' ] <EOL> views = options [ '<STR_LIT>' ] <EOL> urls = options [ '<STR_LIT>' ] <EOL> else : <EOL> raise CommandError ( '<STR_LIT>' ) <EOL> if format == '<STR_LIT>' : <EOL> generator = ViewSetGenerator ( app_config , force ) <EOL> elif format == '<STR_LIT>' : <EOL> generator = APIViewGenerator ( app_config , force ) <EOL> elif format == '<STR_LIT>' : <EOL> generator = FunctionViewGenerator ( app_config , force ) <EOL> elif format == '<STR_LIT>' : <EOL> generator = ModelViewSetGenerator ( app_config , force ) <EOL> else : <EOL> message = '<STR_LIT>' % options [ '<STR_LIT>' ] <EOL> message += '<STR_LIT>' <EOL> raise CommandError ( message ) <EOL> if serializers : <EOL> result = generator . generate_serializers ( depth ) <EOL> elif views : <EOL> result = generator . generate_views ( ) <EOL> elif urls : <EOL> result = generator . generate_urls ( ) <EOL> else : <EOL> result = generator . generate_serializers ( depth ) + '<STR_LIT:\n>' <EOL> result += generator . generate_views ( ) + '<STR_LIT:\n>' <EOL> result += generator . generate_urls ( ) <EOL> print ( result ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> from configHandler import idleConf <EOL> import macosxSupport <EOL> menudefs = [ <EOL> ( '<STR_LIT:file>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ] ) , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' 
) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) , <EOL> ] <EOL> if macosxSupport . runningAsOSXApp ( ) : <EOL> quitItem = menudefs [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ - <NUM_LIT:1> ] <EOL> closeItem = menudefs [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ - <NUM_LIT:2> ] <EOL> del menudefs [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] [ - <NUM_LIT:3> : ] <EOL> menudefs [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] . insert ( <NUM_LIT:6> , closeItem ) <EOL> del menudefs [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] [ <NUM_LIT:0> : <NUM_LIT:2> ] <EOL> menudefs . insert ( <NUM_LIT:0> , <EOL> ( '<STR_LIT>' , [ <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> None , <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , <EOL> ] ) ) <EOL> default_keydefs = idleConf . GetCurrentKeySet ( ) <EOL> del sys </s>
<s> from Tkinter import * <EOL> import SearchEngine <EOL> from SearchDialogBase import SearchDialogBase <EOL> def replace ( text ) : <EOL> root = text . _root ( ) <EOL> engine = SearchEngine . get ( root ) <EOL> if not hasattr ( engine , "<STR_LIT>" ) : <EOL> engine . _replacedialog = ReplaceDialog ( root , engine ) <EOL> dialog = engine . _replacedialog <EOL> dialog . open ( text ) <EOL> class ReplaceDialog ( SearchDialogBase ) : <EOL> title = "<STR_LIT>" <EOL> icon = "<STR_LIT>" <EOL> def __init__ ( self , root , engine ) : <EOL> SearchDialogBase . __init__ ( self , root , engine ) <EOL> self . replvar = StringVar ( root ) <EOL> def open ( self , text ) : <EOL> SearchDialogBase . open ( self , text ) <EOL> try : <EOL> first = text . index ( "<STR_LIT>" ) <EOL> except TclError : <EOL> first = None <EOL> try : <EOL> last = text . index ( "<STR_LIT>" ) <EOL> except TclError : <EOL> last = None <EOL> first = first or text . index ( "<STR_LIT>" ) <EOL> last = last or first <EOL> self . show_hit ( first , last ) <EOL> self . ok = <NUM_LIT:1> <EOL> def create_entries ( self ) : <EOL> SearchDialogBase . create_entries ( self ) <EOL> self . replent = self . make_entry ( "<STR_LIT>" , self . replvar ) <EOL> def create_command_buttons ( self ) : <EOL> SearchDialogBase . create_command_buttons ( self ) <EOL> self . make_button ( "<STR_LIT>" , self . find_it ) <EOL> self . make_button ( "<STR_LIT>" , self . replace_it ) <EOL> self . make_button ( "<STR_LIT>" , self . default_command , <NUM_LIT:1> ) <EOL> self . make_button ( "<STR_LIT>" , self . replace_all ) <EOL> def find_it ( self , event = None ) : <EOL> self . do_find ( <NUM_LIT:0> ) <EOL> def replace_it ( self , event = None ) : <EOL> if self . do_find ( self . ok ) : <EOL> self . do_replace ( ) <EOL> def default_command ( self , event = None ) : <EOL> if self . do_find ( self . ok ) : <EOL> self . do_replace ( ) <EOL> self . do_find ( <NUM_LIT:0> ) <EOL> def replace_all ( self , event = None ) : <EOL> prog = self . 
engine . getprog ( ) <EOL> if not prog : <EOL> return <EOL> repl = self . replvar . get ( ) <EOL> text = self . text <EOL> res = self . engine . search_text ( text , prog ) <EOL> if not res : <EOL> text . bell ( ) <EOL> return <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> line = res [ <NUM_LIT:0> ] <EOL> col = res [ <NUM_LIT:1> ] . start ( ) <EOL> if self . engine . iswrap ( ) : <EOL> line = <NUM_LIT:1> <EOL> col = <NUM_LIT:0> <EOL> ok = <NUM_LIT:1> <EOL> first = last = None <EOL> text . undo_block_start ( ) <EOL> while <NUM_LIT:1> : <EOL> res = self . engine . search_forward ( text , prog , line , col , <NUM_LIT:0> , ok ) <EOL> if not res : <EOL> break <EOL> line , m = res <EOL> chars = text . get ( "<STR_LIT>" % line , "<STR_LIT>" % ( line + <NUM_LIT:1> ) ) <EOL> orig = m . group ( ) <EOL> new = m . expand ( repl ) <EOL> i , j = m . span ( ) <EOL> first = "<STR_LIT>" % ( line , i ) <EOL> last = "<STR_LIT>" % ( line , j ) <EOL> if new == orig : <EOL> text . mark_set ( "<STR_LIT>" , last ) <EOL> else : <EOL> text . mark_set ( "<STR_LIT>" , first ) <EOL> if first != last : <EOL> text . delete ( first , last ) <EOL> if new : <EOL> text . insert ( first , new ) <EOL> col = i + len ( new ) <EOL> ok = <NUM_LIT:0> <EOL> text . undo_block_stop ( ) <EOL> if first and last : <EOL> self . show_hit ( first , last ) <EOL> self . close ( ) <EOL> def do_find ( self , ok = <NUM_LIT:0> ) : <EOL> if not self . engine . getprog ( ) : <EOL> return False <EOL> text = self . text <EOL> res = self . engine . search_text ( text , None , ok ) <EOL> if not res : <EOL> text . bell ( ) <EOL> return False <EOL> line , m = res <EOL> i , j = m . span ( ) <EOL> first = "<STR_LIT>" % ( line , i ) <EOL> last = "<STR_LIT>" % ( line , j ) <EOL> self . show_hit ( first , last ) <EOL> self . ok = <NUM_LIT:1> <EOL> return True <EOL> def do_replace ( self ) : <EOL> prog = self . engine . 
getprog ( ) <EOL> if not prog : <EOL> return False <EOL> text = self . text <EOL> try : <EOL> first = pos = text . index ( "<STR_LIT>" ) <EOL> last = text . index ( "<STR_LIT>" ) <EOL> except TclError : <EOL> pos = None <EOL> if not pos : <EOL> first = last = pos = text . index ( "<STR_LIT>" ) <EOL> line , col = SearchEngine . get_line_col ( pos ) <EOL> chars = text . get ( "<STR_LIT>" % line , "<STR_LIT>" % ( line + <NUM_LIT:1> ) ) <EOL> m = prog . match ( chars , col ) <EOL> if not prog : <EOL> return False <EOL> new = m . expand ( self . replvar . get ( ) ) <EOL> text . mark_set ( "<STR_LIT>" , first ) <EOL> text . undo_block_start ( ) <EOL> if m . group ( ) : <EOL> text . delete ( first , last ) <EOL> if new : <EOL> text . insert ( first , new ) <EOL> text . undo_block_stop ( ) <EOL> self . show_hit ( first , text . index ( "<STR_LIT>" ) ) <EOL> self . ok = <NUM_LIT:0> <EOL> return True <EOL> def show_hit ( self , first , last ) : <EOL> text = self . text <EOL> text . mark_set ( "<STR_LIT>" , first ) <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> text . tag_add ( "<STR_LIT>" , first , last ) <EOL> text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) <EOL> if first == last : <EOL> text . tag_add ( "<STR_LIT>" , first ) <EOL> else : <EOL> text . tag_add ( "<STR_LIT>" , first , last ) <EOL> text . see ( "<STR_LIT>" ) <EOL> text . update_idletasks ( ) <EOL> def close ( self , event = None ) : <EOL> SearchDialogBase . close ( self , event ) <EOL> self . text . tag_remove ( "<STR_LIT>" , "<STR_LIT:1.0>" , "<STR_LIT:end>" ) </s>
<s> import sys <EOL> import linecache <EOL> import time <EOL> import socket <EOL> import traceback <EOL> import thread <EOL> import threading <EOL> import Queue <EOL> import CallTips <EOL> import AutoComplete <EOL> from utils import tb_print_list <EOL> import RemoteDebugger <EOL> import RemoteObjectBrowser <EOL> import StackViewer <EOL> import rpc <EOL> import __main__ <EOL> LOCALHOST = '<STR_LIT:127.0.0.1>' <EOL> try : <EOL> import warnings <EOL> except ImportError : <EOL> pass <EOL> else : <EOL> def idle_formatwarning_subproc ( message , category , filename , lineno , <EOL> line = None ) : <EOL> """<STR_LIT>""" <EOL> s = "<STR_LIT>" <EOL> s += '<STR_LIT>' % ( filename , lineno ) <EOL> if line is None : <EOL> line = linecache . getline ( filename , lineno ) <EOL> line = line . strip ( ) <EOL> if line : <EOL> s += "<STR_LIT>" % line <EOL> s += "<STR_LIT>" % ( category . __name__ , message ) <EOL> return s <EOL> warnings . formatwarning = idle_formatwarning_subproc <EOL> exit_now = False <EOL> quitting = False <EOL> interruptable = False <EOL> def main ( del_exitfunc = False ) : <EOL> """<STR_LIT>""" <EOL> global exit_now <EOL> global quitting <EOL> global no_exitfunc <EOL> no_exitfunc = del_exitfunc <EOL> try : <EOL> assert ( len ( sys . argv ) > <NUM_LIT:1> ) <EOL> port = int ( sys . argv [ - <NUM_LIT:1> ] ) <EOL> except : <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> return <EOL> sys . argv [ : ] = [ "<STR_LIT>" ] <EOL> sockthread = threading . Thread ( target = manage_socket , <EOL> name = '<STR_LIT>' , <EOL> args = ( ( LOCALHOST , port ) , ) ) <EOL> sockthread . setDaemon ( True ) <EOL> sockthread . start ( ) <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> if exit_now : <EOL> try : <EOL> exit ( ) <EOL> except KeyboardInterrupt : <EOL> continue <EOL> try : <EOL> seq , request = rpc . request_queue . get ( block = True , timeout = <NUM_LIT> ) <EOL> except Queue . 
Empty : <EOL> continue <EOL> method , args , kwargs = request <EOL> ret = method ( * args , ** kwargs ) <EOL> rpc . response_queue . put ( ( seq , ret ) ) <EOL> except KeyboardInterrupt : <EOL> if quitting : <EOL> exit_now = True <EOL> continue <EOL> except SystemExit : <EOL> raise <EOL> except : <EOL> type , value , tb = sys . exc_info ( ) <EOL> try : <EOL> print_exception ( ) <EOL> rpc . response_queue . put ( ( seq , None ) ) <EOL> except : <EOL> traceback . print_exception ( type , value , tb , file = sys . __stderr__ ) <EOL> exit ( ) <EOL> else : <EOL> continue <EOL> def manage_socket ( address ) : <EOL> for i in range ( <NUM_LIT:3> ) : <EOL> time . sleep ( i ) <EOL> try : <EOL> server = MyRPCServer ( address , MyHandler ) <EOL> break <EOL> except socket . error , err : <EOL> print >> sys . __stderr__ , "<STR_LIT>" + err [ <NUM_LIT:1> ] + "<STR_LIT>" <EOL> else : <EOL> print >> sys . __stderr__ , "<STR_LIT>" "<STR_LIT>" <EOL> show_socket_error ( err , address ) <EOL> global exit_now <EOL> exit_now = True <EOL> return <EOL> server . handle_request ( ) <EOL> def show_socket_error ( err , address ) : <EOL> import Tkinter <EOL> import tkMessageBox <EOL> root = Tkinter . Tk ( ) <EOL> root . withdraw ( ) <EOL> if err [ <NUM_LIT:0> ] == <NUM_LIT> : <EOL> msg = "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" "<STR_LIT>" % address <EOL> tkMessageBox . showerror ( "<STR_LIT>" , msg , parent = root ) <EOL> else : <EOL> tkMessageBox . showerror ( "<STR_LIT>" , "<STR_LIT>" % err [ <NUM_LIT:1> ] ) <EOL> root . destroy ( ) <EOL> def print_exception ( temp_filename = None ) : <EOL> import linecache <EOL> linecache . checkcache ( ) <EOL> flush_stdout ( ) <EOL> efile = sys . stderr <EOL> typ , val , tb = excinfo = sys . exc_info ( ) <EOL> sys . last_type , sys . last_value , sys . last_traceback = excinfo <EOL> tbe = traceback . 
extract_tb ( tb ) <EOL> print >> efile , '<STR_LIT>' <EOL> exclude = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> cleanup_traceback ( tbe , exclude ) <EOL> if temp_filename is not None : <EOL> main_fname = '<STR_LIT>' <EOL> new_tbe = [ ] <EOL> for t in tbe : <EOL> fname = main_fname if t [ <NUM_LIT:0> ] == temp_filename else t [ <NUM_LIT:0> ] <EOL> new_tbe . append ( ( fname , ) + t [ <NUM_LIT:1> : ] ) <EOL> tbe = new_tbe <EOL> else : <EOL> main_fname = tbe [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> tb_print_list ( tbe , main_fname , sys . stdout , efile ) <EOL> lines = traceback . format_exception_only ( typ , val ) <EOL> for line in lines : <EOL> print >> efile , line , <EOL> def cleanup_traceback ( tb , exclude ) : <EOL> "<STR_LIT>" <EOL> orig_tb = tb [ : ] <EOL> while tb : <EOL> for rpcfile in exclude : <EOL> if tb [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] . count ( rpcfile ) : <EOL> break <EOL> else : <EOL> break <EOL> del tb [ <NUM_LIT:0> ] <EOL> while tb : <EOL> for rpcfile in exclude : <EOL> if tb [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] . count ( rpcfile ) : <EOL> break <EOL> else : <EOL> break <EOL> del tb [ - <NUM_LIT:1> ] <EOL> if len ( tb ) == <NUM_LIT:0> : <EOL> tb [ : ] = orig_tb [ : ] <EOL> print >> sys . stderr , "<STR_LIT>" <EOL> rpchandler = rpc . objecttable [ '<STR_LIT>' ] . rpchandler <EOL> for i in range ( len ( tb ) ) : <EOL> fn , ln , nm , line = tb [ i ] <EOL> if nm == '<STR_LIT:?>' : <EOL> nm = "<STR_LIT>" <EOL> if not line and fn . startswith ( "<STR_LIT>" ) : <EOL> line = rpchandler . remotecall ( '<STR_LIT>' , '<STR_LIT>' , <EOL> ( fn , ln ) , { } ) <EOL> tb [ i ] = fn , ln , nm , line <EOL> def flush_stdout ( ) : <EOL> try : <EOL> if sys . stdout . softspace : <EOL> sys . stdout . softspace = <NUM_LIT:0> <EOL> sys . stdout . write ( "<STR_LIT:\n>" ) <EOL> except ( AttributeError , EOFError ) : <EOL> pass <EOL> def exit ( ) : <EOL> """<STR_LIT>""" <EOL> if no_exitfunc : <EOL> try : <EOL> del sys . 
exitfunc <EOL> except AttributeError : <EOL> pass <EOL> sys . exit ( <NUM_LIT:0> ) <EOL> class MyRPCServer ( rpc . RPCServer ) : <EOL> def handle_error ( self , request , client_address ) : <EOL> """<STR_LIT>""" <EOL> global quitting <EOL> try : <EOL> raise <EOL> except SystemExit : <EOL> raise <EOL> except EOFError : <EOL> global exit_now <EOL> exit_now = True <EOL> thread . interrupt_main ( ) <EOL> except : <EOL> erf = sys . __stderr__ <EOL> print >> erf , '<STR_LIT:\n>' + '<STR_LIT:->' * <NUM_LIT> <EOL> print >> erf , '<STR_LIT>' <EOL> print >> erf , '<STR_LIT>' % threading . currentThread ( ) . getName ( ) <EOL> print >> erf , '<STR_LIT>' , client_address <EOL> print >> erf , '<STR_LIT>' , repr ( request ) <EOL> traceback . print_exc ( file = erf ) <EOL> print >> erf , '<STR_LIT>' <EOL> print >> erf , '<STR_LIT:->' * <NUM_LIT> <EOL> quitting = True <EOL> thread . interrupt_main ( ) <EOL> class MyHandler ( rpc . RPCHandler ) : <EOL> def handle ( self ) : <EOL> """<STR_LIT>""" <EOL> executive = Executive ( self ) <EOL> self . register ( "<STR_LIT>" , executive ) <EOL> sys . stdin = self . console = self . get_remote_proxy ( "<STR_LIT>" ) <EOL> sys . stdout = self . get_remote_proxy ( "<STR_LIT>" ) <EOL> sys . stderr = self . get_remote_proxy ( "<STR_LIT>" ) <EOL> import IOBinding <EOL> sys . stdin . encoding = sys . stdout . encoding = sys . stderr . encoding = IOBinding . encoding <EOL> self . interp = self . get_remote_proxy ( "<STR_LIT>" ) <EOL> rpc . RPCHandler . getresponse ( self , myseq = None , wait = <NUM_LIT> ) <EOL> def exithook ( self ) : <EOL> "<STR_LIT>" <EOL> time . sleep ( <NUM_LIT:10> ) <EOL> def EOFhook ( self ) : <EOL> "<STR_LIT>" <EOL> global quitting <EOL> quitting = True <EOL> thread . interrupt_main ( ) <EOL> def decode_interrupthook ( self ) : <EOL> "<STR_LIT>" <EOL> global quitting <EOL> quitting = True <EOL> thread . interrupt_main ( ) <EOL> class Executive ( object ) : <EOL> def __init__ ( self , rpchandler ) : <EOL> self . 
rpchandler = rpchandler <EOL> self . locals = __main__ . __dict__ <EOL> self . calltip = CallTips . CallTips ( ) <EOL> self . autocomplete = AutoComplete . AutoComplete ( ) <EOL> def runcode ( self , code , temp_filename = None ) : <EOL> global interruptable <EOL> try : <EOL> self . usr_exc_info = None <EOL> interruptable = True <EOL> try : <EOL> exec code in self . locals <EOL> finally : <EOL> interruptable = False <EOL> except : <EOL> self . usr_exc_info = sys . exc_info ( ) <EOL> if quitting : <EOL> exit ( ) <EOL> print_exception ( temp_filename ) <EOL> jit = self . rpchandler . console . getvar ( "<STR_LIT>" ) <EOL> if jit : <EOL> self . rpchandler . interp . open_remote_stack_viewer ( ) <EOL> else : <EOL> if hasattr ( sys , '<STR_LIT>' ) and sys . exitfunc : <EOL> sys . exitfunc ( ) <EOL> flush_stdout ( ) <EOL> def interrupt_the_server ( self ) : <EOL> if interruptable : <EOL> thread . interrupt_main ( ) <EOL> def start_the_debugger ( self , gui_adap_oid ) : <EOL> return RemoteDebugger . start_debugger ( self . rpchandler , gui_adap_oid ) <EOL> def stop_the_debugger ( self , idb_adap_oid ) : <EOL> "<STR_LIT>" <EOL> self . rpchandler . unregister ( idb_adap_oid ) <EOL> def get_the_calltip ( self , name ) : <EOL> return self . calltip . fetch_tip ( name ) <EOL> def get_the_completion_list ( self , what , mode ) : <EOL> return self . autocomplete . fetch_completions ( what , mode ) <EOL> def stackviewer ( self , flist_oid = None ) : <EOL> if self . usr_exc_info : <EOL> typ , val , tb = self . usr_exc_info <EOL> else : <EOL> return None <EOL> flist = None <EOL> if flist_oid is not None : <EOL> flist = self . rpchandler . get_remote_proxy ( flist_oid ) <EOL> while tb and tb . tb_frame . f_globals [ "<STR_LIT>" ] in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> tb = tb . tb_next <EOL> sys . last_type = typ <EOL> sys . last_value = val <EOL> item = StackViewer . StackTreeItem ( flist , tb ) <EOL> return RemoteObjectBrowser . remote_object_tree_item ( item ) </s>
<s> """<STR_LIT>""" <EOL> from HyperParser import HyperParser <EOL> from configHandler import idleConf <EOL> _openers = { '<STR_LIT:)>' : '<STR_LIT:(>' , '<STR_LIT:]>' : '<STR_LIT:[>' , '<STR_LIT:}>' : '<STR_LIT:{>' } <EOL> CHECK_DELAY = <NUM_LIT:100> <EOL> class ParenMatch : <EOL> """<STR_LIT>""" <EOL> menudefs = [ <EOL> ( '<STR_LIT>' , [ <EOL> ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> ] ) <EOL> ] <EOL> STYLE = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> default = '<STR_LIT>' ) <EOL> FLASH_DELAY = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> type = '<STR_LIT:int>' , default = <NUM_LIT> ) <EOL> HILITE_CONFIG = idleConf . GetHighlight ( idleConf . CurrentTheme ( ) , '<STR_LIT>' ) <EOL> BELL = idleConf . GetOption ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> type = '<STR_LIT:bool>' , default = <NUM_LIT:1> ) <EOL> RESTORE_VIRTUAL_EVENT_NAME = "<STR_LIT>" <EOL> RESTORE_SEQUENCES = ( "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) <EOL> def __init__ ( self , editwin ) : <EOL> self . editwin = editwin <EOL> self . text = editwin . text <EOL> editwin . text . bind ( self . RESTORE_VIRTUAL_EVENT_NAME , <EOL> self . restore_event ) <EOL> self . counter = <NUM_LIT:0> <EOL> self . is_restore_active = <NUM_LIT:0> <EOL> self . set_style ( self . STYLE ) <EOL> def activate_restore ( self ) : <EOL> if not self . is_restore_active : <EOL> for seq in self . RESTORE_SEQUENCES : <EOL> self . text . event_add ( self . RESTORE_VIRTUAL_EVENT_NAME , seq ) <EOL> self . is_restore_active = True <EOL> def deactivate_restore ( self ) : <EOL> if self . is_restore_active : <EOL> for seq in self . RESTORE_SEQUENCES : <EOL> self . text . event_delete ( self . RESTORE_VIRTUAL_EVENT_NAME , seq ) <EOL> self . is_restore_active = False <EOL> def set_style ( self , style ) : <EOL> self . STYLE = style <EOL> if style == "<STR_LIT:default>" : <EOL> self . create_tag = self . create_tag_default <EOL> self . set_timeout = self . 
set_timeout_last <EOL> elif style == "<STR_LIT>" : <EOL> self . create_tag = self . create_tag_expression <EOL> self . set_timeout = self . set_timeout_none <EOL> def flash_paren_event ( self , event ) : <EOL> indices = HyperParser ( self . editwin , "<STR_LIT>" ) . get_surrounding_brackets ( ) <EOL> if indices is None : <EOL> self . warn_mismatched ( ) <EOL> return <EOL> self . activate_restore ( ) <EOL> self . create_tag ( indices ) <EOL> self . set_timeout_last ( ) <EOL> def paren_closed_event ( self , event ) : <EOL> closer = self . text . get ( "<STR_LIT>" ) <EOL> if closer not in _openers : <EOL> return <EOL> hp = HyperParser ( self . editwin , "<STR_LIT>" ) <EOL> if not hp . is_in_code ( ) : <EOL> return <EOL> indices = hp . get_surrounding_brackets ( _openers [ closer ] , True ) <EOL> if indices is None : <EOL> self . warn_mismatched ( ) <EOL> return <EOL> self . activate_restore ( ) <EOL> self . create_tag ( indices ) <EOL> self . set_timeout ( ) <EOL> def restore_event ( self , event = None ) : <EOL> self . text . tag_delete ( "<STR_LIT>" ) <EOL> self . deactivate_restore ( ) <EOL> self . counter += <NUM_LIT:1> <EOL> def handle_restore_timer ( self , timer_count ) : <EOL> if timer_count == self . counter : <EOL> self . restore_event ( ) <EOL> def warn_mismatched ( self ) : <EOL> if self . BELL : <EOL> self . text . bell ( ) <EOL> def create_tag_default ( self , indices ) : <EOL> """<STR_LIT>""" <EOL> self . text . tag_add ( "<STR_LIT>" , indices [ <NUM_LIT:0> ] ) <EOL> self . text . tag_config ( "<STR_LIT>" , self . HILITE_CONFIG ) <EOL> def create_tag_expression ( self , indices ) : <EOL> """<STR_LIT>""" <EOL> if self . text . get ( indices [ <NUM_LIT:1> ] ) in ( '<STR_LIT:)>' , '<STR_LIT:]>' , '<STR_LIT:}>' ) : <EOL> rightindex = indices [ <NUM_LIT:1> ] + "<STR_LIT>" <EOL> else : <EOL> rightindex = indices [ <NUM_LIT:1> ] <EOL> self . text . tag_add ( "<STR_LIT>" , indices [ <NUM_LIT:0> ] , rightindex ) <EOL> self . text . 
tag_config ( "<STR_LIT>" , self . HILITE_CONFIG ) <EOL> def set_timeout_none ( self ) : <EOL> """<STR_LIT>""" <EOL> self . counter += <NUM_LIT:1> <EOL> def callme ( callme , self = self , c = self . counter , <EOL> index = self . text . index ( "<STR_LIT>" ) ) : <EOL> if index != self . text . index ( "<STR_LIT>" ) : <EOL> self . handle_restore_timer ( c ) <EOL> else : <EOL> self . editwin . text_frame . after ( CHECK_DELAY , callme , callme ) <EOL> self . editwin . text_frame . after ( CHECK_DELAY , callme , callme ) <EOL> def set_timeout_last ( self ) : <EOL> """<STR_LIT>""" <EOL> self . counter += <NUM_LIT:1> <EOL> self . editwin . text_frame . after ( self . FLASH_DELAY , <EOL> lambda self = self , c = self . counter : self . handle_restore_timer ( c ) ) </s>
<s> """<STR_LIT>""" <EOL> from tkinter import * <EOL> import tkinter . messagebox as tkMessageBox <EOL> import string <EOL> from . import macosxSupport <EOL> class GetKeysDialog ( Toplevel ) : <EOL> def __init__ ( self , parent , title , action , currentKeySequences ) : <EOL> """<STR_LIT>""" <EOL> Toplevel . __init__ ( self , parent ) <EOL> self . configure ( borderwidth = <NUM_LIT:5> ) <EOL> self . resizable ( height = FALSE , width = FALSE ) <EOL> self . title ( title ) <EOL> self . transient ( parent ) <EOL> self . grab_set ( ) <EOL> self . protocol ( "<STR_LIT>" , self . Cancel ) <EOL> self . parent = parent <EOL> self . action = action <EOL> self . currentKeySequences = currentKeySequences <EOL> self . result = '<STR_LIT>' <EOL> self . keyString = StringVar ( self ) <EOL> self . keyString . set ( '<STR_LIT>' ) <EOL> self . SetModifiersForPlatform ( ) <EOL> self . modifier_vars = [ ] <EOL> for modifier in self . modifiers : <EOL> variable = StringVar ( self ) <EOL> variable . set ( '<STR_LIT>' ) <EOL> self . modifier_vars . append ( variable ) <EOL> self . advanced = False <EOL> self . CreateWidgets ( ) <EOL> self . LoadFinalKeyList ( ) <EOL> self . withdraw ( ) <EOL> self . update_idletasks ( ) <EOL> self . geometry ( "<STR_LIT>" % <EOL> ( ( parent . winfo_rootx ( ) + ( ( parent . winfo_width ( ) / <NUM_LIT:2> ) <EOL> - ( self . winfo_reqwidth ( ) / <NUM_LIT:2> ) ) , <EOL> parent . winfo_rooty ( ) + ( ( parent . winfo_height ( ) / <NUM_LIT:2> ) <EOL> - ( self . winfo_reqheight ( ) / <NUM_LIT:2> ) ) ) ) ) <EOL> self . deiconify ( ) <EOL> self . wait_window ( ) <EOL> def CreateWidgets ( self ) : <EOL> frameMain = Frame ( self , borderwidth = <NUM_LIT:2> , relief = SUNKEN ) <EOL> frameMain . pack ( side = TOP , expand = TRUE , fill = BOTH ) <EOL> frameButtons = Frame ( self ) <EOL> frameButtons . pack ( side = BOTTOM , fill = X ) <EOL> self . buttonOK = Button ( frameButtons , text = '<STR_LIT:OK>' , <EOL> width = <NUM_LIT:8> , command = self . OK ) <EOL> self . 
buttonOK . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . buttonCancel = Button ( frameButtons , text = '<STR_LIT>' , <EOL> width = <NUM_LIT:8> , command = self . Cancel ) <EOL> self . buttonCancel . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:1> , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . frameKeySeqBasic = Frame ( frameMain ) <EOL> self . frameKeySeqAdvanced = Frame ( frameMain ) <EOL> self . frameControlsBasic = Frame ( frameMain ) <EOL> self . frameHelpAdvanced = Frame ( frameMain ) <EOL> self . frameKeySeqAdvanced . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = NSEW , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . frameKeySeqBasic . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = NSEW , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> self . frameKeySeqBasic . lift ( ) <EOL> self . frameHelpAdvanced . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = NSEW , padx = <NUM_LIT:5> ) <EOL> self . frameControlsBasic . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , sticky = NSEW , padx = <NUM_LIT:5> ) <EOL> self . frameControlsBasic . lift ( ) <EOL> self . buttonLevel = Button ( frameMain , command = self . ToggleLevel , <EOL> text = '<STR_LIT>' ) <EOL> self . buttonLevel . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:0> , stick = EW , padx = <NUM_LIT:5> , pady = <NUM_LIT:5> ) <EOL> labelTitleBasic = Label ( self . frameKeySeqBasic , <EOL> text = "<STR_LIT>" + self . action + "<STR_LIT>" ) <EOL> labelTitleBasic . pack ( anchor = W ) <EOL> labelKeysBasic = Label ( self . frameKeySeqBasic , justify = LEFT , <EOL> textvariable = self . keyString , relief = GROOVE , borderwidth = <NUM_LIT:2> ) <EOL> labelKeysBasic . pack ( ipadx = <NUM_LIT:5> , ipady = <NUM_LIT:5> , fill = X ) <EOL> self . modifier_checkbuttons = { } <EOL> column = <NUM_LIT:0> <EOL> for modifier , variable in zip ( self . modifiers , self . modifier_vars ) : <EOL> label = self . 
modifier_label . get ( modifier , modifier ) <EOL> check = Checkbutton ( self . frameControlsBasic , <EOL> command = self . BuildKeyString , <EOL> text = label , variable = variable , onvalue = modifier , offvalue = '<STR_LIT>' ) <EOL> check . grid ( row = <NUM_LIT:0> , column = column , padx = <NUM_LIT:2> , sticky = W ) <EOL> self . modifier_checkbuttons [ modifier ] = check <EOL> column += <NUM_LIT:1> <EOL> labelFnAdvice = Label ( self . frameControlsBasic , justify = LEFT , <EOL> text = "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> labelFnAdvice . grid ( row = <NUM_LIT:1> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:4> , padx = <NUM_LIT:2> , sticky = W ) <EOL> self . listKeysFinal = Listbox ( self . frameControlsBasic , width = <NUM_LIT:15> , height = <NUM_LIT:10> , <EOL> selectmode = SINGLE ) <EOL> self . listKeysFinal . bind ( '<STR_LIT>' , self . FinalKeySelected ) <EOL> self . listKeysFinal . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:4> , rowspan = <NUM_LIT:4> , sticky = NS ) <EOL> scrollKeysFinal = Scrollbar ( self . frameControlsBasic , orient = VERTICAL , <EOL> command = self . listKeysFinal . yview ) <EOL> self . listKeysFinal . config ( yscrollcommand = scrollKeysFinal . set ) <EOL> scrollKeysFinal . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:5> , rowspan = <NUM_LIT:4> , sticky = NS ) <EOL> self . buttonClear = Button ( self . frameControlsBasic , <EOL> text = '<STR_LIT>' , command = self . ClearKeySeq ) <EOL> self . buttonClear . grid ( row = <NUM_LIT:2> , column = <NUM_LIT:0> , columnspan = <NUM_LIT:4> ) <EOL> labelTitleAdvanced = Label ( self . frameKeySeqAdvanced , justify = LEFT , <EOL> text = "<STR_LIT>" + self . action + "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> labelTitleAdvanced . pack ( anchor = W ) <EOL> self . entryKeysAdvanced = Entry ( self . frameKeySeqAdvanced , <EOL> textvariable = self . keyString ) <EOL> self . entryKeysAdvanced . 
pack ( fill = X ) <EOL> labelHelpAdvanced = Label ( self . frameHelpAdvanced , justify = LEFT , <EOL> text = "<STR_LIT>" + <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> labelHelpAdvanced . grid ( row = <NUM_LIT:0> , column = <NUM_LIT:0> , sticky = NSEW ) <EOL> def SetModifiersForPlatform ( self ) : <EOL> """<STR_LIT>""" <EOL> import sys <EOL> if macosxSupport . runningAsOSXApp ( ) : <EOL> self . modifiers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> else : <EOL> self . modifiers = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] <EOL> self . modifier_label = { '<STR_LIT>' : '<STR_LIT>' } <EOL> def ToggleLevel ( self ) : <EOL> if self . buttonLevel . cget ( '<STR_LIT:text>' ) [ : <NUM_LIT:8> ] == '<STR_LIT>' : <EOL> self . ClearKeySeq ( ) <EOL> self . buttonLevel . config ( text = '<STR_LIT>' ) <EOL> self . frameKeySeqAdvanced . lift ( ) <EOL> self . frameHelpAdvanced . lift ( ) <EOL> self . entryKeysAdvanced . focus_set ( ) <EOL> self . advanced = True <EOL> else : <EOL> self . ClearKeySeq ( ) <EOL> self . buttonLevel . config ( text = '<STR_LIT>' ) <EOL> self . frameKeySeqBasic . lift ( ) <EOL> self . frameControlsBasic . lift ( ) <EOL> self . advanced = False <EOL> def FinalKeySelected ( self , event ) : <EOL> self . BuildKeyString ( ) <EOL> def BuildKeyString ( self ) : <EOL> keyList = modifiers = self . GetModifiers ( ) <EOL> finalKey = self . listKeysFinal . get ( ANCHOR ) <EOL> if finalKey : <EOL> finalKey = self . TranslateKey ( finalKey , modifiers ) <EOL> keyList . append ( finalKey ) <EOL> self . keyString . set ( '<STR_LIT:<>' + '<STR_LIT:->' . join ( keyList ) + '<STR_LIT:>>' ) <EOL> def GetModifiers ( self ) : <EOL> modList = [ variable . get ( ) for variable in self . modifier_vars ] <EOL> return [ mod for mod in modList if mod ] <EOL> def ClearKeySeq ( self ) : <EOL> self . listKeysFinal . 
select_clear ( <NUM_LIT:0> , END ) <EOL> self . listKeysFinal . yview ( MOVETO , '<STR_LIT>' ) <EOL> for variable in self . modifier_vars : <EOL> variable . set ( '<STR_LIT>' ) <EOL> self . keyString . set ( '<STR_LIT>' ) <EOL> def LoadFinalKeyList ( self ) : <EOL> self . functionKeys = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . alphanumKeys = tuple ( string . ascii_lowercase + string . digits ) <EOL> self . punctuationKeys = tuple ( '<STR_LIT>' ) <EOL> self . whitespaceKeys = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . editKeys = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> self . moveKeys = ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> keys = ( self . alphanumKeys + self . punctuationKeys + self . functionKeys + <EOL> self . whitespaceKeys + self . editKeys + self . moveKeys ) <EOL> self . listKeysFinal . 
insert ( END , * keys ) <EOL> def TranslateKey ( self , key , modifiers ) : <EOL> "<STR_LIT>" <EOL> translateDict = { '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:!>' : '<STR_LIT>' , '<STR_LIT:@>' : '<STR_LIT>' , '<STR_LIT:#>' : '<STR_LIT>' , <EOL> '<STR_LIT:%>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:&>' : '<STR_LIT>' , '<STR_LIT:*>' : '<STR_LIT>' , <EOL> '<STR_LIT:(>' : '<STR_LIT>' , '<STR_LIT:)>' : '<STR_LIT>' , '<STR_LIT:_>' : '<STR_LIT>' , '<STR_LIT:->' : '<STR_LIT>' , <EOL> '<STR_LIT:+>' : '<STR_LIT>' , '<STR_LIT:=>' : '<STR_LIT>' , '<STR_LIT:{>' : '<STR_LIT>' , '<STR_LIT:}>' : '<STR_LIT>' , <EOL> '<STR_LIT:[>' : '<STR_LIT>' , '<STR_LIT:]>' : '<STR_LIT>' , '<STR_LIT:|>' : '<STR_LIT:bar>' , '<STR_LIT:;>' : '<STR_LIT>' , <EOL> '<STR_LIT::>' : '<STR_LIT>' , '<STR_LIT:U+002C>' : '<STR_LIT>' , '<STR_LIT:.>' : '<STR_LIT>' , '<STR_LIT:<>' : '<STR_LIT>' , '<STR_LIT:>>' : '<STR_LIT>' , <EOL> '<STR_LIT:/>' : '<STR_LIT>' , '<STR_LIT:?>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } <EOL> if key in translateDict : <EOL> key = translateDict [ key ] <EOL> if '<STR_LIT>' in modifiers and key in string . ascii_lowercase : <EOL> key = key . upper ( ) <EOL> key = '<STR_LIT>' + key <EOL> return key <EOL> def OK ( self , event = None ) : <EOL> if self . advanced or self . KeysOK ( ) : <EOL> self . result = self . keyString . get ( ) <EOL> self . destroy ( ) <EOL> def Cancel ( self , event = None ) : <EOL> self . result = '<STR_LIT>' <EOL> self . destroy ( ) <EOL> def KeysOK ( self ) : <EOL> '''<STR_LIT>''' <EOL> keys = self . keyString . get ( ) <EOL> keys . strip ( ) <EOL> finalKey = self . listKeysFinal . get ( ANCHOR ) <EOL> modifiers = self . GetModifiers ( ) <EOL> keySequence = keys . 
split ( ) <EOL> keysOK = False <EOL> title = '<STR_LIT>' <EOL> if not keys : <EOL> tkMessageBox . showerror ( title = title , parent = self , <EOL> message = '<STR_LIT>' ) <EOL> elif not keys . endswith ( '<STR_LIT:>>' ) : <EOL> tkMessageBox . showerror ( title = title , parent = self , <EOL> message = '<STR_LIT>' ) <EOL> elif ( not modifiers <EOL> and finalKey not in self . functionKeys + self . moveKeys ) : <EOL> tkMessageBox . showerror ( title = title , parent = self , <EOL> message = '<STR_LIT>' ) <EOL> elif ( modifiers == [ '<STR_LIT>' ] ) and ( finalKey not in <EOL> self . functionKeys + self . moveKeys + ( '<STR_LIT>' , '<STR_LIT>' ) ) : <EOL> msg = '<STR_LIT>' '<STR_LIT>' <EOL> tkMessageBox . showerror ( title = title , parent = self , message = msg ) <EOL> elif keySequence in self . currentKeySequences : <EOL> msg = '<STR_LIT>' <EOL> tkMessageBox . showerror ( title = title , parent = self , message = msg ) <EOL> else : <EOL> keysOK = True <EOL> return keysOK <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> root = Tk ( ) <EOL> def run ( ) : <EOL> keySeq = '<STR_LIT>' <EOL> dlg = GetKeysDialog ( root , '<STR_LIT>' , '<STR_LIT>' , [ ] ) <EOL> print ( dlg . result ) <EOL> Button ( root , text = '<STR_LIT>' , command = run ) . pack ( ) <EOL> root . mainloop ( ) </s>
<s> from visual import * <EOL> print ( "<STR_LIT>" ) <EOL> scene . autocenter = True <EOL> scene . width = <NUM_LIT> <EOL> scene . height = <NUM_LIT> <EOL> mfrm = frame ( axis = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> rfrm = frame ( frame = mfrm ) <EOL> g1 = shapes . circle ( radius = <NUM_LIT> ) <EOL> ns = <NUM_LIT> <EOL> for i in range ( ns ) : <EOL> t = shapes . rectangle ( pos = ( <NUM_LIT> * cos ( i * <NUM_LIT:2> * pi / ns ) , <NUM_LIT> * sin ( i * <NUM_LIT:2> * pi / ns ) ) , <EOL> width = <NUM_LIT> , height = <NUM_LIT> , rotate = i * <NUM_LIT:2> * pi / ns ) <EOL> g1 = g1 - t <EOL> g1 = g1 - shapes . circle ( radius = <NUM_LIT:0.5> ) <EOL> cl = <NUM_LIT> <EOL> ge1 = extrusion ( pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , cl ) ] , shape = g1 , color = ( <NUM_LIT:1> , <NUM_LIT:0.5> , <NUM_LIT> ) , <EOL> material = materials . rough , frame = rfrm ) <EOL> g2 = shapes . circle ( radius = <NUM_LIT> ) <EOL> ns = <NUM_LIT> <EOL> sphs = [ ] <EOL> for i in range ( ns ) : <EOL> t = shapes . rectangle ( pos = ( <NUM_LIT> * cos ( i * <NUM_LIT:2> * pi / ns ) , <NUM_LIT> * sin ( i * <NUM_LIT:2> * pi / ns ) ) , <EOL> width = <NUM_LIT> , height = <NUM_LIT> , rotate = i * <NUM_LIT:2> * pi / ns ) <EOL> g2 = g2 - t <EOL> sldr = sphere ( frame = rfrm , pos = ( <NUM_LIT> * cos ( i * <NUM_LIT:2> * pi / ns + pi / ns ) , <NUM_LIT> * sin ( i * <NUM_LIT:2> * pi / ns + pi / ns ) , <NUM_LIT> ) , <EOL> radius = <NUM_LIT:0.1> , material = materials . shiny ) <EOL> sphs . append ( sldr ) <EOL> g2 = g2 - shapes . circle ( radius = <NUM_LIT> ) <EOL> ge2 = extrusion ( pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:2> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) ] , shape = g2 , color = ( <NUM_LIT:1> , <NUM_LIT:0.5> , <NUM_LIT> ) , <EOL> material = materials . rough , frame = rfrm ) <EOL> sc = shapes . 
circle ( radius = <NUM_LIT:0.5> , thickness = <NUM_LIT> ) <EOL> sce = extrusion ( pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:0.5> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) ] , shape = sc , color = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> material = materials . plastic , frame = rfrm ) <EOL> shaft = cylinder ( frame = rfrm , pos = ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT> ) , axis = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:12> ) , radius = <NUM_LIT> , <EOL> material = materials . blazed ) <EOL> gr = shapes . gear ( n = <NUM_LIT:9> , radius = <NUM_LIT> , addendum = <NUM_LIT> , dedendum = <NUM_LIT> , fradius = <NUM_LIT> ) <EOL> gre = extrusion ( frame = rfrm , pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT:3> ) ] , shape = gr , <EOL> material = materials . blazed ) <EOL> g3 = shapes . circle ( radius = <NUM_LIT> ) <EOL> ns = <NUM_LIT:12> <EOL> for i in range ( ns ) : <EOL> t1 = shapes . rectangle ( pos = ( <NUM_LIT:3> * cos ( i * <NUM_LIT:2> * pi / ns ) , <NUM_LIT:3> * sin ( i * <NUM_LIT:2> * pi / ns ) ) , <EOL> width = <NUM_LIT> , height = <NUM_LIT> , rotate = i * <NUM_LIT:2> * pi / ns ) <EOL> t2 = shapes . trapezoid ( pos = ( <NUM_LIT> * cos ( i * <NUM_LIT:2> * pi / ns ) , <NUM_LIT> * sin ( i * <NUM_LIT:2> * pi / ns ) ) , <EOL> width = <NUM_LIT> , top = <NUM_LIT:0.5> , height = <NUM_LIT> , roundness = <NUM_LIT:0.1> , <EOL> rotate = i * <NUM_LIT:2> * pi / ns + pi / <NUM_LIT:2> , ) <EOL> g3 = g3 - t2 - t1 <EOL> g3 = g3 - shapes . 
circle ( radius = <NUM_LIT> ) <EOL> ps = <NUM_LIT> <EOL> dlt = <NUM_LIT> <EOL> thk = <NUM_LIT> <EOL> nl = <NUM_LIT:1> <EOL> cf = frame ( frame = rfrm , pos = ( <NUM_LIT:0> , <NUM_LIT:0> , thk / <NUM_LIT> + cl / <NUM_LIT> ) ) <EOL> for i in range ( nl ) : <EOL> ge3 = extrusion ( pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , i * dlt ) , ( <NUM_LIT:0> , <NUM_LIT:0> , i * dlt + thk ) ] , shape = g3 , <EOL> color = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , twist = <NUM_LIT:0.0> , frame = cf ) <EOL> N = <NUM_LIT:20> <EOL> vright = vector ( <NUM_LIT> , <NUM_LIT> ) <EOL> r = mag ( vright ) / ( <NUM_LIT:2> * N ) <EOL> vright = norm ( vright ) <EOL> S = Polygon ( [ ( - <NUM_LIT> , - <NUM_LIT> ) , ( <NUM_LIT:0> , - <NUM_LIT> ) , ( <NUM_LIT> , <NUM_LIT> ) , ( - <NUM_LIT> , <NUM_LIT> ) ] ) <EOL> for n in range ( N ) : <EOL> right = vector ( <NUM_LIT:0> , - <NUM_LIT> ) + ( r + n * <NUM_LIT:2> * r ) * vright <EOL> S += shapes . circle ( pos = ( right . x , right . y ) , radius = r , np = <NUM_LIT:4> ) <EOL> P = shapes . rectangle ( width = <NUM_LIT> , height = thk ) <EOL> P += shapes . circle ( pos = ( <NUM_LIT:0> , - thk / <NUM_LIT:2> ) , radius = <NUM_LIT> , np = <NUM_LIT:10> ) <EOL> P += shapes . circle ( pos = ( <NUM_LIT:0> , + thk / <NUM_LIT:2> ) , radius = <NUM_LIT> , np = <NUM_LIT:10> ) <EOL> wrfs = [ ] <EOL> for i in range ( ns ) : <EOL> wrf = frame ( frame = cf , pos = ( <NUM_LIT:0> , <NUM_LIT:2> , thk / <NUM_LIT> ) ) <EOL> wrfs . append ( wrf ) <EOL> wre = extrusion ( frame = wrf , pos = P , shape = S , <EOL> color = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ) , material = materials . rough ) <EOL> wrf . rotate ( axis = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , angle = ( i * <NUM_LIT:2> * pi / ns + pi / ns ) , origin = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> for i in range ( ns ) : <EOL> curve ( frame = rfrm , pos = [ sphs [ i * <NUM_LIT:2> ] . pos , cf . pos + wrfs [ i ] . 
pos ] , radius = <NUM_LIT> , <EOL> color = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> ) ) <EOL> curve ( frame = rfrm , pos = [ sphs [ i * <NUM_LIT:2> + <NUM_LIT:1> ] . pos , cf . pos + wrfs [ i ] . pos ] , radius = <NUM_LIT> , <EOL> color = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> br = shapes . rectangle ( width = <NUM_LIT:5> , height = <NUM_LIT> ) - shapes . circle ( radius = <NUM_LIT> ) <EOL> bre = extrusion ( frame = mfrm , pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) ] , color = ( <NUM_LIT:0.1> , <NUM_LIT:0.1> , <NUM_LIT> ) , <EOL> material = materials . rough , shape = br ) <EOL> bh = shapes . rectangle ( width = <NUM_LIT> , height = <NUM_LIT:0.5> , thickness = <NUM_LIT:0.1> ) <EOL> bhe1 = extrusion ( frame = mfrm , pos = [ ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) , ( <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) ] , shape = bh , color = ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> ) , <EOL> material = materials . rough ) <EOL> bhe2 = extrusion ( frame = mfrm , pos = [ ( - <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) , ( - <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:1> ) ] , shape = bh , color = ( <NUM_LIT> , <NUM_LIT:1> , <NUM_LIT> ) , <EOL> material = materials . rough ) <EOL> scrh = shapes . circle ( radius = <NUM_LIT:1> ) - shapes . cross ( ) <EOL> scrh . scale ( <NUM_LIT> , <NUM_LIT> ) <EOL> scrhw1 = extrusion ( frame = mfrm , pos = [ ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) , ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) ] , shape = scrh , color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT> ) , <EOL> material = materials . rough ) <EOL> scrhw2 = extrusion ( frame = mfrm , pos = [ ( - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) , ( - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) ] , shape = scrh , color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT> ) , <EOL> material = materials . rough ) <EOL> scrb = shapes . rectangle ( scale = <NUM_LIT:0.1> ) <EOL> scrbe1 = extrusion ( frame = mfrm , pos = paths . 
line ( start = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) , end = ( <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:1> ) , <EOL> np = <NUM_LIT:20> ) , shape = scrb , twist = <NUM_LIT> , color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT> ) , <EOL> material = materials . rough ) <EOL> scrbe2 = extrusion ( frame = mfrm , pos = paths . line ( start = ( - <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) , end = ( - <NUM_LIT> , - <NUM_LIT> , <NUM_LIT:1> ) , <EOL> np = <NUM_LIT:20> ) , shape = scrb , twist = <NUM_LIT> , color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT> ) , <EOL> material = materials . rough ) <EOL> crdl = ( shapes . rectangle ( pos = ( <NUM_LIT:0> , - <NUM_LIT> ) , width = <NUM_LIT> , height = <NUM_LIT> ) - <EOL> shapes . circle ( radius = <NUM_LIT> ) - shapes . circle ( pos = ( - <NUM_LIT> , - <NUM_LIT> ) , radius = <NUM_LIT:0.1> ) ) <EOL> crdle = extrusion ( frame = mfrm , pos = [ ( <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> ) , ( <NUM_LIT:0> , - <NUM_LIT> , <NUM_LIT> ) ] , <EOL> material = materials . plastic , shape = crdl ) <EOL> cbl1i = curve ( frame = mfrm , pos = [ scrhw1 . pos [ - <NUM_LIT:2> ] , scrhw1 . pos [ - <NUM_LIT:2> ] - vector ( - <NUM_LIT:2> , <NUM_LIT:0> , <NUM_LIT:0> ) ] , <EOL> radius = <NUM_LIT> , color = ge1 . color ) <EOL> cbl1o = curve ( frame = mfrm , pos = [ scrhw1 . pos [ - <NUM_LIT:2> ] , scrhw1 . pos [ - <NUM_LIT:2> ] - vector ( - <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) ] , <EOL> radius = <NUM_LIT> , color = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> cbl2i = curve ( frame = mfrm , pos = [ scrhw2 . pos [ - <NUM_LIT:2> ] , scrhw2 . pos [ - <NUM_LIT:2> ] + vector ( - <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:0> ) ] , <EOL> radius = <NUM_LIT> , color = ge1 . color ) <EOL> cbl2i . append ( pos = cbl2i . pos [ - <NUM_LIT:1> ] + ( <NUM_LIT:0> , - <NUM_LIT:2> , <NUM_LIT:0> ) ) <EOL> cbl2i . append ( pos = cbl2i . pos [ - <NUM_LIT:1> ] + ( <NUM_LIT:7> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> cbl2o = curve ( frame = mfrm , pos = cbl2i . 
pos , radius = <NUM_LIT> , color = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> cbl2o . pos [ - <NUM_LIT:1> ] -= ( <NUM_LIT:0.5> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> br = shapes . rectangle ( width = <NUM_LIT> , height = <NUM_LIT> ) - shapes . circle ( radius = <NUM_LIT> ) - shapes . rectangle ( width = <NUM_LIT> , height = <NUM_LIT> ) <EOL> b1f = frame ( frame = rfrm , pos = ( <NUM_LIT:0> , <NUM_LIT:0> , - <NUM_LIT> ) ) <EOL> br1 = extrusion ( frame = b1f , pos = paths . circle ( radius = <NUM_LIT> ) , shape = br , <EOL> material = materials . blazed ) <EOL> b1f . rotate ( angle = pi / <NUM_LIT:2> ) <EOL> b2f = frame ( frame = rfrm , pos = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT> ) ) <EOL> br2 = extrusion ( frame = b2f , pos = paths . circle ( radius = <NUM_LIT> ) , shape = br , <EOL> material = materials . blazed ) <EOL> b2f . rotate ( angle = pi / <NUM_LIT:2> ) <EOL> bbrs1 = [ ] <EOL> bbrs2 = [ ] <EOL> for i in range ( <NUM_LIT:7> ) : <EOL> bbrs1 . append ( sphere ( frame = rfrm , pos = ( <NUM_LIT> * cos ( i * <NUM_LIT:2> * pi / <NUM_LIT> ) , <NUM_LIT> * sin ( i * <NUM_LIT:2> * pi / <NUM_LIT> ) , - <NUM_LIT> ) , <EOL> radius = <NUM_LIT> , material = materials . rough ) ) <EOL> bbrs2 . append ( sphere ( frame = rfrm , pos = ( <NUM_LIT> * cos ( i * <NUM_LIT:2> * pi / <NUM_LIT> ) , <NUM_LIT> * sin ( i * <NUM_LIT:2> * pi / <NUM_LIT> ) , <NUM_LIT> ) , <EOL> radius = <NUM_LIT> , material = materials . rough ) ) <EOL> stb = ( shapes . rectangle ( pos = ( <NUM_LIT:0> , - <NUM_LIT> ) , width = <NUM_LIT:6> , height = <NUM_LIT:3> , roundness = <NUM_LIT:0.5> ) - <EOL> shapes . rectangle ( width = <NUM_LIT> , height = <NUM_LIT> ) - shapes . circle ( radius = <NUM_LIT> ) - <EOL> shapes . circle ( pos = ( <NUM_LIT> , - <NUM_LIT> ) , radius = <NUM_LIT> ) - <EOL> shapes . circle ( pos = ( <NUM_LIT> , - <NUM_LIT> ) , radius = <NUM_LIT> ) - <EOL> shapes . circle ( pos = ( - <NUM_LIT> , - <NUM_LIT> ) , radius = <NUM_LIT> ) - <EOL> shapes . 
circle ( pos = ( - <NUM_LIT> , - <NUM_LIT> ) , radius = <NUM_LIT> ) ) <EOL> stbe = extrusion ( frame = mfrm , pos = [ ( <NUM_LIT:0> , <NUM_LIT:0> , thk / <NUM_LIT> + cl / <NUM_LIT> ) , ( <NUM_LIT:0> , <NUM_LIT:0> , thk / <NUM_LIT> + cl / <NUM_LIT> + thk ) ] , shape = stb ) <EOL> tp = [ ] <EOL> pp = shapes . arc ( angle1 = - pi / <NUM_LIT> , angle2 = pi / <NUM_LIT> , radius = <NUM_LIT> ) <EOL> cp = pp . contour ( <NUM_LIT:0> ) [ <NUM_LIT:0> : len ( pp . contour ( <NUM_LIT:0> ) ) // <NUM_LIT:2> ] <EOL> for p in cp : <EOL> tp . append ( ( <NUM_LIT:0> , - p [ <NUM_LIT:0> ] , p [ <NUM_LIT:1> ] ) ) <EOL> tmp = [ ] <EOL> tmp . extend ( tp ) <EOL> tmp . reverse ( ) <EOL> tp . append ( vector ( tp [ - <NUM_LIT:1> ] ) - vector ( thk + <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> for p in tmp : <EOL> tp . append ( vector ( p ) - vector ( thk + <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> tp . append ( vector ( tp [ - <NUM_LIT:1> ] ) + vector ( thk + <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> sts = shapes . circle ( radius = <NUM_LIT> ) <EOL> sfrm = frame ( frame = mfrm , pos = ( <NUM_LIT:0> , <NUM_LIT:0> , thk + cl * <NUM_LIT:2> - <NUM_LIT> ) ) <EOL> stse = extrusion ( frame = sfrm , pos = tp , <EOL> shape = sts , color = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> sfrm . rotate ( axis = ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , angle = - pi / <NUM_LIT:2> ) <EOL> cvr = ( shapes . rectangle ( width = <NUM_LIT:3> , height = <NUM_LIT> , roundness = <NUM_LIT:0.1> ) + <EOL> shapes . rectangle ( width = <NUM_LIT:9> , height = <NUM_LIT:10> , roundness = <NUM_LIT:0.1> ) ) <EOL> cvrc = Polygon ( cvr . contour ( <NUM_LIT:0> ) ) <EOL> cvrc . scale ( <NUM_LIT> , <NUM_LIT> ) <EOL> cvr = ( cvr - cvrc - shapes . rectangle ( width = <NUM_LIT> , height = <NUM_LIT:12> ) - <EOL> shapes . 
rectangle ( pos = ( - <NUM_LIT:4> , <NUM_LIT:0> ) , width = <NUM_LIT:7> , height = <NUM_LIT:12> ) ) <EOL> cfrm = frame ( pos = ( - <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) ) <EOL> cvre = extrusion ( frame = cfrm , pos = paths . arc ( angle1 = - pi / <NUM_LIT:4> , angle2 = pi , radius = <NUM_LIT:0.1> ) , <EOL> shape = cvr , color = ( <NUM_LIT:0> , <NUM_LIT> , <NUM_LIT> ) , material = materials . rough ) <EOL> cfrm . rotate ( angle = pi / <NUM_LIT:2> ) <EOL> cfrm . rotate ( axis = ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , angle = pi / <NUM_LIT:2> ) <EOL> cfrm . rotate ( axis = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , angle = - pi / <NUM_LIT:2> ) <EOL> angl = pi / <NUM_LIT> <EOL> run = True <EOL> while True : <EOL> rate ( <NUM_LIT:100> ) <EOL> if run : <EOL> rfrm . rotate ( angle = angl , axis = ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) ) <EOL> if scene . mouse . events : <EOL> m = scene . mouse . getevent ( ) <EOL> if m . click == '<STR_LIT:left>' : <EOL> run = not run </s>
<s> from visual import * <EOL> from tictacdat import * <EOL> scene . width = <NUM_LIT> <EOL> scene . height = <NUM_LIT> <EOL> scene . title = "<STR_LIT>" <EOL> gray = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> yo = <NUM_LIT> <EOL> base = grid ( n = <NUM_LIT:4> , ds = <NUM_LIT:1> , gridcolor = gray ) <EOL> base . pos = base . pos + vector ( - <NUM_LIT:0.5> , - <NUM_LIT> , - <NUM_LIT:0.5> ) <EOL> second = grid ( n = <NUM_LIT:4> , ds = <NUM_LIT:1> , gridcolor = gray ) <EOL> second . pos = second . pos + vector ( - <NUM_LIT:0.5> , - <NUM_LIT:1.> , - <NUM_LIT:0.5> ) <EOL> third = grid ( n = <NUM_LIT:4> , ds = <NUM_LIT:1> , gridcolor = gray ) <EOL> third . pos = third . pos + vector ( - <NUM_LIT:0.5> , <NUM_LIT:0> , - <NUM_LIT:0.5> ) <EOL> top = grid ( n = <NUM_LIT:4> , ds = <NUM_LIT:1> , gridcolor = gray ) <EOL> top . pos = top . pos + vector ( - <NUM_LIT:0.5> , <NUM_LIT:1.> , - <NUM_LIT:0.5> ) <EOL> wins = win ( ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT:U+0020>" ) <EOL> bars = { } <EOL> balls = { } <EOL> form = '<STR_LIT>' <EOL> for x in arange ( - <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> ) : <EOL> for z in arange ( - <NUM_LIT:2> , <NUM_LIT:2> , <NUM_LIT:1> ) : <EOL> cyl = cylinder ( pos = ( x , - <NUM_LIT:2> , z ) , axis = ( <NUM_LIT:0> , <NUM_LIT:3> , <NUM_LIT:0> ) , radius = <NUM_LIT> , visible = <NUM_LIT:0> ) <EOL> loc = ( int ( round ( x ) ) , int ( round ( - yo ) ) , int ( round ( z ) ) ) <EOL> bars [ form . format ( loc ) ] = cyl <EOL> scene . center = ( - <NUM_LIT> , - <NUM_LIT> , - <NUM_LIT> ) <EOL> scene . forward = ( <NUM_LIT:0> , - <NUM_LIT> , - <NUM_LIT:1> ) <EOL> scene . 
autoscale = <NUM_LIT:0> <EOL> nballs = <NUM_LIT:0> <EOL> visbar = None <EOL> red = ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> blue = ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT:1> ) <EOL> bcolor = red <EOL> point = None <EOL> won = None <EOL> while len ( balls ) < <NUM_LIT:4> * <NUM_LIT:4> * <NUM_LIT:4> : <EOL> while True : <EOL> rate ( <NUM_LIT:100> ) <EOL> if scene . mouse . events : <EOL> p = scene . mouse . getevent ( ) <EOL> if p . drag : <EOL> point = p . project ( normal = vector ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , d = - yo ) <EOL> break <EOL> if point == None : continue <EOL> point = ( int ( round ( point [ <NUM_LIT:0> ] ) ) , int ( round ( point [ <NUM_LIT:1> ] ) ) , int ( round ( point [ <NUM_LIT:2> ] ) ) ) <EOL> if not ( visbar == None ) : visbar . visible = <NUM_LIT:0> <EOL> lookup = form . format ( point ) <EOL> if not ( lookup in bars ) : <EOL> continue <EOL> visbar = bars [ lookup ] <EOL> visbar . visible = <NUM_LIT:1> <EOL> nballs = nballs + <NUM_LIT:1> <EOL> b = sphere ( pos = point , radius = <NUM_LIT> , color = bcolor ) <EOL> while not scene . mouse . events : <EOL> rate ( <NUM_LIT:100> ) <EOL> y = scene . mouse . pos . y <EOL> if y > <NUM_LIT:1.> : y = <NUM_LIT:1.> <EOL> if y < - yo : y = - yo <EOL> b . y = y <EOL> scene . mouse . getevent ( ) <EOL> bpoint = ( int ( round ( b . x ) ) , int ( round ( b . y ) ) , int ( round ( b . z ) ) ) <EOL> lookup = form . format ( bpoint ) <EOL> if not ( form . format ( lookup ) in balls ) : <EOL> b . pos = bpoint <EOL> balls [ lookup ] = b <EOL> if bcolor == red : bcolor = blue <EOL> else : bcolor = red <EOL> else : <EOL> b . visible = <NUM_LIT:0> <EOL> visbar . visible = <NUM_LIT:0> <EOL> visbar = None <EOL> for a in wins : <EOL> a0 = a [ <NUM_LIT:0> ] in balls <EOL> a1 = a [ <NUM_LIT:1> ] in balls <EOL> a2 = a [ <NUM_LIT:2> ] in balls <EOL> a3 = a [ <NUM_LIT:3> ] in balls <EOL> if a0 and a1 and a2 and a3 : <EOL> ccolor = balls [ a [ <NUM_LIT:0> ] ] . color <EOL> if balls [ a [ <NUM_LIT:1> ] ] . 
color == balls [ a [ <NUM_LIT:2> ] ] . color == balls [ a [ <NUM_LIT:3> ] ] . color == ccolor : <EOL> won = ccolor <EOL> print ( "<STR_LIT:U+0020>" ) <EOL> if ccolor == red : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> print ( "<STR_LIT>" ) <EOL> for flash in range ( <NUM_LIT:5> ) : <EOL> sleep ( <NUM_LIT:0.1> ) <EOL> balls [ a [ <NUM_LIT:0> ] ] . color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> balls [ a [ <NUM_LIT:1> ] ] . color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> balls [ a [ <NUM_LIT:2> ] ] . color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> balls [ a [ <NUM_LIT:3> ] ] . color = ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> sleep ( <NUM_LIT:0.1> ) <EOL> balls [ a [ <NUM_LIT:0> ] ] . color = ccolor <EOL> balls [ a [ <NUM_LIT:1> ] ] . color = ccolor <EOL> balls [ a [ <NUM_LIT:2> ] ] . color = ccolor <EOL> balls [ a [ <NUM_LIT:3> ] ] . color = ccolor <EOL> rate ( <NUM_LIT:10> ) <EOL> break <EOL> if not ( won == None ) : <EOL> break <EOL> print ( "<STR_LIT>" ) </s>
<s> from reding . managers import ObjectSubjectsManager , SubjectObjectsManager , ObjectsManager <EOL> from reding . settings import KEY_CONFIG <EOL> from reding . settings import PAGINATION_DEFAULT_OFFSET as OFFSET <EOL> from reding . settings import PAGINATION_DEFAULT_SIZE as SIZE <EOL> from flask . ext . restful import reqparse , fields , marshal_with , abort <EOL> from flask . ext import restful <EOL> from time import time <EOL> from six import text_type <EOL> def get_user_object_reply ( object_id , user_id , vote , when , review ) : <EOL> return { <EOL> '<STR_LIT>' : object_id , <EOL> '<STR_LIT>' : user_id , <EOL> '<STR_LIT>' : vote , <EOL> '<STR_LIT>' : when , <EOL> '<STR_LIT>' : review <EOL> } <EOL> object_resource_fields = { <EOL> '<STR_LIT>' : fields . Integer , <EOL> '<STR_LIT>' : fields . Integer , <EOL> '<STR_LIT>' : fields . Float , <EOL> '<STR_LIT>' : fields . String , <EOL> } <EOL> user_object_resource_fields = { <EOL> '<STR_LIT>' : fields . Integer , <EOL> '<STR_LIT>' : fields . Raw , <EOL> '<STR_LIT>' : fields . String , <EOL> '<STR_LIT>' : fields . String , <EOL> '<STR_LIT>' : fields . DateTime <EOL> } <EOL> class RedingResource ( restful . Resource ) : <EOL> parser_cls = reqparse . RequestParser <EOL> def __init__ ( self ) : <EOL> super ( RedingResource , self ) . __init__ ( ) <EOL> self . parser = self . parser_cls ( ) <EOL> self . configure ( ) <EOL> def configure ( self ) : <EOL> for key in KEY_CONFIG : <EOL> self . parser . add_argument ( key , type = str ) <EOL> class VotedListResource ( RedingResource ) : <EOL> def configure ( self ) : <EOL> super ( VotedListResource , self ) . configure ( ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = str , action = '<STR_LIT>' ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = str , default = '<STR_LIT:+>' ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = int , default = OFFSET ) <EOL> self . parser . 
add_argument ( '<STR_LIT:size>' , type = int , default = SIZE ) <EOL> @ marshal_with ( object_resource_fields ) <EOL> def get ( self ) : <EOL> args = self . parser . parse_args ( ) <EOL> amounts = ObjectsManager ( ** args ) . scoredrange ( <EOL> offset = args [ '<STR_LIT>' ] , <EOL> size = args [ '<STR_LIT:size>' ] , <EOL> reverse = args [ '<STR_LIT>' ] == '<STR_LIT:->' , <EOL> ) <EOL> reply = [ ] <EOL> osmanager = ObjectSubjectsManager ( ** args ) <EOL> for object_id , amount in amounts : <EOL> votes_no = osmanager . count ( object_id = object_id ) <EOL> if votes_no : <EOL> reply . append ( <EOL> dict ( <EOL> votes_no = votes_no , <EOL> average = amount / votes_no , <EOL> amount = amount , <EOL> object_id = object_id , <EOL> ) <EOL> ) <EOL> return reply <EOL> def post ( self ) : <EOL> """<STR_LIT>""" <EOL> args = self . parser . parse_args ( ) <EOL> return ObjectsManager ( ** args ) . filtered ( <EOL> objects = args [ '<STR_LIT>' ] , <EOL> now = int ( time ( ) ) , <EOL> reverse = args [ '<STR_LIT>' ] == '<STR_LIT:->' , <EOL> ) <EOL> class VotedSummaryResource ( RedingResource ) : <EOL> def configure ( self ) : <EOL> super ( VotedSummaryResource , self ) . configure ( ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = int , default = <NUM_LIT:0> ) <EOL> @ marshal_with ( object_resource_fields ) <EOL> def get ( self , object_id ) : <EOL> args = self . parser . parse_args ( ) <EOL> vote = args [ '<STR_LIT>' ] <EOL> amount = ObjectsManager ( ** args ) . score ( object_id = object_id ) or <NUM_LIT:0> <EOL> votes_no = ObjectSubjectsManager ( ** args ) . 
count ( <EOL> object_id = object_id , <EOL> min_vote = vote or '<STR_LIT>' , <EOL> max_vote = vote or '<STR_LIT>' , <EOL> ) <EOL> if not votes_no : <EOL> average = <NUM_LIT:0> <EOL> amount = <NUM_LIT:0> <EOL> elif vote : <EOL> average = vote <EOL> amount = vote * votes_no <EOL> else : <EOL> average = amount / votes_no <EOL> return ( <EOL> dict ( <EOL> votes_no = votes_no , <EOL> average = average , <EOL> amount = amount , <EOL> object_id = object_id , <EOL> ) <EOL> ) <EOL> class VotingUserListResource ( RedingResource ) : <EOL> def configure ( self ) : <EOL> super ( VotingUserListResource , self ) . configure ( ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = str , default = '<STR_LIT:+>' ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = int , default = OFFSET ) <EOL> self . parser . add_argument ( '<STR_LIT:size>' , type = int , default = SIZE ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = int , default = <NUM_LIT:0> ) <EOL> @ marshal_with ( user_object_resource_fields ) <EOL> def get ( self , object_id ) : <EOL> args = self . parser . parse_args ( ) <EOL> osmanager = ObjectSubjectsManager ( ** args ) <EOL> somanager = SubjectObjectsManager ( ** args ) <EOL> votes = osmanager . scoredrange ( <EOL> object_id = object_id , <EOL> offset = args [ '<STR_LIT>' ] , <EOL> size = args [ '<STR_LIT:size>' ] , <EOL> min_vote = args [ '<STR_LIT>' ] or '<STR_LIT>' , <EOL> max_vote = args [ '<STR_LIT>' ] or '<STR_LIT>' , <EOL> reverse = args [ '<STR_LIT>' ] == '<STR_LIT:->' , <EOL> ) <EOL> if not votes : <EOL> return [ ] <EOL> reviews = osmanager . reviews ( object_id , * [ user_id for user_id , _ in votes ] ) <EOL> reply = [ <EOL> get_user_object_reply ( <EOL> object_id = object_id , <EOL> user_id = user_id , <EOL> vote = vote , <EOL> when = somanager . 
score ( user_id = user_id , object_id = object_id ) , <EOL> review = reviews [ user_id ] , <EOL> ) for user_id , vote in votes <EOL> ] <EOL> return reply <EOL> class UserSummaryResource ( RedingResource ) : <EOL> def configure ( self ) : <EOL> super ( UserSummaryResource , self ) . configure ( ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = str , default = '<STR_LIT:+>' ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = int , default = OFFSET ) <EOL> self . parser . add_argument ( '<STR_LIT:size>' , type = int , default = SIZE ) <EOL> @ marshal_with ( user_object_resource_fields ) <EOL> def get ( self , user_id ) : <EOL> args = self . parser . parse_args ( ) <EOL> osmanager = ObjectSubjectsManager ( ** args ) <EOL> somanager = SubjectObjectsManager ( ** args ) <EOL> votetimes = somanager . scoredrange ( <EOL> user_id = user_id , <EOL> offset = args [ '<STR_LIT>' ] , <EOL> size = args [ '<STR_LIT:size>' ] , <EOL> reverse = args [ '<STR_LIT>' ] == '<STR_LIT:->' , <EOL> ) <EOL> reply = [ <EOL> get_user_object_reply ( <EOL> object_id = object_id , <EOL> user_id = user_id , <EOL> vote = osmanager . score ( object_id = object_id , user_id = user_id ) , <EOL> review = osmanager . review ( object_id = object_id , user_id = user_id ) , <EOL> when = when , <EOL> ) for object_id , when in votetimes <EOL> ] <EOL> return reply <EOL> class VoteSummaryResource ( RedingResource ) : <EOL> @ marshal_with ( user_object_resource_fields ) <EOL> def get ( self , object_id , user_id ) : <EOL> args = self . parser . parse_args ( ) <EOL> osmanager = ObjectSubjectsManager ( ** args ) <EOL> somanager = SubjectObjectsManager ( ** args ) <EOL> vote = osmanager . score ( object_id = object_id , user_id = user_id ) <EOL> when = somanager . score ( user_id = user_id , object_id = object_id ) <EOL> if not ( vote and when ) : <EOL> message = "<STR_LIT>" . 
format ( <EOL> object_id = object_id , <EOL> user_id = user_id <EOL> ) <EOL> abort ( <NUM_LIT> , message = message ) <EOL> return get_user_object_reply ( <EOL> object_id = object_id , <EOL> user_id = user_id , <EOL> vote = vote , <EOL> when = when , <EOL> review = osmanager . review ( object_id = object_id , user_id = user_id ) , <EOL> ) <EOL> def post ( self , object_id , user_id ) : <EOL> return self . put ( object_id , user_id ) <EOL> @ marshal_with ( user_object_resource_fields ) <EOL> def put ( self , object_id , user_id ) : <EOL> self . parser . add_argument ( '<STR_LIT>' , type = int , required = True ) <EOL> self . parser . add_argument ( '<STR_LIT>' , type = text_type ) <EOL> args = self . parser . parse_args ( ) <EOL> osmanager = ObjectSubjectsManager ( ** args ) <EOL> somanager = SubjectObjectsManager ( ** args ) <EOL> self . _perform_correction ( object_id , user_id , args [ '<STR_LIT>' ] , args ) <EOL> osmanager . create ( object_id = object_id , user_id = user_id , vote = args [ '<STR_LIT>' ] , review = args [ '<STR_LIT>' ] ) <EOL> somanager . create ( user_id = user_id , object_id = object_id , timestamp = time ( ) ) <EOL> return get_user_object_reply ( <EOL> object_id = object_id , <EOL> user_id = user_id , <EOL> vote = osmanager . score ( object_id = object_id , user_id = user_id ) , <EOL> when = somanager . score ( user_id = user_id , object_id = object_id ) , <EOL> review = osmanager . review ( object_id = object_id , user_id = user_id ) , <EOL> ) <EOL> def delete ( self , object_id , user_id ) : <EOL> args = self . parser . parse_args ( ) <EOL> self . _perform_correction ( object_id , user_id , <NUM_LIT:0> , args ) <EOL> SubjectObjectsManager ( ** args ) . remove ( user_id = user_id , object_id = object_id ) <EOL> ObjectSubjectsManager ( ** args ) . 
remove ( object_id = object_id , user_id = user_id ) <EOL> return '<STR_LIT>' , <NUM_LIT> <EOL> def _perform_correction ( self , object_id , user_id , next_vote , args ) : <EOL> prev_vote = ObjectSubjectsManager ( ** args ) . score ( object_id = object_id , user_id = user_id ) or <NUM_LIT:0> <EOL> correction = next_vote - prev_vote <EOL> omanager = ObjectsManager ( ** args ) <EOL> omanager . incrby ( object_id = object_id , delta = correction ) <EOL> amount = omanager . score ( object_id = object_id ) <EOL> if amount == <NUM_LIT:0> : <EOL> omanager . remove ( object_id = object_id ) <EOL> __all__ = ( <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ) </s>
<s> import pandas as pd <EOL> import datetime <EOL> import pdfplumber <EOL> from pdfplumber . utils import within_bbox , collate_chars <EOL> import sys , os <EOL> COLUMNS = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT:state>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> DATA_START_TOP = <NUM_LIT> <EOL> DATA_END_TOP = <NUM_LIT> <EOL> def parse_field ( text ) : <EOL> if text == None : return None <EOL> if text [ <NUM_LIT:0> ] in "<STR_LIT>" : <EOL> return int ( text . replace ( "<STR_LIT:U+002C>" , "<STR_LIT>" ) ) <EOL> return text <EOL> def parse_month ( month_str ) : <EOL> d = datetime . datetime . strptime ( month_str , "<STR_LIT>" ) <EOL> return d . strftime ( "<STR_LIT>" ) <EOL> def validate_data ( checks ) : <EOL> try : <EOL> assert ( len ( checks ) > <NUM_LIT:0> ) <EOL> except : <EOL> raise Exception ( "<STR_LIT>" ) <EOL> for c in COLUMNS [ <NUM_LIT:2> : ] : <EOL> v_total = checks [ c ] . iloc [ - <NUM_LIT:1> ] <EOL> v_colsum = checks [ c ] . sum ( ) <EOL> try : <EOL> assert ( v_colsum == ( v_total * <NUM_LIT:2> ) ) <EOL> except : <EOL> raise Exception ( "<STR_LIT>" . format ( c ) ) <EOL> h_colsums = checks . fillna ( <NUM_LIT:0> ) . sum ( axis = <NUM_LIT:1> ) <EOL> h_totals = checks [ "<STR_LIT>" ] . fillna ( <NUM_LIT:0> ) <EOL> zipped = zip ( checks [ "<STR_LIT:state>" ] , h_colsums , h_totals ) <EOL> for state , h_colsum , h_total in zipped : <EOL> try : <EOL> assert ( h_colsum == ( h_total * <NUM_LIT:2> ) ) <EOL> except : <EOL> raise Exception ( "<STR_LIT>" . 
format ( state ) ) <EOL> def parse_value ( x ) : <EOL> if pd . isnull ( x ) : return None <EOL> return int ( x . replace ( "<STR_LIT:U+002C>" , "<STR_LIT>" ) ) <EOL> def parse_page ( page ) : <EOL> month_crop = page . crop ( ( <NUM_LIT:0> , <NUM_LIT> , page . width , <NUM_LIT> ) , strict = True ) <EOL> month_text = month_crop . extract_text ( x_tolerance = <NUM_LIT:2> ) <EOL> month = parse_month ( month_text ) <EOL> sys . stderr . write ( "<STR_LIT:\r>" + month ) <EOL> table_crop = page . crop ( ( <NUM_LIT:0> , <NUM_LIT> , page . width , <NUM_LIT> ) ) <EOL> _table = table_crop . extract_table ( h = "<STR_LIT>" , <EOL> x_tolerance = <NUM_LIT:5> , <EOL> y_tolerance = <NUM_LIT:5> , <EOL> gutter_min_height = <NUM_LIT:5> ) <EOL> table = pd . DataFrame ( [ [ month ] + row for row in _table ] ) <EOL> table . columns = COLUMNS <EOL> table [ table . columns [ <NUM_LIT:2> : ] ] = table [ table . columns [ <NUM_LIT:2> : ] ] . applymap ( parse_value ) <EOL> table . loc [ ( table [ "<STR_LIT:state>" ] == "<STR_LIT>" ) , "<STR_LIT:state>" ] = "<STR_LIT>" <EOL> try : validate_data ( table ) <EOL> except : raise Exception ( "<STR_LIT>" + month ) <EOL> return table <EOL> def parse_pdf ( file_obj ) : <EOL> pdf = pdfplumber . load ( file_obj ) <EOL> checks = pd . concat ( list ( map ( parse_page , pdf . pages ) ) ) . reset_index ( drop = True ) <EOL> return checks [ checks [ "<STR_LIT:state>" ] != "<STR_LIT>" ] <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> buf = getattr ( sys . stdin , '<STR_LIT>' , sys . stdin ) <EOL> checks = parse_pdf ( buf ) <EOL> checks . to_csv ( sys . stdout , index = False , float_format = "<STR_LIT>" ) <EOL> sys . stderr . write ( "<STR_LIT:\r\n>" ) </s>
<s> from __future__ import unicode_literals <EOL> from six import PY2 , PY3 <EOL> from six . moves import filter , map , range <EOL> cal1 = """<STR_LIT>""" <EOL> cal2 = """<STR_LIT>""" <EOL> cal3 = """<STR_LIT>""" <EOL> cal4 = """<STR_LIT>""" <EOL> cal5 = """<STR_LIT>""" <EOL> cal6 = """<STR_LIT>""" <EOL> cal7 = """<STR_LIT>""" <EOL> cal8 = """<STR_LIT>""" <EOL> cal9 = """<STR_LIT>""" <EOL> cal10 = u"""<STR_LIT>""" <EOL> cal11 = u"""<STR_LIT>""" <EOL> cal12 = """<STR_LIT>""" <EOL> cal13 = """<STR_LIT>""" <EOL> cal14 = u"""<STR_LIT>""" <EOL> cal15 = u"""<STR_LIT>""" <EOL> cal16 = u"""<STR_LIT>""" <EOL> cal17 = u"""<STR_LIT>""" <EOL> cal18 = u"""<STR_LIT>""" <EOL> cal19 = u"""<STR_LIT>""" <EOL> cal20 = u"""<STR_LIT>""" <EOL> unfolded_cal2 = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> unfolded_cal1 = [ <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> unfolded_cal6 = [ '<STR_LIT>' ] </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import shutil <EOL> import subprocess <EOL> from . . build_base import Build <EOL> from . . errors import BuildError <EOL> basedir = os . path . dirname ( __file__ ) <EOL> class WindowsBuild ( Build ) : <EOL> PLATFORM = '<STR_LIT>' <EOL> LICENSE_FILE = '<STR_LIT>' <EOL> def prune ( self ) : <EOL> super ( self . __class__ , self ) . prune ( ) <EOL> cfadir = os . path . join ( self . export_path , '<STR_LIT>' , '<STR_LIT>' ) <EOL> p_to_del = [ ] <EOL> if os . path . exists ( cfadir ) : <EOL> p_to_del . extend ( [ os . path . join ( cfadir , x ) for x in os . listdir ( cfadir ) if x != "<STR_LIT>" ] ) <EOL> for x in [ '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:test>' , <EOL> ] : <EOL> p_to_del . append ( os . path . join ( self . export_path , x ) ) <EOL> for p in p_to_del : <EOL> if os . path . isfile ( p ) : <EOL> os . remove ( p ) <EOL> elif os . path . isdir ( p ) : <EOL> shutil . rmtree ( p ) <EOL> if os . path . exists ( p ) : <EOL> raise BuildError ( "<STR_LIT>" % p ) <EOL> def package ( self ) : <EOL> '''<STR_LIT>''' <EOL> from . nsis import buildnsi <EOL> for f in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> src = os . path . join ( basedir , '<STR_LIT>' , f ) <EOL> shutil . copy ( src , self . build_dir ) <EOL> nsifile = os . path . join ( self . build_dir , '<STR_LIT>' ) <EOL> buildnsi . main ( svn_rev = self . svn_rev , outfile = nsifile , build_dir = self . build_dir ) <EOL> subprocess . call ( [ '<STR_LIT>' , nsifile ] ) </s>
<s> from . errors import AndroidEmulatorManagerError , AvdMgrError <EOL> from . errors import AvdClonerError , OrphanedProcessError <EOL> from . cloner import AvdCloner , clone_avd <EOL> from . orphan_catcher import OrphanCatcher <EOL> from . . api import AndroidEmulator , AndroidEmulatorError </s>
<s> '''<STR_LIT>''' <EOL> import logging <EOL> logger = logging . getLogger ( __name__ ) <EOL> class IterationBase ( object ) : <EOL> def __init__ ( self ) : <EOL> pass <EOL> def __enter__ ( self ) : <EOL> pass <EOL> def __exit__ ( self , etype , value , traceback ) : <EOL> pass <EOL> def keep_crash ( self , crash ) : <EOL> pass <EOL> def _create_minimizer_cfg ( self ) : <EOL> pass <EOL> def minimize ( self , crash ) : <EOL> pass <EOL> def _copy_seedfile ( self ) : <EOL> pass <EOL> def copy_files ( self , crash ) : <EOL> pass <EOL> def record_success ( self ) : <EOL> pass <EOL> def record_failure ( self ) : <EOL> pass <EOL> def _process_crash ( self , crash ) : <EOL> pass <EOL> def _log_crash ( self , crash ) : <EOL> pass <EOL> def _build_crash ( self , fuzzer , cmdlist , dbg_opts , fuzzed_file ) : <EOL> pass <EOL> def _fuzz_and_run ( self ) : <EOL> pass <EOL> def go ( self ) : <EOL> logger . info ( '<STR_LIT>' , self . current_seed , self . sf . path ) <EOL> self . _fuzz_and_run ( ) <EOL> for c in self . crashes : <EOL> self . _process_crash ( c ) </s>
<s> '''<STR_LIT>''' <EOL> import re <EOL> import hashlib <EOL> import logging <EOL> from optparse import OptionParser <EOL> import os <EOL> logger = logging . getLogger ( __name__ ) <EOL> logger . setLevel ( logging . WARNING ) <EOL> regex = { <EOL> '<STR_LIT>' : re . compile ( r'<STR_LIT>' ) , <EOL> '<STR_LIT>' : re . compile ( r'<STR_LIT>' ) , <EOL> '<STR_LIT>' : re . compile ( r'<STR_LIT>' ) , <EOL> } <EOL> class Calltracefile : <EOL> def __init__ ( self , f ) : <EOL> '''<STR_LIT>''' <EOL> logger . debug ( '<STR_LIT>' , f ) <EOL> self . file = f <EOL> self . backtrace = [ ] <EOL> self . hashable_backtrace = [ ] <EOL> self . hashable_backtrace_string = '<STR_LIT>' <EOL> with open ( self . file ) as pinfile : <EOL> for line in pinfile : <EOL> self . calltrace_line ( line ) <EOL> self . _hashable_backtrace ( ) <EOL> def _hashable_backtrace ( self ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> hashable = [ ] <EOL> if not self . hashable_backtrace : <EOL> for bt in self . backtrace : <EOL> hashable . append ( bt ) <EOL> if not hashable : <EOL> self . is_crash = False <EOL> self . hashable_backtrace = hashable <EOL> logger . debug ( "<STR_LIT>" , self . hashable_backtrace ) <EOL> return self . hashable_backtrace <EOL> def _hashable_backtrace_string ( self , level ) : <EOL> self . hashable_backtrace_string = '<STR_LIT:U+0020>' . join ( self . hashable_backtrace [ - level : ] ) . strip ( ) <EOL> logger . warning ( '<STR_LIT>' , self . hashable_backtrace_string ) <EOL> return self . hashable_backtrace_string <EOL> def calltrace_line ( self , l ) : <EOL> m = re . match ( regex [ '<STR_LIT>' ] , l ) <EOL> if m : <EOL> system_lib = re . match ( regex [ '<STR_LIT>' ] , l ) <EOL> n = re . match ( regex [ '<STR_LIT>' ] , l ) <EOL> if n : <EOL> function = n . group ( <NUM_LIT:2> ) <EOL> if not system_lib and function != '<STR_LIT>' and function != '<STR_LIT>' and function != '<STR_LIT>' : <EOL> item = m . group ( <NUM_LIT:1> ) <EOL> self . backtrace . 
append ( item ) <EOL> logger . debug ( '<STR_LIT>' , item ) <EOL> def _process_lines ( self ) : <EOL> logger . debug ( '<STR_LIT>' ) <EOL> for idx , line in enumerate ( self . lines ) : <EOL> self . calltrace_line ( idx , line ) <EOL> def get_crash_signature ( self , backtrace_level ) : <EOL> '''<STR_LIT>''' <EOL> logger . debug ( '<STR_LIT>' ) <EOL> backtrace_string = self . _hashable_backtrace_string ( backtrace_level ) <EOL> if bool ( backtrace_string ) : <EOL> return hashlib . md5 ( backtrace_string ) . hexdigest ( ) <EOL> else : <EOL> return False <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> hdlr = logging . StreamHandler ( ) <EOL> logger . addHandler ( hdlr ) <EOL> parser = OptionParser ( ) <EOL> parser . add_option ( '<STR_LIT>' , '<STR_LIT>' , dest = '<STR_LIT>' , action = '<STR_LIT:store_true>' , help = '<STR_LIT>' ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> if options . debug : <EOL> logger . setLevel ( logging . DEBUG ) <EOL> for path in args : <EOL> g = Calltracefile ( path ) <EOL> print g . get_crash_signature ( <NUM_LIT:50> ) </s>
<s> """<STR_LIT:U+0020>""" <EOL> from . import Fuzzer <EOL> from . import FuzzerError <EOL> from . import FuzzerExhaustedError <EOL> import logging <EOL> from random import getrandbits <EOL> logger = logging . getLogger ( __name__ ) <EOL> class InsertFuzzerError ( FuzzerError ) : <EOL> pass <EOL> class InsertFuzzer ( Fuzzer ) : <EOL> '''<STR_LIT>''' <EOL> def _fuzz ( self ) : <EOL> '''<STR_LIT>''' <EOL> bytes_to_fuzz = xrange ( len ( self . input ) ) <EOL> byte_pos = self . sf . tries <EOL> byte_to_insert = getrandbits ( <NUM_LIT:8> ) <EOL> if byte_pos < len ( bytes_to_fuzz ) : <EOL> self . input . insert ( byte_pos , byte_to_insert ) <EOL> else : <EOL> raise FuzzerExhaustedError ( '<STR_LIT>' ) <EOL> logger . debug ( '<STR_LIT>' , self . sf . basename , <EOL> byte_to_insert , byte_pos ) <EOL> self . fuzzed = self . input <EOL> _fuzzer_class = InsertFuzzer </s>
<s> '''<STR_LIT>''' <EOL> class MinimizerError ( Exception ) : <EOL> pass <EOL> class WindowsMinimizerError ( MinimizerError ) : <EOL> pass </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> from certfuzz . analyzers import Analyzer <EOL> class MockObj ( object ) : <EOL> def __init__ ( self , ** kwargs ) : <EOL> for ( kw , arg ) in kwargs : <EOL> self . __setattr__ ( kw , arg ) <EOL> class MockCfg ( MockObj ) : <EOL> def get_command_list ( self , * args ) : <EOL> pass <EOL> class MockCrash ( MockObj ) : <EOL> def __init__ ( self ) : <EOL> self . fuzzedfile = MockFile ( ) <EOL> self . killprocname = '<STR_LIT>' <EOL> class MockFile ( MockObj ) : <EOL> def __init__ ( self ) : <EOL> self . dirname = '<STR_LIT>' <EOL> self . path = '<STR_LIT:path>' <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> cfg = MockCfg ( ) <EOL> crash = MockCrash ( ) <EOL> self . analyzer = Analyzer ( cfg , crash , timeout = <NUM_LIT:0> ) <EOL> self . assertTrue ( self . analyzer , '<STR_LIT>' ) <EOL> def tearDown ( self ) : <EOL> pass <EOL> def testName ( self ) : <EOL> pass <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> import tempfile <EOL> import shutil <EOL> import yaml <EOL> import os <EOL> from certfuzz . campaign import config <EOL> import pprint <EOL> _count = <NUM_LIT:0> <EOL> def _counter ( ) : <EOL> global _count <EOL> _count += <NUM_LIT:1> <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> self . tempdir = tempfile . mkdtemp ( ) <EOL> def tearDown ( self ) : <EOL> shutil . rmtree ( self . tempdir ) <EOL> def _write_yaml ( self , thing = None ) : <EOL> if thing is None : <EOL> thing = dict ( a = <NUM_LIT:1> , b = <NUM_LIT:2> , c = <NUM_LIT:3> , d = <NUM_LIT:4> ) <EOL> fd , f = tempfile . mkstemp ( suffix = '<STR_LIT>' , dir = self . tempdir ) <EOL> os . close ( fd ) <EOL> with open ( f , '<STR_LIT:wb>' ) as fd : <EOL> yaml . dump ( thing , fd ) <EOL> return thing , f <EOL> def test_parse_yaml ( self ) : <EOL> thing , f = self . _write_yaml ( ) <EOL> self . assertTrue ( os . path . exists ( f ) ) <EOL> self . assertTrue ( os . path . getsize ( f ) > <NUM_LIT:0> ) <EOL> from_yaml = config . parse_yaml ( f ) <EOL> self . assertEqual ( thing , from_yaml ) <EOL> def test_config_init ( self ) : <EOL> thing , f = self . _write_yaml ( ) <EOL> c = config . Config ( f ) <EOL> self . assertEqual ( f , c . file ) <EOL> self . assertEqual ( thing , c . config ) <EOL> def test_validate ( self ) : <EOL> dummy , f = self . _write_yaml ( ) <EOL> c = config . Config ( f ) <EOL> c . validations . append ( _counter ) <EOL> c . validations . append ( _counter ) <EOL> c . validations . append ( _counter ) <EOL> self . assertEqual ( <NUM_LIT:0> , _count ) <EOL> c . validate ( ) <EOL> self . assertEqual ( <NUM_LIT:3> , _count ) <EOL> def test_load ( self ) : <EOL> dummy , f = self . _write_yaml ( ) <EOL> os . remove ( f ) <EOL> c = config . Config ( f ) <EOL> self . assertEqual ( None , c . config ) <EOL> c . load ( ) <EOL> self . assertEqual ( None , c . config ) <EOL> thing , f = self . _write_yaml ( ) <EOL> c . 
file = f <EOL> c . load ( ) <EOL> self . assertEqual ( thing , c . config ) <EOL> for k , v in thing . iteritems ( ) : <EOL> self . assertTrue ( hasattr ( c , k ) ) <EOL> self . assertEqual ( c . __getattribute__ ( k ) , v ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> import logging <EOL> import tempfile <EOL> import os <EOL> import shutil <EOL> from pprint import pprint <EOL> from certfuzz . file_handlers . seedfile_set import SeedfileSet <EOL> from certfuzz . file_handlers . directory import Directory <EOL> from certfuzz . file_handlers . seedfile import SeedFile <EOL> import hashlib <EOL> from certfuzz . scoring . scorable_set import EmptySetError <EOL> class Test ( unittest . TestCase ) : <EOL> def setUp ( self ) : <EOL> campaign_id = '<STR_LIT>' <EOL> self . origindir = tempfile . mkdtemp ( ) <EOL> self . localdir = tempfile . mkdtemp ( ) <EOL> self . outputdir = tempfile . mkdtemp ( ) <EOL> self . file_count = <NUM_LIT:5> <EOL> self . files = [ ] <EOL> for i in range ( self . file_count ) : <EOL> ( fd , f ) = tempfile . mkstemp ( dir = self . origindir ) <EOL> os . write ( fd , '<STR_LIT>' % i ) <EOL> os . close ( fd ) <EOL> self . files . append ( f ) <EOL> self . sfs = SeedfileSet ( campaign_id , self . origindir , self . localdir , self . outputdir ) <EOL> def tearDown ( self ) : <EOL> for f in self . files : <EOL> os . remove ( f ) <EOL> self . assertFalse ( os . path . exists ( f ) ) <EOL> for d in ( self . origindir , self . localdir , self . outputdir ) : <EOL> shutil . rmtree ( d ) <EOL> self . assertFalse ( os . path . exists ( d ) ) <EOL> def test_pickle ( self ) : <EOL> import pickle <EOL> self . assertTrue ( hasattr ( self . sfs , '<STR_LIT>' ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( self . sfs . things ) ) <EOL> self . sfs . _setup ( ) <EOL> self . assertEqual ( self . file_count , len ( self . sfs . things ) ) <EOL> unpickled = pickle . loads ( pickle . dumps ( self . sfs ) ) <EOL> pprint ( unpickled . __dict__ ) <EOL> def test_set_directories ( self ) : <EOL> self . assertEqual ( self . sfs . originpath , self . origindir ) <EOL> self . assertEqual ( self . sfs . localpath , self . localdir ) <EOL> self . assertEqual ( self . sfs . outputpath , self . 
outputdir ) <EOL> self . assertEqual ( None , self . sfs . origindir ) <EOL> self . assertEqual ( None , self . sfs . localdir ) <EOL> self . assertEqual ( None , self . sfs . outputdir ) <EOL> self . sfs . _set_directories ( ) <EOL> self . assertEqual ( Directory , self . sfs . origindir . __class__ ) <EOL> self . assertEqual ( Directory , self . sfs . localdir . __class__ ) <EOL> self . assertEqual ( Directory , self . sfs . outputdir . __class__ ) <EOL> self . assertEqual ( self . file_count , len ( self . sfs . origindir . files ) ) <EOL> def test_copy_files_to_localdir ( self ) : <EOL> self . sfs . origindir = [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> , <NUM_LIT:4> , <NUM_LIT:5> ] <EOL> copied = [ ] <EOL> self . sfs . copy_file_from_origin = lambda x : copied . append ( x ) <EOL> self . sfs . _copy_files_to_localdir ( ) <EOL> self . assertEqual ( self . sfs . origindir , copied ) <EOL> def test_copy_file_from_origin ( self ) : <EOL> pass <EOL> def test_add_local_files_to_set ( self ) : <EOL> pass <EOL> def test_add_file ( self ) : <EOL> self . assertNotEqual ( <NUM_LIT:0> , len ( self . files ) ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( self . sfs . things ) ) <EOL> self . sfs . add_file ( * self . files ) <EOL> self . assertEqual ( <NUM_LIT:5> , len ( self . sfs . things ) ) <EOL> for thing in self . sfs . things . itervalues ( ) : <EOL> self . assertEqual ( SeedFile , thing . __class__ ) <EOL> def test_init ( self ) : <EOL> self . assertEqual ( self . outputdir , self . sfs . seedfile_output_base_dir ) <EOL> self . assertEqual ( <NUM_LIT:0> , len ( self . sfs . things ) ) <EOL> def test_getstate_is_pickle_friendly ( self ) : <EOL> import pickle <EOL> state = self . sfs . __getstate__ ( ) <EOL> try : <EOL> pickle . dumps ( state ) <EOL> except Exception , e : <EOL> self . fail ( '<STR_LIT>' % e ) <EOL> def test_getstate ( self ) : <EOL> state = self . sfs . __getstate__ ( ) <EOL> self . assertEqual ( dict , type ( state ) ) <EOL> for k , v in self . 
sfs . __dict__ . iteritems ( ) : <EOL> if k in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> self . assertFalse ( k in state ) <EOL> else : <EOL> self . assertTrue ( k in state , '<STR_LIT>' % k ) <EOL> def test_setstate ( self ) : <EOL> self . sfs . __enter__ ( ) <EOL> state_before = self . sfs . __getstate__ ( ) <EOL> self . sfs . __setstate__ ( state_before ) <EOL> self . assertEqual ( self . file_count , self . sfs . sfcount ) <EOL> state_after = self . sfs . __getstate__ ( ) <EOL> for k , v in state_before . iteritems ( ) : <EOL> self . assertTrue ( k in state_after ) <EOL> if not k == '<STR_LIT>' : <EOL> self . assertEqual ( v , state_after [ k ] ) <EOL> for k , thing in state_before [ '<STR_LIT>' ] . iteritems ( ) : <EOL> self . assertTrue ( k in self . sfs . things ) <EOL> for x , y in thing . iteritems ( ) : <EOL> self . assertEqual ( thing [ x ] , self . sfs . things [ k ] . __dict__ [ x ] ) <EOL> self . assertEqual ( self . file_count , self . sfs . sfcount ) <EOL> def test_setstate_with_changed_files ( self ) : <EOL> self . sfs . __enter__ ( ) <EOL> state_before = self . sfs . __getstate__ ( ) <EOL> self . assertEqual ( len ( state_before [ '<STR_LIT>' ] ) , self . file_count ) <EOL> file_to_remove = self . files . pop ( ) <EOL> localfile_md5 = hashlib . md5 ( open ( file_to_remove , '<STR_LIT:rb>' ) . read ( ) ) . hexdigest ( ) <EOL> localfilename = "<STR_LIT>" % localfile_md5 <EOL> os . remove ( file_to_remove ) <EOL> self . assertFalse ( file_to_remove in self . files ) <EOL> self . assertFalse ( os . path . exists ( file_to_remove ) ) <EOL> localfile_to_remove = os . path . join ( self . localdir , localfilename ) <EOL> os . remove ( localfile_to_remove ) <EOL> self . assertFalse ( os . path . exists ( localfile_to_remove ) ) <EOL> new_sfs = SeedfileSet ( ) <EOL> new_sfs . __setstate__ ( state_before ) <EOL> self . assertEqual ( len ( new_sfs . things ) , ( self . 
file_count - <NUM_LIT:1> ) ) <EOL> for k , thing in state_before [ '<STR_LIT>' ] . iteritems ( ) : <EOL> if k == localfile_md5 : <EOL> self . assertFalse ( k in new_sfs . things ) <EOL> continue <EOL> else : <EOL> self . assertTrue ( k in new_sfs . things ) <EOL> for x , y in thing . iteritems ( ) : <EOL> sfsthing = new_sfs . things [ k ] . __dict__ [ x ] <EOL> if hasattr ( sfsthing , '<STR_LIT>' ) : <EOL> self . _same_dict ( y , sfsthing . __dict__ ) <EOL> else : <EOL> self . assertEqual ( y , sfsthing ) <EOL> self . assertEqual ( self . file_count - <NUM_LIT:1> , new_sfs . sfcount ) <EOL> def _same_dict ( self , d1 , d2 ) : <EOL> for k , v in d1 . iteritems ( ) : <EOL> self . assertTrue ( k in d2 ) <EOL> if not v == d2 [ k ] : <EOL> pprint ( v ) <EOL> pprint ( d2 [ k ] ) <EOL> self . assertEqual ( v , d2 [ k ] ) <EOL> def test_next_item ( self ) : <EOL> self . assertEqual ( <NUM_LIT:0> , len ( self . sfs . things ) ) <EOL> self . assertRaises ( EmptySetError , self . sfs . next_key ) <EOL> self . assertRaises ( EmptySetError , self . sfs . next_item ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> import os <EOL> import tempfile <EOL> from certfuzz . fuzztools . zzuflog import ZzufLog <EOL> '''<STR_LIT>''' <EOL> import unittest <EOL> class Test ( unittest . TestCase ) : <EOL> def delete_file ( self , f ) : <EOL> if os . path . exists ( f ) : <EOL> os . remove ( f ) <EOL> self . assertFalse ( os . path . exists ( f ) ) <EOL> def tearDown ( self ) : <EOL> self . delete_file ( self . infile ) <EOL> self . delete_file ( self . outfile ) <EOL> def setUp ( self ) : <EOL> ( fd1 , f1 ) = tempfile . mkstemp ( text = True ) <EOL> os . close ( fd1 ) <EOL> self . infile = f1 <EOL> ( fd2 , f2 ) = tempfile . mkstemp ( text = True ) <EOL> os . close ( fd2 ) <EOL> self . outfile = f2 <EOL> self . log = ZzufLog ( self . infile , self . outfile ) <EOL> def test_get_last_line ( self ) : <EOL> open ( self . infile , '<STR_LIT:w>' ) <EOL> self . assertEqual ( self . log . _get_last_line ( ) , '<STR_LIT>' ) <EOL> ( fd , f ) = tempfile . mkstemp ( text = True ) <EOL> os . write ( fd , "<STR_LIT>" ) <EOL> os . write ( fd , "<STR_LIT>" ) <EOL> os . write ( fd , "<STR_LIT>" ) <EOL> os . close ( fd ) <EOL> log = ZzufLog ( f , self . outfile ) <EOL> self . assertEqual ( log . line , '<STR_LIT>' ) <EOL> self . delete_file ( f ) <EOL> def test_set_exitcode ( self ) : <EOL> self . log . result = "<STR_LIT>" <EOL> self . log . _set_exitcode ( ) <EOL> self . assertEqual ( self . log . exitcode , '<STR_LIT>' ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . log . _set_exitcode ( ) <EOL> self . assertEqual ( self . log . exitcode , <NUM_LIT> ) <EOL> def test_set_signal ( self ) : <EOL> self . log . result = "<STR_LIT>" <EOL> self . log . _set_signal ( ) <EOL> self . assertEqual ( self . log . signal , '<STR_LIT>' ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . log . _set_signal ( ) <EOL> self . assertEqual ( self . log . signal , '<STR_LIT>' ) <EOL> def test_parse_line ( self ) : <EOL> self . log . line = "<STR_LIT>" <EOL> self . assertEqual ( self . log . 
_parse_line ( ) , ( False , False , '<STR_LIT>' ) ) <EOL> self . log . line = "<STR_LIT>" <EOL> self . assertEqual ( self . log . _parse_line ( ) , ( <NUM_LIT> , '<STR_LIT:foo>' , '<STR_LIT>' ) ) <EOL> def test_was_out_of_memory ( self ) : <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertTrue ( self . log . _was_out_of_memory ( ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertTrue ( self . log . _was_out_of_memory ( ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertFalse ( self . log . _was_out_of_memory ( ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertFalse ( self . log . _was_out_of_memory ( ) ) <EOL> def test_was_killed ( self ) : <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertTrue ( self . log . _was_killed ( ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertTrue ( self . log . _was_killed ( ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertFalse ( self . log . _was_killed ( ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . assertFalse ( self . log . _was_killed ( ) ) <EOL> def test_read_zzuf_log ( self ) : <EOL> ( fd , f ) = tempfile . mkstemp ( text = True ) <EOL> line = "<STR_LIT>" <EOL> os . write ( fd , line % ( <NUM_LIT:10> , "<STR_LIT>" , "<STR_LIT:foo>" ) ) <EOL> os . write ( fd , line % ( <NUM_LIT> , "<STR_LIT>" , "<STR_LIT:bar>" ) ) <EOL> os . close ( fd ) <EOL> log = ZzufLog ( f , self . outfile ) <EOL> self . assertEqual ( log . seed , <NUM_LIT> ) <EOL> self . assertEqual ( log . range , "<STR_LIT>" ) <EOL> self . assertEqual ( log . result , "<STR_LIT:bar>" ) <EOL> self . assertEqual ( log . line , ( line % ( <NUM_LIT> , "<STR_LIT>" , "<STR_LIT:bar>" ) ) . strip ( ) ) <EOL> self . delete_file ( f ) <EOL> def test_crash_logged ( self ) : <EOL> self . log . result = "<STR_LIT:a>" <EOL> self . log . _set_exitcode ( ) <EOL> self . assertFalse ( self . log . crash_logged ( False ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . log . 
_set_exitcode ( ) <EOL> self . assertFalse ( self . log . crash_logged ( False ) ) <EOL> self . log . result = "<STR_LIT>" <EOL> self . log . _set_exitcode ( ) <EOL> self . assertFalse ( self . log . crash_logged ( False ) ) <EOL> self . log . result = "<STR_LIT:a>" <EOL> self . log . _set_exitcode ( ) <EOL> self . assertFalse ( self . log . parsed ) <EOL> self . assertFalse ( self . log . crash_logged ( False ) ) <EOL> self . log . result = "<STR_LIT:a>" <EOL> self . log . _set_exitcode ( ) <EOL> self . log . parsed = True <EOL> self . assertTrue ( self . log . crash_logged ( False ) ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> unittest . main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import sys , os <EOL> sys . path . append ( os . getcwd ( ) ) <EOL> import lib . gdb_wrapper as gdb_wrapper <EOL> def assertEqual ( val1 , val2 , fmt = "<STR_LIT>" ) : <EOL> assert type ( val1 ) == type ( val2 ) , "<STR_LIT>" % ( type ( val1 ) , type ( val2 ) ) <EOL> assert val1 == val2 , ( "<STR_LIT>" % ( fmt , fmt ) ) % ( val1 , val2 ) <EOL> def testInstruction ( ) : <EOL> '''<STR_LIT>''' <EOL> gdbstr = "<STR_LIT>" <EOL> i = gdb_wrapper . Instruction ( gdbstr ) <EOL> assertEqual ( i . addr , <NUM_LIT> , "<STR_LIT>" ) <EOL> assertEqual ( str ( i . operands [ <NUM_LIT:0> ] ) , "<STR_LIT>" ) <EOL> assertEqual ( i . mnemonic , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> i = gdb_wrapper . Instruction ( gdbstr ) <EOL> assertEqual ( i . addr , <NUM_LIT> , "<STR_LIT>" ) <EOL> assertEqual ( str ( i . operands [ <NUM_LIT:0> ] ) , "<STR_LIT>" ) <EOL> assertEqual ( i . mnemonic , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> i = gdb_wrapper . Instruction ( gdbstr ) <EOL> assertEqual ( i . addr , <NUM_LIT> , "<STR_LIT>" ) <EOL> assertEqual ( len ( i . operands ) , <NUM_LIT:0> ) <EOL> assertEqual ( i . mnemonic , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> i = gdb_wrapper . Instruction ( gdbstr ) <EOL> assertEqual ( i . addr , <NUM_LIT> , "<STR_LIT>" ) <EOL> assertEqual ( str ( i . operands [ <NUM_LIT:0> ] ) , "<STR_LIT>" ) <EOL> assertEqual ( str ( i . operands [ <NUM_LIT:1> ] ) , "<STR_LIT>" ) <EOL> assertEqual ( i . mnemonic , "<STR_LIT>" ) <EOL> def testOperand ( ) : <EOL> '''<STR_LIT>''' <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , True ) <EOL> assertEqual ( o . expr , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , True ) <EOL> assertEqual ( o . expr , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , False ) <EOL> assertEqual ( o . 
expr , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , True ) <EOL> assertEqual ( o . expr , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , False ) <EOL> assertEqual ( o . expr , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , False ) <EOL> assertEqual ( o . expr . replace ( "<STR_LIT:U+0020>" , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , False ) <EOL> assertEqual ( o . expr . replace ( "<STR_LIT:U+0020>" , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> gdbstr = "<STR_LIT>" <EOL> o = gdb_wrapper . Operand ( gdbstr ) <EOL> assertEqual ( o . is_pointer , True ) <EOL> assertEqual ( o . expr . replace ( "<STR_LIT:U+0020>" , "<STR_LIT>" ) , "<STR_LIT>" ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> testInstruction ( ) <EOL> testOperand ( ) <EOL> print "<STR_LIT>" </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import collections <EOL> import zipfile <EOL> from optparse import OptionParser <EOL> saved_arcinfo = collections . OrderedDict ( ) <EOL> def readzip ( filepath ) : <EOL> global savedarcinfo <EOL> tempzip = zipfile . ZipFile ( filepath , '<STR_LIT:r>' ) <EOL> '''<STR_LIT>''' <EOL> unzippedbytes = '<STR_LIT>' <EOL> for i in tempzip . namelist ( ) : <EOL> data = tempzip . read ( i ) <EOL> saved_arcinfo [ i ] = ( len ( unzippedbytes ) , len ( data ) ) <EOL> unzippedbytes += data <EOL> tempzip . close ( ) <EOL> return unzippedbytes <EOL> def main ( ) : <EOL> global saved_arcinfo <EOL> usage = '<STR_LIT>' <EOL> parser = OptionParser ( usage = usage ) <EOL> ( options , args ) = parser . parse_args ( ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> parser . error ( '<STR_LIT>' ) <EOL> return <EOL> changedbytes = [ ] <EOL> changedfiles = [ ] <EOL> zip1 = args [ <NUM_LIT:0> ] <EOL> zip2 = args [ <NUM_LIT:1> ] <EOL> zip1bytes = readzip ( zip1 ) <EOL> zip2bytes = readzip ( zip2 ) <EOL> zip1len = len ( zip1bytes ) <EOL> if zip1len != len ( zip2bytes ) : <EOL> print '<STR_LIT>' <EOL> for i in range ( <NUM_LIT:0> , zip1len ) : <EOL> if zip1bytes [ i ] != zip2bytes [ i ] : <EOL> changedbytes . append ( i ) <EOL> for changedbyte in changedbytes : <EOL> for name , info in saved_arcinfo . iteritems ( ) : <EOL> startaddr = info [ <NUM_LIT:0> ] <EOL> endaddr = info [ <NUM_LIT:0> ] + info [ <NUM_LIT:1> ] <EOL> if startaddr <= changedbyte <= endaddr and name not in changedfiles : <EOL> print '<STR_LIT>' % name <EOL> changedfiles . append ( name ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> '''<STR_LIT>''' <EOL> import string <EOL> import re <EOL> import os <EOL> import collections <EOL> import glob <EOL> import stat <EOL> import gzip <EOL> import subprocess <EOL> import itertools <EOL> import numpy <EOL> import numpy . ma <EOL> import shutil <EOL> def getFirstLine ( filename , nlines = <NUM_LIT:1> ) : <EOL> """<STR_LIT>""" <EOL> with open ( filename , '<STR_LIT>' ) as f : <EOL> line = "<STR_LIT>" . join ( [ f . readline ( ) for x in range ( nlines ) ] ) <EOL> return line <EOL> def getLastLine ( filename , nlines = <NUM_LIT:1> , read_size = <NUM_LIT> ) : <EOL> """<STR_LIT>""" <EOL> f = open ( filename , '<STR_LIT>' ) <EOL> offset = read_size <EOL> f . seek ( <NUM_LIT:0> , <NUM_LIT:2> ) <EOL> file_size = f . tell ( ) <EOL> if file_size == <NUM_LIT:0> : <EOL> return "<STR_LIT>" <EOL> while <NUM_LIT:1> : <EOL> if file_size < offset : <EOL> offset = file_size <EOL> f . seek ( - <NUM_LIT:1> * offset , <NUM_LIT:2> ) <EOL> read_str = f . read ( offset ) <EOL> if read_str [ offset - <NUM_LIT:1> ] == '<STR_LIT:\n>' : <EOL> read_str = read_str [ : - <NUM_LIT:1> ] <EOL> lines = read_str . split ( '<STR_LIT:\n>' ) <EOL> if len ( lines ) >= nlines + <NUM_LIT:1> : <EOL> return "<STR_LIT:\n>" . join ( lines [ - nlines : ] ) <EOL> if offset == file_size : <EOL> return read_str <EOL> offset += read_size <EOL> f . close ( ) <EOL> def getNumLines ( filename , ignore_comments = True ) : <EOL> """<STR_LIT>""" <EOL> if ignore_comments : <EOL> filter_cmd = '<STR_LIT>' <EOL> else : <EOL> filter_cmd = "<STR_LIT>" <EOL> if filename . endswith ( "<STR_LIT>" ) : <EOL> cmd = "<STR_LIT>" % locals ( ) <EOL> else : <EOL> cmd = "<STR_LIT>" % locals ( ) <EOL> out = subprocess . Popen ( cmd , <EOL> shell = True , <EOL> stdout = subprocess . PIPE , <EOL> stderr = subprocess . STDOUT <EOL> ) . communicate ( ) [ <NUM_LIT:0> ] <EOL> return int ( out . 
partition ( b'<STR_LIT:U+0020>' ) [ <NUM_LIT:0> ] ) <EOL> def isEmpty ( filename ) : <EOL> """<STR_LIT>""" <EOL> if filename == "<STR_LIT:->" : <EOL> return False <EOL> return os . stat ( filename ) [ stat . ST_SIZE ] == <NUM_LIT:0> <EOL> def isComplete ( filename ) : <EOL> '''<STR_LIT>''' <EOL> if filename . endswith ( "<STR_LIT>" ) : <EOL> raise NotImplementedError ( <EOL> '<STR_LIT>' ) <EOL> if isEmpty ( filename ) : <EOL> return False <EOL> lastline = getLastLine ( filename ) <EOL> return lastline . startswith ( "<STR_LIT>" ) <EOL> def touchFile ( filename , times = None ) : <EOL> '''<STR_LIT>''' <EOL> existed = os . path . exists ( filename ) <EOL> fhandle = file ( filename , '<STR_LIT:a>' ) <EOL> if filename . endswith ( "<STR_LIT>" ) and not existed : <EOL> fhandle = gzip . GzipFile ( filename , fileobj = fhandle ) <EOL> try : <EOL> os . utime ( filename , times ) <EOL> finally : <EOL> fhandle . close ( ) <EOL> def openFile ( filename , mode = "<STR_LIT:r>" , create_dir = False ) : <EOL> '''<STR_LIT>''' <EOL> _ , ext = os . path . splitext ( filename ) <EOL> if create_dir : <EOL> dirname = os . path . dirname ( filename ) <EOL> if dirname and not os . path . exists ( dirname ) : <EOL> os . makedirs ( dirname ) <EOL> if ext . lower ( ) in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> return gzip . open ( filename , mode ) <EOL> else : <EOL> return open ( filename , mode ) <EOL> def zapFile ( filename , outfile = None ) : <EOL> '''<STR_LIT>''' <EOL> if outfile and os . path . getsize ( outfile ) == <NUM_LIT:0> : <EOL> raise ValueError ( '<STR_LIT>' % outfile ) <EOL> original = os . stat ( filename ) <EOL> if original . st_size == <NUM_LIT:0> : <EOL> return None , None <EOL> if os . path . islink ( filename ) : <EOL> linkdest = os . readlink ( filename ) <EOL> os . unlink ( filename ) <EOL> f = open ( filename , "<STR_LIT:w>" ) <EOL> f . close ( ) <EOL> else : <EOL> linkdest = None <EOL> f = open ( filename , "<STR_LIT:w>" ) <EOL> f . truncate ( ) <EOL> f . 
close ( ) <EOL> os . utime ( filename , ( original . st_atime , original . st_mtime ) ) <EOL> os . chmod ( filename , original . st_mode ) <EOL> return original , linkdest <EOL> def cloneFile ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> if os . path . dirname ( infile ) != os . path . dirname ( outfile ) : <EOL> relpath = os . path . relpath ( <EOL> os . path . dirname ( infile ) , os . path . dirname ( outfile ) ) <EOL> else : <EOL> relpath = "<STR_LIT:.>" <EOL> target = os . path . join ( relpath , os . path . basename ( infile ) ) <EOL> try : <EOL> os . symlink ( target , outfile ) <EOL> except OSError : <EOL> pass <EOL> def shadowFile ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> if outfile != infile : <EOL> shutil . move ( infile , outfile ) <EOL> touchFile ( infile ) <EOL> else : <EOL> raise ValueError ( '<STR_LIT>' ) <EOL> def val2str ( val , format = "<STR_LIT>" , na = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> if type ( val ) == int : <EOL> return format % val <EOL> elif type ( val ) == float : <EOL> return format % val <EOL> try : <EOL> x = format % val <EOL> except ( ValueError , TypeError ) : <EOL> x = na <EOL> return x <EOL> def str2val ( val , format = "<STR_LIT>" , na = "<STR_LIT>" , list_detection = False ) : <EOL> """<STR_LIT>""" <EOL> if val is None : <EOL> return val <EOL> def _convert ( v ) : <EOL> try : <EOL> x = int ( v ) <EOL> except ValueError : <EOL> try : <EOL> x = float ( v ) <EOL> except ValueError : <EOL> return v <EOL> return x <EOL> if list_detection and "<STR_LIT:U+002C>" in val : <EOL> return [ _convert ( v ) for v in val . 
split ( "<STR_LIT:U+002C>" ) ] <EOL> else : <EOL> return _convert ( val ) <EOL> def prettyPercent ( numerator , denominator , format = "<STR_LIT>" , na = "<STR_LIT>" ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> x = format % ( <NUM_LIT> * numerator / denominator ) <EOL> except ( ValueError , ZeroDivisionError ) : <EOL> x = "<STR_LIT>" <EOL> return x <EOL> def prettyString ( val ) : <EOL> '''<STR_LIT>''' <EOL> if val is not None : <EOL> return val <EOL> else : <EOL> return "<STR_LIT>" <EOL> def which ( program ) : <EOL> """<STR_LIT>""" <EOL> def is_exe ( fpath ) : <EOL> return os . path . exists ( fpath ) and os . access ( fpath , os . X_OK ) <EOL> fpath , fname = os . path . split ( program ) <EOL> if fpath : <EOL> if is_exe ( program ) : <EOL> return program <EOL> else : <EOL> for path in os . environ [ "<STR_LIT>" ] . split ( os . pathsep ) : <EOL> exe_file = os . path . join ( path , program ) <EOL> if is_exe ( exe_file ) : <EOL> return exe_file <EOL> return None <EOL> def iterate ( infile ) : <EOL> '''<STR_LIT>''' <EOL> n = <NUM_LIT:0> <EOL> for line in infile : <EOL> if line . startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> n += <NUM_LIT:1> <EOL> if n == <NUM_LIT:1> : <EOL> header = re . sub ( "<STR_LIT>" , "<STR_LIT:_>" , line [ : - <NUM_LIT:1> ] ) . split ( ) <EOL> DATA = collections . namedtuple ( "<STR_LIT>" , header ) <EOL> continue <EOL> result = DATA ( * line [ : - <NUM_LIT:1> ] . split ( ) ) <EOL> yield result <EOL> def iterate_tabular ( infile , sep = "<STR_LIT:\t>" ) : <EOL> '''<STR_LIT>''' <EOL> for line in infile : <EOL> if line . startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> yield line [ : - <NUM_LIT:1> ] . split ( sep ) <EOL> def iterator_split ( infile , regex ) : <EOL> '''<STR_LIT>''' <EOL> chunk_list = [ ] <EOL> regex = re . compile ( regex ) <EOL> for x in infile : <EOL> if regex . search ( x ) : <EOL> if len ( chunk_list ) : <EOL> yield chunk_list <EOL> chunk_list = [ ] <EOL> chunk_list . append ( x ) <EOL> else : <EOL> chunk_list . 
append ( x ) <EOL> yield chunk_list <EOL> def snip ( filename , extension = None , alt_extension = None , <EOL> strip_path = False ) : <EOL> '''<STR_LIT>''' <EOL> if extension : <EOL> if filename . endswith ( extension ) : <EOL> root = filename [ : - len ( extension ) ] <EOL> elif alt_extension and filename . endswith ( alt_extension ) : <EOL> root = filename [ : - len ( alt_extension ) ] <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( filename , extension ) ) <EOL> else : <EOL> root , ext = os . path . splitext ( filename ) <EOL> if strip_path : <EOL> snipped = os . path . basename ( root ) <EOL> else : <EOL> snipped = root <EOL> return snipped <EOL> def checkPresenceOfFiles ( filenames ) : <EOL> """<STR_LIT>""" <EOL> missing = [ ] <EOL> for filename in filenames : <EOL> if not os . path . exists ( filename ) : <EOL> missing . append ( filename ) <EOL> return missing <EOL> def human2bytes ( s ) : <EOL> """<STR_LIT>""" <EOL> SYMBOLS = { <EOL> '<STR_LIT>' : ( '<STR_LIT:B>' , '<STR_LIT>' , '<STR_LIT:M>' , '<STR_LIT>' , '<STR_LIT:T>' , '<STR_LIT:P>' , '<STR_LIT:E>' , '<STR_LIT>' , '<STR_LIT:Y>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) , <EOL> '<STR_LIT>' : ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , <EOL> '<STR_LIT>' , '<STR_LIT>' ) , <EOL> } <EOL> init = s <EOL> num = "<STR_LIT>" <EOL> while s and s [ <NUM_LIT:0> : <NUM_LIT:1> ] . isdigit ( ) or s [ <NUM_LIT:0> : <NUM_LIT:1> ] == '<STR_LIT:.>' : <EOL> num += s [ <NUM_LIT:0> ] <EOL> s = s [ <NUM_LIT:1> : ] <EOL> num = float ( num ) <EOL> letter = s . strip ( ) <EOL> for name , sset in SYMBOLS . 
items ( ) : <EOL> if letter in sset : <EOL> break <EOL> else : <EOL> if letter == '<STR_LIT:k>' : <EOL> sset = SYMBOLS [ '<STR_LIT>' ] <EOL> letter = letter . upper ( ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % init ) <EOL> prefix = { sset [ <NUM_LIT:0> ] : <NUM_LIT:1> } <EOL> for i , s in enumerate ( sset [ <NUM_LIT:1> : ] ) : <EOL> prefix [ s ] = <NUM_LIT:1> << ( i + <NUM_LIT:1> ) * <NUM_LIT:10> <EOL> return int ( num * prefix [ letter ] ) <EOL> def convertDictionary ( d , map = { } ) : <EOL> """<STR_LIT>""" <EOL> rx_int = re . compile ( "<STR_LIT>" ) <EOL> rx_float = re . compile ( "<STR_LIT>" ) <EOL> if "<STR_LIT:default>" in map : <EOL> k = "<STR_LIT:default>" <EOL> if map [ k ] == "<STR_LIT:int>" : <EOL> default = int <EOL> elif map [ k ] == "<STR_LIT:float>" : <EOL> default = float <EOL> elif map [ k ] == "<STR_LIT:string>" : <EOL> default = str <EOL> else : <EOL> default = False <EOL> for k , vv in d . items ( ) : <EOL> if vv is None : <EOL> continue <EOL> v = vv . strip ( ) <EOL> try : <EOL> if k in map : <EOL> if map [ k ] == "<STR_LIT:int>" : <EOL> d [ k ] = int ( v ) <EOL> elif map [ k ] == "<STR_LIT:float>" : <EOL> d [ k ] = float ( v ) <EOL> elif map [ k ] == "<STR_LIT:string>" : <EOL> pass <EOL> continue <EOL> elif default : <EOL> if v != "<STR_LIT>" : <EOL> d [ k ] = default ( v ) <EOL> else : <EOL> d [ k ] = v <EOL> continue <EOL> except TypeError , msg : <EOL> raise TypeError ( "<STR_LIT>" % ( k , msg ) ) <EOL> try : <EOL> if rx_int . match ( v ) : <EOL> d [ k ] = int ( v ) <EOL> elif rx_float . match ( v ) : <EOL> d [ k ] = float ( v ) <EOL> except TypeError , msg : <EOL> raise TypeError ( <EOL> "<STR_LIT>" % str ( v ) ) <EOL> except ValueError , msg : <EOL> raise ValueError ( "<STR_LIT>" % ( msg , str ( d ) ) ) <EOL> return d <EOL> class nested_dict ( collections . defaultdict ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> collections . defaultdict . 
__init__ ( self , nested_dict ) <EOL> def iterflattened ( self ) : <EOL> """<STR_LIT>""" <EOL> for key , value in self . iteritems ( ) : <EOL> if isinstance ( value , nested_dict ) : <EOL> for keykey , value in value . iterflattened ( ) : <EOL> yield ( key , ) + keykey , value <EOL> else : <EOL> yield ( key , ) , value <EOL> def flatten ( l , ltypes = ( list , tuple ) ) : <EOL> '''<STR_LIT>''' <EOL> ltype = type ( l ) <EOL> l = list ( l ) <EOL> i = <NUM_LIT:0> <EOL> while i < len ( l ) : <EOL> while isinstance ( l [ i ] , ltypes ) : <EOL> if not l [ i ] : <EOL> l . pop ( i ) <EOL> i -= <NUM_LIT:1> <EOL> break <EOL> else : <EOL> l [ i : i + <NUM_LIT:1> ] = l [ i ] <EOL> i += <NUM_LIT:1> <EOL> return ltype ( l ) <EOL> def invert_dictionary ( dict , make_unique = False ) : <EOL> """<STR_LIT>""" <EOL> inv = { } <EOL> if make_unique : <EOL> for k , v in dict . iteritems ( ) : <EOL> inv [ v ] = k <EOL> else : <EOL> for k , v in dict . iteritems ( ) : <EOL> inv . setdefault ( v , [ ] ) . append ( k ) <EOL> return inv <EOL> class FilePool : <EOL> """<STR_LIT>""" <EOL> maxopen = <NUM_LIT> <EOL> def __init__ ( self , <EOL> output_pattern = None , <EOL> header = None , <EOL> force = True ) : <EOL> self . mFiles = { } <EOL> self . mOutputPattern = output_pattern <EOL> self . open = open <EOL> if output_pattern : <EOL> _ , ext = os . path . splitext ( output_pattern ) <EOL> if ext . lower ( ) in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> self . open = gzip . open <EOL> self . mCounts = collections . defaultdict ( int ) <EOL> self . mHeader = header <EOL> if force and output_pattern : <EOL> for f in glob . glob ( re . sub ( "<STR_LIT:%s>" , "<STR_LIT:*>" , output_pattern ) ) : <EOL> os . remove ( f ) <EOL> def __del__ ( self ) : <EOL> """<STR_LIT>""" <EOL> for file in self . mFiles . values ( ) : <EOL> file . close ( ) <EOL> def __len__ ( self ) : <EOL> return len ( self . mCounts ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> for file in self . mFiles . 
values ( ) : <EOL> file . close ( ) <EOL> def values ( self ) : <EOL> return self . mCounts . values ( ) <EOL> def keys ( self ) : <EOL> return self . mCounts . keys ( ) <EOL> def iteritems ( self ) : <EOL> return self . mCounts . iteritems ( ) <EOL> def items ( self ) : <EOL> return self . mCounts . items ( ) <EOL> def __iter__ ( self ) : <EOL> return self . mCounts . __iter__ ( ) <EOL> def getFile ( self , identifier ) : <EOL> return identifier <EOL> def getFilename ( self , identifier ) : <EOL> """<STR_LIT>""" <EOL> if self . mOutputPattern : <EOL> return re . sub ( "<STR_LIT:%s>" , str ( identifier ) , self . mOutputPattern ) <EOL> else : <EOL> return identifier <EOL> def setHeader ( self , header ) : <EOL> """<STR_LIT>""" <EOL> self . mHeader = header <EOL> def openFile ( self , filename , mode = "<STR_LIT:w>" ) : <EOL> """<STR_LIT>""" <EOL> if mode in ( "<STR_LIT:w>" , "<STR_LIT:a>" ) : <EOL> dirname = os . path . dirname ( filename ) <EOL> if dirname and not os . path . exists ( dirname ) : <EOL> os . makedirs ( dirname ) <EOL> return self . open ( filename , mode ) <EOL> def write ( self , identifier , line ) : <EOL> """<STR_LIT>""" <EOL> filename = self . getFilename ( identifier ) <EOL> if filename not in self . mFiles : <EOL> if self . maxopen and len ( self . mFiles ) > self . maxopen : <EOL> for f in self . mFiles . values ( ) : <EOL> f . close ( ) <EOL> self . mFiles = { } <EOL> self . mFiles [ filename ] = self . openFile ( filename , "<STR_LIT:a>" ) <EOL> if self . mHeader : <EOL> self . mFiles [ filename ] . write ( self . mHeader ) <EOL> try : <EOL> self . mFiles [ filename ] . write ( line ) <EOL> except ValueError , msg : <EOL> raise ValueError ( <EOL> "<STR_LIT>" % ( filename , msg ) ) <EOL> self . mCounts [ filename ] += <NUM_LIT:1> <EOL> def deleteFiles ( self , min_size = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> ndeleted = <NUM_LIT:0> <EOL> for filename , counts in self . mCounts . items ( ) : <EOL> if counts < min_size : <EOL> os . 
remove ( filename ) <EOL> ndeleted += <NUM_LIT:1> <EOL> return ndeleted <EOL> class FilePoolMemory ( FilePool ) : <EOL> """<STR_LIT>""" <EOL> maxopen = <NUM_LIT> <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> FilePool . __init__ ( self , * args , ** kwargs ) <EOL> self . data = collections . defaultdict ( list ) <EOL> self . isClosed = False <EOL> def __del__ ( self ) : <EOL> """<STR_LIT>""" <EOL> if not self . isClosed : <EOL> self . close ( ) <EOL> def close ( self ) : <EOL> """<STR_LIT>""" <EOL> if self . isClosed : <EOL> raise IOError ( "<STR_LIT>" ) <EOL> for filename , data in self . data . iteritems ( ) : <EOL> f = self . openFile ( filename , "<STR_LIT:a>" ) <EOL> if self . mHeader : <EOL> f . write ( self . mHeader ) <EOL> f . write ( "<STR_LIT>" . join ( data ) ) <EOL> f . close ( ) <EOL> self . isClosed = True <EOL> def write ( self , identifier , line ) : <EOL> filename = self . getFilename ( identifier ) <EOL> self . data [ filename ] . append ( line ) <EOL> self . mCounts [ filename ] += <NUM_LIT:1> <EOL> def readMap ( infile , <EOL> columns = ( <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> map_functions = ( str , str ) , <EOL> both_directions = False , <EOL> has_header = False , <EOL> dtype = dict ) : <EOL> """<STR_LIT>""" <EOL> m = dtype ( ) <EOL> r = dtype ( ) <EOL> n = <NUM_LIT:0> <EOL> if columns == "<STR_LIT:all>" : <EOL> key_column = <NUM_LIT:0> <EOL> value_column = None <EOL> else : <EOL> key_column , value_column = columns <EOL> key_function , value_function = map_functions <EOL> datatype = None <EOL> for l in infile : <EOL> if l [ <NUM_LIT:0> ] == "<STR_LIT:#>" : <EOL> continue <EOL> n += <NUM_LIT:1> <EOL> if has_header and n == <NUM_LIT:1> : <EOL> if columns == "<STR_LIT:all>" : <EOL> header = l [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> datatype = collections . namedtuple ( "<STR_LIT>" , header [ <NUM_LIT:1> : ] ) <EOL> continue <EOL> d = l [ : - <NUM_LIT:1> ] . 
split ( "<STR_LIT:\t>" ) <EOL> if len ( d ) < <NUM_LIT:2> : <EOL> continue <EOL> key = key_function ( d [ key_column ] ) <EOL> if value_column : <EOL> val = value_function ( d [ value_column ] ) <EOL> elif datatype : <EOL> val = datatype . _make ( [ d [ x ] for x in range ( <NUM_LIT:1> , len ( d ) ) ] ) <EOL> else : <EOL> val = tuple ( map ( value_function , [ d [ x ] for x in range ( <NUM_LIT:1> , len ( d ) ) ] ) ) <EOL> m [ key ] = val <EOL> if val not in r : <EOL> r [ val ] = [ ] <EOL> r [ val ] . append ( key ) <EOL> if both_directions : <EOL> return m , r <EOL> else : <EOL> return m <EOL> def readList ( infile , <EOL> column = <NUM_LIT:0> , <EOL> map_function = str , <EOL> map_category = { } , <EOL> with_title = False ) : <EOL> """<STR_LIT>""" <EOL> m = [ ] <EOL> title = None <EOL> for l in infile : <EOL> if l [ <NUM_LIT:0> ] == "<STR_LIT:#>" : <EOL> continue <EOL> if with_title and not title : <EOL> title = l [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) [ column ] <EOL> continue <EOL> try : <EOL> d = map_function ( l [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) [ column ] ) <EOL> except ValueError : <EOL> continue <EOL> if map_category : <EOL> d = map_category [ d ] <EOL> m . append ( d ) <EOL> return m <EOL> def readMultiMap ( infile , <EOL> columns = ( <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> map_functions = ( str , str ) , <EOL> both_directions = False , <EOL> has_header = False , <EOL> dtype = dict ) : <EOL> """<STR_LIT>""" <EOL> m = dtype ( ) <EOL> r = dtype ( ) <EOL> n = <NUM_LIT:0> <EOL> for l in infile : <EOL> if l [ <NUM_LIT:0> ] == "<STR_LIT:#>" : <EOL> continue <EOL> n += <NUM_LIT:1> <EOL> if has_header and n == <NUM_LIT:1> : <EOL> continue <EOL> d = l [ : - <NUM_LIT:1> ] . 
split ( "<STR_LIT:\t>" ) <EOL> try : <EOL> key = map_functions [ <NUM_LIT:0> ] ( d [ columns [ <NUM_LIT:0> ] ] ) <EOL> val = map_functions [ <NUM_LIT:1> ] ( d [ columns [ <NUM_LIT:1> ] ] ) <EOL> except ( ValueError , IndexError ) , msg : <EOL> raise ValueError ( "<STR_LIT>" % ( l [ : - <NUM_LIT:1> ] , msg ) ) <EOL> if key not in m : <EOL> m [ key ] = [ ] <EOL> m [ key ] . append ( val ) <EOL> if val not in r : <EOL> r [ val ] = [ ] <EOL> r [ val ] . append ( key ) <EOL> if both_directions : <EOL> return m , r <EOL> else : <EOL> return m <EOL> def readMatrix ( infile , dtype = numpy . float ) : <EOL> '''<STR_LIT>''' <EOL> lines = [ l for l in infile . readlines ( ) if not l . startswith ( "<STR_LIT:#>" ) ] <EOL> nrows = len ( lines ) - <NUM_LIT:1> <EOL> col_headers = lines [ <NUM_LIT:0> ] [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) [ <NUM_LIT:1> : ] <EOL> ncols = len ( col_headers ) <EOL> matrix = numpy . zeros ( ( nrows , ncols ) , dtype = dtype ) <EOL> row_headers = [ ] <EOL> for row , l in enumerate ( lines [ <NUM_LIT:1> : ] ) : <EOL> data = l [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> row_headers . append ( data [ <NUM_LIT:0> ] ) <EOL> matrix [ row ] = numpy . array ( data [ <NUM_LIT:1> : ] , dtype = dtype ) <EOL> return matrix , row_headers , col_headers <EOL> def writeMatrix ( outfile , matrix , row_headers , col_headers , row_header = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> outfile . write ( "<STR_LIT>" % ( row_header , "<STR_LIT:\t>" . join ( col_headers ) ) ) <EOL> for x , row in enumerate ( matrix ) : <EOL> assert len ( row ) == len ( col_headers ) <EOL> outfile . write ( "<STR_LIT>" % ( row_headers [ x ] , "<STR_LIT:\t>" . join ( map ( str , row ) ) ) ) <EOL> def readTable ( file , <EOL> separator = "<STR_LIT:\t>" , <EOL> numeric_type = numpy . 
float , <EOL> take = "<STR_LIT:all>" , <EOL> headers = True , <EOL> truncate = None , <EOL> cumulate_out_of_range = True , <EOL> ) : <EOL> """<STR_LIT>""" <EOL> lines = filter ( lambda x : x [ <NUM_LIT:0> ] != "<STR_LIT:#>" , file . readlines ( ) ) <EOL> if len ( lines ) == <NUM_LIT:0> : <EOL> return None , [ ] <EOL> if take == "<STR_LIT:all>" : <EOL> num_cols = len ( string . split ( lines [ <NUM_LIT:0> ] [ : - <NUM_LIT:1> ] , "<STR_LIT:\t>" ) ) <EOL> take = range ( <NUM_LIT:0> , num_cols ) <EOL> else : <EOL> num_cols = len ( take ) <EOL> if headers : <EOL> headers = lines [ <NUM_LIT:0> ] [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> headers = map ( lambda x : headers [ x ] , take ) <EOL> del lines [ <NUM_LIT:0> ] <EOL> num_rows = len ( lines ) <EOL> matrix = numpy . ma . masked_array ( <EOL> numpy . zeros ( ( num_rows , num_cols ) , numeric_type ) ) <EOL> if truncate : <EOL> min_row , max_row = truncate <EOL> nrow = <NUM_LIT:0> <EOL> min_data = [ <NUM_LIT:0> ] * num_cols <EOL> max_data = None <EOL> for l in lines : <EOL> data = l [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> data = map ( lambda x : data [ x ] , take ) <EOL> for x in range ( len ( data ) ) : <EOL> try : <EOL> data [ x ] = float ( data [ x ] ) <EOL> except ValueError : <EOL> data [ x ] = numpy . ma . 
masked <EOL> if truncate is not None : <EOL> if data [ <NUM_LIT:0> ] < min_row : <EOL> if cumulate_out_of_range : <EOL> for x in range ( <NUM_LIT:1> , num_cols ) : <EOL> min_data [ x ] += data [ x ] <EOL> continue <EOL> elif data [ <NUM_LIT:0> ] >= max_row : <EOL> if max_data is None : <EOL> max_data = [ <NUM_LIT:0> ] * num_cols <EOL> max_data [ <NUM_LIT:0> ] = max_row <EOL> for x in range ( <NUM_LIT:1> , num_cols ) : <EOL> try : <EOL> max_data [ x ] += data [ x ] <EOL> except TypeError : <EOL> continue <EOL> continue <EOL> elif min_row is not None : <EOL> if cumulate_out_of_range : <EOL> for x in range ( <NUM_LIT:0> , num_cols ) : <EOL> try : <EOL> min_data [ x ] += data [ x ] <EOL> except TypeError : <EOL> continue <EOL> else : <EOL> min_data = data <EOL> data = min_data <EOL> min_row = None <EOL> for x in range ( len ( data ) ) : <EOL> matrix [ nrow , x ] = data [ x ] <EOL> nrow += <NUM_LIT:1> <EOL> if truncate is not None : <EOL> if cumulate_out_of_range : <EOL> if max_data is not None : <EOL> matrix [ nrow ] = max_data <EOL> matrix = matrix [ <NUM_LIT:0> : nrow + <NUM_LIT:1> , <NUM_LIT:0> : num_cols ] <EOL> return matrix , headers <EOL> def writeTable ( outfile , table , columns = None , fillvalue = "<STR_LIT>" ) : <EOL> '''<STR_LIT>''' <EOL> if type ( table ) == dict : <EOL> if columns is None : <EOL> columns = table . keys ( ) <EOL> outfile . write ( "<STR_LIT:\t>" . join ( columns ) + "<STR_LIT:\n>" ) <EOL> data = [ table [ x ] for x in columns ] <EOL> data = list ( itertools . izip_longest ( * data , fillvalue = fillvalue ) ) <EOL> for d in data : <EOL> outfile . write ( "<STR_LIT:\t>" . 
join ( map ( str , d ) ) + "<STR_LIT:\n>" ) <EOL> else : <EOL> raise NotImplementedError <EOL> def ReadMap ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return readMap ( * args , ** kwargs ) <EOL> def ReadList ( * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> return readList ( * args , ** kwargs ) <EOL> def writeLines ( outfile , lines , header = False ) : <EOL> '''<STR_LIT>''' <EOL> handle = openFile ( outfile , "<STR_LIT:w>" ) <EOL> if header : <EOL> handle . write ( "<STR_LIT:\t>" . join ( [ str ( title ) for title in header ] ) + "<STR_LIT:\n>" ) <EOL> for line in lines : <EOL> handle . write ( "<STR_LIT:\t>" . join ( [ str ( field ) for field in line ] ) + "<STR_LIT:\n>" ) <EOL> handle . close ( ) <EOL> def txtToDict ( filename , key = None , sep = "<STR_LIT:\t>" ) : <EOL> '''<STR_LIT>''' <EOL> count = <NUM_LIT:0> <EOL> result = { } <EOL> valueidx , keyidx = False , False <EOL> field_names = [ ] <EOL> with open ( filename , "<STR_LIT:r>" ) as fh : <EOL> for line in fh : <EOL> if line . startswith ( "<STR_LIT:#>" ) : <EOL> continue <EOL> if count == <NUM_LIT:0> : <EOL> fieldn = <NUM_LIT:0> <EOL> for rawfield in line . split ( sep ) : <EOL> field = rawfield . strip ( ) <EOL> if field == key : <EOL> keyidx = fieldn <EOL> field_names . append ( field ) <EOL> fieldn += <NUM_LIT:1> <EOL> if not keyidx : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> else : <EOL> fields = [ x . strip ( ) for x in line . split ( sep ) ] <EOL> fieldn = <NUM_LIT:0> <EOL> thiskey = fields [ keyidx ] <EOL> result [ thiskey ] = { } <EOL> for field in fields : <EOL> if fieldn == keyidx : <EOL> pass <EOL> else : <EOL> colkey = field_names [ fieldn ] <EOL> result [ thiskey ] [ colkey ] = field <EOL> fieldn += <NUM_LIT:1> <EOL> count += <NUM_LIT:1> <EOL> return ( result ) <EOL> def pickle ( file_name , obj ) : <EOL> '''<STR_LIT>''' <EOL> with open ( file_name , "<STR_LIT:wb>" ) as pkl_file : <EOL> pickle . 
dump ( obj , pkl_file ) <EOL> return <EOL> def unpickle ( file_name ) : <EOL> '''<STR_LIT>''' <EOL> with open ( file_name , "<STR_LIT:r>" ) as pkl_file : <EOL> data = pickle . load ( pkl_file ) <EOL> return data </s>
<s> '''<STR_LIT>''' <EOL> import sklearn . metrics . cluster . supervised as supervised <EOL> from math import log <EOL> import CGAT . Experiment as E <EOL> import numpy as np <EOL> import pandas as pd <EOL> import itertools <EOL> import os <EOL> import sys <EOL> import math <EOL> from rpy2 . robjects import pandas2ri <EOL> from rpy2 . robjects . packages import importr <EOL> from rpy2 . robjects import r as R <EOL> import rpy2 . robjects as ro <EOL> import random <EOL> import cmetrics as c2m <EOL> def get_r_path ( ) : <EOL> """<STR_LIT>""" <EOL> return os . path . dirname ( __file__ ) <EOL> def get_label_map ( labels ) : <EOL> '''<STR_LIT>''' <EOL> label_set = set ( ) <EOL> map_dict = { } <EOL> for val in labels : <EOL> label_set . update ( val ) <EOL> for lab , integer in enumerate ( label_set ) : <EOL> map_dict [ integer ] = lab <EOL> return map_dict <EOL> def make_mapped_matrix ( map_dict , input_frame ) : <EOL> '''<STR_LIT>''' <EOL> frame_index = input_frame . index . tolist ( ) <EOL> nindex = len ( frame_index ) <EOL> ncols = len ( input_frame . columns ) <EOL> integer_matrix = np . ndarray ( ( nindex , ncols ) , <EOL> dtype = np . int32 ) <EOL> E . info ( "<STR_LIT>" ) <EOL> matrix_idx = [ h for h , g in enumerate ( frame_index ) ] <EOL> for idx in matrix_idx : <EOL> for col in range ( ncols ) : <EOL> mod = input_frame . iloc [ idx ] [ col + <NUM_LIT:1> ] <EOL> integer_matrix [ idx ] [ col ] = map_dict [ mod ] <EOL> return integer_matrix <EOL> def randIndexes ( clustering_results ) : <EOL> '''<STR_LIT>''' <EOL> cluster_labels = clustering_results . values <EOL> map_dict = get_label_map ( cluster_labels ) <EOL> gene_map = { } <EOL> for r , gene in enumerate ( clustering_results . index ) : <EOL> gene_map [ gene ] = r <EOL> E . info ( "<STR_LIT>" ) <EOL> integer_matrix = make_mapped_matrix ( map_dict , clustering_results ) <EOL> E . info ( "<STR_LIT>" ) <EOL> cy_rand = c2m . consensus_metrics ( integer_matrix ) <EOL> E . 
info ( "<STR_LIT>" ) <EOL> return cy_rand <EOL> def unravel_arrays ( metric_array ) : <EOL> '''<STR_LIT>''' <EOL> dim = metric_array . shape [ <NUM_LIT:0> ] <EOL> flat_array = [ ] <EOL> for indx in itertools . combinations ( range ( <NUM_LIT:0> , dim ) , r = <NUM_LIT:2> ) : <EOL> if indx [ <NUM_LIT:0> ] != indx [ <NUM_LIT:1> ] : <EOL> flat_array . append ( metric_array [ indx [ <NUM_LIT:1> ] , indx [ <NUM_LIT:0> ] ] ) <EOL> else : <EOL> pass <EOL> return flat_array <EOL> def mutualInformation ( cluster1 , cluster2 ) : <EOL> '''<STR_LIT>''' <EOL> cont = contingency ( cluster1 , cluster2 ) <EOL> cont_sum = np . sum ( cont ) <EOL> pi = np . sum ( cont , axis = <NUM_LIT:1> ) <EOL> pj = np . sum ( cont , axis = <NUM_LIT:0> ) <EOL> outer = np . outer ( pi , pj ) <EOL> nnz = cont != <NUM_LIT:0> <EOL> cont_nm = cont [ nnz ] <EOL> log_cont_nm = np . log ( cont_nm ) <EOL> cont_nm /= cont_sum <EOL> log_outer = - np . log ( outer [ nnz ] ) + log ( pi . sum ( ) ) + log ( pj . sum ( ) ) <EOL> mi = ( cont_nm * ( log_cont_nm - log ( cont_sum ) ) + ( cont_nm * log_outer ) ) <EOL> return mi . sum ( ) <EOL> def contingency ( cluster1 , cluster2 ) : <EOL> '''<STR_LIT>''' <EOL> cont = pd . DataFrame ( columns = cluster1 . keys ( ) , index = cluster2 . keys ( ) ) <EOL> cont = cont . fillna ( <NUM_LIT:0.0> ) <EOL> for x in itertools . product ( cluster1 . keys ( ) , cluster2 . keys ( ) ) : <EOL> set1 = cluster1 [ x [ <NUM_LIT:0> ] ] <EOL> set2 = cluster2 [ x [ <NUM_LIT:1> ] ] <EOL> intersect = len ( set1 . intersection ( set2 ) ) <EOL> cont [ x [ <NUM_LIT:0> ] ] [ x [ <NUM_LIT:1> ] ] = intersect <EOL> cont = cont . as_matrix ( ) <EOL> return cont <EOL> def entropy ( cluster_labels ) : <EOL> '''<STR_LIT>''' <EOL> if len ( cluster_labels ) == <NUM_LIT:0> : <EOL> return <NUM_LIT:1.0> <EOL> else : <EOL> pass <EOL> cluster_prob = [ len ( cluster_labels [ x ] ) for x in cluster_labels . keys ( ) ] <EOL> pi = np . array ( cluster_prob ) . astype ( np . 
float ) <EOL> pi = pi [ pi > <NUM_LIT:0> ] <EOL> pi_sum = np . sum ( pi ) <EOL> entropy = - np . sum ( ( pi / pi_sum ) * ( np . log ( pi ) - log ( pi_sum ) ) ) <EOL> return entropy <EOL> def adjustedMutualInformation ( cluster1 , cluster2 ) : <EOL> '''<STR_LIT>''' <EOL> cont = contingency ( cluster1 , cluster2 ) <EOL> mi = mutualInformation ( cluster1 , cluster2 ) <EOL> sample_size = float ( sum ( [ len ( cluster1 [ x ] ) for x in cluster1 . keys ( ) ] ) ) <EOL> emi = supervised . expected_mutual_information ( cont , sample_size ) <EOL> h_clust1 , h_clust2 = entropy ( cluster1 ) , entropy ( cluster2 ) <EOL> if abs ( h_clust1 ) == <NUM_LIT:0.0> : <EOL> h_clust1 = <NUM_LIT:0.0> <EOL> else : <EOL> pass <EOL> if abs ( h_clust2 ) == <NUM_LIT:0.0> : <EOL> h_clust2 = <NUM_LIT:0.0> <EOL> else : <EOL> pass <EOL> ami = ( mi - emi ) / ( max ( h_clust1 , h_clust2 ) - emi ) <EOL> if np . isnan ( ami ) : <EOL> ami = np . nan_to_num ( ami ) <EOL> else : <EOL> pass <EOL> return ami <EOL> def deseqNormalize ( infile , <EOL> time_points , <EOL> reps , <EOL> conditions = None ) : <EOL> '''<STR_LIT>''' <EOL> pandas2ri . activate ( ) <EOL> reps = reps <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" ) <EOL> if infile . split ( "<STR_LIT:.>" ) [ - <NUM_LIT:1> ] == "<STR_LIT>" : <EOL> comp = "<STR_LIT>" <EOL> else : <EOL> comp = None <EOL> data_frame = pd . read_table ( infile , <EOL> index_col = <NUM_LIT:0> , <EOL> header = <NUM_LIT:0> , <EOL> sep = "<STR_LIT:\t>" , <EOL> compression = comp ) <EOL> pandas2ri . activate ( ) <EOL> rdf = pandas2ri . py2ri ( data_frame ) <EOL> if not conditions : <EOL> time_rep_comb = [ x for x in itertools . product ( time_points , reps ) ] <EOL> time_cond = ro . StrVector ( [ x [ <NUM_LIT:0> ] for x in time_rep_comb ] ) <EOL> rep_cond = ro . StrVector ( [ x [ <NUM_LIT:1> ] for x in time_rep_comb ] ) <EOL> R . assign ( '<STR_LIT>' , rdf ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % ( time_cond . r_repr ( ) , <EOL> rep_cond . 
r_repr ( ) ) ) <EOL> elif conditions : <EOL> design_dict = { } <EOL> for x in data_frame . columns . values : <EOL> sample_dict = { } <EOL> sample_dict [ '<STR_LIT>' ] = str ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> sample_dict [ '<STR_LIT>' ] = int ( str ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:1> ] ) <EOL> sample_dict [ '<STR_LIT>' ] = str ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:2> ] <EOL> design_dict [ x ] = sample_dict <EOL> design_frame = pd . DataFrame ( design_dict ) <EOL> design_frame = design_frame . T <EOL> des_cond = design_frame [ '<STR_LIT>' ] . values . tolist ( ) <EOL> des_time = design_frame [ '<STR_LIT>' ] . values . tolist ( ) <EOL> des_reps = design_frame [ '<STR_LIT>' ] . values . tolist ( ) <EOL> cond_cond = ro . StrVector ( [ x for x in des_cond ] ) <EOL> time_cond = ro . StrVector ( [ x for x in des_time ] ) <EOL> rep_cond = ro . StrVector ( [ x for x in des_reps ] ) <EOL> R . assign ( '<STR_LIT>' , rdf ) <EOL> R . assign ( '<STR_LIT>' , design_frame ) <EOL> E . info ( "<STR_LIT>" ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % rep_cond . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % time_cond . r_repr ( ) ) <EOL> if conditions : <EOL> R ( '''<STR_LIT>''' % cond_cond . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> else : <EOL> R ( '''<STR_LIT>''' ) <EOL> data_file = pandas2ri . ri2py ( R [ "<STR_LIT>" ] ) <EOL> return data_file <EOL> def avTimeExpression ( infile ) : <EOL> '''<STR_LIT>''' <EOL> if infile . split ( "<STR_LIT:.>" ) [ - <NUM_LIT:1> ] == "<STR_LIT>" : <EOL> comp = "<STR_LIT>" <EOL> else : <EOL> comp = None <EOL> df = pd . read_table ( infile , sep = "<STR_LIT:\t>" , <EOL> header = <NUM_LIT:0> , index_col = <NUM_LIT:0> , <EOL> compression = comp ) <EOL> df_groups = df . groupby ( by = '<STR_LIT>' ) <EOL> data_frame = pd . DataFrame ( index = df . 
columns , <EOL> columns = None ) <EOL> for names , groups in df_groups : <EOL> _df = groups . drop ( [ '<STR_LIT>' , '<STR_LIT>' ] , axis = <NUM_LIT:1> ) <EOL> _df = _df . apply ( np . mean , axis = <NUM_LIT:0> ) <EOL> data_frame [ names ] = _df <EOL> try : <EOL> data_frame . drop ( [ '<STR_LIT>' , '<STR_LIT>' ] , <EOL> inplace = True , <EOL> axis = <NUM_LIT:0> ) <EOL> except KeyError : <EOL> pass <EOL> return data_frame <EOL> def covarFilter ( infile , <EOL> time_points , <EOL> replicates , <EOL> quantile ) : <EOL> '''<STR_LIT>''' <EOL> time_points . sort ( ) <EOL> time_rep_comb = [ x for x in itertools . product ( time_points , replicates ) ] <EOL> time_cond = ro . StrVector ( [ x [ <NUM_LIT:0> ] for x in time_rep_comb ] ) <EOL> rep_cond = ro . StrVector ( [ x [ <NUM_LIT:1> ] for x in time_rep_comb ] ) <EOL> df = pd . read_table ( infile , sep = "<STR_LIT:\t>" , header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> df . drop ( [ '<STR_LIT>' ] , inplace = True , axis = <NUM_LIT:1> ) <EOL> df . drop ( [ '<STR_LIT>' ] , inplace = True , axis = <NUM_LIT:1> ) <EOL> df = df . fillna ( <NUM_LIT:0.0> ) <EOL> pandas2ri . activate ( ) <EOL> R . assign ( '<STR_LIT>' , pandas2ri . py2ri ( df ) ) <EOL> E . info ( "<STR_LIT>" ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % time_cond . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % rep_cond . r_repr ( ) ) <EOL> E . info ( "<STR_LIT>" ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> filtered_frame = pandas2ri . ri2py ( R [ "<STR_LIT>" ] ) . T <EOL> return filtered_frame <EOL> def clusterPCA ( infile , <EOL> cluster_file , <EOL> image_dir ) : <EOL> '''<STR_LIT>''' <EOL> header = cluster_file . split ( "<STR_LIT:/>" ) [ - <NUM_LIT:1> ] . 
split ( "<STR_LIT:->" ) [ <NUM_LIT:0> ] <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % os . path . join ( get_r_path ( ) , "<STR_LIT>" ) ) <EOL> R ( '''<STR_LIT>''' % os . path . join ( get_r_path ( ) , "<STR_LIT>" ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> eigen_frame = pandas2ri . ri2py ( R [ "<STR_LIT>" ] ) <EOL> eigen_frame . index = eigen_frame [ '<STR_LIT>' ] <EOL> eigen_frame . drop ( [ '<STR_LIT>' ] , inplace = True , axis = <NUM_LIT:1> ) <EOL> return eigen_frame <EOL> def conditionDESeq2 ( data_frame , header , alpha , res_dir ) : <EOL> '''<STR_LIT>''' <EOL> E . info ( "<STR_LIT>" % header ) <EOL> cols = data_frame . columns <EOL> pandas2ri . activate ( ) <EOL> counts = pandas2ri . py2ri ( data_frame ) <EOL> des_times = ro . IntVector ( [ x . split ( "<STR_LIT:.>" ) [ <NUM_LIT:1> ] for x in cols ] ) <EOL> des_reps = ro . StrVector ( [ x . split ( "<STR_LIT:.>" ) [ <NUM_LIT:2> ] for x in cols ] ) <EOL> des_cond = ro . StrVector ( [ x . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] for x in cols ] ) <EOL> genes = ro . StrVector ( [ x for x in data_frame . 
index ] ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % des_times . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % des_reps . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % des_cond . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % counts . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % genes . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % ( res_dir , <EOL> header ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % ( res_dir , <EOL> header ) ) <EOL> R ( '''<STR_LIT>''' % alpha ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> df = pandas2ri . ri2py ( R [ '<STR_LIT>' ] ) <EOL> return df <EOL> def timepointDESeq2 ( data_frame , header , alpha , res_dir ) : <EOL> '''<STR_LIT>''' <EOL> E . info ( "<STR_LIT>" % header ) <EOL> cols = data_frame . columns <EOL> pandas2ri . activate ( ) <EOL> counts = pandas2ri . py2ri ( data_frame ) <EOL> des_times = ro . IntVector ( [ x . split ( "<STR_LIT:.>" ) [ <NUM_LIT:1> ] for x in cols ] ) <EOL> des_reps = ro . StrVector ( [ x . split ( "<STR_LIT:.>" ) [ <NUM_LIT:2> ] for x in cols ] ) <EOL> genes = ro . StrVector ( [ x for x in data_frame . index ] ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % des_times . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % des_reps . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % counts . r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' % genes . 
r_repr ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % ( res_dir , <EOL> header ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' % ( res_dir , <EOL> header ) ) <EOL> R ( '''<STR_LIT>''' % alpha ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> df = pandas2ri . ri2py ( R [ '<STR_LIT>' ] ) <EOL> return df <EOL> def genSigGenes ( file_list , alpha , out_dir ) : <EOL> '''<STR_LIT>''' <EOL> alpha = float ( alpha ) <EOL> deg_dict = { } <EOL> for infle in file_list : <EOL> if infle . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> header = infle . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . split ( "<STR_LIT:.>" ) [ <NUM_LIT:1> ] <EOL> header = header . rstrip ( "<STR_LIT>" ) <EOL> header = "<STR_LIT:%s>" % header <EOL> elif infle . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> header = infle . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . split ( "<STR_LIT:->" ) [ <NUM_LIT:0> ] <EOL> header = "<STR_LIT>" % ( header . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] , <EOL> header . split ( "<STR_LIT:_>" ) [ <NUM_LIT:2> ] ) <EOL> in_df = pd . read_table ( infle , sep = "<STR_LIT:\t>" , header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> sig_genes = in_df [ in_df [ '<STR_LIT>' ] <= alpha ] <EOL> deg_dict [ header ] = sig_genes . index . tolist ( ) <EOL> if file_list [ <NUM_LIT:0> ] . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> condition = file_list [ <NUM_LIT:0> ] . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> condition = "<STR_LIT>" % condition <EOL> elif file_list [ <NUM_LIT:0> ] . split ( "<STR_LIT:/>" ) [ <NUM_LIT:0> ] . 
split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] == "<STR_LIT>" : <EOL> condition = file_list [ <NUM_LIT:0> ] . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] <EOL> condition = "<STR_LIT>" % condition <EOL> drawVennDiagram ( deg_dict , condition , out_dir ) <EOL> def drawVennDiagram ( deg_dict , header , out_dir ) : <EOL> '''<STR_LIT>''' <EOL> keys = deg_dict . keys ( ) <EOL> try : <EOL> keys = sorted ( keys , key = lambda x : int ( x . split ( "<STR_LIT:_>" ) [ <NUM_LIT:1> ] . rstrip ( "<STR_LIT>" ) ) ) <EOL> except IndexError : <EOL> pass <EOL> venn_size = len ( keys ) <EOL> R ( '''<STR_LIT>''' ) <EOL> n1 = set ( deg_dict [ keys [ <NUM_LIT:0> ] ] ) <EOL> n2 = set ( deg_dict [ keys [ <NUM_LIT:1> ] ] ) <EOL> area1 = len ( n1 ) <EOL> area2 = len ( n2 ) <EOL> n12 = len ( n1 . intersection ( n2 ) ) <EOL> if venn_size == <NUM_LIT:3> : <EOL> n3 = set ( deg_dict [ keys [ <NUM_LIT:2> ] ] ) <EOL> area3 = len ( n3 ) <EOL> n13 = len ( n1 . intersection ( n3 ) ) <EOL> n23 = len ( n2 . intersection ( n3 ) ) <EOL> n123 = len ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) <EOL> cat1 , cat2 , cat3 = keys <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> elif venn_size == <NUM_LIT:4> : <EOL> n3 = set ( deg_dict [ keys [ <NUM_LIT:2> ] ] ) <EOL> area3 = len ( n3 ) <EOL> n13 = len ( n1 . intersection ( n3 ) ) <EOL> n23 = len ( n2 . intersection ( n3 ) ) <EOL> n123 = len ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) <EOL> n4 = set ( deg_dict [ keys [ <NUM_LIT:3> ] ] ) <EOL> area4 = len ( n4 ) <EOL> n14 = len ( n1 . intersection ( n4 ) ) <EOL> n24 = len ( n2 . intersection ( n4 ) ) <EOL> n34 = len ( n3 . intersection ( n4 ) ) <EOL> n124 = len ( ( n1 . intersection ( n2 ) ) . intersection ( n4 ) ) <EOL> n134 = len ( ( n1 . intersection ( n3 ) ) . 
intersection ( n4 ) ) <EOL> n234 = len ( ( n2 . intersection ( n3 ) ) . intersection ( n4 ) ) <EOL> n1234 = len ( ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) . intersection ( n4 ) ) <EOL> cat1 , cat2 , cat3 , cat4 = keys <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> elif venn_size == <NUM_LIT:5> : <EOL> n3 = set ( deg_dict [ keys [ <NUM_LIT:2> ] ] ) <EOL> area3 = len ( n3 ) <EOL> n13 = len ( n1 . intersection ( n3 ) ) <EOL> n23 = len ( n2 . intersection ( n3 ) ) <EOL> n123 = len ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) <EOL> n4 = set ( deg_dict [ keys [ <NUM_LIT:3> ] ] ) <EOL> area4 = len ( n4 ) <EOL> n14 = len ( n1 . intersection ( n4 ) ) <EOL> n24 = len ( n2 . intersection ( n4 ) ) <EOL> n34 = len ( n3 . intersection ( n4 ) ) <EOL> n124 = len ( ( n1 . intersection ( n2 ) ) . intersection ( n4 ) ) <EOL> n134 = len ( ( n1 . intersection ( n3 ) ) . intersection ( n4 ) ) <EOL> n234 = len ( ( n2 . intersection ( n3 ) ) . intersection ( n4 ) ) <EOL> n1234 = len ( ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) . intersection ( n4 ) ) <EOL> n5 = set ( deg_dict [ keys [ <NUM_LIT:4> ] ] ) <EOL> area5 = len ( n5 ) <EOL> n15 = len ( n1 . intersection ( n5 ) ) <EOL> n25 = len ( n2 . intersection ( n5 ) ) <EOL> n35 = len ( n3 . intersection ( n5 ) ) <EOL> n45 = len ( n4 . intersection ( n5 ) ) <EOL> n125 = len ( ( n1 . intersection ( n2 ) ) . intersection ( n5 ) ) <EOL> n135 = len ( ( n1 . intersection ( n3 ) ) . intersection ( n5 ) ) <EOL> n145 = len ( ( n1 . intersection ( n4 ) ) . intersection ( n5 ) ) <EOL> n235 = len ( ( n2 . intersection ( n3 ) ) . intersection ( n5 ) ) <EOL> n245 = len ( ( n2 . intersection ( n4 ) ) . intersection ( n5 ) ) <EOL> n345 = len ( ( n3 . intersection ( n4 ) ) . 
intersection ( n5 ) ) <EOL> n1235 = len ( ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) . intersection ( n5 ) ) <EOL> n1245 = len ( ( ( n1 . intersection ( n2 ) ) . intersection ( n4 ) ) . intersection ( n5 ) ) <EOL> n1345 = len ( ( ( n1 . intersection ( n3 ) ) . intersection ( n4 ) ) . intersection ( n5 ) ) <EOL> n2345 = len ( ( ( n2 . intersection ( n3 ) ) . intersection ( n4 ) ) . intersection ( n5 ) ) <EOL> nstep = ( ( n1 . intersection ( n2 ) ) . intersection ( n3 ) ) <EOL> n12345 = len ( ( nstep . intersection ( n4 ) ) . intersection ( n5 ) ) <EOL> cat1 , cat2 , cat3 , cat4 , cat5 = keys <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> elif venn_size > <NUM_LIT:5> : <EOL> raise ValueError ( "<STR_LIT>" ) <EOL> def maSigPro ( infile , <EOL> order_terms = <NUM_LIT:1> , <EOL> fdr = <NUM_LIT> , <EOL> adjust = "<STR_LIT>" , <EOL> stepwise = "<STR_LIT>" , <EOL> include_p = <NUM_LIT> , <EOL> rsq = <NUM_LIT> , <EOL> var_group = "<STR_LIT:all>" ) : <EOL> '''<STR_LIT>''' <EOL> ref_gtf = str ( infile ) . split ( "<STR_LIT:->" ) [ <NUM_LIT:1> ] <EOL> data_frame = pd . read_table ( infile , sep = "<STR_LIT:\t>" , index_col = <NUM_LIT:0> , header = <NUM_LIT:0> ) <EOL> design_dict = { } <EOL> for x in data_frame . index . values : <EOL> sample_dict = { } <EOL> condition = str ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:0> ] <EOL> sample_dict [ condition ] = <NUM_LIT:1> <EOL> sample_dict [ '<STR_LIT>' ] = int ( str ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:1> ] ) <EOL> sample_dict [ '<STR_LIT>' ] = str ( x ) . split ( "<STR_LIT:.>" ) [ <NUM_LIT:2> ] <EOL> design_dict [ x ] = sample_dict <EOL> design_frame = pd . 
DataFrame ( design_dict ) <EOL> design_frame = design_frame . T <EOL> cols = [ '<STR_LIT>' , '<STR_LIT>' , condition ] <EOL> design_frame = design_frame [ cols ] <EOL> design_file = "<STR_LIT>" % ( condition , ref_gtf ) <EOL> design_frame . to_csv ( design_file , sep = "<STR_LIT:\t>" ) <EOL> data_file = "<STR_LIT>" % ( condition , ref_gtf ) <EOL> results_file = "<STR_LIT>" % ( condition , ref_gtf ) <EOL> masigpro_out = "<STR_LIT>" <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' % locals ( ) ) <EOL> E . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % order_terms ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> E . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % fdr ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> p_file = "<STR_LIT>" % locals ( ) <EOL> coef_file = "<STR_LIT>" % ( condition , <EOL> ref_gtf ) <EOL> p_frame = pd . read_table ( p_file , sep = "<STR_LIT:\t>" ) <EOL> coef_frame = pd . read_table ( coef_file , sep = "<STR_LIT:\t>" ) <EOL> results_frame = pd . merge ( coef_frame , p_frame , <EOL> how = '<STR_LIT:right>' , <EOL> left_index = True , <EOL> right_index = True ) <EOL> results_frame . to_csv ( results_file , sep = "<STR_LIT:\t>" ) <EOL> R ( '''<STR_LIT>''' % locals ( ) ) <EOL> diff_genes = pandas2ri . 
ri2py [ <STR_LIT> ] <EOL> return diff_genes <EOL> def splitReplicates ( infile , <EOL> axis , <EOL> group_var , <EOL> outdir ) : <EOL> '''<STR_LIT>''' <EOL> if axis == "<STR_LIT>" : <EOL> axis = <NUM_LIT:1> <EOL> elif axis == "<STR_LIT>" : <EOL> axis = <NUM_LIT:0> <EOL> inf_prefix = infile . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . split ( "<STR_LIT:->" ) <EOL> inf_prefix = inf_prefix [ <NUM_LIT:0> ] + "<STR_LIT:->" + inf_prefix [ <NUM_LIT:1> ] <EOL> df = pd . read_table ( infile , <EOL> sep = "<STR_LIT:\t>" , <EOL> header = <NUM_LIT:0> , <EOL> index_col = <NUM_LIT:0> ) . T <EOL> rep_groups = df . groupby ( by = group_var , <EOL> axis = axis ) <EOL> for name , groups in rep_groups : <EOL> outfile = outdir + "<STR_LIT:/>" + inf_prefix + "<STR_LIT>" % name <EOL> _df = groups . T <EOL> _df . columns = _df . loc [ '<STR_LIT>' ] <EOL> _df . drop ( [ '<STR_LIT>' ] , axis = axis , inplace = True ) <EOL> _df . drop ( [ '<STR_LIT>' ] , axis = axis , inplace = True ) <EOL> _df . to_csv ( outfile , sep = "<STR_LIT:\t>" , index_label = "<STR_LIT>" ) <EOL> def genResampleData ( data_frame , <EOL> multiple_index , <EOL> replicates , <EOL> sample_reps , <EOL> times , <EOL> condition , <EOL> ref_gtf , <EOL> out_dir , <EOL> seed ) : <EOL> '''<STR_LIT>''' <EOL> vst_long = data_frame . T <EOL> vst_long . index = multiple_index <EOL> reps_dict = { } <EOL> random . seed ( seed ) <EOL> for it in range ( <NUM_LIT:1> , replicates + <NUM_LIT:1> ) : <EOL> df = pd . DataFrame ( ) <EOL> df_dict = { } <EOL> for i in times : <EOL> k = str ( random . randint ( <NUM_LIT:1> , <EOL> len ( sample_reps ) ) ) <EOL> series = vst_long . loc [ str ( i ) , '<STR_LIT>' % k ] <EOL> df_dict [ str ( i ) ] = series <EOL> df = pd . DataFrame ( df_dict ) <EOL> cols = df . columns . tolist ( ) <EOL> cols = [ int ( x ) for x in cols ] <EOL> cols . 
sort ( ) <EOL> cols = [ str ( x ) for x in cols ] <EOL> df = df [ cols ] <EOL> reps_dict [ str ( it ) ] = df <EOL> table = "<STR_LIT>" % ( condition , <EOL> ref_gtf , <EOL> it ) <EOL> seg_file = "<STR_LIT>" % ( out_dir , table ) <EOL> df . to_csv ( seg_file , sep = "<STR_LIT:\t>" ) <EOL> sys . stdout . write ( "<STR_LIT>" % replicates ) <EOL> def temporalCorrelate ( series1 , series2 ) : <EOL> '''<STR_LIT>''' <EOL> series1 = list ( series1 ) <EOL> series2 = list ( series2 ) <EOL> sum_prod = [ ] <EOL> sum_usq = [ ] <EOL> sum_vsq = [ ] <EOL> for i in range ( len ( series1 ) - <NUM_LIT:1> ) : <EOL> u = float ( series1 [ i + <NUM_LIT:1> ] ) - float ( series1 [ i ] ) <EOL> v = float ( series2 [ i + <NUM_LIT:1> ] ) - float ( series2 [ i ] ) <EOL> prod = u * v <EOL> sum_prod . append ( prod ) <EOL> sq_u = u ** <NUM_LIT:2> <EOL> sq_v = v ** <NUM_LIT:2> <EOL> sum_usq . append ( sq_u ) <EOL> sum_vsq . append ( sq_v ) <EOL> nume = sum ( sum_prod ) <EOL> denom = math . sqrt ( sum ( sum_usq ) ) * math . sqrt ( sum ( sum_vsq ) ) <EOL> if denom != <NUM_LIT:0> : <EOL> return ( nume / float ( denom ) ) <EOL> else : <EOL> return <NUM_LIT:0> <EOL> def crossCorrelate ( t , s , lag = <NUM_LIT:0> ) : <EOL> '''<STR_LIT>''' <EOL> t_mean = np . mean ( t ) <EOL> s_mean = np . mean ( s ) <EOL> t_std = np . std ( t ) <EOL> s_std = np . std ( s ) <EOL> len_t = len ( t ) <EOL> t_norm = [ ( ( x - t_mean ) / ( t_std * len_t ) ) for x in t ] <EOL> s_norm = [ ( ( y - s_mean ) / s_std ) for y in s ] <EOL> if lag == <NUM_LIT:0> : <EOL> xcorr = np . correlate ( t_norm , s_norm ) <EOL> elif lag != <NUM_LIT:0> : <EOL> xcorr = np . correlate ( t_norm , s_norm , mode = <NUM_LIT:2> ) [ len_t - <NUM_LIT:1> + lag ] <EOL> return xcorr <EOL> def adaptiveTune ( value , k ) : <EOL> '''<STR_LIT>''' <EOL> if k == <NUM_LIT:0> : <EOL> return <NUM_LIT:1.0> <EOL> else : <EOL> return ( <NUM_LIT:2> / ( <NUM_LIT:1> + math . 
exp ( k * abs ( value ) ) ) ) <EOL> def dtwWrapper ( data , rows , columns , k ) : <EOL> '''<STR_LIT>''' <EOL> DTW = importr ( "<STR_LIT>" ) <EOL> df_ = pd . DataFrame ( index = rows , <EOL> columns = columns ) <EOL> df_ = df_ . fillna ( <NUM_LIT:0.0> ) . astype ( np . float64 ) <EOL> pandas2ri . activate ( ) <EOL> for i in rows : <EOL> E . info ( "<STR_LIT>" % i ) <EOL> for j in columns : <EOL> series1 = data . loc [ i ] . values . tolist ( ) <EOL> series2 = data . loc [ j ] . values . tolist ( ) <EOL> DTW_value = ( R . dtw ( series1 , <EOL> series2 ) ) . rx ( '<STR_LIT>' ) [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] <EOL> cort_value = temporalCorrelate ( series1 , series2 ) <EOL> tuned_value = adaptiveTune ( cort_value , k ) <EOL> time_dist = DTW_value * tuned_value <EOL> df_ . loc [ i ] [ j ] = float ( time_dist ) <EOL> df_ [ j ] [ i ] = float ( time_dist ) <EOL> return df_ <EOL> def correlateDistanceMetric ( data , rows , columns , method , lag = <NUM_LIT:0> ) : <EOL> '''<STR_LIT>''' <EOL> df_ = pd . DataFrame ( index = rows , <EOL> columns = columns ) <EOL> df_ = df_ . fillna ( <NUM_LIT:0.0> ) <EOL> if method == "<STR_LIT>" : <EOL> for i in rows : <EOL> E . info ( "<STR_LIT>" % i ) <EOL> for j in columns : <EOL> series1 = data . loc [ i ] . values . tolist ( ) <EOL> series2 = data . loc [ j ] . values . tolist ( ) <EOL> corr = crossCorrelate ( series1 , series2 , lag = lag ) <EOL> df_ . loc [ i ] [ j ] = <NUM_LIT:1.0> - abs ( corr ) <EOL> df_ [ j ] [ i ] = <NUM_LIT:1.0> - abs ( corr ) <EOL> elif method == "<STR_LIT>" : <EOL> for i in rows : <EOL> E . info ( "<STR_LIT>" % i ) <EOL> for j in columns : <EOL> series1 = data . loc [ i ] . tolist ( ) <EOL> series2 = data . loc [ j ] . tolist ( ) <EOL> corr = temporalCorrelate ( series1 , series2 ) <EOL> df_ . loc [ i ] [ j ] = <NUM_LIT:1.0> - abs ( corr ) <EOL> df_ [ j ] [ i ] = <NUM_LIT:1.0> - abs ( corr ) <EOL> return df_ <EOL> def splitFiles ( infile , nchunks , out_dir ) : <EOL> '''<STR_LIT>''' <EOL> df = pd . 
read_table ( infile , sep = "<STR_LIT:\t>" , header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> total = len ( df . index . tolist ( ) ) <EOL> if total / nchunks < <NUM_LIT:100> : <EOL> step = <NUM_LIT:100> <EOL> E . warn ( "<STR_LIT>" ) <EOL> elif total / nchunks > <NUM_LIT> : <EOL> step = <NUM_LIT> <EOL> E . warn ( "<STR_LIT>" ) <EOL> else : <EOL> step = total / nchunks <EOL> E . info ( "<STR_LIT>" % step ) <EOL> file_pattern = infile . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . rstrip ( "<STR_LIT>" ) <EOL> idx = <NUM_LIT:0> <EOL> for i in range ( step , total , step ) : <EOL> start = "<STR_LIT:%s>" % idx <EOL> end = "<STR_LIT:%s>" % i <EOL> file_name = "<STR_LIT>" % ( out_dir , <EOL> file_pattern , <EOL> start , <EOL> end ) <EOL> with open ( file_name , "<STR_LIT:w>" ) as file_handle : <EOL> file_handle . write ( file_name + "<STR_LIT:\n>" ) <EOL> idx = i <EOL> start = "<STR_LIT:%s>" % idx <EOL> end = "<STR_LIT:%s>" % total <EOL> file_name = "<STR_LIT>" % ( out_dir , <EOL> file_pattern , <EOL> start , <EOL> end ) <EOL> with open ( file_name , "<STR_LIT:w>" ) as file_handle : <EOL> file_handle . write ( file_name + "<STR_LIT:\n>" ) <EOL> def mergeFiles ( file_list , outfile ) : <EOL> '''<STR_LIT>''' <EOL> res_list = sorted ( file_list , <EOL> key = lambda x : int ( x . split ( "<STR_LIT:/>" ) [ - <NUM_LIT:1> ] . split ( "<STR_LIT:->" ) [ <NUM_LIT:3> ] . split ( "<STR_LIT:_>" ) [ <NUM_LIT:0> ] ) ) <EOL> full_frame = pd . read_table ( res_list [ <NUM_LIT:0> ] , sep = "<STR_LIT:\t>" , index_col = <NUM_LIT:0> , header = <NUM_LIT:0> ) <EOL> res_list . remove ( res_list [ <NUM_LIT:0> ] ) <EOL> for fle in res_list : <EOL> df = pd . read_table ( fle , sep = "<STR_LIT:\t>" , index_col = <NUM_LIT:0> , header = <NUM_LIT:0> ) <EOL> full_frame = pd . merge ( left = full_frame , <EOL> right = df , <EOL> how = '<STR_LIT>' , <EOL> left_index = True , <EOL> right_index = True ) <EOL> full_frame . 
to_csv ( outfile , sep = "<STR_LIT:\t>" ) <EOL> def treeCutting ( infile , <EOL> expression_file , <EOL> cluster_file , <EOL> cluster_algorithm , <EOL> deepsplit = False ) : <EOL> '''<STR_LIT>''' <EOL> wgcna_out = "<STR_LIT>" <EOL> E . info ( "<STR_LIT>" ) <EOL> df = pd . read_table ( infile , sep = "<STR_LIT:\t>" , <EOL> header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> df = df . fillna ( <NUM_LIT:0.0> ) <EOL> genes = df . index <EOL> genes_r = ro . StrVector ( [ g for g in genes ] ) <EOL> pandas2ri . activate ( ) <EOL> rdf = pandas2ri . py2ri ( df ) <EOL> R . assign ( "<STR_LIT>" , rdf ) <EOL> R . assign ( "<STR_LIT>" , genes_r ) <EOL> R ( '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" % cluster_algorithm ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> if deepsplit : <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> else : <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> cluster_frame = pandas2ri . ri2py ( R [ "<STR_LIT>" ] ) <EOL> cluster_frame . columns = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> cluster_frame . index = cluster_frame [ '<STR_LIT>' ] <EOL> cluster_frame . drop ( [ '<STR_LIT>' ] , inplace = True , axis = <NUM_LIT:1> ) <EOL> return cluster_frame <EOL> def clusterAverage ( file_list ) : <EOL> '''<STR_LIT>''' <EOL> df_dict = { } <EOL> for fle in file_list : <EOL> f = fle . split ( "<STR_LIT:/>" ) [ - <NUM_LIT:1> ] <EOL> rep = f . split ( "<STR_LIT:->" ) [ <NUM_LIT:2> ] <EOL> _df = pd . read_table ( fle , sep = "<STR_LIT:\t>" , <EOL> header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> df_dict [ rep ] = _df <EOL> concat_df = pd . concat ( df_dict ) <EOL> group_df = concat_df . 
groupby ( level = <NUM_LIT:1> ) <EOL> agg_dict = { } <EOL> for names , groups in group_df : <EOL> agg_dict [ names ] = np . mean ( groups , axis = <NUM_LIT:0> ) <EOL> agg_df = pd . DataFrame ( agg_dict ) <EOL> return agg_df <EOL> def clusterAgreement ( infile ) : <EOL> '''<STR_LIT>''' <EOL> df = pd . read_table ( infile , sep = "<STR_LIT:\t>" , header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> genes = df . index . values <EOL> dmat = pd . DataFrame ( index = genes , <EOL> columns = genes ) <EOL> dmat = dmat . fillna ( <NUM_LIT:0> ) <EOL> reps = df . columns . values <EOL> for i in reps : <EOL> clusters = set ( df [ i ] . values . tolist ( ) ) <EOL> cluster_dict = { } <EOL> for col in clusters : <EOL> cluster_dict [ col ] = [ ] <EOL> for gene in genes : <EOL> k_gene = df [ i ] [ gene ] <EOL> cluster_dict [ k_gene ] . append ( gene ) <EOL> rep_list = [ ] <EOL> for col in clusters : <EOL> col_set = set ( ) <EOL> clust_col = cluster_dict [ col ] <EOL> gene_members = itertools . combinations_with_replacement ( clust_col , <EOL> <NUM_LIT:2> ) <EOL> col_set . add ( gene_members ) <EOL> rep_list . append ( col_set ) <EOL> for cluster_set in rep_list : <EOL> for combs in cluster_set : <EOL> for x in combs : <EOL> if x [ <NUM_LIT:0> ] == x [ <NUM_LIT:1> ] : <EOL> dmat [ x [ <NUM_LIT:0> ] ] [ x [ <NUM_LIT:1> ] ] += <NUM_LIT:1> <EOL> else : <EOL> dmat [ x [ <NUM_LIT:0> ] ] [ x [ <NUM_LIT:1> ] ] += <NUM_LIT:1> <EOL> dmat [ x [ <NUM_LIT:1> ] ] [ x [ <NUM_LIT:0> ] ] += <NUM_LIT:1> <EOL> prob = lambda x : x / float ( len ( reps ) ) <EOL> probs_df = dmat . applymap ( prob ) <EOL> return probs_df <EOL> def consensusClustering ( infile , <EOL> cutHeight , <EOL> cluster_algorithm , <EOL> min_size = <NUM_LIT:30> , <EOL> deepsplit = False ) : <EOL> '''<STR_LIT>''' <EOL> condition = infile . split ( "<STR_LIT:/>" ) [ <NUM_LIT:1> ] . 
split ( "<STR_LIT:->" ) [ <NUM_LIT:0> ] <EOL> wgcna_out = "<STR_LIT>" <EOL> R ( '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" ) <EOL> df = pd . read_table ( infile , sep = "<STR_LIT:\t>" , header = <NUM_LIT:0> , index_col = <NUM_LIT:0> ) <EOL> labels = df . index . tolist ( ) <EOL> labels_r = ro . StrVector ( [ l for l in labels ] ) <EOL> pandas2ri . activate ( ) <EOL> df_r = pandas2ri . py2ri ( df ) <EOL> R . assign ( "<STR_LIT>" , df_r ) <EOL> R . assign ( "<STR_LIT>" , labels_r ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> E . info ( "<STR_LIT>" % cluster_algorithm ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> if cutHeight > float ( <NUM_LIT> ) : <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> elif deepsplit : <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> else : <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' % locals ( ) ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' ) <EOL> R ( '''<STR_LIT>''' <EOL> '''<STR_LIT>''' ) <EOL> cluster_frame = pandas2ri . ri2py ( R [ "<STR_LIT>" ] ) <EOL> return cluster_frame </s>
<s> '''<STR_LIT>''' <EOL> USAGE = """<STR_LIT>""" <EOL> import sys , re , os , string , getopt <EOL> import alignlib <EOL> param_master = <NUM_LIT:0> <EOL> param_format = None <EOL> param_multiple_alignment = None <EOL> GAPCHARS = ( "<STR_LIT:.>" , "<STR_LIT:->" ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> try : <EOL> optlist , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , <EOL> "<STR_LIT>" , <EOL> [ "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" ] ) <EOL> except getopt . error , msg : <EOL> print USAGE , msg <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> for o , a in optlist : <EOL> if o in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> param_format = a <EOL> elif o in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> param_max_iterations = string . atoi ( a ) <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> print "<STR_LIT>" <EOL> print USAGE <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> residues = map ( lambda x : string . split ( x [ : - <NUM_LIT:1> ] , "<STR_LIT:\t>" ) [ <NUM_LIT:0> ] , filter ( lambda x : not re . match ( "<STR_LIT:#>" , x ) , open ( args [ <NUM_LIT:0> ] , "<STR_LIT:r>" ) . readlines ( ) ) ) <EOL> residues_map = { } <EOL> for r in residues : <EOL> residues_map [ r ] = <NUM_LIT:1> <EOL> for line in sys . stdin : <EOL> if line [ : <NUM_LIT:6> ] not in ( "<STR_LIT>" , "<STR_LIT>" ) : continue <EOL> chain = line [ <NUM_LIT> ] <EOL> number = string . strip ( line [ <NUM_LIT> : <NUM_LIT> ] ) <EOL> aa = line [ <NUM_LIT> : <NUM_LIT:20> ] <EOL> atom = string . strip ( line [ <NUM_LIT> : <NUM_LIT> ] ) <EOL> if not residues_map . has_key ( number ) : <EOL> continue <EOL> print line [ : - <NUM_LIT:1> ] </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import string <EOL> import getopt <EOL> import socket <EOL> class SocketException ( Exception ) : <EOL> def __init__ ( self , value ) : <EOL> self . value = value <EOL> def __str__ ( self ) : <EOL> return repr ( self . value ) <EOL> class Socket : <EOL> mBufferSize = <NUM_LIT> <EOL> mHeaderSize = <NUM_LIT:10> <EOL> def __init__ ( self , sock = None ) : <EOL> if sock : <EOL> self . mSocket = sock <EOL> else : <EOL> self . mSocket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> def __del__ ( self ) : <EOL> self . Close ( ) <EOL> def MakeServer ( self , host , port ) : <EOL> hostname = socket . gethostbyname ( host ) <EOL> self . mSocket . bind ( ( hostname , port ) ) <EOL> self . mSocket . listen ( <NUM_LIT:1> ) <EOL> def MakeClient ( self , host , port ) : <EOL> hostname = socket . gethostbyname ( host ) <EOL> self . mSocket = socket . socket ( socket . AF_INET , socket . SOCK_STREAM ) <EOL> self . mSocket . connect ( ( hostname , port ) ) <EOL> def SendMessage ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> lmsg = len ( msg ) <EOL> m = str ( lmsg ) + "<STR_LIT:U+0020>" * ( self . mHeaderSize - len ( str ( lmsg ) ) ) <EOL> sent = self . mSocket . send ( m ) <EOL> if sent != self . mHeaderSize : <EOL> raise SocketException ( "<STR_LIT>" ) <EOL> sent = <NUM_LIT:0> <EOL> while sent < lmsg : <EOL> r = self . mSocket . send ( msg [ sent : sent + self . mBufferSize ] ) <EOL> if r == <NUM_LIT:0> : <EOL> raise SocketException ( "<STR_LIT>" ) <EOL> sent += r <EOL> def ReceiveMessage ( self ) : <EOL> """<STR_LIT>""" <EOL> msg = "<STR_LIT>" <EOL> while len ( msg ) < self . mHeaderSize : <EOL> chunk = self . mSocket . recv ( self . mHeaderSize - len ( msg ) ) <EOL> if chunk == "<STR_LIT>" : <EOL> raise SocketException ( "<STR_LIT>" ) <EOL> msg += chunk <EOL> lmsg = string . atoi ( msg ) <EOL> msg = "<STR_LIT>" <EOL> while lmsg > <NUM_LIT:0> : <EOL> chunk = self . mSocket . recv ( min ( self . 
mBufferSize , lmsg ) ) <EOL> if chunk == "<STR_LIT>" : <EOL> raise SocketException ( "<STR_LIT>" ) <EOL> msg += chunk <EOL> lmsg -= len ( chunk ) <EOL> return msg <EOL> def Accept ( self ) : <EOL> """<STR_LIT>""" <EOL> conn , addr = self . mSocket . accept ( ) <EOL> return Socket ( conn ) , addr <EOL> def Close ( self ) : <EOL> """<STR_LIT>""" <EOL> self . mSocket . close ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> param_mode = None <EOL> param_host = "<STR_LIT>" <EOL> param_port = <NUM_LIT> <EOL> param_loglevel = <NUM_LIT:4> <EOL> try : <EOL> optlist , args = getopt . getopt ( sys . argv [ <NUM_LIT:1> : ] , <EOL> "<STR_LIT>" , <EOL> [ "<STR_LIT>" ] ) <EOL> except getopt . error , msg : <EOL> print USAGE <EOL> print msg <EOL> sys . exit ( <NUM_LIT:2> ) <EOL> for o , a in optlist : <EOL> if o in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> param_host = a <EOL> if len ( args ) != <NUM_LIT:1> : <EOL> raise "<STR_LIT>" <EOL> param_mode = args [ <NUM_LIT:0> ] <EOL> if param_loglevel >= <NUM_LIT:2> : <EOL> print "<STR_LIT>" , param_host <EOL> print "<STR_LIT>" , param_port <EOL> print "<STR_LIT>" , param_mode <EOL> sock = Socket ( ) <EOL> if param_mode == "<STR_LIT>" : <EOL> print "<STR_LIT>" <EOL> sock . MakeServer ( param_host , param_port ) <EOL> while <NUM_LIT:1> : <EOL> conn , addr = sock . Accept ( ) <EOL> msg = conn . ReceiveMessage ( ) <EOL> print "<STR_LIT>" , msg <EOL> msg += "<STR_LIT:a>" <EOL> conn . SendMessage ( msg ) <EOL> print "<STR_LIT>" , msg <EOL> sock . Close ( ) <EOL> elif param_mode == "<STR_LIT>" : <EOL> print "<STR_LIT>" <EOL> msg = "<STR_LIT:b>" <EOL> for x in range ( <NUM_LIT:1> , <NUM_LIT:10> ) : <EOL> sock . MakeClient ( param_host , param_port ) <EOL> sock . SendMessage ( msg ) <EOL> print "<STR_LIT>" , msg <EOL> msg = sock . ReceiveMessage ( ) <EOL> print "<STR_LIT>" , msg <EOL> msg += "<STR_LIT:b>" <EOL> sock . Close ( ) </s>
<s> """<STR_LIT>""" <EOL> import sys <EOL> import tempfile <EOL> import optparse <EOL> import shutil <EOL> import itertools <EOL> import csv <EOL> import math <EOL> import random <EOL> import re <EOL> import glob <EOL> import os <EOL> import shutil <EOL> import collections <EOL> import CGAT . Experiment as E <EOL> import logging as L <EOL> from ruffus import * <EOL> import csv <EOL> import sqlite3 <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> import CGAT . IndexedGenome as IndexedGenome <EOL> import CGAT . FastaIterator as FastaIterator <EOL> import CGAT . Genomics as Genomics <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . MAST as MAST <EOL> import CGAT . GTF as GTF <EOL> import CGAT . GFF as GFF <EOL> import CGAT . Bed as Bed <EOL> import cStringIO <EOL> import pysam <EOL> import numpy <EOL> import gzip <EOL> import fileinput <EOL> import CGATPipelines . PipelineTracks as PipelineTracks <EOL> import CGATPipelines . PipelineMapping as PipelineMapping <EOL> from bein . util import * <EOL> USECLUSTER = True <EOL> import CGAT . Pipeline as P <EOL> P . getParameters ( [ "<STR_LIT>" % __file__ [ : - len ( "<STR_LIT>" ) ] , "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> PARAMS = P . 
PARAMS <EOL> bowtie_options = { '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , <EOL> '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" , '<STR_LIT>' : "<STR_LIT>" } <EOL> @ files ( [ ( PARAMS [ "<STR_LIT>" ] , "<STR_LIT>" % x , bowtie_options . get ( x ) ) for x in bowtie_options . keys ( ) ] ) <EOL> def buildBAM ( infile , outfile , options ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = True <EOL> job_options = "<STR_LIT>" % PARAMS [ "<STR_LIT>" ] <EOL> m = PipelineMapping . Bowtie ( ) <EOL> reffile = PARAMS [ "<STR_LIT>" ] <EOL> bowtie_options = options <EOL> statement = m . build ( ( infile , ) , outfile ) <EOL> P . run ( ) <EOL> @ transform ( buildBAM , <EOL> regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def sortByName ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = USECLUSTER <EOL> track = P . snip ( outfile , "<STR_LIT>" ) <EOL> statement = '''<STR_LIT>''' <EOL> P . 
run ( ) <EOL> @ transform ( sortByName , <EOL> regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def addNHTag ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = USECLUSTER <EOL> inf = pysam . Samfile ( infile , "<STR_LIT:rb>" ) <EOL> outf = pysam . Samfile ( outfile , "<STR_LIT:wb>" , template = inf ) <EOL> for readset in read_sets ( inf , keep_unmapped = True ) : <EOL> nh = len ( readset ) <EOL> for read in readset : <EOL> if ( read . is_unmapped ) : <EOL> nh = <NUM_LIT:0> <EOL> read . tags = read . tags + [ ( "<STR_LIT>" , nh ) ] <EOL> outf . write ( read ) <EOL> inf . close ( ) <EOL> outf . close ( ) <EOL> @ transform ( addNHTag , <EOL> regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def sortByPosition ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = USECLUSTER <EOL> track = P . snip ( outfile , "<STR_LIT>" ) <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( sortByPosition , <EOL> regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def dedup ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = USECLUSTER <EOL> track = P . snip ( outfile , "<STR_LIT>" ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> statement += '''<STR_LIT>''' % locals ( ) <EOL> P . run ( ) <EOL> @ merge ( dedup , "<STR_LIT>" ) <EOL> def loadPicardDuplicateStats ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = P . toTable ( outfile ) <EOL> outf = open ( '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> first = True <EOL> for f in infiles : <EOL> track = P . snip ( os . path . basename ( f ) , "<STR_LIT>" ) <EOL> statfile = P . snip ( f , "<STR_LIT>" ) + "<STR_LIT>" <EOL> if not os . path . exists ( statfile ) : <EOL> E . warn ( "<STR_LIT>" % statfile ) <EOL> continue <EOL> lines = [ x for x in open ( statfile , "<STR_LIT:r>" ) . readlines ( ) if not x . startswith ( "<STR_LIT:#>" ) and x . strip ( ) ] <EOL> if first : outf . write ( "<STR_LIT>" % ( "<STR_LIT>" , lines [ <NUM_LIT:0> ] ) ) <EOL> first = False <EOL> outf . 
write ( "<STR_LIT>" % ( track , lines [ <NUM_LIT:1> ] ) ) <EOL> outf . close ( ) <EOL> tmpfilename = outf . name <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ transform ( dedup , <EOL> regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def buildBAMStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = USECLUSTER <EOL> scriptsdir = PARAMS [ "<STR_LIT>" ] <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> @ merge ( buildBAMStats , "<STR_LIT>" ) <EOL> def loadBAMStats ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> scriptsdir = PARAMS [ "<STR_LIT>" ] <EOL> header = "<STR_LIT:U+002C>" . join ( [ P . snip ( os . path . basename ( x ) , "<STR_LIT>" ) for x in infiles ] ) <EOL> filenames = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % x for x in infiles ] ) <EOL> tablename = P . toTable ( outfile ) <EOL> E . info ( "<STR_LIT>" ) <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> for suffix in ( "<STR_LIT>" , "<STR_LIT>" ) : <EOL> E . info ( "<STR_LIT>" % suffix ) <EOL> filenames = "<STR_LIT:U+0020>" . join ( [ "<STR_LIT>" % ( x , suffix ) for x in infiles ] ) <EOL> tname = "<STR_LIT>" % ( tablename , suffix ) <EOL> statement = """<STR_LIT>""" <EOL> P . run ( ) <EOL> @ transform ( dedup , <EOL> regex ( r"<STR_LIT>" ) , <EOL> r"<STR_LIT>" ) <EOL> def buildPicardAlignStats ( infile , outfile ) : <EOL> '''<STR_LIT>''' <EOL> to_cluster = USECLUSTER <EOL> track = P . snip ( os . path . basename ( infile ) , "<STR_LIT>" ) <EOL> statement = '''<STR_LIT>''' % locals ( ) <EOL> P . run ( ) <EOL> @ merge ( buildPicardAlignStats , "<STR_LIT>" ) <EOL> def loadPicardAlignStats ( infiles , outfile ) : <EOL> '''<STR_LIT>''' <EOL> tablename = P . toTable ( outfile ) <EOL> outf = P . getTempFile ( ) <EOL> first = True <EOL> for f in infiles : <EOL> track = P . snip ( os . path . basename ( f ) , "<STR_LIT>" ) <EOL> if not os . path . exists ( f ) : <EOL> E . 
warn ( "<STR_LIT>" % f ) <EOL> continue <EOL> lines = [ x for x in open ( f , "<STR_LIT:r>" ) . readlines ( ) if not x . startswith ( "<STR_LIT:#>" ) and x . strip ( ) ] <EOL> if first : outf . write ( "<STR_LIT>" % ( "<STR_LIT>" , lines [ <NUM_LIT:0> ] ) ) <EOL> first = False <EOL> for i in range ( <NUM_LIT:1> , len ( lines ) ) : <EOL> outf . write ( "<STR_LIT>" % ( track , lines [ i ] ) ) <EOL> outf . close ( ) <EOL> tmpfilename = outf . name <EOL> statement = '''<STR_LIT>''' <EOL> P . run ( ) <EOL> os . unlink ( tmpfilename ) <EOL> @ follows ( buildBAM , sortByName , addNHTag , sortByPosition , dedup , <EOL> loadPicardDuplicateStats , buildBAMStats , loadBAMStats ) <EOL> def mapReads ( ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ follows ( mapReads ) <EOL> def full ( ) : <EOL> '''<STR_LIT>''' <EOL> pass <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> def build_report ( ) : <EOL> '''<STR_LIT>''' <EOL> E . info ( "<STR_LIT>" ) <EOL> P . run_report ( clean = True ) <EOL> @ follows ( mkdir ( "<STR_LIT>" ) ) <EOL> def update_report ( ) : <EOL> '''<STR_LIT>''' <EOL> E . info ( "<STR_LIT>" ) <EOL> P . run_report ( clean = False ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( P . main ( sys . argv ) ) </s>
<s> from math import log <EOL> from commands import getoutput <EOL> from string import letters <EOL> from random import choice <EOL> import os , sys , re , optparse <EOL> import Experiment as E <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if not argv : argv = sys . argv <EOL> parser = optparse . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> infile = open ( args [ <NUM_LIT:0> ] , '<STR_LIT:r>' ) <EOL> min_rpkm = float ( args [ <NUM_LIT:1> ] ) <EOL> outfile = open ( args [ <NUM_LIT:2> ] + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> if argv [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] == '<STR_LIT:/>' or argv [ <NUM_LIT:1> ] [ <NUM_LIT:0> ] == '<STR_LIT>' : <EOL> ABS_PATH = True <EOL> else : <EOL> ABS_PATH = False <EOL> for line in infile : <EOL> if line . startswith ( "<STR_LIT:#>" ) : continue <EOL> if line . startswith ( "<STR_LIT>" ) : <EOL> header = line [ : - <NUM_LIT:1> ] . split ( '<STR_LIT:\t>' ) [ <NUM_LIT:2> : : ] <EOL> num_samples = len ( header ) <EOL> outfile . write ( "<STR_LIT>" % num_samples ) <EOL> the_matrix = [ ] <EOL> for i in range ( num_samples ) : <EOL> the_matrix . append ( [ <NUM_LIT:0.0> ] * num_samples ) <EOL> continue <EOL> la = map ( float , line . rstrip ( '<STR_LIT:\n>' ) . split ( '<STR_LIT:\t>' ) [ <NUM_LIT:2> : : ] ) <EOL> if max ( la ) < min_rpkm : <EOL> continue <EOL> la = map ( lambda x : x + <NUM_LIT> , la ) <EOL> avg_rpkm = float ( sum ( la ) ) / len ( la ) <EOL> ratios = map ( lambda x : log ( x / avg_rpkm , <NUM_LIT:2> ) , la ) <EOL> for i in range ( num_samples ) : <EOL> for j in range ( num_samples ) : <EOL> the_matrix [ i ] [ j ] += abs ( ratios [ i ] - ratios [ j ] ) <EOL> for i in range ( num_samples ) : <EOL> outfile . write ( "<STR_LIT>" % header [ i ] ) <EOL> for j in range ( num_samples ) : <EOL> outfile . write ( '<STR_LIT:U+0020>' + str ( the_matrix [ i ] [ j ] ) ) <EOL> outfile . 
write ( '<STR_LIT:\n>' ) <EOL> infile . close ( ) ; outfile . close ( ) <EOL> commands_file = open ( argv [ <NUM_LIT:3> ] + '<STR_LIT>' , '<STR_LIT:w>' ) <EOL> TMP_DIR = "<STR_LIT>" . join ( [ choice ( letters ) for x in xrange ( <NUM_LIT:10> ) ] ) ; getoutput ( '<STR_LIT>' % TMP_DIR ) <EOL> if ABS_PATH : <EOL> commands_file . write ( '<STR_LIT>' % ( argv [ <NUM_LIT:3> ] + '<STR_LIT>' ) ) <EOL> commands_file . close ( ) <EOL> getoutput ( '<STR_LIT>' % ( TMP_DIR , argv [ <NUM_LIT:3> ] + '<STR_LIT>' , argv [ <NUM_LIT:3> ] ) ) <EOL> else : <EOL> commands_file . write ( '<STR_LIT>' % ( argv [ <NUM_LIT:3> ] + '<STR_LIT>' ) ) <EOL> commands_file . close ( ) <EOL> getoutput ( '<STR_LIT>' % ( TMP_DIR , argv [ <NUM_LIT:3> ] + '<STR_LIT>' , argv [ <NUM_LIT:3> ] ) ) <EOL> getoutput ( '<STR_LIT>' % TMP_DIR ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> import random <EOL> import sys <EOL> import gzip <EOL> import CGAT . Pipeline as P <EOL> import CGAT . Experiment as E <EOL> def write_random_records ( fqa , fqb , outfa , outfb , N ) : <EOL> """<STR_LIT>""" <EOL> records = sum ( <NUM_LIT:1> for _ in gzip . open ( fqa ) ) / <NUM_LIT:4> <EOL> rand_records = sorted ( [ random . randint ( <NUM_LIT:0> , records - <NUM_LIT:1> ) for _ in xrange ( N ) ] ) <EOL> suba , subb = gzip . open ( outfa , "<STR_LIT:w>" ) , gzip . open ( outfb , "<STR_LIT:w>" ) <EOL> fha , fhb = gzip . open ( fqa ) , gzip . open ( fqb ) <EOL> rec_no = - <NUM_LIT:1> <EOL> for rr in rand_records : <EOL> while rec_no < rr : <EOL> rec_no += <NUM_LIT:1> <EOL> for i in range ( <NUM_LIT:4> ) : fha . readline ( ) <EOL> for i in range ( <NUM_LIT:4> ) : fhb . readline ( ) <EOL> for i in range ( <NUM_LIT:4> ) : <EOL> suba . write ( fha . readline ( ) ) <EOL> subb . write ( fhb . readline ( ) ) <EOL> rec_no += <NUM_LIT:1> <EOL> print >> sys . stderr , "<STR_LIT>" % ( suba . name , subb . name ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> assert int ( sys . argv [ <NUM_LIT:5> ] ) , "<STR_LIT>" <EOL> write_random_records ( sys . argv [ <NUM_LIT:1> ] , sys . argv [ <NUM_LIT:2> ] , sys . argv [ <NUM_LIT:3> ] , sys . argv [ <NUM_LIT:4> ] , int ( sys . argv [ <NUM_LIT:5> ] ) ) </s>
<s> '''<STR_LIT>''' <EOL> import os <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . GTF as GTF <EOL> import pysam <EOL> try : <EOL> import pyximport <EOL> pyximport . install ( build_in_temp = False ) <EOL> import _bam2stats <EOL> except ImportError : <EOL> import CGAT . _bam2stats as _bam2stats <EOL> FLAGS = { <EOL> <NUM_LIT:1> : '<STR_LIT>' , <EOL> <NUM_LIT:2> : '<STR_LIT>' , <EOL> <NUM_LIT:4> : '<STR_LIT>' , <EOL> <NUM_LIT:8> : '<STR_LIT>' , <EOL> <NUM_LIT:16> : '<STR_LIT>' , <EOL> <NUM_LIT:32> : '<STR_LIT>' , <EOL> <NUM_LIT:64> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> <NUM_LIT> : '<STR_LIT>' , <EOL> } <EOL> def computeMappedReadsFromAlignments ( total_alignments , nh , max_hi ) : <EOL> '''<STR_LIT>''' <EOL> nreads_mapped = total_alignments <EOL> if len ( nh ) > <NUM_LIT:0> : <EOL> max_nh = max ( nh . keys ( ) ) <EOL> if max_hi > <NUM_LIT:0> : <EOL> for x in xrange ( <NUM_LIT:2> , min ( max_nh + <NUM_LIT:1> , max_hi ) ) : <EOL> nreads_mapped -= ( nh [ x ] / x ) * ( x - <NUM_LIT:1> ) <EOL> for x in xrange ( max_hi , max_nh + <NUM_LIT:1> ) : <EOL> nreads_mapped -= ( nh [ x ] / max_hi ) * ( max_hi - <NUM_LIT:1> ) <EOL> else : <EOL> for x in xrange ( <NUM_LIT:2> , max ( nh . keys ( ) ) + <NUM_LIT:1> ) : <EOL> nreads_mapped -= ( nh [ x ] / x ) * ( x - <NUM_LIT:1> ) <EOL> return nreads_mapped <EOL> def writeNH ( outfile , nh , max_hi ) : <EOL> '''<STR_LIT>''' <EOL> max_nh = max ( nh . keys ( ) ) <EOL> if max_hi > <NUM_LIT:0> : <EOL> for x in xrange ( <NUM_LIT:1> , min ( max_nh + <NUM_LIT:1> , max_hi ) ) : <EOL> if nh [ x ] == <NUM_LIT:0> : <EOL> continue <EOL> outfile . write ( "<STR_LIT>" % ( x , nh [ x ] / x ) ) <EOL> for x in xrange ( max_hi , max_nh + <NUM_LIT:1> ) : <EOL> if nh [ x ] == <NUM_LIT:0> : <EOL> continue <EOL> outfile . 
write ( "<STR_LIT>" % ( x , nh [ x ] / max_hi ) ) <EOL> else : <EOL> for x in xrange ( <NUM_LIT:1> , max_nh + <NUM_LIT:1> ) : <EOL> if nh [ x ] == <NUM_LIT:0> : <EOL> continue <EOL> outfile . write ( "<STR_LIT>" % ( x , nh [ x ] / x ) ) <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if not argv : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> metavar = '<STR_LIT>' , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> filename_rna = None , <EOL> remove_rna = False , <EOL> input_reads = <NUM_LIT:0> , <EOL> force_output = False , <EOL> filename_fastq = None , <EOL> output_details = False , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , argv = argv , add_output_options = True ) <EOL> if options . filename_rna : <EOL> rna = GTF . readAndIndex ( <EOL> GTF . iterator ( IOTools . openFile ( options . filename_rna ) ) ) <EOL> else : <EOL> rna = None <EOL> if len ( args ) > <NUM_LIT:0> : <EOL> pysam_in = pysam . AlignmentFile ( args [ <NUM_LIT:0> ] , "<STR_LIT:rb>" ) <EOL> elif options . stdin == sys . stdin : <EOL> pysam_in = pysam . 
AlignmentFile ( "<STR_LIT:->" , "<STR_LIT:rb>" ) <EOL> else : <EOL> pysam_in = pysam . AlignmentFile ( options . stdin , "<STR_LIT:rb>" ) <EOL> if options . output_details : <EOL> outfile_details = E . openOutputFile ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> else : <EOL> outfile_details = None <EOL> if options . filename_fastq and not os . path . exists ( options . filename_fastq ) : <EOL> raise IOError ( "<STR_LIT>" % options . filename_fastq ) <EOL> ( counter , flags_counts , nh_filtered , nh_all , <EOL> nm_filtered , nm_all , mapq , mapq_all , max_hi ) = _bam2stats . count ( pysam_in , <EOL> options . remove_rna , <EOL> rna , <EOL> filename_fastq = options . filename_fastq , <EOL> outfile_details = outfile_details ) <EOL> if max_hi > <NUM_LIT:0> and max_hi != max ( nh_all . keys ( ) ) : <EOL> E . warn ( "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> % ( max_hi , max ( nh_all . keys ( ) ) ) ) <EOL> outs = options . stdout <EOL> outs . write ( "<STR_LIT>" ) <EOL> def _write ( outs , text , numerator , denominator , base ) : <EOL> percent = IOTools . prettyPercent ( numerator , denominator ) <EOL> outs . write ( '<STR_LIT>' % ( text , <EOL> numerator , <EOL> percent , <EOL> base ) ) <EOL> nalignments_unmapped = flags_counts [ "<STR_LIT>" ] <EOL> nalignments_mapped = counter . alignments_input - nalignments_unmapped <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . alignments_input , <EOL> counter . alignments_input , <EOL> "<STR_LIT>" ) <EOL> if counter . alignments_input == <NUM_LIT:0> : <EOL> E . warn ( "<STR_LIT>" ) <EOL> E . Stop ( ) <EOL> return <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> nalignments_mapped , <EOL> counter . alignments_input , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> nalignments_unmapped , <EOL> counter . alignments_input , <EOL> '<STR_LIT>' ) <EOL> if nalignments_mapped == <NUM_LIT:0> : <EOL> E . warn ( "<STR_LIT>" ) <EOL> E . Stop ( ) <EOL> return <EOL> for flag , counts in flags_counts . 
iteritems ( ) : <EOL> if flag == "<STR_LIT>" : <EOL> continue <EOL> _write ( outs , <EOL> '<STR_LIT>' + flag , <EOL> counts , <EOL> nalignments_mapped , <EOL> '<STR_LIT>' ) <EOL> if options . filename_rna : <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . alignments_rna , <EOL> nalignments_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . alignments_no_rna , <EOL> nalignments_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . alignments_filtered , <EOL> nalignments_mapped , <EOL> "<STR_LIT>" ) <EOL> if counter . filtered == nalignments_mapped : <EOL> normby = "<STR_LIT>" <EOL> else : <EOL> normby = "<STR_LIT>" <EOL> if counter . filtered > <NUM_LIT:0> : <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . alignments_duplicates , <EOL> counter . alignments_filtered , <EOL> normby ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . aligmnments_filtered - counter . alignments_duplicates , <EOL> counter . alignments_filtered , <EOL> normby ) <EOL> if options . filename_fastq : <EOL> nreads_total = counter . total_read <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read , <EOL> nreads_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read_is_unmapped , <EOL> nreads_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read_is_mapped , <EOL> nreads_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read_is_missing , <EOL> nreads_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read_is_mapped_uniq , <EOL> counter . total_read_is_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read_is_mmap , <EOL> counter . total_read_is_mapped , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> E . 
warn ( '<STR_LIT>' ) <EOL> nreads_unmapped = flags_counts [ "<STR_LIT>" ] <EOL> nreads_mapped = computeMappedReadsFromAlignments ( nalignments_mapped , <EOL> nh_all , max_hi ) <EOL> nreads_missing = <NUM_LIT:0> <EOL> if options . input_reads : <EOL> nreads_total = options . input_reads <EOL> if nreads_unmapped : <EOL> nreads_missing = nreads_total - nreads_unmapped - nreads_mapped <EOL> else : <EOL> nreads_unmapped = nreads_total - nreads_mapped <EOL> elif nreads_unmapped : <EOL> nreads_total = nreads_mapped + nreads_unmapped <EOL> else : <EOL> nreads_unmapped = <NUM_LIT:0> <EOL> nreads_total = nreads_mapped <EOL> outs . write ( "<STR_LIT>" % <EOL> ( nreads_total , <NUM_LIT> ) ) <EOL> outs . write ( "<STR_LIT>" % <EOL> ( nreads_mapped , <NUM_LIT> * nreads_mapped / nreads_total ) ) <EOL> outs . write ( "<STR_LIT>" % <EOL> ( nreads_unmapped , <NUM_LIT> * nreads_unmapped / nreads_total ) ) <EOL> outs . write ( "<STR_LIT>" % <EOL> ( nreads_missing , <NUM_LIT> * nreads_missing / nreads_total ) ) <EOL> if len ( nh_all ) > <NUM_LIT:1> : <EOL> outs . write ( "<STR_LIT>" % <EOL> ( nh_all [ <NUM_LIT:1> ] , <NUM_LIT> * nh_all [ <NUM_LIT:1> ] / nreads_mapped ) ) <EOL> if options . filename_rna : <EOL> nreads_norna = computeMappedReadsFromAlignments ( <EOL> counter . filtered , nh_filtered , max_hi ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> nreads_norna , <EOL> nreads_mapped , <EOL> "<STR_LIT>" ) <EOL> if len ( nh_filtered ) > <NUM_LIT:1> : <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> nh_filtered [ <NUM_LIT:1> ] , <EOL> nreads_norna , <EOL> "<STR_LIT>" ) <EOL> pysam_in . close ( ) <EOL> if flags_counts [ "<STR_LIT>" ] > <NUM_LIT:0> : <EOL> if options . filename_fastq : <EOL> pairs_mapped = counter . total_pair_is_mapped <EOL> assert counter . total_pair_is_mapped == ( counter . total_pair_is_proper_uniq + <EOL> counter . total_pair_is_incomplete_uniq + <EOL> counter . total_pair_is_incomplete_mmap + <EOL> counter . total_pair_is_proper_duplicate + <EOL> counter . 
total_pair_is_proper_mmap + <EOL> counter . total_pair_not_proper_uniq + <EOL> counter . total_pair_is_other ) <EOL> outs . write ( "<STR_LIT>" % <EOL> ( counter . total_pairs , <EOL> <NUM_LIT> * counter . total_pairs / counter . total_pairs ) ) <EOL> outs . write ( "<STR_LIT>" % <EOL> ( pairs_mapped , <EOL> <NUM_LIT> * pairs_mapped / counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_unmapped , <EOL> <NUM_LIT> * counter . total_pair_is_unmapped / counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_proper_uniq , <EOL> <NUM_LIT> * counter . total_pair_is_proper_uniq / <EOL> counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_incomplete_uniq , <EOL> <NUM_LIT> * counter . total_pair_is_incomplete_uniq / <EOL> counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_incomplete_mmap , <EOL> <NUM_LIT> * counter . total_pair_is_incomplete_mmap / <EOL> counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_proper_duplicate , <EOL> <NUM_LIT> * counter . total_pair_is_proper_duplicate / <EOL> counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_proper_mmap , <EOL> <NUM_LIT> * counter . total_pair_is_proper_mmap / <EOL> counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_not_proper_uniq , <EOL> <NUM_LIT> * counter . total_pair_not_proper_uniq / <EOL> counter . total_pairs ) ) <EOL> outs . write ( <EOL> "<STR_LIT>" % <EOL> ( counter . total_pair_is_other , <EOL> <NUM_LIT> * counter . total_pair_is_other / <EOL> counter . total_pairs ) ) <EOL> nread1_total = counter . total_read1 <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read1 , <EOL> nread1_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . 
total_read1_is_unmapped , <EOL> nread1_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read1_is_mapped , <EOL> nread1_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read1_is_mapped_uniq , <EOL> counter . total_read1_is_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read1_is_mmap , <EOL> counter . total_read1_is_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read1_is_missing , <EOL> counter . total_read1_is_mapped , <EOL> '<STR_LIT>' ) <EOL> nread2_total = counter . total_read2 <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read2 , <EOL> nread2_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read2_is_unmapped , <EOL> nread2_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read2_is_mapped , <EOL> nread2_total , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read2_is_mapped_uniq , <EOL> counter . total_read2_is_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read2_is_mmap , <EOL> counter . total_read2_is_mapped , <EOL> '<STR_LIT>' ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> counter . total_read2_is_missing , <EOL> counter . total_read2_is_mapped , <EOL> '<STR_LIT>' ) <EOL> else : <EOL> pairs_total = nreads_total // <NUM_LIT:2> <EOL> pairs_mapped = flags_counts [ "<STR_LIT>" ] // <NUM_LIT:2> <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> pairs_total , <EOL> pairs_total , <EOL> "<STR_LIT>" ) <EOL> _write ( outs , <EOL> "<STR_LIT>" , <EOL> pairs_mapped , <EOL> pairs_total , <EOL> "<STR_LIT>" ) <EOL> else : <EOL> pairs_total = pairs_mapped = <NUM_LIT:0> <EOL> outs . write ( "<STR_LIT>" % <EOL> ( pairs_total , <NUM_LIT:0.0> ) ) <EOL> outs . 
write ( "<STR_LIT>" % <EOL> ( pairs_mapped , <NUM_LIT:0.0> ) ) <EOL> if options . force_output or len ( nm_filtered ) > <NUM_LIT:0> : <EOL> outfile = E . openOutputFile ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> outfile . write ( "<STR_LIT>" ) <EOL> if len ( nm_filtered ) > <NUM_LIT:0> : <EOL> for x in xrange ( <NUM_LIT:0> , max ( nm_filtered . keys ( ) ) + <NUM_LIT:1> ) : <EOL> outfile . write ( "<STR_LIT>" % ( x , nm_filtered [ x ] ) ) <EOL> else : <EOL> outfile . write ( "<STR_LIT>" % ( counter . filtered ) ) <EOL> outfile . close ( ) <EOL> if options . force_output or len ( nh_all ) > <NUM_LIT:1> : <EOL> outfile = E . openOutputFile ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> outfile . write ( "<STR_LIT>" ) <EOL> if len ( nh_all ) > <NUM_LIT:0> : <EOL> writeNH ( outfile , nh_all , max_hi ) <EOL> else : <EOL> outfile . write ( "<STR_LIT>" % ( counter . mapped_reads ) ) <EOL> outfile . close ( ) <EOL> if options . force_output or len ( nh_filtered ) > <NUM_LIT:1> : <EOL> outfile = E . openOutputFile ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> outfile . write ( "<STR_LIT>" ) <EOL> if len ( nh_filtered ) > <NUM_LIT:0> : <EOL> writeNH ( outfile , nh_filtered , max_hi ) <EOL> else : <EOL> outfile . write ( "<STR_LIT>" % ( counter . filtered ) ) <EOL> outfile . close ( ) <EOL> if options . force_output or len ( mapq_all ) > <NUM_LIT:1> : <EOL> outfile = E . openOutputFile ( "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> outfile . write ( "<STR_LIT>" ) <EOL> for x in xrange ( <NUM_LIT:0> , max ( mapq_all . keys ( ) ) + <NUM_LIT:1> ) : <EOL> outfile . write ( "<STR_LIT>" % ( x , mapq_all [ x ] , mapq [ x ] ) ) <EOL> outfile . close ( ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> import CGAT . IOTools as IOTools <EOL> def makeSplicedFasta ( infile ) : <EOL> '''<STR_LIT>''' <EOL> fasta_dict = { } <EOL> with IOTools . openFile ( infile , "<STR_LIT:rb>" ) as fafile : <EOL> for line in fafile . readlines ( ) : <EOL> if line [ <NUM_LIT:0> ] == '<STR_LIT:>>' : <EOL> header = line . rstrip ( "<STR_LIT:\n>" ) <EOL> fasta_dict [ header ] = '<STR_LIT>' <EOL> else : <EOL> fasta_dict [ header ] += line . rstrip ( "<STR_LIT:\n>" ) <EOL> for key , value in fasta_dict . items ( ) : <EOL> yield "<STR_LIT>" % ( key , value ) <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> ( options , args ) = E . Start ( parser , argv = argv ) <EOL> infile = argv [ - <NUM_LIT:1> ] <EOL> for record in makeSplicedFasta ( infile ) : <EOL> options . stdout . write ( record ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . CSV as CSV <EOL> import csv <EOL> import hashlib <EOL> class UniqueBuffer : <EOL> mKeys = { } <EOL> def __init__ ( self , outfile ) : <EOL> self . mOutfile = outfile <EOL> def write ( self , out ) : <EOL> key = hashlib . md5 ( out ) . digest ( ) <EOL> if key not in self . mKeys : <EOL> self . mKeys [ key ] = True <EOL> self . mOutfile . write ( out ) <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> remove = False , <EOL> unique = False , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , add_csv_options = True ) <EOL> if len ( args ) != <NUM_LIT:2> : <EOL> raise "<STR_LIT>" <EOL> options . filename1 , options . filename2 = args <EOL> table1 = CSV . readTable ( IOTools . openFile ( options . filename1 , "<STR_LIT:r>" ) ) <EOL> table2 = CSV . readTable ( IOTools . openFile ( options . filename2 , "<STR_LIT:r>" ) ) <EOL> if options . unique : <EOL> outfile = UniqueBuffer ( sys . stdout ) <EOL> else : <EOL> outfile = options . stdout <EOL> new_fields = [ ] <EOL> for x in options . join_fields1 : <EOL> new_fields . append ( x ) <EOL> for x in fields1 : <EOL> if x not in options . join_fields1 : <EOL> new_fields . append ( x ) <EOL> if x not in options . join_fields2 : <EOL> new_fields . append ( x ) <EOL> writer = csv . DictWriter ( outfile , <EOL> fields , <EOL> dialect = options . csv_dialect , <EOL> lineterminator = options . csv_lineterminator , <EOL> extrasaction = '<STR_LIT:ignore>' ) <EOL> if len ( lines ) > <NUM_LIT:0> : <EOL> old_fields = lines [ <NUM_LIT:0> ] [ : - <NUM_LIT:1> ] . split ( "<STR_LIT:\t>" ) <EOL> if options . 
remove : <EOL> fields = [ ] <EOL> for x in old_fields : <EOL> if x not in input_fields : <EOL> fields . append ( x ) <EOL> else : <EOL> fields = input_fields <EOL> reader = csv . DictReader ( lines , <EOL> dialect = options . csv_dialect ) <EOL> print "<STR_LIT:\t>" . join ( fields ) <EOL> first_row = True <EOL> for row in reader : <EOL> row = IOTools . convertDictionary ( row ) <EOL> writer . writerow ( row ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import math <EOL> import CGAT . Experiment as E <EOL> import CGAT . Genomics as Genomics <EOL> import CGAT . FastaIterator as FastaIterator <EOL> def FilterAlignedPairForPositions ( seq1 , seq2 , method ) : <EOL> """<STR_LIT>""" <EOL> l1 = len ( seq1 ) <EOL> l2 = len ( seq2 ) <EOL> if method == "<STR_LIT:all>" : <EOL> return seq1 , seq2 <EOL> elif method == "<STR_LIT>" : <EOL> return ( "<STR_LIT>" . join ( [ seq1 [ x ] for x in range ( <NUM_LIT:0> , l1 , <NUM_LIT:3> ) ] ) , <EOL> "<STR_LIT>" . join ( [ seq2 [ x ] for x in range ( <NUM_LIT:0> , l2 , <NUM_LIT:3> ) ] ) ) <EOL> elif method == "<STR_LIT>" : <EOL> return ( "<STR_LIT>" . join ( [ seq1 [ x ] for x in range ( <NUM_LIT:1> , l1 , <NUM_LIT:3> ) ] ) , <EOL> "<STR_LIT>" . join ( [ seq2 [ x ] for x in range ( <NUM_LIT:1> , l2 , <NUM_LIT:3> ) ] ) ) <EOL> elif method == "<STR_LIT>" : <EOL> return ( "<STR_LIT>" . join ( [ seq1 [ x ] for x in range ( <NUM_LIT:2> , l1 , <NUM_LIT:3> ) ] ) , <EOL> "<STR_LIT>" . join ( [ seq2 [ x ] for x in range ( <NUM_LIT:2> , l2 , <NUM_LIT:3> ) ] ) ) <EOL> elif method == "<STR_LIT>" : <EOL> s1 = [ ] <EOL> s2 = [ ] <EOL> for x in range ( <NUM_LIT:0> , l1 , <NUM_LIT:3> ) : <EOL> codon1 = seq1 [ x : x + <NUM_LIT:3> ] <EOL> codon2 = seq2 [ x : x + <NUM_LIT:3> ] <EOL> try : <EOL> aa1 , deg11 , deg12 , deg13 = Genomics . GetDegeneracy ( codon1 ) <EOL> aa2 , deg11 , deg22 , deg23 = Genomics . GetDegeneracy ( codon2 ) <EOL> except KeyError : <EOL> continue <EOL> if aa1 == aa2 and deg13 == <NUM_LIT:4> and deg23 == <NUM_LIT:4> : <EOL> s1 . append ( codon1 [ <NUM_LIT:2> ] ) <EOL> s2 . append ( codon2 [ <NUM_LIT:2> ] ) <EOL> return "<STR_LIT>" . join ( s1 ) , "<STR_LIT>" . join ( s2 ) <EOL> def CalculateDistanceJC69 ( info , do_gamma = False , alpha = None ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> p = float ( info . mNDifferent ) / info . 
mNAligned <EOL> if do_gamma : <EOL> distance = <NUM_LIT> * alpha * ( pow ( <NUM_LIT:1> - <NUM_LIT:4> * p / <NUM_LIT:3> , - <NUM_LIT:1> / alpha ) - <NUM_LIT:1> ) <EOL> variance = p * ( <NUM_LIT:1> - p ) / ( pow ( <NUM_LIT:1> - <NUM_LIT:4> * p / <NUM_LIT:3> , - <NUM_LIT:2> / ( alpha + <NUM_LIT:1> ) ) * L ) <EOL> else : <EOL> distance = - <NUM_LIT> * math . log ( <NUM_LIT:1.0> - <NUM_LIT> * p / <NUM_LIT> ) <EOL> variance = p * ( <NUM_LIT:1.0> - p ) / ( math . pow ( <NUM_LIT:1.0> - <NUM_LIT> * p / <NUM_LIT:3> , <NUM_LIT> ) * info . mNAligned ) <EOL> except : <EOL> raise ValueError <EOL> return distance , variance <EOL> def CalculateDistanceT92 ( info ) : <EOL> """<STR_LIT>""" <EOL> gc = info . getGCContent ( ) <EOL> if gc == <NUM_LIT:0> or gc == <NUM_LIT:1> : <EOL> raise ValueError <EOL> wg = <NUM_LIT> * gc * ( <NUM_LIT:1.0> - gc ) <EOL> P = float ( info . mNTransitions ) / info . mNAligned <EOL> Q = float ( info . mNTransversions ) / info . mNAligned <EOL> a1 = <NUM_LIT:1.0> - P / wg - Q <EOL> if a1 <= <NUM_LIT:0> : <EOL> raise ValueError <EOL> a2 = <NUM_LIT:1.0> - <NUM_LIT> * Q <EOL> if a2 <= <NUM_LIT:0> : <EOL> raise ValueError <EOL> distance = - wg * math . log ( a1 ) - <NUM_LIT:0.5> * ( <NUM_LIT:1.0> - wg ) * math . log ( a2 ) <EOL> c1 = <NUM_LIT:1> / a1 <EOL> c2 = <NUM_LIT:1> / a2 <EOL> c3 = wg * ( c1 - c2 ) + c2 <EOL> variance = ( <EOL> c1 * c1 * P + c3 * c3 * Q - math . pow ( c1 * P + c3 * Q , <NUM_LIT> ) ) / info . mNAligned <EOL> return distance , variance <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
set_defaults ( <EOL> filename_map = None , <EOL> filters = "<STR_LIT>" , <EOL> gap_char = "<STR_LIT:->" , <EOL> fields = "<STR_LIT>" , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , add_pipe_options = True ) <EOL> options . filters = options . filters . split ( "<STR_LIT:U+002C>" ) <EOL> options . fields = options . fields . split ( "<STR_LIT:U+002C>" ) <EOL> iterator = FastaIterator . FastaIterator ( options . stdin ) <EOL> headers = [ "<STR_LIT>" , "<STR_LIT>" ] <EOL> for f in options . filters : <EOL> headers += list ( map ( lambda x : "<STR_LIT>" % ( f , x ) , options . fields ) ) <EOL> options . stdout . write ( "<STR_LIT:\t>" . join ( headers ) + "<STR_LIT:\n>" ) <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> cur_record = iterator . next ( ) <EOL> if cur_record is None : <EOL> break <EOL> first_record = cur_record <EOL> cur_record = iterator . next ( ) <EOL> if cur_record is None : <EOL> break <EOL> second_record = cur_record <EOL> except StopIteration : <EOL> break <EOL> if len ( first_record . sequence ) != len ( second_record . sequence ) : <EOL> raise "<STR_LIT>" % ( <EOL> first_record . title , second_record . title ) <EOL> if len ( first_record . sequence ) % <NUM_LIT:3> != <NUM_LIT:0> : <EOL> raise "<STR_LIT>" % first_record . title <EOL> alphabet = "<STR_LIT>" + options . gap_char <EOL> result = [ ] <EOL> for f in options . filters : <EOL> s1 , s2 = FilterAlignedPairForPositions ( first_record . sequence , <EOL> second_record . sequence , <EOL> f ) <EOL> info = Genomics . CalculatePairIndices ( s1 , s2 , options . gap_char ) <EOL> for field in options . fields : <EOL> if field == "<STR_LIT>" : <EOL> c = "<STR_LIT>" % info . mNAligned <EOL> elif field == "<STR_LIT>" : <EOL> c = "<STR_LIT>" % info . mNUnaligned1 <EOL> elif field == "<STR_LIT>" : <EOL> c = "<STR_LIT>" % info . mNUnaligned2 <EOL> elif field == "<STR_LIT>" : <EOL> c = "<STR_LIT>" % info . mNTransversions <EOL> elif field == "<STR_LIT>" : <EOL> c = "<STR_LIT>" % info . 
mNTransitions <EOL> elif field == "<STR_LIT>" : <EOL> c = "<STR_LIT>" % info . mNIdentical <EOL> elif field == "<STR_LIT>" : <EOL> try : <EOL> c = "<STR_LIT>" % CalculateDistanceJC69 ( info ) [ <NUM_LIT:0> ] <EOL> except ValueError : <EOL> c = "<STR_LIT>" <EOL> elif field == "<STR_LIT>" : <EOL> try : <EOL> c = "<STR_LIT>" % CalculateDistanceT92 ( info ) [ <NUM_LIT:0> ] <EOL> except ValueError : <EOL> c = "<STR_LIT>" <EOL> else : <EOL> raise "<STR_LIT>" % field <EOL> result . append ( c ) <EOL> options . stdout . write ( "<STR_LIT>" % ( first_record . title , <EOL> second_record . title , <EOL> "<STR_LIT:\t>" . join ( result ) ) ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Experiment as E <EOL> import CGAT . GTF as GTF <EOL> import CGAT . Genomics as Genomics <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> import CGAT . Intervals as Intervals <EOL> import CGAT . Masker as Masker <EOL> import bx . intervals . intersection <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:string>" , metavar = "<STR_LIT>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT:max_length>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . 
add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT:none>" , "<STR_LIT:3>" , "<STR_LIT:5>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT:none>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> is_gtf = False , <EOL> genome_file = None , <EOL> merge = False , <EOL> feature = None , <EOL> filename_masks = None , <EOL> remove_masked_regions = False , <EOL> min_length = <NUM_LIT:0> , <EOL> max_length = <NUM_LIT:0> , <EOL> extend_at = None , <EOL> extend_by = <NUM_LIT:100> , <EOL> extend_with = None , <EOL> masker = None , <EOL> fold_at = None , <EOL> naming_attribute = False <EOL> ) <EOL> ( options , args ) = E . Start ( parser ) <EOL> if options . genome_file : <EOL> fasta = IndexedFasta . IndexedFasta ( options . genome_file ) <EOL> contigs = fasta . getContigSizes ( ) <EOL> if options . is_gtf : <EOL> iterator = GTF . transcript_iterator ( GTF . iterator ( options . stdin ) ) <EOL> else : <EOL> gffs = GTF . iterator ( options . stdin ) <EOL> if options . merge : <EOL> iterator = GTF . joined_iterator ( gffs ) <EOL> else : <EOL> iterator = GTF . chunk_iterator ( gffs ) <EOL> masks = None <EOL> if options . filename_masks : <EOL> masks = { } <EOL> with open ( options . 
filename_masks , "<STR_LIT:r>" ) as infile : <EOL> e = GTF . readAsIntervals ( GTF . iterator ( infile ) ) <EOL> for contig in e . keys ( ) : <EOL> intersector = bx . intervals . intersection . Intersecter ( ) <EOL> for start , end in e [ contig ] : <EOL> intersector . add_interval ( bx . intervals . Interval ( start , end ) ) <EOL> masks [ contig ] = intersector <EOL> ninput , noutput , nmasked , nskipped_masked = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> nskipped_length = <NUM_LIT:0> <EOL> nskipped_noexons = <NUM_LIT:0> <EOL> feature = options . feature <EOL> for ichunk in iterator : <EOL> ninput += <NUM_LIT:1> <EOL> if feature : <EOL> chunk = filter ( lambda x : x . feature == feature , ichunk ) <EOL> else : <EOL> chunk = ichunk <EOL> if len ( chunk ) == <NUM_LIT:0> : <EOL> nskipped_noexons += <NUM_LIT:1> <EOL> E . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % ( ichunk [ <NUM_LIT:0> ] . contig , <EOL> ichunk [ <NUM_LIT:0> ] . start , <EOL> ichunk [ <NUM_LIT:0> ] . end , <EOL> str ( ichunk [ <NUM_LIT:0> ] ) ) ) <EOL> continue <EOL> contig , strand = chunk [ <NUM_LIT:0> ] . contig , chunk [ <NUM_LIT:0> ] . strand <EOL> if options . is_gtf : <EOL> name = chunk [ <NUM_LIT:0> ] . transcript_id <EOL> else : <EOL> if options . naming_attribute : <EOL> attr_dict = { x . split ( "<STR_LIT:=>" ) [ <NUM_LIT:0> ] : x . split ( "<STR_LIT:=>" ) [ <NUM_LIT:1> ] <EOL> for x in chunk [ <NUM_LIT:0> ] . attributes . split ( "<STR_LIT:;>" ) } <EOL> name = attr_dict [ options . naming_attribute ] <EOL> else : <EOL> name = str ( chunk [ <NUM_LIT:0> ] . attributes ) <EOL> lcontig = contigs [ contig ] <EOL> positive = Genomics . IsPositiveStrand ( strand ) <EOL> intervals = [ ( x . start , x . end ) for x in chunk ] <EOL> intervals . sort ( ) <EOL> if masks : <EOL> if contig in masks : <EOL> masked_regions = [ ] <EOL> for start , end in intervals : <EOL> masked_regions += [ ( x . start , x . end ) <EOL> for x in masks [ contig ] . 
find ( start , end ) ] <EOL> masked_regions = Intervals . combine ( masked_regions ) <EOL> if len ( masked_regions ) : <EOL> nmasked += <NUM_LIT:1> <EOL> if options . remove_masked_regions : <EOL> intervals = Intervals . truncate ( intervals , masked_regions ) <EOL> else : <EOL> raise "<STR_LIT>" <EOL> if len ( intervals ) == <NUM_LIT:0> : <EOL> nskipped_masked += <NUM_LIT:1> <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( name , <EOL> str ( [ ( x . start , <EOL> x . end ) for x in chunk ] ) , <EOL> masked_regions ) ) <EOL> continue <EOL> out = intervals <EOL> if options . extend_at and not options . extend_with : <EOL> if options . extend_at == "<STR_LIT>" : <EOL> intervals = [ ( max ( <NUM_LIT:0> , intervals [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] - options . extend_by ) , <EOL> intervals [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] ) ] <EOL> elif options . extend_at == "<STR_LIT>" : <EOL> intervals = [ ( intervals [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] , <EOL> min ( lcontig , <EOL> intervals [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] + options . extend_by ) ) ] <EOL> else : <EOL> if options . extend_at in ( "<STR_LIT:5>" , "<STR_LIT>" ) : <EOL> intervals [ <NUM_LIT:0> ] = ( max ( <NUM_LIT:0> , <EOL> intervals [ <NUM_LIT:0> ] [ <NUM_LIT:0> ] - options . extend_by ) , <EOL> intervals [ <NUM_LIT:0> ] [ <NUM_LIT:1> ] ) <EOL> if options . extend_at in ( "<STR_LIT:3>" , "<STR_LIT>" ) : <EOL> intervals [ - <NUM_LIT:1> ] = ( intervals [ - <NUM_LIT:1> ] [ <NUM_LIT:0> ] , <EOL> min ( lcontig , <EOL> intervals [ - <NUM_LIT:1> ] [ <NUM_LIT:1> ] + options . extend_by ) ) <EOL> if not positive : <EOL> intervals = [ ( lcontig - x [ <NUM_LIT:1> ] , lcontig - x [ <NUM_LIT:0> ] ) <EOL> for x in intervals [ : : - <NUM_LIT:1> ] ] <EOL> out . reverse ( ) <EOL> s = [ fasta . getSequence ( contig , strand , start , end ) <EOL> for start , end in intervals ] <EOL> s = Masker . maskSequences ( s , options . 
masker ) <EOL> l = sum ( [ len ( x ) for x in s ] ) <EOL> if ( l < options . min_length or <EOL> ( options . max_length and l > options . max_length ) ) : <EOL> nskipped_length += <NUM_LIT:1> <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( name , str ( intervals ) , l ) ) <EOL> continue <EOL> if options . extend_at and options . extend_with : <EOL> extension = "<STR_LIT>" . join ( ( options . extend_with , ) * options . extend_by ) <EOL> if options . extend_at in ( "<STR_LIT:5>" , "<STR_LIT>" ) : <EOL> s [ <NUM_LIT:1> ] = extension + s [ <NUM_LIT:1> ] <EOL> if options . extend_at in ( "<STR_LIT:3>" , "<STR_LIT>" ) : <EOL> s [ - <NUM_LIT:1> ] = s [ - <NUM_LIT:1> ] + extension <EOL> if options . fold_at : <EOL> n = options . fold_at <EOL> s = "<STR_LIT>" . join ( s ) <EOL> seq = "<STR_LIT:\n>" . join ( [ s [ i : i + n ] for i in range ( <NUM_LIT:0> , len ( s ) , n ) ] ) <EOL> else : <EOL> seq = "<STR_LIT:\n>" . join ( s ) <EOL> options . stdout . write ( "<STR_LIT>" % ( name , <EOL> contig , <EOL> strand , <EOL> "<STR_LIT:;>" . join ( <EOL> [ "<STR_LIT>" % <EOL> x for x in out ] ) , <EOL> seq ) ) <EOL> noutput += <NUM_LIT:1> <EOL> E . info ( "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> ( ninput , noutput , nmasked , nskipped_noexons , <EOL> nskipped_masked , nskipped_length ) ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> import CGAT . Experiment as E <EOL> import sys <EOL> def main ( argv = None ) : <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( version = "<STR_LIT>" , <EOL> usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> input_format_choices = ( "<STR_LIT>" , "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT>" , <EOL> choices = input_format_choices , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> "<STR_LIT:U+002CU+0020>" . join ( input_format_choices ) ) <EOL> parser . add_option ( <EOL> "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> group = E . OptionGroup ( parser , "<STR_LIT>" ) <EOL> group . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> group . add_option ( "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> group . add_option ( "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option_group ( group ) <EOL> group = E . OptionGroup ( parser , "<STR_LIT>" ) <EOL> group . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> group . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . 
add_option_group ( group ) <EOL> file_format_choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = file_format_choices , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> "<STR_LIT:U+002CU+0020>" . join ( file_format_choices ) ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> translator_choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = translator_choices , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" % <EOL> "<STR_LIT:U+002CU+0020>" . join ( translator_choices ) ) <EOL> group = E . OptionGroup ( parser , '<STR_LIT>' ) <EOL> compression_choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) <EOL> group . add_option ( "<STR_LIT:-c>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = compression_choices , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" % "<STR_LIT:U+002CU+0020>" . join ( compression_choices ) ) <EOL> group . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> group . 
add_option ( <EOL> "<STR_LIT>" , dest = "<STR_LIT>" , <EOL> action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> parser . add_option_group ( group ) <EOL> parser . set_defaults ( <EOL> extract = None , <EOL> input_format = "<STR_LIT>" , <EOL> benchmark_fragment_size = <NUM_LIT:1000> , <EOL> benchmark_num_iterations = <NUM_LIT> , <EOL> benchmark = False , <EOL> compression = None , <EOL> random_access_points = <NUM_LIT:0> , <EOL> synonyms = None , <EOL> verify = None , <EOL> verify_num_iterations = <NUM_LIT> , <EOL> verify_fragment_size = <NUM_LIT:100> , <EOL> clean_sequence = False , <EOL> allow_duplicates = False , <EOL> regex_identifier = None , <EOL> compress_index = False , <EOL> file_format = "<STR_LIT>" , <EOL> force = False , <EOL> translator = None ) <EOL> ( options , args ) = E . Start ( parser ) <EOL> if options . synonyms : <EOL> synonyms = { } <EOL> for x in options . synonyms . split ( "<STR_LIT:U+002C>" ) : <EOL> a , b = x . split ( "<STR_LIT:=>" ) <EOL> a = a . strip ( ) <EOL> b = b . strip ( ) <EOL> if a not in synonyms : <EOL> synonyms [ a ] = [ ] <EOL> synonyms [ a ] . append ( b ) <EOL> else : <EOL> synonyms = None <EOL> if options . translator : <EOL> if options . translator == "<STR_LIT>" : <EOL> options . translator = IndexedFasta . TranslatorPhred ( ) <EOL> elif options . translator == "<STR_LIT>" : <EOL> options . translator = IndexedFasta . TranslatorSolexa ( ) <EOL> elif options . translator == "<STR_LIT>" : <EOL> options . translator = IndexedFasta . TranslatorBytes ( ) <EOL> elif options . translator == "<STR_LIT>" : <EOL> options . translator = IndexedFasta . TranslatorRange200 ( ) <EOL> else : <EOL> raise ValueError ( "<STR_LIT>" % options . translator ) <EOL> if options . extract : <EOL> fasta = IndexedFasta . IndexedFasta ( args [ <NUM_LIT:0> ] ) <EOL> fasta . setTranslator ( options . translator ) <EOL> converter = IndexedFasta . getConverter ( options . 
input_format ) <EOL> contig , strand , start , end = IndexedFasta . parseCoordinates ( <EOL> options . extract ) <EOL> sequence = fasta . getSequence ( contig , strand , <EOL> start , end , <EOL> converter = converter ) <EOL> options . stdout . write ( "<STR_LIT>" % <EOL> ( options . extract , sequence ) ) <EOL> elif options . benchmark : <EOL> import timeit <EOL> timer = timeit . Timer ( <EOL> stmt = "<STR_LIT>" % <EOL> ( options . benchmark_fragment_size ) , <EOL> setup = "<STR_LIT>" <EOL> "<STR_LIT>" % ( args [ <NUM_LIT:0> ] ) ) <EOL> t = timer . timeit ( number = options . benchmark_num_iterations ) <EOL> options . stdout . write ( "<STR_LIT>" ) <EOL> options . stdout . write ( "<STR_LIT>" % ( <EOL> options . benchmark_num_iterations , <EOL> options . benchmark_fragment_size , t ) ) <EOL> elif options . verify : <EOL> fasta1 = IndexedFasta . IndexedFasta ( args [ <NUM_LIT:0> ] ) <EOL> fasta2 = IndexedFasta . IndexedFasta ( options . verify ) <EOL> nerrors1 = IndexedFasta . verify ( fasta1 , fasta2 , <EOL> options . verify_num_iterations , <EOL> options . verify_fragment_size , <EOL> stdout = options . stdout ) <EOL> options . stdout . write ( "<STR_LIT>" % ( nerrors1 ) ) <EOL> nerrors2 = IndexedFasta . verify ( fasta2 , fasta1 , <EOL> options . verify_num_iterations , <EOL> options . verify_fragment_size , <EOL> stdout = options . stdout ) <EOL> options . stdout . write ( "<STR_LIT>" % ( nerrors2 ) ) <EOL> elif options . compress_index : <EOL> fasta = IndexedFasta . IndexedFasta ( args [ <NUM_LIT:0> ] ) <EOL> fasta . compressIndex ( ) <EOL> else : <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % args [ <NUM_LIT:0> ] ) <EOL> options . stdlog . write ( "<STR_LIT>" % <EOL> ( "<STR_LIT>" . join ( args [ <NUM_LIT:1> : ] ) ) ) <EOL> options . stdlog . flush ( ) <EOL> if synonyms : <EOL> options . stdlog . write ( "<STR_LIT>" ) <EOL> for k , v in synonyms . items ( ) : <EOL> options . stdlog . 
write ( "<STR_LIT>" % ( k , "<STR_LIT:U+002C>" . join ( v ) ) ) <EOL> options . stdlog . flush ( ) <EOL> if len ( args ) < <NUM_LIT:2> : <EOL> print globals ( ) [ "<STR_LIT>" ] <EOL> sys . exit ( <NUM_LIT:1> ) <EOL> iterator = IndexedFasta . MultipleFastaIterator ( <EOL> args [ <NUM_LIT:1> : ] , <EOL> regex_identifier = options . regex_identifier , <EOL> format = options . file_format ) <EOL> IndexedFasta . createDatabase ( <EOL> args [ <NUM_LIT:0> ] , <EOL> iterator , <EOL> synonyms = synonyms , <EOL> random_access_points = options . random_access_points , <EOL> compression = options . compression , <EOL> clean_sequence = options . clean_sequence , <EOL> allow_duplicates = options . allow_duplicates , <EOL> translator = options . translator , <EOL> force = options . force ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import CGAT . Intervals as Intervals <EOL> import CGAT . Experiment as E <EOL> import CGAT . Histogram as Histogram <EOL> import CGAT . Blat as Blat <EOL> import CGAT . GTF as GTF <EOL> import CGAT . IndexedFasta as IndexedFasta <EOL> def printHistogram ( values , section , options , min_value = <NUM_LIT:0> , increment = <NUM_LIT:1.0> ) : <EOL> if len ( values ) == <NUM_LIT:0> : <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( <EOL> "<STR_LIT>" % ( section ) ) <EOL> return <EOL> outfile = open ( options . output_filename_pattern % section , "<STR_LIT:w>" ) <EOL> h = Histogram . Calculate ( <EOL> values , no_empty_bins = True , min_value = <NUM_LIT:0> , increment = <NUM_LIT:1.0> ) <EOL> outfile . write ( "<STR_LIT>" % section ) <EOL> for bin , val in h : <EOL> outfile . write ( "<STR_LIT>" % ( bin , val ) ) <EOL> outfile . close ( ) <EOL> def printMatched ( query_ids , section , options ) : <EOL> outfile = open ( options . output_filename_pattern % section , "<STR_LIT:w>" ) <EOL> for query_id in query_ids : <EOL> outfile . write ( "<STR_LIT>" % ( query_id ) ) <EOL> outfile . close ( ) <EOL> def detectPolyA ( query_id , matches , options , queries_fasta = None ) : <EOL> """<STR_LIT>""" <EOL> max_total = matches [ <NUM_LIT:0> ] . mQueryLength <EOL> best_start , best_end , best_pA , best_pT , best_tail = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , "<STR_LIT>" <EOL> tail_matches = [ ] <EOL> new_matches = [ ] <EOL> for match in matches : <EOL> missing_start = match . mQueryFrom <EOL> missing_end = match . mQueryLength - match . mQueryTo <EOL> if missing_start < missing_end : <EOL> smaller = missing_start <EOL> larger = missing_end <EOL> start , end = match . mQueryTo , match . mQueryLength <EOL> else : <EOL> smaller = missing_end <EOL> larger = missing_start <EOL> start , end = <NUM_LIT:0> , match . mQueryFrom <EOL> if not ( smaller < options . polyA_max_unaligned and larger > options . 
polyA_min_unaligned ) : <EOL> new_matches . append ( match ) <EOL> continue <EOL> tail = queries_fasta . getSequence ( query_id ) [ start : end ] <EOL> counts = { "<STR_LIT:A>" : <NUM_LIT:0> , "<STR_LIT:T>" : <NUM_LIT:0> , "<STR_LIT:N>" : <NUM_LIT:0> } <EOL> for c in tail . upper ( ) : <EOL> counts [ c ] = counts . get ( c , <NUM_LIT:0> ) + <NUM_LIT:1> <EOL> total = end - start <EOL> pA = <NUM_LIT> * ( counts [ "<STR_LIT:A>" ] + counts [ "<STR_LIT:N>" ] ) / total <EOL> pT = <NUM_LIT> * ( counts [ "<STR_LIT:T>" ] + counts [ "<STR_LIT:N>" ] ) / total <EOL> if options . loglevel >= <NUM_LIT:5> : <EOL> options . stdlog . write ( <EOL> "<STR_LIT>" % ( query_id , start , end , pA , pT , tail ) ) <EOL> if max ( pA , pT ) < options . polyA_min_percent : <EOL> new_matches . append ( match ) <EOL> continue <EOL> if total < max_total : <EOL> max_total = total <EOL> best_start , best_end , best_pA , best_pT , best_tail = start , end , pA , pT , tail <EOL> if not ( best_start == start or best_end == end ) : <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % <EOL> ( query_id , best_start , best_end , start , end ) ) <EOL> return matches <EOL> tail_matches . append ( match ) <EOL> if tail_matches : <EOL> for match in tail_matches : <EOL> match . mQueryCoverage += <NUM_LIT> * float ( len ( best_tail ) ) / match . mQueryLength <EOL> assert match . mQueryCoverage <= <NUM_LIT> , "<STR_LIT>" % ( <EOL> query_id , match . mQueryCoverage , float ( len ( best_tail ) ) / match . mQueryLength , str ( match ) ) <EOL> new_matches . append ( match ) <EOL> options . outfile_polyA . write ( "<STR_LIT>" % <EOL> ( query_id , <EOL> len ( tail_matches ) , <EOL> best_start , best_end , <EOL> best_pA , best_pT , best_tail ) ) <EOL> assert len ( new_matches ) == len ( matches ) <EOL> return new_matches <EOL> def selectMatches ( query_id , matches , options , queries_fasta = None ) : <EOL> """<STR_LIT>""" <EOL> if options . 
loglevel >= <NUM_LIT:2> : <EOL> options . stdlog . write ( <EOL> "<STR_LIT>" % query_id ) <EOL> if options . loglevel >= <NUM_LIT:3> : <EOL> for match in matches : <EOL> options . stdlog . write ( "<STR_LIT>" % str ( match ) ) <EOL> new_matches = [ ] <EOL> if options . polyA : <EOL> matches = detectPolyA ( query_id , matches , options , queries_fasta ) <EOL> if options . matching_mode == "<STR_LIT:all>" : <EOL> return matches , None <EOL> elif options . matching_mode in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , ) : <EOL> if options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : min ( match . mQueryCoverage , match . mSbjctCoverage ) <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : match . mQueryCoverage <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : match . mSbjctCoverage <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : match . mPid <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : min ( <EOL> match . mQueryCoverage , match . mSbjctCoverage ) * match . mPid <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : match . mQueryCoverage * match . mPid <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : match . mSbjctCoverage * match . mPid <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : min ( <EOL> ( match . mQueryCoverage , match . mSbjctCoverage , match . mPid ) ) <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : min ( match . mQueryCoverage , match . mPid ) <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> f = lambda match : min ( match . mSbjctCoverage , match . mPid ) <EOL> for match in matches : <EOL> match . mMatchScore = f ( match ) <EOL> matches . sort ( lambda x , y : cmp ( x . mMatchScore , y . 
mMatchScore ) ) <EOL> matches . reverse ( ) <EOL> best_score = min ( matches [ <NUM_LIT:0> ] . mMatchScore * options . collection_threshold , <EOL> matches [ <NUM_LIT:0> ] . mMatchScore - options . collection_distance ) <EOL> for match in matches : <EOL> if match . mMatchScore < best_score : <EOL> break <EOL> new_matches . append ( match ) <EOL> if not options . keep_all_best : <EOL> if len ( new_matches ) > <NUM_LIT:1> : <EOL> if len ( new_matches ) == <NUM_LIT:2> : <EOL> if new_matches [ <NUM_LIT:0> ] . mSbjctId == "<STR_LIT>" % new_matches [ <NUM_LIT:1> ] . mSbjctId : <EOL> return new_matches [ <NUM_LIT:1> : ] , None <EOL> elif new_matches [ <NUM_LIT:1> ] . mSbjctId == "<STR_LIT>" % new_matches [ <NUM_LIT:0> ] . mSbjctId : <EOL> return new_matches [ : <NUM_LIT:1> ] , None <EOL> else : <EOL> new_matches = [ x for x in new_matches if not ( <EOL> x . mSbjctId . endswith ( "<STR_LIT>" ) or x . mSbjctId . endswith ( "<STR_LIT>" ) ) ] <EOL> if len ( new_matches ) == <NUM_LIT:1> : <EOL> return new_matches , None <EOL> if options . ignore_all_random : <EOL> new_matches = [ x for x in new_matches if not ( x . mSbjctId . endswith ( <EOL> "<STR_LIT>" ) or x . mSbjctId . endswith ( "<STR_LIT>" ) or x . mSbjctId . endswith ( "<STR_LIT>" ) ) ] <EOL> if len ( new_matches ) == <NUM_LIT:1> : <EOL> return new_matches , None <EOL> return [ ] , "<STR_LIT>" % ( "<STR_LIT:U+0020>" . join ( map ( lambda x : str ( x . mMatchScore ) , matches ) ) ) <EOL> elif options . matching_mode == "<STR_LIT>" : <EOL> if len ( matches ) == <NUM_LIT:1> : <EOL> new_matches . append ( matches [ <NUM_LIT:0> ] ) <EOL> else : <EOL> return [ ] , "<STR_LIT>" % ( "<STR_LIT:U+0020>" . join ( map ( lambda x : str ( x . mMatchScore ) , matches ) ) ) <EOL> matches = new_matches <EOL> if options . best_per_sbjct : <EOL> new_matches = [ ] <EOL> sbjcts = set ( ) <EOL> for match in matches : <EOL> if match . mSbjctId in sbjcts : <EOL> continue <EOL> new_matches . append ( match ) <EOL> sbjcts . add ( match . 
mSbjctId ) <EOL> matches = new_matches <EOL> return matches , None <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" , usage = globals ( ) [ "<STR_LIT>" ] ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT:test>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT>" , <EOL> choices = ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT:all>" ) , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:float>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
set_defaults ( input_filename_domains = None , <EOL> input_filename_queries = None , <EOL> threshold_good_query_coverage = <NUM_LIT> , <EOL> threshold_min_pid = <NUM_LIT> , <EOL> threshold_min_matches = <NUM_LIT:0> , <EOL> threshold_max_error_rate = None , <EOL> output_filename_pattern = "<STR_LIT:%s>" , <EOL> keep_unique_matches = False , <EOL> output_format = "<STR_LIT>" , <EOL> print_matched = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] , <EOL> from_zipped = False , <EOL> combine_overlaps = True , <EOL> min_length_domain = <NUM_LIT:30> , <EOL> threshold_min_query_coverage = <NUM_LIT:50> , <EOL> min_length_singletons = <NUM_LIT:30> , <EOL> new_family_id = <NUM_LIT> , <EOL> add_singletons = False , <EOL> matching_mode = "<STR_LIT>" , <EOL> best_per_sbjct = False , <EOL> threshold_max_query_gapchars = None , <EOL> threshold_max_query_gaps = None , <EOL> threshold_max_sbjct_gapchars = None , <EOL> threshold_max_sbjct_gaps = None , <EOL> filename_filter_sbjct = None , <EOL> keep_forbidden = False , <EOL> keep_all_best = False , <EOL> test = None , <EOL> query_forward_coordinates = False , <EOL> output_filename_empty = None , <EOL> collection_threshold = <NUM_LIT:1.0> , <EOL> collection_distance = <NUM_LIT:0> , <EOL> polyA = False , <EOL> polyA_max_unaligned = <NUM_LIT:3> , <EOL> polyA_min_unaligned = <NUM_LIT:10> , <EOL> polyA_min_percent = <NUM_LIT> , <EOL> ignore_all_random = False , <EOL> ) <EOL> ( options , args ) = E . Start ( parser , add_pipe_options = True ) <EOL> if len ( args ) == <NUM_LIT:1> : <EOL> if options . from_zipped or args [ <NUM_LIT:0> ] [ - <NUM_LIT:3> : ] == "<STR_LIT>" : <EOL> import gzip <EOL> infile = gzip . open ( args [ <NUM_LIT:0> ] , "<STR_LIT:r>" ) <EOL> else : <EOL> infile = open ( args [ <NUM_LIT:0> ] , "<STR_LIT:r>" ) <EOL> else : <EOL> infile = sys . stdin <EOL> if options . input_filename_queries : <EOL> queries_fasta = IndexedFasta . IndexedFasta ( <EOL> options . 
input_filename_queries ) <EOL> else : <EOL> queries_fasta = None <EOL> if options . filename_filter_sbjct : <EOL> try : <EOL> import bx . intervals . intersection <EOL> except ImportError : <EOL> raise "<STR_LIT>" <EOL> intervals = GTF . readGFFFromFileAsIntervals ( <EOL> open ( options . filename_filter_sbjct , "<STR_LIT:r>" ) ) <EOL> intersectors = { } <EOL> for contig , values in intervals . items ( ) : <EOL> intersector = bx . intervals . intersection . Intersecter ( ) <EOL> for start , end in values : <EOL> intersector . add_interval ( bx . intervals . Interval ( start , end ) ) <EOL> intersectors [ contig ] = intersector <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . stdlog . write ( "<STR_LIT>" % <EOL> ( sum ( [ len ( x ) for x in intervals . values ( ) ] ) , <EOL> len ( intersectors ) ) ) <EOL> else : <EOL> intersectors = None <EOL> ninput , noutput , nskipped = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> nfull_matches , npartial_matches , ngood_matches = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> nfully_matched , npartially_matched , nwell_matched = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> nremoved_pid , nremoved_query_coverage , nempty = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> nremoved_gaps , nremoved_nmatches = <NUM_LIT:0> , <NUM_LIT:0> <EOL> nremoved_regions = <NUM_LIT:0> <EOL> nqueries_removed_region = <NUM_LIT:0> <EOL> aggregate_coverages = [ ] <EOL> mapped_coverages = [ ] <EOL> fully_matched = [ ] <EOL> well_matched = [ ] <EOL> partially_matched = [ ] <EOL> new_family_id = options . new_family_id <EOL> if options . output_filename_empty : <EOL> outfile_empty = open ( options . output_filename_empty , "<STR_LIT:w>" ) <EOL> outfile_empty . write ( "<STR_LIT>" ) <EOL> else : <EOL> outfile_empty = None <EOL> if options . polyA : <EOL> options . outfile_polyA = open ( <EOL> options . output_filename_pattern % "<STR_LIT>" , "<STR_LIT:w>" ) <EOL> options . outfile_polyA . 
write ( "<STR_LIT>" ) <EOL> def processChunk ( query_id , matches ) : <EOL> """<STR_LIT>""" <EOL> global ninput , noutput , nskipped <EOL> global nfull_matches , npartial_matches , ngood_matches <EOL> global nremoved_pid , nremoved_query_coverage , nempty , nremoved_gaps , nremoved_nmatches <EOL> global nremoved_regions , nqueries_removed_region <EOL> global outfile_empty <EOL> ninput += <NUM_LIT:1> <EOL> full_matches = [ ] <EOL> good_matches = [ ] <EOL> partial_matches = [ ] <EOL> x_nremoved_pid , x_nquery_coverage , x_nremoved_gaps , x_nremoved_nmatches = <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> <EOL> nmatches = len ( matches ) <EOL> new_matches = [ ] <EOL> for match in matches : <EOL> if match . mPid < options . threshold_min_pid : <EOL> nremoved_pid += <NUM_LIT:1> <EOL> continue <EOL> if match . mNMatches < options . threshold_min_matches : <EOL> nremoved_nmatches += <NUM_LIT:1> <EOL> continue <EOL> if options . threshold_max_error_rate : <EOL> r = <NUM_LIT> * math . power ( <EOL> options . threshold_max_error_rate , match . mNMatches + match . mNMismatches ) <EOL> if match . mPid < r : <EOL> nremoved_pid += <NUM_LIT:1> <EOL> x_nremoved_pid += <NUM_LIT:1> <EOL> continue <EOL> new_matches . append ( match ) <EOL> matches = new_matches <EOL> if len ( matches ) == <NUM_LIT:0> : <EOL> if outfile_empty : <EOL> outfile_empty . write ( "<STR_LIT>" % <EOL> ( query_id , nmatches , x_nremoved_pid , x_nquery_coverage , x_nremoved_gaps , x_nremoved_nmatches ) ) <EOL> nskipped += <NUM_LIT:1> <EOL> return <EOL> if options . keep_unique_matches and len ( matches ) == <NUM_LIT:1> : <EOL> pass <EOL> else : <EOL> new_matches = [ ] <EOL> for match in matches : <EOL> if match . mQueryCoverage < options . threshold_min_query_coverage : <EOL> nremoved_query_coverage += <NUM_LIT:1> <EOL> x_nquery_coverage += <NUM_LIT:1> <EOL> continue <EOL> if options . threshold_max_query_gaps and options . threshold_max_query_gaps > match . 
mQueryNGapsCounts : <EOL> nremoved_gaps += <NUM_LIT:1> <EOL> x_nremoved_gaps += <NUM_LIT:1> <EOL> continue <EOL> if options . threshold_max_query_gapchars and options . threshold_max_query_gapchars > match . mQueryNGapsBases : <EOL> nremoved_gaps += <NUM_LIT:1> <EOL> x_nremoved_gaps += <NUM_LIT:1> <EOL> continue <EOL> if options . threshold_max_sbjct_gaps and options . threshold_max_sbjct_gaps > match . mSbjctNGapsCounts : <EOL> nremoved_gaps += <NUM_LIT:1> <EOL> x_nremoved_gaps += <NUM_LIT:1> <EOL> continue <EOL> if options . threshold_max_sbjct_gapchars and options . threshold_max_sbjct_gapchars > match . mSbjctNGapsBases : <EOL> nremoved_gaps += <NUM_LIT:1> <EOL> x_nremoved_gaps += <NUM_LIT:1> <EOL> continue <EOL> new_matches . append ( match ) <EOL> matches = new_matches <EOL> if len ( matches ) == <NUM_LIT:0> : <EOL> if outfile_empty : <EOL> outfile_empty . write ( "<STR_LIT>" % <EOL> ( query_id , nmatches , x_nremoved_pid , x_nquery_coverage , x_nremoved_gaps , x_nremoved_nmatches ) ) <EOL> nskipped += <NUM_LIT:1> <EOL> return <EOL> keep = True <EOL> for match in matches : <EOL> if intersectors and match . mSbjctId in intersectors : <EOL> found = intersectors [ match . mSbjctId ] . find ( <EOL> match . mSbjctFrom , match . mSbjctTo ) <EOL> if found and not options . keep_forbidden or ( found and not options . keep_forbidden ) : <EOL> nremoved_regions += <NUM_LIT:1> <EOL> keep = False <EOL> continue <EOL> if not keep : <EOL> nqueries_removed_region += <NUM_LIT:1> <EOL> if outfile_empty : <EOL> outfile_empty . write ( <EOL> "<STR_LIT>" % query_id ) <EOL> return <EOL> for match in matches : <EOL> if match . mQueryCoverage >= <NUM_LIT> : <EOL> full_matches . append ( match ) <EOL> if match . mQueryCoverage > options . threshold_good_query_coverage : <EOL> good_matches . append ( match ) <EOL> else : <EOL> partial_matches . 
append ( match ) <EOL> if full_matches : <EOL> nfull_matches += <NUM_LIT:1> <EOL> elif good_matches : <EOL> ngood_matches += <NUM_LIT:1> <EOL> elif partial_matches : <EOL> npartial_matches += <NUM_LIT:1> <EOL> intervals = [ ] <EOL> for match in full_matches + good_matches + partial_matches : <EOL> intervals . append ( ( match . mQueryFrom , match . mQueryTo ) ) <EOL> rest = Intervals . complement ( intervals , <NUM_LIT:0> , match . mQueryLength ) <EOL> query_coverage = <NUM_LIT> * ( match . mQueryLength - <EOL> sum ( map ( lambda x : x [ <NUM_LIT:1> ] - x [ <NUM_LIT:0> ] , rest ) ) ) / match . mQueryLength <EOL> if query_coverage >= <NUM_LIT> : <EOL> fully_matched . append ( query_id ) <EOL> elif query_coverage > options . threshold_good_query_coverage : <EOL> well_matched . append ( query_id ) <EOL> else : <EOL> partially_matched . append ( query_id ) <EOL> aggregate_coverages . append ( query_coverage ) <EOL> matches , msg = selectMatches ( query_id , matches , options , queries_fasta ) <EOL> if len ( matches ) > <NUM_LIT:0> : <EOL> for match in matches : <EOL> if options . query_forward_coordinates : <EOL> match . convertCoordinates ( ) <EOL> if options . output_format == "<STR_LIT>" : <EOL> options . stdout . write ( "<STR_LIT>" % <EOL> "<STR_LIT:\t>" . join ( map ( str , ( <EOL> match . mQueryId , match . mSbjctId , <EOL> match . strand , <EOL> "<STR_LIT>" % match . mQueryCoverage , <EOL> "<STR_LIT>" % match . mSbjctCoverage , <EOL> "<STR_LIT>" % match . mPid , <EOL> match . mQueryLength , <EOL> match . mSbjctLength , <EOL> match . mQueryFrom , match . mQueryTo , <EOL> match . mSbjctFrom , match . mSbjctTo , <EOL> "<STR_LIT:U+002C>" . join ( <EOL> map ( str , match . mBlockSizes ) ) , <EOL> "<STR_LIT:U+002C>" . join ( <EOL> map ( str , match . mQueryBlockStarts ) ) , <EOL> "<STR_LIT:U+002C>" . join ( <EOL> map ( str , match . mSbjctBlockStarts ) ) , <EOL> ) ) ) ) <EOL> elif options . output_format == "<STR_LIT>" : <EOL> options . stdout . 
write ( str ( match ) + "<STR_LIT:\n>" ) <EOL> noutput += <NUM_LIT:1> <EOL> else : <EOL> if outfile_empty : <EOL> outfile_empty . write ( <EOL> "<STR_LIT>" % ( query_id , msg ) ) <EOL> nempty += <NUM_LIT:1> <EOL> if options . output_format == "<STR_LIT>" : <EOL> options . stdout . write ( "<STR_LIT:\t>" . join ( ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) ) + "<STR_LIT:\n>" ) <EOL> elif options . output_format == "<STR_LIT>" : <EOL> options . stdout . write ( Blat . Match ( ) . getHeader ( ) + "<STR_LIT:\n>" ) <EOL> nfully_covered = None <EOL> matches = [ ] <EOL> last_query_id = None <EOL> is_complete = True <EOL> ninput_lines = <NUM_LIT:0> <EOL> skip = <NUM_LIT:0> <EOL> iterator = Blat . BlatIterator ( infile ) <EOL> while <NUM_LIT:1> : <EOL> try : <EOL> match = iterator . next ( ) <EOL> except Blat . ParsingError : <EOL> iterator = Blat . BlatIterator ( infile ) <EOL> continue <EOL> if match is None : <EOL> break <EOL> ninput_lines += <NUM_LIT:1> <EOL> if options . test and ninput_lines > options . test : <EOL> break <EOL> if match . mQueryId != last_query_id : <EOL> if last_query_id : <EOL> processChunk ( last_query_id , matches ) <EOL> matches = [ ] <EOL> last_query_id = match . mQueryId <EOL> matches . append ( match ) <EOL> processChunk ( last_query_id , matches ) <EOL> printHistogram ( aggregate_coverages , "<STR_LIT>" , options ) <EOL> printHistogram ( mapped_coverages , "<STR_LIT>" , options ) <EOL> if "<STR_LIT>" in options . print_matched : <EOL> printMatched ( fully_matched , "<STR_LIT>" , options ) <EOL> if "<STR_LIT>" in options . print_matched : <EOL> printMatched ( well_matched , "<STR_LIT>" , options ) <EOL> if "<STR_LIT>" in options . print_matched : <EOL> printMatched ( partially_matched , "<STR_LIT>" , options ) <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> options . 
stdlog . write ( <EOL> "<STR_LIT>" % ( ninput_lines , str ( is_complete ) ) ) <EOL> options . stdlog . write ( <EOL> "<STR_LIT>" % ( ninput , noutput ) ) <EOL> options . stdlog . write ( "<STR_LIT>" % ( <EOL> nfull_matches , ngood_matches , npartial_matches ) ) <EOL> options . stdlog . write ( "<STR_LIT>" % ( <EOL> len ( fully_matched ) , len ( well_matched ) , len ( partially_matched ) ) ) <EOL> options . stdlog . write ( "<STR_LIT>" % <EOL> ( nskipped + nqueries_removed_region + nempty , <EOL> nskipped , nqueries_removed_region , nempty ) ) <EOL> options . stdlog . write ( "<STR_LIT>" % ( <EOL> nremoved_pid , nremoved_query_coverage , nremoved_gaps , nremoved_regions , nremoved_nmatches ) ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import sys <EOL> import re <EOL> import os <EOL> import CGAT . FastaIterator as FastaIterator <EOL> import CGAT . IOTools as IOTools <EOL> import CGAT . Experiment as E <EOL> class Files : <EOL> mFiles = { } <EOL> def __init__ ( self , <EOL> output_pattern = None , <EOL> skip_identifiers = False ) : <EOL> self . mOutputPattern = output_pattern <EOL> self . mSkipIdentifiers = skip_identifiers <EOL> self . mCounts = { } <EOL> def __del__ ( self ) : <EOL> """<STR_LIT>""" <EOL> for file in self . mFiles . values ( ) : <EOL> file . close ( ) <EOL> def GetFile ( self , identifier ) : <EOL> return identifier <EOL> def GetFilename ( self , identifier ) : <EOL> """<STR_LIT>""" <EOL> if self . mOutputPattern : <EOL> return re . sub ( "<STR_LIT:%s>" , str ( identifier ) , self . mOutputPattern ) <EOL> else : <EOL> return identifier <EOL> def OpenFile ( self , filename , mode = "<STR_LIT:w>" ) : <EOL> """<STR_LIT>""" <EOL> if mode in ( "<STR_LIT:w>" , "<STR_LIT:a>" ) : <EOL> dirname = os . path . dirname ( filename ) <EOL> if dirname and not os . path . exists ( dirname ) : <EOL> os . makedirs ( dirname ) <EOL> return open ( filename , mode ) <EOL> def Write ( self , identifier , sequence ) : <EOL> filename = self . GetFilename ( identifier ) <EOL> if filename not in self . mFiles : <EOL> if len ( self . mFiles ) > <NUM_LIT:1000> : <EOL> for f in self . mFiles . values ( ) : <EOL> f . close ( ) <EOL> self . mFiles = { } <EOL> self . mFiles [ filename ] = self . OpenFile ( filename , "<STR_LIT:a>" ) <EOL> if self . mSkipIdentifiers : <EOL> self . mFiles [ filename ] . write ( "<STR_LIT>" % ( sequence . sequence ) ) <EOL> else : <EOL> self . mFiles [ filename ] . write ( <EOL> "<STR_LIT>" % ( sequence . title , sequence . sequence ) ) <EOL> if filename not in self . mCounts : <EOL> self . mCounts [ filename ] = <NUM_LIT:0> <EOL> self . 
mCounts [ filename ] += <NUM_LIT:1> <EOL> def DeleteFiles ( self , min_size = <NUM_LIT:0> ) : <EOL> """<STR_LIT>""" <EOL> ndeleted = <NUM_LIT:0> <EOL> for filename , counts in self . mCounts . items ( ) : <EOL> if counts < min_size : <EOL> os . remove ( filename ) <EOL> ndeleted += <NUM_LIT:1> <EOL> return ndeleted <EOL> class FilesChunks ( Files ) : <EOL> def __init__ ( self , <EOL> chunk_size , ** kwargs ) : <EOL> Files . __init__ ( self , ** kwargs ) <EOL> self . mChunkSize = chunk_size <EOL> self . mFilename = <NUM_LIT:0> <EOL> def GetFilename ( self , identifier ) : <EOL> if not self . mFilename or self . mCounts [ self . mFilename ] % self . mChunkSize == <NUM_LIT:0> : <EOL> self . mFilename = re . sub ( <EOL> "<STR_LIT:%s>" , str ( len ( self . mCounts ) + <NUM_LIT:1> ) , self . mOutputPattern ) <EOL> return self . mFilename <EOL> def main ( argv = None ) : <EOL> """<STR_LIT>""" <EOL> if argv is None : <EOL> argv = sys . argv <EOL> parser = E . OptionParser ( <EOL> version = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:string>" , <EOL> help = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , dest = "<STR_LIT>" , action = "<STR_LIT:store_true>" , <EOL> help = "<STR_LIT>" , <EOL> metavar = "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , dest = "<STR_LIT>" , type = "<STR_LIT:int>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . set_defaults ( <EOL> input_filename = None , <EOL> map_filename = None , <EOL> skip_identifiers = False , <EOL> input_pattern = "<STR_LIT>" , <EOL> min_size = <NUM_LIT:0> , <EOL> num_sequences = None , <EOL> output_pattern = "<STR_LIT:%s>" ) <EOL> ( options , args ) = E . Start ( parser ) <EOL> if options . input_filename : <EOL> infile = IOTools . openFile ( options . input_filename , "<STR_LIT:r>" ) <EOL> else : <EOL> infile = sys . stdin <EOL> if options . map_filename : <EOL> map_id2filename = IOTools . ReadMap ( open ( options . map_filename , "<STR_LIT:r>" ) ) <EOL> else : <EOL> map_id2filename = { } <EOL> if options . num_sequences : <EOL> files = FilesChunks ( chunk_size = options . num_sequences , <EOL> output_pattern = options . output_pattern , <EOL> skip_identifiers = options . skip_identifiers ) <EOL> else : <EOL> files = Files ( output_pattern = options . output_pattern , <EOL> skip_identifiers = options . skip_identifiers ) <EOL> if options . input_pattern : <EOL> rx = re . compile ( options . input_pattern ) <EOL> else : <EOL> rx = None <EOL> ninput = <NUM_LIT:0> <EOL> noutput = <NUM_LIT:0> <EOL> identifier = None <EOL> chunk = <NUM_LIT:0> <EOL> for seq in FastaIterator . iterate ( infile ) : <EOL> ninput += <NUM_LIT:1> <EOL> if rx : <EOL> try : <EOL> identifier = rx . search ( seq . title ) . groups ( ) [ <NUM_LIT:0> ] <EOL> except AttributeError : <EOL> print "<STR_LIT>" % ( seq . title ) <EOL> else : <EOL> identifier = seq . title <EOL> if map_id2filename : <EOL> if identifier in map_id2filename : <EOL> identifier = map_id2filename [ identifier ] <EOL> else : <EOL> continue <EOL> files . Write ( identifier , seq ) <EOL> noutput += <NUM_LIT:1> <EOL> if options . input_filename : <EOL> infile . close ( ) <EOL> if options . min_size : <EOL> ndeleted = files . DeleteFiles ( min_size = options . 
min_size ) <EOL> else : <EOL> ndeleted = <NUM_LIT:0> <EOL> if options . loglevel >= <NUM_LIT:1> : <EOL> print "<STR_LIT>" % ( ninput , noutput , ndeleted ) <EOL> E . Stop ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> sys . exit ( main ( sys . argv ) ) </s>
<s> '''<STR_LIT>''' <EOL> import glob <EOL> import os <EOL> import importlib <EOL> import yaml <EOL> import re <EOL> import sys <EOL> import copy <EOL> from nose . tools import ok_ <EOL> import CGAT . Experiment as E <EOL> import CGAT . IOTools as IOTools <EOL> ORIGINAL_START = None <EOL> PARSER = None <EOL> EXPRESSIONS = ( <EOL> ( '<STR_LIT>' , '<STR_LIT>' ) , ) <EOL> EXCLUDE = ( "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ) <EOL> FILENAME_OPTIONLIST = "<STR_LIT>" <EOL> class DummyError ( Exception ) : <EOL> pass <EOL> def filterFiles ( files ) : <EOL> '''<STR_LIT>''' <EOL> if os . path . exists ( "<STR_LIT>" ) : <EOL> config = yaml . load ( open ( "<STR_LIT>" ) ) <EOL> if config is not None : <EOL> if "<STR_LIT>" in config and config [ "<STR_LIT>" ] : <EOL> values = config [ "<STR_LIT>" ] <EOL> if "<STR_LIT>" in values : <EOL> scriptdirs = [ x for x in open ( "<STR_LIT>" ) <EOL> if x . startswith ( "<STR_LIT>" ) and <EOL> x . endswith ( "<STR_LIT>" ) ] <EOL> take = set ( [ re . sub ( "<STR_LIT>" , "<STR_LIT>" , <EOL> x [ : - <NUM_LIT:1> ] ) for x in scriptdirs ] ) <EOL> files = [ x for x in files if x in take ] <EOL> if "<STR_LIT>" in values : <EOL> rx = re . compile ( values [ "<STR_LIT>" ] ) <EOL> files = filter ( rx . search , files ) <EOL> return files <EOL> def LocalStart ( parser , * args , ** kwargs ) : <EOL> '''<STR_LIT>''' <EOL> global PARSER <EOL> d = copy . copy ( kwargs ) <EOL> d . update ( { '<STR_LIT>' : True } ) <EOL> PARSER = ORIGINAL_START ( parser , ** d ) <EOL> raise DummyError ( ) <EOL> def loadScript ( script_name ) : <EOL> prefix , suffix = os . path . splitext ( script_name ) <EOL> dirname = os . path . relpath ( os . path . dirname ( script_name ) ) <EOL> basename = os . path . basename ( script_name ) [ : - <NUM_LIT:3> ] <EOL> if os . path . exists ( prefix + "<STR_LIT>" ) : <EOL> try : <EOL> os . remove ( prefix + "<STR_LIT>" ) <EOL> except OSError : <EOL> pass <EOL> modulename = "<STR_LIT:.>" . 
join ( ( re . sub ( "<STR_LIT:/>" , "<STR_LIT:.>" , dirname ) , basename ) ) <EOL> try : <EOL> module = importlib . import_module ( modulename ) <EOL> except ImportError , msg : <EOL> sys . stderr . write ( '<STR_LIT>' % <EOL> ( modulename , msg ) ) <EOL> module = None <EOL> return module , modulename <EOL> def check_option ( option , script_name , map_option2action ) : <EOL> '''<STR_LIT>''' <EOL> if option in map_option2action : <EOL> ok_ ( option in map_option2action , <EOL> '<STR_LIT>' ) <EOL> ok_ ( map_option2action [ option ] == "<STR_LIT>" , <EOL> '<STR_LIT>' % <EOL> ( script_name , option , map_option2action [ option ] ) ) <EOL> def failTest ( msg ) : <EOL> '''<STR_LIT>''' <EOL> ok_ ( False , msg ) <EOL> def test_cmdline ( ) : <EOL> '''<STR_LIT>''' <EOL> global ORIGINAL_START <EOL> if ORIGINAL_START is None : <EOL> ORIGINAL_START = E . Start <EOL> map_option2action = IOTools . readMap ( <EOL> IOTools . openFile ( FILENAME_OPTIONLIST ) , <EOL> columns = ( <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> has_header = True ) <EOL> files = [ ] <EOL> for label , expression in EXPRESSIONS : <EOL> f = glob . glob ( expression ) <EOL> files . extend ( sorted ( f ) ) <EOL> files = filterFiles ( files ) <EOL> sys . path . insert ( <NUM_LIT:0> , "<STR_LIT:.>" ) <EOL> for f in files : <EOL> if os . path . isdir ( f ) : <EOL> continue <EOL> if os . path . basename ( f ) in EXCLUDE : <EOL> continue <EOL> script_name = os . path . abspath ( f ) <EOL> pyxfile = ( os . path . join ( os . path . dirname ( f ) , "<STR_LIT:_>" ) + <EOL> os . path . basename ( f ) + "<STR_LIT:x>" ) <EOL> failTest . description = script_name <EOL> with IOTools . openFile ( script_name ) as inf : <EOL> if "<STR_LIT>" in inf . read ( ) : <EOL> yield ( failTest , <EOL> "<STR_LIT>" % script_name ) <EOL> continue <EOL> module , modulename = loadScript ( script_name ) <EOL> if module is None : <EOL> yield ( failTest , <EOL> "<STR_LIT>" % script_name ) <EOL> continue <EOL> E . 
Start = LocalStart <EOL> try : <EOL> module . main ( argv = [ "<STR_LIT>" ] ) <EOL> except AttributeError : <EOL> yield ( failTest , <EOL> "<STR_LIT>" % script_name ) <EOL> ok_ ( False , "<STR_LIT>" % script_name ) <EOL> except SystemExit : <EOL> yield ( failTest , <EOL> "<STR_LIT>" % script_name ) <EOL> except DummyError : <EOL> pass <EOL> for option in PARSER . option_list : <EOL> if option . dest is None : <EOL> continue <EOL> optstring = option . get_opt_string ( ) <EOL> if optstring . startswith ( "<STR_LIT>" ) : <EOL> optstring = optstring [ <NUM_LIT:2> : ] <EOL> check_option . description = script_name + "<STR_LIT::>" + optstring <EOL> yield ( check_option , optstring , os . path . abspath ( f ) , <EOL> map_option2action ) <EOL> del sys . modules [ modulename ] <EOL> if os . path . exists ( pyxfile ) : <EOL> sys . meta_path = [ ] </s>
<s> import os <EOL> import platform <EOL> __all__ = [ '<STR_LIT>' ] <EOL> class InsufficientParametersException ( Exception ) : <EOL> pass <EOL> class CacheBrowserSettings ( dict ) : <EOL> def __init__ ( self , * args , ** kwargs ) : <EOL> super ( CacheBrowserSettings , self ) . __init__ ( * args , ** kwargs ) <EOL> if platform . system ( ) == '<STR_LIT>' : <EOL> self . data_dir = os . path . join ( os . environ [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> else : <EOL> self . data_dir = '<STR_LIT>' <EOL> self [ '<STR_LIT:host>' ] = '<STR_LIT>' <EOL> self [ '<STR_LIT:port>' ] = <NUM_LIT> <EOL> self [ '<STR_LIT>' ] = os . path . join ( self . data_dir , '<STR_LIT>' ) <EOL> self . host = '<STR_LIT>' <EOL> self . port = <NUM_LIT> <EOL> self . database = os . path . join ( self . data_dir , '<STR_LIT>' ) <EOL> self . default_bootstrap_sources = [ <EOL> { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:path>' : '<STR_LIT>' <EOL> } , <EOL> { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:url>' : '<STR_LIT>' <EOL> } <EOL> ] <EOL> self . bootstrap_sources = [ ] <EOL> def get_or_error ( self , key ) : <EOL> if self . get ( key , None ) : <EOL> return self [ key ] <EOL> raise InsufficientParametersException ( "<STR_LIT>" % key ) <EOL> def update_from_args ( self , args ) : <EOL> self . host = self . _read_arg ( args , '<STR_LIT:host>' , self . host ) <EOL> self . port = self . _read_arg ( args , '<STR_LIT:port>' , self . port ) <EOL> self . database = self . _read_arg ( args , '<STR_LIT>' , self . database ) <EOL> self . read_bootstrap_sources ( args ) <EOL> def read_bootstrap_sources ( self , args ) : <EOL> local_sources = args . get ( '<STR_LIT>' ) or [ ] <EOL> for source in local_sources : <EOL> self . bootstrap_sources . 
append ( { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:path>' : source <EOL> } ) <EOL> @ staticmethod <EOL> def _read_arg ( args , key , default ) : <EOL> try : <EOL> return args [ key ] <EOL> except KeyError : <EOL> return default <EOL> settings = CacheBrowserSettings ( ) </s>
<s> """<STR_LIT>""" <EOL> from client . sources . common import core <EOL> from client . sources . common import models as sources_models <EOL> from client . protocols . common import models as protocol_models <EOL> from client . utils import auth <EOL> from client . utils import format <EOL> import json <EOL> import logging <EOL> import os <EOL> import pickle <EOL> import random <EOL> import re <EOL> import urllib . error <EOL> import urllib . request <EOL> log = logging . getLogger ( __name__ ) <EOL> class HintingProtocol ( protocol_models . Protocol ) : <EOL> """<STR_LIT>""" <EOL> HINT_SERVER = "<STR_LIT>" <EOL> HINT_ENDPOINT = '<STR_LIT>' <EOL> SMALL_EFFORT = <NUM_LIT:5> <EOL> LARGE_EFFORT = <NUM_LIT:8> <EOL> WAIT_ATTEMPTS = <NUM_LIT:5> <EOL> def run ( self , messages ) : <EOL> """<STR_LIT>""" <EOL> if self . args . local : <EOL> return <EOL> if '<STR_LIT>' not in messages : <EOL> log . info ( '<STR_LIT>' ) <EOL> return <EOL> if '<STR_LIT>' not in messages : <EOL> log . info ( '<STR_LIT>' ) <EOL> return <EOL> if self . args . no_hints : <EOL> messages [ '<STR_LIT>' ] = { '<STR_LIT>' : '<STR_LIT:user>' } <EOL> return <EOL> messages [ '<STR_LIT>' ] = { } <EOL> history = messages [ '<STR_LIT>' ] . get ( '<STR_LIT>' , { } ) <EOL> questions = history . get ( '<STR_LIT>' , [ ] ) <EOL> current_q = history . get ( '<STR_LIT>' , { } ) <EOL> for question in current_q : <EOL> if question not in questions : <EOL> continue <EOL> stats = questions [ question ] <EOL> messages [ '<STR_LIT>' ] [ question ] = { '<STR_LIT>' : { } , '<STR_LIT>' : { } } <EOL> hint_info = messages [ '<STR_LIT>' ] [ question ] <EOL> if stats [ '<STR_LIT>' ] and stats [ '<STR_LIT>' ] > self . SMALL_EFFORT : <EOL> hint_info [ '<STR_LIT>' ] = False <EOL> if self . args . question : <EOL> log . info ( '<STR_LIT>' , question ) <EOL> reflection = random . 
choice ( SOLVE_SUCCESS_MSG ) <EOL> if not confirm ( "<STR_LIT>" <EOL> "<STR_LIT>" ) : <EOL> hint_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] = False <EOL> else : <EOL> hint_info [ '<STR_LIT>' ] [ '<STR_LIT>' ] = True <EOL> prompt_user ( reflection , hint_info ) <EOL> elif stats [ '<STR_LIT>' ] < self . SMALL_EFFORT : <EOL> log . info ( "<STR_LIT>" , <EOL> question , stats [ '<STR_LIT>' ] , stats [ '<STR_LIT>' ] ) <EOL> hint_info [ '<STR_LIT>' ] = False <EOL> else : <EOL> if stats [ '<STR_LIT>' ] % self . WAIT_ATTEMPTS != <NUM_LIT:0> : <EOL> hint_info [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> hint_info [ '<STR_LIT>' ] = False <EOL> log . info ( '<STR_LIT>' , <EOL> stats [ '<STR_LIT>' ] % self . WAIT_ATTEMPTS ) <EOL> else : <EOL> hint_info [ '<STR_LIT>' ] = not stats [ '<STR_LIT>' ] <EOL> if not hint_info [ '<STR_LIT>' ] : <EOL> continue <EOL> log . info ( '<STR_LIT>' , question ) <EOL> if confirm ( "<STR_LIT>" . format ( question ) ) : <EOL> hint_info [ '<STR_LIT>' ] = True <EOL> print ( "<STR_LIT>" ) <EOL> try : <EOL> response = self . query_server ( messages , question ) <EOL> hint_info [ '<STR_LIT>' ] = response <EOL> hint = response [ '<STR_LIT:message>' ] <EOL> pre_prompt = response [ '<STR_LIT>' ] <EOL> post_prompt = response [ '<STR_LIT>' ] <EOL> log . info ( "<STR_LIT>" . format ( response ) ) <EOL> if not hint and not pre_prompt : <EOL> print ( "<STR_LIT>" . format ( question ) ) <EOL> continue <EOL> if pre_prompt : <EOL> print ( "<STR_LIT>" <EOL> "<STR_LIT>" ) <EOL> if not prompt_user ( pre_prompt , hint_info ) : <EOL> continue <EOL> print ( "<STR_LIT>" . format ( hint ) ) <EOL> if post_prompt : <EOL> prompt_user ( post_prompt , hint_info ) <EOL> except urllib . error . URLError : <EOL> log . debug ( "<STR_LIT>" ) <EOL> hint_info [ '<STR_LIT>' ] = True <EOL> print ( "<STR_LIT>" ) <EOL> else : <EOL> log . info ( '<STR_LIT>' , question ) <EOL> hint_info [ '<STR_LIT>' ] = False <EOL> def query_server ( self , messages , test ) : <EOL> access_token , _ , _ = auth . 
get_storage ( ) <EOL> user = auth . get_student_email ( access_token ) or access_token <EOL> if user : <EOL> user = hash ( user ) <EOL> data = { <EOL> '<STR_LIT>' : self . assignment . endpoint , <EOL> '<STR_LIT:test>' : test , <EOL> '<STR_LIT>' : messages , <EOL> '<STR_LIT:user>' : user <EOL> } <EOL> serialized_data = json . dumps ( data ) . encode ( encoding = '<STR_LIT:utf-8>' ) <EOL> address = self . HINT_SERVER + self . HINT_ENDPOINT <EOL> log . info ( '<STR_LIT>' , address ) <EOL> request = urllib . request . Request ( address ) <EOL> request . add_header ( "<STR_LIT:Content-Type>" , "<STR_LIT:application/json>" ) <EOL> response = urllib . request . urlopen ( request , serialized_data , <NUM_LIT:10> ) <EOL> return json . loads ( response . read ( ) . decode ( '<STR_LIT:utf-8>' ) ) <EOL> def prompt_user ( query , results ) : <EOL> try : <EOL> response = None <EOL> short_respones = <NUM_LIT:0> <EOL> while not response : <EOL> response = input ( "<STR_LIT>" . format ( query ) ) <EOL> if not response or len ( response ) < <NUM_LIT:5> : <EOL> short_respones += <NUM_LIT:1> <EOL> if short_respones > <NUM_LIT:2> : <EOL> break <EOL> print ( "<STR_LIT>" ) <EOL> results [ '<STR_LIT>' ] [ query ] = response <EOL> return response <EOL> except KeyboardInterrupt : <EOL> results [ '<STR_LIT>' ] [ query ] = '<STR_LIT>' <EOL> try : <EOL> print ( "<STR_LIT>" ) <EOL> return '<STR_LIT>' <EOL> except KeyboardInterrupt : <EOL> return '<STR_LIT>' <EOL> def confirm ( message ) : <EOL> response = input ( "<STR_LIT>" . format ( message ) ) <EOL> return response . lower ( ) == "<STR_LIT:yes>" or response . lower ( ) == "<STR_LIT:y>" <EOL> SOLVE_SUCCESS_MSG = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> ] <EOL> protocol = HintingProtocol </s>
<s> """<STR_LIT>""" <EOL> import os <EOL> import sys <EOL> class _OutputLogger ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self ) : <EOL> self . _current_stream = self . _stdout = sys . stdout <EOL> self . _devnull = open ( os . devnull , '<STR_LIT:w>' ) <EOL> self . _logs = { } <EOL> self . _num_logs = <NUM_LIT:0> <EOL> def on ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _current_stream = self . _stdout <EOL> def off ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _current_stream = self . _devnull <EOL> def new_log ( self ) : <EOL> """<STR_LIT>""" <EOL> log_id = self . _num_logs <EOL> self . _logs [ log_id ] = [ ] <EOL> self . _num_logs += <NUM_LIT:1> <EOL> return log_id <EOL> def get_log ( self , log_id ) : <EOL> assert log_id in self . _logs <EOL> return self . _logs [ log_id ] <EOL> def remove_log ( self , log_id ) : <EOL> assert log_id in self . _logs , '<STR_LIT>' . format ( log_id ) <EOL> del self . _logs [ log_id ] <EOL> def remove_all_logs ( self ) : <EOL> self . _logs = { } <EOL> def is_on ( self ) : <EOL> return self . _current_stream == self . _stdout <EOL> def write ( self , msg ) : <EOL> """<STR_LIT>""" <EOL> self . _current_stream . write ( msg ) <EOL> for log in self . _logs . values ( ) : <EOL> log . append ( msg ) <EOL> def flush ( self ) : <EOL> self . _current_stream . flush ( ) <EOL> def __getattr__ ( self , attr ) : <EOL> return getattr ( self . _current_stream , attr ) <EOL> _logger = sys . stdout = _OutputLogger ( ) <EOL> def on ( ) : <EOL> _logger . on ( ) <EOL> def off ( ) : <EOL> _logger . off ( ) <EOL> def get_log ( log_id ) : <EOL> return _logger . get_log ( log_id ) <EOL> def new_log ( ) : <EOL> return _logger . new_log ( ) <EOL> def remove_log ( log_id ) : <EOL> _logger . remove_log ( log_id ) <EOL> def remove_all_logs ( ) : <EOL> _logger . remove_all_logs ( ) </s>
<s> from client import exceptions as ex <EOL> from client . sources . ok_test import concept <EOL> import unittest <EOL> class ConceptSuiteTest ( unittest . TestCase ) : <EOL> TEST_NAME = '<STR_LIT:A>' <EOL> SUITE_NUMBER = <NUM_LIT:0> <EOL> def makeTest ( self , cases ) : <EOL> return concept . ConceptSuite ( False , False , type = '<STR_LIT>' , cases = cases ) <EOL> def testConstructor_noCases ( self ) : <EOL> try : <EOL> self . makeTest ( [ ] ) <EOL> except TypeError : <EOL> self . fail ( ) <EOL> def testConstructor_validTestCase ( self ) : <EOL> try : <EOL> self . makeTest ( [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> ] ) <EOL> except TypeError : <EOL> self . fail ( ) <EOL> def testConstructor_missingQuestion ( self ) : <EOL> self . assertRaises ( ex . SerializeException , self . makeTest , [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> ] ) <EOL> def testConstructor_missingAnswer ( self ) : <EOL> self . assertRaises ( ex . SerializeException , self . makeTest , [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> ] ) <EOL> def testRun_noCases ( self ) : <EOL> test = self . makeTest ( [ ] ) <EOL> self . assertEqual ( { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } , test . run ( self . TEST_NAME , self . SUITE_NUMBER ) ) <EOL> def testRun_lockedCases ( self ) : <EOL> test = self . makeTest ( [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : True , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> ] ) <EOL> self . 
assertEqual ( { <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> } , test . run ( self . TEST_NAME , self . SUITE_NUMBER ) ) <EOL> def testRun_noLockedCases ( self ) : <EOL> test = self . makeTest ( [ <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } , <EOL> ] ) <EOL> self . assertEqual ( { <EOL> '<STR_LIT>' : <NUM_LIT:2> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> '<STR_LIT>' : <NUM_LIT:0> , <EOL> } , test . run ( self . TEST_NAME , self . SUITE_NUMBER ) ) </s>
<s> from metakernel import Magic <EOL> class GetMagic ( Magic ) : <EOL> def line_get ( self , variable ) : <EOL> """<STR_LIT>""" <EOL> self . retval = self . kernel . get_variable ( variable ) <EOL> def post_process ( self , retval ) : <EOL> return self . retval <EOL> def register_magics ( kernel ) : <EOL> kernel . register_magics ( GetMagic ) </s>
<s> from metakernel . tests . utils import ( get_kernel , get_log_text , <EOL> clear_log_text , EvalKernel ) <EOL> import os <EOL> def test_download_magic ( ) : <EOL> kernel = get_kernel ( EvalKernel ) <EOL> kernel . do_execute ( "<STR_LIT>" ) <EOL> text = get_log_text ( kernel ) <EOL> assert "<STR_LIT>" in text , text <EOL> assert os . path . isfile ( "<STR_LIT>" ) , "<STR_LIT>" <EOL> clear_log_text ( kernel ) <EOL> kernel . do_execute ( "<STR_LIT>" ) <EOL> text = get_log_text ( kernel ) <EOL> assert "<STR_LIT>" in text , text <EOL> assert os . path . isfile ( "<STR_LIT>" ) , "<STR_LIT>" <EOL> def teardown ( ) : <EOL> for fname in [ '<STR_LIT>' , '<STR_LIT>' ] : <EOL> try : <EOL> os . remove ( fname ) <EOL> except : <EOL> pass </s>
<s> """<STR_LIT>""" <EOL> from metakernel import Magic , option <EOL> from IPython . display import IFrame <EOL> import sys <EOL> if sys . version_info . major == <NUM_LIT:2> and sys . version_info . minor == <NUM_LIT:7> : <EOL> from urllib import quote <EOL> elif sys . version_info . major == <NUM_LIT:3> and sys . version_info . minor >= <NUM_LIT:3> : <EOL> from urllib . parse import quote <EOL> class TutorMagic ( Magic ) : <EOL> @ option ( <EOL> '<STR_LIT>' , '<STR_LIT>' , action = '<STR_LIT:store>' , nargs = <NUM_LIT:1> , <EOL> help = ( "<STR_LIT>" + <EOL> "<STR_LIT>" ) <EOL> ) <EOL> def cell_tutor ( self , language = None ) : <EOL> """<STR_LIT>""" <EOL> if language is None : <EOL> language = self . kernel . language_info [ "<STR_LIT:name>" ] <EOL> if language not in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] : <EOL> raise ValueError ( "<STR_LIT>" <EOL> "<STR_LIT>" . format ( language ) ) <EOL> url = "<STR_LIT>" <EOL> url += quote ( self . code ) <EOL> url += "<STR_LIT>" <EOL> url += "<STR_LIT>" <EOL> if language in [ "<STR_LIT>" , "<STR_LIT>" ] : <EOL> url += "<STR_LIT>" <EOL> elif language == "<STR_LIT>" : <EOL> url += "<STR_LIT>" <EOL> elif language == "<STR_LIT>" : <EOL> url += "<STR_LIT>" <EOL> elif language == "<STR_LIT>" : <EOL> url += "<STR_LIT>" <EOL> self . kernel . Display ( IFrame ( url , height = <NUM_LIT> , width = "<STR_LIT>" ) ) <EOL> self . evaluate = False <EOL> def register_magics ( kernel ) : <EOL> kernel . register_magics ( TutorMagic ) <EOL> def register_ipython_magics ( ) : <EOL> from metakernel import IPythonKernel <EOL> from IPython . core . magic import register_cell_magic <EOL> kernel = IPythonKernel ( ) <EOL> magic = TutorMagic ( kernel ) <EOL> @ register_cell_magic <EOL> def tutor ( line , cell ) : <EOL> magic . code = cell <EOL> magic . cell_tutor ( language = "<STR_LIT>" ) </s>
<s> @ register_line_magic ( '<STR_LIT>' ) <EOL> @ register_line_magic ( '<STR_LIT>' ) <EOL> @ register_line_magic ( '<STR_LIT>' ) <EOL> @ register_line_magic ( '<STR_LIT>' ) <EOL> def _pyplot ( line ) : <EOL> _ip . run_line_magic ( '<STR_LIT>' , line ) <EOL> _ip . run_code ( """<STR_LIT>""" ) <EOL> _ip . run_code ( """<STR_LIT>""" ) <EOL> def hist_ ( * args , ** kwargs ) : <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> kwargs . pop ( '<STR_LIT>' , None ) <EOL> return plt . hist ( * args , histtype = '<STR_LIT>' , alpha = <NUM_LIT> , normed = True , ** kwargs ) <EOL> def figsize ( sizex , sizey ) : <EOL> """<STR_LIT>""" <EOL> import matplotlib <EOL> matplotlib . rcParams [ '<STR_LIT>' ] = [ sizex , sizey ] <EOL> _ip . user_ns [ '<STR_LIT>' ] = hist_ <EOL> _ip . user_ns [ '<STR_LIT>' ] = figsize <EOL> _ip . user_ns [ '<STR_LIT>' ] = plt . plot <EOL> _ip . user_ns [ '<STR_LIT>' ] = plt . subplot <EOL> del _pyplot </s>
<s> from __future__ import print_function <EOL> import os <EOL> import pytest <EOL> import matplotlib . pyplot as plt <EOL> from lifelines . estimation import KaplanMeierFitter , NelsonAalenFitter <EOL> from lifelines . generate_datasets import exponential_survival_data <EOL> def test_exponential_data_sets_correct_censor ( ) : <EOL> print ( os . environ ) <EOL> N = <NUM_LIT> <EOL> censorship = <NUM_LIT> <EOL> T , C = exponential_survival_data ( N , censorship , scale = <NUM_LIT:10> ) <EOL> assert abs ( C . mean ( ) - ( <NUM_LIT:1> - censorship ) ) < <NUM_LIT> <EOL> @ pytest . mark . skipif ( "<STR_LIT>" not in os . environ , reason = "<STR_LIT>" ) <EOL> def test_exponential_data_sets_fit ( ) : <EOL> N = <NUM_LIT> <EOL> T , C = exponential_survival_data ( N , <NUM_LIT> , scale = <NUM_LIT:10> ) <EOL> naf = NelsonAalenFitter ( ) <EOL> naf . fit ( T , C ) . plot ( ) <EOL> plt . title ( "<STR_LIT>" ) <EOL> @ pytest . mark . skipif ( "<STR_LIT>" not in os . environ , reason = "<STR_LIT>" ) <EOL> def test_kmf_minimum_observation_bias ( ) : <EOL> N = <NUM_LIT> <EOL> kmf = KaplanMeierFitter ( ) <EOL> T , C = exponential_survival_data ( N , <NUM_LIT:0.1> , scale = <NUM_LIT:10> ) <EOL> B = <NUM_LIT> * T <EOL> kmf . fit ( T , C , entry = B ) <EOL> kmf . plot ( ) <EOL> plt . title ( "<STR_LIT>" ) </s>
<s> api = "<STR_LIT:0>" <EOL> version = "<STR_LIT:0>" <EOL> revision = "<STR_LIT>" </s>
<s> """<STR_LIT>""" <EOL> class UnderComp ( object ) : <EOL> """<STR_LIT>""" <EOL> m_conId = <NUM_LIT:0> <EOL> m_delta = float ( ) <EOL> m_price = float ( ) <EOL> def __init__ ( self ) : <EOL> """<STR_LIT>""" <EOL> self . m_conId = <NUM_LIT:0> <EOL> self . m_delta = <NUM_LIT:0> <EOL> self . m_price = <NUM_LIT:0> <EOL> def __eq__ ( self , p_other ) : <EOL> """<STR_LIT>""" <EOL> if self is p_other : <EOL> return True <EOL> if p_other is None or not ( isinstance ( p_other , ( UnderComp , ) ) ) : <EOL> return False <EOL> l_theOther = p_other <EOL> if self . m_conId != l_theOther . m_conId : <EOL> return False <EOL> if self . m_delta != l_theOther . m_delta : <EOL> return False <EOL> if self . m_price != l_theOther . m_price : <EOL> return False <EOL> return True </s>
<s> import copy <EOL> import functools <EOL> import socket <EOL> import struct <EOL> import sys <EOL> def toTypeName ( value ) : <EOL> return '<STR_LIT>' % ( value [ <NUM_LIT:0> ] . upper ( ) , value [ <NUM_LIT:1> : ] ) <EOL> def maybeName ( obj ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> return obj . __name__ <EOL> except ( AttributeError , ) : <EOL> return str ( obj ) <EOL> class classmethod_ ( classmethod ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , func ) : <EOL> classmethod . __init__ ( self , func ) <EOL> self . func = func <EOL> def __getattr__ ( self , name ) : <EOL> return getattr ( self . func , name ) <EOL> def synchronized ( lock ) : <EOL> """<STR_LIT>""" <EOL> def wrapper ( func ) : <EOL> @ functools . wraps ( func ) <EOL> def inner ( * args , ** kwds ) : <EOL> lock . acquire ( ) <EOL> try : <EOL> return func ( * args , ** kwds ) <EOL> finally : <EOL> lock . release ( ) <EOL> return inner <EOL> return wrapper <EOL> class Boolean ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . value = value <EOL> def booleanValue ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . value <EOL> @ classmethod <EOL> def valueOf ( cls , text ) : <EOL> """<STR_LIT>""" <EOL> value = str ( text ) . lower ( ) == '<STR_LIT:true>' <EOL> return cls ( value ) <EOL> class Cloneable ( object ) : <EOL> """<STR_LIT>""" <EOL> def clone ( self ) : <EOL> return copy . copy ( self ) <EOL> class DataInputStream ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> """<STR_LIT>""" <EOL> self . stream = stream <EOL> self . recv = stream . recv <EOL> def readByte ( self , unpack = struct . unpack ) : <EOL> """<STR_LIT>""" <EOL> return unpack ( '<STR_LIT>' , self . recv ( <NUM_LIT:1> ) ) [ <NUM_LIT:0> ] <EOL> class DataOutputStream ( object ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , stream ) : <EOL> """<STR_LIT>""" <EOL> self . send = stream . 
send <EOL> def write ( self , data , pack = struct . pack , eol = struct . pack ( '<STR_LIT>' , <NUM_LIT:0> ) ) : <EOL> """<STR_LIT>""" <EOL> send = self . send <EOL> if data == <NUM_LIT:0> : <EOL> send ( eol ) <EOL> else : <EOL> for char in data : <EOL> send ( pack ( '<STR_LIT>' , char ) ) <EOL> class Double ( float ) : <EOL> """<STR_LIT>""" <EOL> MAX_VALUE = sys . maxint <EOL> @ staticmethod <EOL> def parseDouble ( text ) : <EOL> """<STR_LIT>""" <EOL> return float ( text or <NUM_LIT:0> ) <EOL> class Integer ( int ) : <EOL> """<STR_LIT>""" <EOL> MAX_VALUE = sys . maxint <EOL> @ staticmethod <EOL> def parseInt ( text ) : <EOL> """<STR_LIT>""" <EOL> return int ( text or <NUM_LIT:0> ) <EOL> @ staticmethod <EOL> def parseLong ( text ) : <EOL> """<STR_LIT>""" <EOL> return long ( text or <NUM_LIT:0> ) <EOL> Long = Integer <EOL> class Socket ( socket . socket ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , host , port ) : <EOL> """<STR_LIT>""" <EOL> socket . socket . __init__ ( self , socket . AF_INET , socket . SOCK_STREAM ) <EOL> self . connect ( ( host , port ) ) <EOL> def getInputStream ( self ) : <EOL> """<STR_LIT>""" <EOL> return self <EOL> def getOutputStream ( self ) : <EOL> """<STR_LIT>""" <EOL> return self <EOL> def disconnect ( self ) : <EOL> self . shutdown ( socket . SHUT_RDWR ) <EOL> self . close ( ) <EOL> def isConnected ( self ) : <EOL> try : <EOL> throwaway = self . getpeername ( ) <EOL> return True <EOL> except ( socket . error , ) , ex : <EOL> return False <EOL> class StringBuffer ( list ) : <EOL> """<STR_LIT>""" <EOL> def __str__ ( self , join = str . join , chr = chr ) : <EOL> """<STR_LIT>""" <EOL> return join ( '<STR_LIT>' , [ chr ( v ) for v in self ] ) <EOL> if '<STR_LIT>' in sys . modules : <EOL> from qt import QThread <EOL> class ThreadType ( QThread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> QThread . __init__ ( self ) <EOL> def interrupt ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . 
terminate ( ) <EOL> def isInterrupted ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . finished ( ) <EOL> def setDaemon ( self , value ) : <EOL> """<STR_LIT>""" <EOL> def setName ( self , value ) : <EOL> """<STR_LIT>""" <EOL> elif '<STR_LIT>' in sys . modules : <EOL> from PyQt4 . QtCore import QThread <EOL> class ThreadType ( QThread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> QThread . __init__ ( self ) <EOL> def interrupt ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . exit ( ) <EOL> def isInterrupted ( self ) : <EOL> """<STR_LIT>""" <EOL> return self . isFinished ( ) <EOL> def setDaemon ( self , value ) : <EOL> """<STR_LIT>""" <EOL> def setName ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . setObjectName ( value ) <EOL> else : <EOL> import threading <EOL> class ThreadType ( threading . Thread ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name ) : <EOL> """<STR_LIT>""" <EOL> threading . Thread . __init__ ( self , name = name ) <EOL> self . setDaemon ( True ) <EOL> def interrupt ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> def isInterrupted ( self ) : <EOL> """<STR_LIT>""" <EOL> return False <EOL> class Thread ( ThreadType ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , name , parent , dis ) : <EOL> """<STR_LIT>""" <EOL> ThreadType . __init__ ( self , name = name ) <EOL> def term ( self ) : <EOL> def isInterrupted ( ) : <EOL> print '<STR_LIT>' <EOL> return True <EOL> self . isInterrupted = isInterrupted <EOL> self . m_dis . stream . shutdown ( socket . SHUT_RDWR ) <EOL> self . m_dis . stream . close ( ) </s>
<s> from cStringIO import StringIO <EOL> from urllib import quote <EOL> from threading import Event <EOL> try : <EOL> True <EOL> except : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> INIT_STATE = ( ( '<STR_LIT:R>' , '<STR_LIT>' ) , ( '<STR_LIT:L>' , '<STR_LIT>' ) ) <EOL> class DownloaderFeedback : <EOL> def __init__ ( self , choker , httpdl , add_task , upfunc , downfunc , <EOL> ratemeasure , leftfunc , file_length , finflag , sp , statistics , <EOL> statusfunc = None , interval = None ) : <EOL> self . choker = choker <EOL> self . httpdl = httpdl <EOL> self . add_task = add_task <EOL> self . upfunc = upfunc <EOL> self . downfunc = downfunc <EOL> self . ratemeasure = ratemeasure <EOL> self . leftfunc = leftfunc <EOL> self . file_length = file_length <EOL> self . finflag = finflag <EOL> self . sp = sp <EOL> self . statistics = statistics <EOL> self . lastids = [ ] <EOL> self . spewdata = None <EOL> self . doneprocessing = Event ( ) <EOL> self . doneprocessing . set ( ) <EOL> if statusfunc : <EOL> self . autodisplay ( statusfunc , interval ) <EOL> def _rotate ( self ) : <EOL> cs = self . choker . connections <EOL> for id in self . lastids : <EOL> for i in xrange ( len ( cs ) ) : <EOL> if cs [ i ] . get_id ( ) == id : <EOL> return cs [ i : ] + cs [ : i ] <EOL> return cs <EOL> def spews ( self ) : <EOL> l = [ ] <EOL> cs = self . _rotate ( ) <EOL> self . lastids = [ c . get_id ( ) for c in cs ] <EOL> for c in cs : <EOL> a = { } <EOL> a [ '<STR_LIT:id>' ] = c . get_readable_id ( ) <EOL> a [ '<STR_LIT>' ] = c . get_ip ( ) <EOL> a [ '<STR_LIT>' ] = ( c is self . choker . connections [ <NUM_LIT:0> ] ) <EOL> a [ '<STR_LIT>' ] = INIT_STATE [ c . is_locally_initiated ( ) ] [ c . is_encrypted ( ) ] <EOL> u = c . get_upload ( ) <EOL> a [ '<STR_LIT>' ] = int ( u . measure . get_rate ( ) ) <EOL> a [ '<STR_LIT>' ] = u . is_interested ( ) <EOL> a [ '<STR_LIT>' ] = u . is_choked ( ) <EOL> d = c . get_download ( ) <EOL> a [ '<STR_LIT>' ] = int ( d . measure . 
get_rate ( ) ) <EOL> a [ '<STR_LIT>' ] = d . is_interested ( ) <EOL> a [ '<STR_LIT>' ] = d . is_choked ( ) <EOL> a [ '<STR_LIT>' ] = d . is_snubbed ( ) <EOL> a [ '<STR_LIT>' ] = d . connection . upload . measure . get_total ( ) <EOL> a [ '<STR_LIT>' ] = d . connection . download . measure . get_total ( ) <EOL> if len ( d . connection . download . have ) > <NUM_LIT:0> : <EOL> a [ '<STR_LIT>' ] = float ( len ( d . connection . download . have ) - d . connection . download . have . numfalse ) / float ( len ( d . connection . download . have ) ) <EOL> else : <EOL> a [ '<STR_LIT>' ] = <NUM_LIT:1.0> <EOL> a [ '<STR_LIT>' ] = d . connection . download . peermeasure . get_rate ( ) <EOL> l . append ( a ) <EOL> for dl in self . httpdl . get_downloads ( ) : <EOL> if dl . goodseed : <EOL> a = { } <EOL> a [ '<STR_LIT:id>' ] = '<STR_LIT>' <EOL> a [ '<STR_LIT>' ] = dl . baseurl <EOL> a [ '<STR_LIT>' ] = False <EOL> a [ '<STR_LIT>' ] = '<STR_LIT:L>' <EOL> a [ '<STR_LIT>' ] = <NUM_LIT:0> <EOL> a [ '<STR_LIT>' ] = False <EOL> a [ '<STR_LIT>' ] = False <EOL> a [ '<STR_LIT>' ] = int ( dl . measure . get_rate ( ) ) <EOL> a [ '<STR_LIT>' ] = True <EOL> a [ '<STR_LIT>' ] = not dl . active <EOL> a [ '<STR_LIT>' ] = not dl . active <EOL> a [ '<STR_LIT>' ] = None <EOL> a [ '<STR_LIT>' ] = dl . measure . get_total ( ) <EOL> a [ '<STR_LIT>' ] = <NUM_LIT:1.0> <EOL> a [ '<STR_LIT>' ] = None <EOL> l . append ( a ) <EOL> return l <EOL> def gather ( self , displayfunc = None ) : <EOL> s = { '<STR_LIT>' : self . statistics . update ( ) } <EOL> if self . sp . isSet ( ) : <EOL> s [ '<STR_LIT>' ] = self . spews ( ) <EOL> else : <EOL> s [ '<STR_LIT>' ] = None <EOL> s [ '<STR_LIT>' ] = self . upfunc ( ) <EOL> if self . finflag . isSet ( ) : <EOL> s [ '<STR_LIT>' ] = self . file_length <EOL> return s <EOL> s [ '<STR_LIT>' ] = self . downfunc ( ) <EOL> obtained , desired = self . 
leftfunc ( ) <EOL> s [ '<STR_LIT>' ] = obtained <EOL> s [ '<STR_LIT>' ] = desired <EOL> if desired > <NUM_LIT:0> : <EOL> s [ '<STR_LIT>' ] = float ( obtained ) / desired <EOL> else : <EOL> s [ '<STR_LIT>' ] = <NUM_LIT:1.0> <EOL> if desired == obtained : <EOL> s [ '<STR_LIT:time>' ] = <NUM_LIT:0> <EOL> else : <EOL> s [ '<STR_LIT:time>' ] = self . ratemeasure . get_time_left ( desired - obtained ) <EOL> return s <EOL> def display ( self , displayfunc ) : <EOL> if not self . doneprocessing . isSet ( ) : <EOL> return <EOL> self . doneprocessing . clear ( ) <EOL> stats = self . gather ( ) <EOL> if self . finflag . isSet ( ) : <EOL> displayfunc ( dpflag = self . doneprocessing , <EOL> upRate = stats [ '<STR_LIT>' ] , <EOL> statistics = stats [ '<STR_LIT>' ] , spew = stats [ '<STR_LIT>' ] ) <EOL> elif stats [ '<STR_LIT:time>' ] is not None : <EOL> displayfunc ( dpflag = self . doneprocessing , <EOL> fractionDone = stats [ '<STR_LIT>' ] , sizeDone = stats [ '<STR_LIT>' ] , <EOL> downRate = stats [ '<STR_LIT>' ] , upRate = stats [ '<STR_LIT>' ] , <EOL> statistics = stats [ '<STR_LIT>' ] , spew = stats [ '<STR_LIT>' ] , <EOL> timeEst = stats [ '<STR_LIT:time>' ] ) <EOL> else : <EOL> displayfunc ( dpflag = self . doneprocessing , <EOL> fractionDone = stats [ '<STR_LIT>' ] , sizeDone = stats [ '<STR_LIT>' ] , <EOL> downRate = stats [ '<STR_LIT>' ] , upRate = stats [ '<STR_LIT>' ] , <EOL> statistics = stats [ '<STR_LIT>' ] , spew = stats [ '<STR_LIT>' ] ) <EOL> def autodisplay ( self , displayfunc , interval ) : <EOL> self . displayfunc = displayfunc <EOL> self . interval = interval <EOL> self . _autodisplay ( ) <EOL> def _autodisplay ( self ) : <EOL> self . add_task ( self . _autodisplay , self . interval ) <EOL> self . display ( self . displayfunc ) </s>
<s> from clock import clock <EOL> try : <EOL> True <EOL> except : <EOL> True = <NUM_LIT:1> <EOL> False = <NUM_LIT:0> <EOL> FACTOR = <NUM_LIT> <EOL> class RateMeasure : <EOL> def __init__ ( self ) : <EOL> self . last = None <EOL> self . time = <NUM_LIT:1.0> <EOL> self . got = <NUM_LIT:0.0> <EOL> self . remaining = None <EOL> self . broke = False <EOL> self . got_anything = False <EOL> self . last_checked = None <EOL> self . rate = <NUM_LIT:0> <EOL> self . lastten = False <EOL> def data_came_in ( self , amount ) : <EOL> if not self . got_anything : <EOL> self . got_anything = True <EOL> self . last = clock ( ) <EOL> return <EOL> self . update ( amount ) <EOL> def data_rejected ( self , amount ) : <EOL> pass <EOL> def get_time_left ( self , left ) : <EOL> t = clock ( ) <EOL> if not self . got_anything : <EOL> return None <EOL> if t - self . last > <NUM_LIT:15> : <EOL> self . update ( <NUM_LIT:0> ) <EOL> try : <EOL> remaining = left / self . rate <EOL> if not self . lastten and remaining <= <NUM_LIT:10> : <EOL> self . lastten = True <EOL> if self . lastten : <EOL> return remaining <EOL> delta = max ( remaining / <NUM_LIT:20> , <NUM_LIT:2> ) <EOL> if self . remaining is None : <EOL> self . remaining = remaining <EOL> elif abs ( self . remaining - remaining ) > delta : <EOL> self . remaining = remaining <EOL> else : <EOL> self . remaining -= t - self . last_checked <EOL> except ZeroDivisionError : <EOL> self . remaining = None <EOL> if self . remaining is not None and self . remaining < <NUM_LIT:0.1> : <EOL> self . remaining = <NUM_LIT:0.1> <EOL> self . last_checked = t <EOL> return self . remaining <EOL> def update ( self , amount ) : <EOL> t = clock ( ) <EOL> t1 = int ( t ) <EOL> l1 = int ( self . last ) <EOL> for i in xrange ( l1 , t1 ) : <EOL> self . time *= FACTOR <EOL> self . got *= FACTOR <EOL> self . got += amount <EOL> if t - self . last < <NUM_LIT:20> : <EOL> self . time += t - self . last <EOL> self . last = t <EOL> try : <EOL> self . rate = self . 
got / self . time <EOL> except ZeroDivisionError : <EOL> pass </s>
<s> from colorbarpanel import ColorBarPanel <EOL> from dbconnect import DBConnect , UniqueImageClause , image_key_columns <EOL> from platemappanel import * <EOL> import imagetools <EOL> from properties import Properties <EOL> import numpy as np <EOL> import os <EOL> import re <EOL> import wx <EOL> from PlotPanelTS import * <EOL> p = Properties . getInstance ( ) <EOL> db = DBConnect . getInstance ( ) <EOL> ID_IMPORT = <NUM_LIT> <EOL> ID_ADDPOINTS = <NUM_LIT> <EOL> ID_TABLE_SELECT = <NUM_LIT> <EOL> ID_REMOVEPOINTS = <NUM_LIT> <EOL> class DataSourcePanel ( wx . Panel ) : <EOL> def __init__ ( self , parent , figurepanel , ** kwargs ) : <EOL> wx . Panel . __init__ ( self , parent , ** kwargs ) <EOL> self . figurepanel = figurepanel <EOL> self . sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> testpanel = wx . Panel ( self , style = wx . BORDER ) <EOL> sizer2 = wx . BoxSizer ( wx . VERTICAL ) <EOL> import_button = wx . Button ( testpanel , ID_IMPORT , "<STR_LIT>" ) <EOL> self . importpathtext = wx . StaticText ( testpanel , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> self . tabledropdown = wx . Choice ( testpanel , ID_TABLE_SELECT ) <EOL> wx . EVT_BUTTON ( import_button , ID_IMPORT , self . loadproperties ) <EOL> wx . EVT_CHOICE ( self . tabledropdown , ID_TABLE_SELECT , self . selecttable ) <EOL> sizer2 . Add ( import_button ) <EOL> sizer2 . Add ( self . importpathtext ) <EOL> sizer2 . Add ( self . tabledropdown ) <EOL> testpanel . SetSizer ( sizer2 ) <EOL> testpanel2 = wx . Panel ( self , style = wx . BORDER ) <EOL> sizer3 = wx . BoxSizer ( wx . VERTICAL ) <EOL> self . field1dropdown = wx . Choice ( testpanel2 ) <EOL> self . field2dropdown = wx . Choice ( testpanel2 ) <EOL> self . addtochartbutton = wx . Button ( testpanel2 , ID_ADDPOINTS , "<STR_LIT>" ) <EOL> sizer3 . Add ( wx . StaticText ( testpanel2 , - <NUM_LIT:1> , "<STR_LIT>" ) ) <EOL> sizer3 . Add ( wx . StaticText ( testpanel2 , - <NUM_LIT:1> , "<STR_LIT>" ) ) <EOL> sizer3 . Add ( self . field1dropdown ) <EOL> sizer3 . 
Add ( wx . StaticText ( testpanel2 , - <NUM_LIT:1> , "<STR_LIT>" ) ) <EOL> sizer3 . Add ( self . field2dropdown ) <EOL> sizer3 . Add ( self . addtochartbutton ) <EOL> wx . EVT_BUTTON ( self . addtochartbutton , ID_ADDPOINTS , self . addtochart ) <EOL> testpanel2 . SetSizer ( sizer3 ) <EOL> testpanel3 = wx . Panel ( self , style = wx . BORDER ) <EOL> self . plotfieldslistbox = wx . ListBox ( testpanel3 ) <EOL> self . removechartbutton = wx . Button ( testpanel3 , ID_REMOVEPOINTS , "<STR_LIT>" ) <EOL> sizer4 = wx . BoxSizer ( wx . VERTICAL ) <EOL> sizer4 . Add ( self . plotfieldslistbox ) <EOL> sizer4 . Add ( self . removechartbutton ) <EOL> wx . EVT_BUTTON ( self . removechartbutton , ID_REMOVEPOINTS , self . removefromchart ) <EOL> testpanel3 . SetSizer ( sizer4 ) <EOL> self . sizer . Add ( testpanel , <NUM_LIT:1> , wx . EXPAND ) <EOL> self . sizer . Add ( testpanel2 , <NUM_LIT:1> , wx . EXPAND ) <EOL> self . sizer . Add ( testpanel3 , <NUM_LIT:1> , wx . EXPAND ) <EOL> self . SetSizer ( self . sizer ) <EOL> self . SetAutoLayout ( <NUM_LIT:1> ) <EOL> self . sizer . Fit ( self ) <EOL> self . Show ( <NUM_LIT:1> ) <EOL> def loadproperties ( self , event ) : <EOL> dlg = wx . FileDialog ( None , "<STR_LIT>" , style = wx . OPEN ) <EOL> if dlg . ShowModal ( ) == wx . ID_OK : <EOL> filename = dlg . GetPath ( ) <EOL> os . chdir ( os . path . split ( filename ) [ <NUM_LIT:0> ] ) <EOL> p . LoadFile ( filename ) <EOL> self . importpathtext . SetLabel ( filename ) <EOL> table_list = db . GetTableNames ( ) <EOL> self . tabledropdown . Clear ( ) <EOL> self . tabledropdown . AppendItems ( table_list ) <EOL> else : <EOL> print '<STR_LIT>' <EOL> def selecttable ( self , event ) : <EOL> tablename = event . GetString ( ) <EOL> fieldnames = db . GetColumnNames ( tablename ) <EOL> self . field1dropdown . Clear ( ) <EOL> self . field1dropdown . AppendItems ( fieldnames ) <EOL> self . field1dropdown . SetSelection ( <NUM_LIT:0> ) <EOL> self . field2dropdown . Clear ( ) <EOL> self . 
field2dropdown . AppendItems ( fieldnames ) <EOL> self . field2dropdown . SetSelection ( <NUM_LIT:0> ) <EOL> def addtochart ( self , event ) : <EOL> addition = self . field1dropdown . GetStringSelection ( ) + '<STR_LIT>' + self . field2dropdown . GetStringSelection ( ) <EOL> pointstuple = ( self . tabledropdown . GetStringSelection ( ) , <EOL> self . field1dropdown . GetStringSelection ( ) , <EOL> self . field2dropdown . GetStringSelection ( ) ) <EOL> self . plotfieldslistbox . Append ( addition , clientData = pointstuple ) <EOL> points = self . loadpoints ( pointstuple [ <NUM_LIT:0> ] , pointstuple [ <NUM_LIT:1> ] , pointstuple [ <NUM_LIT:2> ] ) <EOL> self . plotpoints ( points ) <EOL> def removefromchart ( self , event ) : <EOL> selected = self . plotfieldslistbox . GetSelection ( ) <EOL> self . plotfieldslistbox . Delete ( selected ) <EOL> def loadpoints ( self , tablename , xpoints , ypoints ) : <EOL> points = db . execute ( '<STR_LIT>' % ( xpoints , ypoints , tablename ) ) <EOL> return [ points ] <EOL> def plotpoints ( self , points ) : <EOL> self . figurepanel . setpointslists ( points ) <EOL> self . figurepanel . draw ( ) <EOL> self . figurepanel . Refresh ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> theta = np . arange ( <NUM_LIT:0> , <NUM_LIT> * <NUM_LIT:2> * np . pi , <NUM_LIT> ) <EOL> rad0 = ( <NUM_LIT> * theta / ( <NUM_LIT:2> * np . pi ) + <NUM_LIT:1> ) <EOL> r0 = rad0 * ( <NUM_LIT:8> + np . sin ( theta * <NUM_LIT:7> + rad0 / <NUM_LIT> ) ) <EOL> x0 = r0 * np . cos ( theta ) <EOL> y0 = r0 * np . sin ( theta ) <EOL> rad1 = ( <NUM_LIT> * theta / ( <NUM_LIT:2> * np . pi ) + <NUM_LIT:1> ) <EOL> r1 = rad1 * ( <NUM_LIT:6> + np . sin ( theta * <NUM_LIT:7> + rad1 / <NUM_LIT> ) ) <EOL> x1 = r1 * np . cos ( theta ) <EOL> y1 = r1 * np . 
sin ( theta ) <EOL> points = [ [ ( <NUM_LIT:1> , <NUM_LIT:1> ) ] , <EOL> [ ( <NUM_LIT:2> , <NUM_LIT:2> ) ] , <EOL> [ ( <NUM_LIT:3> , <NUM_LIT:3> ) ] , <EOL> [ ( <NUM_LIT:4> , <NUM_LIT:4> ) ] , <EOL> [ ( <NUM_LIT:5> , <NUM_LIT:5> ) ] <EOL> ] <EOL> clrs = [ [ <NUM_LIT> , <NUM_LIT:200> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] , [ <NUM_LIT> , <NUM_LIT> , <NUM_LIT> ] ] <EOL> app = wx . PySimpleApp ( ) <EOL> frame = wx . Frame ( None , - <NUM_LIT:1> , "<STR_LIT>" ) <EOL> nb = wx . Notebook ( frame , - <NUM_LIT:1> ) <EOL> simplepanel = wx . Panel ( nb , style = wx . BORDER ) <EOL> figpanel = FigurePanel ( simplepanel , points , clrs ) <EOL> sizer = wx . BoxSizer ( ) <EOL> sizer . Add ( figpanel , <NUM_LIT:1> , wx . EXPAND ) <EOL> simplepanel . SetSizer ( sizer ) <EOL> nb . AddPage ( simplepanel , "<STR_LIT>" ) <EOL> nb . AddPage ( DataSourcePanel ( nb , figpanel ) , "<STR_LIT>" ) <EOL> frame . Show ( <NUM_LIT:1> ) <EOL> app . MainLoop ( ) </s>
<s> from __future__ import with_statement <EOL> import matplotlib <EOL> matplotlib . use ( '<STR_LIT>' ) <EOL> import matplotlib . pyplot as plt <EOL> import seaborn as sns <EOL> import pandas as pd <EOL> import tableviewer <EOL> from datamodel import DataModel <EOL> from imagecontrolpanel import ImageControlPanel <EOL> from properties import Properties <EOL> from scoredialog import ScoreDialog <EOL> import tilecollection <EOL> from trainingset import TrainingSet <EOL> from cStringIO import StringIO <EOL> from time import time <EOL> import icons <EOL> import dbconnect <EOL> import dirichletintegrate <EOL> import imagetools <EOL> import polyafit <EOL> import sortbin <EOL> import logging <EOL> import numpy as np <EOL> import os <EOL> import wx <EOL> import re <EOL> import cpa . helpmenu <EOL> from imageviewer import ImageViewer <EOL> import fastgentleboostingmulticlass <EOL> from fastgentleboosting import FastGentleBoosting <EOL> from generalclassifier import GeneralClassifier <EOL> MAX_ATTEMPTS = <NUM_LIT> <EOL> ID_IMAGE_GALLERY = wx . NewId ( ) <EOL> CREATE_NEW_FILTER = '<STR_LIT>' <EOL> class ImageGallery ( wx . Frame ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , properties = None , parent = None , id = ID_IMAGE_GALLERY , ** kwargs ) : <EOL> if properties is not None : <EOL> global p <EOL> p = properties <EOL> global db <EOL> db = dbconnect . DBConnect . getInstance ( ) <EOL> wx . Frame . __init__ ( self , parent , id = id , title = '<STR_LIT>' % ( os . path . basename ( p . _filename ) ) , size = ( <NUM_LIT> , <NUM_LIT> ) , ** kwargs ) <EOL> if parent is None and not sys . platform . startswith ( '<STR_LIT>' ) : <EOL> self . tbicon = wx . TaskBarIcon ( ) <EOL> self . tbicon . SetIcon ( icons . get_cpa_icon ( ) , '<STR_LIT>' ) <EOL> else : <EOL> self . SetIcon ( icons . get_cpa_icon ( ) ) <EOL> self . SetName ( '<STR_LIT>' ) <EOL> db . register_gui_parent ( self ) <EOL> global dm <EOL> dm = DataModel . getInstance ( ) <EOL> if not p . 
is_initialized ( ) : <EOL> logging . critical ( '<STR_LIT>' ) <EOL> raise Exception ( '<STR_LIT>' ) <EOL> self . pmb = None <EOL> self . worker = None <EOL> self . trainingSet = None <EOL> self . classBins = [ ] <EOL> self . binsCreated = <NUM_LIT:0> <EOL> self . chMap = p . image_channel_colors [ : ] <EOL> self . toggleChMap = p . image_channel_colors [ <EOL> : ] <EOL> self . brightness = <NUM_LIT:1.0> <EOL> self . scale = <NUM_LIT:1.0> <EOL> self . contrast = '<STR_LIT>' <EOL> self . defaultTSFileName = None <EOL> self . defaultModelFileName = None <EOL> self . lastScoringFilter = None <EOL> self . menuBar = wx . MenuBar ( ) <EOL> self . SetMenuBar ( self . menuBar ) <EOL> self . CreateMenus ( ) <EOL> self . CreateStatusBar ( ) <EOL> self . splitter = wx . SplitterWindow ( self , style = wx . NO_BORDER | wx . SP_3DSASH ) <EOL> self . fetch_and_rules_panel = wx . Panel ( self . splitter ) <EOL> self . bins_splitter = wx . SplitterWindow ( self . splitter , style = wx . NO_BORDER | wx . SP_3DSASH ) <EOL> self . fetch_panel = wx . Panel ( self . fetch_and_rules_panel ) <EOL> self . find_rules_panel = wx . Panel ( self . fetch_and_rules_panel ) <EOL> self . gallery_panel = wx . Panel ( self . bins_splitter ) <EOL> o_label = p . object_name [ <NUM_LIT:0> ] if p . classification_type == '<STR_LIT:image>' else '<STR_LIT>' + '<STR_LIT>' <EOL> self . gallery_box = wx . StaticBox ( self . gallery_panel , label = o_label ) <EOL> self . gallery_sizer = wx . StaticBoxSizer ( self . gallery_box , wx . VERTICAL ) <EOL> self . galleryBin = sortbin . SortBin ( parent = self . gallery_panel , <EOL> classifier = self , <EOL> label = '<STR_LIT>' , <EOL> parentSizer = self . gallery_sizer ) <EOL> self . gallery_sizer . Add ( self . galleryBin , proportion = <NUM_LIT:1> , flag = wx . EXPAND ) <EOL> self . gallery_panel . SetSizer ( self . gallery_sizer ) <EOL> self . objects_bin_panel = wx . Panel ( self . bins_splitter ) <EOL> self . startId = wx . TextCtrl ( self . 
fetch_panel , id = - <NUM_LIT:1> , value = '<STR_LIT:1>' , size = ( <NUM_LIT> , - <NUM_LIT:1> ) , style = wx . TE_PROCESS_ENTER ) <EOL> self . endId = wx . TextCtrl ( self . fetch_panel , id = - <NUM_LIT:1> , value = '<STR_LIT:100>' , size = ( <NUM_LIT> , - <NUM_LIT:1> ) , style = wx . TE_PROCESS_ENTER ) <EOL> self . fetchChoice = wx . Choice ( self . fetch_panel , id = - <NUM_LIT:1> , choices = [ '<STR_LIT>' , '<STR_LIT:all>' , '<STR_LIT>' ] ) <EOL> self . fetchChoice . SetSelection ( <NUM_LIT:0> ) <EOL> self . filterChoice = wx . Choice ( self . fetch_panel , id = - <NUM_LIT:1> , <EOL> choices = [ '<STR_LIT>' ] + p . _filters_ordered + p . _groups_ordered + [ <EOL> CREATE_NEW_FILTER ] ) <EOL> self . fetchFromGroupSizer = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> self . fetchBtn = wx . Button ( self . fetch_panel , - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> self . fetchSizer = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> self . find_rules_sizer = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> self . fetch_and_rules_sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> self . classified_bins_sizer = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> self . fetchSizer . AddStretchSpacer ( ) <EOL> self . fetchSizer . Add ( wx . StaticText ( self . fetch_panel , - <NUM_LIT:1> , '<STR_LIT>' ) , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( self . fetchChoice , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchTxt = wx . StaticText ( self . fetch_panel , - <NUM_LIT:1> , label = '<STR_LIT>' ) <EOL> self . fetchSizer . Add ( self . fetchTxt , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( self . startId , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchTxt2 = wx . 
StaticText ( self . fetch_panel , - <NUM_LIT:1> , label = '<STR_LIT:to>' ) <EOL> self . fetchSizer . Add ( self . fetchTxt2 , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( self . endId , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchTxt3 = wx . StaticText ( self . fetch_panel , - <NUM_LIT:1> , label = '<STR_LIT>' ) <EOL> self . fetchSizer . Add ( self . fetchTxt3 , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( wx . StaticText ( self . fetch_panel , - <NUM_LIT:1> , '<STR_LIT>' ) , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( self . filterChoice , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:10> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( self . fetchFromGroupSizer , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddSpacer ( ( <NUM_LIT:5> , <NUM_LIT:20> ) ) <EOL> self . fetchSizer . Add ( self . fetchBtn , flag = wx . ALIGN_CENTER_VERTICAL ) <EOL> self . fetchSizer . AddStretchSpacer ( ) <EOL> self . fetch_panel . SetSizerAndFit ( self . fetchSizer ) <EOL> self . fetch_and_rules_sizer . Add ( ( <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> self . fetch_and_rules_sizer . Add ( self . fetch_panel , flag = wx . EXPAND ) <EOL> self . fetch_and_rules_sizer . Add ( ( <NUM_LIT:5> , <NUM_LIT:5> ) ) <EOL> self . fetch_and_rules_panel . SetSizerAndFit ( self . fetch_and_rules_sizer ) <EOL> self . objects_bin_panel . SetSizer ( self . classified_bins_sizer ) <EOL> self . splitter . SplitHorizontally ( self . fetch_and_rules_panel , self . bins_splitter , <EOL> self . fetch_and_rules_panel . GetMinSize ( ) [ <NUM_LIT:1> ] ) <EOL> self . bins_splitter . 
SplitHorizontally ( self . gallery_panel , self . objects_bin_panel ) <EOL> self . splitter . SetSashGravity ( <NUM_LIT:0.0> ) <EOL> self . bins_splitter . SetSashGravity ( <NUM_LIT:0.5> ) <EOL> self . splitter . SetMinimumPaneSize ( max ( <NUM_LIT:50> , self . fetch_and_rules_panel . GetMinHeight ( ) ) ) <EOL> self . bins_splitter . SetMinimumPaneSize ( <NUM_LIT:50> ) <EOL> self . SetMinSize ( ( self . fetch_and_rules_panel . GetMinWidth ( ) , <NUM_LIT:4> * <NUM_LIT:50> + self . fetch_and_rules_panel . GetMinHeight ( ) ) ) <EOL> self . filterChoice . SetSelection ( <NUM_LIT:0> ) <EOL> self . fetchSizer . Hide ( self . fetchFromGroupSizer ) <EOL> self . AddSortClass ( '<STR_LIT>' ) <EOL> self . Layout ( ) <EOL> self . Center ( ) <EOL> self . MapChannels ( p . image_channel_colors [ : ] ) <EOL> self . BindMouseOverHelpText ( ) <EOL> self . Bind ( wx . EVT_BUTTON , self . OnFetch , self . fetchBtn ) <EOL> self . startId . Bind ( wx . EVT_TEXT , self . ValidateIntegerField ) <EOL> self . startId . Bind ( wx . EVT_TEXT_ENTER , self . OnFetch ) <EOL> self . Bind ( wx . EVT_CLOSE , self . OnClose ) <EOL> self . Bind ( wx . EVT_CHAR , self . OnKey ) <EOL> tilecollection . EVT_TILE_UPDATED ( self , self . OnTileUpdated ) <EOL> self . Bind ( sortbin . EVT_QUANTITY_CHANGED , self . QuantityChanged ) <EOL> self . Bind ( wx . EVT_CHOICE , self . OnSelectFetchChoice , self . fetchChoice ) <EOL> self . Bind ( wx . EVT_CHOICE , self . OnSelectFilter , self . filterChoice ) <EOL> def BindMouseOverHelpText ( self ) : <EOL> self . startId . SetToolTip ( wx . ToolTip ( '<STR_LIT>' % ( p . object_name [ <NUM_LIT:1> ] ) ) ) <EOL> self . filterChoice . SetToolTip ( wx . ToolTip ( <EOL> '<STR_LIT>' % ( <EOL> p . object_name [ <NUM_LIT:1> ] ) ) ) <EOL> self . filterChoice . GetToolTip ( ) . SetDelay ( <NUM_LIT> ) <EOL> self . fetchBtn . SetToolTip ( wx . ToolTip ( '<STR_LIT>' % ( p . object_name [ <NUM_LIT:1> ] ) ) ) <EOL> self . galleryBin . SetToolTip ( <EOL> wx . 
ToolTip ( '<STR_LIT>' % ( p . object_name [ <NUM_LIT:1> ] . capitalize ( ) ) ) ) <EOL> def OnKey ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> keycode = evt . GetKeyCode ( ) <EOL> chIdx = keycode - <NUM_LIT> <EOL> if evt . ControlDown ( ) or evt . CmdDown ( ) : <EOL> if len ( self . chMap ) > chIdx >= <NUM_LIT:0> : <EOL> self . ToggleChannel ( chIdx ) <EOL> else : <EOL> evt . Skip ( ) <EOL> else : <EOL> evt . Skip ( ) <EOL> def ToggleChannel ( self , chIdx ) : <EOL> if self . chMap [ chIdx ] == '<STR_LIT:None>' : <EOL> for ( idx , color , item , menu ) in self . chMapById . values ( ) : <EOL> if idx == chIdx and color . lower ( ) == self . toggleChMap [ chIdx ] . lower ( ) : <EOL> item . Check ( ) <EOL> self . chMap [ chIdx ] = self . toggleChMap [ chIdx ] <EOL> self . MapChannels ( self . chMap ) <EOL> else : <EOL> for ( idx , color , item , menu ) in self . chMapById . values ( ) : <EOL> if idx == chIdx and color . lower ( ) == '<STR_LIT:none>' : <EOL> item . Check ( ) <EOL> self . chMap [ chIdx ] = '<STR_LIT:None>' <EOL> self . MapChannels ( self . chMap ) <EOL> def CreateMenus ( self ) : <EOL> '''<STR_LIT>''' <EOL> viewMenu = wx . Menu ( ) <EOL> self . fileMenu = wx . Menu ( ) <EOL> loadMenuItem = self . fileMenu . Append ( - <NUM_LIT:1> , text = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> exitMenuItem = self . fileMenu . Append ( id = wx . ID_EXIT , text = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> self . GetMenuBar ( ) . Append ( self . fileMenu , '<STR_LIT>' ) <EOL> imageControlsMenuItem = viewMenu . Append ( - <NUM_LIT:1> , text = '<STR_LIT>' , <EOL> help = '<STR_LIT>' ) <EOL> self . GetMenuBar ( ) . Append ( viewMenu , '<STR_LIT>' ) <EOL> self . CreateChannelMenus ( ) <EOL> advancedMenu = wx . Menu ( ) <EOL> fetchObjMenuItem = advancedMenu . Append ( - <NUM_LIT:1> , text = u'<STR_LIT>' , help = '<STR_LIT>' ) <EOL> fetchAllObjMenuItem = advancedMenu . Append ( - <NUM_LIT:1> , text = u'<STR_LIT>' , help = '<STR_LIT>' ) <EOL> saveImgMenuItem = advancedMenu . 
Append ( - <NUM_LIT:1> , text = u'<STR_LIT>' , help = '<STR_LIT>' ) <EOL> saveObjMenuItem = advancedMenu . Append ( - <NUM_LIT:1> , text = u'<STR_LIT>' , help = '<STR_LIT>' ) <EOL> self . GetMenuBar ( ) . Append ( advancedMenu , '<STR_LIT>' ) <EOL> self . GetMenuBar ( ) . Append ( cpa . helpmenu . make_help_menu ( self ) , '<STR_LIT>' ) <EOL> self . Bind ( wx . EVT_MENU , self . OnShowImageControls , imageControlsMenuItem ) <EOL> self . Bind ( wx . EVT_MENU , self . OnFetchObjThumbnails , fetchObjMenuItem ) <EOL> self . Bind ( wx . EVT_MENU , self . OnFetchAllObjThumbnails , fetchAllObjMenuItem ) <EOL> self . Bind ( wx . EVT_MENU , self . OnSaveImgThumbnails , saveImgMenuItem ) <EOL> self . Bind ( wx . EVT_MENU , self . OnSaveObjThumbnails , saveObjMenuItem ) <EOL> self . Bind ( wx . EVT_MENU , self . OnClose , exitMenuItem ) <EOL> self . Bind ( wx . EVT_CLOSE , self . OnClose ) <EOL> self . Bind ( wx . EVT_MENU , self . OnLoadImageSet , loadMenuItem ) <EOL> def CreateChannelMenus ( self ) : <EOL> '''<STR_LIT>''' <EOL> try : <EOL> menus = set ( [ items [ <NUM_LIT:2> ] . Menu for items in self . chMapById . values ( ) ] ) <EOL> for menu in menus : <EOL> for i , mbmenu in enumerate ( self . MenuBar . Menus ) : <EOL> if mbmenu [ <NUM_LIT:0> ] == menu : <EOL> self . MenuBar . Remove ( i ) <EOL> for menu in menus : <EOL> menu . Destroy ( ) <EOL> if '<STR_LIT>' in self . __dict__ : <EOL> self . MenuBar . Remove ( self . MenuBar . FindMenu ( '<STR_LIT>' ) ) <EOL> self . imagesMenu . Destroy ( ) <EOL> except : <EOL> pass <EOL> self . imagesMenu = wx . Menu ( ) <EOL> chIndex = <NUM_LIT:0> <EOL> self . chMapById = { } <EOL> self . imMapById = { } <EOL> channel_names = [ ] <EOL> startIndex = <NUM_LIT:0> <EOL> channelIds = [ ] <EOL> for i , chans in enumerate ( p . channels_per_image ) : <EOL> chans = int ( chans ) <EOL> name = p . 
image_names [ i ] <EOL> if chans == <NUM_LIT:1> : <EOL> channel_names += [ name ] <EOL> elif chans == <NUM_LIT:3> : <EOL> channel_names += [ '<STR_LIT>' % ( name , x ) for x in '<STR_LIT>' ] <EOL> elif chans == <NUM_LIT:4> : <EOL> channel_names += [ '<STR_LIT>' % ( name , x ) for x in '<STR_LIT>' ] <EOL> else : <EOL> channel_names += [ '<STR_LIT>' % ( name , x + <NUM_LIT:1> ) for x in range ( chans ) ] <EOL> zippedChNamesChMap = zip ( channel_names , self . chMap ) <EOL> for i , chans in enumerate ( p . image_names ) : <EOL> channelIds = [ ] <EOL> for j in range ( <NUM_LIT:0> , int ( p . channels_per_image [ i ] ) ) : <EOL> ( channel , setColor ) = zippedChNamesChMap [ chIndex ] <EOL> channel_menu = wx . Menu ( ) <EOL> for color in [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT:None>' ] : <EOL> id = wx . NewId ( ) <EOL> item = channel_menu . AppendRadioItem ( id , color ) <EOL> self . chMapById [ id ] = ( chIndex , color , item , channel_menu ) <EOL> if color . lower ( ) == setColor . lower ( ) : <EOL> item . Check ( ) <EOL> self . Bind ( wx . EVT_MENU , self . OnMapChannels , item ) <EOL> if ( ( int ( p . channels_per_image [ i ] ) == <NUM_LIT:1> and color == '<STR_LIT>' ) or <EOL> ( int ( p . channels_per_image [ i ] ) > <NUM_LIT:1> and j == <NUM_LIT:0> and color == '<STR_LIT>' ) or <EOL> ( int ( p . channels_per_image [ i ] ) > <NUM_LIT:1> and j == <NUM_LIT:2> and color == '<STR_LIT>' ) or <EOL> ( int ( p . channels_per_image [ i ] ) > <NUM_LIT:1> and j == <NUM_LIT:1> and color == '<STR_LIT>' ) ) : <EOL> channelIds = channelIds + [ id ] <EOL> self . GetMenuBar ( ) . Append ( channel_menu , channel ) <EOL> chIndex += <NUM_LIT:1> <EOL> id = wx . NewId ( ) <EOL> item = self . imagesMenu . AppendRadioItem ( id , p . image_names [ i ] ) <EOL> self . imMapById [ id ] = ( int ( p . channels_per_image [ i ] ) , item , startIndex , channelIds ) <EOL> self . Bind ( wx . EVT_MENU , self . 
OnFetchImage , item ) <EOL> startIndex += int ( p . channels_per_image [ i ] ) <EOL> id = wx . NewId ( ) <EOL> item = self . imagesMenu . AppendRadioItem ( id , '<STR_LIT:None>' ) <EOL> self . Bind ( wx . EVT_MENU , self . OnFetchImage , item ) <EOL> item . Check ( ) <EOL> self . GetMenuBar ( ) . Append ( self . imagesMenu , '<STR_LIT>' ) <EOL> def OnFetchImage ( self , evt = None ) : <EOL> for ids in self . chMapById . keys ( ) : <EOL> ( chIndex , color , item , channel_menu ) = self . chMapById [ ids ] <EOL> if ( color . lower ( ) == '<STR_LIT:none>' ) : <EOL> item . Check ( ) <EOL> for ids in self . imMapById . keys ( ) : <EOL> ( cpi , itm , si , channelIds ) = self . imMapById [ ids ] <EOL> if cpi == <NUM_LIT:3> : <EOL> self . chMap [ si ] = '<STR_LIT:none>' <EOL> self . chMap [ si + <NUM_LIT:1> ] = '<STR_LIT:none>' <EOL> self . chMap [ si + <NUM_LIT:2> ] = '<STR_LIT:none>' <EOL> self . toggleChMap [ si ] = '<STR_LIT:none>' <EOL> self . toggleChMap [ si + <NUM_LIT:1> ] = '<STR_LIT:none>' <EOL> self . toggleChMap [ si + <NUM_LIT:2> ] = '<STR_LIT:none>' <EOL> else : <EOL> self . chMap [ si ] = '<STR_LIT:none>' <EOL> self . toggleChMap [ si ] = '<STR_LIT:none>' <EOL> if evt . GetId ( ) in self . imMapById . keys ( ) : <EOL> ( chanPerIm , item , startIndex , channelIds ) = self . imMapById [ evt . GetId ( ) ] <EOL> if chanPerIm == <NUM_LIT:1> : <EOL> self . chMap [ startIndex ] = '<STR_LIT>' <EOL> self . toggleChMap [ startIndex ] = '<STR_LIT>' <EOL> ( chIndex , color , item , channel_menu ) = self . chMapById [ channelIds [ <NUM_LIT:0> ] ] <EOL> item . Check ( ) <EOL> else : <EOL> RGB = [ '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ] + [ '<STR_LIT:none>' ] * chanPerIm <EOL> for i in range ( chanPerIm ) : <EOL> self . chMap [ startIndex + i ] = RGB [ i ] <EOL> self . toggleChMap [ startIndex + i ] = RGB [ i ] <EOL> ( chIndex , color , item , channel_menu ) = self . chMapById [ channelIds [ i ] ] <EOL> item . Check ( ) <EOL> self . MapChannels ( self . 
chMap ) <EOL> def OnLoadImageSet ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> dlg = wx . FileDialog ( self , "<STR_LIT>" , <EOL> defaultDir = os . getcwd ( ) , <EOL> wildcard = '<STR_LIT>' , <EOL> style = wx . OPEN | wx . FD_CHANGE_DIR ) <EOL> if dlg . ShowModal ( ) == wx . ID_OK : <EOL> filename = dlg . GetPath ( ) <EOL> name , file_extension = os . path . splitext ( filename ) <EOL> if '<STR_LIT>' == file_extension : <EOL> self . LoadImageSet ( filename ) <EOL> else : <EOL> logging . error ( "<STR_LIT>" ) <EOL> def LoadImageSet ( self , filename ) : <EOL> '''<STR_LIT>''' <EOL> with tilecollection . load_lock ( ) : <EOL> self . PostMessage ( '<STR_LIT>' % filename ) <EOL> import pandas as pd <EOL> df = pd . read_csv ( filename ) <EOL> def cb ( ) : <EOL> keys = [ tuple ( [ k , - <NUM_LIT:1> ] ) for k in df [ '<STR_LIT>' ] ] <EOL> self . galleryBin . AddObjects ( keys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> self . PostMessage ( '<STR_LIT>' ) <EOL> if df . shape [ <NUM_LIT:0> ] > <NUM_LIT:100> : <EOL> dlg = wx . MessageDialog ( self , <EOL> '<STR_LIT>' % ( <EOL> df . shape [ <NUM_LIT:0> ] ) , <EOL> '<STR_LIT>' , wx . YES_NO | wx . ICON_QUESTION ) <EOL> response = dlg . ShowModal ( ) <EOL> if response == wx . ID_YES : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> wx . CallAfter ( cb ) <EOL> else : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> wx . CallAfter ( cb ) <EOL> def OnFetch ( self , evt ) : <EOL> start = int ( self . startId . Value ) <EOL> end = int ( self . endId . Value ) <EOL> fltr_sel = self . filterChoice . GetStringSelection ( ) <EOL> fetch_sel = self . fetchChoice . 
GetStringSelection ( ) <EOL> statusMsg = '<STR_LIT>' % ( start , end ) <EOL> def flatten ( * args ) : <EOL> for x in args : <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> for y in flatten ( * x ) : <EOL> yield y <EOL> else : <EOL> yield x <EOL> if fetch_sel == '<STR_LIT:all>' : <EOL> if fltr_sel == '<STR_LIT>' : <EOL> self . FetchAll ( ) <EOL> return <EOL> elif fltr_sel in p . _filters_ordered : <EOL> imKeys = db . GetFilteredImages ( fltr_sel ) <EOL> if imKeys == [ ] : <EOL> self . PostMessage ( '<STR_LIT>' % ( fltr_sel ) ) <EOL> return <EOL> if len ( imKeys ) > <NUM_LIT:100> : <EOL> dlg = wx . MessageDialog ( self , <EOL> '<STR_LIT>' % ( <EOL> len ( imKeys ) ) , <EOL> '<STR_LIT>' , wx . YES_NO | wx . ICON_QUESTION ) <EOL> response = dlg . ShowModal ( ) <EOL> if response == wx . ID_YES : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> filteredImKeys = db . GetFilteredImages ( fltr_sel ) <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , filteredImKeys ) <EOL> self . galleryBin . AddObjects ( imKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> wx . CallAfter ( cb ) <EOL> statusMsg += '<STR_LIT>' % ( fltr_sel ) <EOL> else : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> filteredImKeys = db . GetFilteredImages ( fltr_sel ) <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , filteredImKeys ) <EOL> self . galleryBin . AddObjects ( imKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> wx . CallAfter ( cb ) <EOL> statusMsg += '<STR_LIT>' % ( fltr_sel ) <EOL> elif fltr_sel in p . _groups_ordered : <EOL> groupName = fltr_sel <EOL> groupKey = self . GetGroupKeyFromGroupSizer ( groupName ) <EOL> imKeys = dm . GetImagesInGroupWithWildcards ( groupName , groupKey ) <EOL> colNames = dm . 
GetGroupColumnNames ( groupName ) <EOL> if imKeys == [ ] : <EOL> self . PostMessage ( '<STR_LIT>' % ( groupName , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( n , v ) for n , v in <EOL> zip ( colNames , groupKey ) ] ) ) ) <EOL> return <EOL> if len ( imKeys ) > <NUM_LIT:100> : <EOL> dlg = wx . MessageDialog ( self , <EOL> '<STR_LIT>' % ( <EOL> len ( imKeys ) ) , <EOL> '<STR_LIT>' , wx . YES_NO | wx . ICON_QUESTION ) <EOL> response = dlg . ShowModal ( ) <EOL> if response == wx . ID_YES : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> groupName = fltr_sel <EOL> groupKey = self . GetGroupKeyFromGroupSizer ( groupName ) <EOL> filteredImKeys = dm . GetImagesInGroupWithWildcards ( groupName , groupKey ) <EOL> colNames = dm . GetGroupColumnNames ( groupName ) <EOL> def cb ( ) : <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , filteredImKeys ) <EOL> self . galleryBin . AddObjects ( imKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> statusMsg += '<STR_LIT>' % ( groupName , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( n , v ) for n , v in zip ( colNames , groupKey ) ] ) ) <EOL> wx . CallAfter ( cb ) <EOL> else : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> groupName = fltr_sel <EOL> groupKey = self . GetGroupKeyFromGroupSizer ( groupName ) <EOL> filteredImKeys = dm . GetImagesInGroupWithWildcards ( groupName , groupKey ) <EOL> colNames = dm . GetGroupColumnNames ( groupName ) <EOL> def cb ( ) : <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , filteredImKeys ) <EOL> self . galleryBin . AddObjects ( imKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> statusMsg += '<STR_LIT>' % ( groupName , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( n , v ) for n , v in zip ( colNames , groupKey ) ] ) ) <EOL> wx . 
CallAfter ( cb ) <EOL> elif fetch_sel == '<STR_LIT>' : <EOL> if p . table_id : <EOL> imgKey = [ ( start , end , - <NUM_LIT:1> ) ] <EOL> else : <EOL> imgKey = [ ( end , - <NUM_LIT:1> ) ] <EOL> self . galleryBin . AddObjects ( imgKey , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> return <EOL> elif fltr_sel == '<STR_LIT>' : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> imKeys = db . GetAllImageKeys ( ) <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , imKeys ) <EOL> self . galleryBin . AddObjects ( imKeys [ ( start - <NUM_LIT:1> ) : end ] , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> wx . CallAfter ( cb ) <EOL> statusMsg += '<STR_LIT>' <EOL> elif fltr_sel in p . _filters_ordered : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> filteredImKeys = db . GetFilteredImages ( fltr_sel ) <EOL> if filteredImKeys == [ ] : <EOL> self . PostMessage ( '<STR_LIT>' % ( fltr_sel ) ) <EOL> return <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , filteredImKeys ) <EOL> self . galleryBin . AddObjects ( imKeys [ ( start - <NUM_LIT:1> ) : end ] , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> wx . CallAfter ( cb ) <EOL> statusMsg += '<STR_LIT>' % ( fltr_sel ) <EOL> elif fltr_sel in p . _groups_ordered : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> groupName = fltr_sel <EOL> groupKey = self . GetGroupKeyFromGroupSizer ( groupName ) <EOL> filteredImKeys = dm . GetImagesInGroupWithWildcards ( groupName , groupKey ) <EOL> colNames = dm . GetGroupColumnNames ( groupName ) <EOL> def cb ( ) : <EOL> if filteredImKeys == [ ] : <EOL> self . PostMessage ( '<STR_LIT>' % ( groupName , <EOL> '<STR_LIT:U+002CU+0020>' . 
join ( [ '<STR_LIT>' % ( n , v ) for n , v in <EOL> zip ( colNames , groupKey ) ] ) ) ) <EOL> return <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , filteredImKeys ) <EOL> self . galleryBin . AddObjects ( imKeys [ ( start - <NUM_LIT:1> ) : end ] , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> statusMsg += '<STR_LIT>' % ( groupName , <EOL> '<STR_LIT:U+002CU+0020>' . join ( [ '<STR_LIT>' % ( n , v ) for n , v in zip ( colNames , groupKey ) ] ) ) <EOL> wx . CallAfter ( cb ) <EOL> self . PostMessage ( statusMsg ) <EOL> def FetchAll ( self ) : <EOL> def flatten ( * args ) : <EOL> for x in args : <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> for y in flatten ( * x ) : <EOL> yield y <EOL> else : <EOL> yield x <EOL> imKeys = db . GetAllImageKeys ( ) <EOL> if len ( imKeys ) > <NUM_LIT:200> : <EOL> dlg = wx . MessageDialog ( self , <EOL> '<STR_LIT>' % ( <EOL> len ( imKeys ) ) , <EOL> '<STR_LIT>' , wx . YES_NO | wx . ICON_QUESTION ) <EOL> response = dlg . ShowModal ( ) <EOL> if response == wx . ID_YES : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> imKeys = db . GetAllImageKeys ( ) <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , imKeys ) <EOL> self . galleryBin . AddObjects ( imKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> self . PostMessage ( "<STR_LIT>" ) <EOL> wx . CallAfter ( cb ) <EOL> else : <EOL> self . galleryBin . SelectAll ( ) <EOL> self . galleryBin . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> imKeys = db . GetAllImageKeys ( ) <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , imKeys ) <EOL> self . galleryBin . AddObjects ( imKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = True ) <EOL> self . PostMessage ( "<STR_LIT>" ) <EOL> wx . 
CallAfter ( cb ) <EOL> def AddSortClass ( self , label ) : <EOL> '''<STR_LIT>''' <EOL> bin = sortbin . SortBin ( parent = self . objects_bin_panel , label = label , <EOL> classifier = self ) <EOL> box = wx . StaticBox ( self . objects_bin_panel , label = label ) <EOL> sizer = wx . StaticBoxSizer ( box , wx . VERTICAL ) <EOL> bin . parentSizer = sizer <EOL> sizer . Add ( bin , proportion = <NUM_LIT:1> , flag = wx . EXPAND ) <EOL> self . classified_bins_sizer . Add ( sizer , proportion = <NUM_LIT:1> , flag = wx . EXPAND ) <EOL> self . classBins . append ( bin ) <EOL> self . objects_bin_panel . Layout ( ) <EOL> self . binsCreated += <NUM_LIT:1> <EOL> self . QuantityChanged ( ) <EOL> box . Lower ( ) <EOL> def RemoveSortClass ( self , label , clearModel = True ) : <EOL> for bin in self . classBins : <EOL> if bin . label == label : <EOL> self . classBins . remove ( bin ) <EOL> self . classified_bins_sizer . Remove ( bin . parentSizer ) <EOL> wx . CallAfter ( bin . Destroy ) <EOL> self . objects_bin_panel . Layout ( ) <EOL> break <EOL> for bin in self . classBins : <EOL> bin . trained = False <EOL> self . UpdateClassChoices ( ) <EOL> self . QuantityChanged ( ) <EOL> def RemoveAllSortClasses ( self , clearModel = True ) : <EOL> for label in [ bin . label for bin in self . classBins ] : <EOL> self . RemoveSortClass ( label , clearModel ) <EOL> def RenameClass ( self , label ) : <EOL> dlg = wx . TextEntryDialog ( self , '<STR_LIT>' , '<STR_LIT>' ) <EOL> dlg . SetValue ( label ) <EOL> if dlg . ShowModal ( ) == wx . ID_OK : <EOL> newLabel = dlg . GetValue ( ) <EOL> if newLabel != label and newLabel in [ bin . label for bin in self . classBins ] : <EOL> errdlg = wx . MessageDialog ( self , '<STR_LIT>' , "<STR_LIT>" , <EOL> wx . OK | wx . ICON_EXCLAMATION ) <EOL> if errdlg . ShowModal ( ) == wx . ID_OK : <EOL> return self . RenameClass ( label ) <EOL> if '<STR_LIT:U+0020>' in newLabel : <EOL> errdlg = wx . MessageDialog ( self , '<STR_LIT>' , "<STR_LIT>" , <EOL> wx . OK | wx . 
ICON_EXCLAMATION ) <EOL> if errdlg . ShowModal ( ) == wx . ID_OK : <EOL> return self . RenameClass ( label ) <EOL> for bin in self . classBins : <EOL> if bin . label == label : <EOL> bin . label = newLabel <EOL> bin . UpdateQuantity ( ) <EOL> break <EOL> dlg . Destroy ( ) <EOL> for i in xrange ( len ( updatedList ) ) : <EOL> if updatedList [ i ] == label : <EOL> updatedList [ i ] = newLabel <EOL> return wx . ID_OK <EOL> return wx . ID_CANCEL <EOL> def all_sort_bins ( self ) : <EOL> return [ self . galleryBin ] + self . classBins <EOL> def QuantityChanged ( self , evt = None ) : <EOL> pass <EOL> def OnTileUpdated ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> self . galleryBin . UpdateTile ( evt . data ) <EOL> for bin in self . classBins : <EOL> bin . UpdateTile ( evt . data ) <EOL> def OnAddSortClass ( self , evt ) : <EOL> label = '<STR_LIT>' + str ( self . binsCreated ) <EOL> self . AddSortClass ( label ) <EOL> if self . RenameClass ( label ) == wx . ID_CANCEL : <EOL> self . RemoveSortClass ( label ) <EOL> def OnMapChannels ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> ( chIdx , color , item , menu ) = self . chMapById [ evt . GetId ( ) ] <EOL> item . Check ( ) <EOL> self . chMap [ chIdx ] = color . lower ( ) <EOL> if color . lower ( ) != '<STR_LIT:none>' : <EOL> self . toggleChMap [ chIdx ] = color . lower ( ) <EOL> self . MapChannels ( self . chMap ) <EOL> def MapChannels ( self , chMap ) : <EOL> '''<STR_LIT>''' <EOL> self . chMap = chMap <EOL> for bin in self . all_sort_bins ( ) : <EOL> bin . MapChannels ( chMap ) <EOL> def ValidateImageKey ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> txtCtrl = evt . GetEventObject ( ) <EOL> try : <EOL> if p . table_id : <EOL> imKey = ( int ( self . tableTxt . Value ) , int ( self . imageTxt . Value ) ) <EOL> else : <EOL> imKey = ( int ( self . imageTxt . Value ) , ) <EOL> if dm . GetObjectCountFromImage ( imKey ) > <NUM_LIT:0> : <EOL> txtCtrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> self . SetStatusText ( '<STR_LIT>' % ( dm . 
GetObjectCountFromImage ( imKey ) , p . object_name [ <NUM_LIT:1> ] ) ) <EOL> else : <EOL> txtCtrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> self . SetStatusText ( '<STR_LIT>' % ( p . object_name [ <NUM_LIT:1> ] ) ) <EOL> except ( Exception ) : <EOL> txtCtrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> self . SetStatusText ( '<STR_LIT>' ) <EOL> def OnSelectFetchChoice ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> fetchChoice = self . fetchChoice . GetStringSelection ( ) <EOL> if fetchChoice == '<STR_LIT>' : <EOL> self . fetchTxt . SetLabel ( '<STR_LIT>' ) <EOL> self . fetchTxt2 . SetLabel ( '<STR_LIT:to>' ) <EOL> self . fetchTxt2 . Show ( ) <EOL> self . fetchTxt3 . SetLabel ( '<STR_LIT>' ) <EOL> self . fetchTxt3 . Show ( ) <EOL> self . startId . Show ( ) <EOL> self . endId . Show ( ) <EOL> self . filterChoice . Enable ( ) <EOL> self . fetch_panel . SetSizerAndFit ( self . fetchSizer ) <EOL> self . fetch_and_rules_panel . SetSizerAndFit ( self . fetch_and_rules_sizer ) <EOL> elif fetchChoice == '<STR_LIT:all>' : <EOL> self . fetchTxt . SetLabel ( '<STR_LIT>' ) <EOL> self . fetchTxt2 . Hide ( ) <EOL> self . fetchTxt3 . SetLabel ( '<STR_LIT>' ) <EOL> self . fetchTxt3 . Show ( ) <EOL> self . startId . Hide ( ) <EOL> self . endId . Hide ( ) <EOL> self . filterChoice . Enable ( ) <EOL> self . fetch_panel . SetSizerAndFit ( self . fetchSizer ) <EOL> self . fetch_and_rules_panel . SetSizerAndFit ( self . fetch_and_rules_sizer ) <EOL> elif fetchChoice == '<STR_LIT>' : <EOL> self . fetchTxt . SetLabel ( '<STR_LIT>' ) <EOL> if p . table_id : <EOL> self . startId . Show ( ) <EOL> else : <EOL> self . startId . Hide ( ) <EOL> self . endId . Show ( ) <EOL> self . fetchTxt2 . Hide ( ) <EOL> self . fetchTxt3 . Hide ( ) <EOL> self . filterChoice . Disable ( ) <EOL> self . fetch_panel . SetSizerAndFit ( self . fetchSizer ) <EOL> self . fetch_and_rules_panel . SetSizerAndFit ( self . 
fetch_and_rules_sizer ) <EOL> def OnSelectFilter ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> filter = self . filterChoice . GetStringSelection ( ) <EOL> if filter == '<STR_LIT>' or filter in p . _filters_ordered : <EOL> self . fetchSizer . Hide ( self . fetchFromGroupSizer , True ) <EOL> elif filter == '<STR_LIT:image>' or filter in p . _groups_ordered : <EOL> self . SetupFetchFromGroupSizer ( filter ) <EOL> self . fetchSizer . Show ( self . fetchFromGroupSizer , True ) <EOL> elif filter == CREATE_NEW_FILTER : <EOL> self . fetchSizer . Hide ( self . fetchFromGroupSizer , True ) <EOL> from columnfilter import ColumnFilterDialog <EOL> cff = ColumnFilterDialog ( self , tables = [ p . image_table ] , size = ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> if cff . ShowModal ( ) == wx . OK : <EOL> fltr = cff . get_filter ( ) <EOL> fname = cff . get_filter_name ( ) <EOL> p . _filters [ fname ] = fltr <EOL> items = self . filterChoice . GetItems ( ) <EOL> self . filterChoice . SetItems ( items [ : - <NUM_LIT:1> ] + [ fname ] + items [ - <NUM_LIT:1> : ] ) <EOL> self . filterChoice . Select ( len ( items ) - <NUM_LIT:1> ) <EOL> else : <EOL> self . filterChoice . Select ( <NUM_LIT:0> ) <EOL> cff . Destroy ( ) <EOL> self . fetch_panel . Layout ( ) <EOL> self . fetch_panel . Refresh ( ) <EOL> def SetupFetchFromGroupSizer ( self , group ) : <EOL> '''<STR_LIT>''' <EOL> if group == '<STR_LIT:image>' : <EOL> fieldNames = [ '<STR_LIT>' , '<STR_LIT:image>' ] if p . table_id else [ '<STR_LIT:image>' ] <EOL> fieldTypes = [ int , int ] <EOL> validKeys = dm . GetAllImageKeys ( ) <EOL> else : <EOL> fieldNames = dm . GetGroupColumnNames ( group ) <EOL> fieldTypes = dm . GetGroupColumnTypes ( group ) <EOL> validKeys = dm . GetGroupKeysInGroup ( group ) <EOL> self . groupInputs = [ ] <EOL> self . groupFieldValidators = [ ] <EOL> self . fetchFromGroupSizer . Clear ( True ) <EOL> for i , field in enumerate ( fieldNames ) : <EOL> label = wx . StaticText ( self . fetch_panel , wx . 
NewId ( ) , field + '<STR_LIT::>' ) <EOL> validVals = list ( set ( [ col [ i ] for col in validKeys ] ) ) <EOL> validVals . sort ( ) <EOL> validVals = [ str ( col ) for col in validVals ] <EOL> if group == '<STR_LIT:image>' or fieldTypes [ i ] == int or fieldTypes [ i ] == long : <EOL> fieldInp = wx . TextCtrl ( self . fetch_panel , - <NUM_LIT:1> , value = validVals [ <NUM_LIT:0> ] , size = ( <NUM_LIT> , - <NUM_LIT:1> ) ) <EOL> else : <EOL> fieldInp = wx . Choice ( self . fetch_panel , - <NUM_LIT:1> , size = ( <NUM_LIT> , - <NUM_LIT:1> ) , <EOL> choices = [ '<STR_LIT>' ] + validVals ) <EOL> validVals = [ '<STR_LIT>' ] + validVals <EOL> fieldInp . SetSelection ( <NUM_LIT:0> ) <EOL> def ValidateGroupField ( evt , validVals = validVals ) : <EOL> ctrl = evt . GetEventObject ( ) <EOL> if ctrl . GetValue ( ) in validVals : <EOL> ctrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> else : <EOL> ctrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> self . groupFieldValidators += [ ValidateGroupField ] <EOL> fieldInp . Bind ( wx . EVT_TEXT , self . groupFieldValidators [ - <NUM_LIT:1> ] ) <EOL> self . groupInputs += [ fieldInp ] <EOL> self . fetchFromGroupSizer . Add ( label ) <EOL> self . fetchFromGroupSizer . Add ( fieldInp ) <EOL> self . fetchFromGroupSizer . AddSpacer ( ( <NUM_LIT:10> , <NUM_LIT:20> ) ) <EOL> def ValidateIntegerField ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> txtCtrl = evt . GetEventObject ( ) <EOL> try : <EOL> int ( txtCtrl . GetValue ( ) ) <EOL> txtCtrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> except ( Exception ) : <EOL> txtCtrl . SetForegroundColour ( '<STR_LIT>' ) <EOL> def GetGroupKeyFromGroupSizer ( self , group = None ) : <EOL> '''<STR_LIT>''' <EOL> if group is not None : <EOL> fieldTypes = dm . GetGroupColumnTypes ( group ) <EOL> else : <EOL> fieldTypes = [ int for input in self . groupInputs ] <EOL> groupKey = [ ] <EOL> for input , ftype in zip ( self . groupInputs , fieldTypes ) : <EOL> val = str ( input . 
GetStringSelection ( ) ) <EOL> if val != '<STR_LIT>' : <EOL> val = ftype ( val ) <EOL> groupKey += [ val ] <EOL> return tuple ( groupKey ) <EOL> def OnShowImageControls ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> self . imageControlFrame = wx . Frame ( self , size = ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> ImageControlPanel ( self . imageControlFrame , self , brightness = self . brightness , scale = self . scale , <EOL> contrast = self . contrast ) <EOL> self . imageControlFrame . Show ( True ) <EOL> def OnSaveImgThumbnails ( self , evt ) : <EOL> saveDialog = wx . DirDialog ( self , "<STR_LIT>" , <EOL> style = wx . FD_SAVE | wx . FD_OVERWRITE_PROMPT | wx . FD_CHANGE_DIR ) <EOL> if saveDialog . ShowModal ( ) == wx . ID_OK : <EOL> directory = saveDialog . GetPath ( ) <EOL> if not os . path . exists ( directory + '<STR_LIT>' ) : <EOL> os . makedirs ( directory + '<STR_LIT>' ) <EOL> for tile in self . galleryBin . tiles : <EOL> imagetools . SaveBitmap ( tile . bitmap , directory + '<STR_LIT>' + str ( tile . obKey ) + '<STR_LIT>' ) <EOL> def OnFetchObjThumbnails ( self , evt ) : <EOL> self . classBins [ <NUM_LIT:0> ] . SelectAll ( ) <EOL> self . classBins [ <NUM_LIT:0> ] . RemoveSelectedTiles ( ) <EOL> def cb ( ) : <EOL> for tile in self . galleryBin . tiles : <EOL> pseudo_obKeys = tile . obKey <EOL> imKey = pseudo_obKeys [ : - <NUM_LIT:1> ] <EOL> obKeys = db . GetObjectsFromImage ( imKey ) <EOL> self . classBins [ <NUM_LIT:0> ] . AddObjects ( obKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = False ) <EOL> wx . CallAfter ( cb ) <EOL> def OnFetchAllObjThumbnails ( self , evt ) : <EOL> self . classBins [ <NUM_LIT:0> ] . SelectAll ( ) <EOL> self . classBins [ <NUM_LIT:0> ] . RemoveSelectedTiles ( ) <EOL> def flatten ( * args ) : <EOL> for x in args : <EOL> if hasattr ( x , '<STR_LIT>' ) : <EOL> for y in flatten ( * x ) : <EOL> yield y <EOL> else : <EOL> yield x <EOL> def cb ( ) : <EOL> imKeys = db . 
GetAllImageKeys ( ) <EOL> imKeys = map ( lambda x : tuple ( list ( flatten ( x , - <NUM_LIT:1> ) ) ) , imKeys ) <EOL> for imKey in imKeys : <EOL> pseudo_obKeys = imKey <EOL> imKey = pseudo_obKeys [ : - <NUM_LIT:1> ] <EOL> obKeys = db . GetObjectsFromImage ( imKey ) <EOL> self . classBins [ <NUM_LIT:0> ] . AddObjects ( obKeys , self . chMap , pos = '<STR_LIT>' , display_whole_image = False ) <EOL> wx . CallAfter ( cb ) <EOL> def OnSaveObjThumbnails ( self , evt ) : <EOL> saveDialog = wx . DirDialog ( self , "<STR_LIT>" , <EOL> style = wx . FD_SAVE | wx . FD_OVERWRITE_PROMPT | wx . FD_CHANGE_DIR ) <EOL> if saveDialog . ShowModal ( ) == wx . ID_OK : <EOL> directory = saveDialog . GetPath ( ) <EOL> if not os . path . exists ( directory + '<STR_LIT>' ) : <EOL> os . makedirs ( directory + '<STR_LIT>' ) <EOL> for tile in self . classBins [ <NUM_LIT:0> ] . tiles : <EOL> imagetools . SaveBitmap ( tile . bitmap , directory + '<STR_LIT>' + str ( tile . obKey ) + '<STR_LIT>' ) <EOL> def SetBrightness ( self , brightness ) : <EOL> '''<STR_LIT>''' <EOL> self . brightness = brightness <EOL> [ t . SetBrightness ( brightness ) for bin in self . all_sort_bins ( ) for t in bin . tiles ] <EOL> def SetScale ( self , scale ) : <EOL> '''<STR_LIT>''' <EOL> self . scale = scale <EOL> [ t . SetScale ( scale ) for bin in self . all_sort_bins ( ) for t in bin . tiles ] <EOL> [ bin . UpdateSizer ( ) for bin in self . all_sort_bins ( ) ] <EOL> def SetContrastMode ( self , mode ) : <EOL> self . contrast = mode <EOL> [ t . SetContrastMode ( mode ) for bin in self . all_sort_bins ( ) for t in bin . tiles ] <EOL> def PostMessage ( self , message ) : <EOL> '''<STR_LIT>''' <EOL> self . SetStatusText ( message ) <EOL> logging . info ( message ) <EOL> def OnClose ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> self . Destroy ( ) <EOL> def Destroy ( self ) : <EOL> '''<STR_LIT>''' <EOL> super ( ImageGallery , self ) . Destroy ( ) <EOL> import threading <EOL> for thread in threading . 
enumerate ( ) : <EOL> if thread != threading . currentThread ( ) and thread . getName ( ) . lower ( ) . startswith ( '<STR_LIT>' ) : <EOL> logging . debug ( '<STR_LIT>' % thread . getName ( ) ) <EOL> try : <EOL> thread . abort ( ) <EOL> except : <EOL> pass <EOL> tilecollection . TileCollection . _forgetClassInstanceReferenceForTesting ( ) <EOL> class StopCalculating ( Exception ) : <EOL> pass <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> import sys <EOL> import logging <EOL> from errors import show_exception_as_dialog <EOL> logging . basicConfig ( level = logging . DEBUG , ) <EOL> global defaultDir <EOL> defaultDir = os . getcwd ( ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> and sys . argv [ <NUM_LIT:1> ] . startswith ( '<STR_LIT>' ) : <EOL> del sys . argv [ <NUM_LIT:1> ] <EOL> app = wx . App ( ) <EOL> if sys . excepthook == sys . __excepthook__ : <EOL> sys . excepthook = show_exception_as_dialog <EOL> p = Properties . getInstance ( ) <EOL> db = dbconnect . DBConnect . getInstance ( ) <EOL> dm = DataModel . getInstance ( ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> propsFile = sys . argv [ <NUM_LIT:1> ] <EOL> p . LoadFile ( propsFile ) <EOL> else : <EOL> if not p . show_load_dialog ( ) : <EOL> logging . error ( '<STR_LIT>' ) <EOL> wx . GetApp ( ) . Exit ( ) <EOL> classifier = Classifier ( ) <EOL> classifier . Show ( True ) <EOL> if len ( sys . argv ) > <NUM_LIT:2> : <EOL> training_set_filename = sys . argv [ <NUM_LIT:2> ] <EOL> classifier . LoadTrainingSet ( training_set_filename ) <EOL> app . MainLoop ( ) <EOL> try : <EOL> import javabridge <EOL> javabridge . kill_vm ( ) <EOL> except : <EOL> import traceback <EOL> traceback . print_exc ( ) <EOL> print "<STR_LIT>" </s>
<s> import sys <EOL> import re <EOL> import logging <EOL> from optparse import OptionParser <EOL> import numpy as np <EOL> from scipy . spatial . distance import cdist <EOL> import pylab <EOL> import cpa <EOL> from . profiles import add_common_options <EOL> from . preprocessing import NullPreprocessor <EOL> from . cache import Cache , normalizations <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> logging . basicConfig ( level = logging . DEBUG ) <EOL> parser = OptionParser ( "<STR_LIT>" ) <EOL> parser . add_option ( '<STR_LIT>' , dest = '<STR_LIT>' , help = '<STR_LIT>' ) <EOL> add_common_options ( parser ) <EOL> options , args = parser . parse_args ( ) <EOL> if len ( args ) not in [ <NUM_LIT:3> , <NUM_LIT:4> ] : <EOL> parser . error ( '<STR_LIT>' ) <EOL> properties_file , cache_dir , preprocess_file = args [ : <NUM_LIT:3> ] <EOL> nsteps = int ( args [ <NUM_LIT:3> ] ) if len ( args ) == <NUM_LIT:4> else <NUM_LIT:20> <EOL> normalization = normalizations [ options . normalization ] <EOL> if preprocess_file is None : <EOL> preprocessor = NullPreprocessor ( normalization . colnames ) <EOL> else : <EOL> preprocessor = cpa . util . unpickle1 ( preprocess_file ) <EOL> cpa . properties . LoadFile ( properties_file ) <EOL> cache = Cache ( cache_dir ) <EOL> if options . filter : <EOL> image_keys = cpa . db . GetFilteredImages ( options . filter ) <EOL> else : <EOL> image_keys = cpa . db . GetAllImageKeys ( ) <EOL> nfactors = len ( preprocessor . variables ) <EOL> min_distances = np . ones ( nfactors * nsteps ) * np . inf <EOL> nearest_neighbors = [ None ] * nfactors * nsteps <EOL> min_profile = np . ones ( nfactors ) * np . inf <EOL> max_profile = np . ones ( nfactors ) * - np . inf <EOL> njobs = len ( image_keys ) <EOL> def make_progress ( ) : <EOL> show_progress = True <EOL> if show_progress : <EOL> import progressbar <EOL> return progressbar . ProgressBar ( widgets = [ progressbar . Percentage ( ) , '<STR_LIT:U+0020>' , <EOL> progressbar . 
Bar ( ) , '<STR_LIT:U+0020>' , <EOL> progressbar . Counter ( ) , '<STR_LIT:/>' , <EOL> str ( njobs ) , '<STR_LIT:U+0020>' , <EOL> progressbar . ETA ( ) ] , <EOL> maxval = njobs ) <EOL> else : <EOL> return lambda x : x <EOL> for image_key in make_progress ( ) ( image_keys ) : <EOL> data , colnames , object_keys = cache . load ( [ image_key ] , normalization = normalization ) <EOL> if len ( data ) == <NUM_LIT:0> : <EOL> continue <EOL> data = preprocessor ( data ) <EOL> min_profile = np . minimum ( min_profile , np . min ( data , <NUM_LIT:0> ) ) <EOL> max_profile = np . maximum ( max_profile , np . max ( data , <NUM_LIT:0> ) ) <EOL> print >> sys . stderr , '<STR_LIT>' <EOL> for i in range ( nfactors ) : <EOL> print >> sys . stderr , i + <NUM_LIT:1> , min_profile [ i ] , max_profile [ i ] <EOL> print >> sys . stderr <EOL> values = np . vstack ( [ np . linspace ( min_profile [ i ] , max_profile [ i ] , nsteps ) <EOL> for i in range ( nfactors ) ] ) <EOL> for image_key in make_progress ( ) ( image_keys ) : <EOL> data , colnames , object_keys = cache . load ( [ image_key ] , normalization = normalization ) <EOL> if len ( data ) == <NUM_LIT:0> : <EOL> continue <EOL> data = preprocessor ( data ) <EOL> distances = np . zeros ( ( len ( data ) , nfactors * nsteps ) ) <EOL> for i in range ( len ( data ) ) : <EOL> for factor in range ( nfactors ) : <EOL> for step in range ( nsteps ) : <EOL> distance = np . abs ( data [ i , factor ] - values [ factor , step ] ) <EOL> distances [ i , factor * nsteps + step ] = distance <EOL> assert distances . shape [ <NUM_LIT:1> ] == nfactors * nsteps <EOL> cell_indices , target_indices = np . nonzero ( distances < min_distances ) <EOL> for i , j in zip ( cell_indices , target_indices ) : <EOL> min_distances [ j ] = distances [ i , j ] <EOL> nearest_neighbors [ j ] = image_key + ( object_keys [ i ] , ) <EOL> print '<STR_LIT:label>' , '<STR_LIT:U+0020>' . join ( [ re . sub ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' , v ) for v in preprocessor . 
variables ] ) <EOL> for i , label in enumerate ( preprocessor . variables ) : <EOL> for j in range ( nsteps ) : <EOL> print re . sub ( '<STR_LIT:U+0020>' , '<STR_LIT:_>' , label ) , '<STR_LIT:U+0020>' . join ( map ( str , nearest_neighbors [ i * nsteps + j ] ) ) </s>
<s> import logging <EOL> import wx <EOL> import sys <EOL> from properties import Properties <EOL> import tableviewer <EOL> import dbconnect <EOL> import numpy as np <EOL> class QueryMaker ( wx . Frame ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self , parent , size = ( <NUM_LIT> , <NUM_LIT> ) , ** kwargs ) : <EOL> wx . Frame . __init__ ( self , parent , - <NUM_LIT:1> , size = size , title = '<STR_LIT>' , ** kwargs ) <EOL> panel = wx . Panel ( self ) <EOL> self . query_textctrl = wx . TextCtrl ( panel , - <NUM_LIT:1> , size = ( - <NUM_LIT:1> , - <NUM_LIT:1> ) , style = wx . TE_MULTILINE ) <EOL> self . execute_btn = wx . Button ( panel , - <NUM_LIT:1> , '<STR_LIT>' ) <EOL> sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> panel . SetSizer ( sizer ) <EOL> sizer . Add ( self . query_textctrl , <NUM_LIT:1> , wx . EXPAND | wx . TOP | wx . LEFT | wx . RIGHT , <NUM_LIT:10> ) <EOL> button_sizer = wx . BoxSizer ( wx . HORIZONTAL ) <EOL> sizer . Add ( button_sizer , <NUM_LIT:0> , wx . EXPAND ) <EOL> button_sizer . AddStretchSpacer ( ) <EOL> button_sizer . Add ( self . execute_btn , <NUM_LIT:0> , wx . ALL , <NUM_LIT:10> ) <EOL> self . query_textctrl . Bind ( wx . EVT_KEY_UP , self . on_enter ) <EOL> self . execute_btn . Bind ( wx . EVT_BUTTON , self . on_execute ) <EOL> def on_enter ( self , evt ) : <EOL> '''<STR_LIT>''' <EOL> if evt . CmdDown ( ) and evt . GetKeyCode ( ) == wx . WXK_RETURN : <EOL> self . on_execute ( ) <EOL> evt . Skip ( ) <EOL> def on_execute ( self , evt = None ) : <EOL> '''<STR_LIT>''' <EOL> db = dbconnect . DBConnect . getInstance ( ) <EOL> q = self . query_textctrl . Value <EOL> try : <EOL> res = db . execute ( q ) <EOL> if res is None : <EOL> logging . info ( '<STR_LIT>' ) <EOL> return <EOL> res = np . array ( db . execute ( q ) ) <EOL> colnames = db . GetResultColumnNames ( ) <EOL> grid = tableviewer . TableViewer ( self , title = '<STR_LIT>' ) <EOL> grid . table_from_array ( res , colnames ) <EOL> grid . Show ( ) <EOL> logging . 
info ( '<STR_LIT>' ) <EOL> except Exception , e : <EOL> logging . error ( '<STR_LIT>' ) <EOL> logging . error ( e ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> app = wx . PySimpleApp ( ) <EOL> logging . basicConfig ( stream = sys . stdout , level = logging . DEBUG ) <EOL> p = Properties . getInstance ( ) <EOL> if len ( sys . argv ) > <NUM_LIT:1> : <EOL> propsFile = sys . argv [ <NUM_LIT:1> ] <EOL> p . LoadFile ( propsFile ) <EOL> else : <EOL> if not p . show_load_dialog ( ) : <EOL> print '<STR_LIT>' <EOL> wx . GetApp ( ) . Exit ( ) <EOL> sys . exit ( ) <EOL> QueryMaker ( None ) . Show ( ) <EOL> app . MainLoop ( ) </s>
<s> from sys import stderr <EOL> import logging <EOL> import numpy <EOL> import cPickle <EOL> import base64 <EOL> import zlib <EOL> import wx <EOL> import collections <EOL> import pandas as pd <EOL> from dbconnect import * <EOL> from singleton import Singleton <EOL> db = DBConnect . getInstance ( ) <EOL> class TrainingSet : <EOL> "<STR_LIT>" <EOL> def __init__ ( self , properties , filename = '<STR_LIT>' , labels_only = False , csv = False ) : <EOL> self . properties = properties <EOL> self . colnames = db . GetColnamesForClassifier ( ) <EOL> self . key_labels = object_key_columns ( ) <EOL> self . filename = filename <EOL> self . cache = CellCache . getInstance ( ) <EOL> if filename != '<STR_LIT>' : <EOL> if csv : <EOL> self . LoadCSV ( filename , labels_only = labels_only ) <EOL> else : <EOL> self . Load ( filename , labels_only = labels_only ) <EOL> def normalize ( self ) : <EOL> import pandas as pd <EOL> df = pd . DataFrame ( self . values , columns = self . colnames ) <EOL> df_norm = ( df - df . mean ( ) ) / ( df . max ( ) - df . min ( ) ) <EOL> return df . values <EOL> def get_class_per_object ( self ) : <EOL> return [ self . labels [ self . label_array [ i ] - <NUM_LIT:1> ] for i in range ( len ( self . label_array ) ) ] <EOL> def Clear ( self ) : <EOL> self . saved = False <EOL> self . labels = [ ] <EOL> self . classifier_labels = [ ] <EOL> self . label_matrix = [ ] <EOL> self . label_array = [ ] <EOL> self . values = [ ] <EOL> self . entries = [ ] <EOL> self . coordinates = [ ] <EOL> try : <EOL> self . cache . clear_if_objects_modified ( ) <EOL> except : <EOL> logging . info ( "<STR_LIT>" ) <EOL> def Create ( self , labels , keyLists , labels_only = False , callback = None ) : <EOL> '''<STR_LIT>''' <EOL> assert len ( labels ) == len ( keyLists ) , '<STR_LIT>' <EOL> self . Clear ( ) <EOL> self . labels = numpy . array ( labels ) <EOL> self . classifier_labels = <NUM_LIT:2> * numpy . eye ( len ( labels ) , dtype = numpy . 
int ) - <NUM_LIT:1> <EOL> num_to_fetch = sum ( [ len ( k ) for k in keyLists ] ) <EOL> num_fetched = [ <NUM_LIT:0> ] <EOL> for label , cl_label , keyList in zip ( labels , self . classifier_labels , keyLists ) : <EOL> self . label_matrix += ( [ cl_label ] * len ( keyList ) ) <EOL> self . entries += zip ( [ label ] * len ( keyList ) , keyList ) <EOL> if labels_only : <EOL> self . values += [ ] <EOL> self . coordinates += [ db . GetObjectCoords ( k ) for k in keyList ] <EOL> else : <EOL> def get_data ( k ) : <EOL> d = self . cache . get_object_data ( k ) <EOL> if callback is not None : <EOL> callback ( num_fetched [ <NUM_LIT:0> ] / float ( num_to_fetch ) ) <EOL> num_fetched [ <NUM_LIT:0> ] = num_fetched [ <NUM_LIT:0> ] + <NUM_LIT:1> <EOL> return d <EOL> self . values += [ get_data ( k ) for k in keyList ] <EOL> self . coordinates += [ db . GetObjectCoords ( k ) for k in keyList ] <EOL> self . label_matrix = numpy . array ( self . label_matrix ) <EOL> self . values = numpy . array ( self . values , np . float64 ) <EOL> if len ( self . label_matrix ) > <NUM_LIT:0> : <EOL> self . label_array = numpy . nonzero ( self . label_matrix + <NUM_LIT:1> ) [ <NUM_LIT:1> ] + <NUM_LIT:1> <EOL> else : <EOL> self . label_array = self . label_matrix <EOL> def Load ( self , filename , labels_only = False ) : <EOL> self . Clear ( ) <EOL> f = open ( filename , '<STR_LIT>' ) <EOL> lines = f . read ( ) <EOL> lines = lines . split ( '<STR_LIT:\n>' ) <EOL> labelDict = collections . OrderedDict ( ) <EOL> self . key_labels = object_key_columns ( ) <EOL> for l in lines : <EOL> try : <EOL> if l . strip ( ) == '<STR_LIT>' : continue <EOL> if l . startswith ( '<STR_LIT:#>' ) : <EOL> self . cache . load_from_string ( l [ <NUM_LIT:2> : ] ) <EOL> continue <EOL> label = l . strip ( ) . split ( '<STR_LIT:U+0020>' ) [ <NUM_LIT:0> ] <EOL> if ( label == "<STR_LIT:label>" ) : <EOL> for labelname in l . strip ( ) . split ( '<STR_LIT:U+0020>' ) [ <NUM_LIT:1> : ] : <EOL> if labelname not in labelDict . 
keys ( ) : <EOL> labelDict [ labelname ] = [ ] <EOL> continue <EOL> obKey = tuple ( [ int ( float ( k ) ) for k in l . strip ( ) . split ( '<STR_LIT:U+0020>' ) [ <NUM_LIT:1> : len ( object_key_columns ( ) ) + <NUM_LIT:1> ] ] ) <EOL> labelDict [ label ] = labelDict . get ( label , [ ] ) + [ obKey ] <EOL> except : <EOL> logging . error ( '<STR_LIT>' % ( filename , l . strip ( ) ) ) <EOL> f . close ( ) <EOL> raise <EOL> self . Renumber ( labelDict ) <EOL> self . Create ( labelDict . keys ( ) , labelDict . values ( ) , labels_only = labels_only ) <EOL> f . close ( ) <EOL> def LoadCSV ( self , filename , labels_only = True ) : <EOL> self . Clear ( ) <EOL> df = pd . read_csv ( filename ) <EOL> labels = list ( set ( df [ '<STR_LIT>' ] . values ) ) <EOL> labelDict = collections . OrderedDict ( ) <EOL> self . key_labels = object_key_columns ( ) <EOL> key_names = [ key for key in self . key_labels ] <EOL> for label in labels : <EOL> keys = df [ key_names ] [ df [ '<STR_LIT>' ] == label ] . values <EOL> if len ( key_names ) == <NUM_LIT:2> : <EOL> keys = map ( lambda x : tuple ( ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] ) ) , keys ) <EOL> labelDict [ label ] = keys <EOL> else : <EOL> assert ( len ( key_names ) == <NUM_LIT:3> ) <EOL> keys = map ( lambda x : tuple ( ( x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] , x [ <NUM_LIT:2> ] ) ) , keys ) <EOL> labelDict [ label ] = keys <EOL> self . Renumber ( labelDict ) <EOL> self . Create ( labelDict . keys ( ) , labelDict . values ( ) , labels_only = labels_only ) <EOL> def Renumber ( self , label_dict ) : <EOL> from properties import Properties <EOL> obkey_length = <NUM_LIT:3> if Properties . getInstance ( ) . table_id else <NUM_LIT:2> <EOL> have_asked = False <EOL> progress = None <EOL> for label in label_dict . keys ( ) : <EOL> for idx , key in enumerate ( label_dict [ label ] ) : <EOL> if len ( key ) > obkey_length : <EOL> obkey = key [ : obkey_length ] <EOL> x , y = key [ obkey_length : obkey_length + <NUM_LIT:2> ] <EOL> coord = db . 
GetObjectCoords ( obkey , none_ok = True , silent = True ) <EOL> if coord == None or ( int ( coord [ <NUM_LIT:0> ] ) , int ( coord [ <NUM_LIT:1> ] ) ) != ( x , y ) : <EOL> if not have_asked : <EOL> dlg = wx . MessageDialog ( None , '<STR_LIT>' , <EOL> '<STR_LIT>' , wx . CANCEL | wx . YES_NO | wx . ICON_QUESTION ) <EOL> response = dlg . ShowModal ( ) <EOL> have_asked = True <EOL> if response == wx . ID_NO : <EOL> return <EOL> elif response == wx . ID_CANCEL : <EOL> label_dict . clear ( ) <EOL> return <EOL> if progress is None : <EOL> total = sum ( [ len ( v ) for v in label_dict . values ( ) ] ) <EOL> done = <NUM_LIT:0> <EOL> progress = wx . ProgressDialog ( "<STR_LIT>" , "<STR_LIT>" , maximum = total , style = wx . PD_ELAPSED_TIME | wx . PD_ESTIMATED_TIME | wx . PD_REMAINING_TIME | wx . PD_CAN_ABORT ) <EOL> label_dict [ label ] [ idx ] = db . GetObjectNear ( obkey [ : - <NUM_LIT:1> ] , x , y , silent = True ) <EOL> done = done + <NUM_LIT:1> <EOL> cont , skip = progress . Update ( done , '<STR_LIT>' % ( ( <NUM_LIT:100> * done ) / total ) ) <EOL> if not cont : <EOL> label_dict . clear ( ) <EOL> return <EOL> have_asked = False <EOL> for label in label_dict . keys ( ) : <EOL> if None in label_dict [ label ] : <EOL> if not have_asked : <EOL> dlg = wx . MessageDialog ( None , '<STR_LIT>' , <EOL> '<STR_LIT>' , wx . YES_NO | wx . ICON_ERROR ) <EOL> response = dlg . ShowModal ( ) <EOL> have_asked = True <EOL> if response == wx . ID_NO : <EOL> label_dict . clear ( ) <EOL> return <EOL> label_dict [ label ] = [ k for k in label_dict [ label ] if k is not None ] <EOL> def Save ( self , filename ) : <EOL> try : <EOL> self . cache . clear_if_objects_modified ( ) <EOL> except : <EOL> logging . info ( "<STR_LIT>" ) <EOL> f = open ( filename , '<STR_LIT:w>' ) <EOL> try : <EOL> from properties import Properties <EOL> p = Properties . getInstance ( ) <EOL> f . write ( '<STR_LIT>' % ( p . _filename ) ) <EOL> f . write ( '<STR_LIT>' + '<STR_LIT:U+0020>' . join ( self . 
labels ) + '<STR_LIT:\n>' ) <EOL> i = <NUM_LIT:0> <EOL> for label , obKey in self . entries : <EOL> line = '<STR_LIT>' % ( label , '<STR_LIT:U+0020>' . join ( [ str ( int ( k ) ) for k in obKey ] ) , '<STR_LIT:U+0020>' . join ( [ str ( int ( k ) ) for k in self . coordinates [ i ] ] ) ) <EOL> f . write ( line ) <EOL> i += <NUM_LIT:1> <EOL> try : <EOL> f . write ( '<STR_LIT>' + self . cache . save_to_string ( [ k [ <NUM_LIT:1> ] for k in self . entries ] ) + '<STR_LIT:\n>' ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" % ( filename ) ) <EOL> f . close ( ) <EOL> raise <EOL> f . close ( ) <EOL> logging . info ( '<STR_LIT>' % filename ) <EOL> self . saved = True <EOL> def SaveAsCSV ( self , filename ) : <EOL> try : <EOL> self . cache . clear_if_objects_modified ( ) <EOL> df = pd . DataFrame ( self . values , columns = self . colnames ) <EOL> except : <EOL> logging . info ( "<STR_LIT>" ) <EOL> df = pd . DataFrame ( [ ] ) <EOL> try : <EOL> from properties import Properties <EOL> tuples = self . get_object_keys ( ) <EOL> key_labels = self . key_labels <EOL> if len ( key_labels ) == <NUM_LIT:2> : <EOL> keyList = map ( lambda x : [ x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] ] , tuples ) <EOL> df_keys = pd . DataFrame ( keyList , columns = key_labels ) <EOL> else : <EOL> keyList = map ( lambda x : [ x [ <NUM_LIT:0> ] , x [ <NUM_LIT:1> ] , x [ <NUM_LIT:2> ] ] , tuples ) <EOL> df_keys = pd . DataFrame ( keyList , columns = key_labels ) <EOL> labels = self . labels <EOL> label_array = self . label_array <EOL> labels = [ labels [ label_array [ i ] - <NUM_LIT:1> ] for i in range ( len ( label_array ) ) ] <EOL> df_class = pd . DataFrame ( labels , columns = [ "<STR_LIT>" ] ) <EOL> df_labeled = pd . concat ( [ df_keys , df_class , df ] , axis = <NUM_LIT:1> ) <EOL> df_labeled . to_csv ( filename , index = False ) <EOL> except : <EOL> logging . error ( "<STR_LIT>" % ( filename ) ) <EOL> raise <EOL> logging . 
info ( '<STR_LIT>' % filename ) <EOL> self . saved = True <EOL> def get_object_keys ( self ) : <EOL> return [ e [ <NUM_LIT:1> ] for e in self . entries ] <EOL> class CellCache ( Singleton ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> self . data = { } <EOL> self . colnames = db . GetColumnNames ( p . object_table ) <EOL> if db . GetColnamesForClassifier ( ) is not None : <EOL> self . col_indices = [ self . colnames . index ( v ) for v in db . GetColnamesForClassifier ( ) ] <EOL> else : <EOL> self . col_indices = [ ] <EOL> self . last_update = db . get_objects_modify_date ( ) <EOL> def load_from_string ( self , str ) : <EOL> '<STR_LIT>' <EOL> try : <EOL> date , colnames , oldcache = cPickle . loads ( zlib . decompress ( base64 . b64decode ( str ) ) ) <EOL> except : <EOL> return <EOL> if len ( oldcache ) > <NUM_LIT:0> : <EOL> if oldcache . values ( ) [ <NUM_LIT:0> ] . dtype . kind == '<STR_LIT:S>' : <EOL> return <EOL> if db . verify_objects_modify_date_earlier ( date ) : <EOL> self . data . update ( oldcache ) <EOL> self . colnames = colnames <EOL> def save_to_string ( self , keys ) : <EOL> '<STR_LIT>' <EOL> temp = { } <EOL> for k in keys : <EOL> if k in self . data : <EOL> temp [ k ] = self . data [ k ] <EOL> output = ( db . get_objects_modify_date ( ) , self . colnames , temp ) <EOL> return base64 . b64encode ( zlib . compress ( cPickle . dumps ( output ) ) ) <EOL> def get_object_data ( self , key ) : <EOL> if key not in self . data : <EOL> self . data [ key ] = db . GetCellData ( key ) <EOL> return self . data [ key ] [ self . col_indices ] <EOL> def clear_if_objects_modified ( self ) : <EOL> if not db . verify_objects_modify_date_earlier ( self . last_update ) : <EOL> self . data = { } <EOL> self . last_update = db . get_objects_modify_date ( ) <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> from sys import argv <EOL> from properties import Properties <EOL> p = Properties . getInstance ( ) <EOL> p . 
LoadFile ( argv [ <NUM_LIT:1> ] ) <EOL> tr = TrainingSet ( p ) <EOL> tr . Load ( argv [ <NUM_LIT:2> ] ) <EOL> for i in range ( len ( tr . labels ) ) : <EOL> print tr . labels [ i ] , <EOL> print "<STR_LIT:U+0020>" . join ( [ str ( v ) for v in tr . values [ i ] ] ) </s>
<s> """<STR_LIT>""" <EOL> import wx <EOL> class TreeCheckboxDialog ( wx . Dialog ) : <EOL> """<STR_LIT>""" <EOL> def __init__ ( self , parent , d , * args , ** kwargs ) : <EOL> """<STR_LIT>""" <EOL> wx . Dialog . __init__ ( self , parent , * args , ** kwargs ) <EOL> self . bitmaps = [ ] <EOL> self . parent_reflects_child = True <EOL> sizer = wx . BoxSizer ( wx . VERTICAL ) <EOL> self . SetSizer ( sizer ) <EOL> tree_style = wx . TR_DEFAULT_STYLE <EOL> self . tree_ctrl = wx . TreeCtrl ( self , <EOL> style = tree_style ) <EOL> sizer . Add ( self . tree_ctrl , <NUM_LIT:1> , wx . EXPAND | wx . ALL , <NUM_LIT:5> ) <EOL> image_list = wx . ImageList ( <NUM_LIT:16> , <NUM_LIT:16> ) <EOL> for i , state_flag in enumerate ( <EOL> ( <NUM_LIT:0> , wx . CONTROL_CHECKED , wx . CONTROL_UNDETERMINED ) ) : <EOL> for j , selection_flag in enumerate ( ( <NUM_LIT:0> , wx . CONTROL_CURRENT ) ) : <EOL> idx = image_list . Add ( <EOL> self . get_checkbox_bitmap ( state_flag | selection_flag , <EOL> <NUM_LIT:16> , <NUM_LIT:16> ) ) <EOL> self . tree_ctrl . SetImageList ( image_list ) <EOL> self . image_list = image_list <EOL> image_index , selected_image_index = self . img_idx ( d ) <EOL> root_id = self . tree_ctrl . AddRoot ( "<STR_LIT>" , image_index , <EOL> selected_image_index , <EOL> wx . TreeItemData ( d ) ) <EOL> self . tree_ctrl . SetItemImage ( root_id , image_index , <EOL> wx . TreeItemIcon_Normal ) <EOL> self . tree_ctrl . SetItemImage ( root_id , selected_image_index , <EOL> wx . TreeItemIcon_Selected ) <EOL> self . tree_ctrl . SetItemImage ( root_id , image_index , <EOL> wx . TreeItemIcon_Expanded ) <EOL> self . tree_ctrl . SetItemImage ( root_id , image_index , <EOL> wx . TreeItemIcon_SelectedExpanded ) <EOL> self . root_id = root_id <EOL> self . tree_ctrl . SetItemHasChildren ( root_id , len ( d ) > <NUM_LIT:1> ) <EOL> self . Bind ( wx . EVT_TREE_ITEM_EXPANDING , self . on_expanding , self . tree_ctrl ) <EOL> self . tree_ctrl . Bind ( wx . EVT_LEFT_DOWN , self . 
on_left_down ) <EOL> self . tree_ctrl . Expand ( root_id ) <EOL> table_sizer = wx . GridBagSizer ( ) <EOL> sizer . Add ( table_sizer , <NUM_LIT:0> , wx . EXPAND ) <EOL> table_sizer . Add ( wx . StaticText ( self , label = '<STR_LIT>' ) , ( <NUM_LIT:0> , <NUM_LIT:0> ) , flag = wx . LEFT | wx . RIGHT , border = <NUM_LIT:3> ) <EOL> for i , ( bitmap , description ) in enumerate ( ( <EOL> ( image_list . GetBitmap ( <NUM_LIT:0> ) , "<STR_LIT>" ) , <EOL> ( image_list . GetBitmap ( <NUM_LIT:2> ) , "<STR_LIT>" ) , <EOL> ( image_list . GetBitmap ( <NUM_LIT:4> ) , "<STR_LIT>" ) ) ) : <EOL> bitmap_ctrl = wx . StaticBitmap ( self ) <EOL> bitmap_ctrl . SetBitmap ( bitmap ) <EOL> table_sizer . Add ( bitmap_ctrl , ( i , <NUM_LIT:1> ) , flag = wx . RIGHT , border = <NUM_LIT:5> ) <EOL> table_sizer . Add ( wx . StaticText ( self , label = description ) , ( i , <NUM_LIT:2> ) ) <EOL> table_sizer . AddGrowableCol ( <NUM_LIT:2> ) <EOL> sizer . Add ( self . CreateStdDialogButtonSizer ( wx . CANCEL | wx . OK ) , <EOL> flag = wx . CENTER ) <EOL> self . Layout ( ) <EOL> def set_parent_reflects_child ( self , value ) : <EOL> """<STR_LIT>""" <EOL> self . parent_reflects_child = value <EOL> @ staticmethod <EOL> def img_idx ( d ) : <EOL> if d [ None ] is False : <EOL> return <NUM_LIT:0> , <NUM_LIT:1> <EOL> elif d [ None ] is True : <EOL> return <NUM_LIT:2> , <NUM_LIT:3> <EOL> else : <EOL> return <NUM_LIT:4> , <NUM_LIT:5> <EOL> def get_item_data ( self , item_id ) : <EOL> x = self . tree_ctrl . GetItemData ( item_id ) <EOL> d = x . GetData ( ) <EOL> return d <EOL> def on_expanding ( self , event ) : <EOL> """<STR_LIT>""" <EOL> item_id = event . GetItem ( ) <EOL> d = self . get_item_data ( item_id ) <EOL> if len ( d ) > <NUM_LIT:1> : <EOL> self . populate ( item_id ) <EOL> def populate ( self , item_id ) : <EOL> """<STR_LIT>""" <EOL> try : <EOL> d = self . get_item_data ( item_id ) <EOL> assert len ( d ) > <NUM_LIT:1> <EOL> if self . tree_ctrl . 
GetChildrenCount ( item_id , False ) == <NUM_LIT:0> : <EOL> for key in sorted ( [ x for x in d . keys ( ) if x is not None ] ) : <EOL> d1 = d [ key ] <EOL> if hasattr ( d1 , "<STR_LIT>" ) : <EOL> self . SetCursor ( wx . StockCursor ( wx . CURSOR_WAIT ) ) <EOL> d1 = d1 ( ) <EOL> d [ key ] = d1 <EOL> image_index , selected_index = self . img_idx ( d1 ) <EOL> sub_id = self . tree_ctrl . AppendItem ( item_id , key , image_index , <EOL> selected_index , <EOL> wx . TreeItemData ( d1 ) ) <EOL> self . tree_ctrl . SetItemImage ( sub_id , image_index , <EOL> wx . TreeItemIcon_Normal ) <EOL> self . tree_ctrl . SetItemImage ( sub_id , selected_index , <EOL> wx . TreeItemIcon_Selected ) <EOL> self . tree_ctrl . SetItemImage ( sub_id , image_index , <EOL> wx . TreeItemIcon_Expanded ) <EOL> self . tree_ctrl . SetItemImage ( sub_id , selected_index , <EOL> wx . TreeItemIcon_SelectedExpanded ) <EOL> self . tree_ctrl . SetItemHasChildren ( sub_id , len ( d1 ) > <NUM_LIT:1> ) <EOL> finally : <EOL> self . SetCursor ( wx . NullCursor ) <EOL> def on_left_down ( self , event ) : <EOL> item_id , where = self . tree_ctrl . HitTest ( event . Position ) <EOL> if where & wx . TREE_HITTEST_ONITEMICON == <NUM_LIT:0> : <EOL> event . Skip ( ) <EOL> return <EOL> d = self . get_item_data ( item_id ) <EOL> if d [ None ] is None or d [ None ] is False : <EOL> state = True <EOL> else : <EOL> state = False <EOL> self . set_item_state ( item_id , state ) <EOL> self . set_parent_state ( item_id ) <EOL> def set_parent_state ( self , item_id ) : <EOL> if item_id != self . root_id : <EOL> parent_id = self . tree_ctrl . GetItemParent ( item_id ) <EOL> d_parent = self . get_item_data ( parent_id ) <EOL> child_id , _ = self . tree_ctrl . GetFirstChild ( parent_id ) <EOL> state = self . get_item_data ( child_id ) [ None ] <EOL> while True : <EOL> if child_id == self . tree_ctrl . GetLastChild ( parent_id ) : <EOL> break <EOL> child_id = self . tree_ctrl . GetNextSibling ( child_id ) <EOL> next_state = self . 
get_item_data ( child_id ) [ None ] <EOL> if next_state != state : <EOL> state = None <EOL> break <EOL> if d_parent [ None ] is not state : <EOL> if state is False and not self . parent_reflects_child : <EOL> state = None <EOL> d_parent [ None ] = state <EOL> image_index , selected_index = self . img_idx ( d_parent ) <EOL> self . tree_ctrl . SetItemImage ( parent_id , image_index , wx . TreeItemIcon_Normal ) <EOL> self . tree_ctrl . SetItemImage ( parent_id , selected_index , wx . TreeItemIcon_Selected ) <EOL> self . tree_ctrl . SetItemImage ( parent_id , image_index , wx . TreeItemIcon_Expanded ) <EOL> self . tree_ctrl . SetItemImage ( parent_id , selected_index , wx . TreeItemIcon_SelectedExpanded ) <EOL> self . set_parent_state ( parent_id ) <EOL> def set_item_state ( self , item_id , state ) : <EOL> d = self . get_item_data ( item_id ) <EOL> d [ None ] = state <EOL> image_index , selected_index = self . img_idx ( d ) <EOL> self . tree_ctrl . SetItemImage ( item_id , image_index , wx . TreeItemIcon_Normal ) <EOL> self . tree_ctrl . SetItemImage ( item_id , selected_index , wx . TreeItemIcon_Selected ) <EOL> self . tree_ctrl . SetItemImage ( item_id , image_index , wx . TreeItemIcon_Expanded ) <EOL> self . tree_ctrl . SetItemImage ( item_id , selected_index , wx . TreeItemIcon_SelectedExpanded ) <EOL> if len ( d ) > <NUM_LIT:1> : <EOL> if self . tree_ctrl . GetChildrenCount ( item_id ) == <NUM_LIT:0> : <EOL> self . populate ( item_id ) <EOL> child_id , _ = self . tree_ctrl . GetFirstChild ( item_id ) <EOL> while True : <EOL> d1 = self . get_item_data ( child_id ) <EOL> if d1 [ None ] is not state : <EOL> self . set_item_state ( child_id , state ) <EOL> if child_id == self . tree_ctrl . GetLastChild ( item_id ) : <EOL> break <EOL> child_id = self . tree_ctrl . GetNextSibling ( child_id ) <EOL> def get_checkbox_bitmap ( self , flags , width , height ) : <EOL> """<STR_LIT>""" <EOL> dc = wx . MemoryDC ( ) <EOL> bitmap = wx . EmptyBitmap ( width , height ) <EOL> dc . 
SelectObject ( bitmap ) <EOL> dc . SetBrush ( wx . BLACK_BRUSH ) <EOL> dc . SetTextForeground ( wx . BLACK ) <EOL> try : <EOL> dc . Clear ( ) <EOL> render = wx . RendererNative . Get ( ) <EOL> render . DrawCheckBox ( self , dc , ( <NUM_LIT:0> , <NUM_LIT:0> , width , height ) , flags ) <EOL> finally : <EOL> dc . SelectObject ( wx . NullBitmap ) <EOL> dc . Destroy ( ) <EOL> self . bitmaps . append ( bitmap ) <EOL> return bitmap </s>
<s> '''<STR_LIT>''' <EOL> import numpy as np <EOL> import cellprofiler . cpimage as cpi <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . objects as cpo <EOL> import cellprofiler . preferences as cpprefs <EOL> import cellprofiler . settings as cps <EOL> import cellprofiler . workspace as cpw <EOL> from cellprofiler . modules . identify import M_LOCATION_CENTER_X , M_LOCATION_CENTER_Y <EOL> OI_OBJECTS = "<STR_LIT>" <EOL> OI_IMAGE = "<STR_LIT>" <EOL> E_FIGURE = "<STR_LIT>" <EOL> E_AXES = "<STR_LIT>" <EOL> E_IMAGE = "<STR_LIT>" <EOL> CT_COLOR = "<STR_LIT>" <EOL> CT_TEXT = "<STR_LIT>" <EOL> CMS_USE_MEASUREMENT_RANGE = "<STR_LIT>" <EOL> CMS_MANUAL = "<STR_LIT>" <EOL> class DisplayDataOnImage ( cpm . CPModule ) : <EOL> module_name = '<STR_LIT>' <EOL> category = '<STR_LIT>' <EOL> variable_revision_number = <NUM_LIT:6> <EOL> def create_settings ( self ) : <EOL> """<STR_LIT>""" <EOL> self . objects_or_image = cps . Choice ( <EOL> "<STR_LIT>" , <EOL> [ OI_OBJECTS , OI_IMAGE ] , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . objects_name = cps . ObjectNameSubscriber ( <EOL> "<STR_LIT>" , cps . NONE , doc = """<STR_LIT>""" ) <EOL> def object_fn ( ) : <EOL> if self . objects_or_image == OI_OBJECTS : <EOL> return self . objects_name . value <EOL> else : <EOL> return cpmeas . IMAGE <EOL> self . measurement = cps . Measurement ( <EOL> "<STR_LIT>" , object_fn , doc = """<STR_LIT>""" ) <EOL> self . wants_image = cps . Binary ( <EOL> "<STR_LIT>" , True , <EOL> doc = """<STR_LIT>""" ) <EOL> self . image_name = cps . ImageNameSubscriber ( <EOL> "<STR_LIT>" , cps . NONE , doc = """<STR_LIT>""" ) <EOL> self . color_or_text = cps . Choice ( <EOL> "<STR_LIT>" , [ CT_TEXT , CT_COLOR ] , <EOL> doc = """<STR_LIT>""" % globals ( ) <EOL> ) <EOL> self . colormap = cps . Colormap ( <EOL> "<STR_LIT>" , <EOL> doc = """<STR_LIT>""" ) <EOL> self . text_color = cps . 
Color ( <EOL> "<STR_LIT>" , "<STR_LIT>" , doc = """<STR_LIT>""" ) <EOL> self . display_image = cps . ImageNameProvider ( <EOL> "<STR_LIT>" , "<STR_LIT>" , doc = """<STR_LIT>""" ) <EOL> self . font_size = cps . Integer ( <EOL> "<STR_LIT>" , <NUM_LIT:10> , minval = <NUM_LIT:1> ) <EOL> self . decimals = cps . Integer ( <EOL> "<STR_LIT>" , <NUM_LIT:2> , minval = <NUM_LIT:0> ) <EOL> self . saved_image_contents = cps . Choice ( <EOL> "<STR_LIT>" , <EOL> [ E_IMAGE , E_FIGURE , E_AXES ] , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . offset = cps . Integer ( <EOL> "<STR_LIT>" , <NUM_LIT:0> , doc = """<STR_LIT>""" ) <EOL> self . color_map_scale_choice = cps . Choice ( <EOL> "<STR_LIT>" , <EOL> [ CMS_USE_MEASUREMENT_RANGE , CMS_MANUAL ] , <EOL> doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . color_map_scale = cps . FloatRange ( <EOL> "<STR_LIT>" , <EOL> value = ( <NUM_LIT:0.0> , <NUM_LIT:1.0> ) , <EOL> doc = """<STR_LIT>""" ) <EOL> def settings ( self ) : <EOL> """<STR_LIT>""" <EOL> return [ self . objects_or_image , self . objects_name , self . measurement , <EOL> self . image_name , self . text_color , self . display_image , <EOL> self . font_size , self . decimals , self . saved_image_contents , <EOL> self . offset , self . color_or_text , self . colormap , <EOL> self . wants_image , self . color_map_scale_choice , <EOL> self . color_map_scale ] <EOL> def visible_settings ( self ) : <EOL> """<STR_LIT>""" <EOL> result = [ self . objects_or_image ] <EOL> if self . objects_or_image == OI_OBJECTS : <EOL> result += [ self . objects_name ] <EOL> result += [ self . measurement , self . wants_image , self . image_name ] <EOL> if self . objects_or_image == OI_OBJECTS and not self . use_as_data_tool : <EOL> result += [ self . color_or_text ] <EOL> if self . use_color_map ( ) : <EOL> result += [ self . colormap , self . color_map_scale_choice ] <EOL> if self . color_map_scale_choice == CMS_MANUAL : <EOL> result += [ self . 
color_map_scale ] <EOL> else : <EOL> result += [ self . text_color , self . font_size , self . decimals , <EOL> self . offset ] <EOL> result += [ self . display_image , self . saved_image_contents ] <EOL> return result <EOL> def use_color_map ( self ) : <EOL> '''<STR_LIT>''' <EOL> return self . objects_or_image == OI_OBJECTS and self . color_or_text == CT_COLOR and not self . use_as_data_tool <EOL> def run ( self , workspace ) : <EOL> import matplotlib <EOL> import matplotlib . cm <EOL> import matplotlib . backends . backend_agg <EOL> import matplotlib . transforms <EOL> from cellprofiler . gui . cpfigure_tools import figure_to_image , only_display_image <EOL> image = workspace . image_set . get_image ( self . image_name . value ) <EOL> if self . wants_image : <EOL> pixel_data = image . pixel_data <EOL> else : <EOL> pixel_data = np . zeros ( image . pixel_data . shape [ : <NUM_LIT:2> ] ) <EOL> object_set = workspace . object_set <EOL> if self . objects_or_image == OI_OBJECTS : <EOL> if self . objects_name . value in object_set . get_object_names ( ) : <EOL> objects = object_set . get_objects ( self . objects_name . value ) <EOL> else : <EOL> objects = None <EOL> workspace . display_data . pixel_data = pixel_data <EOL> if self . use_color_map ( ) : <EOL> workspace . display_data . labels = objects . segmented <EOL> measurements = workspace . measurements <EOL> if self . objects_or_image == OI_IMAGE : <EOL> value = measurements . get_current_image_measurement ( <EOL> self . measurement . value ) <EOL> values = [ value ] <EOL> x = [ pixel_data . shape [ <NUM_LIT:1> ] / <NUM_LIT:2> ] <EOL> x_offset = np . random . uniform ( high = <NUM_LIT:1.0> , low = - <NUM_LIT:1.0> ) <EOL> x [ <NUM_LIT:0> ] += x_offset <EOL> y = [ pixel_data . shape [ <NUM_LIT:0> ] / <NUM_LIT:2> ] <EOL> y_offset = np . sqrt ( <NUM_LIT:1> - x_offset ** <NUM_LIT:2> ) <EOL> y [ <NUM_LIT:0> ] += y_offset <EOL> else : <EOL> values = measurements . get_current_measurement ( <EOL> self . objects_name . 
value , <EOL> self . measurement . value ) <EOL> if objects is not None and len ( values ) < objects . count : <EOL> temp = np . zeros ( objects . count , values . dtype ) <EOL> temp [ : len ( values ) ] = values <EOL> temp [ len ( values ) : ] = np . nan <EOL> values = temp <EOL> x = measurements . get_current_measurement ( <EOL> self . objects_name . value , M_LOCATION_CENTER_X ) <EOL> x_offset = np . random . uniform ( high = <NUM_LIT:1.0> , low = - <NUM_LIT:1.0> , size = x . shape ) <EOL> y_offset = np . sqrt ( <NUM_LIT:1> - x_offset ** <NUM_LIT:2> ) <EOL> x += self . offset . value * x_offset <EOL> y = measurements . get_current_measurement ( <EOL> self . objects_name . value , M_LOCATION_CENTER_Y ) <EOL> y += self . offset . value * y_offset <EOL> mask = ~ ( np . isnan ( values ) | np . isnan ( x ) | np . isnan ( y ) ) <EOL> values = values [ mask ] <EOL> x = x [ mask ] <EOL> y = y [ mask ] <EOL> workspace . display_data . mask = mask <EOL> workspace . display_data . values = values <EOL> workspace . display_data . x = x <EOL> workspace . display_data . y = y <EOL> fig = matplotlib . figure . Figure ( ) <EOL> axes = fig . add_subplot ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) <EOL> def imshow_fn ( pixel_data ) : <EOL> img = pixel_data * <NUM_LIT:255> <EOL> img [ img < <NUM_LIT:0> ] = <NUM_LIT:0> <EOL> img [ img > <NUM_LIT:255> ] = <NUM_LIT:255> <EOL> img = img . astype ( np . uint8 ) <EOL> axes . imshow ( img , cmap = matplotlib . cm . Greys_r ) <EOL> self . display_on_figure ( workspace , axes , imshow_fn ) <EOL> canvas = matplotlib . backends . backend_agg . FigureCanvasAgg ( fig ) <EOL> if self . saved_image_contents == E_AXES : <EOL> fig . set_frameon ( False ) <EOL> if not self . use_color_map ( ) : <EOL> fig . subplots_adjust ( <NUM_LIT:0.1> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> shape = pixel_data . shape <EOL> width = float ( shape [ <NUM_LIT:1> ] ) / fig . dpi <EOL> height = float ( shape [ <NUM_LIT:0> ] ) / fig . 
dpi <EOL> fig . set_figheight ( height ) <EOL> fig . set_figwidth ( width ) <EOL> elif self . saved_image_contents == E_IMAGE : <EOL> if self . use_color_map ( ) : <EOL> fig . axes [ <NUM_LIT:1> ] . set_visible ( False ) <EOL> only_display_image ( fig , pixel_data . shape ) <EOL> else : <EOL> if not self . use_color_map ( ) : <EOL> fig . subplots_adjust ( <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT> , <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> pixel_data = figure_to_image ( fig , dpi = fig . dpi ) <EOL> image = cpi . Image ( pixel_data ) <EOL> workspace . image_set . add ( self . display_image . value , image ) <EOL> def run_as_data_tool ( self , workspace ) : <EOL> import wx <EOL> import loadimages as LI <EOL> import os . path <EOL> im_id = self . image_name . value <EOL> m = workspace . measurements <EOL> image_name = self . image_name . value <EOL> pathname_feature = "<STR_LIT:_>" . join ( ( LI . C_PATH_NAME , image_name ) ) <EOL> filename_feature = "<STR_LIT:_>" . join ( ( LI . C_FILE_NAME , image_name ) ) <EOL> if not all ( [ m . has_feature ( cpmeas . IMAGE , f ) <EOL> for f in pathname_feature , filename_feature ] ) : <EOL> with wx . FileDialog ( <EOL> None , <EOL> message = "<STR_LIT>" , <EOL> wildcard = "<STR_LIT>" <EOL> "<STR_LIT>" ) as dlg : <EOL> if dlg . ShowModal ( ) != wx . ID_OK : <EOL> return <EOL> pathname , filename = os . path . split ( dlg . Path ) <EOL> else : <EOL> pathname = m . get_current_image_measurement ( pathname_feature ) <EOL> filename = m . get_current_image_measurement ( filename_feature ) <EOL> image_set_list = workspace . image_set_list <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> ip = LI . LoadImagesImageProvider ( im_id , pathname , filename ) <EOL> image_set . providers . append ( ip ) <EOL> self . run ( workspace ) <EOL> def display ( self , workspace , figure ) : <EOL> figure . set_subplots ( ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> ax = figure . 
subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) <EOL> title = "<STR_LIT>" % ( self . objects_name . value if self . objects_or_image == OI_OBJECTS else cpmeas . IMAGE , <EOL> self . measurement . value ) <EOL> def imshow_fn ( pixel_data ) : <EOL> if pixel_data . ndim == <NUM_LIT:3> : <EOL> figure . subplot_imshow_color ( <NUM_LIT:0> , <NUM_LIT:0> , pixel_data , title = title ) <EOL> else : <EOL> figure . subplot_imshow_grayscale ( <NUM_LIT:0> , <NUM_LIT:0> , pixel_data , title = title ) <EOL> self . display_on_figure ( workspace , ax , imshow_fn ) <EOL> def display_on_figure ( self , workspace , axes , imshow_fn ) : <EOL> import matplotlib <EOL> import matplotlib . cm <EOL> if self . use_color_map ( ) : <EOL> labels = workspace . display_data . labels <EOL> if self . wants_image : <EOL> pixel_data = workspace . display_data . pixel_data <EOL> else : <EOL> pixel_data = ( labels != <NUM_LIT:0> ) . astype ( np . float32 ) <EOL> if pixel_data . ndim == <NUM_LIT:3> : <EOL> pixel_data = np . sum ( pixel_data , <NUM_LIT:2> ) / pixel_data . shape [ <NUM_LIT:2> ] <EOL> colormap_name = self . colormap . value <EOL> if colormap_name == cps . DEFAULT : <EOL> colormap_name = cpprefs . get_default_colormap ( ) <EOL> colormap = matplotlib . cm . get_cmap ( colormap_name ) <EOL> values = workspace . display_data . values <EOL> vmask = workspace . display_data . mask <EOL> colors = np . ones ( ( len ( vmask ) + <NUM_LIT:1> , <NUM_LIT:4> ) ) <EOL> colors [ <NUM_LIT:1> : ] [ ~ vmask , : <NUM_LIT:3> ] = <NUM_LIT:1> <EOL> sm = matplotlib . cm . ScalarMappable ( cmap = colormap ) <EOL> if self . color_map_scale_choice == CMS_MANUAL : <EOL> sm . set_clim ( self . color_map_scale . min , <EOL> self . color_map_scale . max ) <EOL> sm . set_array ( values ) <EOL> colors [ <NUM_LIT:1> : ] [ vmask , : ] = sm . to_rgba ( values ) <EOL> img = colors [ labels , : <NUM_LIT:3> ] * pixel_data [ : , : , np . newaxis ] <EOL> imshow_fn ( img ) <EOL> assert isinstance ( axes , matplotlib . axes . 
Axes ) <EOL> figure = axes . get_figure ( ) <EOL> assert isinstance ( figure , matplotlib . figure . Figure ) <EOL> figure . colorbar ( sm , ax = axes ) <EOL> else : <EOL> imshow_fn ( workspace . display_data . pixel_data ) <EOL> for x , y , value in zip ( workspace . display_data . x , <EOL> workspace . display_data . y , <EOL> workspace . display_data . values ) : <EOL> try : <EOL> fvalue = float ( value ) <EOL> svalue = "<STR_LIT>" % ( self . decimals . value , value ) <EOL> except : <EOL> svalue = str ( value ) <EOL> text = matplotlib . text . Text ( x = x , y = y , text = svalue , <EOL> size = self . font_size . value , <EOL> color = self . text_color . value , <EOL> verticalalignment = '<STR_LIT>' , <EOL> horizontalalignment = '<STR_LIT>' ) <EOL> axes . add_artist ( text ) <EOL> def upgrade_settings ( self , setting_values , variable_revision_number , <EOL> module_name , from_matlab ) : <EOL> if from_matlab and ( variable_revision_number == <NUM_LIT:2> ) : <EOL> object_name , category , feature_nbr , image_name , size_scale , display_image , data_image , dpi_to_save , saved_image_contents = setting_values <EOL> objects_or_image = ( OI_IMAGE if object_name == cpmeas . IMAGE <EOL> else OI_OBJECTS ) <EOL> measurement = '<STR_LIT:_>' . 
join ( ( category , feature_nbr , image_name , size_scale ) ) <EOL> setting_values = [ <EOL> objects_or_image , object_name , measurement , display_image , <EOL> "<STR_LIT>" , data_image , dpi_to_save , saved_image_contents ] <EOL> from_matlab = False <EOL> variable_revision_number = <NUM_LIT:1> <EOL> if variable_revision_number == <NUM_LIT:1> : <EOL> objects_or_image , objects_name , measurement , image_name , text_color , display_image , dpi , saved_image_contents = setting_values <EOL> setting_values = [ objects_or_image , objects_name , measurement , <EOL> image_name , text_color , display_image , <EOL> <NUM_LIT:10> , <NUM_LIT:2> , saved_image_contents ] <EOL> variable_revision_number = <NUM_LIT:2> <EOL> if variable_revision_number == <NUM_LIT:2> : <EOL> '''<STR_LIT>''' <EOL> setting_values = setting_values + [ "<STR_LIT:0>" ] <EOL> variable_revision_number = <NUM_LIT:3> <EOL> if variable_revision_number == <NUM_LIT:3> : <EOL> setting_values = setting_values + [ <EOL> CT_TEXT , cpprefs . get_default_colormap ( ) ] <EOL> variable_revision_number = <NUM_LIT:4> <EOL> if variable_revision_number == <NUM_LIT:4> : <EOL> setting_values = setting_values + [ cps . YES ] <EOL> variable_revision_number = <NUM_LIT:5> <EOL> if variable_revision_number == <NUM_LIT:5> : <EOL> setting_values = setting_values + [ <EOL> CMS_USE_MEASUREMENT_RANGE , "<STR_LIT>" ] <EOL> variable_revision_number = <NUM_LIT:6> <EOL> return setting_values , variable_revision_number , from_matlab <EOL> if __name__ == "<STR_LIT:__main__>" : <EOL> '''<STR_LIT>''' <EOL> import wx <EOL> from cellprofiler . gui . datatoolframe import DataToolFrame <EOL> app = wx . PySimpleApp ( ) <EOL> tool_name = '<STR_LIT>' <EOL> dlg = wx . FileDialog ( None , "<STR_LIT>" % <EOL> tool_name , wildcard = "<STR_LIT>" , <EOL> style = ( wx . FD_OPEN | wx . FILE_MUST_EXIST ) ) <EOL> if dlg . ShowModal ( ) == wx . ID_OK : <EOL> data_tool_frame = DataToolFrame ( None , module_name = tool_name , measurements_file_name = dlg . 
Path ) <EOL> data_tool_frame . Show ( ) <EOL> app . MainLoop ( ) </s>
<s> import cellprofiler . icons <EOL> from cellprofiler . gui . help import PROTIP_RECOMEND_ICON , PROTIP_AVOID_ICON , TECH_NOTE_ICON , IMAGES_FILELIST_BLANK , IMAGES_FILELIST_FILLED <EOL> __doc__ = """<STR_LIT>""" % globals ( ) <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . pipeline as cpp <EOL> import cellprofiler . preferences as cpprefs <EOL> import cellprofiler . settings as cps <EOL> from cellprofiler . settings import YES , NO <EOL> import cellprofiler . workspace as cpw <EOL> import cellprofiler . utilities . walk_in_background as W <EOL> import javabridge as J <EOL> import os <EOL> import sys <EOL> import urllib <EOL> import uuid <EOL> from . loadimages import pathname2url , SUPPORTED_IMAGE_EXTENSIONS <EOL> from . loadimages import SUPPORTED_MOVIE_EXTENSIONS <EOL> from cellprofiler . utilities . hdf5_dict import HDF5FileList <EOL> from cellprofiler . gui . help import FILTER_RULES_BUTTONS_HELP <EOL> FILTER_CHOICE_NONE = "<STR_LIT>" <EOL> FILTER_CHOICE_IMAGES = "<STR_LIT>" <EOL> FILTER_CHOICE_CUSTOM = "<STR_LIT>" <EOL> FILTER_CHOICE_ALL = [ FILTER_CHOICE_NONE , FILTER_CHOICE_IMAGES , <EOL> FILTER_CHOICE_CUSTOM ] <EOL> FILTER_DEFAULT = '<STR_LIT>' <EOL> class Images ( cpm . CPModule ) : <EOL> variable_revision_number = <NUM_LIT:2> <EOL> module_name = "<STR_LIT>" <EOL> category = "<STR_LIT>" <EOL> MI_SHOW_IMAGE = "<STR_LIT>" <EOL> MI_REMOVE = cps . FileCollectionDisplay . DeleteMenuItem ( "<STR_LIT>" ) <EOL> MI_REFRESH = "<STR_LIT>" <EOL> def create_settings ( self ) : <EOL> self . workspace = None <EOL> module_explanation = [ <EOL> "<STR_LIT>" % self . module_name , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ] <EOL> self . set_notes ( [ "<STR_LIT:U+0020>" . join ( module_explanation ) ] ) <EOL> self . path_list_display = cps . PathListDisplay ( ) <EOL> predicates = [ FilePredicate ( ) , <EOL> DirectoryPredicate ( ) , <EOL> ExtensionPredicate ( ) ] <EOL> self . filter_choice = cps . 
Choice ( <EOL> "<STR_LIT>" , FILTER_CHOICE_ALL , value = FILTER_CHOICE_IMAGES , <EOL> doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . filter = cps . Filter ( "<STR_LIT>" , predicates , <EOL> FILTER_DEFAULT , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . update_button = cps . PathListRefreshButton ( <EOL> "<STR_LIT>" , "<STR_LIT>" , doc = """<STR_LIT>""" ) <EOL> @ staticmethod <EOL> def modpath_to_url ( modpath ) : <EOL> if modpath [ <NUM_LIT:0> ] in ( "<STR_LIT:http>" , "<STR_LIT>" , "<STR_LIT>" ) : <EOL> if len ( modpath ) == <NUM_LIT:1> : <EOL> return modpath [ <NUM_LIT:0> ] + "<STR_LIT::>" <EOL> elif len ( modpath ) == <NUM_LIT:2> : <EOL> return modpath [ <NUM_LIT:0> ] + "<STR_LIT::>" + modpath [ <NUM_LIT:1> ] <EOL> else : <EOL> return modpath [ <NUM_LIT:0> ] + "<STR_LIT::>" + modpath [ <NUM_LIT:1> ] + "<STR_LIT:/>" + "<STR_LIT:/>" . join ( <EOL> [ urllib . quote ( part ) for part in modpath [ <NUM_LIT:2> : ] ] ) <EOL> path = os . path . join ( * modpath ) <EOL> return pathname2url ( path ) <EOL> @ staticmethod <EOL> def url_to_modpath ( url ) : <EOL> if not url . lower ( ) . startswith ( "<STR_LIT>" ) : <EOL> schema , rest = HDF5FileList . split_url ( url ) <EOL> return [ schema ] + rest [ <NUM_LIT:0> : <NUM_LIT:1> ] + [ urllib . unquote ( part ) for part in rest [ <NUM_LIT:1> : ] ] <EOL> path = urllib . url2pathname ( url [ <NUM_LIT:5> : ] ) <EOL> parts = [ ] <EOL> while True : <EOL> new_path , part = os . path . split ( path ) <EOL> if len ( new_path ) == <NUM_LIT:0> or len ( part ) == <NUM_LIT:0> : <EOL> parts . insert ( <NUM_LIT:0> , path ) <EOL> break <EOL> parts . insert ( <NUM_LIT:0> , part ) <EOL> path = new_path <EOL> return parts <EOL> @ classmethod <EOL> def make_modpath_from_path ( cls , path ) : <EOL> result = [ ] <EOL> while True : <EOL> new_path , part = os . path . split ( path ) <EOL> if len ( new_path ) == <NUM_LIT:0> or len ( part ) == <NUM_LIT:0> : <EOL> return [ path ] + result <EOL> result . 
insert ( <NUM_LIT:0> , part ) <EOL> path = new_path <EOL> def settings ( self ) : <EOL> return [ self . path_list_display , self . filter_choice , self . filter ] <EOL> def visible_settings ( self ) : <EOL> result = [ self . path_list_display , self . filter_choice ] <EOL> if self . filter_choice == FILTER_CHOICE_CUSTOM : <EOL> result += [ self . filter , self . update_button ] <EOL> self . path_list_display . using_filter = True <EOL> elif self . filter_choice == FILTER_CHOICE_IMAGES : <EOL> result += [ self . update_button ] <EOL> self . path_list_display . using_filter = True <EOL> else : <EOL> self . path_list_display . using_filter = False <EOL> return result <EOL> def change_causes_prepare_run ( self , setting ) : <EOL> '''<STR_LIT>''' <EOL> return setting in self . settings ( ) <EOL> @ classmethod <EOL> def is_input_module ( self ) : <EOL> return True <EOL> def prepare_run ( self , workspace ) : <EOL> '''<STR_LIT>''' <EOL> if workspace . pipeline . in_batch_mode ( ) : <EOL> return True <EOL> file_list = workspace . pipeline . file_list <EOL> if self . filter_choice != FILTER_CHOICE_NONE : <EOL> if self . filter_choice == FILTER_CHOICE_IMAGES : <EOL> expression = FILTER_DEFAULT <EOL> else : <EOL> expression = self . filter . value_text <EOL> env = J . get_env ( ) <EOL> ifcls = J . class_for_name ( "<STR_LIT>" ) <EOL> scls = env . find_class ( "<STR_LIT>" ) <EOL> iffilter = J . make_instance ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> expression , ifcls ) <EOL> file_array = env . make_object_array ( len ( file_list ) , scls ) <EOL> for i , url in enumerate ( file_list ) : <EOL> if isinstance ( url , unicode ) : <EOL> ourl = env . new_string ( url ) <EOL> else : <EOL> ourl = env . new_string_utf ( url ) <EOL> env . set_object_array_element ( file_array , i , ourl ) <EOL> passes_filter = J . call ( <EOL> iffilter , "<STR_LIT>" , <EOL> "<STR_LIT>" , file_array ) <EOL> if isinstance ( passes_filter , J . JB_Object ) : <EOL> passes_filter = J . get_env ( ) . 
get_boolean_array_elements ( <EOL> passes_filter ) <EOL> file_list = [ f for f , passes in zip ( file_list , passes_filter ) <EOL> if passes ] <EOL> workspace . pipeline . set_filtered_file_list ( file_list , self ) <EOL> return True <EOL> def run ( self , workspace ) : <EOL> pass <EOL> def upgrade_settings ( self , setting_values , variable_revision_number , <EOL> module_name , from_matlab ) : <EOL> '''<STR_LIT>''' <EOL> if variable_revision_number == <NUM_LIT:1> : <EOL> filter_choice = FILTER_CHOICE_CUSTOM if setting_values [ <NUM_LIT:1> ] == cps . YES else FILTER_CHOICE_NONE <EOL> setting_values = setting_values [ : <NUM_LIT:1> ] + [ filter_choice ] + setting_values [ <NUM_LIT:2> : ] <EOL> variable_revision_number = <NUM_LIT:2> <EOL> return setting_values , variable_revision_number , from_matlab <EOL> class DirectoryPredicate ( cps . Filter . FilterPredicate ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> subpredicates = ( <EOL> cps . Filter . CONTAINS_PREDICATE , <EOL> cps . Filter . CONTAINS_REGEXP_PREDICATE , <EOL> cps . Filter . STARTS_WITH_PREDICATE , <EOL> cps . Filter . ENDSWITH_PREDICATE , <EOL> cps . Filter . EQ_PREDICATE ) <EOL> predicates = [ cps . Filter . DoesPredicate ( subpredicates ) , <EOL> cps . Filter . DoesNotPredicate ( subpredicates ) ] <EOL> cps . Filter . FilterPredicate . __init__ ( self , <EOL> '<STR_LIT>' , "<STR_LIT>" , self . fn_filter , <EOL> predicates , doc = "<STR_LIT>" ) <EOL> def fn_filter ( self , ( node_type , modpath , module ) , * args ) : <EOL> '''<STR_LIT>''' <EOL> if isinstance ( modpath [ - <NUM_LIT:1> ] , tuple ) and len ( modpath [ - <NUM_LIT:1> ] ) == <NUM_LIT:3> : <EOL> path = os . path . join ( * modpath [ : - <NUM_LIT:2> ] ) <EOL> else : <EOL> path = os . path . join ( * modpath [ : - <NUM_LIT:1> ] ) <EOL> return args [ <NUM_LIT:0> ] ( path , * args [ <NUM_LIT:1> : ] ) <EOL> def test_valid ( self , pipeline , * args ) : <EOL> self ( ( cps . FileCollectionDisplay . 
NODE_FILE , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , None ) , * args ) <EOL> class FilePredicate ( cps . Filter . FilterPredicate ) : <EOL> '''<STR_LIT>''' <EOL> def __init__ ( self ) : <EOL> subpredicates = ( <EOL> cps . Filter . CONTAINS_PREDICATE , <EOL> cps . Filter . CONTAINS_REGEXP_PREDICATE , <EOL> cps . Filter . STARTS_WITH_PREDICATE , <EOL> cps . Filter . ENDSWITH_PREDICATE , <EOL> cps . Filter . EQ_PREDICATE ) <EOL> predicates = [ cps . Filter . DoesPredicate ( subpredicates ) , <EOL> cps . Filter . DoesNotPredicate ( subpredicates ) ] <EOL> cps . Filter . FilterPredicate . __init__ ( self , <EOL> '<STR_LIT:file>' , "<STR_LIT>" , self . fn_filter , predicates , <EOL> doc = "<STR_LIT>" ) <EOL> def fn_filter ( self , ( node_type , modpath , module ) , * args ) : <EOL> '''<STR_LIT>''' <EOL> if node_type == cps . FileCollectionDisplay . NODE_DIRECTORY : <EOL> return None <EOL> elif isinstance ( modpath [ - <NUM_LIT:1> ] , tuple ) and len ( modpath [ - <NUM_LIT:1> ] ) == <NUM_LIT:3> : <EOL> filename = modpath [ - <NUM_LIT:2> ] <EOL> else : <EOL> filename = modpath [ - <NUM_LIT:1> ] <EOL> return args [ <NUM_LIT:0> ] ( filename , * args [ <NUM_LIT:1> : ] ) <EOL> def test_valid ( self , pipeline , * args ) : <EOL> self ( ( cps . FileCollectionDisplay . NODE_FILE , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , None ) , * args ) <EOL> def is_image_extension ( suffix ) : <EOL> '''<STR_LIT>''' <EOL> extensions = J . get_collection_wrapper ( <EOL> J . static_call ( "<STR_LIT>" , <EOL> "<STR_LIT>" , "<STR_LIT>" ) ) <EOL> return extensions . contains ( suffix . lower ( ) ) <EOL> class ExtensionPredicate ( cps . Filter . FilterPredicate ) : <EOL> '''<STR_LIT>''' <EOL> IS_TIF_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , '<STR_LIT>' , <EOL> lambda x : x . lower ( ) in ( "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ) , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> IS_JPEG_PREDICATE = cps . Filter . 
FilterPredicate ( <EOL> "<STR_LIT>" , '<STR_LIT>' , <EOL> lambda x : x . lower ( ) in ( "<STR_LIT>" , "<STR_LIT>" ) , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> IS_PNG_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , '<STR_LIT>' , <EOL> lambda x : x . lower ( ) == "<STR_LIT>" , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> IS_IMAGE_PREDICATE = cps . Filter . FilterPredicate ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> is_image_extension , [ ] , <EOL> '<STR_LIT>' ) <EOL> IS_FLEX_PREDICATE = cps . Filter . FilterPredicate ( <EOL> '<STR_LIT>' , '<STR_LIT>' , <EOL> lambda x : x . lower ( ) == "<STR_LIT>" , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> IS_MOVIE_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , '<STR_LIT>' , <EOL> lambda x : x . lower ( ) in ( "<STR_LIT>" , "<STR_LIT>" ) , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> def __init__ ( self ) : <EOL> subpredicates = ( <EOL> self . IS_TIF_PREDICATE , <EOL> self . IS_JPEG_PREDICATE , <EOL> self . IS_PNG_PREDICATE , <EOL> self . IS_IMAGE_PREDICATE , <EOL> self . IS_FLEX_PREDICATE , <EOL> self . IS_MOVIE_PREDICATE ) <EOL> predicates = [ cps . Filter . DoesPredicate ( subpredicates , "<STR_LIT>" ) , <EOL> cps . Filter . DoesNotPredicate ( subpredicates , "<STR_LIT>" ) ] <EOL> cps . Filter . FilterPredicate . __init__ ( self , <EOL> '<STR_LIT>' , "<STR_LIT>" , self . fn_filter , predicates , <EOL> doc = "<STR_LIT>" ) <EOL> def fn_filter ( self , ( node_type , modpath , module ) , * args ) : <EOL> '''<STR_LIT>''' <EOL> if node_type == cps . FileCollectionDisplay . NODE_DIRECTORY : <EOL> return None <EOL> elif isinstance ( modpath [ - <NUM_LIT:1> ] , tuple ) and len ( modpath [ - <NUM_LIT:1> ] ) == <NUM_LIT:3> : <EOL> filename = modpath [ - <NUM_LIT:2> ] <EOL> else : <EOL> filename = modpath [ - <NUM_LIT:1> ] <EOL> exts = [ ] <EOL> while True : <EOL> filename , ext = os . path . splitext ( filename ) <EOL> if len ( filename ) == <NUM_LIT:0> or len ( ext ) == <NUM_LIT:0> : <EOL> return False <EOL> exts . 
insert ( <NUM_LIT:0> , ext [ <NUM_LIT:1> : ] ) <EOL> ext = '<STR_LIT:.>' . join ( exts ) <EOL> if args [ <NUM_LIT:0> ] ( ext , * args [ <NUM_LIT:1> : ] ) : <EOL> return True <EOL> def test_valid ( self , pipeline , * args ) : <EOL> self ( ( cps . FileCollectionDisplay . NODE_FILE , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , None ) , * args ) <EOL> class ImagePredicate ( cps . Filter . FilterPredicate ) : <EOL> '''<STR_LIT>''' <EOL> IS_COLOR_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> lambda x : ( <EOL> x . metadata . has_key ( cpp . ImagePlaneDetails . MD_COLOR_FORMAT ) and <EOL> x . metadata [ cpp . ImagePlaneDetails . MD_COLOR_FORMAT ] == <EOL> cpp . ImagePlaneDetails . MD_RGB ) , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> IS_MONOCHROME_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , "<STR_LIT>" , <EOL> lambda x : ( <EOL> x . metadata . has_key ( cpp . ImagePlaneDetails . MD_COLOR_FORMAT ) and <EOL> x . metadata [ cpp . ImagePlaneDetails . MD_COLOR_FORMAT ] == <EOL> cpp . ImagePlaneDetails . MD_MONOCHROME ) , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> @ staticmethod <EOL> def is_stack ( x ) : <EOL> if ( x . metadata . has_key ( cpp . ImagePlaneDetails . MD_SIZE_T ) and <EOL> x . metadata [ cpp . ImagePlaneDetails . MD_SIZE_T ] > <NUM_LIT:1> ) : <EOL> return True <EOL> if ( x . metadata . has_key ( cpp . ImagePlaneDetails . MD_SIZE_Z ) and <EOL> x . metadata [ cpp . ImagePlaneDetails . MD_SIZE_Z ] > <NUM_LIT:1> ) : <EOL> return True <EOL> return False <EOL> IS_STACK_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , "<STR_LIT>" , lambda x : ImagePredicate . is_stack ( x ) , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> IS_STACK_FRAME_PREDICATE = cps . Filter . FilterPredicate ( <EOL> "<STR_LIT>" , "<STR_LIT>" , lambda x : x . index is not None , [ ] , <EOL> doc = "<STR_LIT>" ) <EOL> def __init__ ( self ) : <EOL> subpredicates = ( self . IS_COLOR_PREDICATE , <EOL> self . IS_MONOCHROME_PREDICATE , <EOL> self . 
IS_STACK_PREDICATE , <EOL> self . IS_STACK_FRAME_PREDICATE ) <EOL> predicates = [ pred_class ( subpredicates , text ) <EOL> for pred_class , text in ( <EOL> ( cps . Filter . DoesPredicate , "<STR_LIT>" ) , <EOL> ( cps . Filter . DoesNotPredicate , "<STR_LIT>" ) ) ] <EOL> cps . Filter . FilterPredicate . __init__ ( self , <EOL> '<STR_LIT:image>' , "<STR_LIT>" , self . fn_filter , <EOL> predicates , <EOL> doc = "<STR_LIT>" ) <EOL> def fn_filter ( self , ( node_type , modpath , module ) , * args ) : <EOL> if node_type == cps . FileCollectionDisplay . NODE_DIRECTORY : <EOL> return None <EOL> ipd = module . get_image_plane_details ( modpath ) <EOL> if ipd is None : <EOL> return None <EOL> return args [ <NUM_LIT:0> ] ( ipd , * args [ <NUM_LIT:1> : ] ) <EOL> class FakeModule ( cpm . CPModule ) : <EOL> '''<STR_LIT>''' <EOL> def get_image_plane_details ( self , modpath ) : <EOL> url = Images . modpath_to_url ( modpath ) <EOL> return cpp . ImagePlaneDetails ( url , None , None , None ) <EOL> def test_valid ( self , pipeline , * args ) : <EOL> self ( ( cps . FileCollectionDisplay . NODE_FILE , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] , self . FakeModule ( ) ) , * args ) </s>
<s> '''<STR_LIT>''' <EOL> import centrosome . outline <EOL> import numpy as np <EOL> from scipy . ndimage import distance_transform_edt <EOL> import cellprofiler . cpimage as cpi <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . settings as cps <EOL> from cellprofiler . settings import YES , NO <EOL> WANTS_COLOR = "<STR_LIT>" <EOL> WANTS_GRAYSCALE = "<STR_LIT>" <EOL> MAX_IMAGE = "<STR_LIT>" <EOL> MAX_POSSIBLE = "<STR_LIT>" <EOL> COLORS = { "<STR_LIT>" : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:1> , <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:0> , <NUM_LIT:1> , <NUM_LIT:0> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:0> , <NUM_LIT:0> , <NUM_LIT:1> ) , <EOL> "<STR_LIT>" : ( <NUM_LIT:1> , <NUM_LIT:1> , <NUM_LIT:0> ) } <EOL> COLOR_ORDER = [ "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" , "<STR_LIT>" ] <EOL> FROM_IMAGES = "<STR_LIT>" <EOL> FROM_OBJECTS = "<STR_LIT>" <EOL> NUM_FIXED_SETTINGS_V1 = <NUM_LIT:5> <EOL> NUM_FIXED_SETTINGS_V2 = <NUM_LIT:6> <EOL> NUM_FIXED_SETTINGS_V3 = <NUM_LIT:6> <EOL> NUM_FIXED_SETTINGS = <NUM_LIT:6> <EOL> NUM_OUTLINE_SETTINGS_V2 = <NUM_LIT:2> <EOL> NUM_OUTLINE_SETTINGS_V3 = <NUM_LIT:4> <EOL> NUM_OUTLINE_SETTINGS = <NUM_LIT:4> <EOL> class OverlayOutlines ( cpm . CPModule ) : <EOL> module_name = '<STR_LIT>' <EOL> variable_revision_number = <NUM_LIT:3> <EOL> category = "<STR_LIT>" <EOL> def create_settings ( self ) : <EOL> self . blank_image = cps . Binary ( <EOL> "<STR_LIT>" , <EOL> False , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . image_name = cps . ImageNameSubscriber ( <EOL> "<STR_LIT>" , cps . NONE , doc = """<STR_LIT>""" ) <EOL> self . line_width = cps . Float ( <EOL> "<STR_LIT>" , "<STR_LIT:1>" , doc = """<STR_LIT>""" ) <EOL> self . output_image_name = cps . ImageNameProvider ( <EOL> "<STR_LIT>" , "<STR_LIT>" , doc = """<STR_LIT>""" ) <EOL> self . wants_color = cps . 
Choice ( <EOL> "<STR_LIT>" , <EOL> [ WANTS_COLOR , WANTS_GRAYSCALE ] , doc = """<STR_LIT>""" ) <EOL> self . spacer = cps . Divider ( line = False ) <EOL> self . max_type = cps . Choice ( <EOL> "<STR_LIT>" , <EOL> [ MAX_IMAGE , MAX_POSSIBLE ] , doc = """<STR_LIT>""" % globals ( ) ) <EOL> self . outlines = [ ] <EOL> self . add_outline ( can_remove = False ) <EOL> self . add_outline_button = cps . DoSomething ( "<STR_LIT>" , "<STR_LIT>" , self . add_outline ) <EOL> def add_outline ( self , can_remove = True ) : <EOL> group = cps . SettingsGroup ( ) <EOL> if can_remove : <EOL> group . append ( "<STR_LIT>" , cps . Divider ( line = False ) ) <EOL> group . append ( "<STR_LIT>" , cps . Choice ( <EOL> "<STR_LIT>" , <EOL> [ FROM_OBJECTS , FROM_IMAGES ] , doc = """<STR_LIT>""" % globals ( ) ) ) <EOL> group . append ( "<STR_LIT>" , cps . ObjectNameSubscriber ( <EOL> "<STR_LIT>" , cps . NONE , <EOL> doc = """<STR_LIT>""" ) ) <EOL> group . append ( "<STR_LIT>" , cps . OutlineNameSubscriber ( <EOL> "<STR_LIT>" , <EOL> cps . NONE , doc = """<STR_LIT>""" ) ) <EOL> default_color = ( COLOR_ORDER [ len ( self . outlines ) ] <EOL> if len ( self . outlines ) < len ( COLOR_ORDER ) <EOL> else COLOR_ORDER [ <NUM_LIT:0> ] ) <EOL> group . append ( "<STR_LIT>" , cps . Color ( <EOL> "<STR_LIT>" , default_color ) ) <EOL> if can_remove : <EOL> group . append ( "<STR_LIT>" , cps . RemoveSettingButton ( "<STR_LIT>" , "<STR_LIT>" , self . outlines , group ) ) <EOL> self . outlines . append ( group ) <EOL> def prepare_settings ( self , setting_values ) : <EOL> num_settings = ( len ( setting_values ) - NUM_FIXED_SETTINGS ) / NUM_OUTLINE_SETTINGS <EOL> if len ( self . outlines ) == <NUM_LIT:0> : <EOL> self . add_outline ( False ) <EOL> elif len ( self . outlines ) > num_settings : <EOL> del self . outlines [ num_settings : ] <EOL> else : <EOL> for i in range ( len ( self . outlines ) , num_settings ) : <EOL> self . add_outline ( ) <EOL> def settings ( self ) : <EOL> result = [ self . 
blank_image , self . image_name , self . output_image_name , <EOL> self . wants_color , self . max_type , self . line_width ] <EOL> for outline in self . outlines : <EOL> result += [ outline . outline_name , outline . color , <EOL> outline . outline_choice , outline . objects_name ] <EOL> return result <EOL> def visible_settings ( self ) : <EOL> result = [ self . blank_image ] <EOL> if not self . blank_image . value : <EOL> result += [ self . image_name ] <EOL> result += [ self . output_image_name , self . wants_color , <EOL> self . line_width , self . spacer ] <EOL> if ( self . wants_color . value == WANTS_GRAYSCALE and not <EOL> self . blank_image . value ) : <EOL> result += [ self . max_type ] <EOL> for outline in self . outlines : <EOL> result += [ outline . outline_choice ] <EOL> if self . wants_color . value == WANTS_COLOR : <EOL> result += [ outline . color ] <EOL> if outline . outline_choice == FROM_IMAGES : <EOL> result += [ outline . outline_name ] <EOL> else : <EOL> result += [ outline . objects_name ] <EOL> if hasattr ( outline , "<STR_LIT>" ) : <EOL> result += [ outline . remover ] <EOL> result += [ self . add_outline_button ] <EOL> return result <EOL> def run ( self , workspace ) : <EOL> if self . wants_color . value == WANTS_COLOR : <EOL> pixel_data = self . run_color ( workspace ) <EOL> else : <EOL> pixel_data = self . run_bw ( workspace ) <EOL> if self . blank_image . value : <EOL> output_image = cpi . Image ( pixel_data ) <EOL> workspace . image_set . add ( self . output_image_name . value , output_image ) <EOL> else : <EOL> image = workspace . image_set . get_image ( self . image_name . value ) <EOL> output_image = cpi . Image ( pixel_data , parent_image = image ) <EOL> workspace . image_set . add ( self . output_image_name . value , output_image ) <EOL> workspace . display_data . image_pixel_data = image . pixel_data <EOL> if self . __can_composite_objects ( ) and self . show_window : <EOL> workspace . display_data . 
labels = { } <EOL> for outline in self . outlines : <EOL> name = outline . objects_name . value <EOL> objects = workspace . object_set . get_objects ( name ) <EOL> workspace . display_data . labels [ name ] = [ labels for labels , indexes in objects . get_labels ( ) ] <EOL> workspace . display_data . pixel_data = pixel_data <EOL> def __can_composite_objects ( self ) : <EOL> '''<STR_LIT>''' <EOL> for outline in self . outlines : <EOL> if outline . outline_choice == FROM_IMAGES : <EOL> return False <EOL> return True <EOL> def display ( self , workspace , figure ) : <EOL> from cellprofiler . gui . cpfigure import CPLD_LABELS , CPLD_NAME , CPLD_OUTLINE_COLOR , CPLD_MODE , CPLDM_OUTLINES , CPLDM_ALPHA , CPLDM_NONE , CPLD_LINE_WIDTH , CPLD_ALPHA_COLORMAP , CPLD_ALPHA_VALUE <EOL> figure . set_subplots ( ( <NUM_LIT:1> , <NUM_LIT:1> ) ) <EOL> if self . __can_composite_objects ( ) : <EOL> if self . blank_image : <EOL> pixel_data = np . zeros ( workspace . display_data . pixel_data . shape ) <EOL> else : <EOL> pixel_data = workspace . display_data . image_pixel_data <EOL> cplabels = [ ] <EOL> ldict = workspace . display_data . labels <EOL> for outline in self . outlines : <EOL> name = outline . objects_name . value <EOL> if self . wants_color . value == WANTS_COLOR : <EOL> color = np . array ( outline . color . to_rgb ( ) , float ) <EOL> else : <EOL> color = np . ones ( <NUM_LIT:3> ) * <NUM_LIT> <EOL> d = { CPLD_NAME : name , <EOL> CPLD_LABELS : ldict [ name ] , <EOL> CPLD_OUTLINE_COLOR : color , <EOL> CPLD_MODE : CPLDM_OUTLINES , <EOL> CPLD_LINE_WIDTH : self . line_width . value } <EOL> cplabels . append ( d ) <EOL> else : <EOL> pixel_data = workspace . display_data . pixel_data <EOL> cplabels = None <EOL> if self . blank_image . value : <EOL> if self . wants_color . value == WANTS_COLOR : <EOL> figure . subplot_imshow ( <NUM_LIT:0> , <NUM_LIT:0> , pixel_data , <EOL> self . output_image_name . value , <EOL> cplabels = cplabels ) <EOL> else : <EOL> figure . 
subplot_imshow_bw ( <NUM_LIT:0> , <NUM_LIT:0> , pixel_data , <EOL> self . output_image_name . value , <EOL> cplabels = cplabels ) <EOL> else : <EOL> figure . set_subplots ( ( <NUM_LIT:2> , <NUM_LIT:1> ) ) <EOL> image_pixel_data = workspace . display_data . image_pixel_data <EOL> if image_pixel_data . ndim == <NUM_LIT:2> : <EOL> figure . subplot_imshow_bw ( <NUM_LIT:0> , <NUM_LIT:0> , image_pixel_data , <EOL> "<STR_LIT>" % <EOL> self . image_name . value ) <EOL> else : <EOL> figure . subplot_imshow_color ( <NUM_LIT:0> , <NUM_LIT:0> , image_pixel_data , <EOL> "<STR_LIT>" % <EOL> self . image_name . value ) <EOL> if self . wants_color . value == WANTS_COLOR : <EOL> if cplabels is not None and pixel_data . ndim == <NUM_LIT:2> : <EOL> fn = figure . subplot_imshow_grayscale <EOL> else : <EOL> fn = figure . subplot_imshow <EOL> fn ( <NUM_LIT:1> , <NUM_LIT:0> , pixel_data , <EOL> self . output_image_name . value , <EOL> sharexy = figure . subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> cplabels = cplabels ) <EOL> else : <EOL> figure . subplot_imshow_bw ( <NUM_LIT:1> , <NUM_LIT:0> , pixel_data , <EOL> self . output_image_name . value , <EOL> sharexy = figure . subplot ( <NUM_LIT:0> , <NUM_LIT:0> ) , <EOL> cplabels = cplabels ) <EOL> def run_bw ( self , workspace ) : <EOL> image_set = workspace . image_set <EOL> if self . blank_image . value : <EOL> shape = self . get_outline ( workspace , self . outlines [ <NUM_LIT:0> ] ) . shape [ : <NUM_LIT:2> ] <EOL> pixel_data = np . zeros ( shape ) <EOL> maximum = <NUM_LIT:1> <EOL> else : <EOL> image = image_set . get_image ( self . image_name . value , <EOL> must_be_grayscale = True ) <EOL> pixel_data = image . pixel_data <EOL> maximum = <NUM_LIT:1> if self . max_type == MAX_POSSIBLE else np . max ( pixel_data ) <EOL> pixel_data = pixel_data . copy ( ) <EOL> for outline in self . outlines : <EOL> mask = self . get_outline ( workspace , outline ) <EOL> i_max = min ( mask . shape [ <NUM_LIT:0> ] , pixel_data . 
shape [ <NUM_LIT:0> ] ) <EOL> j_max = min ( mask . shape [ <NUM_LIT:1> ] , pixel_data . shape [ <NUM_LIT:1> ] ) <EOL> mask = mask [ : i_max , : j_max ] <EOL> pixel_data [ : i_max , : j_max ] [ mask ] = maximum <EOL> return pixel_data <EOL> def run_color ( self , workspace ) : <EOL> image_set = workspace . image_set <EOL> if self . blank_image . value : <EOL> pixel_data = None <EOL> pdmax = <NUM_LIT:1> <EOL> else : <EOL> image = image_set . get_image ( self . image_name . value ) <EOL> pixel_data = image . pixel_data <EOL> if pixel_data . ndim == <NUM_LIT:2> : <EOL> pixel_data = np . dstack ( ( pixel_data , pixel_data , pixel_data ) ) <EOL> else : <EOL> pixel_data = pixel_data . copy ( ) <EOL> pdmax = float ( np . max ( pixel_data ) ) <EOL> if pdmax <= <NUM_LIT:0> : <EOL> pdmax = <NUM_LIT:1> <EOL> for outline in self . outlines : <EOL> outline_img = self . get_outline ( workspace , outline ) <EOL> if pixel_data is None : <EOL> pixel_data = np . zeros ( list ( outline_img . shape [ : <NUM_LIT:2> ] ) + [ <NUM_LIT:3> ] , np . float32 ) <EOL> i_max = min ( outline_img . shape [ <NUM_LIT:0> ] , pixel_data . shape [ <NUM_LIT:0> ] ) <EOL> j_max = min ( outline_img . shape [ <NUM_LIT:1> ] , pixel_data . shape [ <NUM_LIT:1> ] ) <EOL> outline_img = outline_img [ : i_max , : j_max , : ] <EOL> window = pixel_data [ : i_max , : j_max , : ] <EOL> alpha = outline_img [ : , : , <NUM_LIT:3> ] <EOL> outline_img [ : , : , : <NUM_LIT:3> ] *= pdmax <EOL> outline_img [ : , : , : <NUM_LIT:3> ] *= alpha [ : , : , np . newaxis ] <EOL> window *= ( <NUM_LIT:1> - alpha ) [ : , : , np . newaxis ] <EOL> window += outline_img [ : , : , : <NUM_LIT:3> ] <EOL> return pixel_data <EOL> def get_outline ( self , workspace , outline ) : <EOL> '''<STR_LIT>''' <EOL> if outline . outline_choice == FROM_IMAGES : <EOL> name = outline . outline_name . value <EOL> pixel_data = workspace . image_set . get_image ( name ) . pixel_data <EOL> else : <EOL> name = outline . objects_name . 
value <EOL> objects = workspace . object_set . get_objects ( name ) <EOL> pixel_data = np . zeros ( objects . shape , bool ) <EOL> for labels , indexes in objects . get_labels ( ) : <EOL> pixel_data = pixel_data | centrosome . outline . outline ( labels ) <EOL> if self . wants_color == WANTS_GRAYSCALE : <EOL> return pixel_data . astype ( bool ) <EOL> color = np . array ( outline . color . to_rgb ( ) , float ) / <NUM_LIT> <EOL> if pixel_data . ndim == <NUM_LIT:2> : <EOL> if len ( color ) == <NUM_LIT:3> : <EOL> color = np . hstack ( ( color , [ <NUM_LIT:1> ] ) ) <EOL> pixel_data = pixel_data > <NUM_LIT:0> <EOL> output_image = color [ np . newaxis , np . newaxis , : ] * pixel_data [ : , : , np . newaxis ] <EOL> else : <EOL> output_image = np . dstack ( [ pixel_data [ : , : , i ] for i in range ( <NUM_LIT:3> ) ] + <EOL> [ np . sum ( pixel_data , <NUM_LIT:2> ) > <NUM_LIT:0> ] ) <EOL> if hasattr ( np , '<STR_LIT>' ) : <EOL> output_image = output_image . astype ( np . float16 ) <EOL> if self . line_width . value > <NUM_LIT:1> : <EOL> half_line_width = float ( self . line_width . value ) / <NUM_LIT:2> <EOL> d , ( i , j ) = distance_transform_edt ( output_image [ : , : , <NUM_LIT:3> ] == <NUM_LIT:0> , <EOL> return_indices = True ) <EOL> mask = ( d > <NUM_LIT:0> ) & ( d <= half_line_width - <NUM_LIT> ) <EOL> output_image [ mask , : ] = output_image [ i [ mask ] , j [ mask ] , : ] <EOL> mask = ( ( d > max ( <NUM_LIT:0> , half_line_width - <NUM_LIT> ) ) & <EOL> ( d < half_line_width + <NUM_LIT> ) ) <EOL> d = half_line_width + <NUM_LIT> - d <EOL> output_image [ mask , : <NUM_LIT:3> ] = output_image [ i [ mask ] , j [ mask ] , : <NUM_LIT:3> ] <EOL> output_image [ mask , <NUM_LIT:3> ] = d [ mask ] <EOL> return output_image <EOL> def upgrade_settings ( self , setting_values , variable_revision_number , <EOL> module_name , from_matlab ) : <EOL> if from_matlab and variable_revision_number == <NUM_LIT:2> : <EOL> setting_values = [ cps . 
YES if setting_values [ <NUM_LIT:0> ] == "<STR_LIT>" else cps . NO , <EOL> setting_values [ <NUM_LIT:0> ] , <EOL> setting_values [ <NUM_LIT:3> ] , <EOL> WANTS_COLOR , <EOL> setting_values [ <NUM_LIT:2> ] , <EOL> setting_values [ <NUM_LIT:1> ] , <EOL> setting_values [ <NUM_LIT:4> ] ] <EOL> from_matlab = False <EOL> variable_revision_number = <NUM_LIT:1> <EOL> if ( not from_matlab ) and variable_revision_number == <NUM_LIT:1> : <EOL> setting_values = setting_values [ : NUM_FIXED_SETTINGS_V1 ] + [ "<STR_LIT:1>" ] + setting_values [ NUM_FIXED_SETTINGS_V1 : ] <EOL> variable_revision_number = <NUM_LIT:2> <EOL> if ( not from_matlab ) and variable_revision_number == <NUM_LIT:2> : <EOL> new_setting_values = setting_values [ : NUM_FIXED_SETTINGS_V2 ] <EOL> for i in range ( NUM_FIXED_SETTINGS_V2 , len ( setting_values ) , <EOL> NUM_OUTLINE_SETTINGS_V2 ) : <EOL> new_setting_values += setting_values [ i : ( i + NUM_OUTLINE_SETTINGS_V2 ) ] <EOL> new_setting_values += [ FROM_IMAGES , cps . NONE ] <EOL> setting_values = new_setting_values <EOL> variable_revision_number = <NUM_LIT:3> <EOL> return setting_values , variable_revision_number , from_matlab </s>
<s> """<STR_LIT>""" <EOL> import base64 <EOL> import sys <EOL> import unittest <EOL> import zlib <EOL> from StringIO import StringIO <EOL> import numpy as np <EOL> from cellprofiler . preferences import set_headless <EOL> set_headless ( ) <EOL> import cellprofiler . pipeline as cpp <EOL> import cellprofiler . settings as cps <EOL> import cellprofiler . cpimage as cpi <EOL> import cellprofiler . workspace as cpw <EOL> import cellprofiler . objects as cpo <EOL> import cellprofiler . measurements as cpm <EOL> import cellprofiler . modules . injectimage as inj <EOL> import cellprofiler . modules . correctilluminationcalculate as calc <EOL> INPUT_IMAGE_NAME = "<STR_LIT>" <EOL> OUTPUT_IMAGE_NAME = "<STR_LIT>" <EOL> AVERAGE_IMAGE_NAME = "<STR_LIT>" <EOL> DILATED_IMAGE_NAME = "<STR_LIT>" <EOL> class TestCorrectImage_Calculate ( unittest . TestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> '''<STR_LIT>''' <EOL> if not hasattr ( cls , "<STR_LIT>" ) : <EOL> cls . assertIn = lambda self , x , y : self . assertTrue ( x in y ) <EOL> if not hasattr ( cls , "<STR_LIT>" ) : <EOL> cls . assertNotIn = lambda self , x , y : self . assertFalse ( x in y ) <EOL> def error_callback ( self , calller , event ) : <EOL> if isinstance ( event , cpp . RunExceptionEvent ) : <EOL> self . fail ( event . error . message ) <EOL> def make_workspaces ( self , images_and_masks ) : <EOL> '''<STR_LIT>''' <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspaces = [ ] <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:1> <EOL> module . image_name . value = INPUT_IMAGE_NAME <EOL> module . illumination_image_name . value = OUTPUT_IMAGE_NAME <EOL> module . average_image_name . value = AVERAGE_IMAGE_NAME <EOL> module . dilated_image_name . value = DILATED_IMAGE_NAME <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> measurements = cpm . 
Measurements ( ) <EOL> for i , ( image , mask ) in enumerate ( images_and_masks ) : <EOL> image_set = image_set_list . get_image_set ( i ) <EOL> if mask is None : <EOL> image = cpi . Image ( image ) <EOL> else : <EOL> image = cpi . Image ( image , mask ) <EOL> image_set . add ( INPUT_IMAGE_NAME , image ) <EOL> workspace = cpw . Workspace ( <EOL> pipeline , module , image_set , cpo . ObjectSet ( ) , <EOL> measurements , image_set_list ) <EOL> workspaces . append ( workspace ) <EOL> return workspaces , module <EOL> def test_00_00_zeros ( self ) : <EOL> """<STR_LIT>""" <EOL> for image in ( np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) , np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:3> ) ) ) : <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( "<STR_LIT>" , image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = "<STR_LIT>" <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . save_average_image . value = True <EOL> module . save_dilated_image . value = True <EOL> for ea in ( calc . EA_EACH , calc . EA_ALL_ACROSS , calc . EA_ALL_FIRST ) : <EOL> module . each_or_all . value = ea <EOL> for intensity_choice in ( calc . IC_BACKGROUND , calc . IC_REGULAR ) : <EOL> module . intensity_choice . value = intensity_choice <EOL> for dilate_objects in ( True , False ) : <EOL> module . dilate_objects . value = dilate_objects <EOL> for rescale_option in ( cps . YES , cps . NO , calc . RE_MEDIAN ) : <EOL> module . rescale_option . value = rescale_option <EOL> for smoothing_method in ( calc . SM_NONE , calc . SM_FIT_POLYNOMIAL , <EOL> calc . SM_GAUSSIAN_FILTER , calc . SM_MEDIAN_FILTER , <EOL> calc . SM_TO_AVERAGE , calc . SM_SPLINES , <EOL> calc . 
SM_CONVEX_HULL ) : <EOL> module . smoothing_method . value = smoothing_method <EOL> for ow in ( calc . FI_AUTOMATIC , calc . FI_MANUALLY , <EOL> calc . FI_OBJECT_SIZE ) : <EOL> module . automatic_object_width . value = ow <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( <EOL> pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( image is not None ) <EOL> self . assertTrue ( np . all ( image . pixel_data == <NUM_LIT:0> ) , <EOL> """<STR_LIT>""" % locals ( ) ) <EOL> def test_01_01_ones_image ( self ) : <EOL> """<STR_LIT>""" <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> for image in ( np . ones ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) , np . ones ( ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:3> ) ) ) : <EOL> inj_module = inj . InjectImage ( "<STR_LIT>" , image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = "<STR_LIT>" <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . rescale_option . value = cps . YES <EOL> for ea in ( calc . EA_EACH , calc . EA_ALL_ACROSS , calc . EA_ALL_FIRST ) : <EOL> module . each_or_all . 
value = ea <EOL> for intensity_choice in ( calc . IC_BACKGROUND , calc . IC_REGULAR ) : <EOL> module . intensity_choice . value = intensity_choice <EOL> for dilate_objects in ( True , False ) : <EOL> module . dilate_objects . value = dilate_objects <EOL> for smoothing_method in ( calc . SM_NONE , calc . SM_FIT_POLYNOMIAL , <EOL> calc . SM_GAUSSIAN_FILTER , calc . SM_MEDIAN_FILTER , <EOL> calc . SM_TO_AVERAGE , calc . SM_SPLINES , <EOL> calc . SM_CONVEX_HULL ) : <EOL> module . smoothing_method . value = smoothing_method <EOL> for ow in ( calc . FI_AUTOMATIC , calc . FI_MANUALLY , <EOL> calc . FI_OBJECT_SIZE ) : <EOL> module . automatic_object_width . value = ow <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( <EOL> pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( image is not None ) <EOL> self . assertTrue ( np . all ( np . std ( image . pixel_data ) < <NUM_LIT> ) , <EOL> """<STR_LIT>""" % locals ( ) ) <EOL> def test_01_02_masked_image ( self ) : <EOL> """<STR_LIT>""" <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> np . random . seed ( <NUM_LIT:12> ) <EOL> for image in ( np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) , <EOL> np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> , <NUM_LIT:3> ) ) ) : <EOL> mask = np . 
zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> ) , bool ) <EOL> mask [ <NUM_LIT:2> : <NUM_LIT:7> , <NUM_LIT:3> : <NUM_LIT:8> ] = True <EOL> image [ mask ] = <NUM_LIT:1> <EOL> inj_module = inj . InjectImage ( "<STR_LIT>" , image , mask ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = "<STR_LIT>" <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . rescale_option . value = cps . YES <EOL> module . dilate_objects . value = False <EOL> for ea in ( calc . EA_EACH , calc . EA_ALL_ACROSS , calc . EA_ALL_FIRST ) : <EOL> module . each_or_all . value = ea <EOL> for intensity_choice in ( calc . IC_BACKGROUND , calc . IC_REGULAR ) : <EOL> module . intensity_choice . value = intensity_choice <EOL> for smoothing_method in ( calc . SM_NONE , calc . SM_FIT_POLYNOMIAL , <EOL> calc . SM_GAUSSIAN_FILTER , calc . SM_MEDIAN_FILTER , <EOL> calc . SM_TO_AVERAGE , calc . SM_CONVEX_HULL ) : <EOL> module . smoothing_method . value = smoothing_method <EOL> for ow in ( calc . FI_AUTOMATIC , calc . FI_MANUALLY , <EOL> calc . FI_OBJECT_SIZE ) : <EOL> module . automatic_object_width . value = ow <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( <EOL> pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . 
run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( image is not None ) <EOL> self . assertTrue ( np . all ( abs ( image . pixel_data [ mask ] - <NUM_LIT:1> < <NUM_LIT> ) ) , <EOL> """<STR_LIT>""" % locals ( ) ) <EOL> def test_01_03_filtered ( self ) : <EOL> '''<STR_LIT>''' <EOL> r = np . random . RandomState ( ) <EOL> r . seed ( <NUM_LIT> ) <EOL> i0 = r . uniform ( size = ( <NUM_LIT:11> , <NUM_LIT> ) ) <EOL> i1 = r . uniform ( size = ( <NUM_LIT:11> , <NUM_LIT> ) ) <EOL> i2 = r . uniform ( size = ( <NUM_LIT:11> , <NUM_LIT> ) ) <EOL> workspaces , module = self . make_workspaces ( ( <EOL> ( i0 , None ) , <EOL> ( i1 , None ) , <EOL> ( i2 , None ) ) ) <EOL> module . each_or_all . value = calc . EA_ALL_ACROSS <EOL> module . smoothing_method . value = calc . SM_TO_AVERAGE <EOL> module . save_average_image . value = True <EOL> module . save_dilated_image . value = True <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , None , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert isinstance ( module , calc . CorrectIlluminationCalculate ) <EOL> for workspace in workspaces [ : - <NUM_LIT:1> ] : <EOL> assert isinstance ( workspace , cpw . Workspace ) <EOL> module . run ( workspace ) <EOL> image_set = workspaces [ - <NUM_LIT:1> ] . image_set <EOL> self . assertNotIn ( OUTPUT_IMAGE_NAME , image_set . get_names ( ) ) <EOL> self . assertNotIn ( DILATED_IMAGE_NAME , image_set . get_names ( ) ) <EOL> self . assertNotIn ( AVERAGE_IMAGE_NAME , image_set . get_names ( ) ) <EOL> module . post_group ( workspaces [ - <NUM_LIT:1> ] , None ) <EOL> self . assertIn ( OUTPUT_IMAGE_NAME , image_set . get_names ( ) ) <EOL> self . assertIn ( DILATED_IMAGE_NAME , image_set . get_names ( ) ) <EOL> self . assertIn ( AVERAGE_IMAGE_NAME , image_set . get_names ( ) ) <EOL> def test_01_04_not_filtered ( self ) : <EOL> '''<STR_LIT>''' <EOL> r = np . random . RandomState ( ) <EOL> r . seed ( <NUM_LIT> ) <EOL> i0 = r . 
uniform ( size = ( <NUM_LIT:11> , <NUM_LIT> ) ) <EOL> i1 = r . uniform ( size = ( <NUM_LIT:11> , <NUM_LIT> ) ) <EOL> i2 = r . uniform ( size = ( <NUM_LIT:11> , <NUM_LIT> ) ) <EOL> workspaces , module = self . make_workspaces ( ( <EOL> ( i0 , None ) , <EOL> ( i1 , None ) , <EOL> ( i2 , None ) ) ) <EOL> module . each_or_all . value = calc . EA_ALL_ACROSS <EOL> module . smoothing_method . value = calc . SM_TO_AVERAGE <EOL> module . save_average_image . value = True <EOL> module . save_dilated_image . value = True <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , None , [ <NUM_LIT:1> , <NUM_LIT:2> , <NUM_LIT:3> ] ) <EOL> assert isinstance ( module , calc . CorrectIlluminationCalculate ) <EOL> for workspace in workspaces : <EOL> assert isinstance ( workspace , cpw . Workspace ) <EOL> module . run ( workspace ) <EOL> image_set = workspaces [ - <NUM_LIT:1> ] . image_set <EOL> self . assertIn ( OUTPUT_IMAGE_NAME , image_set . get_names ( ) ) <EOL> self . assertIn ( DILATED_IMAGE_NAME , image_set . get_names ( ) ) <EOL> self . assertIn ( AVERAGE_IMAGE_NAME , image_set . get_names ( ) ) <EOL> module . post_group ( workspaces [ - <NUM_LIT:1> ] , None ) <EOL> for image_name in ( <EOL> OUTPUT_IMAGE_NAME , DILATED_IMAGE_NAME , AVERAGE_IMAGE_NAME ) : <EOL> self . assertEqual ( len ( filter ( lambda x : x == image_name , <EOL> image_set . get_names ( ) ) ) , <NUM_LIT:1> ) <EOL> def test_02_02_Background ( self ) : <EOL> """<STR_LIT>""" <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> image = np . ones ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> image [ <NUM_LIT:10> , <NUM_LIT:10> ] = <NUM_LIT> <EOL> image [ <NUM_LIT:10> , <NUM_LIT:30> ] = <NUM_LIT> <EOL> image [ <NUM_LIT:30> , <NUM_LIT:10> ] = <NUM_LIT> <EOL> image [ <NUM_LIT:30> , <NUM_LIT:30> ] = <NUM_LIT> <EOL> inj_module = inj . InjectImage ( "<STR_LIT>" , image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . 
CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = "<STR_LIT>" <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . block_size . value = <NUM_LIT:20> <EOL> module . rescale_option . value = cps . NO <EOL> module . dilate_objects . value = False <EOL> module . smoothing_method . value = calc . SM_NONE <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( np . all ( image . pixel_data [ : <NUM_LIT:20> , : <NUM_LIT:20> ] == <NUM_LIT> ) ) <EOL> self . assertTrue ( np . all ( image . pixel_data [ : <NUM_LIT:20> , <NUM_LIT:20> : ] == <NUM_LIT> ) ) <EOL> self . assertTrue ( np . all ( image . pixel_data [ <NUM_LIT:20> : , : <NUM_LIT:20> ] == <NUM_LIT> ) ) <EOL> self . assertTrue ( np . all ( image . pixel_data [ <NUM_LIT:20> : , <NUM_LIT:20> : ] == <NUM_LIT> ) ) <EOL> def test_03_00_no_smoothing ( self ) : <EOL> """<STR_LIT>""" <EOL> input_image = np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> image_name = "<STR_LIT>" <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . 
error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_NONE <EOL> module . rescale_option . value = cps . NO <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( np . all ( np . abs ( image . pixel_data - input_image ) < <NUM_LIT> ) , <EOL> "<STR_LIT>" % image_name ) <EOL> def test_03_01_FitPolynomial ( self ) : <EOL> """<STR_LIT>""" <EOL> y , x = ( np . mgrid [ <NUM_LIT:0> : <NUM_LIT:20> , <NUM_LIT:0> : <NUM_LIT:20> ] ) . 
astype ( float ) / <NUM_LIT> <EOL> image_x = x <EOL> image_y = y <EOL> image_x2 = x ** <NUM_LIT:2> <EOL> image_y2 = y ** <NUM_LIT:2> <EOL> image_xy = x * y <EOL> for input_image , image_name in ( ( image_x , "<STR_LIT>" ) , <EOL> ( image_y , "<STR_LIT>" ) , <EOL> ( image_x2 , "<STR_LIT>" ) , <EOL> ( image_y2 , "<STR_LIT>" ) , <EOL> ( image_xy , "<STR_LIT>" ) ) : <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_FIT_POLYNOMIAL <EOL> module . rescale_option . value = cps . NO <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( np . all ( np . abs ( image . 
pixel_data - input_image ) < <NUM_LIT> ) , <EOL> "<STR_LIT>" % image_name ) <EOL> def test_03_02_gaussian_filter ( self ) : <EOL> """<STR_LIT>""" <EOL> input_image = np . zeros ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> input_image [ <NUM_LIT:50> , <NUM_LIT:50> ] = <NUM_LIT:1> <EOL> image_name = "<STR_LIT>" <EOL> i , j = np . mgrid [ - <NUM_LIT:50> : <NUM_LIT> , - <NUM_LIT:50> : <NUM_LIT> ] <EOL> expected_image = np . e ** ( - ( i ** <NUM_LIT:2> + j ** <NUM_LIT:2> ) / ( <NUM_LIT:2> * ( <NUM_LIT> / <NUM_LIT> ) ** <NUM_LIT:2> ) ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_GAUSSIAN_FILTER <EOL> module . automatic_object_width . value = calc . FI_MANUALLY <EOL> module . size_of_smoothing_filter . value = <NUM_LIT:10> <EOL> module . rescale_option . value = cps . NO <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . 
Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> ipd = image . pixel_data [ <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> ] <EOL> expected_image = expected_image [ <NUM_LIT> : <NUM_LIT> , <NUM_LIT> : <NUM_LIT> ] <EOL> self . assertTrue ( np . all ( np . abs ( ipd / ipd . mean ( ) - <EOL> expected_image / expected_image . mean ( ) ) < <EOL> <NUM_LIT> ) ) <EOL> def test_03_03_median_filter ( self ) : <EOL> """<STR_LIT>""" <EOL> input_image = np . zeros ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> input_image [ <NUM_LIT:50> , <NUM_LIT:50> ] = <NUM_LIT:1> <EOL> image_name = "<STR_LIT>" <EOL> expected_image = np . zeros ( ( <NUM_LIT> , <NUM_LIT> ) ) <EOL> filter_distance = int ( <NUM_LIT> + <NUM_LIT:10> / <NUM_LIT> ) <EOL> expected_image [ - filter_distance : filter_distance + <NUM_LIT:1> , <EOL> - filter_distance : filter_distance + <NUM_LIT:1> ] = <NUM_LIT:1> <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_MEDIAN_FILTER <EOL> module . automatic_object_width . value = calc . FI_MANUALLY <EOL> module . size_of_smoothing_filter . value = <NUM_LIT:10> <EOL> module . rescale_option . value = cps . NO <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . 
Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( np . all ( image . pixel_data == expected_image ) ) <EOL> def test_03_04_smooth_to_average ( self ) : <EOL> """<STR_LIT>""" <EOL> np . random . seed ( <NUM_LIT:0> ) <EOL> input_image = np . random . uniform ( size = ( <NUM_LIT:10> , <NUM_LIT:10> ) ) . astype ( np . float32 ) <EOL> image_name = "<STR_LIT>" <EOL> expected_image = np . ones ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) * input_image . mean ( ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_TO_AVERAGE <EOL> module . automatic_object_width . value = calc . FI_MANUALLY <EOL> module . size_of_smoothing_filter . value = <NUM_LIT:10> <EOL> module . rescale_option . value = cps . 
NO <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> np . testing . assert_almost_equal ( image . pixel_data , expected_image ) <EOL> def test_03_05_splines ( self ) : <EOL> for automatic , bg_mode , spline_points , threshold , convergence , offset , hi , lo , succeed in ( <EOL> ( True , calc . MODE_AUTO , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:0> , True , False , True ) , <EOL> ( True , calc . MODE_AUTO , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , False , True , True ) , <EOL> ( True , calc . MODE_AUTO , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , True , True , True ) , <EOL> ( False , calc . MODE_AUTO , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:0> , True , False , True ) , <EOL> ( False , calc . MODE_AUTO , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , False , True , True ) , <EOL> ( False , calc . MODE_AUTO , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , True , True , True ) , <EOL> ( False , calc . MODE_BRIGHT , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , False , True , True ) , <EOL> ( False , calc . MODE_DARK , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:0> , True , False , True ) , <EOL> ( False , calc . 
MODE_GRAY , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , True , True , True ) , <EOL> ( False , calc . MODE_AUTO , <NUM_LIT:7> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:0> , True , False , True ) , <EOL> ( False , calc . MODE_AUTO , <NUM_LIT:4> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:0> , True , False , True ) , <EOL> ( False , calc . MODE_DARK , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT> , False , True , False ) , <EOL> ( False , calc . MODE_BRIGHT , <NUM_LIT:5> , <NUM_LIT:2> , <NUM_LIT> , <NUM_LIT:0> , True , False , False ) <EOL> ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> image = np . random . uniform ( size = ( <NUM_LIT> , <NUM_LIT> ) ) * <NUM_LIT> + offset <EOL> if hi : <EOL> fg = np . random . permutation ( <NUM_LIT> ) [ : <NUM_LIT:100> ] <EOL> image [ fg % image . shape [ <NUM_LIT:0> ] , ( fg / image . shape [ <NUM_LIT:0> ] ) . astype ( int ) ] *= <NUM_LIT:10> <EOL> if lo : <EOL> bg = np . random . permutation ( <NUM_LIT> ) [ : <NUM_LIT:100> ] <EOL> image [ bg % image . shape [ <NUM_LIT:0> ] , ( bg / image . shape [ <NUM_LIT:0> ] ) . astype ( int ) ] -= offset <EOL> ii , jj = np . mgrid [ - <NUM_LIT:10> : <NUM_LIT:11> , - <NUM_LIT:15> : <NUM_LIT:16> ] <EOL> bg = ( ( ii . astype ( float ) / <NUM_LIT:10> ) ** <NUM_LIT:2> ) * ( ( jj . astype ( float ) / <NUM_LIT:15> ) ** <NUM_LIT:2> ) <EOL> bg *= <NUM_LIT> <EOL> image += bg <EOL> workspaces , module = self . make_workspaces ( ( ( image , None ) , ) ) <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value = calc . EA_EACH <EOL> module . rescale_option . value = cps . NO <EOL> module . smoothing_method . value = calc . SM_SPLINES <EOL> module . automatic_splines . value = automatic <EOL> module . spline_bg_mode . value = bg_mode <EOL> module . spline_convergence . value = convergence <EOL> module . spline_threshold . value = threshold <EOL> module . spline_points . 
value = spline_points <EOL> module . spline_rescale . value = <NUM_LIT:1> <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , { } , [ <NUM_LIT:1> ] ) <EOL> module . run ( workspaces [ <NUM_LIT:0> ] ) <EOL> img = workspaces [ <NUM_LIT:0> ] . image_set . get_image ( OUTPUT_IMAGE_NAME ) <EOL> pixel_data = img . pixel_data <EOL> diff = pixel_data - np . min ( pixel_data ) - bg <EOL> if succeed : <EOL> self . assertTrue ( np . all ( diff < <NUM_LIT> ) ) <EOL> else : <EOL> self . assertFalse ( np . all ( diff < <NUM_LIT> ) ) <EOL> def test_03_06_splines_scaled ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> image = np . random . uniform ( size = ( <NUM_LIT> , <NUM_LIT> ) ) * <NUM_LIT> <EOL> fg = np . random . permutation ( np . prod ( image . shape ) ) [ : <NUM_LIT:200> ] <EOL> image [ fg % image . shape [ <NUM_LIT:0> ] , ( fg / image . shape [ <NUM_LIT:0> ] ) . astype ( int ) ] *= <NUM_LIT:15> <EOL> ii , jj = np . mgrid [ - <NUM_LIT:50> : <NUM_LIT> , - <NUM_LIT> : <NUM_LIT> ] <EOL> bg = ( ( ii . astype ( float ) / <NUM_LIT:10> ) ** <NUM_LIT:2> ) * ( ( jj . astype ( float ) / <NUM_LIT:15> ) ** <NUM_LIT:2> ) <EOL> bg *= <NUM_LIT> <EOL> image += bg <EOL> workspaces , module = self . make_workspaces ( ( ( image , None ) , ) ) <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value = calc . EA_EACH <EOL> module . rescale_option . value = cps . NO <EOL> module . smoothing_method . value = calc . SM_SPLINES <EOL> module . automatic_splines . value = False <EOL> module . spline_rescale . value = <NUM_LIT:2> <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , { } , [ <NUM_LIT:1> ] ) <EOL> module . run ( workspaces [ <NUM_LIT:0> ] ) <EOL> img = workspaces [ <NUM_LIT:0> ] . image_set . get_image ( OUTPUT_IMAGE_NAME ) <EOL> pixel_data = img . pixel_data <EOL> diff = pixel_data - np . min ( pixel_data ) - bg <EOL> np . 
all ( diff < <NUM_LIT> ) <EOL> def test_03_07_splines_masked ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> image = np . random . uniform ( size = ( <NUM_LIT> , <NUM_LIT> ) ) * <NUM_LIT> <EOL> mask = np . random . uniform ( size = ( <NUM_LIT> , <NUM_LIT> ) ) < <NUM_LIT> <EOL> ii , jj = np . mgrid [ - <NUM_LIT:10> : <NUM_LIT:11> , - <NUM_LIT:15> : <NUM_LIT:16> ] <EOL> bg = ( ( ii . astype ( float ) / <NUM_LIT:10> ) ** <NUM_LIT:2> ) * ( ( jj . astype ( float ) / <NUM_LIT:15> ) ** <NUM_LIT:2> ) <EOL> bg *= <NUM_LIT> <EOL> image += bg <EOL> image [ ~ mask ] += bg [ ~ mask ] <EOL> workspaces , module = self . make_workspaces ( ( ( image , mask ) , ) ) <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value = calc . EA_EACH <EOL> module . rescale_option . value = cps . NO <EOL> module . smoothing_method . value = calc . SM_SPLINES <EOL> module . automatic_splines . value = True <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , { } , [ <NUM_LIT:1> ] ) <EOL> module . run ( workspaces [ <NUM_LIT:0> ] ) <EOL> img = workspaces [ <NUM_LIT:0> ] . image_set . get_image ( OUTPUT_IMAGE_NAME ) <EOL> pixel_data = img . pixel_data <EOL> diff = pixel_data - np . min ( pixel_data ) - bg <EOL> self . assertTrue ( np . all ( diff < <NUM_LIT> ) ) <EOL> workspaces , module = self . make_workspaces ( ( ( image , None ) , ) ) <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value = calc . EA_EACH <EOL> module . rescale_option . value = cps . NO <EOL> module . smoothing_method . value = calc . SM_SPLINES <EOL> module . automatic_splines . value = True <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , { } , [ <NUM_LIT:1> ] ) <EOL> module . 
run ( workspaces [ <NUM_LIT:0> ] ) <EOL> img = workspaces [ <NUM_LIT:0> ] . image_set . get_image ( OUTPUT_IMAGE_NAME ) <EOL> pixel_data = img . pixel_data <EOL> diff = pixel_data - np . min ( pixel_data ) - bg <EOL> self . assertFalse ( np . all ( diff < <NUM_LIT> ) ) <EOL> def test_03_07_splines_cropped ( self ) : <EOL> np . random . seed ( <NUM_LIT> ) <EOL> image = np . random . uniform ( size = ( <NUM_LIT> , <NUM_LIT> ) ) * <NUM_LIT> <EOL> mask = np . zeros ( image . shape , bool ) <EOL> mask [ <NUM_LIT:4> : - <NUM_LIT:4> , <NUM_LIT:6> : - <NUM_LIT:6> ] = True <EOL> ii , jj = np . mgrid [ - <NUM_LIT:10> : <NUM_LIT:11> , - <NUM_LIT:15> : <NUM_LIT:16> ] <EOL> bg = ( ( ii . astype ( float ) / <NUM_LIT:10> ) ** <NUM_LIT:2> ) * ( ( jj . astype ( float ) / <NUM_LIT:15> ) ** <NUM_LIT:2> ) <EOL> bg *= <NUM_LIT> <EOL> image += bg <EOL> image [ ~ mask ] += bg [ ~ mask ] <EOL> workspaces , module = self . make_workspaces ( ( ( image , mask ) , ) ) <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value = calc . EA_EACH <EOL> module . rescale_option . value = cps . NO <EOL> module . smoothing_method . value = calc . SM_SPLINES <EOL> module . automatic_splines . value = True <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , { } , [ <NUM_LIT:1> ] ) <EOL> module . run ( workspaces [ <NUM_LIT:0> ] ) <EOL> img = workspaces [ <NUM_LIT:0> ] . image_set . get_image ( OUTPUT_IMAGE_NAME ) <EOL> pixel_data = img . pixel_data <EOL> diff = pixel_data - np . min ( pixel_data ) - bg <EOL> self . assertTrue ( np . all ( diff < <NUM_LIT> ) ) <EOL> workspaces , module = self . make_workspaces ( ( ( image , None ) , ) ) <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> module . intensity_choice . value = calc . IC_BACKGROUND <EOL> module . each_or_all . value = calc . EA_EACH <EOL> module . rescale_option . 
value = cps . NO <EOL> module . smoothing_method . value = calc . SM_SPLINES <EOL> module . automatic_splines . value = True <EOL> module . prepare_group ( workspaces [ <NUM_LIT:0> ] , { } , [ <NUM_LIT:1> ] ) <EOL> module . run ( workspaces [ <NUM_LIT:0> ] ) <EOL> img = workspaces [ <NUM_LIT:0> ] . image_set . get_image ( OUTPUT_IMAGE_NAME ) <EOL> pixel_data = img . pixel_data <EOL> diff = pixel_data - np . min ( pixel_data ) - bg <EOL> self . assertFalse ( np . all ( diff < <NUM_LIT> ) ) <EOL> def test_04_01_intermediate_images ( self ) : <EOL> """<STR_LIT>""" <EOL> for average_flag , dilated_flag in ( ( False , False ) , <EOL> ( False , True ) , <EOL> ( True , False ) , <EOL> ( True , True ) ) : <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( "<STR_LIT>" , np . zeros ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = "<STR_LIT>" <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . save_average_image . value = average_flag <EOL> module . average_image_name . value = "<STR_LIT>" <EOL> module . save_dilated_image . value = dilated_flag <EOL> module . dilated_image_name . value = "<STR_LIT>" <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . 
Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> if average_flag : <EOL> img = image_set . get_image ( "<STR_LIT>" ) <EOL> else : <EOL> self . assertRaises ( AssertionError , <EOL> image_set . get_image , <EOL> "<STR_LIT>" ) <EOL> if dilated_flag : <EOL> img = image_set . get_image ( "<STR_LIT>" ) <EOL> else : <EOL> self . assertRaises ( AssertionError , <EOL> image_set . get_image , <EOL> "<STR_LIT>" ) <EOL> def test_05_01_rescale ( self ) : <EOL> """<STR_LIT>""" <EOL> input_image = np . ones ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> input_image [ <NUM_LIT:0> : <NUM_LIT:5> , : ] *= <NUM_LIT> <EOL> image_name = "<STR_LIT>" <EOL> expected_image = input_image * <NUM_LIT:2> <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_NONE <EOL> module . automatic_object_width . value = calc . FI_MANUALLY <EOL> module . size_of_smoothing_filter . value = <NUM_LIT:10> <EOL> module . rescale_option . value = cps . YES <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . 
prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( np . all ( image . pixel_data == expected_image ) ) <EOL> def test_05_02_rescale_outlier ( self ) : <EOL> """<STR_LIT>""" <EOL> input_image = np . ones ( ( <NUM_LIT:10> , <NUM_LIT:10> ) ) <EOL> input_image [ <NUM_LIT:0> : <NUM_LIT:5> , : ] *= <NUM_LIT> <EOL> input_image [ <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT> <EOL> image_name = "<STR_LIT>" <EOL> expected_image = input_image * <NUM_LIT:2> <EOL> expected_image [ <NUM_LIT:0> , <NUM_LIT:0> ] = <NUM_LIT:1> <EOL> pipeline = cpp . Pipeline ( ) <EOL> pipeline . add_listener ( self . error_callback ) <EOL> inj_module = inj . InjectImage ( image_name , input_image ) <EOL> inj_module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( inj_module ) <EOL> module = calc . CorrectIlluminationCalculate ( ) <EOL> module . module_num = <NUM_LIT:2> <EOL> pipeline . add_module ( module ) <EOL> module . image_name . value = image_name <EOL> module . illumination_image_name . value = "<STR_LIT>" <EOL> module . intensity_choice . value = calc . IC_REGULAR <EOL> module . each_or_all . value == calc . EA_EACH <EOL> module . smoothing_method . value = calc . SM_NONE <EOL> module . automatic_object_width . value = calc . FI_MANUALLY <EOL> module . size_of_smoothing_filter . value = <NUM_LIT:10> <EOL> module . rescale_option . value = cps . YES <EOL> module . dilate_objects . value = False <EOL> measurements = cpm . Measurements ( ) <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> workspace = cpw . 
Workspace ( pipeline , None , None , None , <EOL> measurements , image_set_list ) <EOL> pipeline . prepare_run ( workspace ) <EOL> inj_module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> module . prepare_group ( workspace , { } , [ <NUM_LIT:1> ] ) <EOL> image_set = image_set_list . get_image_set ( <NUM_LIT:0> ) <EOL> object_set = cpo . ObjectSet ( ) <EOL> workspace = cpw . Workspace ( pipeline , <EOL> inj_module , <EOL> image_set , <EOL> object_set , <EOL> measurements , <EOL> image_set_list ) <EOL> inj_module . run ( workspace ) <EOL> module . run ( workspace ) <EOL> image = image_set . get_image ( "<STR_LIT>" ) <EOL> self . assertTrue ( np . all ( image . pixel_data == expected_image ) ) <EOL> def test_06_01_load_matlab ( self ) : <EOL> data = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( StringIO ( zlib . decompress ( base64 . b64decode ( data ) ) ) ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:1> ) <EOL> module = pipeline . modules ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> self . assertEqual ( module . image_name , "<STR_LIT>" ) <EOL> self . assertEqual ( module . illumination_image_name , "<STR_LIT>" ) <EOL> self . assertEqual ( module . intensity_choice , calc . IC_REGULAR ) <EOL> self . assertFalse ( module . dilate_objects ) <EOL> self . assertEqual ( module . rescale_option , cps . YES ) <EOL> self . assertEqual ( module . each_or_all , calc . EA_EACH ) <EOL> self . assertEqual ( module . smoothing_method , calc . SM_NONE ) <EOL> self . 
assertEqual ( module . automatic_object_width , calc . FI_AUTOMATIC ) <EOL> self . assertFalse ( module . save_average_image ) <EOL> self . assertFalse ( module . save_dilated_image ) <EOL> def test_06_02_load_v1 ( self ) : <EOL> data = r"""<STR_LIT>""" <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( StringIO ( data ) ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:6> ) <EOL> for i , ( image_name , illumination_image_name , intensity_choice , <EOL> dilate_objects , object_dilation_radius , block_size , <EOL> rescale_option , each_or_all , smoothing_method , <EOL> automatic_object_width , object_width , size_of_smoothing_filter , <EOL> save_average_image , average_image_name , save_dilated_image , <EOL> dilated_image_name ) in enumerate ( ( <EOL> ( "<STR_LIT>" , "<STR_LIT>" , calc . IC_REGULAR , False , <NUM_LIT:1> , <NUM_LIT> , cps . YES , <EOL> calc . EA_ALL_FIRST , calc . SM_NONE , calc . FI_AUTOMATIC , <NUM_LIT:10> , <NUM_LIT:10> , True , <EOL> "<STR_LIT>" , True , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , calc . IC_BACKGROUND , True , <NUM_LIT:2> , <NUM_LIT> , cps . NO , <EOL> calc . EA_ALL_FIRST , calc . SM_MEDIAN_FILTER , calc . FI_MANUALLY , <NUM_LIT:15> , <NUM_LIT:20> , <EOL> True , "<STR_LIT>" , True , "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , calc . IC_REGULAR , False , <NUM_LIT:1> , <NUM_LIT> , <EOL> calc . RE_MEDIAN , calc . EA_ALL_ACROSS , calc . SM_MEDIAN_FILTER , <EOL> calc . FI_AUTOMATIC , <NUM_LIT:10> , <NUM_LIT:10> , False , "<STR_LIT>" , True , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , calc . IC_REGULAR , cps . NO , <NUM_LIT:1> , <NUM_LIT> , <EOL> calc . RE_MEDIAN , calc . EA_EACH , calc . SM_GAUSSIAN_FILTER , <EOL> calc . 
FI_OBJECT_SIZE , <NUM_LIT:15> , <NUM_LIT:10> , False , "<STR_LIT>" , True , <EOL> "<STR_LIT>" ) , <EOL> ( "<STR_LIT>" , "<STR_LIT>" , calc . IC_REGULAR , cps . NO , <NUM_LIT:1> , <NUM_LIT> , <EOL> calc . RE_MEDIAN , calc . EA_ALL_ACROSS , calc . SM_TO_AVERAGE , <EOL> calc . FI_OBJECT_SIZE , <NUM_LIT:15> , <NUM_LIT:10> , False , "<STR_LIT>" , <EOL> False , "<STR_LIT>" ) ) ) : <EOL> module = pipeline . modules ( ) [ i + <NUM_LIT:1> ] <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> self . assertEqual ( module . image_name , image_name ) <EOL> self . assertEqual ( module . illumination_image_name , illumination_image_name ) <EOL> self . assertEqual ( module . intensity_choice , intensity_choice ) <EOL> self . assertEqual ( module . dilate_objects , dilate_objects ) <EOL> self . assertEqual ( module . object_dilation_radius , object_dilation_radius ) <EOL> self . assertEqual ( module . block_size , block_size ) <EOL> self . assertEqual ( module . rescale_option , rescale_option ) <EOL> self . assertEqual ( module . each_or_all , each_or_all ) <EOL> self . assertEqual ( module . smoothing_method , smoothing_method ) <EOL> self . assertEqual ( module . automatic_object_width , automatic_object_width ) <EOL> self . assertEqual ( module . object_width , object_width ) <EOL> self . assertEqual ( module . size_of_smoothing_filter , size_of_smoothing_filter ) <EOL> self . assertEqual ( module . save_average_image , save_average_image ) <EOL> self . assertEqual ( module . average_image_name , average_image_name ) <EOL> self . assertEqual ( module . save_dilated_image , save_dilated_image ) <EOL> self . assertEqual ( module . dilated_image_name , dilated_image_name ) <EOL> def test_06_03_load_v2 ( self ) : <EOL> data = r"""<STR_LIT>""" <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . 
add_listener ( callback ) <EOL> pipeline . load ( StringIO ( data ) ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:5> ) <EOL> module = pipeline . modules ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> self . assertEqual ( module . image_name , "<STR_LIT>" ) <EOL> self . assertEqual ( module . illumination_image_name , "<STR_LIT>" ) <EOL> self . assertEqual ( module . intensity_choice , calc . IC_BACKGROUND ) <EOL> self . assertFalse ( module . dilate_objects ) <EOL> self . assertEqual ( module . object_dilation_radius , <NUM_LIT:2> ) <EOL> self . assertEqual ( module . block_size , <NUM_LIT> ) <EOL> self . assertEqual ( module . rescale_option , cps . NO ) <EOL> self . assertEqual ( module . each_or_all , calc . EA_EACH ) <EOL> self . assertEqual ( module . smoothing_method , calc . SM_SPLINES ) <EOL> self . assertEqual ( module . automatic_object_width , calc . FI_AUTOMATIC ) <EOL> self . assertEqual ( module . object_width , <NUM_LIT:11> ) <EOL> self . assertEqual ( module . size_of_smoothing_filter , <NUM_LIT:12> ) <EOL> self . assertFalse ( module . save_average_image ) <EOL> self . assertEqual ( module . average_image_name , "<STR_LIT>" ) <EOL> self . assertFalse ( module . save_dilated_image ) <EOL> self . assertEqual ( module . dilated_image_name , "<STR_LIT>" ) <EOL> self . assertFalse ( module . automatic_splines ) <EOL> self . assertEqual ( module . spline_bg_mode , calc . MODE_BRIGHT ) <EOL> self . assertEqual ( module . spline_points , <NUM_LIT:4> ) <EOL> self . assertEqual ( module . spline_threshold , <NUM_LIT:2> ) <EOL> self . assertEqual ( module . spline_rescale , <NUM_LIT:2> ) <EOL> self . assertEqual ( module . spline_maximum_iterations , <NUM_LIT> ) <EOL> self . assertAlmostEqual ( module . spline_convergence . value , <NUM_LIT> ) <EOL> self . assertTrue ( pipeline . modules ( ) [ <NUM_LIT:1> ] . 
automatic_splines ) <EOL> for module , spline_bg_mode in zip ( pipeline . modules ( ) [ <NUM_LIT:1> : <NUM_LIT:4> ] , ( <EOL> calc . MODE_AUTO , calc . MODE_DARK , calc . MODE_GRAY ) ) : <EOL> self . assertTrue ( isinstance ( module , calc . CorrectIlluminationCalculate ) ) <EOL> self . assertEqual ( module . spline_bg_mode , spline_bg_mode ) <EOL> module = pipeline . modules ( ) [ <NUM_LIT:4> ] <EOL> self . assertEqual ( module . smoothing_method , calc . SM_CONVEX_HULL ) </s>
<s> '''<STR_LIT>''' <EOL> import base64 <EOL> import os <EOL> import unittest <EOL> import zlib <EOL> from StringIO import StringIO <EOL> import numpy as np <EOL> import scipy . ndimage <EOL> from cellprofiler . preferences import set_headless <EOL> set_headless ( ) <EOL> import cellprofiler . pipeline as cpp <EOL> import cellprofiler . cpmodule as cpm <EOL> import cellprofiler . cpimage as cpi <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . objects as cpo <EOL> import cellprofiler . workspace as cpw <EOL> import cellprofiler . modules . labelimages as L <EOL> class TestLabelImages ( unittest . TestCase ) : <EOL> def test_01_00_load_matlab ( self ) : <EOL> data = ( '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . load ( StringIO ( zlib . decompress ( base64 . b64decode ( data ) ) ) ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:2> ) <EOL> module = pipeline . modules ( ) [ - <NUM_LIT:1> ] <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> self . assertEqual ( module . row_count . value , <NUM_LIT:16> ) <EOL> self . assertEqual ( module . column_count . value , <NUM_LIT> ) <EOL> self . assertEqual ( module . site_count . value , <NUM_LIT:2> ) <EOL> self . assertEqual ( module . order , L . O_COLUMN ) <EOL> def test_01_01_load_v1 ( self ) : <EOL> data = r"""<STR_LIT>""" <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . LoadExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> pipeline . 
load ( StringIO ( data ) ) <EOL> self . assertEqual ( len ( pipeline . modules ( ) ) , <NUM_LIT:2> ) <EOL> module = pipeline . modules ( ) [ <NUM_LIT:0> ] <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> self . assertEqual ( module . site_count , <NUM_LIT:3> ) <EOL> self . assertEqual ( module . row_count , <NUM_LIT:32> ) <EOL> self . assertEqual ( module . column_count , <NUM_LIT> ) <EOL> self . assertEqual ( module . order , L . O_COLUMN ) <EOL> module = pipeline . modules ( ) [ <NUM_LIT:1> ] <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> self . assertEqual ( module . site_count , <NUM_LIT:1> ) <EOL> self . assertEqual ( module . row_count , <NUM_LIT:8> ) <EOL> self . assertEqual ( module . column_count , <NUM_LIT:12> ) <EOL> self . assertEqual ( module . order , L . O_ROW ) <EOL> def make_workspace ( self , image_set_count ) : <EOL> image_set_list = cpi . ImageSetList ( ) <EOL> for i in range ( image_set_count ) : <EOL> image_set = image_set_list . get_image_set ( i ) <EOL> module = L . LabelImages ( ) <EOL> pipeline = cpp . Pipeline ( ) <EOL> def callback ( caller , event ) : <EOL> self . assertFalse ( isinstance ( event , cpp . RunExceptionEvent ) ) <EOL> pipeline . add_listener ( callback ) <EOL> module . module_num = <NUM_LIT:1> <EOL> pipeline . add_module ( module ) <EOL> workspace = cpw . Workspace ( pipeline , module , <EOL> image_set_list . get_image_set ( <NUM_LIT:0> ) , <EOL> cpo . ObjectSet ( ) , cpmeas . Measurements ( ) , <EOL> image_set_list ) <EOL> return workspace , module <EOL> def test_02_01_label_plate_by_row ( self ) : <EOL> '''<STR_LIT>''' <EOL> nsites = <NUM_LIT:6> <EOL> nimagesets = <NUM_LIT> * nsites <EOL> workspace , module = self . make_workspace ( nimagesets ) <EOL> measurements = workspace . measurements <EOL> self . assertTrue ( isinstance ( measurements , cpmeas . Measurements ) ) <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> module . row_count . 
value = <NUM_LIT:8> <EOL> module . column_count . value = <NUM_LIT:12> <EOL> module . order . value = L . O_ROW <EOL> module . site_count . value = nsites <EOL> for i in range ( nimagesets ) : <EOL> if i != <NUM_LIT:0> : <EOL> measurements . next_image_set ( ) <EOL> module . run ( workspace ) <EOL> sites = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_SITE ) <EOL> rows = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_ROW ) <EOL> columns = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_COLUMN ) <EOL> plates = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_PLATE ) <EOL> wells = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_WELL ) <EOL> for i in range ( nimagesets ) : <EOL> self . assertEqual ( sites [ i ] , ( i % <NUM_LIT:6> ) + <NUM_LIT:1> ) <EOL> this_row = '<STR_LIT>' [ int ( i / <NUM_LIT:6> / <NUM_LIT:12> ) ] <EOL> this_column = ( int ( i / <NUM_LIT:6> ) % <NUM_LIT:12> ) + <NUM_LIT:1> <EOL> self . assertEqual ( rows [ i ] , this_row ) <EOL> self . assertEqual ( columns [ i ] , this_column ) <EOL> self . assertEqual ( wells [ i ] , '<STR_LIT>' % ( this_row , this_column ) ) <EOL> self . assertEqual ( plates [ i ] , <NUM_LIT:1> ) <EOL> def test_02_02_label_plate_by_column ( self ) : <EOL> '''<STR_LIT>''' <EOL> nsites = <NUM_LIT:6> <EOL> nimagesets = <NUM_LIT> * nsites <EOL> workspace , module = self . make_workspace ( nimagesets ) <EOL> measurements = workspace . measurements <EOL> self . assertTrue ( isinstance ( measurements , cpmeas . Measurements ) ) <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> module . row_count . value = <NUM_LIT:8> <EOL> module . column_count . value = <NUM_LIT:12> <EOL> module . order . value = L . O_COLUMN <EOL> module . site_count . value = nsites <EOL> for i in range ( nimagesets ) : <EOL> if i != <NUM_LIT:0> : <EOL> measurements . next_image_set ( ) <EOL> module . run ( workspace ) <EOL> sites = measurements . 
get_all_measurements ( cpmeas . IMAGE , cpmeas . M_SITE ) <EOL> rows = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_ROW ) <EOL> columns = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_COLUMN ) <EOL> plates = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_PLATE ) <EOL> wells = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_WELL ) <EOL> for i in range ( nimagesets ) : <EOL> self . assertEqual ( sites [ i ] , ( i % <NUM_LIT:6> ) + <NUM_LIT:1> ) <EOL> this_row = '<STR_LIT>' [ int ( i / <NUM_LIT:6> ) % <NUM_LIT:8> ] <EOL> this_column = int ( i / <NUM_LIT:6> / <NUM_LIT:8> ) + <NUM_LIT:1> <EOL> self . assertEqual ( rows [ i ] , this_row ) <EOL> self . assertEqual ( columns [ i ] , this_column ) <EOL> self . assertEqual ( wells [ i ] , '<STR_LIT>' % ( this_row , this_column ) ) <EOL> self . assertEqual ( plates [ i ] , <NUM_LIT:1> ) <EOL> def test_02_03_label_many_plates ( self ) : <EOL> nsites = <NUM_LIT:1> <EOL> nplates = <NUM_LIT:6> <EOL> nimagesets = <NUM_LIT> * nsites * nplates <EOL> workspace , module = self . make_workspace ( nimagesets ) <EOL> measurements = workspace . measurements <EOL> self . assertTrue ( isinstance ( measurements , cpmeas . Measurements ) ) <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> module . row_count . value = <NUM_LIT:8> <EOL> module . column_count . value = <NUM_LIT:12> <EOL> module . order . value = L . O_ROW <EOL> module . site_count . value = nsites <EOL> for i in range ( nimagesets ) : <EOL> if i != <NUM_LIT:0> : <EOL> measurements . next_image_set ( ) <EOL> module . run ( workspace ) <EOL> sites = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_SITE ) <EOL> rows = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_ROW ) <EOL> columns = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_COLUMN ) <EOL> plates = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . 
M_PLATE ) <EOL> wells = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_WELL ) <EOL> for i in range ( nimagesets ) : <EOL> self . assertEqual ( sites [ i ] , <NUM_LIT:1> ) <EOL> this_row = '<STR_LIT>' [ int ( i / <NUM_LIT:12> ) % <NUM_LIT:8> ] <EOL> this_column = ( i % <NUM_LIT:12> ) + <NUM_LIT:1> <EOL> self . assertEqual ( rows [ i ] , this_row ) <EOL> self . assertEqual ( columns [ i ] , this_column ) <EOL> self . assertEqual ( wells [ i ] , '<STR_LIT>' % ( this_row , this_column ) ) <EOL> self . assertEqual ( plates [ i ] , int ( i / <NUM_LIT:8> / <NUM_LIT:12> ) + <NUM_LIT:1> ) <EOL> def test_02_04_multichar_row_names ( self ) : <EOL> nimagesets = <NUM_LIT:1000> <EOL> workspace , module = self . make_workspace ( nimagesets ) <EOL> measurements = workspace . measurements <EOL> self . assertTrue ( isinstance ( measurements , cpmeas . Measurements ) ) <EOL> self . assertTrue ( isinstance ( module , L . LabelImages ) ) <EOL> module . row_count . value = <NUM_LIT:1000> <EOL> module . column_count . value = <NUM_LIT:1> <EOL> module . order . value = L . O_ROW <EOL> module . site_count . value = <NUM_LIT:1> <EOL> for i in range ( nimagesets ) : <EOL> if i != <NUM_LIT:0> : <EOL> measurements . next_image_set ( ) <EOL> module . run ( workspace ) <EOL> sites = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_SITE ) <EOL> rows = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_ROW ) <EOL> columns = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_COLUMN ) <EOL> plates = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_PLATE ) <EOL> wells = measurements . get_all_measurements ( cpmeas . IMAGE , cpmeas . M_WELL ) <EOL> abc = '<STR_LIT>' <EOL> for i in range ( nimagesets ) : <EOL> self . assertEqual ( sites [ i ] , <NUM_LIT:1> ) <EOL> this_row = ( abc [ int ( i / <NUM_LIT> / <NUM_LIT> ) ] + <EOL> abc [ int ( i / <NUM_LIT> ) % <NUM_LIT> ] + <EOL> abc [ i % <NUM_LIT> ] ) <EOL> self . 
assertEqual ( rows [ i ] , this_row ) </s>
<s> </s>
<s> '''<STR_LIT>''' <EOL> import csv <EOL> import traceback <EOL> import numpy as np <EOL> OUTPUT_PRESENT = "<STR_LIT>" <EOL> MATCHING_COLUMNS = "<STR_LIT>" <EOL> MEASUREMENT_COUNT = "<STR_LIT>" <EOL> '''<STR_LIT>''' <EOL> ERROR_TYPE_LOGICAL = "<STR_LIT>" <EOL> '''<STR_LIT>''' <EOL> ERROR_TYPE_QUANTITY = "<STR_LIT>" <EOL> '''<STR_LIT>''' <EOL> ERROR_TYPE_MEASUREMENT = "<STR_LIT>" <EOL> def test_files ( test_name , test_file , reference_file , output_file , <EOL> max_deviation = <NUM_LIT> , max_nan_deviation = <NUM_LIT> , <EOL> max_obj_deviation = <NUM_LIT> ) : <EOL> '''<STR_LIT>''' <EOL> output_fd = open ( output_file , "<STR_LIT:w>" ) <EOL> output_fd . write ( """<STR_LIT>""" % locals ( ) ) <EOL> try : <EOL> test_reader = csv . reader ( open ( test_file , "<STR_LIT:r>" ) ) <EOL> reference_reader = csv . reader ( open ( reference_file , "<STR_LIT:r>" ) ) <EOL> test_measurements = collect_measurements ( test_reader ) <EOL> reference_measurements = collect_measurements ( reference_reader ) <EOL> statistics = test_matching_columns ( test_measurements , reference_measurements ) <EOL> statistics += test_deviations ( test_measurements , reference_measurements , <EOL> max_deviation , max_nan_deviation , <EOL> max_obj_deviation ) <EOL> if ( test_measurements . has_key ( "<STR_LIT>" ) and <EOL> reference_measurements . has_key ( "<STR_LIT>" ) ) : <EOL> image_numbers = np . unique ( np . hstack ( ( <EOL> test_measurements [ "<STR_LIT>" ] , <EOL> reference_measurements [ "<STR_LIT>" ] ) ) ) <EOL> test_measurements_per_image = collect_per_image ( test_measurements , <EOL> image_numbers ) <EOL> reference_measurements_per_image = collect_per_image ( <EOL> reference_measurements , image_numbers ) <EOL> for feature in test_measurements_per_image . 
keys ( ) : <EOL> test_measurement = test_measurements_per_image [ feature ] <EOL> reference_measurement = reference_measurements_per_image [ feature ] <EOL> fs_all = None <EOL> for i , tm , rm in zip ( image_numbers , test_measurement , reference_measurement ) : <EOL> fs = test_deviation ( feature , tm , rm , <EOL> max_deviation , max_nan_deviation , <EOL> max_obj_deviation , True ) <EOL> if any ( statistic [ <NUM_LIT:2> ] is not None for statistic in fs ) : <EOL> fs_all = fs <EOL> break <EOL> for s in fs : <EOL> s [ <NUM_LIT:1> ] [ "<STR_LIT>" ] = i <EOL> if fs_all is None : <EOL> fs_all = [ ( name , [ attributes ] , error_type , message , body ) <EOL> for name , attributes , error_type , message , body in fs ] <EOL> else : <EOL> for i in range ( len ( fs ) ) : <EOL> fs_all [ i ] [ <NUM_LIT:1> ] . append ( fs [ i ] [ <NUM_LIT:1> ] ) <EOL> statistics += fs_all <EOL> except Exception , e : <EOL> stacktrace = traceback . format_exc ( ) <EOL> message = e . message <EOL> output_fd . write ( """<STR_LIT>""" % ( OUTPUT_PRESENT , type ( e ) , message , stacktrace ) ) <EOL> output_fd . close ( ) <EOL> return <EOL> error_count = count_errors ( statistics ) <EOL> output_fd . write ( """<STR_LIT>""" % error_count ) <EOL> for statistic in statistics : <EOL> write_statistic ( output_fd , statistic ) <EOL> output_fd . write ( "<STR_LIT>" ) <EOL> output_fd . close ( ) <EOL> ignore_categories = ( <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" ) <EOL> def make_success_statistic ( name , attributes , per_image = False ) : <EOL> if per_image : <EOL> name += "<STR_LIT>" <EOL> return name , attributes , None , None , None <EOL> def make_error_statistic ( name , attributes , error_type , message , body , <EOL> per_image = False ) : <EOL> if per_image : <EOL> name += "<STR_LIT>" <EOL> return name , attributes , error_type , message , body <EOL> def write_statistic ( output_fd , statistic ) : <EOL> name , attributes , error_type , message , body = statistic <EOL> output_fd . 
write ( '<STR_LIT>' % name ) <EOL> if isinstance ( attributes , list ) : <EOL> output_fd . write ( "<STR_LIT>" ) <EOL> for attribute in attributes : <EOL> output_fd . write ( "<STR_LIT>" % <EOL> "<STR_LIT:U+0020>" . join ( [ '<STR_LIT>' % ( key , str ( value ) ) <EOL> for key , value in attribute . iteritems ( ) ] ) ) <EOL> output_fd . write ( "<STR_LIT>" ) <EOL> elif error_type is None : <EOL> output_fd . write ( '<STR_LIT:U+0020>' . join ( [ '<STR_LIT>' % ( key , str ( value ) ) <EOL> for key , value in attributes . iteritems ( ) ] ) ) <EOL> output_fd . write ( '<STR_LIT>' ) <EOL> else : <EOL> output_fd . write ( '<STR_LIT>' ) <EOL> output_fd . write ( '<STR_LIT>' % <EOL> ( error_type , message ) ) <EOL> output_fd . write ( body ) <EOL> output_fd . write ( '<STR_LIT>' ) <EOL> output_fd . write ( '<STR_LIT>' ) <EOL> def count_errors ( statistics ) : <EOL> result = <NUM_LIT:0> <EOL> for name , attributes , error_type , message , body in statistics : <EOL> if error_type is not None : <EOL> result += <NUM_LIT:1> <EOL> return result <EOL> def collect_measurements ( rdr ) : <EOL> '''<STR_LIT>''' <EOL> header = rdr . next ( ) <EOL> d = { } <EOL> for field in header : <EOL> ignore = False <EOL> for ignore_category in ignore_categories : <EOL> if field . find ( ignore_category ) != - <NUM_LIT:1> : <EOL> ignore = True <EOL> break <EOL> if ignore : <EOL> continue <EOL> d [ field ] = [ ] <EOL> for i , row in enumerate ( rdr ) : <EOL> if len ( row ) != len ( header ) : <EOL> raise ValueError ( "<STR_LIT>" % <EOL> ( len ( row ) , len ( header ) , i + <NUM_LIT:1> ) ) <EOL> for value , field in zip ( row , header ) : <EOL> if d . has_key ( field ) : <EOL> d [ field ] . append ( value ) <EOL> for field in d . keys ( ) : <EOL> d [ field ] = np . array ( d [ field ] ) <EOL> for field in d . keys ( ) : <EOL> try : <EOL> tmp = d [ field ] <EOL> tmp_not_nan = tmp [ tmp != '<STR_LIT>' ] . astype ( np . float32 ) <EOL> if ( np . all ( tmp_not_nan == tmp_not_nan . 
astype ( int ) ) and <EOL> not np . any ( tmp == '<STR_LIT>' ) ) : <EOL> tmp_out = np . zeros ( len ( tmp ) , int ) <EOL> tmp_not_nan = tmp_not_nan . astype ( int ) <EOL> else : <EOL> tmp_out = np . zeros ( len ( tmp ) , np . float32 ) <EOL> if np . any ( tmp == '<STR_LIT>' ) : <EOL> tmp_out [ tmp == '<STR_LIT>' ] = np . nan <EOL> tmp_out [ tmp != '<STR_LIT>' ] = tmp_not_nan <EOL> else : <EOL> tmp_out = tmp_not_nan <EOL> d [ field ] = tmp_out <EOL> except : <EOL> pass <EOL> return d <EOL> def collect_per_image ( measurements , image_numbers ) : <EOL> image_indexes = measurements [ "<STR_LIT>" ] <EOL> result = { } <EOL> for key in measurements . keys ( ) : <EOL> result [ key ] = [ measurements [ key ] [ image_indexes == i ] <EOL> for i in image_numbers ] <EOL> return result <EOL> def test_matching_columns ( test_measurements , reference_measurements ) : <EOL> '''<STR_LIT>''' <EOL> assert isinstance ( test_measurements , dict ) <EOL> assert isinstance ( reference_measurements , dict ) <EOL> missing_in_test = [ ] <EOL> missing_in_reference = [ ] <EOL> for feature in test_measurements . keys ( ) : <EOL> if not reference_measurements . has_key ( feature ) : <EOL> missing_in_reference . append ( feature ) <EOL> for feature in reference_measurements . keys ( ) : <EOL> if not test_measurements . has_key ( feature ) : <EOL> missing_in_test . append ( feature ) <EOL> for feature in missing_in_test : <EOL> del reference_measurements [ feature ] <EOL> for feature in missing_in_reference : <EOL> del test_measurements [ feature ] <EOL> if len ( missing_in_reference ) + len ( missing_in_test ) > <NUM_LIT:0> : <EOL> body = "<STR_LIT>" <EOL> if len ( missing_in_reference ) : <EOL> body += ( "<STR_LIT>" % <EOL> '<STR_LIT>' . join ( missing_in_reference ) ) <EOL> message = "<STR_LIT>" <EOL> if len ( missing_in_test ) : <EOL> body += ( "<STR_LIT>" % <EOL> '<STR_LIT>' . 
join ( missing_in_test ) ) <EOL> if len ( missing_in_reference ) : <EOL> message += "<STR_LIT>" <EOL> else : <EOL> message = "<STR_LIT>" <EOL> return [ make_error_statistic ( MATCHING_COLUMNS , { } , <EOL> ERROR_TYPE_LOGICAL , message , body ) ] <EOL> return [ ] <EOL> def test_deviations ( test_measurements , reference_measurements , <EOL> max_deviation , max_nan_deviation , max_obj_deviation , <EOL> per_image = False ) : <EOL> statistics = [ ] <EOL> feature = test_measurements . keys ( ) [ <NUM_LIT:0> ] <EOL> tm_len = len ( test_measurements [ feature ] ) <EOL> rm_len = len ( reference_measurements [ feature ] ) <EOL> if tm_len + rm_len > <NUM_LIT:0> : <EOL> deviance = ( float ( abs ( tm_len - rm_len ) ) / <EOL> float ( tm_len + rm_len ) ) <EOL> if deviance > max_obj_deviation : <EOL> message = ( "<STR_LIT>" % <EOL> ( tm_len , rm_len ) ) <EOL> s = make_error_statistic ( MEASUREMENT_COUNT , { } , <EOL> ERROR_TYPE_QUANTITY , message , "<STR_LIT>" , <EOL> per_image ) <EOL> statistics += [ s ] <EOL> for feature in test_measurements . keys ( ) : <EOL> statistics += test_deviation ( feature , <EOL> test_measurements [ feature ] , <EOL> reference_measurements [ feature ] , <EOL> max_deviation , <EOL> max_nan_deviation , <EOL> max_obj_deviation , <EOL> per_image ) <EOL> return statistics <EOL> def test_deviation ( feature , test_measurement , reference_measurement , <EOL> max_deviation , max_nan_deviation , max_obj_deviation , <EOL> per_image ) : <EOL> statistics = [ ] <EOL> if test_measurement . dtype == np . float32 : <EOL> return test_float_deviation ( feature , test_measurement , <EOL> reference_measurement , <EOL> max_deviation , max_nan_deviation , <EOL> per_image ) <EOL> elif test_measurement . 
dtype == int : <EOL> return test_integer_deviation ( feature , test_measurement , <EOL> reference_measurement , <EOL> max_deviation , per_image ) <EOL> else : <EOL> return test_string_deviation ( feature , test_measurement , <EOL> reference_measurement , per_image ) <EOL> def test_float_deviation ( feature , test_measurement , reference_measurement , <EOL> max_deviation , max_nan_deviation , per_image ) : <EOL> tm_no_nan = test_measurement [ ~ np . isnan ( test_measurement ) ] <EOL> rm_no_nan = reference_measurement [ ~ np . isnan ( reference_measurement ) ] <EOL> tm_nan_fraction = <NUM_LIT:1.0> - float ( len ( tm_no_nan ) ) / float ( len ( test_measurement ) ) <EOL> rm_nan_fraction = <NUM_LIT:1.0> - float ( len ( rm_no_nan ) ) / float ( len ( reference_measurement ) ) <EOL> if tm_nan_fraction + rm_nan_fraction > <NUM_LIT:0> : <EOL> nan_deviation = ( abs ( tm_nan_fraction - rm_nan_fraction ) / <EOL> ( tm_nan_fraction + rm_nan_fraction ) ) <EOL> if nan_deviation > max_nan_deviation : <EOL> message = ( "<STR_LIT>" % <EOL> ( np . sum ( np . isnan ( test_measurement ) ) , <EOL> np . sum ( np . isnan ( reference_measurement ) ) ) ) <EOL> s = make_error_statistic ( feature , { } , ERROR_TYPE_QUANTITY , <EOL> message , "<STR_LIT>" , per_image ) <EOL> return [ s ] <EOL> test_mean = np . mean ( tm_no_nan ) <EOL> reference_mean = np . mean ( rm_no_nan ) <EOL> sd = ( np . std ( tm_no_nan ) + np . std ( rm_no_nan ) ) / <NUM_LIT> <EOL> sd = max ( sd , <NUM_LIT> , <NUM_LIT> * ( test_mean + reference_mean ) / <NUM_LIT> ) <EOL> mean_diff = abs ( test_mean - reference_mean ) / sd <EOL> if mean_diff > max_deviation : <EOL> message = ( "<STR_LIT>" % <EOL> ( test_mean , np . std ( tm_no_nan ) , reference_mean , <EOL> np . 
std ( rm_no_nan ) ) ) <EOL> s = make_error_statistic ( feature , { } , ERROR_TYPE_MEASUREMENT , <EOL> message , "<STR_LIT>" , per_image ) <EOL> return [ s ] <EOL> attributes = dict ( test_mean = test_mean , <EOL> reference_mean = reference_mean , <EOL> sd = sd , <EOL> test_nan_fraction = tm_nan_fraction , <EOL> reference_nan_fraction = rm_nan_fraction ) <EOL> return [ make_success_statistic ( feature , attributes , per_image ) ] <EOL> def test_integer_deviation ( feature , test_measurement , reference_measurement , <EOL> max_deviation , per_image ) : <EOL> do_like_float = False <EOL> for allowed_feature in ( "<STR_LIT:count>" , "<STR_LIT>" ) : <EOL> if feature . lower ( ) . find ( allowed_feature ) != - <NUM_LIT:1> : <EOL> do_like_float = True <EOL> break <EOL> if do_like_float : <EOL> return test_float_deviation ( feature , <EOL> test_measurement . astype ( np . float32 ) , <EOL> reference_measurement . astype ( np . float32 ) , <EOL> max_deviation , <NUM_LIT:1> , per_image ) <EOL> return [ ] <EOL> def test_string_deviation ( feature , test_measurement , reference_measurement , <EOL> per_image ) : <EOL> if len ( test_measurement ) != len ( reference_measurement ) : <EOL> return [ ] <EOL> indexes = np . argwhere ( test_measurement != reference_measurement ) <EOL> if len ( indexes != <NUM_LIT:0> ) : <EOL> body = '<STR_LIT:\n>' . join ( <EOL> [ "<STR_LIT>" % <EOL> ( i + <NUM_LIT:1> , test_measurement [ i ] , reference_measurement [ i ] ) <EOL> for i in indexes ] ) <EOL> message = "<STR_LIT>" <EOL> return [ make_error_statistic ( feature , { } , ERROR_TYPE_MEASUREMENT , <EOL> message , body , per_image ) ] <EOL> return [ make_success_statistic ( feature , { } , per_image ) ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> import optparse <EOL> parser = optparse . OptionParser ( ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . 
add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> parser . add_option ( "<STR_LIT>" , "<STR_LIT>" , <EOL> dest = "<STR_LIT>" , <EOL> default = "<STR_LIT>" , <EOL> help = "<STR_LIT>" ) <EOL> options , args = parser . parse_args ( ) <EOL> test_files ( options . test_name , <EOL> options . test_file , <EOL> options . reference_file , <EOL> options . output_file ) </s>
<s> '''<STR_LIT>''' <EOL> import unittest <EOL> from StringIO import StringIO <EOL> import numpy as np <EOL> import cellprofiler . measurements as cpmeas <EOL> import cellprofiler . utilities . rules as R <EOL> OBJECT_NAME = "<STR_LIT>" <EOL> M_FEATURES = [ "<STR_LIT>" % i for i in range ( <NUM_LIT:1> , <NUM_LIT:11> ) ] <EOL> class TestRules ( unittest . TestCase ) : <EOL> def test_01_01_load_rules ( self ) : <EOL> data = """<STR_LIT>""" <EOL> fd = StringIO ( data ) <EOL> rules = R . Rules ( ) <EOL> rules . parse ( fd ) <EOL> self . assertEqual ( len ( rules . rules ) , <NUM_LIT:20> ) <EOL> for rule in rules . rules : <EOL> self . assertEqual ( rule . object_name , "<STR_LIT>" ) <EOL> self . assertEqual ( rule . comparitor , "<STR_LIT:>>" ) <EOL> rule = rules . rules [ <NUM_LIT:0> ] <EOL> self . assertEqual ( rule . feature , "<STR_LIT>" ) <EOL> self . assertAlmostEqual ( rule . threshold , <NUM_LIT> ) <EOL> self . assertAlmostEqual ( rule . weights [ <NUM_LIT:0> , <NUM_LIT:0> ] , <NUM_LIT> ) <EOL> self . assertAlmostEqual ( rule . weights [ <NUM_LIT:0> , <NUM_LIT:1> ] , - <NUM_LIT> ) <EOL> self . assertAlmostEqual ( rule . weights [ <NUM_LIT:1> , <NUM_LIT:0> ] , - <NUM_LIT> ) <EOL> self . assertAlmostEqual ( rule . weights [ <NUM_LIT:1> , <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> def test_02_00_no_measurements ( self ) : <EOL> m = cpmeas . Measurements ( ) <EOL> m . add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , np . array ( [ ] , float ) ) <EOL> rules = R . Rules ( ) <EOL> rules . rules += [ R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , "<STR_LIT:>>" , <NUM_LIT:0> , <EOL> np . array ( [ [ <NUM_LIT:1.0> , - <NUM_LIT:1.0> ] , [ - <NUM_LIT:1.0> , <NUM_LIT:1.0> ] ] ) ) ] <EOL> score = rules . score ( m ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:0> ] , <NUM_LIT:0> ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> def test_02_01_score_one_positive ( self ) : <EOL> m = cpmeas . Measurements ( ) <EOL> m . 
add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , np . array ( [ <NUM_LIT> ] , float ) ) <EOL> rules = R . Rules ( ) <EOL> rules . rules += [ R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , "<STR_LIT:>>" , <NUM_LIT:0> , <EOL> np . array ( [ [ <NUM_LIT:1.0> , - <NUM_LIT:0.5> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) ) ] <EOL> score = rules . score ( m ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:0> ] , <NUM_LIT:1.0> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:1> ] , - <NUM_LIT:0.5> ) <EOL> def test_02_02_score_one_negative ( self ) : <EOL> m = cpmeas . Measurements ( ) <EOL> m . add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , np . array ( [ <NUM_LIT> ] , float ) ) <EOL> rules = R . Rules ( ) <EOL> rules . rules += [ R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , "<STR_LIT:>>" , <NUM_LIT> , <EOL> np . array ( [ [ <NUM_LIT:1.0> , - <NUM_LIT:0.5> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) ) ] <EOL> score = rules . score ( m ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:0> ] , - <NUM_LIT> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> def test_02_03_score_one_nan ( self ) : <EOL> m = cpmeas . Measurements ( ) <EOL> m . add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , np . array ( [ np . NaN ] , float ) ) <EOL> rules = R . Rules ( ) <EOL> rules . rules += [ R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , "<STR_LIT:>>" , <NUM_LIT> , <EOL> np . array ( [ [ <NUM_LIT:1.0> , - <NUM_LIT:0.5> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) ) ] <EOL> score = rules . score ( m ) <EOL> self . assertEqual ( score . 
shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> self . assertTrue ( score [ <NUM_LIT:0> , <NUM_LIT:0> ] , - <NUM_LIT:2> ) <EOL> self . assertTrue ( score [ <NUM_LIT:0> , <NUM_LIT:1> ] , <NUM_LIT> ) <EOL> def test_03_01_score_two_rules ( self ) : <EOL> m = cpmeas . Measurements ( ) <EOL> m . add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , np . array ( [ <NUM_LIT> ] , float ) ) <EOL> m . add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:1> ] , np . array ( [ - <NUM_LIT> ] , float ) ) <EOL> rules = R . Rules ( ) <EOL> rules . rules += [ R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , "<STR_LIT:>>" , <NUM_LIT:0> , <EOL> np . array ( [ [ <NUM_LIT:1.0> , - <NUM_LIT:0.5> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) ) , <EOL> R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:1> ] , "<STR_LIT:>>" , <NUM_LIT:0> , <EOL> np . array ( [ [ <NUM_LIT> , - <NUM_LIT> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) ) ] <EOL> score = rules . score ( m ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:0> ] , <NUM_LIT:1> ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:0> ] , <NUM_LIT:1.0> - <NUM_LIT> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:1> ] , - <NUM_LIT:0.5> + <NUM_LIT> ) <EOL> def test_03_02_score_two_objects ( self ) : <EOL> m = cpmeas . Measurements ( ) <EOL> m . add_measurement ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , np . array ( [ <NUM_LIT> , <NUM_LIT> ] , float ) ) <EOL> rules = R . Rules ( ) <EOL> rules . rules += [ R . Rules . Rule ( OBJECT_NAME , M_FEATURES [ <NUM_LIT:0> ] , "<STR_LIT:<>" , <NUM_LIT> , <EOL> np . array ( [ [ <NUM_LIT:1.0> , - <NUM_LIT:0.5> ] , [ - <NUM_LIT> , <NUM_LIT> ] ] ) ) ] <EOL> score = rules . score ( m ) <EOL> self . assertEqual ( score . shape [ <NUM_LIT:0> ] , <NUM_LIT:2> ) <EOL> self . assertEqual ( score . 
shape [ <NUM_LIT:1> ] , <NUM_LIT:2> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:0> ] , <NUM_LIT:1.0> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:0> , <NUM_LIT:1> ] , - <NUM_LIT:0.5> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:1> , <NUM_LIT:0> ] , - <NUM_LIT> ) <EOL> self . assertAlmostEqual ( score [ <NUM_LIT:1> , <NUM_LIT:1> ] , <NUM_LIT> ) </s>
<s> import sys <EOL> from importlib import import_module <EOL> app_name = "<STR_LIT>" <EOL> commands = { <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" , <EOL> } <EOL> def load_command_class ( name ) : <EOL> module = import_module ( "<STR_LIT>" % ( app_name , name ) ) <EOL> return module . Command ( ) <EOL> def get_command ( name ) : <EOL> try : <EOL> commands [ name ] <EOL> except : <EOL> print "<STR_LIT>" % name <EOL> return None <EOL> return load_command_class ( name ) <EOL> def help_text ( ) : <EOL> text = [ <EOL> "<STR_LIT>" , <EOL> "<STR_LIT>" <EOL> ] <EOL> for key , value in commands . iteritems ( ) : <EOL> cmd_text = "<STR_LIT>" % ( key , value ) <EOL> text . append ( cmd_text ) <EOL> return "<STR_LIT:\n>" . join ( text ) + "<STR_LIT>" <EOL> def execute_from_command_line ( ) : <EOL> try : <EOL> cmd = sys . argv [ <NUM_LIT:1> ] <EOL> except IndexError : <EOL> cmd = "<STR_LIT>" <EOL> try : <EOL> subcmd = sys . argv [ <NUM_LIT:2> : ] <EOL> except : <EOL> print "<STR_LIT:error>" <EOL> if cmd == "<STR_LIT>" : <EOL> sys . stdout . write ( help_text ( ) ) <EOL> else : <EOL> exe = get_command ( cmd ) <EOL> if exe : <EOL> exe . execute ( subcmd ) </s>
<s> from django . conf . urls import url <EOL> from django . contrib . auth . decorators import login_required as login <EOL> from . import views <EOL> urlpatterns = [ <EOL> url ( r'<STR_LIT>' , login ( views . OSFStatisticsListView . as_view ( ) ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , login ( views . update_metrics ) , name = '<STR_LIT>' ) , <EOL> url ( r'<STR_LIT>' , login ( views . download_csv ) , name = '<STR_LIT>' ) , <EOL> ] </s>
<s> from nose . tools import * <EOL> from tests . base import AdminTestCase <EOL> from tests . factories import NodeFactory , UserFactory <EOL> from admin . nodes . serializers import serialize_simple_user , serialize_node <EOL> class TestNodeSerializers ( AdminTestCase ) : <EOL> def test_serialize_node ( self ) : <EOL> node = NodeFactory ( ) <EOL> info = serialize_node ( node ) <EOL> assert_is_instance ( info , dict ) <EOL> assert_equal ( info [ '<STR_LIT>' ] , node . parent_id ) <EOL> assert_equal ( info [ '<STR_LIT:title>' ] , node . title ) <EOL> assert_equal ( info [ '<STR_LIT>' ] , [ ] ) <EOL> assert_equal ( info [ '<STR_LIT:id>' ] , node . _id ) <EOL> assert_equal ( info [ '<STR_LIT>' ] , node . is_public ) <EOL> assert_equal ( len ( info [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> assert_false ( info [ '<STR_LIT>' ] ) <EOL> def test_serialize_deleted ( self ) : <EOL> node = NodeFactory ( ) <EOL> info = serialize_node ( node ) <EOL> assert_false ( info [ '<STR_LIT>' ] ) <EOL> node . is_deleted = True <EOL> info = serialize_node ( node ) <EOL> assert_true ( info [ '<STR_LIT>' ] ) <EOL> node . is_deleted = False <EOL> info = serialize_node ( node ) <EOL> assert_false ( info [ '<STR_LIT>' ] ) <EOL> def test_serialize_simple_user ( self ) : <EOL> user = UserFactory ( ) <EOL> info = serialize_simple_user ( ( user . _id , '<STR_LIT>' ) ) <EOL> assert_is_instance ( info , dict ) <EOL> assert_equal ( info [ '<STR_LIT:id>' ] , user . _id ) <EOL> assert_equal ( info [ '<STR_LIT:name>' ] , user . fullname ) <EOL> assert_equal ( info [ '<STR_LIT>' ] , '<STR_LIT>' ) </s>
<s> from modularodm import Q <EOL> from modularodm . exceptions import NoResultsFound <EOL> from rest_framework . exceptions import NotFound <EOL> from rest_framework . reverse import reverse <EOL> import furl <EOL> from website import util as website_util <EOL> from website import settings as website_settings <EOL> from framework . auth import Auth , User <EOL> from api . base . exceptions import Gone <EOL> TRUTHY = set ( ( '<STR_LIT:t>' , '<STR_LIT:T>' , '<STR_LIT:true>' , '<STR_LIT:True>' , '<STR_LIT>' , '<STR_LIT:1>' , <NUM_LIT:1> , True ) ) <EOL> FALSY = set ( ( '<STR_LIT:f>' , '<STR_LIT:F>' , '<STR_LIT:false>' , '<STR_LIT:False>' , '<STR_LIT>' , '<STR_LIT:0>' , <NUM_LIT:0> , <NUM_LIT:0.0> , False ) ) <EOL> UPDATE_METHODS = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> def decompose_field ( field ) : <EOL> from api . base . serializers import ( <EOL> HideIfRetraction , HideIfRegistration , <EOL> HideIfDisabled , AllowMissing <EOL> ) <EOL> WRAPPER_FIELDS = ( HideIfRetraction , HideIfRegistration , HideIfDisabled , AllowMissing ) <EOL> while isinstance ( field , WRAPPER_FIELDS ) : <EOL> try : <EOL> field = getattr ( field , '<STR_LIT>' ) <EOL> except AttributeError : <EOL> break <EOL> return field <EOL> def is_bulk_request ( request ) : <EOL> """<STR_LIT>""" <EOL> content_type = request . content_type <EOL> return '<STR_LIT>' in content_type <EOL> def is_truthy ( value ) : <EOL> return value in TRUTHY <EOL> def is_falsy ( value ) : <EOL> return value in FALSY <EOL> def get_user_auth ( request ) : <EOL> """<STR_LIT>""" <EOL> user = request . user <EOL> private_key = request . query_params . get ( '<STR_LIT>' , None ) <EOL> if user . 
is_anonymous ( ) : <EOL> auth = Auth ( None , private_key = private_key ) <EOL> else : <EOL> auth = Auth ( user , private_key = private_key ) <EOL> return auth <EOL> def absolute_reverse ( view_name , query_kwargs = None , args = None , kwargs = None ) : <EOL> """<STR_LIT>""" <EOL> relative_url = reverse ( view_name , kwargs = kwargs ) <EOL> url = website_util . api_v2_url ( relative_url , params = query_kwargs , base_prefix = '<STR_LIT>' ) <EOL> return url <EOL> def get_object_or_error ( model_cls , query_or_pk , display_name = None , ** kwargs ) : <EOL> if isinstance ( query_or_pk , basestring ) : <EOL> obj = model_cls . load ( query_or_pk ) <EOL> if obj is None : <EOL> raise NotFound <EOL> else : <EOL> try : <EOL> obj = model_cls . find_one ( query_or_pk , ** kwargs ) <EOL> except NoResultsFound : <EOL> raise NotFound <EOL> if model_cls is User and obj . is_disabled : <EOL> raise Gone ( detail = '<STR_LIT>' , <EOL> meta = { '<STR_LIT>' : obj . fullname , '<STR_LIT>' : obj . family_name , '<STR_LIT>' : obj . given_name , <EOL> '<STR_LIT>' : obj . middle_names , '<STR_LIT>' : obj . profile_image_url ( ) } ) <EOL> elif model_cls is not User and not getattr ( obj , '<STR_LIT>' , True ) or getattr ( obj , '<STR_LIT>' , False ) : <EOL> if display_name is None : <EOL> raise Gone <EOL> else : <EOL> raise Gone ( detail = '<STR_LIT>' . format ( name = display_name ) ) <EOL> return obj <EOL> def waterbutler_url_for ( request_type , provider , path , node_id , token , obj_args = None , ** query ) : <EOL> """<STR_LIT>""" <EOL> url = furl . furl ( website_settings . WATERBUTLER_URL ) <EOL> url . path . segments . append ( request_type ) <EOL> url . args . update ( { <EOL> '<STR_LIT:path>' : path , <EOL> '<STR_LIT>' : node_id , <EOL> '<STR_LIT>' : provider , <EOL> } ) <EOL> if token is not None : <EOL> url . args [ '<STR_LIT>' ] = token <EOL> if '<STR_LIT>' in obj_args : <EOL> url . args [ '<STR_LIT>' ] = obj_args [ '<STR_LIT>' ] <EOL> url . args . 
update ( query ) <EOL> return url . url <EOL> def default_node_list_query ( ) : <EOL> return ( <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , True ) & <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , True ) & <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> ) <EOL> def default_node_permission_query ( user ) : <EOL> permission_query = Q ( '<STR_LIT>' , '<STR_LIT>' , True ) <EOL> if not user . is_anonymous ( ) : <EOL> permission_query = ( permission_query | Q ( '<STR_LIT>' , '<STR_LIT>' , user . _id ) ) <EOL> return permission_query <EOL> def extend_querystring_params ( url , params ) : <EOL> return furl . furl ( url ) . add ( args = params ) . url </s>
<s> from rest_framework import serializers as ser <EOL> from rest_framework import exceptions <EOL> from modularodm import Q <EOL> from modularodm . exceptions import ValidationValueError <EOL> from framework . auth . core import Auth <EOL> from framework . exceptions import PermissionsError <EOL> from website . models import Node , User , Comment , Institution <EOL> from website . exceptions import NodeStateError , UserNotAffiliatedError <EOL> from website . util import permissions as osf_permissions <EOL> from website . project . model import NodeUpdateError <EOL> from api . base . utils import get_user_auth , get_object_or_error , absolute_reverse <EOL> from api . base . serializers import ( JSONAPISerializer , WaterbutlerLink , NodeFileHyperLinkField , IDField , TypeField , <EOL> TargetTypeField , JSONAPIListField , LinksField , RelationshipField , DevOnly , <EOL> HideIfRegistration ) <EOL> from api . base . exceptions import InvalidModelValueError <EOL> class NodeTagField ( ser . Field ) : <EOL> def to_representation ( self , obj ) : <EOL> if obj is not None : <EOL> return obj . 
_id <EOL> return None <EOL> def to_internal_value ( self , data ) : <EOL> return data <EOL> class NodeSerializer ( JSONAPISerializer ) : <EOL> filterable_fields = frozenset ( [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:title>' , <EOL> '<STR_LIT:description>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:root>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] ) <EOL> non_anonymized_fields = [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT:title>' , <EOL> '<STR_LIT:description>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:root>' , <EOL> '<STR_LIT>' , <EOL> ] <EOL> id = IDField ( source = '<STR_LIT>' , read_only = True ) <EOL> type = TypeField ( ) <EOL> category_choices = Node . CATEGORY_MAP . items ( ) <EOL> category_choices_string = '<STR_LIT:U+002CU+0020>' . join ( [ "<STR_LIT>" . format ( choice [ <NUM_LIT:0> ] ) for choice in category_choices ] ) <EOL> title = ser . CharField ( required = True ) <EOL> description = ser . CharField ( required = False , allow_blank = True , allow_null = True ) <EOL> category = ser . ChoiceField ( choices = category_choices , help_text = "<STR_LIT>" + category_choices_string ) <EOL> date_created = ser . DateTimeField ( read_only = True ) <EOL> date_modified = ser . DateTimeField ( read_only = True ) <EOL> registration = ser . BooleanField ( read_only = True , source = '<STR_LIT>' ) <EOL> fork = ser . BooleanField ( read_only = True , source = '<STR_LIT>' ) <EOL> collection = ser . BooleanField ( read_only = True , source = '<STR_LIT>' ) <EOL> tags = JSONAPIListField ( child = NodeTagField ( ) , required = False ) <EOL> template_from = ser . 
CharField ( required = False , allow_blank = False , allow_null = False , <EOL> help_text = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> current_user_permissions = ser . SerializerMethodField ( help_text = '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> public = ser . BooleanField ( source = '<STR_LIT>' , required = False , <EOL> help_text = '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' <EOL> '<STR_LIT>' ) <EOL> links = LinksField ( { '<STR_LIT:html>' : '<STR_LIT>' } ) <EOL> children = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> related_meta = { '<STR_LIT:count>' : '<STR_LIT>' } , <EOL> ) <EOL> comments = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> related_meta = { '<STR_LIT>' : '<STR_LIT>' } ) <EOL> contributors = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> related_meta = { '<STR_LIT:count>' : '<STR_LIT>' } , <EOL> ) <EOL> files = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> forked_from = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> node_links = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> related_meta = { '<STR_LIT:count>' : '<STR_LIT>' } , <EOL> ) <EOL> parent = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> filter_key = '<STR_LIT>' <EOL> ) <EOL> registrations = DevOnly ( HideIfRegistration ( RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> related_meta = { '<STR_LIT:count>' : 
'<STR_LIT>' } <EOL> ) ) ) <EOL> primary_institution = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> self_view = '<STR_LIT>' , <EOL> self_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> root = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } <EOL> ) <EOL> logs = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> related_meta = { '<STR_LIT:count>' : '<STR_LIT>' } <EOL> ) <EOL> def get_current_user_permissions ( self , obj ) : <EOL> user = self . context [ '<STR_LIT>' ] . user <EOL> if user . is_anonymous ( ) : <EOL> return [ '<STR_LIT>' ] <EOL> permissions = obj . get_permissions ( user = user ) <EOL> if not permissions : <EOL> permissions = [ '<STR_LIT>' ] <EOL> return permissions <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> def get_absolute_url ( self , obj ) : <EOL> return obj . get_absolute_url ( ) <EOL> def get_logs_count ( self , obj ) : <EOL> return len ( obj . logs ) <EOL> def get_node_count ( self , obj ) : <EOL> auth = get_user_auth ( self . context [ '<STR_LIT>' ] ) <EOL> nodes = [ node for node in obj . nodes if node . can_view ( auth ) and node . primary and not node . is_deleted ] <EOL> return len ( nodes ) <EOL> def get_contrib_count ( self , obj ) : <EOL> return len ( obj . contributors ) <EOL> def get_registration_count ( self , obj ) : <EOL> auth = get_user_auth ( self . context [ '<STR_LIT>' ] ) <EOL> registrations = [ node for node in obj . registrations_all if node . can_view ( auth ) ] <EOL> return len ( registrations ) <EOL> def get_pointers_count ( self , obj ) : <EOL> return len ( obj . nodes_pointer ) <EOL> def get_unread_comments_count ( self , obj ) : <EOL> user = get_user_auth ( self . context [ '<STR_LIT>' ] ) . user <EOL> node_comments = Comment . 
find_n_unread ( user = user , node = obj , page = '<STR_LIT>' ) <EOL> return { <EOL> '<STR_LIT>' : node_comments <EOL> } <EOL> def create ( self , validated_data ) : <EOL> if '<STR_LIT>' in validated_data : <EOL> request = self . context [ '<STR_LIT>' ] <EOL> user = request . user <EOL> template_from = validated_data . pop ( '<STR_LIT>' ) <EOL> template_node = Node . load ( key = template_from ) <EOL> if template_node is None : <EOL> raise exceptions . NotFound <EOL> if not template_node . has_permission ( user , '<STR_LIT>' , check_parent = False ) : <EOL> raise exceptions . PermissionDenied <EOL> validated_data . pop ( '<STR_LIT>' ) <EOL> changed_data = { template_from : validated_data } <EOL> node = template_node . use_as_template ( auth = get_user_auth ( request ) , changes = changed_data ) <EOL> else : <EOL> node = Node ( ** validated_data ) <EOL> try : <EOL> node . save ( ) <EOL> except ValidationValueError as e : <EOL> raise InvalidModelValueError ( detail = e . message ) <EOL> return node <EOL> def update ( self , node , validated_data ) : <EOL> """<STR_LIT>""" <EOL> assert isinstance ( node , Node ) , '<STR_LIT>' <EOL> auth = get_user_auth ( self . context [ '<STR_LIT>' ] ) <EOL> old_tags = set ( [ tag . _id for tag in node . tags ] ) <EOL> if '<STR_LIT>' in validated_data : <EOL> current_tags = set ( validated_data . get ( '<STR_LIT>' ) ) <EOL> del validated_data [ '<STR_LIT>' ] <EOL> elif self . partial : <EOL> current_tags = set ( old_tags ) <EOL> else : <EOL> current_tags = set ( ) <EOL> for new_tag in ( current_tags - old_tags ) : <EOL> node . add_tag ( new_tag , auth = auth ) <EOL> for deleted_tag in ( old_tags - current_tags ) : <EOL> node . remove_tag ( deleted_tag , auth = auth ) <EOL> if validated_data : <EOL> try : <EOL> node . update ( validated_data , auth = auth ) <EOL> except ValidationValueError as e : <EOL> raise InvalidModelValueError ( detail = e . message ) <EOL> except PermissionsError : <EOL> raise exceptions . 
PermissionDenied <EOL> except NodeUpdateError as e : <EOL> raise exceptions . ValidationError ( detail = e . reason ) <EOL> except NodeStateError as e : <EOL> raise InvalidModelValueError ( detail = e . message ) <EOL> return node <EOL> class NodeDetailSerializer ( NodeSerializer ) : <EOL> """<STR_LIT>""" <EOL> id = IDField ( source = '<STR_LIT>' , required = True ) <EOL> class NodeContributorsSerializer ( JSONAPISerializer ) : <EOL> """<STR_LIT>""" <EOL> non_anonymized_fields = [ '<STR_LIT>' , '<STR_LIT>' ] <EOL> filterable_fields = frozenset ( [ <EOL> '<STR_LIT:id>' , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT>' <EOL> ] ) <EOL> id = IDField ( source = '<STR_LIT>' , required = True ) <EOL> type = TypeField ( ) <EOL> bibliographic = ser . BooleanField ( help_text = '<STR_LIT>' , <EOL> default = True ) <EOL> permission = ser . ChoiceField ( choices = osf_permissions . PERMISSIONS , required = False , allow_null = True , <EOL> default = osf_permissions . reduce_permissions ( osf_permissions . DEFAULT_CONTRIBUTOR_PERMISSIONS ) , <EOL> help_text = '<STR_LIT>' ) <EOL> links = LinksField ( { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> users = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> always_embed = True <EOL> ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> def get_absolute_url ( self , obj ) : <EOL> node_id = self . context [ '<STR_LIT>' ] . parser_context [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return absolute_reverse ( <EOL> '<STR_LIT>' , <EOL> kwargs = { <EOL> '<STR_LIT>' : node_id , <EOL> '<STR_LIT>' : obj . _id <EOL> } <EOL> ) <EOL> class NodeContributorsCreateSerializer ( NodeContributorsSerializer ) : <EOL> """<STR_LIT>""" <EOL> target_type = TargetTypeField ( target_type = '<STR_LIT>' ) <EOL> def create ( self , validated_data ) : <EOL> auth = Auth ( self . context [ '<STR_LIT>' ] . user ) <EOL> node = self . context [ '<STR_LIT>' ] . 
get_node ( ) <EOL> contributor = get_object_or_error ( User , validated_data [ '<STR_LIT>' ] , display_name = '<STR_LIT:user>' ) <EOL> if contributor in node . contributors : <EOL> raise exceptions . ValidationError ( '<STR_LIT>' . format ( contributor . fullname ) ) <EOL> bibliographic = validated_data [ '<STR_LIT>' ] <EOL> permissions = osf_permissions . expand_permissions ( validated_data . get ( '<STR_LIT>' ) ) or osf_permissions . DEFAULT_CONTRIBUTOR_PERMISSIONS <EOL> node . add_contributor ( contributor = contributor , auth = auth , visible = bibliographic , permissions = permissions , save = True ) <EOL> contributor . permission = osf_permissions . reduce_permissions ( node . get_permissions ( contributor ) ) <EOL> contributor . bibliographic = node . get_visible ( contributor ) <EOL> contributor . node_id = node . _id <EOL> return contributor <EOL> class NodeContributorDetailSerializer ( NodeContributorsSerializer ) : <EOL> """<STR_LIT>""" <EOL> def update ( self , instance , validated_data ) : <EOL> contributor = instance <EOL> auth = Auth ( self . context [ '<STR_LIT>' ] . user ) <EOL> node = self . context [ '<STR_LIT>' ] . get_node ( ) <EOL> visible = validated_data . get ( '<STR_LIT>' ) <EOL> permission = validated_data . get ( '<STR_LIT>' ) <EOL> try : <EOL> node . update_contributor ( contributor , permission , visible , auth , save = True ) <EOL> except NodeStateError as e : <EOL> raise exceptions . ValidationError ( detail = e . message ) <EOL> contributor . permission = osf_permissions . reduce_permissions ( node . get_permissions ( contributor ) ) <EOL> contributor . bibliographic = node . get_visible ( contributor ) <EOL> contributor . node_id = node . 
_id <EOL> return contributor <EOL> class NodeLinksSerializer ( JSONAPISerializer ) : <EOL> id = IDField ( source = '<STR_LIT>' ) <EOL> type = TypeField ( ) <EOL> target_type = TargetTypeField ( target_type = '<STR_LIT>' ) <EOL> target_node = RelationshipField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> always_embed = True <EOL> ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> links = LinksField ( { <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } ) <EOL> def get_absolute_url ( self , obj ) : <EOL> node_id = self . context [ '<STR_LIT>' ] . parser_context [ '<STR_LIT>' ] [ '<STR_LIT>' ] <EOL> return absolute_reverse ( <EOL> '<STR_LIT>' , <EOL> kwargs = { <EOL> '<STR_LIT>' : node_id , <EOL> '<STR_LIT>' : obj . _id <EOL> } <EOL> ) <EOL> def create ( self , validated_data ) : <EOL> request = self . context [ '<STR_LIT>' ] <EOL> user = request . user <EOL> auth = Auth ( user ) <EOL> node = self . context [ '<STR_LIT>' ] . get_node ( ) <EOL> target_node_id = validated_data [ '<STR_LIT>' ] <EOL> pointer_node = Node . load ( target_node_id ) <EOL> if not pointer_node or pointer_node . is_collection : <EOL> raise InvalidModelValueError ( <EOL> source = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> detail = '<STR_LIT>' . format ( target_node_id ) <EOL> ) <EOL> try : <EOL> pointer = node . add_pointer ( pointer_node , auth , save = True ) <EOL> return pointer <EOL> except ValueError : <EOL> raise InvalidModelValueError ( <EOL> source = { '<STR_LIT>' : '<STR_LIT>' } , <EOL> detail = '<STR_LIT>' . format ( target_node_id , node . _id ) <EOL> ) <EOL> def update ( self , instance , validated_data ) : <EOL> pass <EOL> class NodeProviderSerializer ( JSONAPISerializer ) : <EOL> id = ser . SerializerMethodField ( read_only = True ) <EOL> kind = ser . CharField ( read_only = True ) <EOL> name = ser . CharField ( read_only = True ) <EOL> path = ser . CharField ( read_only = True ) <EOL> node = ser . 
CharField ( source = '<STR_LIT>' , read_only = True ) <EOL> provider = ser . CharField ( read_only = True ) <EOL> files = NodeFileHyperLinkField ( <EOL> related_view = '<STR_LIT>' , <EOL> related_view_kwargs = { '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT:path>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' } , <EOL> kind = '<STR_LIT>' , <EOL> never_embed = True <EOL> ) <EOL> links = LinksField ( { <EOL> '<STR_LIT>' : WaterbutlerLink ( ) , <EOL> '<STR_LIT>' : WaterbutlerLink ( kind = '<STR_LIT>' ) <EOL> } ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> @ staticmethod <EOL> def get_id ( obj ) : <EOL> return '<STR_LIT>' . format ( obj . node . _id , obj . provider ) <EOL> def get_absolute_url ( self , obj ) : <EOL> return absolute_reverse ( <EOL> '<STR_LIT>' , <EOL> kwargs = { <EOL> '<STR_LIT>' : obj . node . _id , <EOL> '<STR_LIT>' : obj . provider <EOL> } <EOL> ) <EOL> class NodeInstitutionRelationshipSerializer ( ser . Serializer ) : <EOL> id = ser . CharField ( source = '<STR_LIT>' , required = False , allow_null = True ) <EOL> type = TypeField ( required = False , allow_null = True ) <EOL> links = LinksField ( { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> } ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> def get_self_link ( self , obj ) : <EOL> return obj . institution_relationship_url ( ) <EOL> def get_related_link ( self , obj ) : <EOL> return obj . institution_url ( ) <EOL> def update ( self , instance , validated_data ) : <EOL> node = instance <EOL> user = self . context [ '<STR_LIT>' ] . user <EOL> inst = validated_data . get ( '<STR_LIT>' , None ) <EOL> if inst : <EOL> inst = Institution . load ( inst ) <EOL> if not inst : <EOL> raise exceptions . NotFound <EOL> try : <EOL> node . add_primary_institution ( inst = inst , user = user ) <EOL> except UserNotAffiliatedError : <EOL> raise exceptions . ValidationError ( detail = '<STR_LIT>' ) <EOL> node . save ( ) <EOL> return node <EOL> node . 
remove_primary_institution ( user ) <EOL> node . save ( ) <EOL> return node <EOL> def to_representation ( self , obj ) : <EOL> data = { } <EOL> meta = getattr ( self , '<STR_LIT:Meta>' , None ) <EOL> type_ = getattr ( meta , '<STR_LIT>' , None ) <EOL> assert type_ is not None , '<STR_LIT>' <EOL> relation_id_field = self . fields [ '<STR_LIT:id>' ] <EOL> data [ '<STR_LIT:data>' ] = None <EOL> if obj . primary_institution : <EOL> attribute = obj . primary_institution . _id <EOL> relationship = relation_id_field . to_representation ( attribute ) <EOL> data [ '<STR_LIT:data>' ] = { '<STR_LIT:type>' : type_ , '<STR_LIT:id>' : relationship } <EOL> data [ '<STR_LIT>' ] = { key : val for key , val in self . fields . get ( '<STR_LIT>' ) . to_representation ( obj ) . iteritems ( ) } <EOL> return data <EOL> class NodeAlternativeCitationSerializer ( JSONAPISerializer ) : <EOL> id = IDField ( source = "<STR_LIT>" , read_only = True ) <EOL> type = TypeField ( ) <EOL> name = ser . CharField ( required = True ) <EOL> text = ser . CharField ( required = True ) <EOL> class Meta : <EOL> type_ = '<STR_LIT>' <EOL> def create ( self , validated_data ) : <EOL> errors = self . error_checker ( validated_data ) <EOL> if len ( errors ) > <NUM_LIT:0> : <EOL> raise exceptions . ValidationError ( detail = errors ) <EOL> node = self . context [ '<STR_LIT>' ] . get_node ( ) <EOL> auth = Auth ( self . context [ '<STR_LIT>' ] . _user ) <EOL> citation = node . add_citation ( auth , save = True , ** validated_data ) <EOL> return citation <EOL> def update ( self , instance , validated_data ) : <EOL> errors = self . error_checker ( validated_data ) <EOL> if len ( errors ) > <NUM_LIT:0> : <EOL> raise exceptions . ValidationError ( detail = errors ) <EOL> node = self . context [ '<STR_LIT>' ] . get_node ( ) <EOL> auth = Auth ( self . context [ '<STR_LIT>' ] . _user ) <EOL> instance = node . 
edit_citation ( auth , instance , save = True , ** validated_data ) <EOL> return instance <EOL> def error_checker ( self , data ) : <EOL> errors = [ ] <EOL> name = data . get ( '<STR_LIT:name>' , None ) <EOL> text = data . get ( '<STR_LIT:text>' , None ) <EOL> citations = self . context [ '<STR_LIT>' ] . get_node ( ) . alternative_citations <EOL> if not ( self . instance and self . instance . name == name ) and citations . find ( Q ( '<STR_LIT:name>' , '<STR_LIT>' , name ) ) . count ( ) > <NUM_LIT:0> : <EOL> errors . append ( "<STR_LIT>" . format ( name ) ) <EOL> if not ( self . instance and self . instance . text == text ) : <EOL> matching_citations = citations . find ( Q ( '<STR_LIT:text>' , '<STR_LIT>' , text ) ) <EOL> if matching_citations . count ( ) > <NUM_LIT:0> : <EOL> names = "<STR_LIT>" . join ( [ str ( citation . name ) for citation in matching_citations ] ) <EOL> errors . append ( "<STR_LIT>" . format ( names ) ) <EOL> return errors <EOL> def get_absolute_url ( self , obj ) : <EOL> raise NotImplementedError </s>
<s> from nose . tools import * <EOL> from datetime import datetime <EOL> from framework . guid . model import Guid <EOL> from api . base . settings . defaults import API_BASE <EOL> from api_tests import utils as test_utils <EOL> from tests . base import ApiTestCase <EOL> from tests . factories import ProjectFactory , AuthUserFactory , CommentFactory , NodeWikiFactory <EOL> class CommentReportsMixin ( object ) : <EOL> def setUp ( self ) : <EOL> super ( CommentReportsMixin , self ) . setUp ( ) <EOL> self . user = AuthUserFactory ( ) <EOL> self . contributor = AuthUserFactory ( ) <EOL> self . non_contributor = AuthUserFactory ( ) <EOL> self . payload = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> def _set_up_private_project_comment_reports ( self ) : <EOL> raise NotImplementedError <EOL> def _set_up_public_project_comment_reports ( self , comment_level = '<STR_LIT>' ) : <EOL> raise NotImplementedError <EOL> def test_private_node_logged_out_user_cannot_view_reports ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . get ( self . private_url , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_private_node_logged_in_non_contributor_cannot_view_reports ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . get ( self . private_url , auth = self . non_contributor . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_private_node_only_reporting_user_can_view_reports ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . get ( self . private_url , auth = self . user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> report_json = res . 
json [ '<STR_LIT:data>' ] <EOL> report_ids = [ report [ '<STR_LIT:id>' ] for report in report_json ] <EOL> assert_equal ( len ( report_json ) , <NUM_LIT:1> ) <EOL> assert_in ( self . user . _id , report_ids ) <EOL> def test_private_node_reported_user_does_not_see_report ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . get ( self . private_url , auth = self . contributor . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> report_json = res . json [ '<STR_LIT:data>' ] <EOL> report_ids = [ report [ '<STR_LIT:id>' ] for report in report_json ] <EOL> assert_equal ( len ( report_json ) , <NUM_LIT:0> ) <EOL> assert_not_in ( self . contributor . _id , report_ids ) <EOL> def test_public_node_only_reporting_contributor_can_view_report ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> res = self . app . get ( self . public_url , auth = self . user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> report_json = res . json [ '<STR_LIT:data>' ] <EOL> report_ids = [ report [ '<STR_LIT:id>' ] for report in report_json ] <EOL> assert_equal ( len ( report_json ) , <NUM_LIT:1> ) <EOL> assert_in ( self . user . _id , report_ids ) <EOL> def test_public_node_reported_user_does_not_see_report ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> res = self . app . get ( self . public_url , auth = self . contributor . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> report_json = res . json [ '<STR_LIT:data>' ] <EOL> report_ids = [ report [ '<STR_LIT:id>' ] for report in report_json ] <EOL> assert_equal ( len ( report_json ) , <NUM_LIT:0> ) <EOL> assert_not_in ( self . contributor . _id , report_ids ) <EOL> def test_public_node_non_contributor_does_not_see_other_user_reports ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> res = self . app . get ( self . public_url , auth = self . non_contributor . 
auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> report_json = res . json [ '<STR_LIT:data>' ] <EOL> report_ids = [ report [ '<STR_LIT:id>' ] for report in report_json ] <EOL> assert_equal ( len ( report_json ) , <NUM_LIT:0> ) <EOL> assert_not_in ( self . non_contributor . _id , report_ids ) <EOL> def test_public_node_non_contributor_reporter_can_view_own_report ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> self . public_comment . reports [ self . non_contributor . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . public_comment . save ( ) <EOL> res = self . app . get ( self . public_url , auth = self . non_contributor . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> report_json = res . json [ '<STR_LIT:data>' ] <EOL> report_ids = [ report [ '<STR_LIT:id>' ] for report in report_json ] <EOL> assert_equal ( len ( report_json ) , <NUM_LIT:1> ) <EOL> assert_in ( self . non_contributor . _id , report_ids ) <EOL> def test_public_node_logged_out_user_cannot_view_reports ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> res = self . app . get ( self . public_url , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_public_node_private_comment_level_non_contributor_cannot_see_reports ( self ) : <EOL> self . _set_up_public_project_comment_reports ( comment_level = '<STR_LIT>' ) <EOL> res = self . app . get ( self . public_url , auth = self . non_contributor . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_report_comment_invalid_type ( self ) : <EOL> self . 
_set_up_private_project_comment_reports ( ) <EOL> payload = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . private_url , payload , auth = self . user . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_report_comment_no_type ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> payload = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . private_url , payload , auth = self . user . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_report_comment_invalid_spam_category ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> category = '<STR_LIT>' <EOL> payload = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : category , <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . private_url , payload , auth = self . user . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' + category + '<STR_LIT>' + '<STR_LIT>' ) <EOL> def test_report_comment_allow_blank_message ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> comment = CommentFactory ( node = self . private_project , user = self . 
contributor , target = self . comment . target ) <EOL> url = '<STR_LIT>' . format ( API_BASE , comment . _id ) <EOL> payload = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:message>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( url , payload , auth = self . user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , self . user . _id ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:message>' ] , payload [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:message>' ] ) <EOL> def test_private_node_logged_out_user_cannot_report_comment ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . post_json_api ( self . private_url , self . payload , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_private_node_logged_in_non_contributor_cannot_report_comment ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . post_json_api ( self . private_url , self . payload , auth = self . non_contributor . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_private_node_logged_in_contributor_can_report_comment ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> comment = CommentFactory ( node = self . private_project , user = self . contributor , target = self . comment . target ) <EOL> url = '<STR_LIT>' . format ( API_BASE , comment . _id ) <EOL> res = self . app . post_json_api ( url , self . payload , auth = self . user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , self . user . _id ) <EOL> def test_user_cannot_report_own_comment ( self ) : <EOL> self . 
_set_up_private_project_comment_reports ( ) <EOL> res = self . app . post_json_api ( self . private_url , self . payload , auth = self . contributor . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_user_cannot_report_comment_twice ( self ) : <EOL> self . _set_up_private_project_comment_reports ( ) <EOL> res = self . app . post_json_api ( self . private_url , self . payload , auth = self . user . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_public_node_logged_out_user_cannot_report_comment ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> res = self . app . post_json_api ( self . public_url , self . payload , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_public_node_contributor_can_report_comment ( self ) : <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> comment = CommentFactory ( node = self . public_project , user = self . contributor , target = self . public_comment . target ) <EOL> url = '<STR_LIT>' . format ( API_BASE , comment . _id ) <EOL> res = self . app . post_json_api ( url , self . payload , auth = self . user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , self . user . _id ) <EOL> def test_public_node_non_contributor_can_report_comment ( self ) : <EOL> """<STR_LIT>""" <EOL> self . _set_up_public_project_comment_reports ( ) <EOL> res = self . app . post_json_api ( self . public_url , self . payload , auth = self . non_contributor . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , self . non_contributor . 
_id ) <EOL> def test_public_node_private_comment_level_non_contributor_cannot_report_comment ( self ) : <EOL> self . _set_up_public_project_comment_reports ( comment_level = '<STR_LIT>' ) <EOL> res = self . app . get ( self . public_url , auth = self . non_contributor . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> class TestCommentReportsView ( CommentReportsMixin , ApiTestCase ) : <EOL> def _set_up_private_project_comment_reports ( self ) : <EOL> self . private_project = ProjectFactory . create ( is_public = False , creator = self . user ) <EOL> self . private_project . add_contributor ( contributor = self . contributor , save = True ) <EOL> self . comment = CommentFactory . build ( node = self . private_project , user = self . contributor ) <EOL> self . comment . reports = self . comment . reports or { } <EOL> self . comment . reports [ self . user . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . comment . save ( ) <EOL> self . private_url = '<STR_LIT>' . format ( API_BASE , self . comment . _id ) <EOL> def _set_up_public_project_comment_reports ( self , comment_level = '<STR_LIT>' ) : <EOL> self . public_project = ProjectFactory . create ( is_public = True , creator = self . user , comment_level = comment_level ) <EOL> self . public_project . add_contributor ( contributor = self . contributor , save = True ) <EOL> self . public_comment = CommentFactory . build ( node = self . public_project , user = self . contributor ) <EOL> self . public_comment . reports = self . public_comment . reports or { } <EOL> self . public_comment . reports [ self . user . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . 
utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . public_comment . save ( ) <EOL> self . public_url = '<STR_LIT>' . format ( API_BASE , self . public_comment . _id ) <EOL> class TestFileCommentReportsView ( CommentReportsMixin , ApiTestCase ) : <EOL> def _set_up_private_project_comment_reports ( self ) : <EOL> self . private_project = ProjectFactory . create ( is_public = False , creator = self . user ) <EOL> self . private_project . add_contributor ( contributor = self . contributor , save = True ) <EOL> self . file = test_utils . create_test_file ( self . private_project , self . user ) <EOL> self . comment = CommentFactory . build ( node = self . private_project , target = self . file . get_guid ( ) , user = self . contributor ) <EOL> self . comment . reports = self . comment . reports or { } <EOL> self . comment . reports [ self . user . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . comment . save ( ) <EOL> self . private_url = '<STR_LIT>' . format ( API_BASE , self . comment . _id ) <EOL> def _set_up_public_project_comment_reports ( self , comment_level = '<STR_LIT>' ) : <EOL> self . public_project = ProjectFactory . create ( is_public = True , creator = self . user , comment_level = comment_level ) <EOL> self . public_project . add_contributor ( contributor = self . contributor , save = True ) <EOL> self . public_file = test_utils . create_test_file ( self . public_project , self . user ) <EOL> self . public_comment = CommentFactory . build ( node = self . public_project , target = self . public_file . get_guid ( ) , user = self . contributor ) <EOL> self . public_comment . reports = self . public_comment . reports or { } <EOL> self . public_comment . reports [ self . user . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . 
utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . public_comment . save ( ) <EOL> self . public_url = '<STR_LIT>' . format ( API_BASE , self . public_comment . _id ) <EOL> class TestWikiCommentReportsView ( CommentReportsMixin , ApiTestCase ) : <EOL> def _set_up_private_project_comment_reports ( self ) : <EOL> self . private_project = ProjectFactory . create ( is_public = False , creator = self . user ) <EOL> self . private_project . add_contributor ( contributor = self . contributor , save = True ) <EOL> self . wiki = NodeWikiFactory ( node = self . private_project , user = self . user ) <EOL> self . comment = CommentFactory . build ( node = self . private_project , target = Guid . load ( self . wiki . _id ) , user = self . contributor ) <EOL> self . comment . reports = self . comment . reports or { } <EOL> self . comment . reports [ self . user . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . comment . save ( ) <EOL> self . private_url = '<STR_LIT>' . format ( API_BASE , self . comment . _id ) <EOL> def _set_up_public_project_comment_reports ( self , comment_level = '<STR_LIT>' ) : <EOL> self . public_project = ProjectFactory . create ( is_public = True , creator = self . user , comment_level = comment_level ) <EOL> self . public_project . add_contributor ( contributor = self . contributor , save = True ) <EOL> self . public_wiki = NodeWikiFactory ( node = self . public_project , user = self . user ) <EOL> self . public_comment = CommentFactory . build ( node = self . public_project , target = Guid . load ( self . public_wiki . _id ) , user = self . contributor ) <EOL> self . public_comment . reports = self . public_comment . reports or { } <EOL> self . public_comment . reports [ self . user . _id ] = { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT:text>' : '<STR_LIT>' , <EOL> '<STR_LIT:date>' : datetime . 
utcnow ( ) , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> self . public_comment . save ( ) <EOL> self . public_url = '<STR_LIT>' . format ( API_BASE , self . public_comment . _id ) </s>
<s> from nose . tools import * <EOL> from modularodm import Q <EOL> from framework . auth . core import Auth <EOL> from website . models import Node , NodeLog <EOL> from website . util import permissions <EOL> from website . util . sanitize import strip_html <EOL> from api . base . settings . defaults import API_BASE , MAX_PAGE_SIZE <EOL> from tests . base import ApiTestCase <EOL> from tests . factories import ( <EOL> BookmarkCollectionFactory , <EOL> CollectionFactory , <EOL> ProjectFactory , <EOL> RegistrationFactory , <EOL> AuthUserFactory , <EOL> UserFactory , <EOL> RetractedRegistrationFactory <EOL> ) <EOL> class TestNodeList ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeList , self ) . setUp ( ) <EOL> self . user = AuthUserFactory ( ) <EOL> self . non_contrib = AuthUserFactory ( ) <EOL> self . deleted = ProjectFactory ( is_deleted = True ) <EOL> self . private = ProjectFactory ( is_public = False , creator = self . user ) <EOL> self . public = ProjectFactory ( is_public = True , creator = self . user ) <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> def tearDown ( self ) : <EOL> super ( TestNodeList , self ) . tearDown ( ) <EOL> Node . remove ( ) <EOL> def test_only_returns_non_deleted_public_projects ( self ) : <EOL> res = self . app . get ( self . url ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . public . _id , ids ) <EOL> assert_not_in ( self . deleted . _id , ids ) <EOL> assert_not_in ( self . private . _id , ids ) <EOL> def test_return_public_node_list_logged_out_user ( self ) : <EOL> res = self . app . get ( self . url , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . public . _id , ids ) <EOL> assert_not_in ( self . private . 
_id , ids ) <EOL> def test_return_public_node_list_logged_in_user ( self ) : <EOL> res = self . app . get ( self . url , auth = self . non_contrib ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . public . _id , ids ) <EOL> assert_not_in ( self . private . _id , ids ) <EOL> def test_return_private_node_list_logged_out_user ( self ) : <EOL> res = self . app . get ( self . url ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . public . _id , ids ) <EOL> assert_not_in ( self . private . _id , ids ) <EOL> def test_return_private_node_list_logged_in_contributor ( self ) : <EOL> res = self . app . get ( self . url , auth = self . user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . public . _id , ids ) <EOL> assert_in ( self . private . _id , ids ) <EOL> def test_return_private_node_list_logged_in_non_contributor ( self ) : <EOL> res = self . app . get ( self . url , auth = self . non_contrib . auth ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . public . _id , ids ) <EOL> assert_not_in ( self . private . _id , ids ) <EOL> def test_node_list_does_not_returns_registrations ( self ) : <EOL> registration = RegistrationFactory ( project = self . public , creator = self . user ) <EOL> res = self . app . get ( self . url , auth = self . user . auth ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_not_in ( registration . _id , ids ) <EOL> def test_omit_retracted_registration ( self ) : <EOL> registration = RegistrationFactory ( creator = self . 
user , project = self . public ) <EOL> res = self . app . get ( self . url , auth = self . user . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:2> ) <EOL> retraction = RetractedRegistrationFactory ( registration = registration , user = registration . creator ) <EOL> res = self . app . get ( self . url , auth = self . user . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:2> ) <EOL> def test_node_list_has_root ( self ) : <EOL> res = self . app . get ( self . url , auth = self . user . auth ) <EOL> projects_with_root = <NUM_LIT:0> <EOL> for project in res . json [ '<STR_LIT:data>' ] : <EOL> if project [ '<STR_LIT>' ] . get ( '<STR_LIT:root>' , None ) : <EOL> projects_with_root += <NUM_LIT:1> <EOL> assert_not_equal ( projects_with_root , <NUM_LIT:0> ) <EOL> assert_true ( <EOL> all ( [ each [ '<STR_LIT>' ] . get ( <EOL> '<STR_LIT:root>' <EOL> ) is not None for each in res . json [ '<STR_LIT:data>' ] ] ) <EOL> ) <EOL> def test_node_list_has_proper_root ( self ) : <EOL> project_one = ProjectFactory ( title = "<STR_LIT>" , is_public = True ) <EOL> ProjectFactory ( parent = project_one , is_public = True ) <EOL> res = self . app . get ( self . url + '<STR_LIT>' , auth = self . user . auth ) <EOL> for project_json in res . json [ '<STR_LIT:data>' ] : <EOL> project = Node . load ( project_json [ '<STR_LIT:id>' ] ) <EOL> assert_equal ( project_json [ '<STR_LIT>' ] [ '<STR_LIT:root>' ] [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] , project . root . _id ) <EOL> class TestNodeFiltering ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeFiltering , self ) . setUp ( ) <EOL> self . user_one = AuthUserFactory ( ) <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . project_one = ProjectFactory ( title = "<STR_LIT>" , is_public = True ) <EOL> self . project_two = ProjectFactory ( title = "<STR_LIT>" , description = "<STR_LIT>" , is_public = True ) <EOL> self . 
project_three = ProjectFactory ( title = "<STR_LIT>" , is_public = True ) <EOL> self . private_project_user_one = ProjectFactory ( title = "<STR_LIT>" , <EOL> is_public = False , <EOL> creator = self . user_one ) <EOL> self . private_project_user_two = ProjectFactory ( title = "<STR_LIT>" , <EOL> is_public = False , <EOL> creator = self . user_two ) <EOL> self . folder = CollectionFactory ( ) <EOL> self . bookmark_collection = BookmarkCollectionFactory ( ) <EOL> self . url = "<STR_LIT>" . format ( API_BASE ) <EOL> self . tag1 , self . tag2 = '<STR_LIT>' , '<STR_LIT>' <EOL> self . project_one . add_tag ( self . tag1 , Auth ( self . project_one . creator ) , save = False ) <EOL> self . project_one . add_tag ( self . tag2 , Auth ( self . project_one . creator ) , save = False ) <EOL> self . project_one . save ( ) <EOL> self . project_two . add_tag ( self . tag1 , Auth ( self . project_two . creator ) , save = True ) <EOL> def tearDown ( self ) : <EOL> super ( TestNodeFiltering , self ) . tearDown ( ) <EOL> Node . remove ( ) <EOL> def test_filtering_by_id ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE , self . project_one . _id ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_equal ( len ( ids ) , <NUM_LIT:1> ) <EOL> def test_filtering_by_multiple_ids ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE , self . project_one . _id , self . project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . 
_id , ids ) <EOL> assert_equal ( len ( ids ) , <NUM_LIT:2> ) <EOL> def test_filtering_by_multiple_ids_one_private ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE , self . project_one . _id , self . private_project_user_two . _id ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_equal ( len ( ids ) , <NUM_LIT:1> ) <EOL> def test_filtering_by_multiple_ids_brackets_in_query_params ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE , self . project_one . _id , self . project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in res . json [ '<STR_LIT:data>' ] ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_equal ( len ( ids ) , <NUM_LIT:2> ) <EOL> def test_filtering_by_category ( self ) : <EOL> project = ProjectFactory ( creator = self . user_one , category = '<STR_LIT>' ) <EOL> project2 = ProjectFactory ( creator = self . user_one , category = '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( project . _id , ids ) <EOL> assert_not_in ( project2 . _id , ids ) <EOL> def test_filtering_by_public ( self ) : <EOL> project = ProjectFactory ( creator = self . user_one , is_public = True ) <EOL> project2 = ProjectFactory ( creator = self . user_one , is_public = False ) <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . 
user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> assert_false ( <EOL> any ( [ each [ '<STR_LIT>' ] [ '<STR_LIT>' ] for each in node_json ] ) <EOL> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_not_in ( project . _id , ids ) <EOL> assert_in ( project2 . _id , ids ) <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> assert_true ( <EOL> all ( [ each [ '<STR_LIT>' ] [ '<STR_LIT>' ] for each in node_json ] ) <EOL> ) <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_not_in ( project2 . _id , ids ) <EOL> assert_in ( project . _id , ids ) <EOL> def test_filtering_tags ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE , self . tag1 ) <EOL> res = self . app . get ( url , auth = self . project_one . creator . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . tag1 , self . tag2 ) <EOL> res = self . app . get ( url , auth = self . project_one . creator . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_not_in ( self . project_two . _id , ids ) <EOL> def test_get_all_projects_with_no_filter_logged_in ( self ) : <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_in ( self . project_three . _id , ids ) <EOL> assert_in ( self . private_project_user_one . 
_id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_all_projects_with_no_filter_not_logged_in ( self ) : <EOL> res = self . app . get ( self . url ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_one_project_with_exact_filter_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_not_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_one_project_with_exact_filter_not_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_not_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . 
private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_some_projects_with_substring_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_not_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_some_projects_with_substring_not_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_not_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_only_public_or_my_projects_with_filter_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . 
_id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_get_only_public_projects_with_filter_not_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_alternate_filtering_field_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> node_json = res . json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_not_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_alternate_filtering_field_not_logged_in ( self ) : <EOL> url = "<STR_LIT>" . format ( API_BASE ) <EOL> res = self . app . get ( url ) <EOL> node_json = res . 
json [ '<STR_LIT:data>' ] <EOL> ids = [ each [ '<STR_LIT:id>' ] for each in node_json ] <EOL> assert_not_in ( self . project_one . _id , ids ) <EOL> assert_in ( self . project_two . _id , ids ) <EOL> assert_not_in ( self . project_three . _id , ids ) <EOL> assert_not_in ( self . private_project_user_one . _id , ids ) <EOL> assert_not_in ( self . private_project_user_two . _id , ids ) <EOL> assert_not_in ( self . folder . _id , ids ) <EOL> assert_not_in ( self . bookmark_collection . _id , ids ) <EOL> def test_incorrect_filtering_field_not_logged_in ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> errors = res . json [ '<STR_LIT>' ] <EOL> assert_equal ( len ( errors ) , <NUM_LIT:1> ) <EOL> assert_equal ( errors [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> def test_filtering_on_root ( self ) : <EOL> root = ProjectFactory ( is_public = True ) <EOL> child = ProjectFactory ( parent = root , is_public = True ) <EOL> ProjectFactory ( parent = root , is_public = True ) <EOL> ProjectFactory ( parent = child , is_public = True ) <EOL> ProjectFactory ( title = "<STR_LIT>" , is_public = True ) <EOL> ProjectFactory ( title = "<STR_LIT>" , is_public = True ) <EOL> url = '<STR_LIT>' . format ( API_BASE , root . _id ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> root_nodes = Node . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , True ) & Q ( '<STR_LIT:root>' , '<STR_LIT>' , root . _id ) ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , root_nodes . 
count ( ) ) <EOL> def test_filtering_on_null_parent ( self ) : <EOL> new_user = AuthUserFactory ( ) <EOL> root = ProjectFactory ( is_public = True ) <EOL> ProjectFactory ( is_public = True ) <EOL> child = ProjectFactory ( parent = root , is_public = True ) <EOL> ProjectFactory ( parent = root , is_public = True ) <EOL> ProjectFactory ( parent = child , is_public = True ) <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = new_user . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> public_root_nodes = Node . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , True ) & Q ( '<STR_LIT>' , '<STR_LIT>' , None ) ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , public_root_nodes . count ( ) ) <EOL> def test_filtering_on_title_not_equal ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> data = res . json [ '<STR_LIT:data>' ] <EOL> assert_equal ( len ( data ) , <NUM_LIT:3> ) <EOL> titles = [ each [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] for each in data ] <EOL> assert_not_in ( self . project_one . title , titles ) <EOL> assert_in ( self . project_two . title , titles ) <EOL> assert_in ( self . project_three . title , titles ) <EOL> assert_in ( self . private_project_user_one . title , titles ) <EOL> def test_filtering_on_description_not_equal ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> data = res . json [ '<STR_LIT:data>' ] <EOL> assert_equal ( len ( data ) , <NUM_LIT:3> ) <EOL> descriptions = [ each [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] for each in data ] <EOL> assert_not_in ( self . project_two . description , descriptions ) <EOL> assert_in ( self . project_one . description , descriptions ) <EOL> assert_in ( self . project_three . 
description , descriptions ) <EOL> assert_in ( self . private_project_user_one . description , descriptions ) <EOL> class TestNodeCreate ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeCreate , self ) . setUp ( ) <EOL> self . user_one = AuthUserFactory ( ) <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> self . title = '<STR_LIT>' <EOL> self . description = '<STR_LIT>' <EOL> self . category = '<STR_LIT:data>' <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . public_project = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : True , <EOL> } <EOL> } <EOL> } <EOL> self . private_project = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : False <EOL> } <EOL> } <EOL> } <EOL> def test_node_create_invalid_data ( self ) : <EOL> res = self . app . post_json_api ( self . url , "<STR_LIT>" , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> res = self . app . post_json_api ( self . url , [ "<STR_LIT>" ] , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> def test_creates_public_project_logged_out ( self ) : <EOL> res = self . app . post_json_api ( self . url , self . public_project , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_in ( '<STR_LIT>' , res . 
json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) <EOL> def test_creates_public_project_logged_in ( self ) : <EOL> res = self . app . post_json_api ( self . url , self . public_project , expect_errors = True , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . public_project [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] , self . public_project [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . public_project [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> pid = res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] <EOL> project = Node . load ( pid ) <EOL> assert_equal ( project . logs [ - <NUM_LIT:1> ] . action , NodeLog . PROJECT_CREATED ) <EOL> def test_creates_private_project_logged_out ( self ) : <EOL> res = self . app . post_json_api ( self . url , self . private_project , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_in ( '<STR_LIT>' , res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] ) <EOL> def test_creates_private_project_logged_in_contributor ( self ) : <EOL> res = self . app . post_json_api ( self . url , self . private_project , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . private_project [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] , self . 
private_project [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . private_project [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> pid = res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] <EOL> project = Node . load ( pid ) <EOL> assert_equal ( project . logs [ - <NUM_LIT:1> ] . action , NodeLog . PROJECT_CREATED ) <EOL> def test_creates_project_from_template ( self ) : <EOL> template_from = ProjectFactory ( creator = self . user_one , is_public = True ) <EOL> template_component = ProjectFactory ( creator = self . user_one , is_public = True , parent = template_from ) <EOL> templated_project_title = '<STR_LIT>' <EOL> templated_project_data = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> { <EOL> '<STR_LIT:title>' : templated_project_title , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : template_from . _id , <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , templated_project_data , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> json_data = res . json [ '<STR_LIT:data>' ] <EOL> new_project_id = json_data [ '<STR_LIT:id>' ] <EOL> new_project = Node . load ( new_project_id ) <EOL> assert_equal ( new_project . title , templated_project_title ) <EOL> assert_equal ( new_project . description , None ) <EOL> assert_false ( new_project . is_public ) <EOL> assert_equal ( len ( new_project . nodes ) , len ( template_from . nodes ) ) <EOL> assert_equal ( new_project . nodes [ <NUM_LIT:0> ] . title , template_component . 
title ) <EOL> def test_404_on_create_from_template_of_nonexistent_project ( self ) : <EOL> template_from_id = '<STR_LIT>' <EOL> templated_project_data = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : template_from_id , <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , templated_project_data , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_403_on_create_from_template_of_unauthorized_project ( self ) : <EOL> template_from = ProjectFactory ( creator = self . user_two , is_public = True ) <EOL> templated_project_data = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : <EOL> { <EOL> '<STR_LIT:title>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : template_from . _id , <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , templated_project_data , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_creates_project_creates_project_and_sanitizes_html ( self ) : <EOL> title = '<STR_LIT>' <EOL> description = '<STR_LIT>' <EOL> res = self . app . post_json_api ( self . url , { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : title , <EOL> '<STR_LIT:description>' : description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : True <EOL> } , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> } , auth = self . user_one . auth ) <EOL> project_id = res . json [ '<STR_LIT:data>' ] [ '<STR_LIT:id>' ] <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , project_id ) <EOL> project = Node . load ( project_id ) <EOL> assert_equal ( project . 
logs [ - <NUM_LIT:1> ] . action , NodeLog . PROJECT_CREATED ) <EOL> res = self . app . get ( url , auth = self . user_one . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , strip_html ( title ) ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] , strip_html ( description ) ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . category ) <EOL> def test_creates_project_no_type ( self ) : <EOL> project = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : False <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , project , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_creates_project_incorrect_type ( self ) : <EOL> project = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : False <EOL> } , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , project , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_creates_project_properties_not_nested ( self ) : <EOL> project = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . 
description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : False , <EOL> '<STR_LIT:type>' : '<STR_LIT>' <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , project , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_create_project_invalid_title ( self ) : <EOL> project = { <EOL> '<STR_LIT:data>' : { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : '<STR_LIT:A>' * <NUM_LIT> , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : False , <EOL> } <EOL> } <EOL> } <EOL> res = self . app . post_json_api ( self . url , project , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> class TestNodeBulkCreate ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeBulkCreate , self ) . setUp ( ) <EOL> self . user_one = AuthUserFactory ( ) <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> self . title = '<STR_LIT>' <EOL> self . description = '<STR_LIT>' <EOL> self . category = '<STR_LIT:data>' <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . public_project = { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } <EOL> self . private_project = { <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . title , <EOL> '<STR_LIT:description>' : self . 
description , <EOL> '<STR_LIT>' : self . category , <EOL> '<STR_LIT>' : False <EOL> } <EOL> } <EOL> self . empty_project = { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : "<STR_LIT>" , '<STR_LIT:description>' : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } } <EOL> def test_bulk_create_nodes_blank_request ( self ) : <EOL> res = self . app . post_json_api ( self . url , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_bulk_create_all_or_nothing ( self ) : <EOL> res = self . app . post_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_project , self . empty_project ] } , bulk = True , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_logged_out ( self ) : <EOL> res = self . app . post_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_project , self . private_project ] } , bulk = True , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_error_formatting ( self ) : <EOL> res = self . app . post_json_api ( self . url , { '<STR_LIT:data>' : [ self . empty_project , self . empty_project ] } , bulk = True , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> errors = res . 
json [ '<STR_LIT>' ] <EOL> assert_items_equal ( [ errors [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] , errors [ <NUM_LIT:1> ] [ '<STR_LIT:source>' ] ] , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' } , { '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> assert_items_equal ( [ errors [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , errors [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ] , <EOL> [ "<STR_LIT>" , "<STR_LIT>" ] ) <EOL> def test_bulk_create_limits ( self ) : <EOL> node_create_list = { '<STR_LIT:data>' : [ self . public_project ] * <NUM_LIT> } <EOL> res = self . app . post_json_api ( self . url , node_create_list , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_no_type ( self ) : <EOL> payload = { '<STR_LIT:data>' : [ { "<STR_LIT>" : { '<STR_LIT>' : self . category , '<STR_LIT:title>' : self . title } } ] } <EOL> res = self . app . post_json_api ( self . url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_incorrect_type ( self ) : <EOL> payload = { '<STR_LIT:data>' : [ self . public_project , { '<STR_LIT:type>' : '<STR_LIT>' , "<STR_LIT>" : { '<STR_LIT>' : self . category , '<STR_LIT:title>' : self . title } } ] } <EOL> res = self . app . post_json_api ( self . url , payload , auth = self . user_one . 
auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_no_attributes ( self ) : <EOL> payload = { '<STR_LIT:data>' : [ self . public_project , { '<STR_LIT:type>' : '<STR_LIT>' , } ] } <EOL> res = self . app . post_json_api ( self . url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_no_title ( self ) : <EOL> payload = { '<STR_LIT:data>' : [ self . public_project , { '<STR_LIT:type>' : '<STR_LIT>' , "<STR_LIT>" : { '<STR_LIT>' : self . category } } ] } <EOL> res = self . app . post_json_api ( self . url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_ugly_payload ( self ) : <EOL> payload = '<STR_LIT>' <EOL> res = self . app . post_json_api ( self . url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:0> ) <EOL> def test_bulk_create_logged_in ( self ) : <EOL> res = self . app . 
post_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_project , self . private_project ] } , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:2> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . public_project [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . public_project [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] , self . public_project [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . private_project [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , self . public_project [ '<STR_LIT>' ] [ '<STR_LIT>' ] ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] , self . public_project [ '<STR_LIT>' ] [ '<STR_LIT:description>' ] ) <EOL> assert_equal ( res . content_type , '<STR_LIT>' ) <EOL> res = self . app . get ( self . url , auth = self . user_one . auth ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT:data>' ] ) , <NUM_LIT:2> ) <EOL> id_one = res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] <EOL> id_two = res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] <EOL> res = self . app . delete_json_api ( self . url , { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : id_one , '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : id_two , '<STR_LIT:type>' : '<STR_LIT>' } ] } , <EOL> auth = self . user_one . 
auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> class TestNodeBulkUpdate ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeBulkUpdate , self ) . setUp ( ) <EOL> self . user = AuthUserFactory ( ) <EOL> self . title = '<STR_LIT>' <EOL> self . new_title = '<STR_LIT>' <EOL> self . description = '<STR_LIT>' <EOL> self . new_description = '<STR_LIT>' <EOL> self . category = '<STR_LIT:data>' <EOL> self . new_category = '<STR_LIT>' <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . public_project = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user ) <EOL> self . public_project_two = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user ) <EOL> self . public_payload = { <EOL> '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . public_project . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . public_project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } <EOL> ] <EOL> } <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> self . private_project = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = False , <EOL> creator = self . user ) <EOL> self . private_project_two = ProjectFactory ( title = self . 
title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = False , <EOL> creator = self . user ) <EOL> self . private_payload = { '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . private_project . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : False <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . private_project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : False <EOL> } <EOL> } <EOL> ] <EOL> } <EOL> self . empty_payload = { '<STR_LIT:data>' : [ <EOL> { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : "<STR_LIT>" , '<STR_LIT:description>' : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } } , <EOL> { '<STR_LIT:id>' : self . public_project_two . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : "<STR_LIT>" , '<STR_LIT:description>' : "<STR_LIT>" , "<STR_LIT>" : "<STR_LIT>" } } <EOL> ] } <EOL> def test_bulk_update_nodes_blank_request ( self ) : <EOL> res = self . app . put_json_api ( self . url , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_bulk_update_blank_but_not_empty_title ( self ) : <EOL> payload = { <EOL> "<STR_LIT:data>" : [ <EOL> { <EOL> "<STR_LIT:id>" : self . public_project . _id , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:title>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } , <EOL> { <EOL> "<STR_LIT:id>" : self . public_project_two . 
_id , <EOL> "<STR_LIT:type>" : "<STR_LIT>" , <EOL> "<STR_LIT>" : { <EOL> "<STR_LIT:title>" : "<STR_LIT:U+0020>" , <EOL> "<STR_LIT>" : "<STR_LIT>" <EOL> } <EOL> } <EOL> ] <EOL> } <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project . _id ) <EOL> res = self . app . put_json_api ( self . url , payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( url ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_with_tags ( self ) : <EOL> new_payload = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : '<STR_LIT>' , '<STR_LIT>' : '<STR_LIT>' , '<STR_LIT>' : [ '<STR_LIT>' ] } } ] } <EOL> res = self . app . put_json_api ( self . url , new_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , [ '<STR_LIT>' ] ) <EOL> def test_bulk_update_public_projects_one_not_found ( self ) : <EOL> empty_payload = { '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT> , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT>' : self . new_category <EOL> } <EOL> } , self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] <EOL> ] } <EOL> res = self . app . put_json_api ( self . url , empty_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project . _id ) <EOL> res = self . app . get ( url ) <EOL> assert_equal ( res . 
json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_public_projects_logged_out ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . public_payload , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . public_project_two . _id ) <EOL> res = self . app . get ( url ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_public_projects_logged_in ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . public_payload , auth = self . user . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( { self . public_project . _id , self . public_project_two . _id } , <EOL> { res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] } ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> def test_bulk_update_private_projects_logged_out ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . private_payload , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . private_project . 
_id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . private_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_private_projects_logged_in_contrib ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . private_payload , auth = self . user . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( { self . private_project . _id , self . private_project_two . _id } , <EOL> { res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] } ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> def test_bulk_update_private_projects_logged_in_non_contrib ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . private_payload , auth = self . user_two . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . private_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . private_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two , auth = self . user . auth ) <EOL> assert_equal ( res . 
json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_private_projects_logged_in_read_only_contrib ( self ) : <EOL> self . private_project . add_contributor ( self . user_two , permissions = [ permissions . READ ] , save = True ) <EOL> self . private_project_two . add_contributor ( self . user_two , permissions = [ permissions . READ ] , save = True ) <EOL> res = self . app . put_json_api ( self . url , self . private_payload , auth = self . user_two . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . private_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . private_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_projects_send_dictionary_not_list ( self ) : <EOL> res = self . app . put_json_api ( self . url , { '<STR_LIT:data>' : { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { '<STR_LIT:title>' : self . new_title , '<STR_LIT>' : "<STR_LIT>" } } } , <EOL> auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_update_error_formatting ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . empty_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . 
status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> errors = res . json [ '<STR_LIT>' ] <EOL> assert_items_equal ( [ errors [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] , errors [ <NUM_LIT:1> ] [ '<STR_LIT:source>' ] ] , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' } , { '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> assert_items_equal ( [ errors [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , errors [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ] , <EOL> [ '<STR_LIT>' ] * <NUM_LIT:2> ) <EOL> def test_bulk_update_id_not_supplied ( self ) : <EOL> res = self . app . put_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] , { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : <EOL> { '<STR_LIT:title>' : self . new_title , '<STR_LIT>' : self . new_category } } ] } , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_type_not_supplied ( self ) : <EOL> res = self . app . put_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT>' : <EOL> { '<STR_LIT:title>' : self . new_title , '<STR_LIT>' : self . new_category } } ] } , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . 
json [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_incorrect_type ( self ) : <EOL> res = self . app . put_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] , { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : <EOL> { '<STR_LIT:title>' : self . new_title , '<STR_LIT>' : self . new_category } } ] } , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_update_limits ( self ) : <EOL> node_update_list = { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] ] * <NUM_LIT> } <EOL> res = self . app . put_json_api ( self . url , node_update_list , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_update_no_title_or_category ( self ) : <EOL> new_payload = { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { } } <EOL> res = self . app . 
put_json_api ( self . url , { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] , new_payload ] } , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> class TestNodeBulkPartialUpdate ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeBulkPartialUpdate , self ) . setUp ( ) <EOL> self . user = AuthUserFactory ( ) <EOL> self . title = '<STR_LIT>' <EOL> self . new_title = '<STR_LIT>' <EOL> self . description = '<STR_LIT>' <EOL> self . new_description = '<STR_LIT>' <EOL> self . category = '<STR_LIT:data>' <EOL> self . new_category = '<STR_LIT>' <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . public_project = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user ) <EOL> self . public_project_two = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user ) <EOL> self . public_payload = { '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . public_project . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . public_project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title <EOL> } <EOL> } <EOL> ] } <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> self . private_project = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . 
category , <EOL> is_public = False , <EOL> creator = self . user ) <EOL> self . private_project_two = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = False , <EOL> creator = self . user ) <EOL> self . private_payload = { '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . private_project . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . private_project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title <EOL> } <EOL> } <EOL> ] } <EOL> self . empty_payload = { '<STR_LIT:data>' : [ <EOL> { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : "<STR_LIT>" } } , <EOL> { '<STR_LIT:id>' : self . public_project_two . _id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : "<STR_LIT>" } } <EOL> ] <EOL> } <EOL> def test_bulk_patch_nodes_blank_request ( self ) : <EOL> res = self . app . patch_json_api ( self . url , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_bulk_partial_update_public_projects_one_not_found ( self ) : <EOL> empty_payload = { '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : <NUM_LIT> , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title <EOL> } <EOL> } , <EOL> self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] <EOL> ] } <EOL> res = self . app . patch_json_api ( self . url , empty_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . 
public_project . _id ) <EOL> res = self . app . get ( url ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_partial_update_public_projects_logged_out ( self ) : <EOL> res = self . app . patch_json_api ( self . url , self . public_payload , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , "<STR_LIT>" ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . public_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . public_project_two . _id ) <EOL> res = self . app . get ( url ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_partial_update_public_projects_logged_in ( self ) : <EOL> res = self . app . patch_json_api ( self . url , self . public_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( { self . public_project . _id , self . public_project_two . _id } , <EOL> { res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] } ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> def test_bulk_partial_update_private_projects_logged_out ( self ) : <EOL> res = self . app . patch_json_api ( self . url , self . private_payload , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . 
json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . private_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . private_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_partial_update_private_projects_logged_in_contrib ( self ) : <EOL> res = self . app . patch_json_api ( self . url , self . private_payload , auth = self . user . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( { self . private_project . _id , self . private_project_two . _id } , <EOL> { res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] } ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . new_title ) <EOL> def test_bulk_partial_update_private_projects_logged_in_non_contrib ( self ) : <EOL> res = self . app . patch_json_api ( self . url , self . private_payload , auth = self . user_two . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . private_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . private_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . 
json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_partial_update_private_projects_logged_in_read_only_contrib ( self ) : <EOL> self . private_project . add_contributor ( self . user_two , permissions = [ permissions . READ ] , save = True ) <EOL> self . private_project_two . add_contributor ( self . user_two , permissions = [ permissions . READ ] , save = True ) <EOL> res = self . app . patch_json_api ( self . url , self . private_payload , auth = self . user_two . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> url = '<STR_LIT>' . format ( API_BASE , self . private_project . _id ) <EOL> url_two = '<STR_LIT>' . format ( API_BASE , self . private_project_two . _id ) <EOL> res = self . app . get ( url , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> res = self . app . get ( url_two , auth = self . user . auth ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ '<STR_LIT>' ] [ '<STR_LIT:title>' ] , self . title ) <EOL> def test_bulk_partial_update_projects_send_dictionary_not_list ( self ) : <EOL> res = self . app . patch_json_api ( self . url , { '<STR_LIT:data>' : { '<STR_LIT:id>' : self . public_project . _id , '<STR_LIT>' : { '<STR_LIT:title>' : self . new_title , '<STR_LIT>' : "<STR_LIT>" } } } , <EOL> auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_partial_update_error_formatting ( self ) : <EOL> res = self . 
app . patch_json_api ( self . url , self . empty_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> errors = res . json [ '<STR_LIT>' ] <EOL> assert_items_equal ( [ errors [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] , errors [ <NUM_LIT:1> ] [ '<STR_LIT:source>' ] ] , <EOL> [ { '<STR_LIT>' : '<STR_LIT>' } , { '<STR_LIT>' : '<STR_LIT>' } ] ) <EOL> assert_items_equal ( [ errors [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , errors [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ] , <EOL> [ '<STR_LIT>' ] * <NUM_LIT:2> ) <EOL> def test_bulk_partial_update_id_not_supplied ( self ) : <EOL> res = self . app . patch_json_api ( self . url , { '<STR_LIT:data>' : [ { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT:title>' : self . new_title } } ] } , <EOL> auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( len ( res . json [ '<STR_LIT>' ] ) , <NUM_LIT:1> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_partial_update_limits ( self ) : <EOL> node_update_list = { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] ] * <NUM_LIT> } <EOL> res = self . app . patch_json_api ( self . url , node_update_list , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_partial_update_privacy_has_no_effect_on_tags ( self ) : <EOL> self . public_project . add_tag ( '<STR_LIT>' , Auth ( self . public_project . creator ) , save = True ) <EOL> payload = { '<STR_LIT:id>' : self . public_project . 
_id , '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT>' : { '<STR_LIT>' : False } } <EOL> res = self . app . patch_json_api ( self . url , { '<STR_LIT:data>' : [ payload ] } , auth = self . user . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> self . public_project . reload ( ) <EOL> assert_equal ( self . public_project . tags , [ '<STR_LIT>' ] ) <EOL> assert_equal ( self . public_project . is_public , False ) <EOL> class TestNodeBulkUpdateSkipUneditable ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeBulkUpdateSkipUneditable , self ) . setUp ( ) <EOL> self . user = AuthUserFactory ( ) <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . title = '<STR_LIT>' <EOL> self . new_title = '<STR_LIT>' <EOL> self . description = '<STR_LIT>' <EOL> self . new_description = '<STR_LIT>' <EOL> self . category = '<STR_LIT:data>' <EOL> self . new_category = '<STR_LIT>' <EOL> self . public_project = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user ) <EOL> self . public_project_two = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user ) <EOL> self . public_project_three = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user_two ) <EOL> self . public_project_four = ProjectFactory ( title = self . title , <EOL> description = self . description , <EOL> category = self . category , <EOL> is_public = True , <EOL> creator = self . user_two ) <EOL> self . public_payload = { <EOL> '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . public_project . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . 
new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . public_project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . public_project_three . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . public_project_four . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : { <EOL> '<STR_LIT:title>' : self . new_title , <EOL> '<STR_LIT:description>' : self . new_description , <EOL> '<STR_LIT>' : self . new_category , <EOL> '<STR_LIT>' : True <EOL> } <EOL> } <EOL> ] <EOL> } <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> def test_skip_uneditable_bulk_update ( self ) : <EOL> res = self . app . put_json_api ( self . url , self . public_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> edited = res . json [ '<STR_LIT:data>' ] <EOL> skipped = res . json [ '<STR_LIT>' ] <EOL> assert_items_equal ( [ edited [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , edited [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] ] , <EOL> [ self . public_project . _id , self . public_project_two . _id ] ) <EOL> assert_items_equal ( [ skipped [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , skipped [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ] , <EOL> [ self . public_project_three . _id , self . public_project_four . _id ] ) <EOL> self . public_project . reload ( ) <EOL> self . public_project_two . reload ( ) <EOL> self . 
public_project_three . reload ( ) <EOL> self . public_project_four . reload ( ) <EOL> assert_equal ( self . public_project . title , self . new_title ) <EOL> assert_equal ( self . public_project_two . title , self . new_title ) <EOL> assert_equal ( self . public_project_three . title , self . title ) <EOL> assert_equal ( self . public_project_four . title , self . title ) <EOL> def test_skip_uneditable_bulk_update_query_param_required ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . put_json_api ( url , self . public_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> self . public_project . reload ( ) <EOL> self . public_project_two . reload ( ) <EOL> self . public_project_three . reload ( ) <EOL> self . public_project_four . reload ( ) <EOL> assert_equal ( self . public_project . title , self . title ) <EOL> assert_equal ( self . public_project_two . title , self . title ) <EOL> assert_equal ( self . public_project_three . title , self . title ) <EOL> assert_equal ( self . public_project_four . title , self . title ) <EOL> def test_skip_uneditable_equals_false_bulk_update ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . put_json_api ( url , self . public_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> self . public_project . reload ( ) <EOL> self . public_project_two . reload ( ) <EOL> self . public_project_three . reload ( ) <EOL> self . public_project_four . reload ( ) <EOL> assert_equal ( self . public_project . title , self . title ) <EOL> assert_equal ( self . public_project_two . title , self . title ) <EOL> assert_equal ( self . public_project_three . title , self . title ) <EOL> assert_equal ( self . public_project_four . title , self . title ) <EOL> def test_skip_uneditable_bulk_partial_update ( self ) : <EOL> res = self . 
app . patch_json_api ( self . url , self . public_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> edited = res . json [ '<STR_LIT:data>' ] <EOL> skipped = res . json [ '<STR_LIT>' ] <EOL> assert_items_equal ( [ edited [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , edited [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] ] , <EOL> [ self . public_project . _id , self . public_project_two . _id ] ) <EOL> assert_items_equal ( [ skipped [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , skipped [ <NUM_LIT:1> ] [ '<STR_LIT>' ] ] , <EOL> [ self . public_project_three . _id , self . public_project_four . _id ] ) <EOL> self . public_project . reload ( ) <EOL> self . public_project_two . reload ( ) <EOL> self . public_project_three . reload ( ) <EOL> self . public_project_four . reload ( ) <EOL> assert_equal ( self . public_project . title , self . new_title ) <EOL> assert_equal ( self . public_project_two . title , self . new_title ) <EOL> assert_equal ( self . public_project_three . title , self . title ) <EOL> assert_equal ( self . public_project_four . title , self . title ) <EOL> def test_skip_uneditable_bulk_partial_update_query_param_required ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . patch_json_api ( url , self . public_payload , auth = self . user . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> self . public_project . reload ( ) <EOL> self . public_project_two . reload ( ) <EOL> self . public_project_three . reload ( ) <EOL> self . public_project_four . reload ( ) <EOL> assert_equal ( self . public_project . title , self . title ) <EOL> assert_equal ( self . public_project_two . title , self . title ) <EOL> assert_equal ( self . public_project_three . title , self . title ) <EOL> assert_equal ( self . public_project_four . title , self . 
title ) <EOL> class TestNodeBulkDelete ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeBulkDelete , self ) . setUp ( ) <EOL> self . user_one = AuthUserFactory ( ) <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . project_one = ProjectFactory ( title = "<STR_LIT>" , is_public = True , creator = self . user_one , category = "<STR_LIT>" ) <EOL> self . project_two = ProjectFactory ( title = "<STR_LIT>" , description = "<STR_LIT>" , is_public = True , creator = self . user_one ) <EOL> self . private_project_user_one = ProjectFactory ( title = "<STR_LIT>" , <EOL> is_public = False , <EOL> creator = self . user_one ) <EOL> self . private_project_user_two = ProjectFactory ( title = "<STR_LIT>" , <EOL> is_public = False , <EOL> creator = self . user_two ) <EOL> self . url = "<STR_LIT>" . format ( API_BASE ) <EOL> self . project_one_url = '<STR_LIT>' . format ( API_BASE , self . project_one . _id ) <EOL> self . project_two_url = '<STR_LIT>' . format ( API_BASE , self . project_two . _id ) <EOL> self . private_project_url = "<STR_LIT>" . format ( API_BASE , self . private_project_user_one . _id ) <EOL> self . public_payload = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : self . project_one . _id , '<STR_LIT:type>' : '<STR_LIT>' } , { '<STR_LIT:id>' : self . project_two . _id , '<STR_LIT:type>' : '<STR_LIT>' } ] } <EOL> self . private_payload = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : self . private_project_user_one . _id , '<STR_LIT:type>' : '<STR_LIT>' } ] } <EOL> def test_bulk_delete_nodes_blank_request ( self ) : <EOL> res = self . app . delete_json_api ( self . url , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_bulk_delete_no_type ( self ) : <EOL> payload = { '<STR_LIT:data>' : [ <EOL> { '<STR_LIT:id>' : self . project_one . _id } , <EOL> { '<STR_LIT:id>' : self . project_two . _id } <EOL> ] } <EOL> res = self . app . delete_json_api ( self . 
url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_delete_no_id ( self ) : <EOL> payload = { '<STR_LIT:data>' : [ <EOL> { '<STR_LIT:type>' : '<STR_LIT>' } , <EOL> { '<STR_LIT:id>' : '<STR_LIT>' } <EOL> ] } <EOL> res = self . app . delete_json_api ( self . url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_delete_dict_inside_data ( self ) : <EOL> res = self . app . delete_json_api ( self . url , { '<STR_LIT:data>' : { '<STR_LIT:id>' : self . project_one . _id , '<STR_LIT:type>' : '<STR_LIT>' } } , <EOL> auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_delete_invalid_type ( self ) : <EOL> res = self . app . delete_json_api ( self . url , { '<STR_LIT:data>' : [ { '<STR_LIT:type>' : '<STR_LIT>' , '<STR_LIT:id>' : self . project_one . _id } ] } , <EOL> auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> def test_bulk_delete_public_projects_logged_in ( self ) : <EOL> res = self . app . delete_json_api ( self . url , self . public_payload , auth = self . user_one . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( self . project_one_url , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> self . project_one . reload ( ) <EOL> self . project_two . 
reload ( ) <EOL> def test_bulk_delete_public_projects_logged_out ( self ) : <EOL> res = self . app . delete_json_api ( self . url , self . public_payload , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . project_one_url , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> res = self . app . get ( self . project_two_url , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> def test_bulk_delete_private_projects_logged_out ( self ) : <EOL> res = self . app . delete_json_api ( self . url , self . private_payload , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_delete_private_projects_logged_in_contributor ( self ) : <EOL> res = self . app . delete_json_api ( self . url , self . private_payload , <EOL> auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( self . private_project_url , auth = self . user_one . auth , expect_errors = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> self . private_project_user_one . reload ( ) <EOL> def test_bulk_delete_private_projects_logged_in_non_contributor ( self ) : <EOL> res = self . app . delete_json_api ( self . url , self . private_payload , <EOL> auth = self . user_two . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . private_project_url , auth = self . user_one . 
auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> def test_bulk_delete_private_projects_logged_in_read_only_contributor ( self ) : <EOL> self . private_project_user_one . add_contributor ( self . user_two , permissions = [ permissions . READ ] , save = True ) <EOL> res = self . app . delete_json_api ( self . url , self . private_payload , <EOL> auth = self . user_two . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . private_project_url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> def test_bulk_delete_all_or_nothing ( self ) : <EOL> new_payload = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : self . private_project_user_one . _id , '<STR_LIT:type>' : '<STR_LIT>' } , { '<STR_LIT:id>' : self . private_project_user_two . _id , '<STR_LIT:type>' : '<STR_LIT>' } ] } <EOL> res = self . app . delete_json_api ( self . url , new_payload , <EOL> auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . private_project_url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> url = "<STR_LIT>" . format ( API_BASE , self . private_project_user_two . _id ) <EOL> res = self . app . get ( url , auth = self . user_two . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> def test_bulk_delete_limits ( self ) : <EOL> new_payload = { '<STR_LIT:data>' : [ { '<STR_LIT:id>' : self . private_project_user_one . _id , '<STR_LIT:type>' : '<STR_LIT>' } ] * <NUM_LIT> } <EOL> res = self . app . delete_json_api ( self . url , new_payload , <EOL> auth = self . user_one . 
auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:source>' ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> def test_bulk_delete_invalid_payload_one_not_found ( self ) : <EOL> new_payload = { '<STR_LIT:data>' : [ self . public_payload [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] , { '<STR_LIT:id>' : '<STR_LIT>' , '<STR_LIT:type>' : '<STR_LIT>' } ] } <EOL> res = self . app . delete_json_api ( self . url , new_payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] , '<STR_LIT>' ) <EOL> res = self . app . get ( self . project_one_url , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> def test_bulk_delete_no_payload ( self ) : <EOL> res = self . app . delete_json_api ( self . url , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> class TestNodeBulkDeleteSkipUneditable ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeBulkDeleteSkipUneditable , self ) . setUp ( ) <EOL> self . user_one = AuthUserFactory ( ) <EOL> self . user_two = AuthUserFactory ( ) <EOL> self . project_one = ProjectFactory ( title = "<STR_LIT>" , is_public = True , creator = self . user_one ) <EOL> self . project_two = ProjectFactory ( title = "<STR_LIT>" , is_public = True , creator = self . user_one ) <EOL> self . project_three = ProjectFactory ( title = "<STR_LIT>" , is_public = True , creator = self . user_two ) <EOL> self . project_four = ProjectFactory ( title = "<STR_LIT>" , is_public = True , creator = self . user_two ) <EOL> self . 
payload = { <EOL> '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . project_one . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . project_three . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . project_four . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } <EOL> ] <EOL> } <EOL> self . url = "<STR_LIT>" . format ( API_BASE ) <EOL> def tearDown ( self ) : <EOL> super ( TestNodeBulkDeleteSkipUneditable , self ) . tearDown ( ) <EOL> Node . remove ( ) <EOL> def test_skip_uneditable_bulk_delete ( self ) : <EOL> res = self . app . delete_json_api ( self . url , self . payload , auth = self . user_one . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> skipped = res . json [ '<STR_LIT>' ] <EOL> assert_items_equal ( [ skipped [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , skipped [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] ] , <EOL> [ self . project_three . _id , self . project_four . _id ] ) <EOL> res = self . app . get ( '<STR_LIT>' . format ( API_BASE ) , auth = self . user_one . auth ) <EOL> assert_items_equal ( [ res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:1> ] [ '<STR_LIT:id>' ] ] , <EOL> [ self . project_three . _id , self . project_four . _id ] ) <EOL> def test_skip_uneditable_bulk_delete_query_param_required ( self ) : <EOL> url = '<STR_LIT>' . format ( API_BASE ) <EOL> res = self . app . delete_json_api ( url , self . payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> res = self . app . get ( '<STR_LIT>' . format ( API_BASE ) , auth = self . user_one . auth ) <EOL> assert_equal ( res . status_code , <NUM_LIT:200> ) <EOL> assert_equal ( len ( res . 
json [ '<STR_LIT:data>' ] ) , <NUM_LIT:4> ) <EOL> def test_skip_uneditable_has_admin_permission_for_all_nodes ( self ) : <EOL> payload = { <EOL> '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . project_one . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . project_two . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } <EOL> ] <EOL> } <EOL> res = self . app . delete_json_api ( self . url , payload , auth = self . user_one . auth , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> self . project_one . reload ( ) <EOL> self . project_two . reload ( ) <EOL> assert_equal ( self . project_one . is_deleted , True ) <EOL> assert_equal ( self . project_two . is_deleted , True ) <EOL> def test_skip_uneditable_does_not_have_admin_permission_for_any_nodes ( self ) : <EOL> payload = { <EOL> '<STR_LIT:data>' : [ <EOL> { <EOL> '<STR_LIT:id>' : self . project_three . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } , <EOL> { <EOL> '<STR_LIT:id>' : self . project_four . _id , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> } <EOL> ] <EOL> } <EOL> res = self . app . delete_json_api ( self . url , payload , auth = self . user_one . auth , expect_errors = True , bulk = True ) <EOL> assert_equal ( res . status_code , <NUM_LIT> ) <EOL> class TestNodeListPagination ( ApiTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestNodeListPagination , self ) . setUp ( ) <EOL> self . users = [ UserFactory ( ) for _ in range ( <NUM_LIT:11> ) ] <EOL> self . projects = [ ProjectFactory ( is_public = True , creator = self . users [ <NUM_LIT:0> ] ) for _ in range ( <NUM_LIT:11> ) ] <EOL> self . url = '<STR_LIT>' . format ( API_BASE ) <EOL> def tearDown ( self ) : <EOL> super ( TestNodeListPagination , self ) . tearDown ( ) <EOL> Node . remove ( ) <EOL> def test_default_pagination_size ( self ) : <EOL> res = self . app . get ( self . url , auth = Auth ( self . 
users [ <NUM_LIT:0> ] ) ) <EOL> pids = [ e [ '<STR_LIT:id>' ] for e in res . json [ '<STR_LIT:data>' ] ] <EOL> for project in self . projects [ <NUM_LIT:1> : ] : <EOL> assert_in ( project . _id , pids ) <EOL> assert_not_in ( self . projects [ <NUM_LIT:0> ] . _id , pids ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <NUM_LIT:10> ) <EOL> def test_max_page_size_enforced ( self ) : <EOL> url = '<STR_LIT>' . format ( self . url , MAX_PAGE_SIZE + <NUM_LIT:1> ) <EOL> res = self . app . get ( url , auth = Auth ( self . users [ <NUM_LIT:0> ] ) ) <EOL> pids = [ e [ '<STR_LIT:id>' ] for e in res . json [ '<STR_LIT:data>' ] ] <EOL> for project in self . projects : <EOL> assert_in ( project . _id , pids ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , MAX_PAGE_SIZE ) <EOL> def test_embed_page_size_not_affected ( self ) : <EOL> for user in self . users [ <NUM_LIT:1> : ] : <EOL> self . projects [ - <NUM_LIT:1> ] . add_contributor ( user , auth = Auth ( self . users [ <NUM_LIT:0> ] ) , save = True ) <EOL> url = '<STR_LIT>' . format ( self . url , MAX_PAGE_SIZE + <NUM_LIT:1> ) <EOL> res = self . app . get ( url , auth = Auth ( self . users [ <NUM_LIT:0> ] ) ) <EOL> pids = [ e [ '<STR_LIT:id>' ] for e in res . json [ '<STR_LIT:data>' ] ] <EOL> for project in self . projects : <EOL> assert_in ( project . _id , pids ) <EOL> assert_equal ( res . json [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , MAX_PAGE_SIZE ) <EOL> uids = [ e [ '<STR_LIT:id>' ] for e in res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT:data>' ] ] <EOL> for user in self . users [ : <NUM_LIT:9> ] : <EOL> assert_in ( user . _id , uids ) <EOL> assert_not_in ( self . users [ <NUM_LIT:10> ] . _id , uids ) <EOL> assert_equal ( res . json [ '<STR_LIT:data>' ] [ <NUM_LIT:0> ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] [ '<STR_LIT>' ] , <NUM_LIT:10> ) </s>
<s> import logging <EOL> import threading <EOL> from website import settings <EOL> _local = threading . local ( ) <EOL> logger = logging . getLogger ( __name__ ) <EOL> def postcommit_queue ( ) : <EOL> if not hasattr ( _local , '<STR_LIT>' ) : <EOL> _local . postcommit_queue = set ( ) <EOL> return _local . postcommit_queue <EOL> def postcommit_before_request ( ) : <EOL> _local . postcommit_queue = set ( ) <EOL> def postcommit_after_request ( response , base_status_error_code = <NUM_LIT> ) : <EOL> if response . status_code >= base_status_error_code : <EOL> _local . postcommit_queue = set ( ) <EOL> return response <EOL> try : <EOL> if settings . ENABLE_VARNISH and postcommit_queue ( ) : <EOL> import gevent <EOL> threads = [ gevent . spawn ( func , * args ) for func , args in postcommit_queue ( ) ] <EOL> gevent . joinall ( threads ) <EOL> except AttributeError : <EOL> if not settings . DEBUG_MODE : <EOL> logger . error ( '<STR_LIT>' ) <EOL> return response <EOL> def enqueue_postcommit_task ( function_and_args ) : <EOL> postcommit_queue ( ) . add ( function_and_args ) <EOL> handlers = { <EOL> '<STR_LIT>' : postcommit_before_request , <EOL> '<STR_LIT>' : postcommit_after_request , <EOL> } </s>
<s> import os <EOL> import matplotlib . pyplot as plt <EOL> from framework . mongo import database <EOL> from website import settings <EOL> from . utils import plot_dates , mkdirp <EOL> log_collection = database [ '<STR_LIT>' ] <EOL> FIG_PATH = os . path . join ( settings . ANALYTICS_PATH , '<STR_LIT>' , '<STR_LIT>' ) <EOL> mkdirp ( FIG_PATH ) <EOL> def analyze_log_action ( action ) : <EOL> logs = log_collection . find ( { '<STR_LIT:action>' : action } ) <EOL> dates = [ <EOL> log [ '<STR_LIT:date>' ] <EOL> for log in logs <EOL> if log [ '<STR_LIT:date>' ] <EOL> ] <EOL> if not dates : <EOL> return <EOL> fig = plot_dates ( dates ) <EOL> plt . title ( '<STR_LIT>' . format ( action , len ( dates ) ) ) <EOL> plt . savefig ( os . path . join ( FIG_PATH , '<STR_LIT>' . format ( action ) ) ) <EOL> plt . close ( ) <EOL> def main ( ) : <EOL> actions = log_collection . find ( <EOL> { } , <EOL> { '<STR_LIT:action>' : True } <EOL> ) . distinct ( <EOL> '<STR_LIT:action>' <EOL> ) <EOL> for action in actions : <EOL> analyze_log_action ( action ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> from nose . tools import * <EOL> from tests . base import OsfTestCase <EOL> from tests . factories import NodeFactory <EOL> from website . app import init_app <EOL> from website . project . model import Node <EOL> from website . addons . wiki . model import AddonWikiNodeSettings <EOL> from website . addons . osffiles . model import AddonFilesNodeSettings <EOL> logger = logging . getLogger ( __name__ ) <EOL> ADDONS = { AddonFilesNodeSettings , AddonWikiNodeSettings } <EOL> def main ( ) : <EOL> from framework . mongo import db <EOL> init_app ( routes = False ) <EOL> migrate_nodes ( db ) <EOL> def migrate_addons ( node ) : <EOL> ret = False <EOL> if not node . has_addon ( '<STR_LIT>' ) : <EOL> node . add_addon ( '<STR_LIT>' , auth = node . creator , log = False ) <EOL> ret = True <EOL> if not node . has_addon ( '<STR_LIT>' ) : <EOL> node . add_addon ( '<STR_LIT>' , auth = node . creator , log = False ) <EOL> ret = True <EOL> return ret <EOL> def migrate_nodes ( db ) : <EOL> for addon_class in ADDONS : <EOL> print ( '<STR_LIT>' + addon_class . __name__ ) <EOL> for node in get_affected_nodes ( db , addon_class ) : <EOL> print ( '<STR_LIT>' + node . _id ) <EOL> migrate_addons ( node ) <EOL> print ( '<STR_LIT>' ) <EOL> print ( '<STR_LIT>' ) <EOL> def get_affected_nodes ( db , addon_class ) : <EOL> """<STR_LIT>""" <EOL> query = db [ '<STR_LIT>' ] . find ( { <EOL> '<STR_LIT:.>' . join ( <EOL> ( '<STR_LIT>' , <EOL> '<STR_LIT>' , <EOL> addon_class . __name__ . lower ( ) , <EOL> '<STR_LIT>' , <EOL> '<STR_LIT:0>' <EOL> ) <EOL> ) : { '<STR_LIT>' : False } <EOL> } ) <EOL> return ( Node . load ( node [ '<STR_LIT>' ] ) for node in query ) <EOL> class TestMigratingAddons ( OsfTestCase ) : <EOL> def test_migrate_wiki ( self ) : <EOL> node = NodeFactory ( ) <EOL> wiki_addon = node . get_addon ( '<STR_LIT>' ) <EOL> AddonWikiNodeSettings . remove_one ( wiki_addon ) <EOL> assert_false ( node . 
has_addon ( '<STR_LIT>' ) ) <EOL> was_migrated = migrate_addons ( node ) <EOL> assert_true ( was_migrated ) <EOL> assert_true ( node . has_addon ( '<STR_LIT>' ) ) <EOL> def test_migrate_osffiles ( self ) : <EOL> node = NodeFactory ( ) <EOL> osf_addon = node . get_addon ( '<STR_LIT>' ) <EOL> AddonFilesNodeSettings . remove_one ( osf_addon ) <EOL> assert_false ( node . has_addon ( '<STR_LIT>' ) ) <EOL> was_migrated = migrate_addons ( node ) <EOL> assert_true ( was_migrated ) <EOL> assert_true ( node . has_addon ( '<STR_LIT>' ) ) <EOL> def test_no_migration_if_addon_exists ( self ) : <EOL> node = NodeFactory ( ) <EOL> assert_true ( node . has_addon ( '<STR_LIT>' ) ) <EOL> assert_true ( node . has_addon ( '<STR_LIT>' ) ) <EOL> migrate_nodes ( self . db ) <EOL> assert_false ( migrate_addons ( node ) ) <EOL> def test_affected_nodes ( self ) : <EOL> affected_node = NodeFactory ( ) <EOL> AddonWikiNodeSettings . remove_one ( affected_node . get_addon ( '<STR_LIT>' ) ) <EOL> assert_false ( affected_node . has_addon ( '<STR_LIT>' ) ) <EOL> unaffected_node = NodeFactory ( ) <EOL> assert_true ( unaffected_node . has_addon ( '<STR_LIT>' ) ) <EOL> affected_nodes = list ( get_affected_nodes ( self . db , AddonWikiNodeSettings ) ) <EOL> assert_in ( affected_node , affected_nodes ) <EOL> assert_not_in ( unaffected_node , affected_nodes ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> from __future__ import unicode_literals <EOL> import sys <EOL> import logging <EOL> from website . app import init_app <EOL> from website . models import User <EOL> from scripts import utils as script_utils <EOL> from modularodm import Q <EOL> from bson . son import SON <EOL> from framework . mongo import database as db <EOL> from framework . transactions . context import TokuTransaction <EOL> logger = logging . getLogger ( __name__ ) <EOL> pipeline = [ <EOL> { "<STR_LIT>" : "<STR_LIT>" } , <EOL> { "<STR_LIT>" : { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" } } } , <EOL> { "<STR_LIT>" : { "<STR_LIT>" : "<STR_LIT>" , "<STR_LIT:count>" : { "<STR_LIT>" : <NUM_LIT:1> } } } , <EOL> { "<STR_LIT>" : SON ( [ ( "<STR_LIT:count>" , - <NUM_LIT:1> ) , ( "<STR_LIT>" , - <NUM_LIT:1> ) ] ) } <EOL> ] <EOL> def get_duplicate_email ( ) : <EOL> duplicate_emails = [ ] <EOL> result = db [ '<STR_LIT:user>' ] . aggregate ( pipeline ) <EOL> for each in result [ '<STR_LIT:result>' ] : <EOL> if each [ '<STR_LIT:count>' ] > <NUM_LIT:1> : <EOL> duplicate_emails . append ( each [ '<STR_LIT>' ] ) <EOL> return duplicate_emails <EOL> def log_duplicate_acount ( dry ) : <EOL> duplicate_emails = get_duplicate_email ( ) <EOL> count = <NUM_LIT:0> <EOL> if duplicate_emails : <EOL> for email in duplicate_emails : <EOL> users = User . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , email ) & Q ( '<STR_LIT>' , '<STR_LIT>' , None ) & Q ( '<STR_LIT:username>' , '<STR_LIT>' , None ) ) <EOL> for user in users : <EOL> count += <NUM_LIT:1> <EOL> logger . info ( "<STR_LIT>" <EOL> . format ( user . fullname , user . username , user . _id , user . emails ) ) <EOL> logger . info ( "<STR_LIT>" . format ( count ) ) <EOL> def main ( ) : <EOL> init_app ( routes = False ) <EOL> dry = '<STR_LIT>' in sys . argv <EOL> if not dry : <EOL> script_utils . add_file_logger ( logger , __file__ ) <EOL> with TokuTransaction ( ) : <EOL> log_duplicate_acount ( dry ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import sys <EOL> import logging <EOL> from website . app import init_app <EOL> from website . models import User <EOL> from scripts import utils as script_utils <EOL> from modularodm import Q <EOL> logger = logging . getLogger ( __name__ ) <EOL> def do_migration ( records , dry = False ) : <EOL> for user in records : <EOL> log_info ( user ) <EOL> if not dry : <EOL> user . username = None <EOL> user . password = None <EOL> user . email_verifications = { } <EOL> user . verification_key = None <EOL> user . save ( ) <EOL> logger . info ( '<STR_LIT>' . format ( '<STR_LIT>' if dry else '<STR_LIT>' , len ( records ) ) ) <EOL> def get_targets ( ) : <EOL> return User . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , None ) & Q ( '<STR_LIT:username>' , '<STR_LIT>' , None ) ) <EOL> def log_info ( user ) : <EOL> logger . info ( <EOL> '<STR_LIT>' . format ( <EOL> user . _id , <EOL> user . merged_by . _id , <EOL> ) <EOL> ) <EOL> def main ( ) : <EOL> init_app ( routes = False ) <EOL> dry = '<STR_LIT>' in sys . argv <EOL> if not dry : <EOL> script_utils . add_file_logger ( logger , __file__ ) <EOL> do_migration ( get_targets ( ) , dry ) <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> """<STR_LIT>""" <EOL> import logging <EOL> import sys <EOL> from modularodm import Q <EOL> from nose . tools import * <EOL> from website import models <EOL> from website . app import init_app <EOL> from scripts import utils as scripts_utils <EOL> logger = logging . getLogger ( __name__ ) <EOL> def main ( ) : <EOL> init_app ( routes = False ) <EOL> dry_run = '<STR_LIT>' in sys . argv <EOL> if not dry_run : <EOL> scripts_utils . add_file_logger ( logger , __file__ ) <EOL> logger . info ( "<STR_LIT>" <EOL> "<STR_LIT:s>" ) <EOL> for user in get_users_with_unconfirmed_emails ( ) : <EOL> remove_unconfirmed_emails ( user ) <EOL> logger . info ( repr ( user ) ) <EOL> if not dry_run : <EOL> user . save ( ) <EOL> def get_users_with_unconfirmed_emails ( ) : <EOL> return models . User . find ( <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , None ) <EOL> & Q ( '<STR_LIT>' , '<STR_LIT>' , [ ] ) <EOL> ) <EOL> def remove_unconfirmed_emails ( user ) : <EOL> user . emails = [ ] <EOL> if __name__ == '<STR_LIT:__main__>' : <EOL> main ( ) </s>
<s> import logging <EOL> import datetime <EOL> from modularodm import Q <EOL> from dateutil . relativedelta import relativedelta <EOL> from framework . celery_tasks import app as celery_app <EOL> from scripts import utils as scripts_utils <EOL> from website . app import init_app <EOL> from website . addons . box . model import Box <EOL> from website . oauth . models import ExternalAccount <EOL> from website . addons . base . exceptions import AddonError <EOL> logger = logging . getLogger ( __name__ ) <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> def get_targets ( delta ) : <EOL> return ExternalAccount . find ( <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , datetime . datetime . utcnow ( ) - delta ) & <EOL> Q ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) <EOL> ) <EOL> def main ( delta , dry_run ) : <EOL> for record in get_targets ( delta ) : <EOL> logger . info ( <EOL> '<STR_LIT>' . format ( <EOL> record . _id , <EOL> record . expires_at . strftime ( '<STR_LIT>' ) <EOL> ) <EOL> ) <EOL> if not dry_run : <EOL> try : <EOL> Box ( record ) . refresh_oauth_key ( force = True ) <EOL> except AddonError as ex : <EOL> logger . error ( ex . message ) <EOL> @ celery_app . task ( name = '<STR_LIT>' ) <EOL> def run_main ( days = None , dry_run = True ) : <EOL> init_app ( set_backends = True , routes = False ) <EOL> try : <EOL> days = int ( days ) <EOL> except ( ValueError , TypeError ) : <EOL> days = <NUM_LIT> - <NUM_LIT:7> <EOL> delta = relativedelta ( days = days ) <EOL> if not dry_run : <EOL> scripts_utils . add_file_logger ( logger , __file__ ) <EOL> main ( delta , dry_run = dry_run ) </s>
<s> from nose . tools import * <EOL> from scripts . googledrive . migrate_to_external_account import do_migration , get_targets <EOL> from framework . auth import Auth <EOL> from tests . base import OsfTestCase <EOL> from tests . factories import ProjectFactory , UserFactory <EOL> from website . addons . googledrive . model import GoogleDriveUserSettings <EOL> from website . addons . googledrive . tests . factories import GoogleDriveOAuthSettingsFactory <EOL> class TestGoogleDriveMigration ( OsfTestCase ) : <EOL> def test_migration_no_project ( self ) : <EOL> user = UserFactory ( ) <EOL> user . add_addon ( '<STR_LIT>' ) <EOL> user_addon = user . get_addon ( '<STR_LIT>' ) <EOL> user_addon . oauth_settings = GoogleDriveOAuthSettingsFactory ( ) <EOL> user_addon . save ( ) <EOL> do_migration ( [ user_addon ] ) <EOL> user_addon . reload ( ) <EOL> assert_is_none ( user_addon . oauth_settings ) <EOL> assert_equal ( len ( user . external_accounts ) , <NUM_LIT:1> ) <EOL> account = user . external_accounts [ <NUM_LIT:0> ] <EOL> assert_equal ( account . provider , '<STR_LIT>' ) <EOL> assert_equal ( account . oauth_key , '<STR_LIT>' ) <EOL> def test_migration_removes_targets ( self ) : <EOL> GoogleDriveUserSettings . remove ( ) <EOL> user = UserFactory ( ) <EOL> project = ProjectFactory ( creator = user ) <EOL> user . add_addon ( '<STR_LIT>' , auth = Auth ( user ) ) <EOL> user_addon = user . get_addon ( '<STR_LIT>' ) <EOL> user_addon . oauth_settings = GoogleDriveOAuthSettingsFactory ( ) <EOL> user_addon . save ( ) <EOL> project . add_addon ( '<STR_LIT>' , auth = Auth ( user ) ) <EOL> node_addon = project . get_addon ( '<STR_LIT>' ) <EOL> node_addon . foreign_user_settings = user_addon <EOL> node_addon . save ( ) <EOL> assert_equal ( get_targets ( ) . count ( ) , <NUM_LIT:1> ) <EOL> do_migration ( [ user_addon ] ) <EOL> user_addon . reload ( ) <EOL> assert_equal ( get_targets ( ) . 
count ( ) , <NUM_LIT:0> ) <EOL> def test_migration_multiple_users ( self ) : <EOL> user1 = UserFactory ( ) <EOL> user2 = UserFactory ( ) <EOL> oauth_settings = GoogleDriveOAuthSettingsFactory ( ) <EOL> user1 . add_addon ( '<STR_LIT>' ) <EOL> user1_addon = user1 . get_addon ( '<STR_LIT>' ) <EOL> user1_addon . oauth_settings = oauth_settings <EOL> user1_addon . save ( ) <EOL> user2 . add_addon ( '<STR_LIT>' ) <EOL> user2_addon = user2 . get_addon ( '<STR_LIT>' ) <EOL> user2_addon . oauth_settings = oauth_settings <EOL> user2_addon . save ( ) <EOL> do_migration ( [ user1_addon , user2_addon ] ) <EOL> user1_addon . reload ( ) <EOL> user2_addon . reload ( ) <EOL> assert_equal ( <EOL> user1 . external_accounts [ <NUM_LIT:0> ] , <EOL> user2 . external_accounts [ <NUM_LIT:0> ] , <EOL> ) <EOL> def test_get_targets ( self ) : <EOL> GoogleDriveUserSettings . remove ( ) <EOL> addons = [ <EOL> GoogleDriveUserSettings ( ) , <EOL> GoogleDriveUserSettings ( oauth_settings = GoogleDriveOAuthSettingsFactory ( ) ) , <EOL> ] <EOL> for addon in addons : <EOL> addon . save ( ) <EOL> targets = get_targets ( ) <EOL> assert_equal ( targets . count ( ) , <NUM_LIT:1> ) <EOL> assert_equal ( targets [ <NUM_LIT:0> ] . _id , addons [ - <NUM_LIT:1> ] . _id ) </s>
<s> import logging <EOL> from datetime import datetime <EOL> from modularodm import Q <EOL> from framework . auth import User <EOL> from framework . celery_tasks import app as celery_app <EOL> from framework . transactions . context import TokuTransaction <EOL> from website . app import init_app <EOL> from website import mails , settings <EOL> from scripts . utils import add_file_logger <EOL> logger = logging . getLogger ( __name__ ) <EOL> logging . basicConfig ( level = logging . INFO ) <EOL> def main ( dry_run = True ) : <EOL> for user in find_inactive_users_with_no_inactivity_email_sent_or_queued ( ) : <EOL> if dry_run : <EOL> logger . warn ( '<STR_LIT>' ) <EOL> logger . warn ( '<STR_LIT>' . format ( user . username ) ) <EOL> if not dry_run : <EOL> with TokuTransaction ( ) : <EOL> mails . queue_mail ( <EOL> to_addr = user . username , <EOL> mail = mails . NO_LOGIN , <EOL> send_at = datetime . utcnow ( ) , <EOL> user = user , <EOL> fullname = user . fullname , <EOL> ) <EOL> def find_inactive_users_with_no_inactivity_email_sent_or_queued ( ) : <EOL> inactive_users = User . find ( <EOL> ( Q ( '<STR_LIT>' , '<STR_LIT>' , datetime . utcnow ( ) - settings . NO_LOGIN_WAIT_TIME ) & Q ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) | <EOL> ( Q ( '<STR_LIT>' , '<STR_LIT>' , datetime . utcnow ( ) - settings . NO_LOGIN_OSF4M_WAIT_TIME ) & Q ( '<STR_LIT>' , '<STR_LIT>' , '<STR_LIT>' ) ) <EOL> ) <EOL> inactive_emails = mails . QueuedMail . find ( Q ( '<STR_LIT>' , '<STR_LIT>' , mails . NO_LOGIN_TYPE ) ) <EOL> users_sent_id = [ email . user . _id for email in inactive_emails ] <EOL> inactive_ids = [ user . _id for user in inactive_users if user . is_active ] <EOL> users_to_send = [ User . load ( id ) for id in ( set ( inactive_ids ) - set ( users_sent_id ) ) ] <EOL> return users_to_send <EOL> @ celery_app . 
task ( name = '<STR_LIT>' ) <EOL> def run_main ( dry_run = True ) : <EOL> init_app ( routes = False ) <EOL> if not dry_run : <EOL> add_file_logger ( logger , __file__ ) <EOL> main ( dry_run = dry_run ) </s>
<s> import datetime <EOL> from nose . tools import * <EOL> from scripts import parse_citation_styles <EOL> from framework . auth . core import Auth <EOL> from website . util import api_url_for <EOL> from website . citations . utils import datetime_to_csl <EOL> from website . models import Node , User <EOL> from flask import redirect <EOL> from tests . base import OsfTestCase <EOL> from tests . factories import ProjectFactory , UserFactory , AuthUserFactory <EOL> class CitationsUtilsTestCase ( OsfTestCase ) : <EOL> def test_datetime_to_csl ( self ) : <EOL> now = datetime . datetime . utcnow ( ) <EOL> assert_equal ( <EOL> datetime_to_csl ( now ) , <EOL> { '<STR_LIT>' : [ [ now . year , now . month , now . day ] ] } , <EOL> ) <EOL> class CitationsNodeTestCase ( OsfTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CitationsNodeTestCase , self ) . setUp ( ) <EOL> self . node = ProjectFactory ( ) <EOL> def tearDown ( self ) : <EOL> super ( CitationsNodeTestCase , self ) . tearDown ( ) <EOL> Node . remove ( ) <EOL> User . remove ( ) <EOL> def test_csl_single_author ( self ) : <EOL> assert_equal ( <EOL> self . node . csl , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ { <EOL> '<STR_LIT>' : self . node . creator . given_name , <EOL> '<STR_LIT>' : self . node . creator . family_name , <EOL> } ] , <EOL> '<STR_LIT>' : self . node . display_absolute_url , <EOL> '<STR_LIT>' : datetime_to_csl ( self . node . logs [ - <NUM_LIT:1> ] . date ) , <EOL> '<STR_LIT:title>' : self . node . title , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : self . node . _id , <EOL> } , <EOL> ) <EOL> def test_csl_multiple_authors ( self ) : <EOL> user = UserFactory ( ) <EOL> self . node . add_contributor ( user ) <EOL> self . node . save ( ) <EOL> assert_equal ( <EOL> self . node . csl , <EOL> { <EOL> '<STR_LIT>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : [ <EOL> { <EOL> '<STR_LIT>' : self . node . creator . given_name , <EOL> '<STR_LIT>' : self . node . creator . 
family_name , <EOL> } , <EOL> { <EOL> '<STR_LIT>' : user . given_name , <EOL> '<STR_LIT>' : user . family_name , <EOL> } <EOL> ] , <EOL> '<STR_LIT>' : self . node . display_absolute_url , <EOL> '<STR_LIT>' : datetime_to_csl ( self . node . logs [ - <NUM_LIT:1> ] . date ) , <EOL> '<STR_LIT:title>' : self . node . title , <EOL> '<STR_LIT:type>' : '<STR_LIT>' , <EOL> '<STR_LIT:id>' : self . node . _id , <EOL> } , <EOL> ) <EOL> def test_non_visible_contributors_arent_included_in_csl ( self ) : <EOL> node = ProjectFactory ( ) <EOL> visible = UserFactory ( ) <EOL> node . add_contributor ( visible , auth = Auth ( node . creator ) ) <EOL> invisible = UserFactory ( ) <EOL> node . add_contributor ( invisible , auth = Auth ( node . creator ) , visible = False ) <EOL> node . save ( ) <EOL> assert_equal ( len ( node . csl [ '<STR_LIT>' ] ) , <NUM_LIT:2> ) <EOL> expected_authors = [ <EOL> contrib . csl_name for contrib in [ node . creator , visible ] <EOL> ] <EOL> assert_equal ( node . csl [ '<STR_LIT>' ] , expected_authors ) <EOL> class CitationsUserTestCase ( OsfTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( CitationsUserTestCase , self ) . setUp ( ) <EOL> self . user = UserFactory ( ) <EOL> def tearDown ( self ) : <EOL> super ( CitationsUserTestCase , self ) . tearDown ( ) <EOL> User . remove ( ) <EOL> def test_user_csl ( self ) : <EOL> assert_equal ( <EOL> self . user . csl_name , <EOL> { <EOL> '<STR_LIT>' : self . user . given_name , <EOL> '<STR_LIT>' : self . user . family_name , <EOL> } , <EOL> ) <EOL> class CitationsViewsTestCase ( OsfTestCase ) : <EOL> @ classmethod <EOL> def setUpClass ( cls ) : <EOL> super ( CitationsViewsTestCase , cls ) . setUpClass ( ) <EOL> try : <EOL> parse_citation_styles . main ( ) <EOL> except OSError : <EOL> pass <EOL> def test_list_styles ( self ) : <EOL> response = self . app . get ( api_url_for ( '<STR_LIT>' ) ) <EOL> assert_true ( response . json ) <EOL> assert_equal ( <EOL> len ( <EOL> [ <EOL> style for style in response . 
json [ '<STR_LIT>' ] <EOL> if style . get ( '<STR_LIT:id>' ) == '<STR_LIT>' <EOL> ] <EOL> ) , <EOL> <NUM_LIT:1> , <EOL> ) <EOL> def test_list_styles_filter ( self ) : <EOL> response = self . app . get ( api_url_for ( '<STR_LIT>' , q = '<STR_LIT>' ) ) <EOL> assert_true ( response . json ) <EOL> assert_equal ( <EOL> len ( response . json [ '<STR_LIT>' ] ) , <NUM_LIT:1> <EOL> ) <EOL> assert_equal ( <EOL> response . json [ '<STR_LIT>' ] [ <NUM_LIT:0> ] [ '<STR_LIT:id>' ] , '<STR_LIT>' <EOL> ) <EOL> def test_node_citation_view ( self ) : <EOL> node = ProjectFactory ( ) <EOL> user = AuthUserFactory ( ) <EOL> node . add_contributor ( user ) <EOL> node . save ( ) <EOL> response = self . app . get ( "<STR_LIT>" + "<STR_LIT>" + node . _id + "<STR_LIT>" , auto_follow = True , auth = user . auth ) <EOL> assert_true ( response . json ) </s>
<s> import httplib as http <EOL> import mock <EOL> import unittest <EOL> from nose . tools import * <EOL> import datetime <EOL> from modularodm import fields , storage , Q <EOL> from tests . base import OsfTestCase <EOL> from tests import factories <EOL> from tests . utils import mock_archive , assert_logs <EOL> from framework . auth import Auth <EOL> from framework . mongo import handlers <EOL> from website . exceptions import NodeStateError <EOL> from website . project . model import ensure_schemas , Node , NodeLog <EOL> from website . project . sanctions import Sanction , TokenApprovableSanction , EmailApprovableSanction , PreregCallbackMixin <EOL> def valid_user ( ) : <EOL> return factories . UserFactory ( system_tags = [ '<STR_LIT>' ] ) <EOL> class SanctionTestClass ( TokenApprovableSanction ) : <EOL> DISPLAY_NAME = '<STR_LIT>' <EOL> initiated_by = fields . ForeignField ( '<STR_LIT:user>' , backref = '<STR_LIT>' ) <EOL> def _validate_authorizer ( self , user ) : <EOL> return '<STR_LIT>' in user . system_tags <EOL> def _get_registration ( self ) : <EOL> return factories . RegistrationFactory ( ) <EOL> class EmailApprovableSanctionTestClass ( PreregCallbackMixin , EmailApprovableSanction ) : <EOL> AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = '<STR_LIT>' <EOL> NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE = '<STR_LIT>' <EOL> def _get_registration ( self ) : <EOL> return factories . RegistrationFactory ( ) <EOL> class SanctionsTestCase ( OsfTestCase ) : <EOL> def setUp ( self , * args , ** kwargs ) : <EOL> super ( SanctionsTestCase , self ) . setUp ( * args , ** kwargs ) <EOL> handlers . set_up_storage ( [ <EOL> SanctionTestClass , <EOL> EmailApprovableSanctionTestClass <EOL> ] , storage . MongoStorage ) <EOL> class TestSanction ( SanctionsTestCase ) : <EOL> def setUp ( self , * args , ** kwargs ) : <EOL> super ( TestSanction , self ) . setUp ( * args , ** kwargs ) <EOL> self . user = valid_user ( ) <EOL> self . invalid_user = factories . UserFactory ( ) <EOL> self . 
sanction = SanctionTestClass ( <EOL> initiated_by = self . user , <EOL> end_date = datetime . datetime . now ( ) + datetime . timedelta ( days = <NUM_LIT:2> ) <EOL> ) <EOL> self . registration = factories . RegistrationFactory ( ) <EOL> self . sanction . add_authorizer ( self . user , self . registration , save = True ) <EOL> def test_pending_approval ( self ) : <EOL> assert_true ( self . sanction . is_pending_approval ) <EOL> self . sanction . state = Sanction . APPROVED <EOL> assert_false ( self . sanction . is_pending_approval ) <EOL> def test_validate_authorizer ( self ) : <EOL> assert_false ( self . sanction . _validate_authorizer ( self . invalid_user ) ) <EOL> assert_true ( self . sanction . _validate_authorizer ( self . user ) ) <EOL> def test_add_authorizer ( self ) : <EOL> new_user = valid_user ( ) <EOL> added = self . sanction . add_authorizer ( new_user , node = self . registration ) <EOL> assert_true ( added ) <EOL> assert_in ( new_user . _id , self . sanction . approval_state . keys ( ) ) <EOL> assert_in ( '<STR_LIT>' , self . sanction . approval_state [ new_user . _id ] ) <EOL> assert_in ( '<STR_LIT>' , self . sanction . approval_state [ new_user . _id ] ) <EOL> assert_equal ( self . sanction . approval_state [ new_user . _id ] [ '<STR_LIT>' ] , self . registration . _id ) <EOL> def test_add_authorizer_already_added ( self ) : <EOL> added = self . sanction . add_authorizer ( self . user , self . registration ) <EOL> assert_false ( added ) <EOL> assert_in ( self . user . _id , self . sanction . approval_state . keys ( ) ) <EOL> def test_add_authorizer_invalid ( self ) : <EOL> invalid_user = factories . UserFactory ( ) <EOL> added = self . sanction . add_authorizer ( invalid_user , self . registration ) <EOL> assert_false ( added ) <EOL> assert_not_in ( invalid_user . _id , self . sanction . approval_state . keys ( ) ) <EOL> def test_remove_authorizer ( self ) : <EOL> removed = self . sanction . remove_authorizer ( self . user ) <EOL> self . sanction . 
save ( ) <EOL> assert_true ( removed ) <EOL> assert_not_in ( self . user . _id , self . sanction . approval_state . keys ( ) ) <EOL> def test_remove_authorizer_not_added ( self ) : <EOL> not_added = factories . UserFactory ( ) <EOL> removed = self . sanction . remove_authorizer ( not_added ) <EOL> self . sanction . save ( ) <EOL> assert_false ( removed ) <EOL> assert_not_in ( not_added , self . sanction . approval_state . keys ( ) ) <EOL> @ mock . patch . object ( SanctionTestClass , '<STR_LIT>' ) <EOL> def test_on_approve_incomplete ( self , mock_complete ) : <EOL> another_user = valid_user ( ) <EOL> self . sanction . add_authorizer ( another_user , self . sanction . _get_registration ( ) , approved = True ) <EOL> self . sanction . _on_approve ( self . user , '<STR_LIT>' ) <EOL> assert_false ( mock_complete . called ) <EOL> @ mock . patch . object ( SanctionTestClass , '<STR_LIT>' ) <EOL> def test_on_approve_complete ( self , mock_complete ) : <EOL> self . sanction . approval_state [ self . user . _id ] [ '<STR_LIT>' ] = True <EOL> self . sanction . _on_approve ( self . user , '<STR_LIT>' ) <EOL> assert_true ( mock_complete . called ) <EOL> def test_on_reject_raises_NotImplementedError ( self ) : <EOL> err = lambda : self . sanction . _on_reject ( self . user ) <EOL> assert_raises ( NotImplementedError , err ) <EOL> def test_on_complete_raises_NotImplementedError ( self ) : <EOL> err = lambda : self . sanction . _on_complete ( self . user ) <EOL> assert_raises ( NotImplementedError , err ) <EOL> @ mock . patch . object ( SanctionTestClass , '<STR_LIT>' ) <EOL> def test_approve ( self , mock_on_approve ) : <EOL> approval_token = self . sanction . approval_state [ self . user . _id ] [ '<STR_LIT>' ] <EOL> self . sanction . approve ( self . user , approval_token ) <EOL> assert_true ( self . sanction . approval_state [ self . user . _id ] [ '<STR_LIT>' ] ) <EOL> assert_true ( mock_on_approve . called ) <EOL> @ mock . patch . 
object ( SanctionTestClass , '<STR_LIT>' ) <EOL> def test_reject ( self , mock_on_reject ) : <EOL> rejection_token = self . sanction . approval_state [ self . user . _id ] [ '<STR_LIT>' ] <EOL> self . sanction . reject ( self . user , rejection_token ) <EOL> assert_false ( self . sanction . approval_state [ self . user . _id ] [ '<STR_LIT>' ] ) <EOL> assert_true ( mock_on_reject . called ) <EOL> @ mock . patch . object ( SanctionTestClass , '<STR_LIT>' ) <EOL> @ mock . patch . object ( SanctionTestClass , '<STR_LIT>' ) <EOL> def test_ask ( self , mock_notify_non_authorizer , mock_notify_authorizer ) : <EOL> other_user = factories . UserFactory ( ) <EOL> p1 = factories . ProjectFactory ( ) <EOL> p2 = factories . ProjectFactory ( ) <EOL> group = [ <EOL> ( other_user , p1 ) , <EOL> ( self . user , p2 ) , <EOL> ] <EOL> self . sanction . ask ( group ) <EOL> mock_notify_non_authorizer . assert_called_once_with ( other_user , p1 ) <EOL> mock_notify_authorizer . assert_called_once_with ( self . user , p2 ) <EOL> class TestEmailApprovableSanction ( SanctionsTestCase ) : <EOL> def setUp ( self , * args , ** kwargs ) : <EOL> super ( TestEmailApprovableSanction , self ) . setUp ( * args , ** kwargs ) <EOL> self . user = factories . UserFactory ( ) <EOL> self . sanction = EmailApprovableSanctionTestClass ( <EOL> initiated_by = self . user , <EOL> end_date = datetime . datetime . now ( ) + datetime . timedelta ( days = <NUM_LIT:2> ) <EOL> ) <EOL> self . sanction . add_authorizer ( self . user , self . sanction . _get_registration ( ) ) <EOL> def test_format_or_empty ( self ) : <EOL> context = { <EOL> '<STR_LIT:key>' : '<STR_LIT:value>' <EOL> } <EOL> template = '<STR_LIT>' <EOL> assert_equal ( EmailApprovableSanctionTestClass . _format_or_empty ( template , context ) , '<STR_LIT>' ) <EOL> def test_format_or_empty_empty ( self ) : <EOL> context = None <EOL> template = '<STR_LIT>' <EOL> assert_equal ( EmailApprovableSanctionTestClass . 
_format_or_empty ( template , context ) , '<STR_LIT>' ) <EOL> @ mock . patch . object ( EmailApprovableSanctionTestClass , '<STR_LIT>' ) <EOL> @ mock . patch . object ( EmailApprovableSanctionTestClass , '<STR_LIT>' ) <EOL> def test_notify_authorizer ( self , mock_get_email_template_context , mock_send_approval_email ) : <EOL> mock_get_email_template_context . return_value = '<STR_LIT>' <EOL> reg = self . sanction . _get_registration ( ) <EOL> self . sanction . _notify_authorizer ( self . user , reg ) <EOL> mock_get_email_template_context . assert_called_once_with ( <EOL> self . user , <EOL> reg , <EOL> is_authorizer = True <EOL> ) <EOL> mock_send_approval_email . assert_called_once_with ( self . user , '<STR_LIT>' , '<STR_LIT>' ) <EOL> @ mock . patch . object ( EmailApprovableSanctionTestClass , '<STR_LIT>' ) <EOL> @ mock . patch . object ( EmailApprovableSanctionTestClass , '<STR_LIT>' ) <EOL> def test_notify_non_authorizer ( self , mock_get_email_template_context , mock_send_approval_email ) : <EOL> mock_get_email_template_context . return_value = '<STR_LIT>' <EOL> other_user = factories . UserFactory ( ) <EOL> reg = self . sanction . _get_registration ( ) <EOL> self . sanction . _notify_non_authorizer ( other_user , reg ) <EOL> mock_get_email_template_context . assert_called_once_with ( <EOL> other_user , <EOL> reg <EOL> ) <EOL> mock_send_approval_email . assert_called_once_with ( other_user , '<STR_LIT>' , '<STR_LIT>' ) <EOL> def test_add_authorizer ( self ) : <EOL> assert_is_not_none ( self . sanction . stashed_urls . get ( self . user . _id ) ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test__notify_authorizer ( self , mock_send ) : <EOL> self . sanction . _notify_authorizer ( self . user , self . sanction . _get_registration ( ) ) <EOL> assert_true ( mock_send . called ) <EOL> args , kwargs = mock_send . call_args <EOL> assert_true ( self . user . username in args ) <EOL> @ mock . 
patch ( '<STR_LIT>' ) <EOL> def test__notify_non_authorizer ( self , mock_send ) : <EOL> self . sanction . _notify_non_authorizer ( self . user , self . sanction . _get_registration ( ) ) <EOL> assert_true ( mock_send . called ) <EOL> args , kwargs = mock_send . call_args <EOL> assert_true ( self . user . username in args ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_ask ( self , mock_send ) : <EOL> group = [ ( self . user , factories . ProjectFactory ( ) ) ] <EOL> for i in range ( <NUM_LIT:5> ) : <EOL> u , n = factories . UserFactory ( ) , factories . ProjectFactory ( ) <EOL> group . append ( ( u , n ) ) <EOL> self . sanction . ask ( group ) <EOL> authorizer = group . pop ( <NUM_LIT:0> ) [ <NUM_LIT:0> ] <EOL> mock_send . assert_any_call ( <EOL> authorizer . username , <EOL> self . sanction . AUTHORIZER_NOTIFY_EMAIL_TEMPLATE , <EOL> user = authorizer , <EOL> ** { } <EOL> ) <EOL> for user , _ in group : <EOL> mock_send . assert_any_call ( <EOL> user . username , <EOL> self . sanction . NON_AUTHORIZER_NOTIFY_EMAIL_TEMPLATE , <EOL> user = user , <EOL> ** { } <EOL> ) <EOL> def test_on_complete_notify_initiator ( self ) : <EOL> sanction = EmailApprovableSanctionTestClass ( <EOL> initiated_by = self . user , <EOL> end_date = datetime . datetime . now ( ) + datetime . timedelta ( days = <NUM_LIT:2> ) , <EOL> notify_initiator_on_complete = True <EOL> ) <EOL> sanction . add_authorizer ( self . user , sanction . _get_registration ( ) ) <EOL> sanction . save ( ) <EOL> with mock . patch . object ( EmailApprovableSanctionTestClass , '<STR_LIT>' ) as mock_notify : <EOL> sanction . _on_complete ( self . user ) <EOL> assert_equal ( mock_notify . call_count , <NUM_LIT:1> ) <EOL> def test_notify_initiator_with_PreregCallbackMixin ( self ) : <EOL> sanction = EmailApprovableSanctionTestClass ( <EOL> initiated_by = self . user , <EOL> end_date = datetime . datetime . now ( ) + datetime . 
timedelta ( days = <NUM_LIT:2> ) , <EOL> notify_initiator_on_complete = True <EOL> ) <EOL> sanction . add_authorizer ( self . user , sanction . _get_registration ( ) ) <EOL> sanction . save ( ) <EOL> with mock . patch . object ( PreregCallbackMixin , '<STR_LIT>' ) as mock_notify : <EOL> sanction . _on_complete ( self . user ) <EOL> assert_equal ( mock_notify . call_count , <NUM_LIT:1> ) <EOL> class TestRegistrationApproval ( OsfTestCase ) : <EOL> def setUp ( self ) : <EOL> super ( TestRegistrationApproval , self ) . setUp ( ) <EOL> ensure_schemas ( ) <EOL> self . user = factories . AuthUserFactory ( ) <EOL> self . registration = factories . RegistrationFactory ( creator = self . user , archive = True ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_non_contributor_GET_approval_returns_HTTPError ( self , mock_enqueue ) : <EOL> non_contributor = factories . AuthUserFactory ( ) <EOL> approval_token = self . registration . registration_approval . approval_state [ self . user . _id ] [ '<STR_LIT>' ] <EOL> approval_url = self . registration . web_url_for ( '<STR_LIT>' , token = approval_token ) <EOL> res = self . app . get ( approval_url , auth = non_contributor . auth , expect_errors = True ) <EOL> assert_equal ( http . FORBIDDEN , res . status_code ) <EOL> assert_true ( self . registration . is_pending_registration ) <EOL> assert_false ( self . registration . is_registration_approved ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_non_contributor_GET_disapproval_returns_HTTPError ( self , mock_enqueue ) : <EOL> non_contributor = factories . AuthUserFactory ( ) <EOL> rejection_token = self . registration . registration_approval . approval_state [ self . user . _id ] [ '<STR_LIT>' ] <EOL> rejection_url = self . registration . web_url_for ( '<STR_LIT>' , token = rejection_token ) <EOL> res = self . app . get ( rejection_url , auth = non_contributor . auth , expect_errors = True ) <EOL> assert_equal ( http . FORBIDDEN , res . 
status_code ) <EOL> assert_true ( self . registration . is_pending_registration ) <EOL> assert_false ( self . registration . is_registration_approved ) <EOL> class TestRegistrationApprovalHooks ( OsfTestCase ) : <EOL> def test_on_complete_sets_state_to_approved ( self ) : <EOL> user = factories . UserFactory ( ) <EOL> registration = factories . RegistrationFactory ( creator = user ) <EOL> registration . require_approval ( user ) <EOL> assert_true ( registration . registration_approval . is_pending_approval ) <EOL> registration . registration_approval . _on_complete ( None ) <EOL> assert_false ( registration . registration_approval . is_pending_approval ) <EOL> class TestNodeSanctionStates ( OsfTestCase ) : <EOL> def test_sanction_none ( self ) : <EOL> node = factories . NodeFactory ( ) <EOL> assert_false ( node . sanction ) <EOL> def test_sanction_embargo_termination_first ( self ) : <EOL> embargo_termination_approval = factories . EmbargoTerminationApprovalFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , embargo_termination_approval ) ) <EOL> assert_equal ( registration . sanction , embargo_termination_approval ) <EOL> def test_sanction_retraction ( self ) : <EOL> retraction = factories . RetractionFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , retraction ) ) <EOL> assert_equal ( registration . sanction , retraction ) <EOL> def test_sanction_embargo ( self ) : <EOL> embargo = factories . EmbargoFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , embargo ) ) <EOL> assert_equal ( registration . sanction , embargo ) <EOL> def test_sanction_registration_approval ( self ) : <EOL> registration_approval = factories . RegistrationApprovalFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , registration_approval ) ) <EOL> assert_equal ( registration . 
sanction , registration_approval ) <EOL> def test_sanction_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node ) as registration : <EOL> approval = registration . registration_approval <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_equal ( sub_reg . sanction , approval ) <EOL> def test_is_pending_registration ( self ) : <EOL> registration_approval = factories . RegistrationApprovalFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , registration_approval ) ) <EOL> assert_true ( registration_approval . is_pending_approval ) <EOL> assert_true ( registration . is_pending_registration ) <EOL> def test_is_pending_registration_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node ) as registration : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_true ( sub_reg . is_pending_registration ) <EOL> def test_is_registration_approved ( self ) : <EOL> registration_approval = factories . RegistrationApprovalFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , registration_approval ) ) <EOL> with mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = True ) ) : <EOL> assert_true ( registration . is_registration_approved ) <EOL> def test_is_registration_approved_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . 
NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node ) as registration : <EOL> with mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = True ) ) : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_true ( sub_reg . is_registration_approved ) <EOL> def test_is_retracted ( self ) : <EOL> retraction = factories . RetractionFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , retraction ) ) <EOL> with mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = True ) ) : <EOL> assert_true ( registration . is_retracted ) <EOL> def test_is_retracted_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node , autoapprove = True , retraction = True , autoapprove_retraction = True ) as registration : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_true ( sub_reg . is_retracted ) <EOL> def test_is_pending_retraction ( self ) : <EOL> retraction = factories . RetractionFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , retraction ) ) <EOL> assert_true ( retraction . is_pending_approval ) <EOL> assert_true ( registration . is_pending_retraction ) <EOL> def test_is_pending_retraction_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node , autoapprove = True , retraction = True ) as registration : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . 
nodes [ <NUM_LIT:0> ] <EOL> assert_true ( sub_reg . is_pending_retraction ) <EOL> def test_embargo_end_date ( self ) : <EOL> embargo = factories . EmbargoFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , embargo ) ) <EOL> assert_equal ( registration . embargo_end_date , embargo . end_date ) <EOL> def test_embargo_end_date_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node , embargo = True ) as registration : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_equal ( sub_reg . embargo_end_date , registration . embargo_end_date ) <EOL> def test_is_pending_embargo ( self ) : <EOL> embargo = factories . EmbargoFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , embargo ) ) <EOL> assert_true ( embargo . is_pending_approval ) <EOL> assert_true ( registration . is_pending_embargo ) <EOL> def test_is_pending_embargo_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node , embargo = True ) as registration : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_true ( sub_reg . is_pending_embargo ) <EOL> def test_is_embargoed ( self ) : <EOL> embargo = factories . EmbargoFactory ( ) <EOL> registration = Node . find_one ( Q ( '<STR_LIT>' , '<STR_LIT>' , embargo ) ) <EOL> with mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = True ) ) : <EOL> assert_true ( registration . 
is_embargoed ) <EOL> def test_is_embargoed_searches_parents ( self ) : <EOL> user = factories . AuthUserFactory ( ) <EOL> node = factories . NodeFactory ( creator = user ) <EOL> child = factories . NodeFactory ( creator = user , parent = node ) <EOL> factories . NodeFactory ( creator = user , parent = child ) <EOL> with mock_archive ( node , embargo = True , autoapprove = True ) as registration : <EOL> sub_reg = registration . nodes [ <NUM_LIT:0> ] . nodes [ <NUM_LIT:0> ] <EOL> assert_true ( sub_reg . is_embargoed ) <EOL> class TestNodeEmbargoTerminations ( OsfTestCase ) : <EOL> def tearDown ( self ) : <EOL> with mock . patch ( '<STR_LIT>' , mock . Mock ( return_value = None ) ) : <EOL> super ( TestNodeEmbargoTerminations , self ) . tearDown ( ) <EOL> def setUp ( self ) : <EOL> super ( TestNodeEmbargoTerminations , self ) . setUp ( ) <EOL> self . user = factories . AuthUserFactory ( ) <EOL> self . node = factories . ProjectFactory ( creator = self . user ) <EOL> with mock_archive ( self . node , embargo = True , autoapprove = True ) as registration : <EOL> self . registration = registration <EOL> self . not_embargoed = factories . RegistrationFactory ( ) <EOL> def test_request_embargo_termination_not_embargoed ( self ) : <EOL> with assert_raises ( NodeStateError ) : <EOL> self . not_embargoed . request_embargo_termination ( Auth ( self . user ) ) <EOL> def test_terminate_embargo_makes_registrations_public ( self ) : <EOL> self . registration . terminate_embargo ( Auth ( self . user ) ) <EOL> for node in self . registration . node_and_primary_descendants ( ) : <EOL> assert_true ( node . is_public ) <EOL> assert_false ( node . is_embargoed ) <EOL> @ assert_logs ( NodeLog . EMBARGO_TERMINATED , '<STR_LIT>' ) <EOL> def test_terminate_embargo_adds_log_to_registered_from ( self ) : <EOL> self . registration . terminate_embargo ( Auth ( self . user ) ) <EOL> def test_terminate_embargo_log_is_nouser ( self ) : <EOL> self . registration . terminate_embargo ( Auth ( self . 
user ) ) <EOL> last_log = self . node . logs [ - <NUM_LIT:1> ] <EOL> assert_equal ( last_log . action , NodeLog . EMBARGO_TERMINATED ) <EOL> assert_equal ( last_log . user , None ) </s>
<s> import mock <EOL> import random <EOL> import string <EOL> from nose . tools import * <EOL> import website . app <EOL> from webtest_plus import TestApp <EOL> from website . util import api_url_for , web_url_for <EOL> from website . addons . base . testing import AddonTestCase <EOL> from website . addons . badges . util import get_node_badges <EOL> from tests . factories import AuthUserFactory <EOL> from utils import create_mock_badger , create_badge_dict , get_garbage <EOL> class TestBadgesViews ( AddonTestCase ) : <EOL> ADDON_SHORT_NAME = '<STR_LIT>' <EOL> def setUp ( self ) : <EOL> super ( TestBadgesViews , self ) . setUp ( ) <EOL> def set_node_settings ( self , settings ) : <EOL> return settings <EOL> def set_user_settings ( self , settings ) : <EOL> return create_mock_badger ( settings ) <EOL> def create_app ( self ) : <EOL> return TestApp ( app ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_badge ( self , img_proc ) : <EOL> img_proc . return_value = '<STR_LIT>' <EOL> badge = create_badge_dict ( ) <EOL> ret = self . app . post_json ( api_url_for ( '<STR_LIT>' ) , badge , auth = self . user . auth ) <EOL> self . user_settings . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> assert_equals ( ret . content_type , '<STR_LIT:application/json>' ) <EOL> assert_true ( ret . json [ '<STR_LIT>' ] in [ badge . _id for badge in self . user_settings . badges ] ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_badge_no_data ( self , img_proc ) : <EOL> url = api_url_for ( '<STR_LIT>' ) <EOL> badge = { } <EOL> ret = self . app . post_json ( url , badge , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_badge_some_data ( self , img_proc ) : <EOL> img_proc . return_value = '<STR_LIT>' <EOL> url = api_url_for ( '<STR_LIT>' ) <EOL> badge = { <EOL> '<STR_LIT>' : '<STR_LIT>' . join ( random . choice ( string . 
ascii_lowercase + string . digits ) for _ in range ( <NUM_LIT:4> ) ) , <EOL> '<STR_LIT:description>' : '<STR_LIT>' . join ( random . choice ( string . ascii_letters + string . digits ) for _ in range ( <NUM_LIT:6> ) ) <EOL> } <EOL> ret = self . app . post_json ( url , badge , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_badge_empty_data ( self , img_proc ) : <EOL> img_proc . return_value = '<STR_LIT>' <EOL> url = api_url_for ( '<STR_LIT>' ) <EOL> badge = create_badge_dict ( ) <EOL> badge [ '<STR_LIT>' ] = '<STR_LIT>' <EOL> ret = self . app . post_json ( url , badge , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_create_badge_cant_issue ( self , img_proc ) : <EOL> img_proc . return_value = '<STR_LIT>' <EOL> self . user . delete_addon ( '<STR_LIT>' ) <EOL> url = api_url_for ( '<STR_LIT>' ) <EOL> badge = create_badge_dict ( ) <EOL> ret = self . app . post_json ( url , badge , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> def test_award_badge ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> def test_award_badge_bad_badge_id ( self ) : <EOL> badgeid = '<STR_LIT>' <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . 
_id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> def test_award_badge_empty_badge_id ( self ) : <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : '<STR_LIT>' } , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> def test_award_badge_no_badge_id ( self ) : <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { } , auth = self . user . auth , expect_errors = True ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> @ mock . patch ( '<STR_LIT>' ) <EOL> def test_badge_html ( self , img_proc ) : <EOL> img_proc . return_value = '<STR_LIT>' <EOL> badge = { <EOL> '<STR_LIT>' : get_garbage ( ) , <EOL> '<STR_LIT:description>' : get_garbage ( ) , <EOL> '<STR_LIT>' : get_garbage ( ) , <EOL> '<STR_LIT>' : get_garbage ( ) <EOL> } <EOL> ret = self . app . post_json ( api_url_for ( '<STR_LIT>' ) , badge , auth = self . user . auth ) <EOL> self . user_settings . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> assert_equals ( ret . content_type , '<STR_LIT:application/json>' ) <EOL> assert_true ( ret . json [ '<STR_LIT>' ] in [ badge . _id for badge in self . user_settings . badges ] ) <EOL> with self . app . app . test_request_context ( ) : <EOL> bstr = str ( self . user_settings . badges [ <NUM_LIT:0> ] . to_openbadge ( ) ) <EOL> assert_false ( '<STR_LIT:>>' in bstr ) <EOL> assert_false ( '<STR_LIT:<>' in bstr ) <EOL> def test_revoke_badge ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . 
user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> revoke = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( revoke , <EOL> { <EOL> '<STR_LIT:id>' : assertion . _id , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> self . user_settings . reload ( ) <EOL> assertion . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_true ( get_node_badges ( self . project ) [ <NUM_LIT:0> ] . _id , assertion . _id ) <EOL> assert_true ( assertion . revoked ) <EOL> assert_true ( assertion . _id in self . user_settings . revocation_list ) <EOL> assert_equals ( len ( self . user_settings . revocation_list ) , <NUM_LIT:1> ) <EOL> def test_revoke_badge_reason ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> revoke = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( revoke , <EOL> { <EOL> '<STR_LIT:id>' : assertion . 
_id , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> self . user_settings . reload ( ) <EOL> assertion . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_true ( get_node_badges ( self . project ) [ <NUM_LIT:0> ] . _id , assertion . _id ) <EOL> assert_true ( assertion . _id in self . user_settings . revocation_list ) <EOL> assert_equals ( len ( self . user_settings . revocation_list ) , <NUM_LIT:1> ) <EOL> assert_true ( assertion . revoked ) <EOL> assert_equals ( self . user_settings . revocation_list [ assertion . _id ] , '<STR_LIT>' ) <EOL> def test_revoke_badge_no_addon ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> revoke = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> self . user . delete_addon ( '<STR_LIT>' ) <EOL> self . user . save ( ) <EOL> self . user . reload ( ) <EOL> ret = self . app . post_json ( revoke , <EOL> { <EOL> '<STR_LIT:id>' : assertion . _id , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , auth = self . user . auth , expect_errors = True ) <EOL> self . project . reload ( ) <EOL> self . user_settings . reload ( ) <EOL> assertion . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> assert_false ( assertion . revoked ) <EOL> assert_true ( get_node_badges ( self . project ) [ <NUM_LIT:0> ] . _id , assertion . _id ) <EOL> assert_false ( assertion . 
_id in self . user_settings . revocation_list ) <EOL> def test_revoke_didnt_award ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> revoke = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> user2 = AuthUserFactory ( ) <EOL> user2 . add_addon ( '<STR_LIT>' , override = True ) <EOL> user2 . save ( ) <EOL> user2 . reload ( ) <EOL> ret = self . app . post_json ( revoke , <EOL> { <EOL> '<STR_LIT:id>' : assertion . _id , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , auth = user2 . auth , expect_errors = True ) <EOL> self . project . reload ( ) <EOL> self . user_settings . reload ( ) <EOL> assertion . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> assert_false ( assertion . revoked ) <EOL> assert_true ( get_node_badges ( self . project ) [ <NUM_LIT:0> ] . _id , assertion . _id ) <EOL> assert_false ( assertion . _id in self . user_settings . revocation_list ) <EOL> def test_issuer_html ( self ) : <EOL> pass <EOL> def test_revoke_bad_aid ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . 
status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> revoke = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( revoke , <EOL> { <EOL> '<STR_LIT:id>' : '<STR_LIT>' , <EOL> '<STR_LIT>' : '<STR_LIT>' <EOL> } , auth = self . user . auth , expect_errors = True ) <EOL> self . project . reload ( ) <EOL> self . user_settings . reload ( ) <EOL> assertion . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT> ) <EOL> assert_false ( assertion . revoked ) <EOL> assert_true ( get_node_badges ( self . project ) [ <NUM_LIT:0> ] . _id , assertion . _id ) <EOL> assert_false ( assertion . _id in self . user_settings . revocation_list ) <EOL> def test_system_badge_awarder ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> self . user_settings . badges [ <NUM_LIT:0> ] . make_system_badge ( ) <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> assert_equals ( assertion . awarder . _id , self . user_settings . _id ) <EOL> def test_badge_awarder ( self ) : <EOL> badgeid = self . user_settings . badges [ <NUM_LIT:0> ] . _id <EOL> initnum = get_node_badges ( self . project ) . count ( ) <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . 
post_json ( url , { '<STR_LIT>' : badgeid } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> assert_equals ( initnum + <NUM_LIT:1> , get_node_badges ( self . project ) . count ( ) ) <EOL> assertion = get_node_badges ( self . project ) [ <NUM_LIT:0> ] <EOL> assert_equals ( assertion . awarder . _id , self . user_settings . _id ) <EOL> def test_award_times ( self ) : <EOL> badge = self . user_settings . badges [ <NUM_LIT:0> ] <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> self . project . reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> badge . reload ( ) <EOL> assert_equals ( badge . awarded_count , <NUM_LIT:3> ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> badge . reload ( ) <EOL> assert_equals ( badge . awarded_count , <NUM_LIT:5> ) <EOL> def test_unique_awards ( self ) : <EOL> badge = self . user_settings . badges [ <NUM_LIT:0> ] <EOL> assert_true ( self . user_settings . can_award ) <EOL> url = api_url_for ( '<STR_LIT>' , pid = self . project . _id ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> self . project . 
reload ( ) <EOL> assert_equals ( ret . status_int , <NUM_LIT:200> ) <EOL> badge . reload ( ) <EOL> assert_equals ( badge . unique_awards_count , <NUM_LIT:1> ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> ret = self . app . post_json ( url , { '<STR_LIT>' : badge . _id } , auth = self . user . auth ) <EOL> badge . reload ( ) <EOL> assert_equals ( badge . unique_awards_count , <NUM_LIT:1> ) </s>
<s> BOX_KEY = None <EOL> BOX_SECRET = None <EOL> REFRESH_TIME = <NUM_LIT:5> * <NUM_LIT> <EOL> BOX_OAUTH_TOKEN_ENDPOINT = '<STR_LIT>' <EOL> BOX_OAUTH_AUTH_ENDPOINT = '<STR_LIT>' <EOL> BOX_OAUTH_REVOKE_ENDPOINT = '<STR_LIT>' </s>
"""Models for the OSF Dropbox addon: OAuth provider wrapper plus user- and
node-level settings documents.

NOTE(review): string/number literals were masked in this copy of the source
(<STR_LIT>/<NUM_LIT> placeholders); comments below describe only what the
visible structure establishes.
"""
import httplib as http
import logging
import os

from dropbox.client import DropboxOAuth2Flow, DropboxClient
from dropbox.rest import ErrorResponse
from flask import request
import markupsafe
from modularodm import fields

from framework.auth import Auth
from framework.exceptions import HTTPError
from framework.sessions import session
from website.util import web_url_for
from website.addons.base import exceptions
from website.addons.base import AddonOAuthUserSettingsBase, AddonOAuthNodeSettingsBase
from website.addons.base import StorageAddonBase
from website.oauth.models import ExternalProvider
from website.addons.dropbox import settings
from website.addons.dropbox.serializer import DropboxSerializer

logger = logging.getLogger(__name__)


class DropboxProvider(ExternalProvider):
    """OAuth2 provider for Dropbox.

    Delegates the whole OAuth dance to the Dropbox SDK's
    ``DropboxOAuth2Flow`` instead of the generic ``ExternalProvider``
    machinery, which is why the base-class hooks below are ``None``.
    """

    name = '<STR_LIT>'
    short_name = '<STR_LIT>'

    client_id = settings.DROPBOX_KEY
    client_secret = settings.DROPBOX_SECRET

    # Unused: the Dropbox SDK flow (see `oauth_flow`) replaces the generic
    # ExternalProvider URL construction and callback handling.
    auth_url_base = None
    callback_url = None
    handle_callback = None

    @property
    def oauth_flow(self):
        """Build a DropboxOAuth2Flow backed by per-session CSRF state.

        Lazily creates a nested dict in ``session.data`` keyed by this
        provider's ``short_name``; the SDK stores its CSRF token there under
        the '<STR_LIT:state>' key.
        """
        if '<STR_LIT>' not in session.data:
            session.data['<STR_LIT>'] = {}
        if self.short_name not in session.data['<STR_LIT>']:
            session.data['<STR_LIT>'][self.short_name] = {
                '<STR_LIT:state>': None
            }
        return DropboxOAuth2Flow(
            self.client_id,
            self.client_secret,
            redirect_uri=web_url_for(
                '<STR_LIT>',
                service_name=self.short_name,
                _absolute=True
            ),
            session=session.data['<STR_LIT>'][self.short_name],
            csrf_token_session_key='<STR_LIT:state>'
        )

    @property
    def auth_url(self):
        # URL the user is sent to in order to authorize the OSF app.
        return self.oauth_flow.start('<STR_LIT>')

    def auth_callback(self, user):
        """Complete the OAuth flow and attach the external account to *user*.

        Returns None silently when the user declined or the state was bad;
        raises HTTPError(403) for provider/CSRF failures and HTTPError(400)
        for malformed requests.
        """
        try:
            access_token, dropbox_user_id, url_state = self.oauth_flow.finish(request.values)
        except (DropboxOAuth2Flow.NotApprovedException, DropboxOAuth2Flow.BadStateException):
            # User denied access, or the session state expired: not an error.
            return
        except (DropboxOAuth2Flow.ProviderException, DropboxOAuth2Flow.CsrfException):
            raise HTTPError(http.FORBIDDEN)
        except DropboxOAuth2Flow.BadRequestException:
            raise HTTPError(http.BAD_REQUEST)
        self.client = DropboxClient(access_token)
        # Fetch account metadata to label the external account record.
        info = self.client.account_info()
        return self._set_external_account(
            user,
            {
                '<STR_LIT:key>': access_token,
                '<STR_LIT>': info['<STR_LIT>'],
                '<STR_LIT>': info['<STR_LIT>'],
            }
        )


class DropboxUserSettings(AddonOAuthUserSettingsBase):
    """User-level Dropbox addon settings document."""

    oauth_provider = DropboxProvider
    serializer = DropboxSerializer

    def revoke_remote_oauth_access(self, external_account):
        """Best-effort revocation of the token on Dropbox's side.

        ErrorResponse is swallowed deliberately: the token may already be
        invalid or revoked, and local deauthorization should proceed anyway.
        """
        client = DropboxClient(external_account.oauth_key)
        try:
            client.disable_access_token()
        except ErrorResponse:
            pass


class DropboxNodeSettings(StorageAddonBase, AddonOAuthNodeSettingsBase):
    """Node-level Dropbox addon settings: linked folder plus waterbutler glue."""

    oauth_provider = DropboxProvider
    serializer = DropboxSerializer

    # Path of the linked Dropbox folder (None until configured).
    folder = fields.StringField(default=None)
    registration_data = fields.DictionaryField()

    _folder_data = None

    # Cached DropboxProvider instance; built lazily by `api`.
    _api = None

    @property
    def api(self):
        """Authenticated DropboxProvider for this node's external account."""
        if self._api is None:
            self._api = DropboxProvider(self.external_account)
        return self._api

    @property
    def folder_id(self):
        # For Dropbox the folder path doubles as its identifier.
        return self.folder

    @property
    def folder_name(self):
        # Last path component of the linked folder ('' when unset).
        return os.path.split(self.folder or '<STR_LIT>')[<NUM_LIT:1>]

    @property
    def folder_path(self):
        return self.folder

    @property
    def display_name(self):
        return '<STR_LIT>'.format(self.config.full_name, self.folder)

    def clear_settings(self):
        self.folder = None

    def fetch_folder_name(self):
        return self.folder

    def set_folder(self, folder, auth):
        """Link *folder* to this node and record a log entry."""
        self.folder = folder
        self.nodelogger.log(action="<STR_LIT>", save=True)

    def delete(self, save=True):
        # Deauthorize without logging before removing the settings document.
        self.deauthorize(add_log=False)
        super(DropboxNodeSettings, self).delete(save)

    def deauthorize(self, auth=None, add_log=True):
        """Remove folder linkage and authorization, optionally logging it.

        The folder is captured before `clear_settings` so the log entry can
        still reference it.
        """
        folder = self.folder
        self.clear_settings()
        if add_log:
            extra = {'<STR_LIT>': folder}
            self.nodelogger.log(action="<STR_LIT>", extra=extra, save=True)
        self.clear_auth()

    def serialize_waterbutler_credentials(self):
        """Credentials dict for waterbutler; raises AddonError when unauthorized."""
        if not self.has_auth:
            raise exceptions.AddonError('<STR_LIT>')
        return {'<STR_LIT>': self.external_account.oauth_key}

    def serialize_waterbutler_settings(self):
        """Settings dict for waterbutler; raises AddonError when no folder is set."""
        if not self.folder:
            raise exceptions.AddonError('<STR_LIT>')
        return {'<STR_LIT>': self.folder}

    def create_waterbutler_log(self, auth, action, metadata):
        """Record a node log for a waterbutler file *action* on this addon."""
        url = self.owner.web_url_for('<STR_LIT>', path=metadata['<STR_LIT:path>'].strip('<STR_LIT:/>'), provider='<STR_LIT>')
        self.owner.add_log(
            # Action name is formatted into an addon-prefixed log key.
            '<STR_LIT>'.format(action),
            auth=auth,
            params={
                '<STR_LIT>': self.owner.parent_id,
                '<STR_LIT>': self.owner._id,
                '<STR_LIT:path>': metadata['<STR_LIT:path>'],
                '<STR_LIT>': self.folder,
                # View URL plus a suffixed variant (presumably the download
                # URL — actual suffix masked in this copy).
                '<STR_LIT>': {
                    '<STR_LIT>': url,
                    '<STR_LIT>': url + '<STR_LIT>'
                },
            },
        )

    def __repr__(self):
        return u'<STR_LIT>'.format(self=self)

    def before_register_message(self, node, user):
        """Warning message shown before registering a node with auth attached.

        Returns None implicitly when there is nothing to warn about.
        """
        category = node.project_or_component
        if self.user_settings and self.user_settings.has_auth:
            return (
                u'<STR_LIT>'
                u'<STR_LIT>'
                u'<STR_LIT>'
            ).format(category=markupsafe.escape(category))

    # Hook alias expected by the addon framework.
    before_register = before_register_message

    def before_remove_contributor_message(self, node, removed):
        """Warning shown before removing the contributor who owns the auth."""
        if self.user_settings and self.user_settings.owner == removed:
            category = node.project_or_component
            name = removed.fullname
            return (u'<STR_LIT>'
                    u'<STR_LIT>'
                    u'<STR_LIT>'
                    ).format(category=markupsafe.escape(category),
                             name=markupsafe.escape(name))

    # Hook alias expected by the addon framework.
    before_remove_contributor = before_remove_contributor_message

    def after_fork(self, node, fork, user, save=True):
        """Copy settings to a fork; keep auth only when *user* owns it.

        Returns (clone, message) like the base implementation.
        """
        clone, _ = super(DropboxNodeSettings, self).after_fork(
            node=node, fork=fork, user=user, save=False
        )
        if self.user_settings and self.user_settings.owner == user:
            # Forking user owns the credentials: carry them over.
            clone.user_settings = self.user_settings
            message = (
                '<STR_LIT>'
            ).format(
                cat=markupsafe.escape(fork.project_or_component)
            )
        else:
            message = (
                u'<STR_LIT>'
                u'<STR_LIT>'
                u'<STR_LIT>'
            ).format(
                url=fork.web_url_for('<STR_LIT>'),
                cat=markupsafe.escape(fork.project_or_component)
            )
        if save:
            clone.save()
        return clone, message

    def after_remove_contributor(self, node, removed, auth=None):
        """Drop auth when its owner is removed; return an explanatory message.

        Returns None implicitly when the removed contributor did not own the
        authorization.
        """
        if self.user_settings and self.user_settings.owner == removed:
            self.user_settings = None
            self.save()
            message = (
                u'<STR_LIT>'
                u'<STR_LIT>'
            ).format(
                category=markupsafe.escape(node.category_display),
                title=markupsafe.escape(node.title),
                user=markupsafe.escape(removed.fullname)
            )
            # Extra hint (with a settings URL) unless the user removed themself.
            if not auth or auth.user != removed:
                url = node.web_url_for('<STR_LIT>')
                message += (
                    u'<STR_LIT>'
                ).format(url=url)
            return message

    def after_delete(self, node, user):
        self.deauthorize(Auth(user=user), add_log=True)
        self.save()