import copy
import os
import random

import h5py
import torch
from torch.utils.data import DataLoader, Dataset
import tqdm


class CoLDataset(Dataset):
    IGNORE_ID = -100
    sent_strategy = 'first'

    def __init__(self, file_path, tokenizer_name, tokenizer, block_size=512,
                 split_sent=False, voken_dir=None, suffix=None, verbose=False,
                 voken_ablation=None):
        # Open token's hdf5
        token_path = file_path + '.' + tokenizer_name + '.hdf5'
        assert os.path.isfile(token_path)
        if verbose:
            print("-------- Load Data -------")
            print("Load tokens from", token_path)
        self.token_hdf5 = h5py.File(token_path, 'r')
        self.tokenizer = tokenizer
        self.tokens = self.token_hdf5['tokens']
        self.verbose = verbose
        self.voken_ablation = voken_ablation
        self._iter_cnt = 0

        # Open voken's hdf5 and load voken ids
        if voken_dir is not None:
            assert suffix is not None, 'Please provide suffix of the voken, e.g., vg_nococo.5000.'
            self.sent_level = 'sent' in voken_dir
            dset_fname = os.path.split(file_path)[-1]
            voken_path = os.path.join(voken_dir, f"{dset_fname}.{suffix}.hdf5")
            voken_ids_path = os.path.join(voken_dir, f"{dset_fname}.{suffix}.ids")
            if verbose:
                print("Load vokens from", voken_path)
            self.voken_hdf5 = h5py.File(voken_path, 'r')
            self.vokens = self.voken_hdf5['vokens']
            assert len(self.vokens) == len(self.tokens)
            self._voken_ids = list(
                map(lambda x: x.strip(), open(voken_ids_path).readlines()))
            if verbose:
                print("\t with voken size", self.voken_size)
                print("\t top 5 voken ids are:", self._voken_ids[:5])
        else:
            self.vokens = None

        # Split for every block_size tokens.
        # The last block without full length will be dropped.
        num_tokens = len(self.tokens)
        self.starts = list(range(0, num_tokens, block_size))
        self.batches = list(zip(self.starts[:-1], self.starts[1:]))

        manual_filtered = False
        if "en.train.raw" in file_path and tokenizer_name == "bert-base-uncased":
            self.batches = manual_filter(self.batches)
            if verbose:
                print("Data: Manually filter the range for counties.")
            manual_filtered = True

        # batch_info
        if verbose:
            print("Split sent with block size", block_size)
            print(f"Total batches: {len(self.batches)}")
            print(f"Total tokens: {len(self.tokens)}")
            if voken_dir is not None:
                print(f"Total vokens: {len(self.vokens)}")
            if voken_ablation is not None:
                print("The model will process voken ablation strategy:", voken_ablation)
            print()

        block_check(self.batches, block_size, fixed_size=True, manual_filtered=manual_filtered)

        if self.voken_ablation == 'token':
            self._voken_ids = list(range(30522))

    @property
    def voken_size(self):
        return len(self._voken_ids)

    @property
    def voken_ids(self):
        return copy.copy(self._voken_ids)

    def assert_equal_vokens(self, dataset):
        assert self.voken_size == dataset.voken_size
        for vid, vid1 in zip(self.voken_ids, dataset.voken_ids):
            assert vid == vid1

    def __len__(self):
        return len(self.batches) - 1

    def __getitem__(self, item):
        token_start, token_end = self.batches[item]
        if self._iter_cnt < 5 and self.verbose:
            print(f"Data Loader: data iteration {self._iter_cnt}, "
                  f"with range {token_start} to {token_end}.")
            self._iter_cnt += 1
        tokens = list(self.tokens[token_start: token_end])
        token_tensor = torch.tensor(
            self.tokenizer.build_inputs_with_special_tokens(tokens),
            dtype=torch.long)
        if self.vokens is not None:
            vokens = list(self.vokens[token_start: token_end])
            vokens = self.maybe_do_sent_level(vokens)
            vokens = self.maybe_do_ablation_study(vokens, tokens)
            voken_tensor = torch.tensor(
                [self.IGNORE_ID] + vokens + [self.IGNORE_ID],
                dtype=torch.long)
            return token_tensor, voken_tensor
        else:
            return token_tensor

    def maybe_do_sent_level(self, vokens):
        if not self.sent_level:
            return vokens
        else:
            if self.sent_strategy == 'all':
                vokens = [(-voken - 1 if voken < 0 else voken)
                          for voken in vokens]
            elif self.sent_strategy == 'first':
                vokens = [(self.IGNORE_ID if voken < 0 else voken)
                          for voken in vokens]
            return vokens

    def maybe_do_ablation_study(self, vokens, tokens):
        if self.voken_ablation is None:
            return vokens
        else:
            if self._iter_cnt < 5 and self.verbose:
                print("Before voken ablation: ", vokens)
            if self.voken_ablation == 'random':
                vokens = [random.randint(0, self.voken_size - 1)
                          for _ in range(len(vokens))]
            elif self.voken_ablation == 'shuffle':
                random.shuffle(vokens)
            elif self.voken_ablation == 'reverse':
                vokens = vokens[::-1]
            elif self.voken_ablation == 'token':
                vokens = tokens
            if self._iter_cnt < 5 and self.verbose:
                print("After voken ablation: ", vokens)
            return vokens

    def get_item_info(self, item):
        token_start = self.batches[item]
        token_end = self.batches[item + 1]
        return token_start, token_end

    def __del__(self):
        self.token_hdf5.close()
        if self.vokens is not None:
            self.voken_hdf5.close()


FORBIDDEN_RANGE = (
    119314944,  # Start of iter 3700
    187053048,  # End of iter 5800
)


def intersect(x, y):
    x1, x2 = x
    y1, y2 = y
    # Disjoint iff x ends before y starts, or x starts after y ends:
    # Case 1: [   x    )[   y    )
    # Case 2: [   y    )[   x    )
    # (the original checked `x2 >= y2`, which let blocks overlapping the end
    # of y slip through; `x1 >= y2` is the correct disjointness test)
    if x2 <= y1 or x1 >= y2:
        return False
    return True


def manual_filter(batches):
    batches = list(filter(
        lambda x: not intersect(x, FORBIDDEN_RANGE),
        batches))
    return batches


def block_check(batches, block_size, fixed_size=False, manual_filtered=False):
    """
    Check whether the batches satisfy the following requirements.
        1. Monotonic
        2. Mutually exclusive
        3. Range < block_size
    """
    last_end = 0
    for start_token, end_token in batches:
        assert last_end <= start_token
        if fixed_size:
            assert (end_token - start_token) == block_size, \
                'len([%d, %d)) != %d' % (start_token, end_token, block_size)
        else:
            assert (end_token - start_token) <= block_size, \
                'len([%d, %d)) > %d' % (start_token, end_token, block_size)
        if manual_filtered:
            assert not intersect((start_token, end_token), FORBIDDEN_RANGE)
        last_end = end_token


def get_voken_feats(dataset: CoLDataset, feat_dir: str):
    """
    Load pre-extracted visual features regarding img_ids of vokens.
    """
    set2id2feat = {}
    voken_feats = []
    for voken_id in dataset.voken_ids:
        voken_img_set, voken_img_id = voken_id.split('/')
        if voken_img_set not in set2id2feat:
            img_ids = list(map(
                lambda x: x.rstrip(),
                open(os.path.join(feat_dir, f"{voken_img_set}.ids"))))
            img_feats = h5py.File(
                os.path.join(feat_dir, f"{voken_img_set}.hdf5"), 'r')['keys'][:]
            id2feat = {}
            assert len(img_ids) == len(img_feats)
            for img_id, img_feat in zip(img_ids, img_feats):
                id2feat[img_id] = img_feat
            set2id2feat[voken_img_set] = id2feat
        voken_feats.append(set2id2feat[voken_img_set][voken_img_id])
    return voken_feats
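
# --- Hedged usage sketch (not from the original file) ---
# A minimal illustration of how CoLDataset is meant to be consumed, assuming a
# pre-tokenized "<file>.<tokenizer_name>.hdf5" file already exists on disk and
# a HuggingFace tokenizer is available; the file path below is a placeholder.
from transformers import BertTokenizer
from torch.utils.data import DataLoader

tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
dataset = CoLDataset(
    file_path='data/wiki/en.valid.raw',   # hypothetical corpus path
    tokenizer_name='bert-base-uncased',
    tokenizer=tokenizer,
    block_size=126,
    verbose=True,
)
loader = DataLoader(dataset, batch_size=8, shuffle=True)
for token_tensor in loader:
    # block_size tokens plus the two special tokens added by
    # build_inputs_with_special_tokens ([CLS] ... [SEP]) -> length 128
    print(token_tensor.shape)
    break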
from disco.test import TestCase, TestPipe
from disco.compat import bytes_to_str, str_to_bytes
from disco.worker.pipeline.worker import Stage
from disco.worker.task_io import task_input_stream
import csv
from functools import partial
import hashlib

PREFIX = '/tmp/'


def read(interface, state, label, inp):
    from disco import util
    for e in inp:
        scheme, netloc, _ = util.urlsplit(e)
        fileName, joinColumn = str(netloc).split('?')
        File = open(PREFIX + fileName, 'r')
        col = int(joinColumn)

        reader = csv.reader(File)
        firstRow = True
        for row in reader:
            if firstRow:
                tableName = row[0]
                firstRow = False
            else:
                fullName = tableName + '?' + str(col)
                Hash = int(hashlib.md5(str_to_bytes(row[col])).hexdigest(), 16) % 160
                interface.output(Hash).add(fullName, row)


def join_init(interface, params):
    return {}


def join(interface, state, label, inp):
    for k, v in inp:
        if k not in state:
            state[k] = [v]
        else:
            state[k].append(v)


def join_done(interface, state):
    if len(state) != 2:
        return

    name0 = list(state.keys())[0]
    name1 = list(state.keys())[1]

    _, strCol0 = name0.split('?')
    _, strCol1 = name1.split('?')
    col0 = int(strCol0)
    col1 = int(strCol1)

    for entry0 in state[name0]:
        for entry1 in state[name1]:
            if entry0[col0] == entry1[col1]:
                entry0_copy = entry0[:]
                entry1_copy = entry1[:]
                del entry0_copy[col0]
                del entry1_copy[col1]
                interface.output(0).add(entry0[col0], entry0_copy + entry1_copy)


def combine_init(interface, params, init):
    return init()


def combine(interface, state, label, inp, func):
    for k, v in inp:
        func(state, k, v)


def combine_done(interface, state):
    for k, v in state.items():
        interface.output(0).add(k, v)


def _getPipeline():
    select_stage = [("split", Stage('read', process=read))]
    join_stage = [("group_label", Stage('join', init=join_init, process=join, done=join_done))]

    def combine_row(state, k, v, func):
        if k not in state:
            state[k] = 0
        state[k] = state[k] + func(v)

    node_combine_stage = [("group_node_label",
                           Stage('node_combine',
                                 init=partial(combine_init, init=lambda: {}),
                                 process=partial(combine, func=partial(combine_row, func=lambda v: 1)),
                                 done=combine_done))]

    combine_all_stage = [("group_label",
                          Stage('combine_all',
                                init=partial(combine_init, init=lambda: {}),
                                process=partial(combine, func=partial(combine_row, func=lambda v: v)),
                                done=combine_done))]

    return select_stage + join_stage + node_combine_stage + combine_all_stage


class PipeJob(TestPipe):
    pipeline = _getPipeline()


class JoinTestCase(TestCase):
    # input contains the file name and the join column
    input = ['raw://cities.csv?0', 'raw://packages.csv?3']

    def SetUpFiles(self):
        F1 = open(PREFIX + 'cities.csv', 'w')
        F1.write("cities\nEdmonton,-45\nCalgary,-35\nMontreal,-25\nToronto,-15\n")
        F1.close()

        F2 = open(PREFIX + 'packages.csv', 'w')
        F2.write("packages\n0,2013-10-2,2013-11-3,Edmonton,Calgary\n" +
                 "1,2013-11-3,2013-12-3,Calgary,Toronto\n" +
                 "2,2013-10-4,2013-10-6,Edmonton,Montreal\n")
        F2.close()

    def serve(self, path):
        return path

    def test_per_node(self):
        self.SetUpFiles()
        self.job = PipeJob().run(input=self.test_server.urls(self.input))
        self.assertEqual(sorted(self.results(self.job)),
                         [('Calgary', 1), ('Edmonton', 2)])
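
# --- Hedged illustration (not part of the Disco test above) ---
# The pipeline hash-partitions rows on the join column, groups them per label,
# and equi-joins the two tables inside each partition. A minimal pure-Python
# sketch of that idea, using the same sample data:
import hashlib


def partition(rows, col, buckets=160):
    out = {}
    for row in rows:
        h = int(hashlib.md5(row[col].encode()).hexdigest(), 16) % buckets
        out.setdefault(h, []).append(row)
    return out


cities = [["Edmonton", "-45"], ["Calgary", "-35"]]
packages = [["0", "2013-10-2", "2013-11-3", "Edmonton", "Calgary"]]
p_cities, p_packages = partition(cities, 0), partition(packages, 3)
for h, left in p_cities.items():
    for l in left:
        for r in p_packages.get(h, []):
            if l[0] == r[3]:
                # joined row, with the key column dropped from both sides
                print(l[0], l[1:] + r[:3] + r[4:])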
#! /usr/bin/env python3
# -*- coding: utf-8 -*-
# File   : functional.py
# Author : <NAME>
# Email  : <EMAIL>
# Date   : 03/03/2018
#
# This file is part of Jacinle.
# Distributed under terms of the MIT license.

import math

from PIL import Image
import numpy as np

import torchvision.transforms.functional as TF
import jactorch.transforms.image.functional as jac_tf
from jacinle.utils.argument import get_2dshape


def normalize_coor(img, coor):
    coor = coor.copy()
    coor[:, 0] /= img.width
    coor[:, 1] /= img.height
    return img, coor


def denormalize_coor(img, coor):
    coor = coor.copy()
    coor[:, 0] *= img.width
    coor[:, 1] *= img.height
    return img, coor


def crop(img, coor, i, j, h, w):
    coor = coor.copy()
    coor[:, 0] = (coor[:, 0] - j / img.width) * (img.width / w)
    coor[:, 1] = (coor[:, 1] - i / img.height) * (img.height / h)
    return TF.crop(img, i, j, h, w), coor


def center_crop(img, coor, output_size):
    output_size = get_2dshape(output_size)
    w, h = img.size
    th, tw = output_size
    i = int(round((h - th) / 2.))
    j = int(round((w - tw) / 2.))
    return crop(img, coor, i, j, th, tw)


def pad(img, coor, padding, mode='constant', fill=0):
    if isinstance(padding, int):
        padding = (padding, padding, padding, padding)
    elif len(padding) == 2:
        padding = (padding[0], padding[1], padding[0], padding[1])
    else:
        assert len(padding) == 4

    img_new = jac_tf.pad(img, padding, mode=mode, fill=fill)
    coor = coor.copy()
    coor[:, 0] = (coor[:, 0] + padding[0] / img.width) * (img.width / img_new.width)
    coor[:, 1] = (coor[:, 1] + padding[1] / img.height) * (img.height / img_new.height)
    return img_new, coor


def hflip(img, coor):
    coor = coor.copy()
    coor[:, 0] = 1 - coor[:, 0]
    return TF.hflip(img), coor


def vflip(img, coor):
    coor = coor.copy()
    coor[:, 1] = 1 - coor[:, 1]
    return TF.vflip(img), coor


def resize(img, coor, size, interpolation=Image.BILINEAR):
    # Assuming coordinates are 0/1-normalized.
    return TF.resize(img, size, interpolation=interpolation), coor


def resized_crop(img, coor, i, j, h, w, size, interpolation=Image.BILINEAR):
    img, coor = crop(img, coor, i, j, h, w)
    img, coor = resize(img, coor, size, interpolation)
    return img, coor


def refresh_valid(img, coor, force=False):
    if coor.shape[1] == 2:
        if force:
            # keep the column 2-D so the concatenation is well-formed
            coor = np.concatenate([coor, np.ones_like(coor[:, :1])], axis=1)
        else:
            return img, coor
    assert coor.shape[1] == 3, 'Support only (x, y, valid) or (x, y) typed coordinates.'
    out = []
    for x, y, v in coor:
        valid = (v == 1) and (x >= 0) and (x < img.width) and (y >= 0) and (y < img.height)
        if valid:
            out.append((x, y, v))
        else:
            out.append((0., 0., 0.))
    return img, np.array(out, dtype='float32')


def rotate(img, coor, angle, resample, crop_, expand, center=None, translate=None):
    assert translate is None
    img_new = TF.rotate(img, angle, resample=resample, expand=expand, center=center)
    matrix, extra_crop = get_rotation_matrix(img, angle, crop_, expand, center, translate)
    _, coor = denormalize_coor(img, coor)
    for i in range(coor.shape[0]):
        coor[i, :2] = apply_affine_transform(*coor[i, :2], matrix)
    _, coor = normalize_coor(img_new, coor)
    if extra_crop is not None:
        img_new, coor = crop(img_new, coor, *extra_crop)
    return img_new, coor


def pad_multiple_of(img, coor, multiple, mode='constant', fill=0):
    h, w = img.height, img.width
    hh = h - h % multiple + multiple * int(h % multiple != 0)
    ww = w - w % multiple + multiple * int(w % multiple != 0)
    if h != hh or w != ww:
        return pad(img, coor, (0, 0, ww - w, hh - h), mode=mode, fill=fill)
    return img, coor


def get_rotation_matrix(image, angle, crop, expand, center, translate):
    w, h = image.size
    if translate is None:
        translate = (0, 0)
    if center is None:
        center = (w / 2.0, h / 2.0)

    angle = math.radians(angle % 360)
    matrix = [
        round(math.cos(angle), 15), round(math.sin(angle), 15), 0.0,
        round(-math.sin(angle), 15), round(math.cos(angle), 15), 0.0
    ]
    matrix[2], matrix[5] = apply_affine_transform(-center[0], -center[1], matrix)
    matrix[2] += center[0] + translate[0]
    matrix[5] += center[1] + translate[1]
    # print('debug', angle, translate, center, matrix, apply_affine_transform(0.5, 0.5, matrix))

    if crop or expand:
        xx = []
        yy = []
        for x, y in ((0, 0), (w, 0), (w, h), (0, h)):
            x, y = apply_affine_transform(x, y, matrix)
            xx.append(x)
            yy.append(y)
        xx.sort()
        yy.sort()

    extra_crop = None
    if crop:
        assert not expand, 'Cannot use both expand and crop.'
        nw = int(math.ceil(xx[2]) - math.floor(xx[1]))
        nh = int(math.ceil(yy[2]) - math.floor(yy[1]))
        # CAUTION! extra_crop is of format (dy, dx, h, w)
        extra_crop = ((h - nh) // 2, (w - nw) // 2, nh, nw)
    if expand:
        nw = int(math.ceil(xx[3]) - math.floor(xx[0]))
        nh = int(math.ceil(yy[3]) - math.floor(yy[0]))
        matrix[2] += (nw - w) / 2.
        matrix[5] += (nh - h) / 2.
    return matrix, extra_crop


def apply_affine_transform(x, y, matrix):
    (a, b, c, d, e, f) = matrix
    return a * x + b * y + c, d * x + e * y + f
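
# --- Hedged usage sketch (not part of Jacinle) ---
# These helpers keep keypoint coordinates consistent with image transforms by
# carrying an (N, 2) array of 0/1-normalized (x, y) points alongside the PIL
# image. A minimal example, assuming Jacinle and torchvision are installed:
from PIL import Image
import numpy as np

img = Image.new('RGB', (200, 100))
pts = np.array([[50.0, 25.0], [150.0, 75.0]], dtype='float32')  # pixel coords
img, pts = normalize_coor(img, pts)    # -> [[0.25, 0.25], [0.75, 0.75]]
img, pts = hflip(img, pts)             # x -> 1 - x, image mirrored to match
img, pts = denormalize_coor(img, pts)  # back to pixels: [[150, 25], [50, 75]]
print(pts)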
#########
# GLOBALS
#########

from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler


######
# MAIN
######

class Model(object):
    def __init__(self, training_data, hyperopt=False):
        self.scaler = StandardScaler()
        self.scaler.fit(training_data.drop("Trend", axis=1))

        # NOTE: the L1 penalty needs a compatible solver in recent
        # scikit-learn releases (the default "lbfgs" only supports L2).
        self.model = LogisticRegression(penalty="l1", tol=.001, C=1000,
                                        max_iter=150, solver="liblinear")

        normalized_training_data = self.scaler.transform(training_data.drop("Trend", axis=1))
        self.model.fit(normalized_training_data, training_data["Trend"])

    ## Public Methods ##

    def predict(self, vector):
        return self.model.predict(self.scaler.transform(vector.reshape(1, -1)))
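
# --- Hedged usage sketch (not from the original project) ---
# Model expects a pandas DataFrame whose "Trend" column is the label and whose
# remaining columns are features; the tiny synthetic frame below is purely
# illustrative.
import numpy as np
import pandas as pd

rng = np.random.default_rng(0)
df = pd.DataFrame({
    "price_delta": rng.normal(size=100),
    "volume_delta": rng.normal(size=100),
})
df["Trend"] = (df["price_delta"] > 0).astype(int)  # toy label

model = Model(df)
print(model.predict(np.array([0.5, -0.2])))        # e.g. array([1])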
DATASET_REGISTRY = {}


def register_dataset(name: str):
    def register_dataset_func(func):
        DATASET_REGISTRY[name] = func()
        # Return the decorated object so the decorated name stays bound
        # (the original returned None, which rebound the name to None).
        return func
    return register_dataset_func
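
# --- Hedged usage sketch ---
# How such a registry decorator is typically used; the dataset class below is
# hypothetical. Note the registry stores an *instance*, created at import time.
@register_dataset('toy')
class ToyDataset:
    def __len__(self):
        return 0


print(DATASET_REGISTRY['toy'])  # a ToyDataset instance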
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import torch

from third_party.models.base_model import BaseModel
from . import networks


class Kp2uvModel(BaseModel):
    """This class implements the keypoint-to-UV model (inference only)."""

    @staticmethod
    def modify_commandline_options(parser, is_train=True):
        parser.set_defaults(dataset_mode='kpuv')
        return parser

    def __init__(self, opt):
        """Initialize this model class.

        Parameters:
            opt -- test options
        """
        BaseModel.__init__(self, opt)
        self.visual_names = ['keypoints', 'output_uv']
        self.model_names = ['Kp2uv']
        self.netKp2uv = networks.define_kp2uv(gpu_ids=self.gpu_ids)
        self.isTrain = False  # only test mode supported
        # Our program will automatically call <model.setup> to define
        # schedulers, load networks, and print networks

    def set_input(self, input):
        """Unpack input data from the dataloader.

        Parameters:
            input: a dictionary that contains the data itself and its metadata information.
        """
        self.keypoints = input['keypoints'].to(self.device)
        self.image_paths = input['path']

    def forward(self):
        """Run forward pass. This will be called by <test>."""
        output = self.netKp2uv.forward(self.keypoints)
        self.output_uv = self.output2rgb(output)

    def output2rgb(self, output):
        """Convert network outputs to RGB image."""
        pred_id, pred_uv = output
        _, pred_id_class = pred_id.max(1)
        pred_id_class = pred_id_class.unsqueeze(1)

        # extract UV from pred_uv (48 channels); select based on class ID
        selected_uv = -1 * torch.ones(
            pred_uv.shape[0], 2, pred_uv.shape[2], pred_uv.shape[3],
            device=pred_uv.device)
        for partid in range(1, 25):
            mask = (pred_id_class == partid).float()
            selected_uv *= (1. - mask)
            selected_uv += mask * pred_uv[:, (partid - 1) * 2:(partid - 1) * 2 + 2]
        pred_uv = selected_uv

        rgb = torch.cat([pred_id_class.float() * 10 / 255. * 2 - 1, pred_uv], 1)
        return rgb

    def optimize_parameters(self):
        pass
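
# --- Hedged illustration (standalone, not part of the model above) ---
# The channel-selection trick in output2rgb, reduced to a self-contained toy:
# for each pixel, pick the two UV channels belonging to the predicted part ID.
import torch

n, h, w = 1, 4, 4
pred_id = torch.randn(n, 25, h, w)      # background + 24 part logits
pred_uv = torch.rand(n, 48, h, w)       # 24 parts x (u, v) channels
part = pred_id.argmax(1, keepdim=True)  # (n, 1, h, w) predicted part IDs

selected = -torch.ones(n, 2, h, w)
for pid in range(1, 25):
    mask = (part == pid).float()
    selected = selected * (1. - mask) + mask * pred_uv[:, (pid - 1) * 2:(pid - 1) * 2 + 2]
print(selected.shape)  # torch.Size([1, 2, 4, 4]); background pixels stay -1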
import torch
import torchvision
import torchvision.datasets as datasets
import sys
import numpy as np
import torch.utils.data as utils
from colour import Color
from os.path import join as oj

# Training set: plant a decoy patch whose intensity anti-correlates with the label.
mnist_trainset = datasets.MNIST(root='../data', train=True, download=True, transform=None)
color_x = mnist_trainset.data[:, None].numpy().astype(np.float32)
color_y = mnist_trainset.targets.numpy().copy()
choice_1 = np.random.choice(2, size=len(color_x)) * 23
choice_2 = np.random.choice(2, size=len(color_x)) * 23
for i in range(len(color_x)):
    color_x[i, :, choice_1[i]:choice_1[i] + 5, choice_2[i]:choice_2[i] + 5] = 255 - 25 * color_y[i]
color_x /= color_x.max()
color_x = color_x * 2 - 1
np.save(oj("../../data/ColorMNIST", "train_x_decoy.npy"), color_x)

# Test set: the decoy patch correlates the opposite way.
mnist_testset = datasets.MNIST(root='../data', train=False, download=True, transform=None)
color_x = mnist_testset.data[:, None].numpy().astype(np.float32)
color_y = mnist_testset.targets.numpy().copy()
choice_1 = np.random.choice(2, size=len(color_x)) * 23
choice_2 = np.random.choice(2, size=len(color_x)) * 23
for i in range(len(color_x)):
    color_x[i, :, choice_1[i]:choice_1[i] + 5, choice_2[i]:choice_2[i] + 5] = 0 + 25 * color_y[i]
color_x /= color_x.max()
color_x = color_x * 2 - 1
np.save(oj("../data/ColorMNIST", "test_x_decoy.npy"), color_x)
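
# --- Hedged sanity check (not in the original script) ---
# Quick verification that a saved decoy array has the expected shape and the
# [-1, 1] range produced above; the path mirrors the save call for the test set.
x = np.load(oj("../data/ColorMNIST", "test_x_decoy.npy"))
print(x.shape, x.min(), x.max())  # (10000, 1, 28, 28) -1.0 1.0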
import sys

try:
    from _pydevd_bundle import pydevd_bytecode_utils
except ImportError:
    pass

import pytest

pytestmark = pytest.mark.skipif(sys.version_info[0] < 3, reason='Only available for Python 3.')


@pytest.fixture(autouse=True, scope='function')
def enable_strict():
    # In tests enable strict mode (in regular operation it'll be False and will just ignore
    # bytecodes we still don't handle as if it didn't change the stack).
    pydevd_bytecode_utils.STRICT_MODE = True
    yield
    pydevd_bytecode_utils.STRICT_MODE = False


def check(found, expected):
    assert len(found) == len(expected), '%s != %s' % (found, expected)

    last_offset = -1
    for f, e in zip(found, expected):
        try:
            if isinstance(e.name, (list, tuple, set)):
                assert f.name in e.name
            else:
                assert f.name == e.name
            assert f.is_visited == e.is_visited
            assert f.line == e.line
            assert f.call_order == e.call_order
        except AssertionError as exc:
            raise AssertionError('%s\nError with: %s - %s' % (exc, f, e))

        # We can't check the offset because it may be different among different python versions
        # so, just check that it's always in order.
        assert f.offset > last_offset
        last_offset = f.offset


def collect_smart_step_into_variants(*args, **kwargs):
    try:
        return pydevd_bytecode_utils.calculate_smart_step_into_variants(*args, **kwargs)
    except:
        # In a failure, rerun with DEBUG!
        debug = pydevd_bytecode_utils.DEBUG
        pydevd_bytecode_utils.DEBUG = True
        try:
            return pydevd_bytecode_utils.calculate_smart_step_into_variants(*args, **kwargs)
        finally:
            pydevd_bytecode_utils.DEBUG = debug


def check_names_from_func_str(func_str, expected):
    locs = {}
    exec(func_str, globals(), locs)
    function = locs['function']

    class Frame:
        f_code = function.__code__
        f_lasti = 0

    found = collect_smart_step_into_variants(
        Frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, expected)


def test_smart_step_into_bytecode_info():
    from _pydevd_bundle.pydevd_bytecode_utils import Variant

    def function():

        def some(arg):
            pass

        def call(arg):
            pass

        yield sys._getframe()
        call(some(call(some())))

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)

    check(found, [
        Variant(name=('_getframe', 'sys'), is_visited=True, line=8, offset=20, call_order=1),
        Variant(name='some', is_visited=False, line=9, offset=34, call_order=1),
        Variant(name='call', is_visited=False, line=9, offset=36, call_order=1),
        Variant(name='some', is_visited=False, line=9, offset=38, call_order=2),
        Variant(name='call', is_visited=False, line=9, offset=40, call_order=2),
    ])


def check_name_and_line(found, expected):
    names_and_lines = set()
    for variant in found:
        if variant.children_variants:
            for v in variant.children_variants:
                names_and_lines.add((v.name + (' (in %s)' % variant.name), v.line))
        else:
            names_and_lines.add((variant.name, variant.line))

    if names_and_lines != set(expected):
        raise AssertionError('Found: %s' % (
            sorted(names_and_lines, key=lambda tup: tuple(reversed(tup))),))


def test_smart_step_into_bytecode_info_002():

    def function():
        yield sys._getframe()
        completions = foo.bar(
            Something(param1, param2=xxx.yyy),
        )
        call()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('bar', 2), ('Something', 3), ('call', 5)])


def test_smart_step_into_bytecode_info_003():

    def function():
        yield sys._getframe()
        bbb = foo.bar(
            Something(param1, param2=xxx.yyy), {}
        )
        call()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('bar', 2), ('Something', 3), ('call', 5)])


def test_smart_step_into_bytecode_info_004():

    def function():
        yield sys._getframe()
        bbb = foo.bar(
            Something(param1, param2=xxx.yyy), {1: 1}  # BUILD_MAP
        )
        call()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('bar', 2), ('Something', 3), ('call', 5)])


def test_smart_step_into_bytecode_info_005():

    def function():
        yield sys._getframe()
        bbb = foo.bar(
            Something(param1, param2=xxx.yyy), {1: 1, 2: 2}  # BUILD_CONST_KEY_MAP
        )
        call()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('bar', 2), ('Something', 3), ('call', 5)])


def test_smart_step_into_bytecode_info_006():

    def function():
        yield sys._getframe()
        foo.bar(
            Something(),
            {
                1: 1,
                2: [
                    x
                    for x in call()
                ]
            }
        )
        call2()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('bar', 2), ('Something', 3), ('call', 8), ('call2', 12)])


def test_smart_step_into_bytecode_info_007():

    def function():
        yield sys._getframe()
        a[0]

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__getitem__', 2)])


def test_smart_step_into_bytecode_info_008():

    def function():
        yield sys._getframe()
        call([1, 2, 3])

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 2)])


def test_smart_step_into_bytecode_info_009():

    def function():
        yield sys._getframe()
        [1, 2, 3][0]()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__getitem__', 2), ('__getitem__().__call__', 2)])


def test_smart_step_into_bytecode_info_011():

    def function():
        yield sys._getframe()
        [1, 2, 3][0]()()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__getitem__', 2), ('__getitem__().__call__', 2)])


def test_smart_step_into_bytecode_info_012():

    def function():
        yield sys._getframe()
        (lambda a: a)(1)

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('<lambda>', 2)])


def test_smart_step_into_bytecode_info_013():

    def function():
        yield sys._getframe()
        (lambda a: a,)[0](1)

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__getitem__().__call__', 2), ('__getitem__', 2)])


def test_smart_step_into_bytecode_info_014():

    def function():
        yield sys._getframe()
        try:
            raise RuntimeError()
        except Exception:
            call2()
        finally:
            call3()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('RuntimeError', 3), ('call2', 5), ('call3', 7)])


def test_smart_step_into_bytecode_info_015():

    def function():
        yield sys._getframe()
        with call():
            call2()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 2), ('call2', 3)])


def test_smart_step_into_bytecode_info_016():

    def function():
        yield sys._getframe()
        call2(1, 2, a=3, *args, **kwargs)

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call2', 2)])


def test_smart_step_into_bytecode_info_017():

    def function():
        yield sys._getframe()
        call([
            x for x in y
            if x == call2()
        ])

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 2),
                                ('__eq__ (in <listcomp>)', 4), ('call2 (in <listcomp>)', 4)])


def test_smart_step_into_bytecode_info_018():

    def function():
        yield sys._getframe()

        class Foo(object):

            def __init__(self):
                pass

        f = Foo()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('Foo', 8)])


def test_smart_step_into_bytecode_info_019():

    def function():
        yield sys._getframe()

        class Foo(object):

            def __init__(self):
                pass

        f = Foo()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('Foo', 8)])


def test_smart_step_into_bytecode_info_020():

    def function():
        yield sys._getframe()
        for a in call():
            if a != 1:
                a()
                break
            elif a != 2:
                b()
                break
            else:
                continue
        else:
            raise RuntimeError()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 2), ('__ne__', 3), ('a', 4),
                                ('__ne__', 6), ('b', 7), ('RuntimeError', 12)])


def test_smart_step_into_bytecode_info_021():

    def function():
        yield sys._getframe()
        a, b = b, a
        a, b, c = c, a, b
        a, b, c, d = d, c, a, b
        a()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('a', 5)])


def test_smart_step_into_bytecode_info_022():

    def function():
        yield sys._getframe()
        a(
            *{1, 2},
            **{
                1: ('1' + '2'),
                2: tuple(
                    x for x in c()
                    if x == d()
                )
            })
        b()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('a', 2), ('tuple', 6), ('c', 7),
                                ('__eq__ (in <genexpr>)', 8), ('d (in <genexpr>)', 8), ('b', 11)])


def test_smart_step_into_bytecode_info_023():

    def function():
        yield sys._getframe()
        tuple(
            x
            for x in c()
            if x == d()
        )
        tuple(
            x
            for x in c()
            if x == d()
        )

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1),
                                ('tuple', 2), ('c', 4), ('__eq__ (in <genexpr>)', 5), ('d (in <genexpr>)', 5),
                                ('tuple', 7), ('c', 9), ('__eq__ (in <genexpr>)', 10), ('d (in <genexpr>)', 10)])


def test_smart_step_into_bytecode_info_024():
    func = '''def function():
    yield sys._getframe()
    call(a ** b)
    call(a * b)
    call(a @ b)
    call(a / b)
    call(a // b)
    call(a % b)
    call(a + b)
    call(a - b)
    call(a >> b)
    call(a << b)
    call(a & b)
    call(a | b)
    call(a ^ b)
'''
    locs = {}
    exec(func, globals(), locs)
    function = locs['function']
    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [
        ('_getframe', 1), ('__pow__', 2), ('call', 2), ('__mul__', 3), ('call', 3),
        ('__matmul__', 4), ('call', 4), ('__div__', 5), ('call', 5),
        ('__floordiv__', 6), ('call', 6), ('__mod__', 7), ('call', 7),
        ('__add__', 8), ('call', 8), ('__sub__', 9), ('call', 9),
        ('__rshift__', 10), ('call', 10), ('__lshift__', 11), ('call', 11),
        ('__and__', 12), ('call', 12), ('__or__', 13), ('call', 13),
        ('__xor__', 14), ('call', 14)])


def test_smart_step_into_bytecode_info_025():
    func = '''def function():
    yield sys._getframe()
    a **= b
    a *= b
    a @= b
    a /= b
    a //= b
    a %= b
    a += b
    a -= b
    a >>= b
    a <<= b
    a &= b
    a |= b
    a ^= b
    call()
'''
    locs = {}
    exec(func, globals(), locs)
    function = locs['function']
    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 15)])


@pytest.mark.skipif(
    sys.version_info[0:2] < (3, 8),
    reason='Walrus operator only available for Python 3.8 onwards.')
def test_smart_step_into_bytecode_info_026():
    func = '''def function():
    yield sys._getframe()
    call((a:=1))
'''
    locs = {}
    exec(func, globals(), locs)
    function = locs['function']
    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 2)])


def test_smart_step_into_bytecode_info_027():

    def function():
        yield sys._getframe()

        def call():
            pass

        a = [1, call]
        a[:1] = []
        x = a[0]()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__getitem__', 8), ('__getitem__().__call__', 8)])


def test_smart_step_into_bytecode_info_028():

    def function():
        yield sys._getframe()

        def call():
            pass

        a = [1, call]
        a[:1] += []
        x = a[0]()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__getitem__', 7),
                                ('__getitem__', 8), ('__getitem__().__call__', 8)])


def test_smart_step_into_bytecode_info_029():

    def function():
        yield sys._getframe()
        call(
            (+b) + (-b) - (not b) * (~b))

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('__add__', 3), ('__mul__', 3),
                                ('__sub__', 3), ('call', 3)])


def test_smart_step_into_bytecode_info_030():

    def function():
        yield sys._getframe()

        call({a for a in b})

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 3)])


def test_smart_step_into_bytecode_info_031():

    def function():
        yield sys._getframe()

        call({a: b for a in b})

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 3)])


def test_smart_step_into_bytecode_info_032():

    def function():
        yield sys._getframe()
        del a[:2]

        call()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 1), ('call', 4)])


def test_smart_step_into_bytecode_info_033():
    check_names_from_func_str('''def function():
    yield sys._getframe()

    raise call()
''', [('_getframe', 1), ('call', 3)])


@pytest.mark.skipif(
    sys.version_info[0:2] < (3, 6),
    reason='Async only available for Python 3.6 onwards.')
def test_smart_step_into_bytecode_info_034():
    check_names_from_func_str('''async def function():
    await a()
    async for b in c():
        await d()
''', [('a', 1), ('c', 2), ('d', 3)])


def test_smart_step_into_bytecode_info_035():
    check_names_from_func_str('''def function():
    assert 0, 'Foo'
''', [('AssertionError', 1)])


def test_smart_step_into_bytecode_info_036():
    check_names_from_func_str('''def function(a):
    global some_name
    some_name = a
    some_name()
''', [('some_name', 3)])


def test_smart_step_into_bytecode_info_037():
    func = '''def function():
    some_name = 10
    def another():
        nonlocal some_name
        some_name = a
        some_name()
    return another
'''
    locs = {}
    exec(func, globals(), locs)
    function = locs['function']()

    class Frame:
        f_code = function.__code__
        f_lasti = 0

    found = collect_smart_step_into_variants(
        Frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('some_name', 3)])


def test_smart_step_into_bytecode_info_038():
    check_names_from_func_str('''def function():
    try:
        call()
    finally:
        call2()
''', [('call', 2), ('call2', 4)])


def test_smart_step_into_bytecode_info_039():
    check_names_from_func_str('''def function():
    try:
        call()
    except:
        return call2()
    finally:
        return call3()
''', [('call', 2), ('call2', 4), ('call3', 6)])


def test_smart_step_into_bytecode_info_040():
    check_names_from_func_str('''def function():
    a.call = foo()
    a.call()
''', [('foo', 1), ('call', 2)])


def test_smart_step_into_bytecode_info_041():
    check_names_from_func_str('''def function():
    foo = 10
    del foo
    foo = method
    foo()
''', [('foo', 4)])


def test_smart_step_into_bytecode_info_042():
    check_names_from_func_str('''
foo = 10

def function():
    global foo
    foo()
''', [('foo', 2)])


def test_smart_step_into_bytecode_info_043():

    def function(call):
        def another_function():

            yield sys._getframe()

            call()

        for frame in another_function():
            yield frame

    generator = iter(function(lambda: None))
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('_getframe', 3), ('call', 5)])


def test_smart_step_into_bytecode_info_044():
    check_names_from_func_str('''
def function(args):
    call, *c = args
    call(*c)
''', [('call', 2)])


def test_smart_step_into_bytecode_info_045():
    check_names_from_func_str('''
def function():
    x.foo = 10
    del x.foo
    x.foo = lambda: None
    x.foo()
''', [('foo', 4)])


def test_smart_step_into_bytecode_info_046():
    check_names_from_func_str('''
a = 10

def function(args):
    global a
    del a
    a()
''', [('a', 3)])


def test_smart_step_into_bytecode_info_047():
    check_names_from_func_str('''
def function():
    call(a, b=1, *c, **kw)
''', [('call', 1)])


def test_smart_step_into_bytecode_info_048():
    check_names_from_func_str('''
def function(fn):
    fn = call(fn)

    def pa():
        fn()

    return pa()
''', [('call', 1), ('pa', 6)])


def test_smart_step_into_bytecode_info_049():

    def function(foo):

        class SomeClass(object):

            implementation = foo
            implementation()
            f = sys._getframe()

        return SomeClass.f

    frame = function(object)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)
    check_name_and_line(found, [('implementation', 5), ('_getframe', 6)])


def test_smart_step_into_bytecode_info_050():
    check_names_from_func_str('''
def function():
    ('a', 'b').index('x')
''', [('index', 1)])


def test_smart_step_into_bytecode_info_051():
    check_names_from_func_str('''
def function():
    v = 1
    v2 = 2
    call((f'a{v()!r}', f'b{v2()}'))
''', [('call', 3), ('v', 3), ('v2', 3)])


def test_smart_step_into_bytecode_info_052():
    check_names_from_func_str('''
def function():
    v = 1
    v2 = 2
    call({*v(), *v2()})
''', [('call', 3), ('v', 3), ('v2', 3)])


def test_smart_step_into_bytecode_info_053():
    check_names_from_func_str('''
def function():
    v = 1
    v2 = 2
    call({**v(), **v2()})
''', [('call', 3), ('v', 3), ('v2', 3)])


def test_smart_step_into_bytecode_info_054():
    check_names_from_func_str('''
def function():
    import a
    from a import b
    call()
''', [('call', 3)])


def test_smart_step_into_bytecode_info_055():
    check_names_from_func_str('''
async def function():
    async with lock() as foo:
        await foo()
''', [('lock', 1), ('foo', 2)])


def test_smart_step_into_bytecode_info_056():
    check_names_from_func_str('''
def function(mask_path):
    wc = some_func(
        parsed_content,
        np.array(
            Image.open(mask_path)
        )
    )
''', [('some_func', 1), ('array', 3), ('open', 4)])


def test_smart_step_into_bytecode_info_057():
    check_names_from_func_str('''
def function(mask_path):
    wc = some_func(
        parsed_content,
        np.array(
            my.pack.Image.open(mask_path)
        )
    )
''', [('some_func', 1), ('array', 3), ('open', 4)])


def test_get_smart_step_into_variant_from_frame_offset():
    from _pydevd_bundle.pydevd_bytecode_utils import Variant
    found = [
        Variant(name='_getframe', is_visited=True, line=8, offset=20, call_order=1),
        Variant(name='some', is_visited=False, line=9, offset=34, call_order=1),
        Variant(name='call', is_visited=False, line=9, offset=36, call_order=1),
        Variant(name='some', is_visited=False, line=9, offset=38, call_order=2),
        Variant(name='call', is_visited=False, line=9, offset=40, call_order=2),
    ]

    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(19, found) is None
    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(20, found).offset == 20
    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(33, found).offset == 20
    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(34, found).offset == 34
    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(35, found).offset == 34
    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(36, found).offset == 36
    assert pydevd_bytecode_utils.get_smart_step_into_variant_from_frame_offset(44, found).offset == 40


def test_smart_step_into_bytecode_info_00eq():
    from _pydevd_bundle.pydevd_bytecode_utils import Variant

    def function():
        a = 1
        b = 1
        if a == b:
            pass
        if a != b:
            pass
        if a > b:
            pass
        if a >= b:
            pass
        if a < b:
            pass
        if a <= b:
            pass
        if a is b:
            pass

        yield sys._getframe()

    generator = iter(function())
    frame = next(generator)
    found = collect_smart_step_into_variants(
        frame, 0, 99999, base=function.__code__.co_firstlineno)

    if sys.version_info[:2] < (3, 9):
        check(found, [
            Variant(name='__eq__', is_visited=True, line=3, offset=18, call_order=1),
            Variant(name='__ne__', is_visited=True, line=5, offset=33, call_order=1),
            Variant(name='__gt__', is_visited=True, line=7, offset=48, call_order=1),
            Variant(name='__ge__', is_visited=True, line=9, offset=63, call_order=1),
            Variant(name='__lt__', is_visited=True, line=11, offset=78, call_order=1),
            Variant(name='__le__', is_visited=True, line=13, offset=93, call_order=1),
            Variant(name='is', is_visited=True, line=15, offset=108, call_order=1),
            Variant(name=('_getframe', 'sys'), is_visited=True, line=18, offset=123, call_order=1),
        ])
    else:
        check(found, [
            Variant(name='__eq__', is_visited=True, line=3, offset=18, call_order=1),
            Variant(name='__ne__', is_visited=True, line=5, offset=33, call_order=1),
            Variant(name='__gt__', is_visited=True, line=7, offset=48, call_order=1),
            Variant(name='__ge__', is_visited=True, line=9, offset=63, call_order=1),
            Variant(name='__lt__', is_visited=True, line=11, offset=78, call_order=1),
            Variant(name='__le__', is_visited=True, line=13, offset=93, call_order=1),
            Variant(name=('_getframe', 'sys'), is_visited=True, line=18, offset=123, call_order=1),
        ])


def _test_find_bytecode():
    import glob
    import dis
    from io import StringIO

    root_dir = 'C:\\bin\\Python310\\Lib\\site-packages\\'

    i = 0
    for filename in glob.iglob(root_dir + '**/*.py', recursive=True):
        print(filename)
        with open(filename, 'r', encoding='utf-8') as stream:
            try:
                contents = stream.read()
            except:
                sys.stderr.write('Unable to read file: %s' % (filename,))
                continue

            code_obj = compile(contents, filename, 'exec')
            s = StringIO()
            dis.dis(code_obj, file=s)
            # https://docs.python.org/3.10/library/dis.html has references to the new opcodes added.
            if 'COPY_DICT_WITHOUT_KEYS' in s.getvalue():
                dis.dis(code_obj)
                raise AssertionError('Found bytecode in: %s' % filename)

        # i += 1
        # if i == 1000:
        #     break
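
# --- Hedged illustration (not one of the tests above) ---
# Many tests above rely on the same trick: a generator that yields its own
# frame via sys._getframe(), so the test can inspect a *live* frame whose code
# object still contains the calls that follow the yield, without running them.
import sys


def function():
    yield sys._getframe()
    print('never reached by the tests')  # bytecode is analyzed, not executed


frame = next(iter(function()))
print(frame.f_code.co_name, frame.f_lineno)  # 'function', line of the yield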
# pylint: disable=arguments-differ, no-member, missing-docstring, invalid-name, line-too-long
from functools import reduce

import torch

from se3cnn.point.kernel import Kernel
from se3cnn.point.radial import ConstantRadialModel


class SortSphericalSignals(torch.nn.Module):
    def __init__(self, Rs):
        super().__init__()
        ljds = []

        j = 0
        for mul, l in Rs:
            d = mul * (2 * l + 1)
            ljds.append((l, j, d))
            j += d

        mixing_matrix = torch.zeros(j, j)

        i = 0
        for _l, j, d in sorted(ljds):
            mixing_matrix[i:i + d, j:j + d] = torch.eye(d)
            i += d

        self.register_buffer('mixing_matrix', mixing_matrix)

    def forward(self, x):
        """
        :param x: tensor [batch, feature, ...]
        """
        output = torch.einsum('ij,zja->zia', (self.mixing_matrix, x.flatten(2))).contiguous()
        return output.view(*x.size())


class ConcatenateSphericalSignals(torch.nn.Module):
    def __init__(self, *Rs):
        super().__init__()
        Rs = reduce(list.__add__, Rs, [])
        self.sort = SortSphericalSignals(Rs)

    def forward(self, *signals):
        combined = torch.cat(signals, dim=1)
        return self.sort(combined)


class SelfInteraction(torch.nn.Module):
    def __init__(self, Rs_in, Rs_out):
        super().__init__()
        self.kernel = Kernel(Rs_in, Rs_out, ConstantRadialModel)

    def forward(self, features):
        """
        :param features: tensor [..., channel]
        :return: tensor [..., channel]
        """
        *size, n = features.size()
        features = features.view(-1, n)
        k = self.kernel(features.new_zeros(features.size(0), 3))
        features = torch.einsum("zij,zj->zi", (k, features))
        features = features.view(*size, -1)
        return features
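
# --- Hedged usage sketch (not part of se3cnn) ---
# SortSphericalSignals reorders the feature channels of a spherical signal so
# that representations are grouped by increasing rotation order l. With
# Rs = [(1, 1), (2, 0)] the input has 1*(2*1+1) + 2*(2*0+1) = 5 channels, and
# the two l=0 channels are moved to the front:
import torch

sort = SortSphericalSignals([(1, 1), (2, 0)])
x = torch.arange(5.0).view(1, 5, 1)  # [batch, feature, points]
print(sort(x).flatten().tolist())    # [3.0, 4.0, 0.0, 1.0, 2.0]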
import warnings

warnings.simplefilter("default")
warnings.warn(
    "[pyKeOps]: the kernel_product syntax is deprecated. "
    "Please consider using the LazyTensor helper instead.",
    DeprecationWarning,
)

from .kernels import Kernel, kernel_product, kernel_formulas
from .formula import Formula
from backend.database import (
    Column,
    Model,
    String,
    relationship,
    slugify,
)


@slugify('name')
class Category(Model):
    name = Column(String(32))
    slug = Column(String(32))

    articles = relationship('Article', back_populates='category')
    series = relationship('Series', back_populates='category')

    __repr_props__ = ('id', 'name')

    def __init__(self, name, **kwargs):
        super().__init__(**kwargs)
        self.name = name
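
# --- Hedged usage sketch ---
# Assuming the project's @slugify('name') decorator derives the slug column
# from the name field (the exact hook is project-specific), typical usage
# would look like:
category = Category(name='Machine Learning')
# category.slug == 'machine-learning'  # expected once slugify has run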
import datetime
import operator
import typing
import weakref
from typing import AbstractSet, Any, Dict, FrozenSet, Iterable, Iterator, Optional, Set

from ..definition import Definition
from ..pv import PropertyValue
from ..synonym import Synonym, SynonymData, SynonymType
from ..utils.meta import roundrepr, typechecked
from ..xref import Xref

if typing.TYPE_CHECKING:
    from ..ontology import _DataGraph, Ontology
    from ..relationship import Relationship, RelationshipSet
    from .attributes import Relationships

__all__ = ["EntityData", "Entity", "EntitySet"]

_D = typing.TypeVar("_D", bound="EntityData")
_E = typing.TypeVar("_E", bound="Entity")
_S = typing.TypeVar("_S", bound="EntitySet")


class EntityData:

    id: str
    alternate_ids: Set[str]
    annotations: Set[PropertyValue]
    anonymous: bool
    builtin: bool
    comment: Optional[str]
    consider: Set[str]
    created_by: Optional[str]
    creation_date: Optional[datetime.datetime]
    disjoint_from: Set[str]
    definition: Optional[Definition]
    equivalent_to: Set[str]
    name: Optional[str]
    namespace: Optional[str]
    obsolete: bool
    relationships: Dict[str, Set[str]]
    replaced_by: Set[str]
    subsets: Set[str]
    synonyms: Set[SynonymData]
    union_of: Set[str]
    xrefs: Set[Xref]

    if typing.TYPE_CHECKING:
        __annotations__: Dict[str, str]

    __slots__ = ("__weakref__",) + tuple(__annotations__)  # noqa: E0602


class Entity(typing.Generic[_D, _S]):
    """An entity in the ontology graph.

    With respects to the OBO semantics, an `Entity` is either a term or a
    relationship in the ontology graph. Any entity has a unique identifier as
    well as some common properties.
    """

    if __debug__ or typing.TYPE_CHECKING:

        __data: "weakref.ReferenceType[_D]"
        __slots__: Iterable[str] = ()

        def __init__(self, ontology: "Ontology", data: "_D"):
            self.__data = weakref.ref(data)
            self.__id = data.id
            self.__ontology = ontology

        def _data(self) -> "EntityData":
            rdata = self.__data()
            if rdata is None:
                raise RuntimeError("internal data was deallocated")
            return rdata

    else:

        __slots__: Iterable[str] = ("_data",)  # type: ignore

        def __init__(self, ontology: "Ontology", data: "_D"):
            self._data = weakref.ref(data)  # type: ignore
            self.__ontology = ontology
            self.__id = data.id

    _Set: typing.ClassVar[typing.Type[_S]] = NotImplemented
    _data_getter: typing.Callable[["Ontology"], "_DataGraph"] = NotImplemented

    # --- Private helpers ----------------------------------------------------

    def _ontology(self) -> "Ontology":
        return self.__ontology

    # --- Magic Methods ------------------------------------------------------

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, Entity):
            return self.id == other.id
        return False

    def __lt__(self, other):
        if isinstance(other, Entity):
            return self.id < other.id
        return NotImplemented

    def __le__(self, other):
        if isinstance(other, Entity):
            return self.id <= other.id
        return NotImplemented

    def __gt__(self, other):
        if isinstance(other, Entity):
            return self.id > other.id
        return NotImplemented

    def __ge__(self, other):
        if isinstance(other, Entity):
            return self.id >= other.id
        return NotImplemented

    def __hash__(self):
        return hash((self.id))

    def __repr__(self):
        return roundrepr.make(type(self).__name__, self.id, name=(self.name, None))

    # --- Data descriptors ---------------------------------------------------

    @property
    def alternate_ids(self) -> Set[str]:
        """`set` of `str`: A set of alternate IDs for this entity."""
        return self._data().alternate_ids

    @alternate_ids.setter  # type: ignore
    def alternate_ids(self, ids: Iterable[str]):
        self._data().alternate_ids = set(ids)

    @property
    def annotations(self) -> Set[PropertyValue]:
        """`set` of `PropertyValue`: Annotations relevant to the entity."""
        return self._data().annotations

    @annotations.setter
    def annotations(self, value: Iterable[PropertyValue]) -> None:
        self._data().annotations = set(value)

    @property
    def anonymous(self) -> bool:
        """`bool`: Whether or not the entity has an anonymous id.

        Semantics of anonymous entities are the same as B-Nodes in RDF.
        """
        return self._data().anonymous

    @anonymous.setter
    def anonymous(self, value: bool):
        self._data().anonymous = value

    @property
    def builtin(self) -> bool:
        """`bool`: Whether or not the entity is built-in to the OBO format.

        ``pronto`` uses this tag on the ``is_a`` relationship, which is
        axiomatic to the OBO language but treated as a relationship in the
        library.
        """
        return self._data().builtin

    @builtin.setter  # type: ignore
    @typechecked(property=True)
    def builtin(self, value: bool):
        self._data().builtin = value

    @property
    def comment(self) -> Optional[str]:
        """`str` or `None`: A comment about the current entity.

        Comments in ``comment`` clauses are guaranteed to be conserved by OBO
        parsers and serializers, unlike bang comments. A non `None` `comment`
        is semantically equivalent to a ``rdfs:comment`` in OWL2. When parsing
        from OWL, several RDF comments will be merged together into a single
        ``comment`` clause spanning over multiple lines.
        """
        return self._data().comment

    @comment.setter
    def comment(self, value: Optional[str]):
        self._data().comment = value

    @property
    def consider(self) -> _S:
        """`EntitySet`: A set of potential substitutes for an obsolete term.

        An obsolete entity can provide one or more entities which may be
        appropriate substitutes, but needs to be looked at carefully by a
        human expert before the replacement is done.

        See Also:
            `~Entity.replaced_by`, which provides a set of entities suitable
            for automatic replacement.
        """
        s = self._Set()
        s._ids = self._data().consider
        s._ontology = self._ontology()
        return s

    @consider.setter
    def consider(self, consider: Iterable[_E]) -> None:
        if isinstance(consider, EntitySet):
            data = consider._ids
        else:
            data = {entity.id for entity in consider}
        self._data().consider = data

    @property
    def created_by(self) -> Optional[str]:
        """`str` or `None`: The name of the creator of the entity, if any.

        This property gets translated to a ``dc:creator`` annotation in OWL2,
        which has very broad semantics. Some OBO ontologies may instead use
        other annotation properties such as the ones found in `Information
        Interchange Ontology <http://www.obofoundry.org/ontology/iao.html>`_,
        which can be accessed in the `annotations` attribute of the entity,
        if any.
        """
        return self._data().created_by

    @created_by.setter  # type: ignore
    @typechecked(property=True)
    def created_by(self, value: Optional[str]):
        self._data().created_by = value

    @property
    def creation_date(self) -> Optional[datetime.datetime]:
        """`~datetime.datetime` or `None`: The date the entity was created."""
        return self._data().creation_date

    @creation_date.setter  # type: ignore
    @typechecked(property=True)
    def creation_date(self, value: Optional[datetime.datetime]):
        self._data().creation_date = value

    @property
    def definition(self) -> Optional[Definition]:
        """`Definition` or `None`: The definition of the current entity.

        Definitions in OBO are intended to be human-readable text describing
        the entity, with some additional cross-references if possible.

        Example:
            >>> hp = pronto.Ontology.from_obo_library("hp.obo")
            >>> term = hp["HP:0009882"]
            >>> term.name
            'Short distal phalanx of finger'
            >>> str(term.definition)
            'Short distance from the end of the finger to the most distal...'
            >>> sorted(term.definition.xrefs)
            [Xref('HPO:probinson'), Xref('PMID:19125433')]
        """
        return self._data().definition

    @definition.setter  # type: ignore
    @typechecked(property=True)
    def definition(self, definition: Optional[Definition]):
        self._data().definition = definition

    @property
    def disjoint_from(self) -> _S:
        """`EntitySet`: The entities declared as disjoint from this entity.

        Two entities are disjoint if they have no instances in common. Two
        entities that are disjoint cannot share any subentities, but the
        opposite is not always true.
        """
        s = self._Set()
        s._ids = self._data().disjoint_from
        s._ontology = self._ontology()
        return s

    @disjoint_from.setter
    def disjoint_from(self, disjoint: Iterable[_E]):
        if isinstance(disjoint, EntitySet):
            data = disjoint._ids
        else:
            data = {entity.id for entity in disjoint}
        self._data().disjoint_from = data

    @property
    def equivalent_to(self) -> _S:
        """`EntitySet`: The entities declared as equivalent to this entity."""
        s = self._Set()
        s._ids = self._data().equivalent_to
        s._ontology = self._ontology()
        return s

    @equivalent_to.setter
    def equivalent_to(self, entities: Iterable[_E]):
        if isinstance(entities, EntitySet):
            data = entities._ids
        else:
            data = {entity.id for entity in entities}
        self._data().equivalent_to = data

    @property
    def id(self) -> str:
        """`str`: The OBO identifier of the entity.

        Identifiers can be either prefixed (e.g. ``MS:1000031``), unprefixed
        (e.g. ``part_of``) or given as plain URLs. Identifiers cannot be
        edited.
        """
        return self.__id

    @property
    def name(self) -> Optional[str]:
        """`str` or `None`: The name of the entity.

        Names are formally equivalent to ``rdf:label`` in OWL2. The OBO format
        version 1.4 made names optional to improve OWL interoperability, as
        labels are optional in OWL.
        """
        return self._data().name

    @name.setter  # type: ignore
    @typechecked(property=True)
    def name(self, value: Optional[str]):
        self._data().name = value

    @property
    def namespace(self) -> Optional[str]:
        """`str` or `None`: The namespace this entity is defined in."""
        return self._data().namespace

    @namespace.setter  # type: ignore
    @typechecked(property=True)
    def namespace(self, ns: Optional[str]):
        self._data().namespace = ns

    @property
    def obsolete(self) -> bool:
        """`bool`: Whether or not the entity is obsolete.

        Hint:
            All OBO entities can be made obsolete through a boolean flag, and
            map to one or several replacements. When querying an obsolete
            entity, ``pronto`` will **not** attempt to perform any kind of
            replacement itself::

                >>> ms = pronto.Ontology.from_obo_library("ms.obo")
                >>> term = ms["MS:1001414"]
                >>> term
                Term('MS:1001414', name='MGF scans')
                >>> term.obsolete
                True

            To always get the up-to-date, non-obsolete entity, you could use
            the following snippet, going through a term replacement if there
            is no ambiguity::

                >>> while term.obsolete:
                ...     if len(term.replaced_by) != 1:
                ...         raise ValueError(f"no replacement for {term.id}")
                ...
term = term.replaced_by.pop() >>> term Term('MS:1000797', name='peak list scans') See Also: `~.Entity.consider` and `~Entity.replaced_by`, storing some replacement options for an obsolete entity. """<line_sep><return>self._data().obsolete<block_end>@obsolete.setter# type: ignore @typechecked(property=<true>)<def_stmt>obsolete self value:bool<block_start>self._data().obsolete=value<block_end>@property<def_stmt>relationships self:_E<arrow>"Relationships[_E, _S]"<block_start>"""`~.Relationships`: The links from an entity to other entities. This property returns an object that maps a `~.Relationship` to an `~.EntitySet` (either a `~.TermSet` for `Term.relationships`, or a `~.RelationshipSet` for `Relationship.relationships`). Hint: The mapping is mutable, so relationships can be created or removed using the usual interface of a `~collections.abc.MutableMapping`. Example: Get the ``MS:1000004`` term (*sample mass*) from the Mass Spectrometry ontology:: >>> ms = pronto.Ontology.from_obo_library("ms.obo") >>> sample_mass = ms["MS:1000004"] Then use the ``relationships`` property to get the relevant unit from the Unit Ontology:: >>> sorted(sample_mass.relationships.keys()) [Relationship('has_units', name='has_units')] >>> sample_mass.relationships[ms.get_relationship('has_units')] TermSet({Term('UO:0000021', name='gram')}) """<import_from_stmt>.attributes Relationships<line_sep><return>Relationships(self)<block_end>@relationships.setter<def_stmt>relationships self rels:typing.Mapping["Relationship" Iterable[_E]]<block_start>self._data().relationships={relation.id:set(entity.id<for>entity entities)<for>relation,entities rels.items()}<block_end>@property<def_stmt>replaced_by self<arrow>_S<block_start>"""`EntitySet`: A set of replacements for an obsolete term. An obsolete entity can provide one or more replacements that can safely be used to automatically reassign instances to non-obsolete classes. See Also: `~Entity.consider`, which provides a set of entities suitable for replacement but requiring expert curation. 
"""<line_sep>s=self._Set()<line_sep>s._ids=self._data().replaced_by<line_sep>s._ontology=self._ontology()<line_sep><return>s<block_end>@replaced_by.setter<def_stmt>replaced_by self replacements:Iterable[_E]<arrow><none><block_start><if_stmt>isinstance(replacements EntitySet)<block_start>data=replacements._ids<block_end><else_stmt><block_start>data=set(entity.id<for>entity replacements)<block_end>self._data().replaced_by=data<block_end>@property<def_stmt>subsets self<arrow>FrozenSet[str]<block_start>"""`frozenset` of `str`: The subsets containing this entity."""<line_sep><return>frozenset(self._data().subsets)<block_end>@subsets.setter# type: ignore @typechecked(property=<true>)<def_stmt>subsets self subsets:FrozenSet[str]<block_start>declared=set(s.name<for>s self._ontology().metadata.subsetdefs)<for_stmt>subset subsets<block_start><if_stmt>subset<not><in>declared<block_start><raise>ValueError(f"undeclared subset: {subset!r}")<block_end><block_end>self._data().subsets=set(subsets)<block_end>@property<def_stmt>synonyms self<arrow>FrozenSet[Synonym]<block_start>"""`frozenset` of `Synonym`: A set of synonyms for this entity."""<line_sep>ontology,termdata=self._ontology() self._data()<line_sep><return>frozenset(Synonym(ontology s)<for>s termdata.synonyms)<block_end>@synonyms.setter# type: ignore @typechecked(property=<true>)<def_stmt>synonyms self synonyms:Iterable[Synonym]<block_start>self._data().synonyms={syn._data()<for>syn synonyms}<block_end>@property<def_stmt>union_of self<arrow>_S<block_start>s=self._Set()<line_sep>s._ids=self._data().union_of<line_sep>s._ontology=self._ontology()<line_sep><return>s<block_end>@union_of.setter<def_stmt>union_of self union_of:Iterable[_E]<arrow><none><block_start><if_stmt>isinstance(union_of EntitySet)<block_start>data=union_of._ids<block_end><else_stmt><block_start>data=set()<for_stmt>entity union_of<block_start><if_stmt><not>isinstance(entity Entity)<block_start>ty=type(entity).__name__<line_sep><raise>TypeError(f"expected `Entity`, found {ty}")<block_end>data.add(entity.id)<block_end><block_end><if_stmt>len(data)<eq>1<block_start><raise>ValueError("'union_of' cannot have a cardinality of 1")<block_end>self._data().union_of=data<block_end>@property<def_stmt>xrefs self<arrow>FrozenSet[Xref]<block_start>"""`frozenset` of `Xref`: A set of database cross-references. Xrefs can be used to describe an analogous entity in another vocabulary, such as a database or a semantic knowledge base. """<line_sep><return>frozenset(self._data().xrefs)<block_end>@xrefs.setter# type: ignore @typechecked(property=<true>)<def_stmt>xrefs self xrefs:FrozenSet[Xref]<block_start>self._data().xrefs=set(xrefs)<block_end># --- Convenience methods ------------------------------------------------ <def_stmt>add_synonym self description:str scope:Optional[str]=<none> type:Optional[SynonymType]=<none> xrefs:Optional[Iterable[Xref]]=<none> <arrow>Synonym<block_start>"""Add a new synonym to the current entity. Arguments: description (`str`): The alternate definition of the entity, or a related human-readable synonym. scope (`str` or `None`): An optional synonym scope. Must be either **EXACT**, **RELATED**, **BROAD** or **NARROW** if given. type (`~pronto.SynonymType` or `None`): An optional synonym type. Must be declared in the header of the current ontology. xrefs (iterable of `Xref`, or `None`): A collections of database cross-references backing the origin of the synonym. Raises: ValueError: when given an invalid synonym type or scope. 
Returns: `~pronto.Synonym`: A new synonym for the terms. The synonym is already added to the `Entity.synonyms` collection. """<line_sep># check the type is declared in the current ontology <if_stmt>type<is><none><block_start>type_id:Optional[str]=<none><block_end><else_stmt><block_start><try_stmt><block_start>type_id=self._ontology().get_synonym_type(type.id).id<block_end><except_stmt>KeyError<as>ke<block_start><raise>ValueError(f"undeclared synonym type {type.id!r}")<from>ke<block_end><block_end>data=SynonymData(description scope type_id xrefs=xrefs)<line_sep>self._data().synonyms.add(data)<line_sep><return>Synonym(self._ontology() data)<block_end><block_end><class_stmt>EntitySet(typing.Generic[_E] typing.MutableSet[_E])<block_start>"""A specialized mutable set to store `Entity` instances."""<line_sep># --- Magic methods ------------------------------------------------------ <def_stmt>__init__ self entities:Optional[Iterable[_E]]=<none><arrow><none><block_start>self._ids:Set[str]=set()<line_sep>self._ontology:"Optional[Ontology]"=<none><for_stmt>entity entities<if>entities<is><not><none><else>()<block_start><if_stmt>__debug__<and><not>isinstance(entity Entity)<block_start>err_msg="'entities' must be iterable of Entity, not {}"<line_sep><raise>TypeError(err_msg.format(type(entity).__name__))<block_end><if_stmt>self._ontology<is><none><block_start>self._ontology=entity._ontology()<block_end><if_stmt>self._ontology<is><not>entity._ontology()<block_start><raise>ValueError("entities do not originate from the same ontology")<block_end>self._ids.add(entity.id)<block_end><block_end><def_stmt>__contains__ self other:object<block_start><if_stmt>isinstance(other Entity)<block_start><return>other.id<in>self._ids<block_end><return><false><block_end><def_stmt>__iter__ self<arrow>Iterator[_E]<block_start><return>map(<lambda>t:self._ontology[t] iter(self._ids))<block_end># type: ignore <def_stmt>__len__ self<block_start><return>len(self._ids)<block_end><def_stmt>__repr__ self<block_start>ontology=self._ontology<line_sep>elements=(repr(ontology[id_])<for>id_ self._ids)<line_sep><return>f"{type(self).__name__}({{{', '.join(elements)}}})"<block_end><def_stmt>__iand__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>self._ids<augand>other._ids<block_end><else_stmt><block_start>super().__iand__(other)<block_end><if_stmt><not>self._ids<block_start>self._ontology=<none><block_end><return>self<block_end><def_stmt>__and__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>s=type(self)()<line_sep>s._ids=self._ids.__and__(other._ids)<line_sep>s._ontology=self._ontology<if>s._ids<else><none><block_end><else_stmt><block_start>s=type(self)(super().__and__(other))<block_end><return>s<block_end><def_stmt>__ior__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt><not>isinstance(other EntitySet)<block_start>other=type(self)(other)<block_end>self._ids<augor>other._ids<line_sep>self._ontology=self._ontology<or>other._ontology<line_sep><return>self<block_end><def_stmt>__or__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>s=type(self)()<line_sep>s._ids=self._ids.__or__(other._ids)<line_sep>s._ontology=self._ontology<or>other._ontology<block_end><else_stmt><block_start>s=type(self)(super().__or__(other))<block_end><return>s<block_end><def_stmt>__isub__ self 
other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>self._ids<augsub>other._ids<block_end><else_stmt><block_start>super().__isub__(other)<block_end><if_stmt><not>self._ids<block_start>self._ontology=<none><block_end><return>self<block_end><def_stmt>__sub__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>s=type(self)()<line_sep>s._ids=self._ids.__sub__(other._ids)<line_sep>s._ontology=self._ontology<block_end><else_stmt><block_start>s=type(self)(super().__sub__(other))<block_end><return>s<block_end><def_stmt>__ixor__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>self._ids<augxor>other._ids<line_sep>self._ontology=self._ontology<or>other._ontology<block_end><else_stmt><block_start>super().__ixor__(other)<block_end><if_stmt><not>self._ids<block_start>self._ontology=<none><block_end><return>self<block_end><def_stmt>__xor__ self other:AbstractSet[_E]<arrow>"EntitySet[_E]"<block_start><if_stmt>isinstance(other EntitySet)<block_start>s=type(self)()<line_sep>s._ids=self._ids.__xor__(other._ids)<line_sep>s._ontology=self._ontology<or>other._ontology<block_end><else_stmt><block_start>s=type(self)(super().__xor__(other))<block_end><if_stmt><not>s._ids<block_start>s._ontology=<none><block_end><return>s<block_end># --- Methods ------------------------------------------------------------ <def_stmt>add self entity:_E<arrow><none><block_start><if_stmt>self._ontology<is><none><block_start>self._ontology=entity._ontology()<block_end><elif_stmt>self._ontology<is><not>entity._ontology()<block_start><raise>ValueError("cannot use `Entity` instances from different `Ontology`")<block_end>self._ids.add(entity.id)<block_end><def_stmt>clear self<arrow><none><block_start>self._ids.clear()<line_sep>self._ontology=<none><block_end><def_stmt>discard self entity:_E<arrow><none><block_start>self._ids.discard(entity.id)<block_end><def_stmt>pop self<arrow>_E<block_start>id_=self._ids.pop()<line_sep>entity=self._ontology[id_]# type: ignore <if_stmt><not>self._ids<block_start>self._ontology=<none><block_end><return>entity<block_end># type: ignore <def_stmt>remove self entity:_E<block_start><if_stmt>self._ontology<is><not><none><and>self._ontology<is><not>entity._ontology()<block_start><raise>ValueError("cannot use `Entity` instances from different `Ontology`")<block_end>self._ids.remove(entity.id)<block_end># --- Attributes --------------------------------------------------------- @property<def_stmt>ids self<arrow>FrozenSet[str]<block_start><return>frozenset(map(operator.attrgetter("id") iter(self)))<block_end>@property<def_stmt>alternate_ids self<arrow>FrozenSet[str]<block_start><return>frozenset(id<for>entity self<for>id entity.alternate_ids)<block_end>@property<def_stmt>names self<arrow>FrozenSet[str]<block_start><return>frozenset(map(operator.attrgetter("name") iter(self)))<block_end><block_end>
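The `Entity`/`EntitySet` pair above is the core read/write surface of `pronto`. A minimal usage sketch, assuming a `pronto` installation and access to the OBO library; the ontology name and term ID follow the docstring examples above.

import pronto

# Terms and relationships are Entity subclasses; scalar properties proxy
# through to the weakly-referenced EntityData record.
ms = pronto.Ontology.from_obo_library("ms.obo")
term = ms["MS:1001414"]
print(term.id, term.name, term.obsolete)

# Set-valued properties come back as EntitySet instances, which store only
# identifiers and resolve entities lazily through the owning ontology.
print(term.replaced_by.ids)

# The docstring recipe: follow unambiguous replacements of obsolete terms.
while term.obsolete:
    if len(term.replaced_by) != 1:
        raise ValueError(f"no unambiguous replacement for {term.id}")
    term = term.replaced_by.pop()
print(term)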
"""Definitions for the primitive `composite_full`."""<import_from_stmt>myia.lib SHAPE TYPE VALUE AbstractArray AbstractScalar AbstractType abstract_array distribute force_pending scalar_cast u64tup_typecheck <import_from_stmt>myia.operations primitives<as>P<import_from_stmt>myia.xtype NDArray<def_stmt>pyimpl_composite_full shape fill_value abstract_scalar_type<block_start>"""Implement `composite_full`."""<line_sep>scalar_value=scalar_cast(fill_value abstract_scalar_type)<line_sep><return>distribute(P.scalar_to_array(scalar_value abstract_array(shape scalar_value)) shape )<block_end><async_keyword><def_stmt>infer_composite_full self engine shape:u64tup_typecheck fill_value:AbstractScalar dtype:AbstractType <block_start>"""Infer the return type of primitive `composite_full`."""<line_sep><return>AbstractArray(AbstractScalar({TYPE:<await>force_pending(dtype.element.xtype()) VALUE:fill_value.xvalue() }) {SHAPE:tuple(self.require_constant(e argnum=f'"0:shape[{edx}]"')<for>edx,e enumerate(shape.elements)) TYPE:NDArray } )<block_end>
<import_stmt>__future__<import_stmt>ast<import_stmt>inspect<import_stmt>os<import_stmt>shutil<import_stmt>subprocess<import_stmt>sys<import_from_stmt>functools wraps<import_from_stmt>tempfile mkdtemp<import_from_stmt>textwrap dedent<import_from_stmt>types CodeType TracebackType<import_from_stmt>typing Any Callable Dict List Optional Tuple Type TypeVar Union cast <import_from_stmt>weakref WeakKeyDictionary<import_from_stmt>.cache PatchingCache<if_stmt>sys.version_info<ge>(3 9)<block_start><import_from_stmt>pkgutil resolve_name<as>pkgutil_resolve_name<block_end><else_stmt><block_start><import_from_stmt>pkgutil_resolve_name resolve_name<as>pkgutil_resolve_name<block_end>__all__=("patch" "mc_patchface" "unpatch" "replace" "temp_patch")<line_sep># Public API <def_stmt>patch func:Union[Callable[<ellipsis> Any] str] patch_text:str<arrow><none><block_start>_do_patch(func patch_text forwards=<true>)<block_end>mc_patchface=patch<def_stmt>unpatch func:Union[Callable[<ellipsis> Any] str] patch_text:str<arrow><none><block_start>_do_patch(func patch_text forwards=<false>)<block_end><def_stmt>replace func:Callable[<ellipsis> Any] expected_source:Optional[str] new_source:str <arrow><none><block_start><if_stmt>expected_source<is><not><none><block_start>expected_source=dedent(expected_source)<line_sep>current_source=_get_source(func)<line_sep>_assert_ast_equal(current_source expected_source func.__name__)<block_end>new_source=dedent(new_source)<line_sep>_set_source(func new_source)<block_end>AnyFunc=TypeVar("AnyFunc" bound=Callable[<ellipsis> Any])<class_stmt>temp_patch<block_start><def_stmt>__init__ self func:Union[Callable[<ellipsis> Any] str] patch_text:str<arrow><none><block_start>self.func=func<line_sep>self.patch_text=patch_text<block_end><def_stmt>__enter__ self<arrow><none><block_start>patch(self.func self.patch_text)<block_end><def_stmt>__exit__ self exc_type:Union[Type[BaseException] <none>] exc_val:Union[BaseException <none>] exc_tb:Union[TracebackType <none>] <arrow><none><block_start>unpatch(self.func self.patch_text)<block_end><def_stmt>__call__ self decorable:AnyFunc<arrow>AnyFunc<block_start>@wraps(decorable)<def_stmt>wrapper *args:Any **kwargs:Any<arrow>Any<block_start><with_stmt>self<block_start>decorable(*args **kwargs)<block_end><block_end><return>cast(AnyFunc wrapper)<block_end><block_end># Gritty internals <def_stmt>_do_patch func:Union[Callable[<ellipsis> Any] str] patch_text:str forwards:bool <arrow><none><block_start><if_stmt>isinstance(func str)<block_start>func=cast(Callable[<ellipsis> Any] pkgutil_resolve_name(func))<block_end>source=_get_source(func)<line_sep>patch_text=dedent(patch_text)<line_sep>new_source=_apply_patch(source patch_text forwards func.__name__)<line_sep>_set_source(func new_source)<block_end>_patching_cache=PatchingCache(maxsize=100)<def_stmt>_apply_patch source:str patch_text:str forwards:bool name:str <arrow>str# Cached ? 
<block_start><try_stmt><block_start><return>_patching_cache.retrieve(source patch_text forwards)<block_end><except_stmt>KeyError<block_start><pass><block_end># Write out files tempdir=mkdtemp(prefix="patchy")<try_stmt><block_start>source_path=os.path.join(tempdir name+".py")<with_stmt>open(source_path "w")<as>source_file<block_start>source_file.write(source)<block_end>patch_path=os.path.join(tempdir name+".patch")<with_stmt>open(patch_path "w")<as>patch_file<block_start>patch_file.write(patch_text)<if_stmt><not>patch_text.endswith("\n")<block_start>patch_file.write("\n")<block_end><block_end># Call `patch` command command=["patch"]<if_stmt><not>forwards<block_start>command.append("--reverse")<block_end>command.extend([source_path patch_path])<line_sep>proc=subprocess.Popen(command stderr=subprocess.PIPE stdout=subprocess.PIPE)<line_sep>stdout,stderr=proc.communicate()<if_stmt>proc.returncode<ne>0<block_start>msg="Could not {action} the patch {prep} '{name}'.".format(action=("apply"<if>forwards<else>"unapply") prep=("to"<if>forwards<else>"from") name=name )<line_sep>msg<augadd>" The message from `patch` was:\n{}\n{}".format(stdout.decode("utf-8") stderr.decode("utf-8"))<line_sep>msg<augadd>"\nThe code to patch was:\n{}\nThe patch was:\n{}".format(source patch_text)<line_sep><raise>ValueError(msg)<block_end><with_stmt>open(source_path)<as>source_file<block_start>new_source=source_file.read()<block_end><block_end><finally_stmt><block_start>shutil.rmtree(tempdir)<block_end>_patching_cache.store(source patch_text forwards new_source)<line_sep><return>new_source<block_end><def_stmt>_get_flags_mask <arrow>int<block_start>result=0<for_stmt>name __future__.all_feature_names<block_start>result<augor>getattr(__future__ name).compiler_flag<block_end><return>result<block_end>FEATURE_MASK=_get_flags_mask()<line_sep># Stores the source of functions that have had their source changed # Bad type hints because WeakKeyDictionary only indexable on Python 3.9+ _source_map:Dict[Callable[<ellipsis> Any] str]=cast(Dict[Callable[<ellipsis> Any] str] WeakKeyDictionary() )<def_stmt>_get_source func:Callable[<ellipsis> Any]<arrow>str<block_start>real_func=_get_real_func(func)<try_stmt><block_start><return>_source_map[real_func]<block_end><except_stmt>KeyError<block_start>source=inspect.getsource(func)<line_sep>source=dedent(source)<line_sep><return>source<block_end><block_end><def_stmt>_class_name func:Callable[<ellipsis> Any]<arrow>Optional[str]<block_start>split_name=func.__qualname__.split(".")<try_stmt><block_start>class_name=split_name[-2]<block_end><except_stmt>IndexError<block_start><return><none><block_end><else_stmt><block_start><if_stmt>class_name<eq>"<locals>"<block_start><return><none><block_end><return>class_name<block_end><block_end><def_stmt>_set_source func:Callable[<ellipsis> Any] func_source:str<arrow><none># Fetch the actual function we are changing <block_start>real_func=_get_real_func(func)<line_sep># Figure out any future headers that may be required feature_flags=real_func.__code__.co_flags&FEATURE_MASK<line_sep>class_name=_class_name(func)<def_stmt>_compile code:Union[str ast.Module] flags:int=0 <arrow>Union[CodeType ast.Module]<block_start><return>compile(code "<patchy>" "exec" flags=feature_flags|flags dont_inherit=<true>)<block_end><def_stmt>_parse code:str<arrow>ast.Module<block_start>result=_compile(code flags=ast.PyCF_ONLY_AST)<assert_stmt>isinstance(result ast.Module)<line_sep><return>result<block_end><def_stmt>_process_freevars <arrow>Tuple[str ast.AST List[str]]<block_start>""" Wrap 
the new function in a __patchy_freevars__ method that provides all freevars of the original function. Because the new function must use exactly the same freevars as the original, also append to the new function a body of code that forces use of those freevars (in case the patch drops use of any freevars): def __patchy_freevars__(): eg_free_var_spam = object() <- added in wrapper eg_free_var_ham = object() <- added in wrapper def patched_func(): return some_global(eg_free_var_ham) eg_free_var_spam <- appended to new func body eg_free_var_ham <- appended to new func body return patched_func """<line_sep>_def="def __patchy_freevars__():"<line_sep>fvs=func.__code__.co_freevars<line_sep>fv_body=[f" {fv} = object()"<for>fv fvs]<line_sep>fv_force_use_body=[f" {fv}"<for>fv fvs]<if_stmt>fv_force_use_body<block_start>fv_force_use_ast=_parse("\n".join([_def]+fv_force_use_body))<line_sep>fv_force_use=fv_force_use_ast.body[0].body# type: ignore [attr-defined] <block_end><else_stmt><block_start>fv_force_use=[]<block_end>_ast=_parse(func_source).body[0]<line_sep>_ast.body=_ast.body+fv_force_use# type: ignore [attr-defined] <return>_def _ast fv_body<block_end><def_stmt>_process_method <arrow>ast.Module<block_start>""" Wrap the new method in a class to ensure the same mangling as would have been performed on the original method: def __patchy_freevars__(): class SomeClass(object): def patched_func(self): return some_globals(self.__some_mangled_prop) return SomeClass.patched_func """<line_sep>_def,_ast,fv_body=_process_freevars()<line_sep>_global=(""<if>class_name<in>func.__code__.co_freevars<else>f" global {class_name}\n")<line_sep>class_src="{_global} class {name}(object):\n pass".format(_global=_global name=class_name)<line_sep>ret=" return {class_name}.{name}".format(class_name=class_name name=func.__name__)<line_sep>to_parse="\n".join([_def]+fv_body+[class_src ret])<line_sep>new_source=_parse(to_parse)<line_sep>new_source.body[0].body[-2].body[0]=_ast# type: ignore [attr-defined] <return>new_source<block_end><def_stmt>_process_function <arrow>ast.Module<block_start>_def,_ast,fv_body=_process_freevars()<line_sep>name=func.__name__<line_sep>ret=f" return {name}"<line_sep>_global=[]<if>name<in>func.__code__.co_freevars<else>[f" global {name}"]<line_sep>to_parse="\n".join([_def]+_global+fv_body+[" pass" ret])<line_sep>new_source=_parse(to_parse)<line_sep>new_source.body[0].body[-2]=_ast# type: ignore [attr-defined] <return>new_source<block_end><if_stmt>class_name<block_start>new_source=_process_method()<block_end><else_stmt><block_start>new_source=_process_function()<block_end># Compile and retrieve the new Code object localz:Dict[str Any]={}<line_sep>new_code=cast(CodeType _compile(new_source))<line_sep>exec(new_code dict(func.__globals__) # type: ignore [attr-defined] localz )<line_sep>new_func=localz["__patchy_freevars__"]()<line_sep># Put the new Code object in place real_func.__code__=new_func.__code__<line_sep># Store the modified source. This used to be attached to the function but # that is a bit naughty _source_map[real_func]=func_source<block_end><def_stmt>_get_real_func func:Callable[<ellipsis> Any]<arrow>Callable[<ellipsis> Any]<block_start>""" Duplicates some of the logic implicit in inspect.getsource(). Basically some function-esque things, such as classmethods, aren't functions but we can peel back the layers to the underlying function very easily. 
"""<if_stmt>inspect.ismethod(func)<block_start><return>func.__func__# type: ignore [attr-defined] <block_end><else_stmt><block_start><return>func<block_end><block_end><def_stmt>_assert_ast_equal current_source:str expected_source:str name:str<arrow><none><block_start>current_ast=ast.parse(current_source)<line_sep>expected_ast=ast.parse(expected_source)<if_stmt><not>ast.dump(current_ast)<eq>ast.dump(expected_ast)<block_start>msg=("The code of '{name}' has changed from expected.\n"<concat>"The current code is:\n{current_source}\n"<concat>"The expected code is:\n{expected_source}").format(name=name current_source=current_source expected_source=expected_source)<line_sep><raise>ValueError(msg)<block_end><block_end>
# Python object to represent the bluez DBus device object. Provides properties # and functions to easily interact with the DBus object. # Author: <NAME> # # Copyright (c) 2015 Adafruit Industries # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. <import_from_stmt>past.builtins map<import_stmt>threading<import_stmt>time<import_stmt>uuid<import_stmt>dbus<import_from_stmt>..config TIMEOUT_SEC<import_from_stmt>..interfaces Device<import_from_stmt>..platform get_provider<import_from_stmt>.adapter _INTERFACE<as>_ADAPTER_INTERFACE<import_from_stmt>.gatt BluezGattService BluezGattCharacteristic _SERVICE_INTERFACE _CHARACTERISTIC_INTERFACE<line_sep>_INTERFACE='org.bluez.Device1'<class_stmt>BluezDevice(Device)<block_start>"""Bluez BLE device."""<def_stmt>__init__ self dbus_obj<block_start>"""Create an instance of the bluetooth device from the provided bluez DBus object. """<line_sep>self._device=dbus.Interface(dbus_obj _INTERFACE)<line_sep>self._props=dbus.Interface(dbus_obj 'org.freedesktop.DBus.Properties')<line_sep>self._connected=threading.Event()<line_sep>self._disconnected=threading.Event()<line_sep>self._props.connect_to_signal('PropertiesChanged' self._prop_changed)<block_end><def_stmt>_prop_changed self iface changed_props invalidated_props# Handle property changes for the device. Note this call happens in # a separate thread so be careful to make thread safe changes to state! # Skip any change events not for this adapter interface. <block_start><if_stmt>iface<ne>_INTERFACE<block_start><return><block_end># If connected then fire the connected event. <if_stmt>'Connected'<in>changed_props<and>changed_props['Connected']<eq>1<block_start>self._connected.set()<block_end># If disconnected then fire the disconnected event. <if_stmt>'Connected'<in>changed_props<and>changed_props['Connected']<eq>0<block_start>self._disconnected.set()<block_end><block_end><def_stmt>connect self timeout_sec=TIMEOUT_SEC<block_start>"""Connect to the device. If not connected within the specified timeout then an exception is thrown. """<line_sep>self._connected.clear()<line_sep>self._device.Connect()<if_stmt><not>self._connected.wait(timeout_sec)<block_start><raise>RuntimeError('Exceeded timeout waiting to connect to device!')<block_end><block_end><def_stmt>disconnect self timeout_sec=TIMEOUT_SEC<block_start>"""Disconnect from the device. If not disconnected within the specified timeout then an exception is thrown. 
"""<line_sep>self._disconnected.clear()<line_sep>self._device.Disconnect()<if_stmt><not>self._disconnected.wait(timeout_sec)<block_start><raise>RuntimeError('Exceeded timeout waiting to disconnect from device!')<block_end><block_end><def_stmt>list_services self<block_start>"""Return a list of GattService objects that have been discovered for this device. """<line_sep><return>map(BluezGattService get_provider()._get_objects(_SERVICE_INTERFACE self._device.object_path))<block_end><def_stmt>discover self service_uuids char_uuids timeout_sec=TIMEOUT_SEC<block_start>"""Wait up to timeout_sec for the specified services and characteristics to be discovered on the device. If the timeout is exceeded without discovering the services and characteristics then an exception is thrown. """<line_sep># Turn expected values into a counter of each UUID for fast comparison. expected_services=set(service_uuids)<line_sep>expected_chars=set(char_uuids)<line_sep># Loop trying to find the expected services for the device. start=time.time()<while_stmt><true># Find actual services discovered for the device. <block_start>actual_services=set(self.advertised)<line_sep># Find actual characteristics discovered for the device. chars=map(BluezGattCharacteristic get_provider()._get_objects(_CHARACTERISTIC_INTERFACE self._device.object_path))<line_sep>actual_chars=set(map(<lambda>x:x.uuid chars))<line_sep># Compare actual discovered UUIDs with expected and return true if at # least the expected UUIDs are available. <if_stmt>actual_services<ge>expected_services<and>actual_chars<ge>expected_chars# Found at least the expected services! <block_start><return><true><block_end># Couldn't find the devices so check if timeout has expired and try again. <if_stmt>time.time()-start<ge>timeout_sec<block_start><return><false><block_end>time.sleep(1)<block_end><block_end>@property<def_stmt>advertised self<block_start>"""Return a list of UUIDs for services that are advertised by this device. """<line_sep>uuids=[]<line_sep># Get UUIDs property but wrap it in a try/except to catch if the property # doesn't exist as it is optional. <try_stmt><block_start>uuids=self._props.Get(_INTERFACE 'UUIDs')<block_end><except_stmt>dbus.exceptions.DBusException<as>ex# Ignore error if device has no UUIDs property (i.e. might not be # a BLE device). <block_start><if_stmt>ex.get_dbus_name()<ne>'org.freedesktop.DBus.Error.InvalidArgs'<block_start><raise>ex<block_end><block_end><return>[uuid.UUID(str(x))<for>x uuids]<block_end>@property<def_stmt>id self<block_start>"""Return a unique identifier for this device. On supported platforms this will be the MAC address of the device, however on unsupported platforms (Mac OSX) it will be a unique ID like a UUID. """<line_sep><return>self._props.Get(_INTERFACE 'Address')<block_end>@property<def_stmt>name self<block_start>"""Return the name of this device."""<line_sep><return>self._props.Get(_INTERFACE 'Name')<block_end>@property<def_stmt>is_connected self<block_start>"""Return True if the device is connected to the system, otherwise False. """<line_sep><return>self._props.Get(_INTERFACE 'Connected')<block_end>@property<def_stmt>rssi self<block_start>"""Return the RSSI signal strength in decibels."""<line_sep><return>self._props.Get(_INTERFACE 'RSSI')<block_end>@property<def_stmt>_adapter self<block_start>"""Return the DBus path to the adapter that owns this device."""<line_sep><return>self._props.Get(_INTERFACE 'Adapter')<block_end><block_end>
<import_from_stmt>hummingbot.pmm_script.pmm_script_base PMMScriptBase<class_stmt>HelloWorldPMMScript(PMMScriptBase)<block_start>""" Demonstrates how to send messages using notify and log functions. It also shows how errors and commands are handled. """<def_stmt>on_tick self<block_start><if_stmt>len(self.mid_prices)<l>3<block_start>self.notify("Hello Hummingbots World!")<line_sep>self.log("Hello world logged.")<block_end><elif_stmt>3<le>len(self.mid_prices)<l>5# The statement below raises a ZeroDivisionError; Hummingbot will report it on the log screen. <block_start>_=1/0<block_end><block_end><def_stmt>on_command self cmd args<block_start><if_stmt>cmd<eq>'ping'<block_start>self.notify('pong!')<block_end><else_stmt><block_start>self.notify(f'Unrecognised command: {cmd}')<block_end><block_end><block_end>
""" Django settings for example project. """<import_stmt>os<import_from_stmt>pathlib Path<line_sep># Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR=Path.cwd()<line_sep># SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY="a_not_so_secret_key"<line_sep># SECURITY WARNING: don't run with debug turned on in production! DEBUG=<true><line_sep>ALLOWED_HOSTS=[]<line_sep># Application definition INSTALLED_APPS=['channels' 'django.contrib.admin' 'django.contrib.auth' 'django.contrib.contenttypes' 'django.contrib.sessions' 'django.contrib.messages' 'django.contrib.staticfiles' 'sockpuppet' 'tests.example' ]<line_sep>MIDDLEWARE=['django.middleware.security.SecurityMiddleware' 'django.contrib.sessions.middleware.SessionMiddleware' 'django.middleware.common.CommonMiddleware' 'django.middleware.csrf.CsrfViewMiddleware' 'django.contrib.auth.middleware.AuthenticationMiddleware' 'django.contrib.messages.middleware.MessageMiddleware' 'django.middleware.clickjacking.XFrameOptionsMiddleware' ]<line_sep>ROOT_URLCONF='tests.urls'<line_sep>TEMPLATES=[{'BACKEND':'django.template.backends.django.DjangoTemplates' 'DIRS':[os.path.join(BASE_DIR 'templates') ] 'APP_DIRS':<true> 'OPTIONS':{'context_processors':['django.template.context_processors.debug' 'django.template.context_processors.request' 'django.contrib.auth.context_processors.auth' 'django.contrib.messages.context_processors.messages' ] } } ]<line_sep>ASGI_APPLICATION='sockpuppet.routing.application'<line_sep>WSGI_APPLICATION='tests.example.wsgi.application'<line_sep>CHANNEL_LAYERS={"default":{"BACKEND":"channels.layers.InMemoryChannelLayer"}}<line_sep># CHANNEL_LAYERS = { # "default": { # "BACKEND": "channels_redis.core.RedisChannelLayer", # "CONFIG": { # "hosts": [("127.0.0.1", 6379)], # }, # }, # } LOGGING={'version':1 'disable_existing_loggers':<false> 'root':{'handlers':['console'] 'level':'DEBUG'} 'handlers':{'console':{'level':os.getenv('DJANGO_LOG_LEVEL' 'INFO') 'class':'logging.StreamHandler' 'formatter':'simple'} 'sockpuppet':{'level':'DEBUG' 'class':'logging.StreamHandler' 'formatter':'simple'}} 'formatters':{'verbose':{'format':"[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s" 'datefmt':"%d/%b/%Y %H:%M:%S"} 'simple':{'format':'%(levelname)s %(message)s'} } 'loggers':{'django.db.backends':{# uncomment to see all queries # 'level': 'DEBUG', 'handlers':['console'] } 'sockpuppet':{'level':'DEBUG' 'handlers':['sockpuppet']}}}<line_sep>DATABASES={'default':{'ENGINE':'django.db.backends.sqlite3' 'NAME':os.path.join(BASE_DIR 'db.sqlite3') }}<line_sep># Internationalization LANGUAGE_CODE='en-us'<line_sep>TIME_ZONE='UTC'<line_sep>USE_I18N=<true><line_sep>USE_L10N=<true><line_sep>USE_TZ=<true><line_sep># Static files (CSS, JavaScript, Images) STATIC_URL='/static/'<line_sep>STATICFILES_DIRS=[('js' '{}/jsdist/js/'.format(BASE_DIR)) ]<line_sep>
<import_from_stmt>django.contrib.syndication.views Feed<import_from_stmt>parsing.telegram.parser parse_channel<class_stmt>TelegramChannelFeed(Feed)<block_start>FEED_ITEMS=30<def_stmt>get_object self request channel_name<block_start>limit=int(request.GET.get("size")<or>self.FEED_ITEMS)<line_sep>only=str(request.GET.get("only")<or>"")<line_sep><return>parse_channel(channel_name only_text=only<eq>"text" limit=limit)<block_end><def_stmt>title self obj<block_start><return>obj.name<block_end><def_stmt>items self obj<block_start><return>obj.messages<block_end><def_stmt>link self obj<block_start><return>obj.url<block_end><def_stmt>item_title self item<block_start><return>item.text<block_end><def_stmt>item_description self item<block_start>result=""<if_stmt>item.photo<block_start>result<augadd>f"<img src=\"{item.photo}\"><br>"<block_end><if_stmt>item.text<block_start>result<augadd>str(item.text)<block_end><return>result<block_end><def_stmt>item_link self item<block_start><return>item.url<block_end><def_stmt>item_pubdate self item<block_start><return>item.created_at<block_end><block_end>
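A sketch of wiring this feed into a URLconf, following the standard Django syndication pattern; the route and module path are illustrative. `channel_name` is passed through to `get_object`, and `size`/`only` arrive as query parameters.

# urls.py (illustrative)
from django.urls import path

from .feeds import TelegramChannelFeed

urlpatterns = [
    # e.g. /telegram/some_channel/?size=10&only=text
    path('telegram/<str:channel_name>/', TelegramChannelFeed(),
         name='telegram-feed'),
]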
# Copyright (c) 2016, the GPyOpt Authors # Licensed under the BSD 3-clause license (see LICENSE.txt) #from ..util.general import samples_multidimensional_uniform, multigrid, iroot <import_stmt>numpy<as>np<line_sep>
"""GeoCube Version"""<line_sep>__version__="0.1.1.dev0"<line_sep>
# !/usr/bin/python3 # coding: utf-8 # Copyright 2015-2018 # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>io<import_stmt>os<import_stmt>cv2<import_stmt>numpy<as>np<import_from_stmt>PIL Image<import_from_stmt>pytesseract pytesseract<import_from_stmt>wand.image Image<as>WandImage<import_from_stmt>scipy.ndimage interpolation<as>inter<import_from_stmt>receipt_parser_core Receipt<import_from_stmt>receipt_parser_core.config read_config<line_sep>BASE_PATH=os.getcwd()<line_sep>INPUT_FOLDER=os.path.join(BASE_PATH "data/img")<line_sep>TMP_FOLDER=os.path.join(BASE_PATH "data/tmp")<line_sep>OUTPUT_FOLDER=os.path.join(BASE_PATH "data/txt")<line_sep>ORANGE='\033[33m'<line_sep>RESET='\033[0m'<def_stmt>prepare_folders <block_start>""" :return: void Creates necessary folders """<for_stmt>folder [INPUT_FOLDER TMP_FOLDER OUTPUT_FOLDER]<block_start><if_stmt><not>os.path.exists(folder)<block_start>os.makedirs(folder)<block_end><block_end><block_end><def_stmt>find_images folder<block_start>""" :param folder: str Path to folder to search :return: generator of str List of images in folder """<for_stmt>file os.listdir(folder)<block_start>full_path=os.path.join(folder file)<if_stmt>os.path.isfile(full_path)<block_start><try_stmt><block_start>_=Image.open(full_path)# if constructor succeeds <yield>file<block_end><except_stmt><block_start><pass><block_end><block_end><block_end><block_end><def_stmt>rotate_image input_file output_file angle=90<block_start>""" :param input_file: str Path to image to rotate :param output_file: str Path to output image :param angle: float Angle to rotate :return: void Rotates image and saves result """<with_stmt>WandImage(filename=input_file)<as>img<block_start>width,height=img.size<if_stmt>width<l>height<block_start>angle=0<block_end>print(ORANGE+'\t~: '+RESET+'Rotate image by: '+str(angle)+"°"+RESET)<with_stmt>img.clone()<as>rotated<block_start>rotated.rotate(angle)<line_sep>rotated.save(filename=output_file)<block_end><block_end><block_end><def_stmt>deskew_image image delta=1 limit=5<block_start><def_stmt>determine_score arr angle<block_start>data=inter.rotate(arr angle reshape=<false> order=0)<line_sep>histogram=np.sum(data axis=1)<line_sep>score=np.sum((histogram[1:]-histogram[:-1])<power>2)<line_sep><return>histogram score<block_end>gray=cv2.cvtColor(image cv2.COLOR_BGR2GRAY)<line_sep>thresh=cv2.threshold(gray 0 255 cv2.THRESH_BINARY_INV+cv2.THRESH_OTSU)[1]<line_sep>scores=[]<line_sep>angles=np.arange(-limit limit+delta delta)<for_stmt>angle angles<block_start>histogram,score=determine_score(thresh angle)<line_sep>scores.append(score)<block_end>best_angle=angles[scores.index(max(scores))]<line_sep>(h w)=image.shape[:2]<line_sep>center=(w<floordiv>2 h<floordiv>2)<line_sep>M=cv2.getRotationMatrix2D(center best_angle 1.0)<line_sep>print(ORANGE+'\t~: '+RESET+'Deskew image by: '+str(best_angle)+' angle'+RESET)<line_sep>rotated=cv2.warpAffine(image M (w h) flags=cv2.INTER_CUBIC borderMode=cv2.BORDER_REPLICATE)<line_sep><return>rotated<block_end><def_stmt>run_tesseract input_file output_file 
language="deu"<block_start>""" :param input_file: str Path to image to OCR :param output_file: str Path to output file :return: void Runs tesseract on image and saves result """<line_sep>print(ORANGE+'\t~: '+RESET+'Parse image using pytesseract'+RESET)<line_sep>print(ORANGE+'\t~: '+RESET+'Parse image at: '+input_file+RESET)<line_sep>print(ORANGE+'\t~: '+RESET+'Write result to: '+output_file+RESET)<with_stmt>io.BytesIO()<as>transfer<block_start><with_stmt>WandImage(filename=input_file)<as>img<block_start>img.save(transfer)<block_end><with_stmt>Image.open(transfer)<as>img<block_start>image_data=pytesseract.image_to_string(img lang=language timeout=60 config="--psm 6")<line_sep>out=open(output_file "w" encoding='utf-8')<line_sep>out.write(image_data)<line_sep>out.close()<block_end><block_end><block_end><def_stmt>rescale_image img<block_start>print(ORANGE+'\t~: '+RESET+'Rescale image'+RESET)<line_sep>img=cv2.resize(img <none> fx=1.2 fy=1.2 interpolation=cv2.INTER_CUBIC)<line_sep><return>img<block_end><def_stmt>grayscale_image img<block_start>print(ORANGE+'\t~: '+RESET+'Grayscale image'+RESET)<line_sep>img=cv2.cvtColor(img cv2.COLOR_BGR2GRAY)<line_sep><return>img<block_end><def_stmt>remove_noise img<block_start>kernel=np.ones((1 1) np.uint8)<line_sep>img=cv2.dilate(img kernel iterations=1)<line_sep>img=cv2.erode(img kernel iterations=1)<line_sep>print(ORANGE+'\t~: '+RESET+'Applying gaussianBlur and medianBlur'+RESET)<line_sep>img=cv2.threshold(cv2.GaussianBlur(img (5 5) 0) 150 255 cv2.THRESH_BINARY+cv2.THRESH_OTSU)[1]<line_sep>img=cv2.threshold(cv2.bilateralFilter(img 5 75 75) 0 255 cv2.THRESH_BINARY+cv2.THRESH_OTSU)[1]<line_sep>img=cv2.adaptiveThreshold(cv2.bilateralFilter(img 9 75 75) 255 cv2.ADAPTIVE_THRESH_GAUSSIAN_C cv2.THRESH_BINARY 31 2)<line_sep><return>img<block_end><def_stmt>remove_shadows img<block_start>rgb_planes=cv2.split(img)<line_sep>result_planes=[]<line_sep>result_norm_planes=[]<for_stmt>plane rgb_planes<block_start>dilated_img=cv2.dilate(plane np.ones((7 7) np.uint8))<line_sep>bg_img=cv2.medianBlur(dilated_img 21)<line_sep>diff_img=255-cv2.absdiff(plane bg_img)<line_sep>norm_img=cv2.normalize(diff_img <none> alpha=0 beta=255 norm_type=cv2.NORM_MINMAX dtype=cv2.CV_8UC1)<line_sep>result_planes.append(diff_img)<line_sep>result_norm_planes.append(norm_img)<block_end>result=cv2.merge(result_planes)<line_sep><return>result<block_end><def_stmt>detect_orientation image<block_start>coords=np.column_stack(np.where(image<g>0))<line_sep>angle=cv2.minAreaRect(coords)[-1]<line_sep>print(ORANGE+'\t~: '+RESET+'Get rotation angle:'+str(angle)+RESET)<line_sep><return>image<block_end><def_stmt>enhance_image img tmp_path high_contrast=<true> gaussian_blur=<true> rotate=<true><block_start>img=rescale_image(img)<if_stmt>rotate<block_start>cv2.imwrite(tmp_path img)<line_sep>rotate_image(tmp_path tmp_path)<line_sep>img=cv2.imread(tmp_path)<block_end>img=deskew_image(img)<line_sep>img=remove_shadows(img)<if_stmt>high_contrast<block_start>img=grayscale_image(img)<block_end><if_stmt>gaussian_blur<block_start>img=remove_noise(img)<block_end><return>img<block_end><def_stmt>process_receipt config filename rotate=<true> grayscale=<true> gaussian_blur=<true><block_start>input_path=INPUT_FOLDER+"/"+filename<line_sep>output_path=OUTPUT_FOLDER+"/"+filename.split(".")[0]+".txt"<line_sep>print(ORANGE+'~: '+RESET+'Process image: '+ORANGE+input_path+RESET)<line_sep>prepare_folders()<try_stmt><block_start>img=cv2.imread(input_path)<block_end><except_stmt>FileNotFoundError<block_start><return>Receipt(config=config 
raw="")<block_end>tmp_path=os.path.join(TMP_FOLDER filename)<line_sep>img=enhance_image(img tmp_path grayscale gaussian_blur)<line_sep>print(ORANGE+'~: '+RESET+'Temporary store image at: '+ORANGE+tmp_path+RESET)<line_sep>cv2.imwrite(tmp_path img)<line_sep>run_tesseract(tmp_path output_path config.language)<line_sep>print(ORANGE+'~: '+RESET+'Store parsed text at: '+ORANGE+output_path+RESET)<line_sep>raw=open(output_path 'r').readlines()<line_sep><return>Receipt(config=config raw=raw)<block_end><def_stmt>main <block_start>prepare_folders()<line_sep>dir_path=os.getcwd()<line_sep>config=read_config(config=dir_path+"/config.yml")<line_sep>images=list(find_images(INPUT_FOLDER))<line_sep>print(ORANGE+'~: '+RESET+'Found: '+ORANGE+str(len(images)) RESET+' images in: '+ORANGE+INPUT_FOLDER+RESET)<line_sep>i=1<for_stmt>image images<block_start>input_path=os.path.join(INPUT_FOLDER image)<line_sep>tmp_path=os.path.join(TMP_FOLDER image)<line_sep>out_path=os.path.join(OUTPUT_FOLDER image+".txt")<if_stmt>i<ne>1<block_start>print()<block_end>print(ORANGE+'~: '+RESET+'Process image ('+ORANGE+str(i)+'/'+str(len(images))+RESET+') : '+input_path+RESET)<line_sep>img=cv2.imread(input_path)<line_sep>img=enhance_image(img tmp_path)<line_sep>cv2.imwrite(tmp_path img)<line_sep>run_tesseract(tmp_path out_path config.language)<line_sep>i=i+1<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
# Copyright 2021, <NAME>, mailto:<EMAIL> # # Part of "Nuitka", an optimizing Python compiler that is compatible and # integrates with CPython, but also works on its own. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ Jinja folklore wrappers and handling of inline copy usage. """<import_from_stmt>.Importing importFromInlineCopy<line_sep>environments={}<def_stmt>unlikely_if value<block_start><if_stmt>value<block_start><return>"unlikely"<block_end><else_stmt><block_start><return>""<block_end><block_end><def_stmt>unlikely_or_likely_from value<block_start><if_stmt>value<block_start><return>"unlikely"<block_end><else_stmt><block_start><return>"likely"<block_end><block_end><def_stmt>getEnvironment module_name<block_start><if_stmt>module_name<not><in>environments# Import dependencies, sadly we get to manage this ourselves. <block_start>importFromInlineCopy("markupsafe" must_exist=<true>)<line_sep>jinja2=importFromInlineCopy("jinja2" must_exist=<true>)<import_stmt>jinja2<line_sep>env=jinja2.Environment(loader=jinja2.PackageLoader(module_name "templates") # extensions=["jinja2.ext.do"], trim_blocks=<true> lstrip_blocks=<true> )<line_sep># For shared global functions. env.globals.update({"unlikely_if":unlikely_if "unlikely_or_likely_from":unlikely_or_likely_from })<line_sep>env.undefined=jinja2.StrictUndefined<line_sep>environments[module_name]=env<block_end><return>environments[module_name]<block_end><def_stmt>getTemplate module_name template_name<block_start><return>getEnvironment(module_name).get_template(template_name)<block_end>
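A usage sketch of the cached environment above; `mypkg` is a hypothetical package shipping a `templates/helper.c.j2` file, and the import path of this module is an assumption.

from nuitka.utils.Jinja2 import getTemplate  # module path assumed

# The template could contain, e.g.:
#   if ({{ unlikely_if(rare) }}({{ condition }})) abort();
# which renders as `if (unlikely(x == NULL)) abort();` when rare is true,
# using the shared globals registered on the environment.
template = getTemplate("mypkg", "helper.c.j2")
print(template.render(condition="x == NULL", rare=True))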
# Copyright 2017, OpenCensus Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. <import_stmt>unittest<import_stmt>mock<import_from_stmt>opencensus.trace.propagation text_format<class_stmt>Test_from_carrier(unittest.TestCase)<block_start><def_stmt>test_from_carrier_keys_exist self<block_start>test_trace_id='6e0c63257de34c92bf9efcd03927272e'<line_sep>test_span_id='00f067aa0ba902b7'<line_sep>test_options=1<line_sep>carrier={text_format._TRACE_ID_KEY:test_trace_id text_format._SPAN_ID_KEY:test_span_id text_format._TRACE_OPTIONS_KEY:test_options }<line_sep>propagator=text_format.TextFormatPropagator()<line_sep>span_context=propagator.from_carrier(carrier)<line_sep>self.assertEqual(span_context.trace_id test_trace_id)<line_sep>self.assertEqual(span_context.span_id test_span_id)<line_sep>self.assertEqual(span_context.trace_options.enabled bool(test_options))<block_end><def_stmt>test_from_carrier_keys_not_exist self<block_start>carrier={}<line_sep>propagator=text_format.TextFormatPropagator()<line_sep>span_context=propagator.from_carrier(carrier)<line_sep>self.assertIsNotNone(span_context.trace_id)<line_sep># Span_id should be None here which indicates no parent span_id for # the child spans self.assertIsNone(span_context.span_id)<line_sep>self.assertTrue(span_context.trace_options.enabled)<block_end><def_stmt>test_to_carrier_has_span_id self<block_start>test_trace_id='6e0c63257de34c92bf9efcd03927272e'<line_sep>test_span_id='00f067aa0ba902b7'<line_sep>test_options='2'<line_sep>span_context=mock.Mock()<line_sep>span_context.trace_id=test_trace_id<line_sep>span_context.span_id=test_span_id<line_sep>span_context.trace_options.trace_options_byte=test_options<line_sep>carrier={}<line_sep>propagator=text_format.TextFormatPropagator()<line_sep>carrier=propagator.to_carrier(span_context carrier)<line_sep>self.assertEqual(carrier[text_format._TRACE_ID_KEY] test_trace_id)<line_sep>self.assertEqual(carrier[text_format._SPAN_ID_KEY] str(test_span_id))<line_sep>self.assertEqual(carrier[text_format._TRACE_OPTIONS_KEY] test_options)<block_end><def_stmt>test_to_carrier_no_span_id self<block_start>test_trace_id='6e0c63257de34c92bf9efcd03927272e'<line_sep>test_options='1'<line_sep>span_context=mock.Mock()<line_sep>span_context.trace_id=test_trace_id<line_sep>span_context.span_id=<none><line_sep>span_context.trace_options.trace_options_byte=test_options<line_sep>carrier={}<line_sep>propagator=text_format.TextFormatPropagator()<line_sep>carrier=propagator.to_carrier(span_context carrier)<line_sep>self.assertEqual(carrier[text_format._TRACE_ID_KEY] test_trace_id)<line_sep>self.assertIsNone(carrier.get(text_format._SPAN_ID_KEY))<line_sep>self.assertEqual(carrier[text_format._TRACE_OPTIONS_KEY] test_options)<block_end><block_end>
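For reference, the round trip these tests exercise looks like this when driven directly, a sketch against the propagator API under test:

from opencensus.trace.propagation import text_format

propagator = text_format.TextFormatPropagator()
carrier = {
    text_format._TRACE_ID_KEY: '6e0c63257de34c92bf9efcd03927272e',
    text_format._SPAN_ID_KEY: '00f067aa0ba902b7',
    text_format._TRACE_OPTIONS_KEY: 1,
}

# Keys present: the context mirrors the carrier. Keys absent: a fresh
# trace_id is generated, span_id is None, and sampling defaults to enabled.
span_context = propagator.from_carrier(carrier)
print(propagator.to_carrier(span_context, {}))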
<import_stmt>pathlib<import_stmt>setuptools<line_sep>here=pathlib.Path(__file__).resolve().parent<with_stmt>open(here/'controldiffeq/README.md' 'r')<as>f<block_start>readme=f.read()<block_end>setuptools.setup(name='controldiffeq' version='0.0.1' author='<NAME>' author_email='<EMAIL>' maintainer='<NAME>' maintainer_email='<EMAIL>' description='PyTorch functions for solving CDEs.' long_description=readme url='https://github.com/patrick-kidger/NeuralCDE/tree/master/controldiffeq' license='Apache-2.0' zip_safe=<false> python_requires='>=3.5, <4' install_requires=['torch>=1.0.0' 'torchdiffeq>=0.0.1'] packages=['controldiffeq'] classifiers=["Programming Language :: Python :: 3" "License :: OSI Approved :: Apache Software License"])<line_sep>
<import_stmt>chpl_cpu<import_stmt>chpl_atomics<import_stmt>chpl_aux_filesys<import_stmt>chpl_bin_subdir<import_stmt>chpl_make<import_stmt>chpl_platform<import_stmt>chpl_comm<import_stmt>chpl_comm_debug<import_stmt>chpl_comm_segment<import_stmt>chpl_comm_substrate<import_stmt>chpl_compiler<import_stmt>chpl_gasnet<import_stmt>chpl_gmp<import_stmt>chpl_hwloc<import_stmt>chpl_jemalloc<import_stmt>chpl_launcher<import_stmt>chpl_libfabric<import_stmt>chpl_llvm<import_stmt>chpl_locale_model<import_stmt>chpl_gpu<import_stmt>chpl_arch<import_stmt>chpl_mem<import_stmt>chpl_qthreads<import_stmt>chpl_re2<import_stmt>chpl_tasks<import_stmt>chpl_timers<import_stmt>chpl_unwind<import_stmt>chpl_lib_pic<import_stmt>chpl_sanitizers<line_sep># General purpose helpers <import_stmt>chpl_home_utils<import_stmt>chpl_python_version<import_stmt>compiler_utils<import_stmt>overrides<import_stmt>utils<line_sep>
<import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>shutil<import_stmt>sys<import_stmt>tempfile<import_from_stmt>observations.r.swahili swahili<def_stmt>test_swahili <block_start>"""Test module swahili.py by downloading swahili.csv and testing that the shape of the extracted data is 480 rows and 4 columns """<line_sep>test_path=tempfile.mkdtemp()<line_sep>x_train,metadata=swahili(test_path)<try_stmt><block_start><assert_stmt>x_train.shape<eq>(480 4)<block_end><except_stmt><block_start>shutil.rmtree(test_path)<line_sep><raise><block_end><block_end>
# OpenNero will execute ModMain when this mod is loaded <import_from_stmt>Maze.client ClientMain<def_stmt>ModMain mode=""<block_start>ClientMain(mode)<block_end><def_stmt>StartMe <block_start><import_from_stmt>Maze.module getMod<line_sep>getMod().set_speedup(1.0)# full speed ahead getMod().start_sarsa()<block_end># start an algorithm for headless mode
# Copyright (c) 2019, Fundacion Dr. <NAME> # All rights reserved. # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # 1. Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. <import_from_future_stmt> absolute_import<import_stmt>platform<import_stmt>unittest<import_from_stmt>.armtranslator ArmTranslationTestCase<line_sep>@unittest.skipUnless(platform.machine().lower()<in>['armv6l' 'armv7l'] 'Not running on an ARMv6 system')<class_stmt>ArmTranslationBranchTests(ArmTranslationTestCase)<block_start><def_stmt>test_branch_instructions self<block_start>untouched_value=0x45454545<line_sep>touched_value=0x31313131<line_sep># R11 is used as a dirty register to check if the branch was # taken or not. instr_samples=[["mov r11, #0x{:x}".format(untouched_value) "b #0x800c" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ["mov r11, #0x{:x}".format(untouched_value) "bx #0x800c" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ["mov r11, #0x{:x}".format(untouched_value) "bl #0x800c" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ["mov r11, #0x{:x}".format(untouched_value) "blx #0x800c" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ["movs r11, #0x{:x}".format(untouched_value) "bne #0x800c" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ["mov r11, #0x{:x}".format(untouched_value) "mov r1, #0x8010" "bx r1" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ["mov r11, #0x{:x}".format(untouched_value) "mov r1, #0x8010" "blx r1" "mov r11, #0x{:x}".format(touched_value) "mov r0, r0" ] ]<for_stmt>instr instr_samples<block_start>reil_ctx_out=self._execute_asm(instr 0x8000)<line_sep>self.assertTrue(reil_ctx_out['r11']<eq>untouched_value)<block_end><block_end><block_end>
<import_from_future_stmt> unicode_literals<import_from_stmt>django.apps AppConfig<class_stmt>CustomersConfig(AppConfig)<block_start>name='customers'<line_sep>verbose_name='Customers'<def_stmt>ready self<block_start><import_stmt>customers.handlers<block_end><block_end>
<import_stmt>pandas<as>pd<import_stmt>numpy<as>np<import_from_stmt>keras.models Sequential<import_from_stmt>keras.layers Dense Dropout Activation Flatten Reshape<import_from_stmt>keras.layers Conv1D MaxPooling1D LeakyReLU<import_from_stmt>keras.utils np_utils<import_from_stmt>keras.layers GRU CuDNNGRU<import_from_stmt>keras.callbacks CSVLogger ModelCheckpoint<import_stmt>h5py<import_stmt>os<import_stmt>tensorflow<as>tf<import_from_stmt>keras.backend.tensorflow_backend set_session<line_sep>os.environ['CUDA_DEVICE_ORDER']='PCI_BUS_ID'<line_sep>os.environ['CUDA_VISIBLE_DEVICES']='1'<line_sep>os.environ['TF_CPP_MIN_LOG_LEVEL']='2'<line_sep>config=tf.ConfigProto()<line_sep>config.gpu_options.allow_growth=<true><line_sep>set_session(tf.Session(config=config))<with_stmt>h5py.File(''.join(['bitcoin2015to2017_close.h5']) 'r')<as>hf<block_start>datas=hf['inputs'].value<line_sep>labels=hf['outputs'].value<block_end>output_file_name='bitcoin2015to2017_close_GRU_1_tanh_relu_'<line_sep>step_size=datas.shape[1]<line_sep>units=50<line_sep>batch_size=8<line_sep>nb_features=datas.shape[2]<line_sep>epochs=100<line_sep>output_size=16<line_sep># split training / validation training_size=int(0.8<times>datas.shape[0])<line_sep>training_datas=datas[:training_size :]<line_sep>training_labels=labels[:training_size : 0]<line_sep>validation_datas=datas[training_size: :]<line_sep>validation_labels=labels[training_size: : 0]<line_sep># build model model=Sequential()<line_sep>model.add(GRU(units=units input_shape=(step_size nb_features) return_sequences=<false>))<line_sep>model.add(Activation('tanh'))<line_sep>model.add(Dropout(0.2))<line_sep>model.add(Dense(output_size))<line_sep>model.add(Activation('relu'))<line_sep>model.compile(loss='mse' optimizer='adam')<line_sep>model.fit(training_datas training_labels batch_size=batch_size validation_data=(validation_datas validation_labels) epochs=epochs callbacks=[CSVLogger(output_file_name+'.csv' append=<true>) ModelCheckpoint('weights/'+output_file_name+'-{epoch:02d}-{val_loss:.5f}.hdf5' monitor='val_loss' verbose=1 mode='min')])<line_sep># model.fit(datas,labels) #model.save(output_file_name+'.h5')
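# Editor's sketch: reloading a checkpoint written by the ModelCheckpoint callback
# above for inference. The weights filename is hypothetical -- substitute whichever
# epoch/val_loss file actually landed under weights/ during training.
model.load_weights('weights/bitcoin2015to2017_close_GRU_1_tanh_relu_-42-0.00012.hdf5')
predicted = model.predict(validation_datas)
print(predicted.shape)  # (n_validation, 16): one 16-step-ahead forecast per window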
# coding=utf-8 # ------------------------------------ # Copyright (c) Microsoft Corporation. # Licensed under the MIT License. # ------------------------------------ <import_stmt>pytest<import_from_stmt>azure.core.exceptions HttpResponseError ClientAuthenticationError<import_from_stmt>azure.core.credentials AzureKeyCredential<import_from_stmt>testcase ConversationTest GlobalConversationAccountPreparer <import_from_stmt>azure.ai.language.conversations ConversationAnalysisClient<import_from_stmt>azure.ai.language.conversations.models ConversationAnalysisOptions AnalyzeConversationResult ConversationPrediction <class_stmt>ConversationAppTests(ConversationTest)<block_start>@GlobalConversationAccountPreparer()<def_stmt>test_conversation_app self conv_account conv_key conv_project# prepare data <block_start>query="One california maki please."<line_sep>input=ConversationAnalysisOptions(query=query )<line_sep># analyze query client=ConversationAnalysisClient(conv_account AzureKeyCredential(conv_key))<with_stmt>client<block_start>result=client.analyze_conversations(input project_name=conv_project deployment_name='production')<block_end># assert <assert_stmt>isinstance(result AnalyzeConversationResult)<assert_stmt>result.query<eq>query<assert_stmt>isinstance(result.prediction ConversationPrediction)<assert_stmt>result.prediction.project_kind<eq>'conversation'<assert_stmt>result.prediction.top_intent<eq>'Order'<assert_stmt>len(result.prediction.entities)<g>0<assert_stmt>len(result.prediction.intents)<g>0<assert_stmt>result.prediction.intents[0].category<eq>'Order'<assert_stmt>result.prediction.intents[0].confidence_score<g>0<assert_stmt>result.prediction.entities[0].category<eq>'OrderItem'<assert_stmt>result.prediction.entities[0].text<eq>'california maki'<assert_stmt>result.prediction.entities[0].confidence_score<g>0<block_end>@GlobalConversationAccountPreparer()<def_stmt>test_conversation_app_with_dictparams self conv_account conv_key conv_project# prepare data <block_start>query="One california maki please."<line_sep>params={"query":query "api_version":"2021-11-01-preview"}<line_sep># analyze query client=ConversationAnalysisClient(conv_account AzureKeyCredential(conv_key))<with_stmt>client<block_start>result=client.analyze_conversations(params project_name=conv_project deployment_name='production')<block_end># assert <assert_stmt>isinstance(result AnalyzeConversationResult)<assert_stmt>result.query<eq>query<assert_stmt>isinstance(result.prediction ConversationPrediction)<assert_stmt>result.prediction.project_kind<eq>'conversation'<assert_stmt>result.prediction.top_intent<eq>'Order'<assert_stmt>len(result.prediction.entities)<g>0<assert_stmt>len(result.prediction.intents)<g>0<assert_stmt>result.prediction.intents[0].category<eq>'Order'<assert_stmt>result.prediction.intents[0].confidence_score<g>0<assert_stmt>result.prediction.entities[0].category<eq>'OrderItem'<assert_stmt>result.prediction.entities[0].text<eq>'california maki'<assert_stmt>result.prediction.entities[0].confidence_score<g>0<block_end><block_end>
# Copyright (c) 2021 <NAME> # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. """ Module for SS58 decoding/encoding. Reference: https://github.com/paritytech/substrate/wiki/External-Address-Format-(SS58). """<line_sep># Imports <import_from_stmt>typing Tuple<import_from_stmt>bip_utils.ss58.ss58_ex SS58ChecksumError<import_from_stmt>bip_utils.base58 Base58Decoder Base58Encoder<import_from_stmt>bip_utils.utils.misc ConvUtils CryptoUtils<class_stmt>SS58Const<block_start>"""Class container for SS58 constants."""<line_sep># Max format for simple account SIMPLE_ACCOUNT_FORMAT_MAX_VAL:int=63<line_sep># Format maximum value FORMAT_MAX_VAL:int=16383<line_sep># Reserved formats RESERVED_FORMATS:Tuple[int int]=(46 47)<line_sep># Data length in bytes DATA_BYTE_LEN:int=32<line_sep># Checksum length in bytes CHECKSUM_BYTE_LEN:int=2<line_sep># Checksum prefix CHECKSUM_PREFIX:bytes=b"SS58PRE"<block_end><class_stmt>SS58Utils<block_start>"""Class container for SS58 utility functions."""<line_sep>@staticmethod<def_stmt>ComputeChecksum data_bytes:bytes<arrow>bytes<block_start>""" Compute SS58 checksum. Args: data_bytes (bytes): Data bytes Returns: bytes: Computed checksum """<line_sep><return>CryptoUtils.Blake2b(SS58Const.CHECKSUM_PREFIX+data_bytes)[:SS58Const.CHECKSUM_BYTE_LEN]<block_end><block_end><class_stmt>SS58Encoder<block_start>"""SS58 encoder class. It provides methods for encoding to SS58 format."""<line_sep>@staticmethod<def_stmt>Encode data_bytes:bytes ss58_format:int<arrow>str<block_start>""" Encode bytes into a SS58 string. 
Args: data_bytes (bytes): Data bytes (32-byte length) ss58_format (int) : SS58 format Returns: str: SS58 encoded string Raises: ValueError: If parameters are not valid """<line_sep># Check parameters <if_stmt>len(data_bytes)<ne>SS58Const.DATA_BYTE_LEN<block_start><raise>ValueError(f"Invalid data length ({len(data_bytes)})")<block_end><if_stmt>ss58_format<l>0<or>ss58_format<g>SS58Const.FORMAT_MAX_VAL<block_start><raise>ValueError(f"Invalid SS58 format ({ss58_format})")<block_end><if_stmt>ss58_format<in>SS58Const.RESERVED_FORMATS<block_start><raise>ValueError(f"Invalid SS58 format ({ss58_format})")<block_end># Simple account <if_stmt>ss58_format<le>SS58Const.SIMPLE_ACCOUNT_FORMAT_MAX_VAL<block_start>ss58_format_bytes=ConvUtils.IntegerToBytes(ss58_format)<block_end># Full address <else_stmt># 0b00HHHHHH_MMLLLLLL -> (0b01LLLLLL, 0bHHHHHHMM) <block_start>ss58_format_bytes=bytes([((ss58_format&0x00FC)<rshift>2)|0x0040 (ss58_format<rshift>8)|((ss58_format&0x0003)<lshift>6)])<block_end># Get payload payload=ss58_format_bytes+data_bytes<line_sep># Compute checksum checksum=SS58Utils.ComputeChecksum(payload)<line_sep># Encode <return>Base58Encoder.Encode(payload+checksum)<block_end><block_end><class_stmt>SS58Decoder<block_start>"""SS58 decoder class. It provides methods for decoding SS58 format."""<line_sep>@staticmethod<def_stmt>Decode data_str:str<arrow>Tuple[int bytes]<block_start>""" Decode bytes from a SS58 string. Args: data_str (string): Data string Returns: tuple: SS58 format and data bytes Raises: SS58ChecksumError: If checksum is not valid ValueError: If the string is not a valid SS58 format """<line_sep># Decode string dec_bytes=Base58Decoder.Decode(data_str)<line_sep># Full address <if_stmt>dec_bytes[0]&0x40<block_start>ss58_format_len=2<line_sep>ss58_format=((dec_bytes[0]&0x3F)<lshift>2)|(dec_bytes[1]<rshift>6)|((dec_bytes[1]&0x3F)<lshift>8)<block_end># Simple account <else_stmt><block_start>ss58_format_len=1<line_sep>ss58_format=dec_bytes[0]<block_end># Check format <if_stmt>ss58_format<in>SS58Const.RESERVED_FORMATS<block_start><raise>ValueError(f"Invalid SS58 format ({ss58_format})")<block_end># Get back data and checksum data_bytes=dec_bytes[ss58_format_len:-SS58Const.CHECKSUM_BYTE_LEN]<line_sep>checksum_bytes=dec_bytes[-SS58Const.CHECKSUM_BYTE_LEN:]<line_sep># Check data length <if_stmt>len(data_bytes)<ne>SS58Const.DATA_BYTE_LEN<block_start><raise>ValueError(f"Invalid data length ({len(data_bytes)})")<block_end># Compute checksum comp_checksum=SS58Utils.ComputeChecksum(dec_bytes[:-SS58Const.CHECKSUM_BYTE_LEN])<line_sep># Verify checksum <if_stmt>checksum_bytes<ne>comp_checksum<block_start><raise>SS58ChecksumError(f"Invalid checksum (expected {ConvUtils.BytesToHexString(comp_checksum)}, "<concat>f"got {ConvUtils.BytesToHexString(checksum_bytes)})")<block_end><return>ss58_format data_bytes<block_end><block_end>
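# Editor's sketch: a minimal round trip through the SS58 classes above, assuming
# bip_utils is installed so its Base58 and CryptoUtils helpers resolve. The
# 32 zero bytes and format 42 (the generic Substrate format) are illustrative
# values, not anything mandated by the module.
data = bytes(32)
address = SS58Encoder.Encode(data, ss58_format=42)
ss58_format, decoded = SS58Decoder.Decode(address)
assert ss58_format == 42 and decoded == data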
<import_stmt>FWCore.ParameterSet.Config<as>cms<line_sep>process=cms.Process("TEST")<line_sep>process.load("CondCore.DBCommon.CondDBCommon_cfi")<line_sep>process.CondDBCommon.connect='sqlite_file:userconf.db'<line_sep>process.CondDBCommon.DBParameters.authenticationPath='.'<line_sep>process.PoolDBOutputService=cms.Service("PoolDBOutputService" process.CondDBCommon logconnect=cms.untracked.string('sqlite_file:log.db') toPut=cms.VPSet(cms.PSet(record=cms.string('DTCCBConfigRcd') tag=cms.string('conf_test') timetype=cms.untracked.string('runnumber')) cms.PSet(record=cms.string('keyedConfBricks') tag=cms.string('DT_keyedConfBricks_V01') timetype=cms.untracked.string('hash') withWrapper=cms.untracked.bool(<true>) outOfOrder=cms.untracked.bool(<true>)) cms.PSet(record=cms.string('keyedConfListIOV') tag=cms.string('DT_keyedConfListIOV_V01') timetype=cms.untracked.string('runnumber') withWrapper=cms.untracked.bool(<true>) outOfOrder=cms.untracked.bool(<false>))))<line_sep>process.source=cms.Source("EmptyIOVSource" timetype=cms.string('runnumber') firstValue=cms.uint64(1) lastValue=cms.uint64(1) interval=cms.uint64(1))<line_sep>process.essource=cms.ESSource("PoolDBESSource" process.CondDBCommon DumpStat=cms.untracked.bool(<true>) toGet=cms.VPSet(cms.PSet(record=cms.string('DTKeyedConfigListRcd') tag=cms.string('DT_keyedConfListIOV_V01')) cms.PSet(record=cms.string('DTKeyedConfigContainerRcd') tag=cms.string('DT_keyedConfBricks_V01'))))<line_sep>process.conf_o2o=cms.EDAnalyzer("DTUserKeyedConfigPopConAnalyzer" name=cms.untracked.string('DTCCBConfig') Source=cms.PSet(DBParameters=cms.PSet(messageLevel=cms.untracked.int32(0) authenticationPath=cms.untracked.string('.')) onlineDB=cms.string('sqlite_file:dummy_online.db') tag=cms.string('conf_test') run=cms.int32(1) writeKeys=cms.bool(<true>) writeData=cms.bool(<true>) container=cms.string('keyedConfBricks') DTConfigKeys=cms.VPSet(cms.PSet(configType=cms.untracked.int32(1) configKey=cms.untracked.int32(542)) cms.PSet(configType=cms.untracked.int32(2) configKey=cms.untracked.int32(926)) cms.PSet(configType=cms.untracked.int32(3) configKey=cms.untracked.int32(542)) cms.PSet(configType=cms.untracked.int32(4) configKey=cms.untracked.int32(542)) cms.PSet(configType=cms.untracked.int32(5) configKey=cms.untracked.int32(542)) cms.PSet(configType=cms.untracked.int32(6) configKey=cms.untracked.int32(1226))) onlineAuthentication=cms.string('.')) SinceAppendMode=cms.bool(<true>) record=cms.string('DTCCBConfigRcd') loggingOn=cms.untracked.bool(<true>) debug=cms.bool(<false>))<line_sep>process.p=cms.Path(process.conf_o2o)<line_sep>
''' Function: define the AI agent Author: Charles WeChat Official Account: Charles的皮卡丘 '''<import_from_stmt>modules.food *<import_from_stmt>operator itemgetter<import_from_stmt>collections OrderedDict<line_sep>'''AI agent'''<class_stmt>Agent()<block_start><def_stmt>__init__ self cfg snake **kwargs<block_start>self.cfg=cfg<line_sep>self.num_rows=cfg.GAME_MATRIX_SIZE[1]<line_sep>self.num_cols=cfg.GAME_MATRIX_SIZE[0]<line_sep>self.directions=[(0 -1) (0 1) (-1 0) (1 0)]<line_sep>self.path=self.buildcircle(snake)<line_sep>self.shortcut_path={}<block_end>'''make decision'''<def_stmt>act self snake food# make decision <block_start><if_stmt>self.shortcut_path<block_start>head_next=self.shortcut_path.pop(snake.coords[0])<block_end><else_stmt><block_start>head_next=self.path[snake.coords[0]]<block_end>query=(head_next[0]-snake.coords[0][0] head_next[1]-snake.coords[0][1])<line_sep>direction={(-1 0):'left' (1 0):'right' (0 -1):'up' (0 1):'down'}[query]<line_sep>snake.setDirection(direction)<if_stmt>snake.update(food)<block_start>food=Apple(self.cfg snake.coords)<line_sep>infos={'eaten':<true> 'food':food}<block_end><else_stmt><block_start>infos={'eaten':<false> 'food':<none>}<block_end># if snake has eaten the food <if_stmt>head_next<eq>food.coord<block_start>path=self.buildcircle(snake)<if_stmt>path<block_start>self.path=path<block_end><block_end># take shortcut <if_stmt>self.shortcut_path<block_start><return><block_end>shortcut_path=self.shortcut(snake food)<if_stmt>shortcut_path<block_start>self.shortcut_path=shortcut_path<block_end># return the necessary infos <return>infos<block_end>'''calculate shortcut path'''<def_stmt>shortcut self snake food# empty screen, with the ordered Hamiltonian cycle precomputed and order-numbered <block_start>world=[[0<for>i range(self.num_cols)]<for>j range(self.num_rows)]<line_sep>num=1<line_sep>node=snake.coords[-1]<line_sep>world[node[1]][node[0]]=num<line_sep>node=self.path[node]<while_stmt>node<ne>snake.coords[-1]<block_start>num<augadd>1<line_sep>world[node[1]][node[0]]=num<line_sep>node=self.path[node]<block_end># obtain shortcut_path wall=snake.coords<line_sep>food=food.coord<line_sep>food_number=world[food[1]][food[0]]<line_sep>node,pre=wall[0] (-1 -1)<line_sep>wait=OrderedDict()<line_sep>wait[node]=pre<line_sep>path={}<while_stmt>wait<block_start>node,pre=wait.popitem(last=<false>)<line_sep>path[node]=pre<if_stmt>node<eq>food<block_start><break><block_end>node_number=world[node[1]][node[0]]<line_sep>neigh={}<for_stmt>direction self.directions<block_start>to=(node[0]+direction[0] node[1]+direction[1])<if_stmt><not>self.checkboundary(to)<block_start><continue><block_end><if_stmt>to<in>wait<or>to<in>wall<or>to<in>path<block_start><continue><block_end>to_number=world[to[1]][to[0]]<if_stmt>to_number<g>node_number<and>to_number<le>food_number<block_start>neigh[node_number]=to<block_end><block_end>neigh=sorted(neigh.items() key=itemgetter(0) reverse=<true>)<for_stmt>item neigh<block_start>wait[item[1]]=node<block_end><block_end><if_stmt>node<ne>food<block_start><return>{}<block_end><return>self.reverse(path snake.coords[0] food)<block_end>'''check boundary'''<def_stmt>checkboundary self pos<block_start><if_stmt>pos[0]<l>0<or>pos[1]<l>0<or>pos[0]<ge>self.num_cols<or>pos[1]<ge>self.num_rows<block_start><return><false><block_end><return><true><block_end>'''the shortest'''<def_stmt>shortest self wall head food<block_start>wait=OrderedDict()<line_sep>node,pre=head (-1
-1)<line_sep>wait[node]=pre<line_sep>path={}<while_stmt>wait<block_start>node,pre=wait.popitem(last=<false>)<line_sep>path[node]=pre<if_stmt>node<eq>food<block_start><break><block_end><if_stmt>pre<in>path<block_start>prepre=path[pre]<line_sep>direction=(pre[0]-prepre[0] pre[1]-prepre[1])<if_stmt>(direction<in>self.directions)<and>(direction<ne>self.directions[0])<block_start>self.directions.remove(direction)<line_sep>self.directions.insert(0 direction)<block_end><block_end><for_stmt>direction self.directions<block_start>to=(node[0]+direction[0] node[1]+direction[1])<if_stmt><not>self.checkboundary(to)<block_start><continue><block_end><if_stmt>to<in>path<or>to<in>wait<or>to<in>wall<block_start><continue><block_end>wait[to]=node<block_end><block_end><if_stmt>node<ne>food<block_start><return><none><block_end><return>self.reverse(path head food)<block_end>'''reverse path'''<def_stmt>reverse self path head food<block_start><if_stmt><not>path<block_start><return>path<block_end>path_new={}<line_sep>node=food<while_stmt>node<ne>head<block_start>path_new[path[node]]=node<line_sep>node=path[node]<block_end><return>path_new<block_end>'''the longest'''<def_stmt>longest self wall head food<block_start>path=self.shortest(wall head food)<if_stmt>path<is><none><block_start><return><none><block_end>node=head<while_stmt>node<ne>food<block_start><if_stmt>self.extendpath(path node wall+[food])<block_start>node=head<line_sep><continue><block_end>node=path[node]<block_end><return>path<block_end>'''extend path'''<def_stmt>extendpath self path node wall<block_start>next_=path[node]<line_sep>direction_1=(next_[0]-node[0] next_[1]-node[1])<if_stmt>direction_1<in>[(0 -1) (0 1)]<block_start>directions=[(-1 0) (1 0)]<block_end><else_stmt><block_start>directions=[(0 -1) (0 1)]<block_end><for_stmt>d directions<block_start>src=(node[0]+d[0] node[1]+d[1])<line_sep>to=(next_[0]+d[0] next_[1]+d[1])<if_stmt>(src<eq>to)<or><not>(self.checkboundary(src)<and>self.checkboundary(to))<block_start><continue><block_end><if_stmt>src<in>path<or>src<in>wall<or>to<in>path<or>to<in>wall<block_start><continue><block_end>direction_2=(to[0]-src[0] to[1]-src[1])<if_stmt>direction_1<eq>direction_2<block_start>path[node]=src<line_sep>path[src]=to<line_sep>path[to]=next_<line_sep><return><true><block_end><block_end><return><false><block_end>'''build a Hamiltonian cycle'''<def_stmt>buildcircle self snake<block_start>path=self.longest(snake.coords[1:-1] snake.coords[0] snake.coords[-1])<if_stmt>(<not>path)<or>(len(path)-1<ne>self.num_rows<times>self.num_cols-len(snake.coords))<block_start><return><none><block_end><for_stmt>i range(1 len(snake.coords))<block_start>path[snake.coords[i]]=snake.coords[i-1]<block_end><return>path<block_end><block_end>
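# Editor's sketch: the search pattern behind Agent.shortest/Agent.reverse above,
# restated on a bare grid so it runs standalone. The OrderedDict doubles as a FIFO
# frontier and a predecessor record; grid size, start and goal are made-up values.
from collections import OrderedDict

def grid_bfs(start, goal, cols=4, rows=4):
    wait = OrderedDict([(start, (-1, -1))])
    path = {}
    while wait:
        node, pre = wait.popitem(last=False)  # pop the oldest entry -> breadth-first order
        path[node] = pre                      # remember how this node was reached
        if node == goal:
            break
        for dx, dy in ((0, -1), (0, 1), (-1, 0), (1, 0)):
            to = (node[0] + dx, node[1] + dy)
            if 0 <= to[0] < cols and 0 <= to[1] < rows and to not in path and to not in wait:
                wait[to] = node
    # invert the predecessor map into a successor map, as Agent.reverse does
    succ, node = {}, goal
    while node != start:
        succ[path[node]] = node
        node = path[node]
    return succ

print(grid_bfs((0, 0), (3, 3)))  # each cell maps to the next cell on a shortest path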
""" Created on May 18, 2021 modules of Deep&Crossing: Residual units @author: <NAME>(<EMAIL>) """<import_stmt>tensorflow<as>tf<import_from_stmt>tensorflow.keras.layers Dense ReLU Layer<class_stmt>Residual_Units(Layer)<block_start>""" Residual Units """<def_stmt>__init__ self hidden_unit dim_stack<block_start>""" :param hidden_unit: A list. Neural network hidden units. :param dim_stack: A scalar. The dimension of inputs unit. """<line_sep>super(Residual_Units self).__init__()<line_sep>self.layer1=Dense(units=hidden_unit activation='relu')<line_sep>self.layer2=Dense(units=dim_stack activation=<none>)<line_sep>self.relu=ReLU()<block_end><def_stmt>call self inputs **kwargs<block_start>x=inputs<line_sep>x=self.layer1(x)<line_sep>x=self.layer2(x)<line_sep>outputs=self.relu(x+inputs)<line_sep><return>outputs<block_end><block_end>
<import_from_stmt>enum Enum<class_stmt>NetworkFaults(Enum)<block_start>NETWORK_DELAY_MILLISECONDS=1<line_sep>PACKET_DUPLICATE_PERCENTAGE=2<line_sep>PACKET_CORRUPT_PERCENTAGE=3<line_sep>PACKET_LOSS_PERCENTAGE=4<block_end>
<import_from_stmt>typing Iterable<import_from_stmt>eth2spec.test.helpers.constants ALTAIR MINIMAL MAINNET PHASE0<import_from_stmt>eth2spec.test.altair.transition test_transition<as>test_altair_transition test_activations_and_exits<as>test_altair_activations_and_exits test_leaking<as>test_altair_leaking test_slashing<as>test_altair_slashing test_operations<as>test_altair_operations <import_from_stmt>eth2spec.gen_helpers.gen_base gen_runner gen_typing<import_from_stmt>eth2spec.gen_helpers.gen_from_tests.gen generate_from_tests<def_stmt>create_provider tests_src preset_name:str pre_fork_name:str post_fork_name:str<arrow>gen_typing.TestProvider<block_start><def_stmt>prepare_fn <arrow><none><block_start><return><block_end><def_stmt>cases_fn <arrow>Iterable[gen_typing.TestCase]<block_start><return>generate_from_tests(runner_name='transition' handler_name='core' src=tests_src fork_name=post_fork_name phase=pre_fork_name preset_name=preset_name )<block_end><return>gen_typing.TestProvider(prepare=prepare_fn make_cases=cases_fn)<block_end>TRANSITION_TESTS=((PHASE0 ALTAIR test_altair_transition) (PHASE0 ALTAIR test_altair_activations_and_exits) (PHASE0 ALTAIR test_altair_leaking) (PHASE0 ALTAIR test_altair_slashing) (PHASE0 ALTAIR test_altair_operations) )<if_stmt>__name__<eq>"__main__"<block_start><for_stmt>pre_fork,post_fork,transition_test_module TRANSITION_TESTS<block_start>gen_runner.run_generator("transition" [create_provider(transition_test_module MINIMAL pre_fork post_fork) create_provider(transition_test_module MAINNET pre_fork post_fork) ])<block_end><block_end>
<import_from_stmt>mlflow.entities.model_registry.registered_model RegisteredModel<import_from_stmt>mlflow.entities.model_registry.model_version ModelVersion<import_from_stmt>mlflow.entities.model_registry.registered_model_tag RegisteredModelTag<import_from_stmt>mlflow.entities.model_registry.model_version_tag ModelVersionTag<line_sep>__all__=["RegisteredModel" "ModelVersion" "RegisteredModelTag" "ModelVersionTag" ]<line_sep>
# encoding: UTF-8 <import_from_stmt>tests.base TestCase<import_from_stmt>vilya.models.issue Issue<import_from_stmt>vilya.models.project_issue ProjectIssue<class_stmt>TestProjectIssue(TestCase)<block_start><def_stmt>test_add_issue self<block_start>p=ProjectIssue.add('test' 'test description' 'test' project=1)<assert_stmt>isinstance(p ProjectIssue)<assert_stmt>p.title<eq>'test'<assert_stmt>p.description<eq>'test description'<assert_stmt>p.project_id<eq>1<line_sep>p.delete()<block_end><def_stmt>test_get_issue self<block_start>p=ProjectIssue.add('test' 'test description' 'test' project=1)<line_sep>r=ProjectIssue.get(p.project_id issue_id=p.issue_id)<assert_stmt>isinstance(r ProjectIssue)<assert_stmt>r.project_id<eq>1<line_sep>r=ProjectIssue.get(p.project_id number=p.number)<assert_stmt>isinstance(r ProjectIssue)<assert_stmt>r.project_id<eq>1<line_sep>r=Issue.get_cached_issue(p.issue_id)<assert_stmt>isinstance(r ProjectIssue)<assert_stmt>r.title<eq>'test'<assert_stmt>r.description<eq>'test description'<assert_stmt>r.project_id<eq>1<line_sep>p2=ProjectIssue.add('test2' 'test2 description' 'test' project=1 assignee='assignee')<line_sep>p3=ProjectIssue.add('test3' 'test3 description' 'test' project=1 assignee='assignee')<line_sep>p4=ProjectIssue.add('test4' 'test4 description' 'test' project=1 assignee='test')<line_sep>p5=ProjectIssue.add('test5' 'test5 description' 'test1' project=2 assignee='test')<line_sep>rs=ProjectIssue._gets_by_project_id(1)<assert_stmt>len(rs)<eq>4<line_sep>rs=ProjectIssue._get_issues_by_project_id(1)<assert_stmt>all([isinstance(i ProjectIssue)<for>i rs])<assert_stmt>len(rs)<eq>4<line_sep>rs=ProjectIssue.gets_by_assignee_id(1 'assignee')<assert_stmt>all([isinstance(i ProjectIssue)<for>i rs])<assert_stmt>len(rs)<eq>2<line_sep>rs=ProjectIssue.gets_by_creator_id(1 'test')<assert_stmt>all([isinstance(i ProjectIssue)<for>i rs])<assert_stmt>len(rs)<eq>4<for_stmt>p [p p2 p3 p4 p5]<block_start>p.delete()<block_end><block_end><def_stmt>test_n_issue self<block_start>p1=ProjectIssue.add('test1' 'test1 description' 'test' project=1 assignee='assignee')<line_sep>p1.close('test')<line_sep>p2=ProjectIssue.add('test2' 'test2 description' 'test' project=1 assignee='assignee')<line_sep>p2.close('test')<line_sep>p3=ProjectIssue.add('test3' 'test3 description' 'test' project=1 assignee='assignee')<line_sep>p4=ProjectIssue.add('test4' 'test4 description' 'test' project=1 assignee='test')<line_sep>p5=ProjectIssue.add('test5' 'test5 description' 'test1' project=2 assignee='test')<line_sep>count=ProjectIssue.get_count_by_project_id(1)<assert_stmt>count<eq>4<line_sep>count=ProjectIssue.get_count_by_project_id(1 'open')<assert_stmt>count<eq>2<line_sep>count=ProjectIssue.get_count_by_project_id(1 'closed')<assert_stmt>count<eq>2<line_sep>count=ProjectIssue.get_count_by_assignee_id(1 'assignee')<assert_stmt>count<eq>3<line_sep>count=ProjectIssue.get_count_by_assignee_id(1 'assignee' 'open')<assert_stmt>count<eq>1<line_sep>count=ProjectIssue.get_count_by_assignee_id(1 'assignee' 'closed')<assert_stmt>count<eq>2<line_sep>count=ProjectIssue.get_count_by_creator_id(1 'test')<assert_stmt>count<eq>4<line_sep>count=ProjectIssue.get_count_by_creator_id(1 'test' 'open')<assert_stmt>count<eq>2<line_sep>count=ProjectIssue.get_count_by_creator_id(1 'test' 'closed')<assert_stmt>count<eq>2<line_sep>r=ProjectIssue.get(p1.project_id p1.issue_id)<assert_stmt>isinstance(r ProjectIssue)<assert_stmt>r.n_closed_issues<eq>2<assert_stmt>r.n_open_issues<eq>2<for_stmt>p [p1 p2 p3 p4 
p5]<block_start>p.delete()<block_end><block_end><def_stmt>test_open_and_close_issue self<block_start>p1=ProjectIssue.add('test1' 'test1 description' 'test' project=1)<line_sep>p2=ProjectIssue.add('test2' 'test2 description' 'test' project=1)<line_sep>p3=ProjectIssue.add('test3' 'test3 description' 'test' project=1)<line_sep>count=ProjectIssue.get_count_by_project_id(1)<assert_stmt>count<eq>3<line_sep>p1.close('test')<line_sep>count=ProjectIssue.get_count_by_project_id(1 'open')<assert_stmt>count<eq>2<line_sep>p1.open()<line_sep>count=ProjectIssue.get_count_by_project_id(1 'open')<assert_stmt>count<eq>3<for_stmt>p [p1 p2 p3]<block_start>p.delete()<block_end><block_end><def_stmt>test_add_tags self<block_start>target_id=project_id=1<line_sep>p=ProjectIssue.add('test' 'test description' 'test' project=project_id)<assert_stmt>isinstance(p ProjectIssue)<assert_stmt>p.title<eq>'test'<assert_stmt>p.description<eq>'test description'<assert_stmt>p.project_id<eq>1<line_sep>tags=['tag1' 'tag2' 'tag3']<line_sep>p.add_tags(tags target_id)<assert_stmt>len(p.tags)<eq>len(tags)<line_sep>tag_names=[t.name<for>t p.tags]<assert_stmt>set(tags)&set(tag_names)<eq>set(tags)<line_sep>p.delete()<block_end><def_stmt>test_gets_by_issue_ids self<block_start>project_id=1<line_sep>p=ProjectIssue.add('test' 'test description' 'test' project=project_id)<assert_stmt>isinstance(p ProjectIssue)<assert_stmt>p.title<eq>'test'<assert_stmt>p.description<eq>'test description'<assert_stmt>p.project_id<eq>1<line_sep>project_issues=ProjectIssue._gets_by_issue_ids([p.issue_id] state=<none>)<assert_stmt>len(project_issues)<eq>1<line_sep>pissue=project_issues[0]<assert_stmt>isinstance(pissue ProjectIssue)<assert_stmt>pissue.project_id<eq>project_id<line_sep>project_issues=ProjectIssue._gets_by_issue_ids([p.issue_id] state="open")<assert_stmt>len(project_issues)<eq>1<line_sep>pissue=project_issues[0]<assert_stmt>isinstance(pissue ProjectIssue)<assert_stmt>pissue.project_id<eq>project_id<line_sep>project_issues=ProjectIssue._gets_by_issue_ids([p.issue_id] state="closed")<assert_stmt>len(project_issues)<eq>0<line_sep>pissue.close("test")<line_sep>project_issues=ProjectIssue._gets_by_issue_ids([p.issue_id] state="open")<assert_stmt>len(project_issues)<eq>0<line_sep>project_issues=ProjectIssue._gets_by_issue_ids([p.issue_id] state="closed")<assert_stmt>len(project_issues)<eq>1<line_sep>pissue=project_issues[0]<assert_stmt>isinstance(pissue ProjectIssue)<assert_stmt>pissue.project_id<eq>project_id<line_sep>p.delete()<block_end><def_stmt>test_gets_by_project_ids self<block_start>p1=ProjectIssue.add('test1' 'desp' 'test' project=1)<line_sep>p2=ProjectIssue.add('test2' 'desp' 'test2' project=2)<line_sep>p3=ProjectIssue.add('test3' 'desp' 'test3' project=2)<line_sep>issues=ProjectIssue.gets_by_project_ids([1 2])<assert_stmt>len(issues)<eq>3<for_stmt>p [p1 p2 p3]<block_start>p.delete()<block_end><block_end><block_end>
# coding=utf-8 # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. *** # *** Do not edit by hand unless you're certain you know what you are doing! *** <import_stmt>warnings<import_stmt>pulumi<import_stmt>pulumi.runtime<import_from_stmt>typing Any Mapping Optional Sequence Union overload<import_from_stmt>.. _utilities<import_from_stmt>. outputs<line_sep>__all__=['GetKeysResult' 'AwaitableGetKeysResult' 'get_keys' 'get_keys_output' ]<line_sep>@pulumi.output_type<class_stmt>GetKeysResult<block_start>""" A collection of values returned by getKeys. """<def_stmt>__init__ __self__ id=<none> key_signing_keys=<none> managed_zone=<none> project=<none> zone_signing_keys=<none><block_start><if_stmt>id<and><not>isinstance(id str)<block_start><raise>TypeError("Expected argument 'id' to be a str")<block_end>pulumi.set(__self__ "id" id)<if_stmt>key_signing_keys<and><not>isinstance(key_signing_keys list)<block_start><raise>TypeError("Expected argument 'key_signing_keys' to be a list")<block_end>pulumi.set(__self__ "key_signing_keys" key_signing_keys)<if_stmt>managed_zone<and><not>isinstance(managed_zone str)<block_start><raise>TypeError("Expected argument 'managed_zone' to be a str")<block_end>pulumi.set(__self__ "managed_zone" managed_zone)<if_stmt>project<and><not>isinstance(project str)<block_start><raise>TypeError("Expected argument 'project' to be a str")<block_end>pulumi.set(__self__ "project" project)<if_stmt>zone_signing_keys<and><not>isinstance(zone_signing_keys list)<block_start><raise>TypeError("Expected argument 'zone_signing_keys' to be a list")<block_end>pulumi.set(__self__ "zone_signing_keys" zone_signing_keys)<block_end>@property@pulumi.getter<def_stmt>id self<arrow>str<block_start>""" The provider-assigned unique ID for this managed resource. """<line_sep><return>pulumi.get(self "id")<block_end>@property@pulumi.getter(name="keySigningKeys")<def_stmt>key_signing_keys self<arrow>Sequence['outputs.GetKeysKeySigningKeyResult']<block_start>""" A list of Key-signing key (KSK) records. Structure is documented below. Additionally, the DS record is provided: """<line_sep><return>pulumi.get(self "key_signing_keys")<block_end>@property@pulumi.getter(name="managedZone")<def_stmt>managed_zone self<arrow>str<block_start><return>pulumi.get(self "managed_zone")<block_end>@property@pulumi.getter<def_stmt>project self<arrow>str<block_start><return>pulumi.get(self "project")<block_end>@property@pulumi.getter(name="zoneSigningKeys")<def_stmt>zone_signing_keys self<arrow>Sequence['outputs.GetKeysZoneSigningKeyResult']<block_start>""" A list of Zone-signing key (ZSK) records. Structure is documented below. """<line_sep><return>pulumi.get(self "zone_signing_keys")<block_end><block_end><class_stmt>AwaitableGetKeysResult(GetKeysResult)# pylint: disable=using-constant-test <block_start><def_stmt>__await__ self<block_start><if_stmt><false><block_start><yield>self<block_end><return>GetKeysResult(id=self.id key_signing_keys=self.key_signing_keys managed_zone=self.managed_zone project=self.project zone_signing_keys=self.zone_signing_keys)<block_end><block_end><def_stmt>get_keys managed_zone:Optional[str]=<none> project:Optional[str]=<none> opts:Optional[pulumi.InvokeOptions]=<none><arrow>AwaitableGetKeysResult<block_start>""" Get the DNSKEY and DS records of DNSSEC-signed managed zones. For more information see the [official documentation](https://cloud.google.com/dns/docs/dnskeys/) and [API](https://cloud.google.com/dns/docs/reference/v1/dnsKeys). 
## Example Usage ```python import pulumi import pulumi_gcp as gcp foo = gcp.dns.ManagedZone("foo", dns_name="foo.bar.", dnssec_config=gcp.dns.ManagedZoneDnssecConfigArgs( state="on", non_existence="nsec3", )) foo_dns_keys = foo.id.apply(lambda id: gcp.dns.get_keys(managed_zone=id)) pulumi.export("fooDnsDsRecord", foo_dns_keys.key_signing_keys[0].ds_record) ``` :param str managed_zone: The name or id of the Cloud DNS managed zone. :param str project: The ID of the project in which the resource belongs. If `project` is not provided, the provider project is used. """<line_sep>__args__=dict()<line_sep>__args__['managedZone']=managed_zone<line_sep>__args__['project']=project<if_stmt>opts<is><none><block_start>opts=pulumi.InvokeOptions()<block_end><if_stmt>opts.version<is><none><block_start>opts.version=_utilities.get_version()<block_end>__ret__=pulumi.runtime.invoke('gcp:dns/getKeys:getKeys' __args__ opts=opts typ=GetKeysResult).value<line_sep><return>AwaitableGetKeysResult(id=__ret__.id key_signing_keys=__ret__.key_signing_keys managed_zone=__ret__.managed_zone project=__ret__.project zone_signing_keys=__ret__.zone_signing_keys)<block_end>@_utilities.lift_output_func(get_keys)<def_stmt>get_keys_output managed_zone:Optional[pulumi.Input[str]]=<none> project:Optional[pulumi.Input[Optional[str]]]=<none> opts:Optional[pulumi.InvokeOptions]=<none><arrow>pulumi.Output[GetKeysResult]<block_start>""" Get the DNSKEY and DS records of DNSSEC-signed managed zones. For more information see the [official documentation](https://cloud.google.com/dns/docs/dnskeys/) and [API](https://cloud.google.com/dns/docs/reference/v1/dnsKeys). ## Example Usage ```python import pulumi import pulumi_gcp as gcp foo = gcp.dns.ManagedZone("foo", dns_name="foo.bar.", dnssec_config=gcp.dns.ManagedZoneDnssecConfigArgs( state="on", non_existence="nsec3", )) foo_dns_keys = foo.id.apply(lambda id: gcp.dns.get_keys(managed_zone=id)) pulumi.export("fooDnsDsRecord", foo_dns_keys.key_signing_keys[0].ds_record) ``` :param str managed_zone: The name or id of the Cloud DNS managed zone. :param str project: The ID of the project in which the resource belongs. If `project` is not provided, the provider project is used. """<line_sep><ellipsis><block_end>
<import_from_stmt>enum Enum<import_from_stmt>typing Optional Tuple<import_from_stmt>attr dataclass<line_sep>@dataclass(frozen=<true>)<class_stmt>Hash<block_start>hash:str<def_stmt>__str__ self<arrow>str<block_start><return>f'{str(self.hash[:6])}..{str(self.hash[-6:])}'<block_end><block_end><class_stmt>VersionSource(Enum)<block_start>value:Tuple[int str]<line_sep>TRAVIS=(0 'tr')<line_sep>GITHUB=(1 'gh')<def_stmt>__lt__ self other<block_start><return>self.value<l>other.value<block_end><def_stmt>__str__ self<block_start><return>f'{self.value[1]}'<block_end><block_end>@dataclass(frozen=<true> repr=<false>)<class_stmt>Version<block_start>source:VersionSource<line_sep>number:int<line_sep>@staticmethod<def_stmt>from_string version_str:str assumed_source:VersionSource=VersionSource.GITHUB<block_start><if_stmt>'-'<not><in>version_str<block_start><return>Version(assumed_source int(version_str))<block_end>source,num=version_str.split('-')<for_stmt>possible_source list(VersionSource)<block_start><if_stmt>possible_source.value[1]<eq>source<block_start><return>Version(possible_source int(num))<block_end><block_end><raise>RuntimeError(f'Unknown source {source}')<block_end><def_stmt>__str__ self<block_start><return>f'{self.source}-{self.number}'<block_end><def_stmt>__repr__ self<block_start><return>str(self)<block_end><block_end>@dataclass<class_stmt>Release<block_start>version:Version<line_sep>branch:str<line_sep>key:str<line_sep>info_key:str<line_sep>size:int<line_sep>hash:Hash<line_sep>static_key:Optional[str]=<none><block_end>
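# Editor's sketch: how the dataclasses above parse and print version strings.
# The inputs are illustrative; a bare number falls back to the assumed GITHUB source.
v1 = Version.from_string('tr-17')   # explicit Travis-sourced version
v2 = Version.from_string('42')      # no prefix -> Version(VersionSource.GITHUB, 42)
print(v1, v2)                       # tr-17 gh-42
assert Version.from_string(str(v1)) == v1  # str() and from_string() round-trip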
# Generated by Django 2.2 on 2019-05-16 07:59 <import_from_stmt>django.db migrations<class_stmt>Migration(migrations.Migration)<block_start>dependencies=[('modelchimp' '0048_auto_20190515_1032') ]<line_sep>operations=[migrations.RemoveField(model_name='experiment' name='algorithm' ) migrations.RemoveField(model_name='experiment' name='features' ) migrations.RemoveField(model_name='experiment' name='platform' ) migrations.RemoveField(model_name='experiment' name='platform_library' ) ]<block_end>
<import_from_stmt>django.contrib.auth.models User<import_from_stmt>django.test TestCase<import_from_stmt>dfirtrack_artifacts.forms ArtifactCreatorForm<import_from_stmt>dfirtrack_artifacts.models Artifactpriority Artifactstatus Artifacttype<import_from_stmt>dfirtrack_main.models System Systemstatus Tag Tagcolor<class_stmt>ArtifactCreatorFormTestCase(TestCase)<block_start>"""artifact creator form tests"""<line_sep>@classmethod<def_stmt>setUpTestData cls# create user <block_start>test_user=User.objects.create_user(username='testuser_artifact_creator' password='<PASSWORD>')<line_sep># create object systemstatus_1=Systemstatus.objects.create(systemstatus_name='systemstatus_1')<line_sep># create object System.objects.create(system_name='system_1' systemstatus=systemstatus_1 system_created_by_user_id=test_user system_modified_by_user_id=test_user )<line_sep>System.objects.create(system_name='system_2' systemstatus=systemstatus_1 system_created_by_user_id=test_user system_modified_by_user_id=test_user )<line_sep># create object tagcolor_1=Tagcolor.objects.create(tagcolor_name='tagcolor_1')<line_sep># create object Tag.objects.create(tag_name='tag_1' tagcolor=tagcolor_1 )<line_sep>Tag.objects.create(tag_name='tag_2' tagcolor=tagcolor_1 )<line_sep># create object Artifactpriority.objects.create(artifactpriority_name='prio_1')<line_sep># create object Artifactstatus.objects.create(artifactstatus_name='artifactstatus_1')<line_sep># create object Artifacttype.objects.create(artifacttype_name='artifacttype_1')<line_sep>Artifacttype.objects.create(artifacttype_name='artifacttype_2')<block_end><def_stmt>test_artifact_creator_artifactpriority_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['artifactpriority'].label 'Artifactpriority (*)')<block_end><def_stmt>test_artifact_creator_artifactstatus_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['artifactstatus'].label 'Artifactstatus (*)')<block_end><def_stmt>test_artifact_creator_artifacttype_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['artifacttype'].label 'Artifacttypes (*) - Will also be set as artifact names' )<block_end><def_stmt>test_artifact_creator_system_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['system'].label 'Systems (*)')<block_end><def_stmt>test_artifact_creator_tag_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['tag'].label 'Tags')<block_end><def_stmt>test_artifact_creator_analysisresult_note_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['artifact_note_analysisresult'].label 'Analysis result')<block_end><def_stmt>test_artifact_creator_external_note_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['artifact_note_external'].label 'External note')<block_end><def_stmt>test_artifact_creator_internal_note_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare 
self.assertEqual(form.fields['artifact_note_internal'].label 'Internal note')<block_end><def_stmt>test_artifact_creator_name_choice_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['alternative_artifact_name_choice'].label 'Use alternative artifact name' )<block_end><def_stmt>test_artifact_creator_name_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['alternative_artifact_name'].label 'Alternative artifact name')<block_end><def_stmt>test_artifact_creator_source_path_form_label self<block_start>"""test form label"""<line_sep># get object form=ArtifactCreatorForm()<line_sep># compare self.assertEqual(form.fields['artifact_source_path'].label 'Artifact source path (attention: will be set for all artifacts regardless of type)' )<block_end><def_stmt>test_artifact_creator_form_empty self<block_start>"""test minimum form requirements / INVALID"""<line_sep># get object form=ArtifactCreatorForm(data={})<line_sep># compare self.assertFalse(form.is_valid())<block_end><def_stmt>test_artifact_creator_artifacttype_form_filled self<block_start>"""test minimum form requirements / INVALID"""<line_sep># get object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep>artifacttype_2_id=Artifacttype.objects.get(artifacttype_name='artifacttype_2').artifacttype_id<line_sep># get object form=ArtifactCreatorForm(data={'artifacttype':[artifacttype_1_id artifacttype_2_id ] })<line_sep># compare self.assertFalse(form.is_valid())<block_end><def_stmt>test_artifact_creator_artifactpriority_form_filled self<block_start>"""test minimum form requirements / INVALID"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep>artifacttype_2_id=Artifacttype.objects.get(artifacttype_name='artifacttype_2').artifacttype_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifacttype':[artifacttype_1_id artifacttype_2_id ] })<line_sep># compare self.assertFalse(form.is_valid())<block_end><def_stmt>test_artifact_creator_artifactstatus_form_filled self<block_start>"""test minimum form requirements / INVALID"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object artifactstatus_id=Artifactstatus.objects.get(artifactstatus_name='artifactstatus_1').artifactstatus_id<line_sep># get object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep>artifacttype_2_id=Artifacttype.objects.get(artifacttype_name='artifacttype_2').artifacttype_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifactstatus':artifactstatus_id 'artifacttype':[artifacttype_1_id artifacttype_2_id ] })<line_sep># compare self.assertFalse(form.is_valid())<block_end><def_stmt>test_artifact_creator_system_form_filled self<block_start>"""test minimum form requirements / VALID"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object 
artifactstatus_id=Artifactstatus.objects.get(artifactstatus_name='artifactstatus_1').artifactstatus_id<line_sep># get object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep>artifacttype_2_id=Artifacttype.objects.get(artifacttype_name='artifacttype_2').artifacttype_id<line_sep># get object system_1_id=System.objects.get(system_name='system_1').system_id<line_sep>system_2_id=System.objects.get(system_name='system_2').system_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifactstatus':artifactstatus_id 'artifacttype':[artifacttype_1_id artifacttype_2_id ] 'system':[system_1_id system_2_id ] })<line_sep># compare self.assertTrue(form.is_valid())<block_end><def_stmt>test_artifact_creator_all_fields_form_filled self<block_start>"""test additional form content"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object artifactstatus_id=Artifactstatus.objects.get(artifactstatus_name='artifactstatus_1').artifactstatus_id<line_sep># get object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep>artifacttype_2_id=Artifacttype.objects.get(artifacttype_name='artifacttype_2').artifacttype_id<line_sep># get object system_1_id=System.objects.get(system_name='system_1').system_id<line_sep>system_2_id=System.objects.get(system_name='system_2').system_id<line_sep># get object tag_1_id=Tag.objects.get(tag_name='tag_1').tag_id<line_sep>tag_2_id=Tag.objects.get(tag_name='tag_2').tag_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifactstatus':artifactstatus_id 'artifacttype':[artifacttype_1_id artifacttype_2_id ] 'system':[system_1_id system_2_id ] 'tag':[tag_1_id tag_2_id ] 'artifact_note_analysisresult':'lorem ipsum' 'artifact_note_external':'lorem ipsum' 'artifact_note_internal':'lorem ipsum' 'artifact_source_path':'evil.exe' })<line_sep># compare self.assertTrue(form.is_valid())<block_end><def_stmt>test_artifact_creator_alternative_name_form_filled self<block_start>"""test custom field validation"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object artifactstatus_id=Artifactstatus.objects.get(artifactstatus_name='artifactstatus_1').artifactstatus_id<line_sep># create object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep># get object system_1_id=System.objects.get(system_name='system_1').system_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifactstatus':artifactstatus_id 'artifacttype':[artifacttype_1_id ] 'system':[system_1_id ] 'alternative_artifact_name':'alternative name' })<line_sep># compare self.assertFalse(form.is_valid())<line_sep>self.assertEqual(form.errors['alternative_artifact_name'] ['Either both or neither of the fields is required.'] )<block_end><def_stmt>test_artifact_creator_alternative_choice_form_filled self<block_start>"""test custom field validation"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object artifactstatus_id=Artifactstatus.objects.get(artifactstatus_name='artifactstatus_1').artifactstatus_id<line_sep># create object 
artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep># get object system_1_id=System.objects.get(system_name='system_1').system_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifactstatus':artifactstatus_id 'artifacttype':[artifacttype_1_id ] 'system':[system_1_id ] 'alternative_artifact_name_choice':<true> })<line_sep># compare self.assertFalse(form.is_valid())<line_sep>self.assertEqual(form.errors['alternative_artifact_name'] ['Either both or neither of the fields is required.'] )<block_end><def_stmt>test_artifact_creator_alternative_both_form_filled self<block_start>"""test custom field validation"""<line_sep># get object artifactpriority_id=Artifactpriority.objects.get(artifactpriority_name='prio_1').artifactpriority_id<line_sep># get object artifactstatus_id=Artifactstatus.objects.get(artifactstatus_name='artifactstatus_1').artifactstatus_id<line_sep># create object artifacttype_1_id=Artifacttype.objects.get(artifacttype_name='artifacttype_1').artifacttype_id<line_sep># get object system_1_id=System.objects.get(system_name='system_1').system_id<line_sep># get object form=ArtifactCreatorForm(data={'artifactpriority':artifactpriority_id 'artifactstatus':artifactstatus_id 'artifacttype':[artifacttype_1_id ] 'system':[system_1_id ] 'alternative_artifact_name_choice':<true> 'alternative_artifact_name':'alternative name' })<line_sep># compare self.assertTrue(form.is_valid())<block_end><block_end>
# Licensed to Elasticsearch B.V. under one or more contributor # license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright # ownership. Elasticsearch B.V. licenses this file to you under # the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. <import_stmt>os<import_stmt>pandas<as>pd<import_from_stmt>elasticsearch Elasticsearch<import_from_stmt>eland.common es_version<line_sep>ROOT_DIR=os.path.dirname(os.path.abspath(__file__))<line_sep># Define test files and indices ELASTICSEARCH_HOST=os.environ.get("ELASTICSEARCH_URL" os.environ.get("ELASTICSEARCH_HOST" "http://localhost:9200"))<line_sep># Define client to use in tests ES_TEST_CLIENT=Elasticsearch(ELASTICSEARCH_HOST)<line_sep>ES_VERSION=es_version(ES_TEST_CLIENT)<line_sep>FLIGHTS_INDEX_NAME="flights"<line_sep>FLIGHTS_MAPPING={"mappings":{"properties":{"AvgTicketPrice":{"type":"float"} "Cancelled":{"type":"boolean"} "Carrier":{"type":"keyword"} "Dest":{"type":"keyword"} "DestAirportID":{"type":"keyword"} "DestCityName":{"type":"keyword"} "DestCountry":{"type":"keyword"} "DestLocation":{"type":"geo_point"} "DestRegion":{"type":"keyword"} "DestWeather":{"type":"keyword"} "DistanceKilometers":{"type":"float"} "DistanceMiles":{"type":"float"} "FlightDelay":{"type":"boolean"} "FlightDelayMin":{"type":"integer"} "FlightDelayType":{"type":"keyword"} "FlightNum":{"type":"keyword"} "FlightTimeHour":{"type":"float"} "FlightTimeMin":{"type":"float"} "Origin":{"type":"keyword"} "OriginAirportID":{"type":"keyword"} "OriginCityName":{"type":"keyword"} "OriginCountry":{"type":"keyword"} "OriginLocation":{"type":"geo_point"} "OriginRegion":{"type":"keyword"} "OriginWeather":{"type":"keyword"} "dayOfWeek":{"type":"byte"} "timestamp":{"type":"date" "format":"strict_date_hour_minute_second"} }}}<line_sep>FLIGHTS_FILE_NAME=ROOT_DIR+"/flights.json.gz"<line_sep>FLIGHTS_DF_FILE_NAME=ROOT_DIR+"/flights_df.json.gz"<line_sep>FLIGHTS_SMALL_INDEX_NAME="flights_small"<line_sep>FLIGHTS_SMALL_MAPPING=FLIGHTS_MAPPING<line_sep>FLIGHTS_SMALL_FILE_NAME=ROOT_DIR+"/flights_small.json.gz"<line_sep>ECOMMERCE_INDEX_NAME="ecommerce"<line_sep>ECOMMERCE_MAPPING={"mappings":{"properties":{"category":{"type":"text" "fields":{"keyword":{"type":"keyword"}}} "currency":{"type":"keyword"} "customer_birth_date":{"type":"date"} "customer_first_name":{"type":"text" "fields":{"keyword":{"type":"keyword" "ignore_above":256}} } "customer_full_name":{"type":"text" "fields":{"keyword":{"type":"keyword" "ignore_above":256}} } "customer_gender":{"type":"text"} "customer_id":{"type":"keyword"} "customer_last_name":{"type":"text" "fields":{"keyword":{"type":"keyword" "ignore_above":256}} } "customer_phone":{"type":"keyword"} "day_of_week":{"type":"keyword"} "day_of_week_i":{"type":"integer"} "email":{"type":"keyword"} "geoip":{"properties":{"city_name":{"type":"keyword"} "continent_name":{"type":"keyword"} "country_iso_code":{"type":"keyword"} "location":{"type":"geo_point"} "region_name":{"type":"keyword"} }} "manufacturer":{"type":"text" "fields":{"keyword":{"type":"keyword"}} 
} "order_date":{"type":"date"} "order_id":{"type":"keyword"} "products":{"properties":{"_id":{"type":"text" "fields":{"keyword":{"type":"keyword" "ignore_above":256}} } "base_price":{"type":"half_float"} "base_unit_price":{"type":"half_float"} "category":{"type":"text" "fields":{"keyword":{"type":"keyword"}} } "created_on":{"type":"date"} "discount_amount":{"type":"half_float"} "discount_percentage":{"type":"half_float"} "manufacturer":{"type":"text" "fields":{"keyword":{"type":"keyword"}} } "min_price":{"type":"half_float"} "price":{"type":"half_float"} "product_id":{"type":"long"} "product_name":{"type":"text" "fields":{"keyword":{"type":"keyword"}} "analyzer":"english" } "quantity":{"type":"integer"} "sku":{"type":"keyword"} "tax_amount":{"type":"half_float"} "taxful_price":{"type":"half_float"} "taxless_price":{"type":"half_float"} "unit_discount_amount":{"type":"half_float"} }} "sku":{"type":"keyword"} "taxful_total_price":{"type":"float"} "taxless_total_price":{"type":"float"} "total_quantity":{"type":"integer"} "total_unique_products":{"type":"integer"} "type":{"type":"keyword"} "user":{"type":"keyword"} }}}<line_sep>ECOMMERCE_FILE_NAME=ROOT_DIR+"/ecommerce.json.gz"<line_sep>ECOMMERCE_DF_FILE_NAME=ROOT_DIR+"/ecommerce_df.json.gz"<line_sep>TEST_MAPPING1={"mappings":{"properties":{"city":{"type":"text" "fields":{"raw":{"type":"keyword"}}} "text":{"type":"text" "fields":{"english":{"type":"text" "analyzer":"english"}} } "origin_location":{"properties":{"lat":{"type":"text" "index_prefixes":{} "fields":{"keyword":{"type":"keyword" "ignore_above":256}} } "lon":{"type":"text" "fields":{"keyword":{"type":"keyword" "ignore_above":256}} } }} "maps-telemetry":{"properties":{"attributesPerMap":{"properties":{"dataSourcesCount":{"properties":{"avg":{"type":"long"} "max":{"type":"long"} "min":{"type":"long"} }} "emsVectorLayersCount":{"dynamic":"true" "properties":{"france_departments":{"properties":{"avg":{"type":"float"} "max":{"type":"long"} "min":{"type":"long"} }}} } }}}} "type":{"type":"keyword"} "name":{"type":"text"} "user_name":{"type":"keyword"} "email":{"type":"keyword"} "content":{"type":"text"} "tweeted_at":{"type":"date"} "dest_location":{"type":"geo_point"} "my_join_field":{"type":"join" "relations":{"question":["answer" "comment"] "answer":"vote"} } }}}<line_sep>TEST_MAPPING1_INDEX_NAME="mapping1"<line_sep>TEST_MAPPING1_EXPECTED={"city":"text" "city.raw":"keyword" "content":"text" "dest_location":"geo_point" "email":"keyword" "maps-telemetry.attributesPerMap.dataSourcesCount.avg":"long" "maps-telemetry.attributesPerMap.dataSourcesCount.max":"long" "maps-telemetry.attributesPerMap.dataSourcesCount.min":"long" "maps-telemetry.attributesPerMap.emsVectorLayersCount.france_departments.avg":"float" "maps-telemetry.attributesPerMap.emsVectorLayersCount.france_departments.max":"long" "maps-telemetry.attributesPerMap.emsVectorLayersCount.france_departments.min":"long" "my_join_field":"join" "name":"text" "origin_location.lat":"text" "origin_location.lat.keyword":"keyword" "origin_location.lon":"text" "origin_location.lon.keyword":"keyword" "text":"text" "text.english":"text" "tweeted_at":"date" "type":"keyword" "user_name":"keyword" }<line_sep>TEST_MAPPING1_EXPECTED_DF=pd.DataFrame.from_dict(data=TEST_MAPPING1_EXPECTED orient="index" columns=["es_dtype"])<line_sep>TEST_MAPPING1_EXPECTED_SOURCE_FIELD_DF=TEST_MAPPING1_EXPECTED_DF.drop(index=["city.raw" "origin_location.lat.keyword" "origin_location.lon.keyword" "text.english" 
])<line_sep>TEST_MAPPING1_EXPECTED_SOURCE_FIELD_COUNT=len(TEST_MAPPING1_EXPECTED_SOURCE_FIELD_DF.index)<line_sep>TEST_NESTED_USER_GROUP_INDEX_NAME="nested_user_group"<line_sep>TEST_NESTED_USER_GROUP_MAPPING={"mappings":{"properties":{"group":{"type":"keyword"} "user":{"properties":{"first":{"type":"keyword"} "last":{"type":"keyword"} "address":{"type":"keyword"} }} }}}<line_sep>TEST_NESTED_USER_GROUP_DOCS=[{"_index":TEST_NESTED_USER_GROUP_INDEX_NAME "_source":{"group":"amsterdam" "user":[{"first":"Manke" "last":"Nelis" "address":["Elandsgracht" "Amsterdam"] } {"first":"Johnny" "last":"Jordaan" "address":["Elandsstraat" "Amsterdam"] } ] } } {"_index":TEST_NESTED_USER_GROUP_INDEX_NAME "_source":{"group":"london" "user":[{"first":"Alice" "last":"Monkton"} {"first":"Jimmy" "last":"White" "address":["London"]} ] } } {"_index":TEST_NESTED_USER_GROUP_INDEX_NAME "_source":{"group":"new york" "user":[{"first":"Bill" "last":"Jones"}]} } ]<line_sep>
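# A minimal sketch of how the constants above might be used to rebuild a test
# index: drop, recreate with the mapping, then bulk-load documents. The helper
# name and flow are assumptions for illustration, not eland's actual harness.
from elasticsearch.helpers import bulk

def recreate_test_index(client, index_name, mapping, docs=()):
    """Drop and recreate index_name with mapping, then bulk-load docs."""
    if client.indices.exists(index=index_name):
        client.indices.delete(index=index_name)
    client.indices.create(index=index_name, body=mapping)
    if docs:
        bulk(client, docs, refresh=True)  # docs already carry their "_index"

# Example: recreate_test_index(ES_TEST_CLIENT, TEST_NESTED_USER_GROUP_INDEX_NAME,
#                              TEST_NESTED_USER_GROUP_MAPPING, TEST_NESTED_USER_GROUP_DOCS)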
<import_stmt>pytest<import_from_stmt>icevision.all *<line_sep>@pytest.fixture<def_stmt>dummy_class_map <block_start><return>ClassMap(["dummy-1" "dummy-2"] background=<none>)<block_end>@pytest.fixture<def_stmt>dummy_class_map_elaborate <block_start><return>ClassMap(["dummy-1" "dummy-2" "dummy-3" "dummy-4"] background=<none>)<block_end><def_stmt>test_classification_multilabel dummy_class_map<block_start>rec=BaseRecord([ClassificationLabelsRecordComponent(is_multilabel=<true>)])<line_sep>rec.classification.set_class_map(dummy_class_map)<line_sep>rec.classification.set_labels_by_id([0 1])<assert_stmt>rec.classification.label_ids<eq>[0 1]<assert_stmt>(rec.classification.one_hot_encoded()<eq>np.array([1 1])).all()<block_end>@pytest.mark.parametrize("label_ids" [([0 1]) ([0]) ] )<def_stmt>test_classification_single_label dummy_class_map label_ids<block_start>rec=BaseRecord([ClassificationLabelsRecordComponent(is_multilabel=<false>)])<line_sep>rec.classification.set_class_map(dummy_class_map)<line_sep>rec.classification.set_labels_by_id(label_ids)<if_stmt>len(label_ids)<g>1# label_ids == [0, 1] # Setting two labels when `is_multilabel=False` raises an error <block_start><with_stmt>pytest.raises(AutofixAbort)<block_start>rec.classification._autofix()<block_end><block_end><else_stmt># label_ids == [0] # Only one label must be assigned <block_start><assert_stmt>all(rec.classification._autofix().values())<assert_stmt>rec.classification.one_hot_encoded().sum()<eq>1<block_end><block_end>@pytest.mark.parametrize("label_ids" [([0 1 2]) ([0 1]) ([0]) ] )<def_stmt>test_one_hot_encodings dummy_class_map_elaborate label_ids<block_start>rec=BaseRecord([ClassificationLabelsRecordComponent(is_multilabel=<true>)])<line_sep>rec.classification.set_class_map(dummy_class_map_elaborate)<line_sep>rec.classification.set_labels_by_id(label_ids)<assert_stmt>all(rec.classification._autofix().values())<line_sep># Ensure we have the correct no. of labels and that they are indeed # one-hot encoded one_hot_values=rec.classification.one_hot_encoded()<assert_stmt>one_hot_values.sum()<eq>len(label_ids)<assert_stmt>np.unique(one_hot_values).tolist()<eq>[0 1]<block_end>
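# What the assertions above boil down to: one-hot encoding a list of label ids
# against a fixed class count. A standalone sketch; one_hot is not an icevision
# API, just an illustration of the expected output of one_hot_encoded().
import numpy as np

def one_hot(label_ids, num_classes):
    encoded = np.zeros(num_classes, dtype=np.int64)
    encoded[list(label_ids)] = 1  # a 1 at every labelled index, 0 elsewhere
    return encoded

assert one_hot([0, 2], num_classes=4).tolist() == [1, 0, 1, 0]
assert one_hot([0, 1], num_classes=2).sum() == 2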
<import_from_stmt>office365.runtime.client_value ClientValue<class_stmt>SharingLinkInfo(ClientValue)<block_start><def_stmt>__init__ self<block_start>""" Specifies the information about the tokenized sharing link. """<line_sep>super(SharingLinkInfo self).__init__()<line_sep>self.AllowsAnonymousAccess=<none><line_sep>self.ApplicationId=<none><line_sep>self.CreatedBy=<none><line_sep>self.PasswordProtected=<none><block_end><block_end>
""" STIX 2.0 open vocabularies and enums """<line_sep>ATTACK_MOTIVATION_ACCIDENTAL="accidental"<line_sep>ATTACK_MOTIVATION_COERCION="coercion"<line_sep>ATTACK_MOTIVATION_DOMINANCE="dominance"<line_sep>ATTACK_MOTIVATION_IDEOLOGY="ideology"<line_sep>ATTACK_MOTIVATION_NOTORIETY="notoriety"<line_sep>ATTACK_MOTIVATION_ORGANIZATIONAL_GAIN="organizational-gain"<line_sep>ATTACK_MOTIVATION_PERSONAL_GAIN="personal-gain"<line_sep>ATTACK_MOTIVATION_PERSONAL_SATISFACTION="personal-satisfaction"<line_sep>ATTACK_MOTIVATION_REVENGE="revenge"<line_sep>ATTACK_MOTIVATION_UNPREDICTABLE="unpredictable"<line_sep>ATTACK_MOTIVATION=[ATTACK_MOTIVATION_ACCIDENTAL ATTACK_MOTIVATION_COERCION ATTACK_MOTIVATION_DOMINANCE ATTACK_MOTIVATION_IDEOLOGY ATTACK_MOTIVATION_NOTORIETY ATTACK_MOTIVATION_ORGANIZATIONAL_GAIN ATTACK_MOTIVATION_PERSONAL_GAIN ATTACK_MOTIVATION_PERSONAL_SATISFACTION ATTACK_MOTIVATION_REVENGE ATTACK_MOTIVATION_UNPREDICTABLE ]<line_sep>ATTACK_RESOURCE_LEVEL_INDIVIDUAL="individual"<line_sep>ATTACK_RESOURCE_LEVEL_CLUB="club"<line_sep>ATTACK_RESOURCE_LEVEL_CONTEST="contest"<line_sep>ATTACK_RESOURCE_LEVEL_TEAM="team"<line_sep>ATTACK_RESOURCE_LEVEL_ORGANIZATION="organization"<line_sep>ATTACK_RESOURCE_LEVEL_GOVERNMENT="government"<line_sep>ATTACK_RESOURCE_LEVEL=[ATTACK_RESOURCE_LEVEL_INDIVIDUAL ATTACK_RESOURCE_LEVEL_CLUB ATTACK_RESOURCE_LEVEL_CONTEST ATTACK_RESOURCE_LEVEL_TEAM ATTACK_RESOURCE_LEVEL_ORGANIZATION ATTACK_RESOURCE_LEVEL_GOVERNMENT ]<line_sep>HASHING_ALGORITHM_MD5="MD5"<line_sep>HASHING_ALGORITHM_MD6="MD6"<line_sep>HASHING_ALGORITHM_RIPEMD_160="RIPEMD-160"<line_sep>HASHING_ALGORITHM_SHA_1="SHA-1"<line_sep>HASHING_ALGORITHM_SHA_224="SHA-224"<line_sep>HASHING_ALGORITHM_SHA_256="SHA-256"<line_sep>HASHING_ALGORITHM_SHA_384="SHA-384"<line_sep>HASHING_ALGORITHM_SHA_512="SHA-512"<line_sep>HASHING_ALGORITHM_SHA3_224="SHA3-224"<line_sep>HASHING_ALGORITHM_SHA3_256="SHA3-256"<line_sep>HASHING_ALGORITHM_SHA3_384="SHA3-384"<line_sep>HASHING_ALGORITHM_SHA3_512="SHA3-512"<line_sep>HASHING_ALGORITHM_SSDEEP="ssdeep"<line_sep>HASHING_ALGORITHM_WHIRLPOOL="WHIRLPOOL"<line_sep>HASHING_ALGORITHM=[HASHING_ALGORITHM_MD5 HASHING_ALGORITHM_MD6 HASHING_ALGORITHM_RIPEMD_160 HASHING_ALGORITHM_SHA_1 HASHING_ALGORITHM_SHA_224 HASHING_ALGORITHM_SHA_256 HASHING_ALGORITHM_SHA_384 HASHING_ALGORITHM_SHA_512 HASHING_ALGORITHM_SHA3_224 HASHING_ALGORITHM_SHA3_256 HASHING_ALGORITHM_SHA3_384 HASHING_ALGORITHM_SHA3_512 HASHING_ALGORITHM_SSDEEP HASHING_ALGORITHM_WHIRLPOOL ]<line_sep>IDENTITY_CLASS_INDIVIDUAL="individual"<line_sep>IDENTITY_CLASS_GROUP="group"<line_sep>IDENTITY_CLASS_ORGANIZATION="organization"<line_sep>IDENTITY_CLASS_CLASS="class"<line_sep>IDENTITY_CLASS_UNKNOWN="unknown"<line_sep>IDENTITY_CLASS=[IDENTITY_CLASS_INDIVIDUAL IDENTITY_CLASS_GROUP IDENTITY_CLASS_ORGANIZATION IDENTITY_CLASS_CLASS IDENTITY_CLASS_UNKNOWN ]<line_sep>INDICATOR_LABEL_ANOMALOUS_ACTIVITY="anomalous-activity"<line_sep>INDICATOR_LABEL_ANONYMIZATION="anonymization"<line_sep>INDICATOR_LABEL_BENIGN="benign"<line_sep>INDICATOR_LABEL_COMPROMISED="compromised"<line_sep>INDICATOR_LABEL_MALICIOUS_ACTIVITY="malicious-activity"<line_sep>INDICATOR_LABEL_ATTRIBUTION="attribution"<line_sep>INDICATOR_LABEL=[INDICATOR_LABEL_ANOMALOUS_ACTIVITY INDICATOR_LABEL_ANONYMIZATION INDICATOR_LABEL_BENIGN INDICATOR_LABEL_COMPROMISED INDICATOR_LABEL_MALICIOUS_ACTIVITY INDICATOR_LABEL_ATTRIBUTION 
]<line_sep>INDUSTRY_SECTOR_AGRICULTURE="agriculture"<line_sep>INDUSTRY_SECTOR_AEROSPACE="aerospace"<line_sep>INDUSTRY_SECTOR_AUTOMOTIVE="automotive"<line_sep>INDUSTRY_SECTOR_COMMUNICATIONS="communications"<line_sep>INDUSTRY_SECTOR_CONSTRUCTION="construction"<line_sep>INDUSTRY_SECTOR_DEFENCE="defence"<line_sep>INDUSTRY_SECTOR_EDUCATION="education"<line_sep>INDUSTRY_SECTOR_ENERGY="energy"<line_sep>INDUSTRY_SECTOR_ENTERTAINMENT="entertainment"<line_sep>INDUSTRY_SECTOR_FINANCIAL_SERVICES="financial-services"<line_sep>INDUSTRY_SECTOR_GOVERNMENT_NATIONAL="government-national"<line_sep>INDUSTRY_SECTOR_GOVERNMENT_REGIONAL="government-regional"<line_sep>INDUSTRY_SECTOR_GOVERNMENT_LOCAL="government-local"<line_sep>INDUSTRY_SECTOR_GOVERNMENT_PUBLIC_SERVICES="government-public-services"<line_sep>INDUSTRY_SECTOR_HEALTHCARE="healthcare"<line_sep>INDUSTRY_SECTOR_HOSPITALITY_LEISURE="hospitality-leisure"<line_sep>INDUSTRY_SECTOR_INFRASTRUCTURE="infrastructure"<line_sep>INDUSTRY_SECTOR_INSURANCE="insurance"<line_sep>INDUSTRY_SECTOR_MANUFACTURING="manufacturing"<line_sep>INDUSTRY_SECTOR_MINING="mining"<line_sep>INDUSTRY_SECTOR_NON_PROFIT="non-profit"<line_sep>INDUSTRY_SECTOR_PHARMACEUTICALS="pharmaceuticals"<line_sep>INDUSTRY_SECTOR_RETAIL="retail"<line_sep>INDUSTRY_SECTOR_TECHNOLOGY="technology"<line_sep>INDUSTRY_SECTOR_TELECOMMUNICATIONS="telecommunications"<line_sep>INDUSTRY_SECTOR_TRANSPORTATION="transportation"<line_sep>INDUSTRY_SECTOR_UTILITIES="utilities"<line_sep>INDUSTRY_SECTOR=[INDUSTRY_SECTOR_AGRICULTURE INDUSTRY_SECTOR_AEROSPACE INDUSTRY_SECTOR_AUTOMOTIVE INDUSTRY_SECTOR_COMMUNICATIONS INDUSTRY_SECTOR_CONSTRUCTION INDUSTRY_SECTOR_DEFENCE INDUSTRY_SECTOR_EDUCATION INDUSTRY_SECTOR_ENERGY INDUSTRY_SECTOR_ENTERTAINMENT INDUSTRY_SECTOR_FINANCIAL_SERVICES INDUSTRY_SECTOR_GOVERNMENT_NATIONAL INDUSTRY_SECTOR_GOVERNMENT_REGIONAL INDUSTRY_SECTOR_GOVERNMENT_LOCAL INDUSTRY_SECTOR_GOVERNMENT_PUBLIC_SERVICES INDUSTRY_SECTOR_HEALTHCARE INDUSTRY_SECTOR_HOSPITALITY_LEISURE INDUSTRY_SECTOR_INFRASTRUCTURE INDUSTRY_SECTOR_INSURANCE INDUSTRY_SECTOR_MANUFACTURING INDUSTRY_SECTOR_MINING INDUSTRY_SECTOR_NON_PROFIT INDUSTRY_SECTOR_PHARMACEUTICALS INDUSTRY_SECTOR_RETAIL INDUSTRY_SECTOR_TECHNOLOGY INDUSTRY_SECTOR_TELECOMMUNICATIONS INDUSTRY_SECTOR_TRANSPORTATION INDUSTRY_SECTOR_UTILITIES ]<line_sep>MALWARE_LABEL_ADWARE="adware"<line_sep>MALWARE_LABEL_BACKDOOR="backdoor"<line_sep>MALWARE_LABEL_BOT="bot"<line_sep>MALWARE_LABEL_DDOS="ddos"<line_sep>MALWARE_LABEL_DROPPER="dropper"<line_sep>MALWARE_LABEL_EXPLOIT_KIT="exploit-kit"<line_sep>MALWARE_LABEL_KEYLOGGER="keylogger"<line_sep>MALWARE_LABEL_RANSOMWARE="ransomware"<line_sep>MALWARE_LABEL_REMOTE_ACCESS_TROJAN="remote-access-trojan"<line_sep>MALWARE_LABEL_RESOURCE_EXPLOITATION="resource-exploitation"<line_sep>MALWARE_LABEL_ROGUE_SECURITY_SOFTWARE="rogue-security-software"<line_sep>MALWARE_LABEL_ROOTKIT="rootkit"<line_sep>MALWARE_LABEL_SCREEN_CAPTURE="screen-capture"<line_sep>MALWARE_LABEL_SPYWARE="spyware"<line_sep>MALWARE_LABEL_TROJAN="trojan"<line_sep>MALWARE_LABEL_VIRUS="virus"<line_sep>MALWARE_LABEL_WORM="worm"<line_sep>MALWARE_LABEL=[MALWARE_LABEL_ADWARE MALWARE_LABEL_BACKDOOR MALWARE_LABEL_BOT MALWARE_LABEL_DDOS MALWARE_LABEL_DROPPER MALWARE_LABEL_EXPLOIT_KIT MALWARE_LABEL_KEYLOGGER MALWARE_LABEL_RANSOMWARE MALWARE_LABEL_REMOTE_ACCESS_TROJAN MALWARE_LABEL_RESOURCE_EXPLOITATION MALWARE_LABEL_ROGUE_SECURITY_SOFTWARE MALWARE_LABEL_ROOTKIT MALWARE_LABEL_SCREEN_CAPTURE MALWARE_LABEL_SPYWARE MALWARE_LABEL_TROJAN MALWARE_LABEL_VIRUS MALWARE_LABEL_WORM 
]<line_sep>REPORT_LABEL_THREAT_REPORT="threat-report"<line_sep>REPORT_LABEL_ATTACK_PATTERN="attack-pattern"<line_sep>REPORT_LABEL_CAMPAIGN="campaign"<line_sep>REPORT_LABEL_IDENTITY="identity"<line_sep>REPORT_LABEL_INDICATOR="indicator"<line_sep>REPORT_LABEL_INTRUSION_SET="intrusion-set"<line_sep>REPORT_LABEL_MALWARE="malware"<line_sep>REPORT_LABEL_OBSERVED_DATA="observed-data"<line_sep>REPORT_LABEL_THREAT_ACTOR="threat-actor"<line_sep>REPORT_LABEL_TOOL="tool"<line_sep>REPORT_LABEL_VULNERABILITY="vulnerability"<line_sep>REPORT_LABEL=[REPORT_LABEL_THREAT_REPORT REPORT_LABEL_ATTACK_PATTERN REPORT_LABEL_CAMPAIGN REPORT_LABEL_IDENTITY REPORT_LABEL_INDICATOR REPORT_LABEL_INTRUSION_SET REPORT_LABEL_MALWARE REPORT_LABEL_OBSERVED_DATA REPORT_LABEL_THREAT_ACTOR REPORT_LABEL_TOOL REPORT_LABEL_VULNERABILITY ]<line_sep>THREAT_ACTOR_LABEL_ACTIVIST="activist"<line_sep>THREAT_ACTOR_LABEL_COMPETITOR="competitor"<line_sep>THREAT_ACTOR_LABEL_CRIME_SYNDICATE="crime-syndicate"<line_sep>THREAT_ACTOR_LABEL_CRIMINAL="criminal"<line_sep>THREAT_ACTOR_LABEL_HACKER="hacker"<line_sep>THREAT_ACTOR_LABEL_INSIDER_ACCIDENTAL="insider-accidental"<line_sep>THREAT_ACTOR_LABEL_INSIDER_DISGRUNTLED="insider-disgruntled"<line_sep>THREAT_ACTOR_LABEL_NATION_STATE="nation-state"<line_sep>THREAT_ACTOR_LABEL_SENSATIONALIST="sensationalist"<line_sep>THREAT_ACTOR_LABEL_SPY="spy"<line_sep>THREAT_ACTOR_LABEL_TERRORIST="terrorist"<line_sep>THREAT_ACTOR_LABEL=[THREAT_ACTOR_LABEL_ACTIVIST THREAT_ACTOR_LABEL_COMPETITOR THREAT_ACTOR_LABEL_CRIME_SYNDICATE THREAT_ACTOR_LABEL_CRIMINAL THREAT_ACTOR_LABEL_HACKER THREAT_ACTOR_LABEL_INSIDER_ACCIDENTAL THREAT_ACTOR_LABEL_INSIDER_DISGRUNTLED THREAT_ACTOR_LABEL_NATION_STATE THREAT_ACTOR_LABEL_SENSATIONALIST THREAT_ACTOR_LABEL_SPY THREAT_ACTOR_LABEL_TERRORIST ]<line_sep>THREAT_ACTOR_ROLE_AGENT="agent"<line_sep>THREAT_ACTOR_ROLE_DIRECTOR="director"<line_sep>THREAT_ACTOR_ROLE_INDEPENDENT="independent"<line_sep>THREAT_ACTOR_ROLE_INFRASTRUCTURE_ARCHITECT="infrastructure-architect"<line_sep>THREAT_ACTOR_ROLE_INFRASTRUCTURE_OPERATOR="infrastructure-operator"<line_sep>THREAT_ACTOR_ROLE_MALWARE_AUTHOR="malware-author"<line_sep>THREAT_ACTOR_ROLE_SPONSOR="sponsor"<line_sep>THREAT_ACTOR_ROLE=[THREAT_ACTOR_ROLE_AGENT THREAT_ACTOR_ROLE_DIRECTOR THREAT_ACTOR_ROLE_INDEPENDENT THREAT_ACTOR_ROLE_INFRASTRUCTURE_ARCHITECT THREAT_ACTOR_ROLE_INFRASTRUCTURE_OPERATOR THREAT_ACTOR_ROLE_MALWARE_AUTHOR THREAT_ACTOR_ROLE_SPONSOR ]<line_sep>THREAT_ACTOR_SOPHISTICATION_NONE="none"<line_sep>THREAT_ACTOR_SOPHISTICATION_MINIMAL="minimal"<line_sep>THREAT_ACTOR_SOPHISTICATION_INTERMEDIATE="intermediate"<line_sep>THREAT_ACTOR_SOPHISTICATION_ADVANCED="advanced"<line_sep>THREAT_ACTOR_SOPHISTICATION_EXPERT="expert"<line_sep>THREAT_ACTOR_SOPHISTICATION_INNOVATOR="innovator"<line_sep>THREAT_ACTOR_SOPHISTICATION_STRATEGIC="strategic"<line_sep>THREAT_ACTOR_SOPHISTICATION=[THREAT_ACTOR_SOPHISTICATION_NONE THREAT_ACTOR_SOPHISTICATION_MINIMAL THREAT_ACTOR_SOPHISTICATION_INTERMEDIATE THREAT_ACTOR_SOPHISTICATION_ADVANCED THREAT_ACTOR_SOPHISTICATION_EXPERT THREAT_ACTOR_SOPHISTICATION_INNOVATOR THREAT_ACTOR_SOPHISTICATION_STRATEGIC 
]<line_sep>TOOL_LABEL_DENIAL_OF_SERVICE="denial-of-service"<line_sep>TOOL_LABEL_EXPLOITATION="exploitation"<line_sep>TOOL_LABEL_INFORMATION_GATHERING="information-gathering"<line_sep>TOOL_LABEL_NETWORK_CAPTURE="network-capture"<line_sep>TOOL_LABEL_CREDENTIAL_EXPLOITATION="credential-exploitation"<line_sep>TOOL_LABEL_REMOTE_ACCESS="remote-access"<line_sep>TOOL_LABEL_VULNERABILITY_SCANNING="vulnerability-scanning"<line_sep>TOOL_LABEL=[TOOL_LABEL_DENIAL_OF_SERVICE TOOL_LABEL_EXPLOITATION TOOL_LABEL_INFORMATION_GATHERING TOOL_LABEL_NETWORK_CAPTURE TOOL_LABEL_CREDENTIAL_EXPLOITATION TOOL_LABEL_REMOTE_ACCESS TOOL_LABEL_VULNERABILITY_SCANNING ]<line_sep>
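# STIX 2.0 "open" vocabularies are recommended values rather than closed enums,
# so a consumer would typically warn instead of reject. A hedged sketch; the
# function below is illustrative and not part of this module.
import warnings

def check_open_vocab(value, vocab, vocab_name):
    if value not in vocab:
        warnings.warn("{!r} is outside the {} open vocabulary".format(value, vocab_name))
    return value

check_open_vocab(ATTACK_MOTIVATION_PERSONAL_GAIN, ATTACK_MOTIVATION, "attack-motivation")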
# Copyright 2013 django-htmlmin authors. All rights reserved. # Use of this source code is governed by a BSD-style # license that can be found in the LICENSE file. <import_stmt>unittest<import_from_stmt>django.test.client Client<class_stmt>TestDecorator(unittest.TestCase)<block_start>@classmethod<def_stmt>setUpClass cls<block_start>cls.client=Client()<block_end><def_stmt>test_should_minify_the_content_of_a_view_decorated self<block_start>response=self.client.get('/min')<line_sep>minified=b'<html><head></head><body><p>Hello world! :D'+b'</p><div>Copyright 3000</div></body></html>'<line_sep>self.assertEqual(minified response.content)<block_end><def_stmt>test_should_not_touch_the_content_of_an_undecorated_view self<block_start># Django response bodies are bytes, so the expected markup is a bytes literal. expected=b''' <html> <body> <p>Hello world! :D</p> <div>Copyright 3000</div> </body> </html> '''<line_sep>response=self.client.get('/raw')<line_sep>self.assertEqual(expected response.content)<block_end><def_stmt>test_minify_response_should_be_false_in_not_minified_views self<block_start>response=self.client.get('/not_min')<line_sep>self.assertFalse(response.minify_response)<block_end><block_end>
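# A hedged sketch of the views the tests above exercise. The decorator import
# path follows django-htmlmin's documented API, but the view bodies and URL
# wiring are assumptions made for illustration.
from django.http import HttpResponse
from htmlmin.decorators import minified_response, not_minified_response

HTML = b"""
<html>
    <body>
        <p>Hello world! :D</p>
        <div>Copyright 3000</div>
    </body>
</html>
"""

@minified_response
def min_view(request):  # served at /min: body is minified before returning
    return HttpResponse(HTML)

def raw_view(request):  # served at /raw: no decorator, body passes through
    return HttpResponse(HTML)

@not_minified_response
def not_min_view(request):  # served at /not_min: marks minify_response False
    return HttpResponse(HTML)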
<import_from_stmt>bokeh.models HoverTool<import_from_stmt>bokeh.plotting figure output_file show<import_from_stmt>bokeh.sampledata.glucose data<line_sep>x=data.loc['2010-10-06'].index.to_series()<line_sep>y=data.loc['2010-10-06']['glucose']<line_sep># Basic plot setup p=figure(width=800 height=400 x_axis_type="datetime" tools="" toolbar_location=<none> title='Hover over points')<line_sep>p.ygrid.grid_line_color=<none><line_sep>p.background_fill_color="#fafafa"<line_sep>p.line(x y line_dash="4 4" line_width=1 color='gray')<line_sep>cr=p.circle(x y size=20 fill_color="steelblue" alpha=0.1 line_color=<none> hover_fill_color="midnightblue" hover_alpha=0.5 hover_line_color="white")<line_sep>p.add_tools(HoverTool(tooltips=<none> renderers=[cr] mode='hline'))<line_sep>output_file("hover_glyph.html" title="hover_glyph.py example")<line_sep>show(p)<line_sep>
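# The same hover-highlight pattern on synthetic data, using 'vline' hit-testing
# so every point sharing the hovered x-position lights up; purely illustrative.
import numpy as np
from bokeh.models import HoverTool
from bokeh.plotting import figure, show

xs = np.arange(50)
ys = np.random.default_rng(0).normal(size=50).cumsum()
fig = figure(width=400, height=300, tools="", toolbar_location=None)
renderer = fig.circle(xs, ys, size=12, alpha=0.2, hover_alpha=0.8)
fig.add_tools(HoverTool(tooltips=None, renderers=[renderer], mode="vline"))
show(fig)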
sbox=[0x63 0x7C 0x77 0x7B 0xF2 0x6B 0x6F 0xC5 0x30 0x01 0x67 0x2B 0xFE 0xD7 0xAB 0x76 0xCA 0x82 0xC9 0x7D 0xFA 0x59 0x47 0xF0 0xAD 0xD4 0xA2 0xAF 0x9C 0xA4 0x72 0xC0 0xB7 0xFD 0x93 0x26 0x36 0x3F 0xF7 0xCC 0x34 0xA5 0xE5 0xF1 0x71 0xD8 0x31 0x15 0x04 0xC7 0x23 0xC3 0x18 0x96 0x05 0x9A 0x07 0x12 0x80 0xE2 0xEB 0x27 0xB2 0x75 0x09 0x83 0x2C 0x1A 0x1B 0x6E 0x5A 0xA0 0x52 0x3B 0xD6 0xB3 0x29 0xE3 0x2F 0x84 0x53 0xD1 0x00 0xED 0x20 0xFC 0xB1 0x5B 0x6A 0xCB 0xBE 0x39 0x4A 0x4C 0x58 0xCF 0xD0 0xEF 0xAA 0xFB 0x43 0x4D 0x33 0x85 0x45 0xF9 0x02 0x7F 0x50 0x3C 0x9F 0xA8 0x51 0xA3 0x40 0x8F 0x92 0x9D 0x38 0xF5 0xBC 0xB6 0xDA 0x21 0x10 0xFF 0xF3 0xD2 0xCD 0x0C 0x13 0xEC 0x5F 0x97 0x44 0x17 0xC4 0xA7 0x7E 0x3D 0x64 0x5D 0x19 0x73 0x60 0x81 0x4F 0xDC 0x22 0x2A 0x90 0x88 0x46 0xEE 0xB8 0x14 0xDE 0x5E 0x0B 0xDB 0xE0 0x32 0x3A 0x0A 0x49 0x06 0x24 0x5C 0xC2 0xD3 0xAC 0x62 0x91 0x95 0xE4 0x79 0xE7 0xC8 0x37 0x6D 0x8D 0xD5 0x4E 0xA9 0x6C 0x56 0xF4 0xEA 0x65 0x7A 0xAE 0x08 0xBA 0x78 0x25 0x2E 0x1C 0xA6 0xB4 0xC6 0xE8 0xDD 0x74 0x1F 0x4B 0xBD 0x8B 0x8A 0x70 0x3E 0xB5 0x66 0x48 0x03 0xF6 0x0E 0x61 0x35 0x57 0xB9 0x86 0xC1 0x1D 0x9E 0xE1 0xF8 0x98 0x11 0x69 0xD9 0x8E 0x94 0x9B 0x1E 0x87 0xE9 0xCE 0x55 0x28 0xDF 0x8C 0xA1 0x89 0x0D 0xBF 0xE6 0x42 0x68 0x41 0x99 0x2D 0x0F 0xB0 0x54 0xBB 0x16]<line_sep>sboxInv=[0x52 0x09 0x6A 0xD5 0x30 0x36 0xA5 0x38 0xBF 0x40 0xA3 0x9E 0x81 0xF3 0xD7 0xFB 0x7C 0xE3 0x39 0x82 0x9B 0x2F 0xFF 0x87 0x34 0x8E 0x43 0x44 0xC4 0xDE 0xE9 0xCB 0x54 0x7B 0x94 0x32 0xA6 0xC2 0x23 0x3D 0xEE 0x4C 0x95 0x0B 0x42 0xFA 0xC3 0x4E 0x08 0x2E 0xA1 0x66 0x28 0xD9 0x24 0xB2 0x76 0x5B 0xA2 0x49 0x6D 0x8B 0xD1 0x25 0x72 0xF8 0xF6 0x64 0x86 0x68 0x98 0x16 0xD4 0xA4 0x5C 0xCC 0x5D 0x65 0xB6 0x92 0x6C 0x70 0x48 0x50 0xFD 0xED 0xB9 0xDA 0x5E 0x15 0x46 0x57 0xA7 0x8D 0x9D 0x84 0x90 0xD8 0xAB 0x00 0x8C 0xBC 0xD3 0x0A 0xF7 0xE4 0x58 0x05 0xB8 0xB3 0x45 0x06 0xD0 0x2C 0x1E 0x8F 0xCA 0x3F 0x0F 0x02 0xC1 0xAF 0xBD 0x03 0x01 0x13 0x8A 0x6B 0x3A 0x91 0x11 0x41 0x4F 0x67 0xDC 0xEA 0x97 0xF2 0xCF 0xCE 0xF0 0xB4 0xE6 0x73 0x96 0xAC 0x74 0x22 0xE7 0xAD 0x35 0x85 0xE2 0xF9 0x37 0xE8 0x1C 0x75 0xDF 0x6E 0x47 0xF1 0x1A 0x71 0x1D 0x29 0xC5 0x89 0x6F 0xB7 0x62 0x0E 0xAA 0x18 0xBE 0x1B 0xFC 0x56 0x3E 0x4B 0xC6 0xD2 0x79 0x20 0x9A 0xDB 0xC0 0xFE 0x78 0xCD 0x5A 0xF4 0x1F 0xDD 0xA8 0x33 0x88 0x07 0xC7 0x31 0xB1 0x12 0x10 0x59 0x27 0x80 0xEC 0x5F 0x60 0x51 0x7F 0xA9 0x19 0xB5 0x4A 0x0D 0x2D 0xE5 0x7A 0x9F 0x93 0xC9 0x9C 0xEF 0xA0 0xE0 0x3B 0x4D 0xAE 0x2A 0xF5 0xB0 0xC8 0xEB 0xBB 0x3C 0x83 0x53 0x99 0x61 0x17 0x2B 0x04 0x7E 0xBA 0x77 0xD6 0x26 0xE1 0x69 0x14 0x63 0x55 0x21 0x0C 0x7D]<line_sep>rCon=[0x8d 0x01 0x02 0x04 0x08 0x10 0x20 0x40 0x80 0x1b 0x36 0x6c 0xd8 0xab 0x4d 0x9a 0x2f 0x5e 0xbc 0x63 0xc6 0x97 0x35 0x6a 0xd4 0xb3 0x7d 0xfa 0xef 0xc5 0x91 0x39 0x72 0xe4 0xd3 0xbd 0x61 0xc2 0x9f 0x25 0x4a 0x94 0x33 0x66 0xcc 0x83 0x1d 0x3a 0x74 0xe8 0xcb 0x8d 0x01 0x02 0x04 0x08 0x10 0x20 0x40 0x80 0x1b 0x36 0x6c 0xd8 0xab 0x4d 0x9a 0x2f 0x5e 0xbc 0x63 0xc6 0x97 0x35 0x6a 0xd4 0xb3 0x7d 0xfa 0xef 0xc5 0x91 0x39 0x72 0xe4 0xd3 0xbd 0x61 0xc2 0x9f 0x25 0x4a 0x94 0x33 0x66 0xcc 0x83 0x1d 0x3a 0x74 0xe8 0xcb 0x8d 0x01 0x02 0x04 0x08 0x10 0x20 0x40 0x80 0x1b 0x36 0x6c 0xd8 0xab 0x4d 0x9a 0x2f 0x5e 0xbc 0x63 0xc6 0x97 0x35 0x6a 0xd4 0xb3 0x7d 0xfa 0xef 0xc5 0x91 0x39 0x72 0xe4 0xd3 0xbd 0x61 0xc2 0x9f 0x25 0x4a 0x94 0x33 0x66 0xcc 0x83 0x1d 0x3a 0x74 0xe8 0xcb 0x8d 0x01 0x02 0x04 0x08 0x10 0x20 0x40 0x80 0x1b 0x36 0x6c 0xd8 0xab 0x4d 0x9a 0x2f 0x5e 0xbc 0x63 0xc6 0x97 0x35 0x6a 0xd4 0xb3 0x7d 0xfa 0xef 0xc5 0x91 0x39 0x72 0xe4 0xd3 0xbd 0x61 
0xc2 0x9f 0x25 0x4a 0x94 0x33 0x66 0xcc 0x83 0x1d 0x3a 0x74 0xe8 0xcb 0x8d 0x01 0x02 0x04 0x08 0x10 0x20 0x40 0x80 0x1b 0x36 0x6c 0xd8 0xab 0x4d 0x9a 0x2f 0x5e 0xbc 0x63 0xc6 0x97 0x35 0x6a 0xd4 0xb3 0x7d 0xfa 0xef 0xc5 0x91 0x39 0x72 0xe4 0xd3 0xbd 0x61 0xc2 0x9f 0x25 0x4a 0x94 0x33 0x66 0xcc 0x83 0x1d 0x3a 0x74 0xe8 0xcb 0x8d]<line_sep>vector_table=[2 3 1 1 1 2 3 1 1 1 2 3 3 1 1 2]<line_sep>table_2=[0x00 0x02 0x04 0x06 0x08 0x0a 0x0c 0x0e 0x10 0x12 0x14 0x16 0x18 0x1a 0x1c 0x1e 0x20 0x22 0x24 0x26 0x28 0x2a 0x2c 0x2e 0x30 0x32 0x34 0x36 0x38 0x3a 0x3c 0x3e 0x40 0x42 0x44 0x46 0x48 0x4a 0x4c 0x4e 0x50 0x52 0x54 0x56 0x58 0x5a 0x5c 0x5e 0x60 0x62 0x64 0x66 0x68 0x6a 0x6c 0x6e 0x70 0x72 0x74 0x76 0x78 0x7a 0x7c 0x7e 0x80 0x82 0x84 0x86 0x88 0x8a 0x8c 0x8e 0x90 0x92 0x94 0x96 0x98 0x9a 0x9c 0x9e 0xa0 0xa2 0xa4 0xa6 0xa8 0xaa 0xac 0xae 0xb0 0xb2 0xb4 0xb6 0xb8 0xba 0xbc 0xbe 0xc0 0xc2 0xc4 0xc6 0xc8 0xca 0xcc 0xce 0xd0 0xd2 0xd4 0xd6 0xd8 0xda 0xdc 0xde 0xe0 0xe2 0xe4 0xe6 0xe8 0xea 0xec 0xee 0xf0 0xf2 0xf4 0xf6 0xf8 0xfa 0xfc 0xfe 0x1b 0x19 0x1f 0x1d 0x13 0x11 0x17 0x15 0x0b 0x09 0x0f 0x0d 0x03 0x01 0x07 0x05 0x3b 0x39 0x3f 0x3d 0x33 0x31 0x37 0x35 0x2b 0x29 0x2f 0x2d 0x23 0x21 0x27 0x25 0x5b 0x59 0x5f 0x5d 0x53 0x51 0x57 0x55 0x4b 0x49 0x4f 0x4d 0x43 0x41 0x47 0x45 0x7b 0x79 0x7f 0x7d 0x73 0x71 0x77 0x75 0x6b 0x69 0x6f 0x6d 0x63 0x61 0x67 0x65 0x9b 0x99 0x9f 0x9d 0x93 0x91 0x97 0x95 0x8b 0x89 0x8f 0x8d 0x83 0x81 0x87 0x85 0xbb 0xb9 0xbf 0xbd 0xb3 0xb1 0xb7 0xb5 0xab 0xa9 0xaf 0xad 0xa3 0xa1 0xa7 0xa5 0xdb 0xd9 0xdf 0xdd 0xd3 0xd1 0xd7 0xd5 0xcb 0xc9 0xcf 0xcd 0xc3 0xc1 0xc7 0xc5 0xfb 0xf9 0xff 0xfd 0xf3 0xf1 0xf7 0xf5 0xeb 0xe9 0xef 0xed 0xe3 0xe1 0xe7 0xe5]<line_sep>table_3=[0x00 0x03 0x06 0x05 0x0c 0x0f 0x0a 0x09 0x18 0x1b 0x1e 0x1d 0x14 0x17 0x12 0x11 0x30 0x33 0x36 0x35 0x3c 0x3f 0x3a 0x39 0x28 0x2b 0x2e 0x2d 0x24 0x27 0x22 0x21 0x60 0x63 0x66 0x65 0x6c 0x6f 0x6a 0x69 0x78 0x7b 0x7e 0x7d 0x74 0x77 0x72 0x71 0x50 0x53 0x56 0x55 0x5c 0x5f 0x5a 0x59 0x48 0x4b 0x4e 0x4d 0x44 0x47 0x42 0x41 0xc0 0xc3 0xc6 0xc5 0xcc 0xcf 0xca 0xc9 0xd8 0xdb 0xde 0xdd 0xd4 0xd7 0xd2 0xd1 0xf0 0xf3 0xf6 0xf5 0xfc 0xff 0xfa 0xf9 0xe8 0xeb 0xee 0xed 0xe4 0xe7 0xe2 0xe1 0xa0 0xa3 0xa6 0xa5 0xac 0xaf 0xaa 0xa9 0xb8 0xbb 0xbe 0xbd 0xb4 0xb7 0xb2 0xb1 0x90 0x93 0x96 0x95 0x9c 0x9f 0x9a 0x99 0x88 0x8b 0x8e 0x8d 0x84 0x87 0x82 0x81 0x9b 0x98 0x9d 0x9e 0x97 0x94 0x91 0x92 0x83 0x80 0x85 0x86 0x8f 0x8c 0x89 0x8a 0xab 0xa8 0xad 0xae 0xa7 0xa4 0xa1 0xa2 0xb3 0xb0 0xb5 0xb6 0xbf 0xbc 0xb9 0xba 0xfb 0xf8 0xfd 0xfe 0xf7 0xf4 0xf1 0xf2 0xe3 0xe0 0xe5 0xe6 0xef 0xec 0xe9 0xea 0xcb 0xc8 0xcd 0xce 0xc7 0xc4 0xc1 0xc2 0xd3 0xd0 0xd5 0xd6 0xdf 0xdc 0xd9 0xda 0x5b 0x58 0x5d 0x5e 0x57 0x54 0x51 0x52 0x43 0x40 0x45 0x46 0x4f 0x4c 0x49 0x4a 0x6b 0x68 0x6d 0x6e 0x67 0x64 0x61 0x62 0x73 0x70 0x75 0x76 0x7f 0x7c 0x79 0x7a 0x3b 0x38 0x3d 0x3e 0x37 0x34 0x31 0x32 0x23 0x20 0x25 0x26 0x2f 0x2c 0x29 0x2a 0x0b 0x08 0x0d 0x0e 0x07 0x04 0x01 0x02 0x13 0x10 0x15 0x16 0x1f 0x1c 0x19 0x1a]<line_sep>table_9=[0x00 0x09 0x12 0x1b 0x24 0x2d 0x36 0x3f 0x48 0x41 0x5a 0x53 0x6c 0x65 0x7e 0x77 0x90 0x99 0x82 0x8b 0xb4 0xbd 0xa6 0xaf 0xd8 0xd1 0xca 0xc3 0xfc 0xf5 0xee 0xe7 0x3b 0x32 0x29 0x20 0x1f 0x16 0x0d 0x04 0x73 0x7a 0x61 0x68 0x57 0x5e 0x45 0x4c 0xab 0xa2 0xb9 0xb0 0x8f 0x86 0x9d 0x94 0xe3 0xea 0xf1 0xf8 0xc7 0xce 0xd5 0xdc 0x76 0x7f 0x64 0x6d 0x52 0x5b 0x40 0x49 0x3e 0x37 0x2c 0x25 0x1a 0x13 0x08 0x01 0xe6 0xef 0xf4 0xfd 0xc2 0xcb 0xd0 0xd9 0xae 0xa7 0xbc 0xb5 0x8a 0x83 0x98 0x91 0x4d 0x44 0x5f 0x56 0x69 0x60 0x7b 0x72 0x05 0x0c 0x17 0x1e 0x21 0x28 
0x33 0x3a 0xdd 0xd4 0xcf 0xc6 0xf9 0xf0 0xeb 0xe2 0x95 0x9c 0x87 0x8e 0xb1 0xb8 0xa3 0xaa 0xec 0xe5 0xfe 0xf7 0xc8 0xc1 0xda 0xd3 0xa4 0xad 0xb6 0xbf 0x80 0x89 0x92 0x9b 0x7c 0x75 0x6e 0x67 0x58 0x51 0x4a 0x43 0x34 0x3d 0x26 0x2f 0x10 0x19 0x02 0x0b 0xd7 0xde 0xc5 0xcc 0xf3 0xfa 0xe1 0xe8 0x9f 0x96 0x8d 0x84 0xbb 0xb2 0xa9 0xa0 0x47 0x4e 0x55 0x5c 0x63 0x6a 0x71 0x78 0x0f 0x06 0x1d 0x14 0x2b 0x22 0x39 0x30 0x9a 0x93 0x88 0x81 0xbe 0xb7 0xac 0xa5 0xd2 0xdb 0xc0 0xc9 0xf6 0xff 0xe4 0xed 0x0a 0x03 0x18 0x11 0x2e 0x27 0x3c 0x35 0x42 0x4b 0x50 0x59 0x66 0x6f 0x74 0x7d 0xa1 0xa8 0xb3 0xba 0x85 0x8c 0x97 0x9e 0xe9 0xe0 0xfb 0xf2 0xcd 0xc4 0xdf 0xd6 0x31 0x38 0x23 0x2a 0x15 0x1c 0x07 0x0e 0x79 0x70 0x6b 0x62 0x5d 0x54 0x4f 0x46]<line_sep>table_11=[0x00 0x0b 0x16 0x1d 0x2c 0x27 0x3a 0x31 0x58 0x53 0x4e 0x45 0x74 0x7f 0x62 0x69 0xb0 0xbb 0xa6 0xad 0x9c 0x97 0x8a 0x81 0xe8 0xe3 0xfe 0xf5 0xc4 0xcf 0xd2 0xd9 0x7b 0x70 0x6d 0x66 0x57 0x5c 0x41 0x4a 0x23 0x28 0x35 0x3e 0x0f 0x04 0x19 0x12 0xcb 0xc0 0xdd 0xd6 0xe7 0xec 0xf1 0xfa 0x93 0x98 0x85 0x8e 0xbf 0xb4 0xa9 0xa2 0xf6 0xfd 0xe0 0xeb 0xda 0xd1 0xcc 0xc7 0xae 0xa5 0xb8 0xb3 0x82 0x89 0x94 0x9f 0x46 0x4d 0x50 0x5b 0x6a 0x61 0x7c 0x77 0x1e 0x15 0x08 0x03 0x32 0x39 0x24 0x2f 0x8d 0x86 0x9b 0x90 0xa1 0xaa 0xb7 0xbc 0xd5 0xde 0xc3 0xc8 0xf9 0xf2 0xef 0xe4 0x3d 0x36 0x2b 0x20 0x11 0x1a 0x07 0x0c 0x65 0x6e 0x73 0x78 0x49 0x42 0x5f 0x54 0xf7 0xfc 0xe1 0xea 0xdb 0xd0 0xcd 0xc6 0xaf 0xa4 0xb9 0xb2 0x83 0x88 0x95 0x9e 0x47 0x4c 0x51 0x5a 0x6b 0x60 0x7d 0x76 0x1f 0x14 0x09 0x02 0x33 0x38 0x25 0x2e 0x8c 0x87 0x9a 0x91 0xa0 0xab 0xb6 0xbd 0xd4 0xdf 0xc2 0xc9 0xf8 0xf3 0xee 0xe5 0x3c 0x37 0x2a 0x21 0x10 0x1b 0x06 0x0d 0x64 0x6f 0x72 0x79 0x48 0x43 0x5e 0x55 0x01 0x0a 0x17 0x1c 0x2d 0x26 0x3b 0x30 0x59 0x52 0x4f 0x44 0x75 0x7e 0x63 0x68 0xb1 0xba 0xa7 0xac 0x9d 0x96 0x8b 0x80 0xe9 0xe2 0xff 0xf4 0xc5 0xce 0xd3 0xd8 0x7a 0x71 0x6c 0x67 0x56 0x5d 0x40 0x4b 0x22 0x29 0x34 0x3f 0x0e 0x05 0x18 0x13 0xca 0xc1 0xdc 0xd7 0xe6 0xed 0xf0 0xfb 0x92 0x99 0x84 0x8f 0xbe 0xb5 0xa8 0xa3]<line_sep>table_13=[0x00 0x0d 0x1a 0x17 0x34 0x39 0x2e 0x23 0x68 0x65 0x72 0x7f 0x5c 0x51 0x46 0x4b 0xd0 0xdd 0xca 0xc7 0xe4 0xe9 0xfe 0xf3 0xb8 0xb5 0xa2 0xaf 0x8c 0x81 0x96 0x9b 0xbb 0xb6 0xa1 0xac 0x8f 0x82 0x95 0x98 0xd3 0xde 0xc9 0xc4 0xe7 0xea 0xfd 0xf0 0x6b 0x66 0x71 0x7c 0x5f 0x52 0x45 0x48 0x03 0x0e 0x19 0x14 0x37 0x3a 0x2d 0x20 0x6d 0x60 0x77 0x7a 0x59 0x54 0x43 0x4e 0x05 0x08 0x1f 0x12 0x31 0x3c 0x2b 0x26 0xbd 0xb0 0xa7 0xaa 0x89 0x84 0x93 0x9e 0xd5 0xd8 0xcf 0xc2 0xe1 0xec 0xfb 0xf6 0xd6 0xdb 0xcc 0xc1 0xe2 0xef 0xf8 0xf5 0xbe 0xb3 0xa4 0xa9 0x8a 0x87 0x90 0x9d 0x06 0x0b 0x1c 0x11 0x32 0x3f 0x28 0x25 0x6e 0x63 0x74 0x79 0x5a 0x57 0x40 0x4d 0xda 0xd7 0xc0 0xcd 0xee 0xe3 0xf4 0xf9 0xb2 0xbf 0xa8 0xa5 0x86 0x8b 0x9c 0x91 0x0a 0x07 0x10 0x1d 0x3e 0x33 0x24 0x29 0x62 0x6f 0x78 0x75 0x56 0x5b 0x4c 0x41 0x61 0x6c 0x7b 0x76 0x55 0x58 0x4f 0x42 0x09 0x04 0x13 0x1e 0x3d 0x30 0x27 0x2a 0xb1 0xbc 0xab 0xa6 0x85 0x88 0x9f 0x92 0xd9 0xd4 0xc3 0xce 0xed 0xe0 0xf7 0xfa 0xb7 0xba 0xad 0xa0 0x83 0x8e 0x99 0x94 0xdf 0xd2 0xc5 0xc8 0xeb 0xe6 0xf1 0xfc 0x67 0x6a 0x7d 0x70 0x53 0x5e 0x49 0x44 0x0f 0x02 0x15 0x18 0x3b 0x36 0x21 0x2c 0x0c 0x01 0x16 0x1b 0x38 0x35 0x22 0x2f 0x64 0x69 0x7e 0x73 0x50 0x5d 0x4a 0x47 0xdc 0xd1 0xc6 0xcb 0xe8 0xe5 0xf2 0xff 0xb4 0xb9 0xae 0xa3 0x80 0x8d 0x9a 0x97]<line_sep>table_14=[0x00 0x0e 0x1c 0x12 0x38 0x36 0x24 0x2a 0x70 0x7e 0x6c 0x62 0x48 0x46 0x54 0x5a 0xe0 0xee 0xfc 0xf2 0xd8 0xd6 0xc4 0xca 0x90 0x9e 0x8c 0x82 0xa8 0xa6 0xb4 0xba 0xdb 0xd5 0xc7 0xc9 0xe3 0xed 0xff 0xf1 0xab 
0xa5 0xb7 0xb9 0x93 0x9d 0x8f 0x81 0x3b 0x35 0x27 0x29 0x03 0x0d 0x1f 0x11 0x4b 0x45 0x57 0x59 0x73 0x7d 0x6f 0x61 0xad 0xa3 0xb1 0xbf 0x95 0x9b 0x89 0x87 0xdd 0xd3 0xc1 0xcf 0xe5 0xeb 0xf9 0xf7 0x4d 0x43 0x51 0x5f 0x75 0x7b 0x69 0x67 0x3d 0x33 0x21 0x2f 0x05 0x0b 0x19 0x17 0x76 0x78 0x6a 0x64 0x4e 0x40 0x52 0x5c 0x06 0x08 0x1a 0x14 0x3e 0x30 0x22 0x2c 0x96 0x98 0x8a 0x84 0xae 0xa0 0xb2 0xbc 0xe6 0xe8 0xfa 0xf4 0xde 0xd0 0xc2 0xcc 0x41 0x4f 0x5d 0x53 0x79 0x77 0x65 0x6b 0x31 0x3f 0x2d 0x23 0x09 0x07 0x15 0x1b 0xa1 0xaf 0xbd 0xb3 0x99 0x97 0x85 0x8b 0xd1 0xdf 0xcd 0xc3 0xe9 0xe7 0xf5 0xfb 0x9a 0x94 0x86 0x88 0xa2 0xac 0xbe 0xb0 0xea 0xe4 0xf6 0xf8 0xd2 0xdc 0xce 0xc0 0x7a 0x74 0x66 0x68 0x42 0x4c 0x5e 0x50 0x0a 0x04 0x16 0x18 0x32 0x3c 0x2e 0x20 0xec 0xe2 0xf0 0xfe 0xd4 0xda 0xc8 0xc6 0x9c 0x92 0x80 0x8e 0xa4 0xaa 0xb8 0xb6 0x0c 0x02 0x10 0x1e 0x34 0x3a 0x28 0x26 0x7c 0x72 0x60 0x6e 0x44 0x4a 0x58 0x56 0x37 0x39 0x2b 0x25 0x0f 0x01 0x13 0x1d 0x47 0x49 0x5b 0x55 0x7f 0x71 0x63 0x6d 0xd7 0xd9 0xcb 0xc5 0xef 0xe1 0xf3 0xfd 0xa7 0xa9 0xbb 0xb5 0x9f 0x91 0x83 0x8d]<line_sep>
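# How the tables above are typically combined: AES MixColumns multiplies each
# 4-byte state column by the matrix in vector_table over GF(2^8), with table_2
# and table_3 supplying the precomputed multiplications. Sketch for illustration.
def mix_single_column(column):
    mul = {1: lambda b: b, 2: lambda b: table_2[b], 3: lambda b: table_3[b]}
    return [
        mul[vector_table[4 * row]](column[0])
        ^ mul[vector_table[4 * row + 1]](column[1])
        ^ mul[vector_table[4 * row + 2]](column[2])
        ^ mul[vector_table[4 * row + 3]](column[3])
        for row in range(4)
    ]

# Well-known MixColumns test vector:
assert mix_single_column([0xdb, 0x13, 0x53, 0x45]) == [0x8e, 0x4d, 0xa1, 0xbc]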
TAG_TYPE="#type"<line_sep>TAG_XML="#xml"<line_sep>TAG_VERSION="@version"<line_sep>TAG_UIVERSION="@uiVersion"<line_sep>TAG_NAMESPACE="@xmlns"<line_sep>TAG_NAME="@name"<line_sep>TAG_META="meta"<line_sep>TAG_FORM='form'<line_sep>ATTACHMENT_NAME="form.xml"<line_sep>MAGIC_PROPERTY='xml_submission_file'<line_sep>RESERVED_WORDS=[TAG_TYPE TAG_XML TAG_VERSION TAG_UIVERSION TAG_NAMESPACE TAG_NAME TAG_META ATTACHMENT_NAME 'case' MAGIC_PROPERTY]<line_sep>DEVICE_LOG_XMLNS='http://code.javarosa.org/devicereport'<line_sep>
<import_stmt>gym<import_stmt>sys<import_stmt>rlschool.metamaze<def_stmt>test_2d_maze max_iteration<block_start>print("Testing 2D Maze...")<line_sep>maze_env=gym.make("meta-maze-2D-v0" enable_render=<false>)<line_sep>cell_scale=9<line_sep>task=maze_env.sample_task(cell_scale=cell_scale)<line_sep>maze_env.set_task(task)<line_sep>iteration=0<while_stmt>iteration<l>max_iteration<block_start>iteration<augadd>1<line_sep>maze_env.reset()<line_sep>done=<false><line_sep>sum_reward=0<while_stmt><not>done<block_start>state,reward,done,_=maze_env.step(maze_env.action_space.sample())<line_sep>sum_reward<augadd>reward<block_end>print("Episode is over! You got %.1f score."%sum_reward)<if_stmt>(sum_reward<g>0.0)<block_start>cell_scale<augadd>2# gradually increase the difficulty print("Increase the difficulty, cell_scale = %d"%cell_scale)<block_end>task=maze_env.sample_task(cell_scale=cell_scale)<line_sep>maze_env.set_task(task)<block_end><block_end><def_stmt>test_3d_maze max_iteration<block_start>print("Testing 3D Maze...")<line_sep>maze_env=gym.make("meta-maze-3D-v0" enable_render=<false>)<line_sep>cell_scale=9<line_sep>task=maze_env.sample_task(cell_scale=cell_scale cell_size=2.0 wall_height=3.2)<line_sep>maze_env.set_task(task)<line_sep>iteration=0<while_stmt>iteration<l>max_iteration<block_start>iteration<augadd>1<line_sep>maze_env.reset()<line_sep>done=<false><line_sep>sum_reward=0<while_stmt><not>done<block_start>state,reward,done,_=maze_env.step(maze_env.action_space.sample())<line_sep>sum_reward<augadd>reward<block_end>print("Episode is over! You got %.1f score."%sum_reward)<if_stmt>(sum_reward<g>0.0)<block_start>cell_scale<augadd>2# gradually increase the difficulty print("Increase the difficulty, cell_scale = %d"%cell_scale)<block_end>task=maze_env.sample_task(cell_scale=cell_scale cell_size=2.0 wall_height=3.2)<line_sep>maze_env.set_task(task)<block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start>test_2d_maze(100)<line_sep>test_3d_maze(100)<block_end>
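# The two tests above differ only in the env id and the sample_task kwargs; a
# possible refactor collapsing them into one driver. The helper mirrors the
# functions above but is a sketch, not repository code.
def run_curriculum(env_name, max_iteration, **task_kwargs):
    env = gym.make(env_name, enable_render=False)
    cell_scale = 9
    env.set_task(env.sample_task(cell_scale=cell_scale, **task_kwargs))
    for _ in range(max_iteration):
        env.reset()
        done, score = False, 0.0
        while not done:
            _, reward, done, _ = env.step(env.action_space.sample())
            score += reward
        print("Episode is over! You got %.1f score." % score)
        if score > 0.0:
            cell_scale += 2  # raise difficulty once the random agent scores
        env.set_task(env.sample_task(cell_scale=cell_scale, **task_kwargs))

# run_curriculum("meta-maze-2D-v0", 100)
# run_curriculum("meta-maze-3D-v0", 100, cell_size=2.0, wall_height=3.2)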
<import_stmt>apsw<import_stmt>boto3<import_stmt>os<import_stmt>json<import_from_stmt>flask Blueprint Response abort current_app render_template url_for<import_from_stmt>. setup_logger<import_from_stmt>.webcommon log_application_errors flask_log_level<import_from_stmt>.webhooks get_memcache_client<line_sep>dots=Blueprint('dots' __name__)<line_sep># https://stackoverflow.com/questions/56776974/sqlite3-connect-to-a-database-in-cloud-s3 <class_stmt>S3VFS(apsw.VFS)<block_start><def_stmt>__init__ self vfsname="s3" basevfs="" cache=<none><block_start>self.vfsname=vfsname<line_sep>self.basevfs=basevfs<line_sep>self.cache=cache<line_sep>apsw.VFS.__init__(self self.vfsname self.basevfs)<block_end><def_stmt>xOpen self name flags<block_start><return>S3VFSFile(self.basevfs name flags self.cache)<block_end><block_end><class_stmt>S3VFSFile()<block_start><def_stmt>__init__ self inheritfromvfsname filename flags cache<block_start>self.s3=boto3.client('s3')<line_sep>self.cache=cache<line_sep>self.bucket=filename.uri_parameter("bucket")<line_sep>self.key=filename.filename().lstrip("/")<block_end><def_stmt>_cache_key self amount offset<block_start><return>'{bucket}/{key}/{amount}/{offset}'.format(bucket=self.bucket key=self.key amount=amount offset=offset )<block_end><def_stmt>xRead self amount offset<block_start>data=<none><if_stmt>self.cache<block_start>cache_key=self._cache_key(amount offset)<line_sep>data=self.cache.get(cache_key)<block_end><if_stmt>data<is><none># HTTP Range end offsets are inclusive, so request offset+amount-1 to read exactly amount bytes <block_start>response=self.s3.get_object(Bucket=self.bucket Key=self.key Range='bytes={}-{}'.format(offset offset+amount-1))<line_sep>data=response['Body'].read()<if_stmt>self.cache<block_start>self.cache.set(cache_key data)<block_end><block_end><return>data<block_end><def_stmt>xFileSize self<block_start>length=<none><if_stmt>self.cache<block_start>cache_key='{bucket}/{key}/size'.format(bucket=self.bucket key=self.key)<line_sep>length=self.cache.get(cache_key)<block_end><if_stmt>length<is><none><block_start>response=self.s3.head_object(Bucket=self.bucket Key=self.key)<line_sep>length=response['ContentLength']<if_stmt>self.cache<block_start>self.cache.set(cache_key length)<block_end><block_end><return>length<block_end><def_stmt>xClose self<block_start><pass><block_end><def_stmt>xFileControl self op ptr<block_start><return><false><block_end><block_end><def_stmt>get_mbtiles_connection bucket key cache<block_start>'''Open a read-only SQLite connection to an mbtiles file stored in S3.'''<line_sep>s3vfs=S3VFS(cache=cache)<line_sep><return>apsw.Connection('file:/{key}?bucket={bucket}&immutable=1'.format(bucket=bucket key=key) flags=apsw.SQLITE_OPEN_READONLY|apsw.SQLITE_OPEN_URI vfs=s3vfs.vfsname )<block_end><def_stmt>get_mbtiles_metadata bucket key cache<block_start>'''Return (zoom, lat, lon, vector-layer fields) from the mbtiles metadata table, memcache-backed.'''<if_stmt>cache<block_start>cache_key='{bucket}/{key}/metadata'.format(bucket=bucket key=key)<line_sep>cached=cache.get(cache_key)<if_stmt>cached<block_start><return>cached<block_end><block_end>connection=get_mbtiles_connection(bucket key cache)<line_sep>cur=connection.cursor()<line_sep>res=cur.execute('''SELECT name, value FROM metadata WHERE name IN ('center', 'json')''')<line_sep>data=dict(res.fetchall())<line_sep>lon,lat,zoom=map(float data.get('center' '0,0,0').split(','))<line_sep>more=json.loads(data.get('json' '{}'))<line_sep>fields=list(more.get('vector_layers' [])[0].get('fields' {}).keys())<line_sep>cur.close()<line_sep>metadata_tuple=(zoom lat lon fields)<if_stmt>cache<block_start>cache.set(cache_key metadata_tuple)<block_end><return>metadata_tuple<block_end><def_stmt>get_mbtiles_tile bucket key row col zoom cache<block_start>'''Fetch one tile blob from the mbtiles tiles table, converting the XYZ row to TMS. 
'''<if_stmt>cache<block_start>cache_key='{bucket}/{key}/{zoom}/{row}/{col}'.format(bucket=bucket key=key zoom=zoom row=row col=col)<line_sep>cached=cache.get(cache_key)<if_stmt>cached<block_start><return>cached<block_end><block_end>connection=get_mbtiles_connection(bucket key cache)<line_sep>cur=connection.cursor()<line_sep>flipped_row=(2<power>zoom)-1-row<line_sep>res=cur.execute('''SELECT tile_data FROM tiles WHERE zoom_level=? AND tile_column=? AND tile_row=?''' (zoom col flipped_row))<line_sep>data=res.fetchone()<line_sep>cur.close()<if_stmt>cache<block_start>cache.set(cache_key data)<block_end><return>data<block_end>@dots.route('/runs/<int:run_id>/dotmap/index.html')@log_application_errors<def_stmt>dotmap_preview run_id<block_start>''' '''<if_stmt><not>run_id<block_start><return>'invalid run_id' 404<block_end><try_stmt><block_start>bucket="data.openaddresses.io"<line_sep>key="runs/{run_id}/slippymap.mbtiles".format(run_id=run_id)<line_sep>mc=get_memcache_client(current_app.config)<line_sep>zoom,lat,lon,fields=get_mbtiles_metadata(bucket key mc)<block_end><except_stmt>ValueError<block_start>abort(500)<block_end><return>render_template('dotmap-index.html' run_id=run_id zoom=zoom lat=lat lon=lon fields=fields scene_url=url_for('dots.get_scene' run_id=run_id))<block_end>@dots.route('/runs/<run_id>/dotmap/scene.yaml')@log_application_errors<def_stmt>get_scene run_id<block_start><if_stmt><not>run_id<block_start><return>'invalid run_id' 404<block_end>tile_args=dict(run_id=run_id zoom=123 col=456 row=789)<line_sep>tile_url=url_for('dots.get_one_tile' **tile_args).replace('123/456/789' '{z}/{x}/{y}')<line_sep><return>Response(render_template('dotmap-scene.yaml' tile_url=tile_url) headers={'Content-Type':'application/x-yaml'} )<block_end>@dots.route('/runs/<run_id>/dotmap/tiles/<int:zoom>/<int:col>/<int:row>.mvt')@log_application_errors<def_stmt>get_one_tile run_id zoom col row<block_start>''' '''<if_stmt><not>run_id<block_start><return>'invalid run_id' 404<block_end>bucket="data.openaddresses.io"<line_sep>key="runs/{run_id}/slippymap.mbtiles".format(run_id=run_id)<line_sep>mc=get_memcache_client(current_app.config)<line_sep>body=get_mbtiles_tile(bucket key row col zoom mc)<if_stmt><not>body<block_start><return>'tile not found' 404<block_end>headers={'Content-Type':'application/vnd.mapbox-vector-tile' 'Content-Encoding':'gzip' }<line_sep><return>Response(body headers=headers)<block_end><def_stmt>apply_dotmap_blueprint app<block_start>''' '''<line_sep>@dots.after_request<def_stmt>cache_everything response<block_start>response.cache_control.max_age=31556952# 1 year response.cache_control.public=<true><line_sep><return>response<block_end>app.register_blueprint(dots)<line_sep>@app.before_first_request<def_stmt>app_prepare <block_start>setup_logger(os.environ.get('AWS_SNS_ARN') <none> flask_log_level(app.config))<block_end><block_end>
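# A hedged usage sketch for the S3-backed VFS above: open an mbtiles database
# that lives in S3 and count its tiles. The bucket/key are placeholders, and
# boto3 credentials are assumed to be configured.
conn = get_mbtiles_connection('data.openaddresses.io', 'runs/123/slippymap.mbtiles', cache=None)
(tile_count,) = conn.cursor().execute('SELECT count(*) FROM tiles').fetchone()
print(tile_count)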
# --- # jupyter: # jupytext: # text_representation: # extension: .py # format_name: light # format_version: '1.5' # jupytext_version: 1.11.3 # kernelspec: # display_name: Python [default] # language: python # name: python3 # --- # + [markdown] id="view-in-github" colab_type="text" # <a href="https://colab.research.google.com/github/probml/pyprobml/blob/master/notebooks/linreg_hierarchical_non_centered_numpyro.ipynb" target="_parent"><img src="https://colab.research.google.com/assets/colab-badge.svg" alt="Open In Colab"/></a> # + [markdown] id="f_py_lrTPdK1" # # # # Hierarchical non-centered Bayesian Linear Regression in NumPyro # # The text and code for this notebook are taken directly from [this blog post](https://twiecki.io/blog/2017/02/08/bayesian-hierchical-non-centered/) # by <NAME>. [Original notebook](https://github.com/twiecki/WhileMyMCMCGentlySamples/blob/master/content/downloads/notebooks/GLM_hierarchical_non_centered.ipynb) # # # # + colab={"base_uri": "https://localhost:8080/"} id="XcsJEi91Qelr" outputId="8a943870-b8fe-4ef7-aa9f-0006e3266ae7" # !pip install -q numpyro@git+https://github.com/pyro-ppl/numpyro arviz # !pip install arviz # !pip install seaborn # + [markdown] id="J3PmS3woW962" # First, we will import the libraries we need: # + id="QPTA4cZCPdK1" # %matplotlib inline <import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<import_stmt>pandas<as>pd<import_stmt>seaborn<as>sns<import_stmt>arviz<as>az<import_stmt>numpyro<import_from_stmt>numpyro.infer MCMC NUTS Predictive<import_stmt>numpyro.distributions<as>dist<import_from_stmt>jax random<line_sep>sns.set_style('whitegrid')<line_sep>np.random.seed(123)<line_sep># + [markdown] id="JzDno90bHlrO" # Then, we'll load the data: # + id="c4BgCIlclQXX" url='https://github.com/twiecki/WhileMyMCMCGentlySamples/blob/master/content/downloads/notebooks/radon.csv?raw=true'<line_sep>data=pd.read_csv(url)<line_sep># + id="17ISOnzPlSR1" county_names=data.county.unique()<line_sep>county_idx=data.county_code.values<line_sep># + [markdown] id="KdWGECP9PdK1" # ## The intuitive specification # # Usually, hierarchical models are specified in a *centered* way. In a regression model, individual slopes would be centered around a group mean with a certain group variance, which controls the shrinkage: # + id="R3K2OfGGnWlZ" <def_stmt>hierarchical_model_centered county floor log_of_radon# Hyperpriors <block_start>mu_a=numpyro.sample("mu_a" dist.Normal(0. 100<power>2))<line_sep>sigma_a=numpyro.sample("sigma_a" dist.HalfCauchy(5.))<line_sep>mu_b=numpyro.sample("mu_b" dist.Normal(0.
100<power>2))<line_sep>sigma_b=numpyro.sample("sigma_b" dist.HalfCauchy(5.))<line_sep>unique_counties=np.unique(county)<line_sep>n_counties=len(unique_counties)<with_stmt>numpyro.plate("counties" n_counties)# Intercept for each county, distributed around group mean mu_a <block_start>a=numpyro.sample("a" dist.Normal(mu_a sigma_a))<line_sep># Slope for each county, distributed around group mean mu_b b=numpyro.sample("b" dist.Normal(mu_b sigma_b))<block_end># Model error eps=numpyro.sample("eps" dist.HalfCauchy(scale=5.))<line_sep># Expected value radon_est=a[county_idx]+b[county_idx]<times>floor<with_stmt>numpyro.plate("data" len(county))# Data likelihood <block_start>numpyro.sample("obs" dist.Normal(radon_est eps) obs=log_of_radon)<block_end><block_end># + colab={"base_uri": "https://localhost:8080/"} id="pmpzyT74Cj17" outputId="b0540268-3b40-4f57-fffa-d1880b859369" nuts_kernel=NUTS(hierarchical_model_centered)<line_sep>mcmc=MCMC(nuts_kernel num_samples=5000 num_warmup=1000 num_chains=2)<line_sep>rng_key=random.PRNGKey(0)<line_sep>mcmc.run(rng_key data.county.values data.floor.values data.log_radon.values)<line_sep>hierarchical_centered_trace=mcmc.get_samples(<true>)<line_sep># Drop the first 1000 post-warmup samples from each chain hierarchical_centered_trace={k:v[: 1000: :]<if>len(v.shape)<eq>3<else>v[: 1000:]<for>k,v hierarchical_centered_trace.items()}<line_sep># + colab={"base_uri": "https://localhost:8080/", "height": 1000} id="M6hlZ2905Eoo" outputId="5e8a6fe3-6555-4fcb-9e9e-9e39d7bc348a" inference_data=az.from_numpyro(mcmc)<line_sep>az.plot_trace(inference_data compact=<true>)<line_sep># + [markdown] id="OAbZ_QXGPdK2" # I have seen plenty of traces with terrible convergence but this one might look fine to the unassuming eye. Perhaps `sigma_b` has some problems, so let's look at the Rhat: # + id="EdTq66JUPdK2" colab={"base_uri": "https://localhost:8080/"} outputId="93b8896f-326b-43b1-b059-a95f79966804" print('Rhat(sigma_b) = {}'.format(numpyro.diagnostics.gelman_rubin(hierarchical_centered_trace['sigma_b'])))<line_sep># + [markdown] id="JHSPBEbQPdK2" # Not too bad -- well below 1.1. I used to think this wasn't a big deal but <NAME> in his [StanCon 2017 talk](https://www.youtube.com/watch?v=DJ0c7Bm5Djk&feature=youtu.be&t=4h40m9s) makes a strong point that it is actually very problematic. To understand what's going on, let's take a closer look at the slopes `b` and their group variance (i.e. how far they are allowed to move from the mean) `sigma_b`. I'm just plotting a single chain now. # + id="AzfoQz2RPdK2" colab={"base_uri": "https://localhost:8080/", "height": 268} outputId="f439fe30-1b94-40ed-df80-719878b576dc" fig,axs=plt.subplots(nrows=2)<line_sep>axs[0].plot(hierarchical_centered_trace['sigma_b'][1] alpha=.5)<line_sep>axs[0].set(ylabel='sigma_b')<line_sep>axs[1].plot(hierarchical_centered_trace['b'][1] alpha=.5)<line_sep>axs[1].set(ylabel='b')<line_sep># + [markdown] id="0zBgOlmnPdK2" # `sigma_b` seems to drift into this area of very small values and get stuck there for a while. This is a common pattern and the sampler is trying to tell you that there is a region in space that it can't quite explore efficiently. While stuck down there, the slopes `b_i` become all squished together. We've entered **The Funnel of Hell** (it's just called the funnel, I added the last part for dramatic effect).
# + [markdown] id="iTckxwW7PdK2" # ## The Funnel of Hell (and how to escape it) # # Let's look at the joint posterior of a single slope `b` (I randomly chose the 75th one) and the slope group variance `sigma_b`. # + id="e1gZ_JZSPdK2" colab={"base_uri": "https://localhost:8080/", "height": 441} outputId="2703eeff-e39a-4d4b-b02e-3a46b1034023" x=pd.Series(hierarchical_centered_trace['b'][: : 75].flatten() name='slope b_75')<line_sep>y=pd.Series(hierarchical_centered_trace['sigma_b'][: :].flatten() name='slope group variance sigma_b')<line_sep>sns.jointplot(x=x y=y ylim=(0 .7))<line_sep># + [markdown] id="byYER5es2l_l" # This makes sense, as the slope group variance goes to zero (or, said differently, we apply maximum shrinkage), individual slopes are not allowed to deviate from the slope group mean, so they all collapose to the group mean. # # While this property of the posterior in itself is not problematic, it makes the job extremely difficult for our sampler. Imagine a [Metropolis-Hastings](https://twiecki.github.io/blog/2015/11/10/mcmc-sampling/) exploring this space with a medium step-size (we're using NUTS here but the intuition works the same): in the wider top region we can comfortably make larger jumps to explore the space efficiently. However, once we move to the narrow bottom region we can change `b_75` and `sigma_b` only by tiny amounts. This causes the sampler to become trapped in that region of space. Most of the proposals will be rejected because our step-size is too large for this narrow part of the space and exploration will be very inefficient. # # You might wonder if we could somehow choose the step-size based on the denseness (or curvature) of the space. Indeed that's possible and it's called [Riemannian HMC](https://arxiv.org/abs/0907.1100). It works very well but is quite costly to run. Here, we will explore a different, simpler method. # # Finally, note that this problem does not exist for the intercept parameters `a`. Because we can determine individual intercepts `a_i` with enough confidence, `sigma_a` is not small enough to be problematic. Thus, the funnel of hell can be a problem in hierarchical models, but it does not have to be. (Thanks to <NAME> for pointing this out). # # # ## Reparameterization # # If we can't easily make the sampler step-size adjust to the region of space, maybe we can adjust the region of space to make it simpler for the sampler? This is indeed possible and quite simple with a small reparameterization trick, we will call this the *non-centered* version. # + id="HZp-OZ_RLWxN" <def_stmt>hierarchical_model_non_centered county floor log_of_radon# Hyperpriors <block_start>mu_a=numpyro.sample("mu_a" dist.Normal(0. 100<power>2))<line_sep>sigma_a=numpyro.sample("sigma_a" dist.HalfCauchy(5.))<line_sep>mu_b=numpyro.sample("mu_b" dist.Normal(0. 
100<power>2))<line_sep>sigma_b=numpyro.sample("sigma_b" dist.HalfCauchy(5.))<line_sep>unique_counties=np.unique(county)<line_sep>n_counties=len(unique_counties)<with_stmt>numpyro.plate("counties" n_counties)# Intercept for each county: a standard-normal offset, scaled and shifted around mu_a <block_start>a_offset=numpyro.sample("a_offset" dist.Normal(0 1))<line_sep>a=numpyro.deterministic("a" mu_a+a_offset<times>sigma_a)<line_sep># Slope for each county: a standard-normal offset, scaled and shifted around mu_b b_offset=numpyro.sample("b_offset" dist.Normal(0 1))<line_sep>b=numpyro.deterministic("b" mu_b+b_offset<times>sigma_b)<block_end># Model error eps=numpyro.sample("eps" dist.HalfCauchy(scale=5.))<line_sep># Expected value radon_est=a[county_idx]+b[county_idx]<times>floor<with_stmt>numpyro.plate("data" len(county))# Data likelihood <block_start>numpyro.sample("obs" dist.Normal(radon_est eps) obs=log_of_radon)<block_end><block_end># + id="eCnNxlmD2g-G" colab={"base_uri": "https://localhost:8080/"} outputId="a9df6771-8bfc-4d6f-9ef7-dc1a04c9f9ed" nuts_kernel=NUTS(hierarchical_model_non_centered)<line_sep>mcmc=MCMC(nuts_kernel num_samples=5000 num_warmup=1000 num_chains=2)<line_sep>mcmc.run(rng_key data.county.values data.floor.values data.log_radon.values)<line_sep>hierarchical_non_centered_trace=mcmc.get_samples(<true>)<line_sep>hierarchical_non_centered_trace={k:v[: 1000: :]<if>len(v.shape)<eq>3<else>v[: 1000:]<for>k,v hierarchical_non_centered_trace.items()}<line_sep># + [markdown] id="3Be9WYvFPdK3" # Pay attention to the definitions of `a_offset`, `a`, `b_offset`, and `b` and compare them to the centered model above. What's going on here? It's pretty neat actually. Instead of saying that our individual slopes `b` are normally distributed around a group mean (i.e. modeling their absolute values directly), we can say that they are offset from a group mean by a certain value (`b_offset`; i.e. modeling their values relative to that mean). Now we still have to consider how far from that mean we actually allow things to deviate (i.e. how much shrinkage we apply). This is where `sigma_b` makes a comeback. We can simply multiply the offset by this scaling factor to get the same effect as before, just under a different parameterization. For a more formal introduction, see e.g. [Betancourt & Girolami (2013)](https://arxiv.org/pdf/1312.0906.pdf). # # Critically, `b_offset` and `sigma_b` are now mostly independent. This will become clearer soon.
Let's first look at whether this transform helped our sampling: # + id="zzrN4osl2kMq" colab={"base_uri": "https://localhost:8080/", "height": 1000} outputId="a46c60da-cf05-4382-9603-7f7b87526fc9" var_names=['a' 'b' 'mu_a' 'mu_b' 'sigma_a' 'sigma_b' 'eps']<line_sep>inference_data=az.from_numpyro(mcmc)<line_sep>az.plot_trace(inference_data var_names=var_names compact=<true>)<line_sep># + [markdown] id="b1lMZjlxPdK3" # That looks much better as also confirmed by the joint plot: # + colab={"base_uri": "https://localhost:8080/", "height": 296} id="_dcp7FYr2-dH" outputId="892efbac-6411-4b51-8d94-2641d6fcb174" fig,axs=plt.subplots(ncols=2 sharex=<true> sharey=<true>)<line_sep>x=pd.Series(hierarchical_centered_trace['b'][: : 75].flatten() name='slope b_75')<line_sep>y=pd.Series(hierarchical_centered_trace['sigma_b'][: :].flatten() name='slope group variance sigma_b')<line_sep>axs[0].plot(x y '.')<line_sep>axs[0].set(title='Centered' ylabel='sigma_b' xlabel='b_75')<line_sep>x=pd.Series(hierarchical_non_centered_trace['b'][: : 75].flatten() name='slope b_75')<line_sep>y=pd.Series(hierarchical_non_centered_trace['sigma_b'].flatten() name='slope group variance sigma_b')<line_sep>axs[1].plot(x y '.')<line_sep>axs[1].set(title='Non-centered' xlabel='b_75')<line_sep># + [markdown] id="Q_W701t6PdK3" # To really drive this home, let's also compare the `sigma_b` marginal posteriors of the two models: # + id="XJxFSFbnPdK3" colab={"base_uri": "https://localhost:8080/", "height": 313} outputId="ae23d007-188a-435a-a2c9-d786dc18708e" az.plot_kde(np.stack([hierarchical_centered_trace['sigma_b'] hierarchical_non_centered_trace['sigma_b'] ]).T)<line_sep>plt.axvline(hierarchical_centered_trace['sigma_b'].mean() color='b' linestyle='--')<line_sep>plt.axvline(hierarchical_non_centered_trace['sigma_b'].mean() color='g' linestyle='--')<line_sep>plt.legend(['Centered' 'Non-centered' 'Centered posterior mean' 'Non-centered posterior mean'])<line_sep>plt.xlabel('sigma_b')<line_sep>plt.ylabel('Probability Density')<line_sep># + [markdown] id="QXe9_4vIPdK3" # That's crazy -- there's a large region of very small `sigma_b` values that the sampler could not even explore before. In other words, our previous inferences ("Centered") were severely biased towards higher values of `sigma_b`. Indeed, if you look at the [previous blog post](https://twiecki.github.io/blog/2014/03/17/bayesian-glms-3/) the sampler never even got stuck in that low region causing me to believe everything was fine. These issues are hard to detect and very subtle, but they are meaningful as demonstrated by the sizable difference in posterior mean. # # But what does this concretely mean for our analysis? Over-estimating `sigma_b` means that we have a biased (=false) belief that we can tell individual slopes apart better than we actually can. There is less information in the individual slopes than what we estimated. # + [markdown] id="3G2KQzuvPdK3" # ### Why does the reparameterized model work better?
# # To more clearly understand why this model works better, let's look at the joint distribution of `b_offset`: # + colab={"base_uri": "https://localhost:8080/", "height": 441} id="Uje-j5FJ5WM0" outputId="b0d4f19e-f3e5-4fb1-ccfd-41954d36caa0" x=pd.Series(hierarchical_non_centered_trace['b_offset'][: : 75].flatten() name='slope b_offset_75')<line_sep>y=pd.Series(hierarchical_non_centered_trace['sigma_b'][: :].flatten() name='slope group variance sigma_b')<line_sep>sns.jointplot(x=x y=y ylim=(0 .7))<line_sep># + [markdown] id="iUUIWErkPdK3" # This is the space the sampler sees; you can see how the funnel is flattened out. We can freely change the (relative) slope offset parameters even if the slope group variance is tiny as it just acts as a scaling parameter. # # Note that the funnel is still there -- it's a perfectly valid property of the model -- but the sampler has a much easier time exploring it in this different parameterization. # + [markdown] id="5Klof7DEPdK3" # ## Why hierarchical models are Bayesian # # Finally, I want to take the opportunity to make another point that is not directly related to hierarchical models but can be demonstrated quite well here. # # Usually when talking about the perils of Bayesian statistics we talk about priors, uncertainty, and flexibility when coding models using Probabilistic Programming. However, an even more important property is rarely mentioned because it is much harder to communicate. <NAME> touched on this point in his tweet: # + [markdown] id="i4dat7gDPdK3" # <blockquote class="twitter-tweet" data-lang="en"><p lang="en" dir="ltr">It&#39;s interesting that many summarize Bayes as being about priors; but real power is its focus on integrals/expectations over maxima/modes</p>&mdash; <NAME> (@rosstaylor90) <a href="https://twitter.com/rosstaylor90/status/827263854002401281">February 2, 2017</a></blockquote> # <script async src="//platform.twitter.com/widgets.js" charset="utf-8"></script> # + [markdown] id="4tJwmkxRPdK3" # <NAME> makes a similar point when he says ["Expectations are the only thing that make sense."](https://www.youtube.com/watch?v=pHsuIaPbNbY&t=8s) # # But what's wrong with maxima/modes? Aren't those really close to the posterior mean (i.e. the expectation)? Unfortunately, that's only the case for the simple models we teach to build up intuitions. In complex models, like the hierarchical one, the MAP can be far away and not be interesting or meaningful at all. # # Let's compare the posterior mode (i.e. the MAP) to the posterior mean of our hierarchical linear regression model: # + id="df4orfyOPdK3" colab={"base_uri": "https://localhost:8080/"} outputId="37e89240-dd0f-45c6-cdb3-fe6ba7cb6958" hierarchical_centered_trace['b'].reshape(8000 -1).mean(axis=0)<line_sep># + id="rsadfvlSPdK3" colab={"base_uri": "https://localhost:8080/"} outputId="b1ce064b-25fb-4a3e-b427-bca426c2a275" hierarchical_centered_trace['sigma_b'].reshape(1 -1).std(axis=1)<line_sep># + [markdown] id="muQpdSipPdK3" # As you can see, the slopes are all identical and the group slope variance is effectively zero. The reason is again related to the funnel. The MAP only cares about the probability **density** which is highest at the bottom of the funnel. # # But if you could only choose one point in parameter space to summarize the posterior above, would this be the one you'd pick? Probably not. # # Let's instead look at the **Expected Value** (i.e.
posterior mean) which is computed by integrating probability **density** and **volume** to provide probability **mass** -- the thing we really care about. Under the hood, that's the integration performed by the MCMC sampler. # + colab={"base_uri": "https://localhost:8080/"} id="5uXWUhPbnowC" outputId="af8ac9fb-e6d4-4c23-8a69-b7ec280156c4" hierarchical_non_centered_trace['b'].reshape(8000 -1).mean(axis=0)<line_sep># + id="9h-FzVGJPdK3" colab={"base_uri": "https://localhost:8080/"} outputId="38fa1e6c-99e7-4d98-f8cb-bf4d8d9f391b" hierarchical_non_centered_trace['sigma_b'].reshape(1 -1).mean(axis=1)<line_sep># + [markdown] id="-AL504GdPdK3" # Quite a difference. This also explains why it can be a bad idea to use the MAP to initialize your sampler: in certain models the MAP is not at all close to the region you want to explore (i.e. the "typical set"). # # This strong divergence of the MAP and the Posterior Mean does not only happen in hierarchical models but also in high dimensional ones, where our intuitions from low-dimensional spaces get twisted in serious ways. [This talk by <NAME>](https://www.youtube.com/watch?v=pHsuIaPbNbY&t=8s) makes the point quite nicely. # # So why do people -- especially in Machine Learning -- still use the MAP/MLE? As we all learned in high school firsthand, integration is much harder than differentiation. This is really the only reason. # # Final disclaimer: This might give the impression that this is a property of being in a Bayesian framework, which is not true. Technically, we can talk about Expectations vs Modes irrespective of that. Bayesian statistics just happens to provide a very intuitive and flexible framework for expressing and estimating these models. # # See [here](https://rawgithub.com/twiecki/WhileMyMCMCGentlySamples/master/content/downloads/notebooks/GLM_hierarchical_non_centered.ipynb) for the underlying notebook of this blog post. # + [markdown] id="SzMHO6fNPdK3" # ## Acknowledgements # # Thanks to [<NAME>](https://twitter.com/jonsedar) for helpful comments on an earlier draft.
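# A quick numerical check of the reparameterization identity the notebook rests
# on: mu + sigma * z with z ~ N(0, 1) has the same distribution as N(mu, sigma),
# so the non-centered model is the same model, only easier to sample.
import numpy as np

rng = np.random.default_rng(0)
mu, sigma = 1.5, 0.3
centered = rng.normal(mu, sigma, 100_000)
non_centered = mu + sigma * rng.normal(0.0, 1.0, 100_000)
print(centered.mean(), non_centered.mean())  # both approximately 1.5
print(centered.std(), non_centered.std())    # both approximately 0.3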
# ------------------------------------------------------------------------- # Copyright (c) Microsoft Corporation. All rights reserved. # Licensed under the MIT License. See License.txt in the project root for # license information. # -------------------------------------------------------------------------- <import_from_stmt>typing # pylint: disable=unused-import Union Optional Any Iterable Dict List Type Tuple TYPE_CHECKING <import_stmt>logging<import_from_stmt>os fstat<import_from_stmt>io SEEK_END SEEK_SET UnsupportedOperation <import_stmt>isodate<import_from_stmt>azure.core.exceptions raise_with_traceback<line_sep>_LOGGER=logging.getLogger(__name__)<def_stmt>serialize_iso attr<block_start>"""Serialize Datetime object into ISO-8601 formatted string. :param Datetime attr: Object to be serialized. :rtype: str :raises: ValueError if format invalid. """<if_stmt><not>attr<block_start><return><none><block_end><if_stmt>isinstance(attr str)<block_start>attr=isodate.parse_datetime(attr)<block_end><try_stmt><block_start>utc=attr.utctimetuple()<if_stmt>utc.tm_year<g>9999<or>utc.tm_year<l>1<block_start><raise>OverflowError("Hit max or min date")<block_end>date="{:04}-{:02}-{:02}T{:02}:{:02}:{:02}".format(utc.tm_year utc.tm_mon utc.tm_mday utc.tm_hour utc.tm_min utc.tm_sec)<line_sep><return>date+'Z'<block_end><except_stmt>(ValueError OverflowError)<as>err<block_start>msg="Unable to serialize datetime object."<line_sep>raise_with_traceback(ValueError msg err)<block_end><except_stmt>AttributeError<as>err<block_start>msg="ISO-8601 object must be valid Datetime object."<line_sep>raise_with_traceback(TypeError msg err)<block_end><block_end><def_stmt>get_length data<block_start>length=<none><line_sep># Check if object implements the __len__ method, covers most input cases such as bytearray. <try_stmt><block_start>length=len(data)<block_end><except_stmt># pylint: disable=bare-except <block_start><pass><block_end><if_stmt><not>length# Check if the stream is a file-like stream object. # If so, calculate the size using the file descriptor. <block_start><try_stmt><block_start>fileno=data.fileno()<block_end><except_stmt>(AttributeError UnsupportedOperation)<block_start><pass><block_end><else_stmt><block_start><try_stmt><block_start><return>fstat(fileno).st_size<block_end><except_stmt>OSError# Not a valid fileno, may be possible requests returned # a socket number? <block_start><pass><block_end><block_end># If the stream is seekable and tell() is implemented, calculate the stream size. 
<try_stmt><block_start>current_position=data.tell()<line_sep>data.seek(0 SEEK_END)<line_sep>length=data.tell()-current_position<line_sep>data.seek(current_position SEEK_SET)<block_end><except_stmt>(AttributeError UnsupportedOperation)<block_start><pass><block_end><block_end><return>length<block_end><def_stmt>read_length data<block_start><try_stmt><block_start><if_stmt>hasattr(data 'read')<block_start>read_data=b''<for_stmt>chunk iter(<lambda>:data.read(4096) b"")<block_start>read_data<augadd>chunk<block_end><return>len(read_data) read_data<block_end><if_stmt>hasattr(data '__iter__')<block_start>read_data=b''<for_stmt>chunk data<block_start>read_data<augadd>chunk<block_end><return>len(read_data) read_data<block_end><block_end><except_stmt># pylint: disable=bare-except <block_start><pass><block_end><raise>ValueError("Unable to calculate content length, please specify.")<block_end><def_stmt>validate_and_format_range_headers start_range end_range start_range_required=<true> end_range_required=<true> check_content_md5=<false> align_to_page=<false># If end range is provided, start range must be provided <block_start><if_stmt>(start_range_required<or>end_range<is><not><none>)<and>start_range<is><none><block_start><raise>ValueError("start_range value cannot be None.")<block_end><if_stmt>end_range_required<and>end_range<is><none><block_start><raise>ValueError("end_range value cannot be None.")<block_end># Page ranges must be 512-byte aligned <if_stmt>align_to_page<block_start><if_stmt>start_range<is><not><none><and>start_range%512<ne>0<block_start><raise>ValueError("Invalid page blob start_range: {0}. "<concat>"The size must be aligned to a 512-byte boundary.".format(start_range))<block_end><if_stmt>end_range<is><not><none><and>end_range%512<ne>511<block_start><raise>ValueError("Invalid page blob end_range: {0}. "<concat>"The size must be aligned to a 512-byte boundary.".format(end_range))<block_end><block_end># Format based on whether end_range is present range_header=<none><if_stmt>end_range<is><not><none><block_start>range_header='bytes={0}-{1}'.format(start_range end_range)<block_end><elif_stmt>start_range<is><not><none><block_start>range_header="bytes={0}-".format(start_range)<block_end># Content MD5 can only be provided for a complete range less than 4MB in size range_validation=<none><if_stmt>check_content_md5<block_start><if_stmt>start_range<is><none><or>end_range<is><none><block_start><raise>ValueError("Both start and end range required for MD5 content validation.")<block_end><if_stmt>end_range-start_range<g>4<times>1024<times>1024<block_start><raise>ValueError("Getting content MD5 for a range greater than 4MB is not supported.")<block_end>range_validation='true'<block_end><return>range_header range_validation<block_end><def_stmt>add_metadata_headers metadata=<none># type: (Optional[Dict[str, str]]) -> Dict[str, str] <block_start>headers={}<if_stmt>metadata<block_start><for_stmt>key,value metadata.items()<block_start>headers['x-ms-meta-{}'.format(key.strip())]=value.strip()<if>value<else>value<block_end><block_end><return>headers<block_end>
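A small usage sketch for validate_and_format_range_headers above (assuming the function is in scope): a 1 MiB range with MD5 validation enabled.

range_header, range_validation = validate_and_format_range_headers(
    start_range=0,
    end_range=1024 * 1024 - 1,  # 1 MiB span, below the 4 MB MD5 limit
    check_content_md5=True,
)
assert range_header == "bytes=0-1048575"
assert range_validation == "true"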
<import_stmt>numpy<as>np<import_from_stmt>mmdet.datasets PIPELINES<import_from_stmt>mmdet.datasets.pipelines.formating Collect<import_from_stmt>ssod.core TrimapMasks<line_sep>@PIPELINES.register_module()<class_stmt>ExtraAttrs(object)<block_start><def_stmt>__init__ self **attrs<block_start>self.attrs=attrs<block_end><def_stmt>__call__ self results<block_start><for_stmt>k,v self.attrs.items()<block_start><assert_stmt>k<not><in>results<line_sep>results[k]=v<block_end><return>results<block_end><block_end>@PIPELINES.register_module()<class_stmt>ExtraCollect(Collect)<block_start><def_stmt>__init__ self *args extra_meta_keys=[] **kwargs<block_start>super().__init__(*args **kwargs)<line_sep>self.meta_keys=self.meta_keys+tuple(extra_meta_keys)<block_end><block_end>@PIPELINES.register_module()<class_stmt>PseudoSamples(object)<block_start><def_stmt>__init__ self with_bbox=<false> with_mask=<false> with_seg=<false> fill_value=255<block_start>""" Replace gt labels in the original data with fake labels, or add extra fake labels for unlabeled data. This removes the effect of labeled data and keeps the sample's elements aligned with those of the other samples. Args: with_bbox: add empty fake boxes and labels. with_mask: add fake masks filled with `fill_value`. with_seg: add a fake semantic segmentation map filled with `fill_value`. fill_value: fill value for the fake masks and segmentation maps. """<line_sep>self.with_bbox=with_bbox<line_sep>self.with_mask=with_mask<line_sep>self.with_seg=with_seg<line_sep>self.fill_value=fill_value<block_end><def_stmt>__call__ self results<block_start><if_stmt>self.with_bbox<block_start>results["gt_bboxes"]=np.zeros((0 4))<line_sep>results["gt_labels"]=np.zeros((0 ))<if_stmt>"bbox_fields"<not><in>results<block_start>results["bbox_fields"]=[]<block_end><if_stmt>"gt_bboxes"<not><in>results["bbox_fields"]<block_start>results["bbox_fields"].append("gt_bboxes")<block_end><block_end><if_stmt>self.with_mask<block_start>num_inst=len(results["gt_bboxes"])<line_sep>h,w=results["img"].shape[:2]<line_sep>results["gt_masks"]=TrimapMasks([self.fill_value<times>np.ones((h w) dtype=np.uint8)<for>_ range(num_inst)] h w )<if_stmt>"mask_fields"<not><in>results<block_start>results["mask_fields"]=[]<block_end><if_stmt>"gt_masks"<not><in>results["mask_fields"]<block_start>results["mask_fields"].append("gt_masks")<block_end><block_end><if_stmt>self.with_seg<block_start>results["gt_semantic_seg"]=self.fill_value<times>np.ones(results["img"].shape[:2] dtype=np.uint8)<if_stmt>"seg_fields"<not><in>results<block_start>results["seg_fields"]=[]<block_end><if_stmt>"gt_semantic_seg"<not><in>results["seg_fields"]<block_start>results["seg_fields"].append("gt_semantic_seg")<block_end><block_end><return>results<block_end><block_end>
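A hypothetical sketch of how these stages might be wired into an mmdet-style pipeline for unlabeled images; the keys and the "unsup_teacher" tag are illustrative, not taken from the original config.

unsup_pipeline = [
    dict(type="PseudoSamples", with_bbox=True),    # fake empty boxes/labels
    dict(type="ExtraAttrs", tag="unsup_teacher"),  # mark which branch produced the sample
    dict(type="ExtraCollect",
         keys=["img", "gt_bboxes", "gt_labels"],
         extra_meta_keys=["tag"]),                 # carry the tag through img_metas
]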
# -*- coding: utf-8 -*- <import_from_future_stmt> absolute_import<import_from_future_stmt> division<import_from_future_stmt> print_function<import_stmt>csv<import_stmt>numpy<as>np<import_stmt>os<import_stmt>sys<import_from_stmt>observations.util maybe_download_and_extract<def_stmt>davis path<block_start>"""Self-Reports of Height and Weight The `Davis` data frame has 200 rows and 5 columns. The subjects were men and women engaged in regular exercise. There are some missing data. This data frame contains the following columns: sex A factor with levels: `F`, female; `M`, male. weight Measured weight in kg. height Measured height in cm. repwt Reported weight in kg. repht Reported height in cm. Personal communication from <NAME>, Departments of Physical Education and Psychology, York University. Args: path: str. Path to directory which either stores file or otherwise file will be downloaded and extracted there. Filename is `davis.csv`. Returns: Tuple of np.ndarray `x_train` with 200 rows and 5 columns and dictionary `metadata` of column headers (feature names). """<import_stmt>pandas<as>pd<line_sep>path=os.path.expanduser(path)<line_sep>filename='davis.csv'<if_stmt><not>os.path.exists(os.path.join(path filename))<block_start>url='http://dustintran.com/data/r/car/Davis.csv'<line_sep>maybe_download_and_extract(path url save_file_name='davis.csv' resume=<false>)<block_end>data=pd.read_csv(os.path.join(path filename) index_col=0 parse_dates=<true>)<line_sep>x_train=data.values<line_sep>metadata={'columns':data.columns}<line_sep><return>x_train metadata<block_end>
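A usage sketch for the loader above; the path is hypothetical and the CSV is downloaded on first use.

x_train, metadata = davis('~/data')
print(x_train.shape)              # (200, 5)
print(list(metadata['columns']))  # ['sex', 'weight', 'height', 'repwt', 'repht']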
<import_from_stmt>.model_list ModelList<import_from_stmt>.data_mixin DataMixin<class_stmt>DataList(DataMixin ModelList)<block_start><pass><block_end>
<import_stmt>wagtail.admin.rich_text.editors.draftail.features<as>draftail_features<import_from_stmt>wagtail.admin.rich_text.converters.html_to_contentstate InlineStyleElementHandler BlockElementHandler<import_from_stmt>wagtail.core hooks<line_sep>@hooks.register('register_rich_text_features')<def_stmt>register_blockquote_feature features<block_start>""" Registering the `blockquote` feature, which uses the `blockquote` Draft.js block type, and is stored as HTML with a `<blockquote>` tag. """<line_sep>feature_name='blockquote'<line_sep>type_='blockquote'<line_sep>tag='blockquote'<line_sep>control={'type':type_ 'label':'❝' 'description':'Quote' 'element':'blockquote' }<line_sep>features.register_editor_plugin('draftail' feature_name draftail_features.BlockFeature(control))<line_sep>features.register_converter_rule('contentstate' feature_name {'from_database_format':{tag:BlockElementHandler(type_)} 'to_database_format':{'block_map':{type_:tag}} })<line_sep>features.default_features.append(feature_name)<block_end>@hooks.register('register_rich_text_features')<def_stmt>register_codeline_feature features<block_start>feature_name='Code Line'<line_sep>type_='CODE'<line_sep>tag='code'<line_sep>control={'type':type_ 'label':'>_' 'description':'Code Line' }<line_sep>features.register_editor_plugin('draftail' feature_name draftail_features.InlineStyleFeature(control))<line_sep>db_conversion={'from_database_format':{tag:InlineStyleElementHandler(type_)} 'to_database_format':{'style_map':{type_:tag}} }<line_sep>features.register_converter_rule('contentstate' feature_name db_conversion)<line_sep>features.default_features.append(feature_name)<block_end>
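Once the hooks above have run, the registered feature names can be enabled per field. A sketch, assuming the same wagtail.core-era import paths as above:

from wagtail.core.fields import RichTextField

# Feature names exactly as registered above: 'blockquote' and 'Code Line'.
body = RichTextField(features=['bold', 'italic', 'blockquote', 'Code Line'])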
<import_stmt>os<import_stmt>time<import_stmt>json<import_stmt>shutil<import_stmt>base64<import_stmt>logging<line_sep>cache_directory='.lightnion-cache.d'<def_stmt>directory base_dir=<none><block_start><if_stmt>base_dir<is><none><block_start>base_dir=os.getcwd()<block_end>base_dir=os.path.join(base_dir cache_directory)<if_stmt><not>os.path.isdir(base_dir)<block_start>logging.info('Note: creating {} to cache descriptors.'.format(base_dir))<line_sep>os.mkdir(base_dir)<block_end><if_stmt><not>os.path.isdir(base_dir)<block_start><raise>RuntimeError('Unable to fetch cache directory: {}'.format(base_dir))<block_end><return>base_dir<block_end><def_stmt>purge <block_start>base_dir=directory()<line_sep>logging.warning('Note: removing {} to purge cache.'.format(base_dir))<line_sep>shutil.rmtree(base_dir)<block_end><class_stmt>descriptors<block_start>@staticmethod<def_stmt>filename descriptor get=<false><block_start>base_dir='descriptors'<if_stmt>'micro'<in>descriptor['flavor']<block_start>base_dir='micro-'+base_dir<block_end>base_dir=os.path.join(directory() base_dir)<if_stmt><not>os.path.isdir(base_dir)<block_start>os.mkdir(base_dir)<block_end>field='digest'<if_stmt>'micro'<in>descriptor['flavor']<block_start>field='micro-digest'<block_end>digest=descriptor[field]<if_stmt>(<not>get)<or>'micro'<in>descriptor['flavor']<block_start>digest=base64.b64decode(descriptor[field]+'====').hex()<block_end>half_dir=os.path.join(base_dir digest[:2])<if_stmt><not>os.path.isdir(half_dir)<block_start>os.mkdir(half_dir)<block_end><return>os.path.join(half_dir digest)<block_end>@staticmethod<def_stmt>put descriptor<block_start>filename=descriptors.filename(descriptor)<if_stmt>os.path.isfile(filename)<block_start><return><block_end><with_stmt>open(filename 'w')<as>f<block_start>json.dump(descriptor f)<block_end><block_end>@staticmethod<def_stmt>get flavor digest<block_start>field='digest'<if_stmt>'micro'<in>flavor<block_start>field='micro-digest'<block_end>descriptor={'flavor':flavor field:digest}<line_sep>filename=descriptors.filename(descriptor get=<true>)<with_stmt>open(filename 'r')<as>f<block_start>descriptor=json.load(f)<block_end><if_stmt><not>descriptor['flavor']<eq>flavor<block_start><raise>ValueError('Mismatched flavor.')<block_end>new_digest=descriptor[field]<if_stmt><not>'micro'<in>field<block_start>new_digest=base64.b64decode(new_digest+'====').hex()<block_end><if_stmt><not>new_digest<eq>digest<block_start><raise>ValueError('Mismatched digest.')<block_end><return>descriptor<block_end><block_end><class_stmt>consensus<block_start>@staticmethod<def_stmt>filename flavor<block_start><return>os.path.join(directory() 'consensus-{}'.format(flavor))<block_end>@staticmethod<def_stmt>put fields<block_start>filename=consensus.filename(fields['flavor'])<with_stmt>open(filename 'w')<as>f<block_start>json.dump(fields f)<block_end><block_end>@staticmethod<def_stmt>get flavor<block_start>filename=consensus.filename(flavor)<with_stmt>open(filename 'r')<as>f<block_start>fields=json.load(f)<block_end><if_stmt><not>fields['flavor']<eq>flavor<block_start><raise>ValueError('Mismatched flavor.')<block_end><if_stmt>fields['headers']['valid-until']['stamp']<l>time.time()<block_start><raise>ValueError('Consensus need to be refreshed: {} < {}'.format(fields['headers']['valid-until']['stamp'] time.time()))<block_end><return>fields<block_end><block_end>
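A usage sketch for the consensus cache above; 'microdesc' is a hypothetical flavor value, and the nested stamp layout mirrors what consensus.get checks.

import time

fields = {
    'flavor': 'microdesc',
    'headers': {'valid-until': {'stamp': time.time() + 3600}},  # valid for one hour
}
consensus.put(fields)
assert consensus.get('microdesc')['flavor'] == 'microdesc'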
<import_stmt>torch<import_stmt>torch.nn<as>nn<import_stmt>torchvision<def_stmt>BottleneckV1 in_channels out_channels stride<block_start><return>nn.Sequential(nn.Conv2d(in_channels=in_channels out_channels=in_channels kernel_size=3 stride=stride padding=1 groups=in_channels) nn.BatchNorm2d(in_channels) nn.ReLU6(inplace=<true>) nn.Conv2d(in_channels=in_channels out_channels=out_channels kernel_size=1 stride=1) nn.BatchNorm2d(out_channels) nn.ReLU6(inplace=<true>))<block_end><class_stmt>MobileNetV1(nn.Module)<block_start><def_stmt>__init__ self num_classes=1000<block_start>super(MobileNetV1 self).__init__()<line_sep>self.first_conv=nn.Sequential(nn.Conv2d(in_channels=3 out_channels=32 kernel_size=3 stride=2 padding=1) nn.BatchNorm2d(32) nn.ReLU6(inplace=<true>) )<line_sep>self.bottleneck=nn.Sequential(BottleneckV1(32 64 stride=1) BottleneckV1(64 128 stride=2) BottleneckV1(128 128 stride=1) BottleneckV1(128 256 stride=2) BottleneckV1(256 256 stride=1) BottleneckV1(256 512 stride=2) BottleneckV1(512 512 stride=1) BottleneckV1(512 512 stride=1) BottleneckV1(512 512 stride=1) BottleneckV1(512 512 stride=1) BottleneckV1(512 512 stride=1) BottleneckV1(512 1024 stride=2) BottleneckV1(1024 1024 stride=1) )<line_sep>self.avg_pool=nn.AvgPool2d(kernel_size=7 stride=1)<line_sep>self.linear=nn.Linear(in_features=1024 out_features=num_classes)<line_sep>self.dropout=nn.Dropout(p=0.2)<line_sep>self.softmax=nn.Softmax(dim=1)<line_sep>self.init_params()<block_end><def_stmt>init_params self<block_start><for_stmt>m self.modules()<block_start><if_stmt>isinstance(m nn.Conv2d)<block_start>nn.init.kaiming_normal_(m.weight)<line_sep>nn.init.constant_(m.bias 0)<block_end><elif_stmt>isinstance(m nn.Linear)<or>isinstance(m nn.BatchNorm2d)<block_start>nn.init.constant_(m.weight 1)<line_sep>nn.init.constant_(m.bias 0)<block_end><block_end><block_end><def_stmt>forward self x<block_start>x=self.first_conv(x)<line_sep>x=self.bottleneck(x)<line_sep>x=self.avg_pool(x)<line_sep>x=x.view(x.size(0) -1)<line_sep>x=self.dropout(x)<line_sep>x=self.linear(x)<line_sep>out=self.softmax(x)<line_sep><return>out<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>model=MobileNetV1()<line_sep>print(model)<line_sep>input=torch.randn(1 3 224 224)<line_sep>out=model(input)<line_sep>print(out.shape)<block_end>
#------------------------------------------------------------------------------ # Copyright (c) 2013-2017, Nucleic Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file LICENSE, distributed with this software. #------------------------------------------------------------------------------ <import_from_stmt>atom.api Typed<import_from_stmt>enaml.widgets.html ProxyHtml<import_from_stmt>.QtWidgets QTextEdit<import_from_stmt>.qt_control QtControl<class_stmt>QtHtml(QtControl ProxyHtml)<block_start>""" A Qt implementation of an Enaml ProxyHtml widget. """<line_sep>#: A reference to the widget created by the proxy. widget=Typed(QTextEdit)<line_sep>#-------------------------------------------------------------------------- # Initialization API #-------------------------------------------------------------------------- <def_stmt>create_widget self<block_start>""" Create the underlying html widget. """<line_sep>widget=QTextEdit(self.parent_widget())<line_sep>widget.setReadOnly(<true>)<line_sep>self.widget=widget<block_end><def_stmt>init_widget self<block_start>""" Initialize the underlying widget. """<line_sep>super(QtHtml self).init_widget()<line_sep>self.set_source(self.declaration.source)<block_end>#-------------------------------------------------------------------------- # ProxyHtml API #-------------------------------------------------------------------------- <def_stmt>set_source self source<block_start>""" Set the source of the html widget """<line_sep>self.widget.setHtml(source)<block_end><block_end>
# License: BSD 3 clause <import_stmt>unittest<import_stmt>numpy<as>np<import_from_stmt>scipy.sparse csr_matrix<import_from_stmt>tick.robust ModelHuber<import_from_stmt>tick.base_model.tests.generalized_linear_model TestGLM<import_from_stmt>tick.linear_model SimuLinReg<class_stmt>Test(TestGLM)<block_start><def_stmt>test_ModelHuber self<block_start>"""...Numerical consistency check of loss and gradient for Huber model """<line_sep>np.random.seed(12)<line_sep>n_samples,n_features=5000 10<line_sep>w0=np.random.randn(n_features)<line_sep>c0=np.random.randn()<line_sep># First check with intercept X,y=SimuLinReg(w0 c0 n_samples=n_samples verbose=<false>).simulate()<line_sep>X_spars=csr_matrix(X)<line_sep>model=ModelHuber(fit_intercept=<true> threshold=1.3).fit(X y)<line_sep>model_spars=ModelHuber(fit_intercept=<true> threshold=1.3).fit(X_spars y)<line_sep>self.run_test_for_glm(model model_spars)<line_sep>self._test_glm_intercept_vs_hardcoded_intercept(model)<line_sep># Then check without intercept X,y=SimuLinReg(w0 <none> n_samples=n_samples verbose=<false> seed=2038).simulate()<line_sep>X_spars=csr_matrix(X)<line_sep>model=ModelHuber(fit_intercept=<false>).fit(X y)<line_sep>model_spars=ModelHuber(fit_intercept=<false>).fit(X_spars y)<line_sep>self.run_test_for_glm(model model_spars)<line_sep># Test for the Lipschitz constants without intercept self.assertAlmostEqual(model.get_lip_best() 2.6873683857125981)<line_sep>self.assertAlmostEqual(model.get_lip_mean() 9.95845726788432)<line_sep>self.assertAlmostEqual(model.get_lip_max() 54.82616964855237)<line_sep>self.assertAlmostEqual(model_spars.get_lip_mean() model.get_lip_mean())<line_sep>self.assertAlmostEqual(model_spars.get_lip_max() model.get_lip_max())<line_sep># Test for the Lipschitz constants with intercept model=ModelHuber(fit_intercept=<true>).fit(X y)<line_sep>model_spars=ModelHuber(fit_intercept=<true>).fit(X_spars y)<line_sep>self.assertAlmostEqual(model.get_lip_best() 2.687568385712598)<line_sep>self.assertAlmostEqual(model.get_lip_mean() 10.958457267884327)<line_sep>self.assertAlmostEqual(model.get_lip_max() 55.82616964855237)<line_sep>self.assertAlmostEqual(model_spars.get_lip_mean() model.get_lip_mean())<line_sep>self.assertAlmostEqual(model_spars.get_lip_max() model.get_lip_max())<block_end><def_stmt>test_ModelHuber_threshold self<block_start>np.random.seed(12)<line_sep>n_samples,n_features=5000 10<line_sep>w0=np.random.randn(n_features)<line_sep>c0=np.random.randn()<line_sep># First check with intercept X,y=SimuLinReg(w0 c0 n_samples=n_samples verbose=<false>).simulate()<line_sep>model=ModelHuber(threshold=1.541).fit(X y)<line_sep>self.assertEqual(model._model.get_threshold() 1.541)<line_sep>model.threshold=3.14<line_sep>self.assertEqual(model._model.get_threshold() 3.14)<line_sep>msg='^threshold must be > 0$'<with_stmt>self.assertRaisesRegex(RuntimeError msg)<block_start>model=ModelHuber(threshold=-1).fit(X y)<block_end><with_stmt>self.assertRaisesRegex(RuntimeError msg)<block_start>model.threshold=0.<block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>unittest.main()<block_end>
<import_from_stmt>corus.record Record<import_from_stmt>corus.io load_lines parse_tsv skip_header <class_stmt>LRWCRecord(Record)<block_start>__attributes__=['hyponym' 'hypernym' 'genitive' 'judgement' 'confidence']<def_stmt>__init__ self hyponym hypernym genitive judgement confidence<block_start>self.hyponym=hyponym<line_sep>self.hypernym=hypernym<line_sep>self.genitive=genitive<line_sep>self.judgement=judgement<line_sep>self.confidence=confidence<block_end><block_end># INPUT:hyponym INPUT:hypernym INPUT:genitive OUTPUT:judgement CONFIDENCE:judgement # автомобиль автомашина автомашины true 99.75% # автомобиль автомототранспорт автомототранспорта true 99.96% # автомобиль автомототранспортный автомототранспортного true 99.99% <def_stmt>parse_judgement value<block_start><if_stmt>value<eq>'true'<block_start><return>1.0<block_end><elif_stmt>value<eq>'false'<block_start><return>0.0<block_end><block_end><def_stmt>parse_confidence value<block_start><return>float(value[:-1])<block_end><def_stmt>parse_toloka_lrwc lines<block_start>skip_header(lines)<line_sep>records=parse_tsv(lines)<for_stmt>record records<block_start>hyponym,hypernym,genitive,judgement,confidence=record<line_sep>judgement=parse_judgement(judgement)<line_sep>confidence=parse_confidence(confidence)<line_sep><yield>LRWCRecord(hyponym hypernym genitive judgement confidence)<block_end><block_end><def_stmt>load_toloka_lrwc path<block_start>lines=load_lines(path)<line_sep><return>parse_toloka_lrwc(lines)<block_end><class_stmt>RuADReCTRecord(Record)<block_start>__attributes__=['tweet_id' 'tweet' 'label']<def_stmt>__init__ self tweet_id tweet label<block_start>self.tweet_id=tweet_id<line_sep>self.tweet=tweet<line_sep>self.label=label<block_end><block_end># – tweet_id: unique id of the message in the twitter system; # – tweet: text of the message (tweet); # - label: tweet class, 1 - mentions an adverse effect, 0 - does not <def_stmt>parse_ruadrect lines<block_start>rows=parse_tsv(lines)<line_sep>skip_header(rows)<for_stmt>cells rows<block_start><yield>RuADReCTRecord(*cells)<block_end><block_end><def_stmt>load_ruadrect path<block_start>lines=load_lines(path)<line_sep><return>parse_ruadrect(lines)<block_end>
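A usage sketch for the loaders above; the path is hypothetical.

for record in load_toloka_lrwc('toloka_lrwc.tsv'):
    # e.g. автомобиль автомашина автомашины 1.0 99.75 (see the sample rows above)
    print(record.hyponym, record.hypernym, record.judgement, record.confidence)
    break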
#------- Link the C++ library --------------- <import_stmt>ctypes<import_stmt>platform<line_sep>system=platform.system()<if_stmt>system<eq>"Windows"<block_start>pre="./"<line_sep>suff=".dll"<block_end><else_stmt><block_start>pre="./lib"<line_sep>suff=".so"<block_end>libfile=ctypes.cdll.LoadLibrary<line_sep>filename=pre+"GraphicsAnalyse"+suff<line_sep>postPro=libfile(filename)<import_stmt>MainWindow<line_sep>#--------------------------------- #------- Define the wrapper functions ------------------ <def_stmt>script_openFile id type file<block_start>MainWindow.script_openFile(id type file)<line_sep><pass><block_end><def_stmt>script_applyClicked id type<block_start>MainWindow.script_applyClicked(id type)<line_sep><pass><block_end><def_stmt>script_Properties_Opacity id type obj_id mOpacity<block_start>MainWindow.script_Properties_Opacity(id type obj_id mOpacity)<line_sep><pass><block_end><def_stmt>script_Properties_colorColumn id type obj_id mColorColumnStyle<block_start>MainWindow.script_Properties_colorColumn(id type obj_id mColorColumnStyle)<line_sep><pass><block_end><def_stmt>script_Properties_scalarBarTitle id type obj_id colName m_title<block_start>MainWindow.script_Properties_scalarBarTitle(id type obj_id colName m_title)<line_sep><pass><block_end><def_stmt>script_Properties_scalarBarFontSize id type obj_id colName m_fontSize<block_start>MainWindow.script_Properties_scalarBarFontSize(id type obj_id colName m_fontSize)<line_sep><pass><block_end><def_stmt>script_Properties_scalarBarNumLables id type obj_id colName m_numLables<block_start>MainWindow.script_Properties_scalarBarNumLables(id type obj_id colName m_numLables)<line_sep><pass><block_end><def_stmt>script_Properties_lineWidth id type obj_id mLineWidth<block_start>MainWindow.script_Properties_lineWidth(id type obj_id mLineWidth)<line_sep><pass><block_end><def_stmt>script_Properties_pointSize id type obj_id mPointSize<block_start>MainWindow.script_Properties_pointSize(id type obj_id mPointSize)<line_sep><pass><block_end><def_stmt>script_Properties_translate id type obj_id x y z<block_start>MainWindow.script_Properties_translate(id type obj_id x y z)<line_sep><pass><block_end><def_stmt>script_Properties_origin id type obj_id x y z<block_start>MainWindow.script_Properties_origin(id type obj_id x y z)<line_sep><pass><block_end><def_stmt>script_Properties_scale id type obj_id x y z<block_start>MainWindow.script_Properties_scale(id type obj_id x y z)<line_sep><pass><block_end><def_stmt>script_Properties_orientation id type obj_id x y z<block_start>MainWindow.script_Properties_orientation(id type obj_id x y z)<line_sep><pass><block_end><def_stmt>script_Properties_representation id type obj_id m_enum_representationtype<block_start>MainWindow.script_Properties_representation(id type obj_id m_enum_representationtype)<line_sep><pass><block_end><def_stmt>script_Properties_specular id type obj_id mSpecular<block_start>MainWindow.script_Properties_specular(id type obj_id mSpecular)<line_sep><pass><block_end><def_stmt>script_Properties_diffuse id type obj_id mDiffuse<block_start>MainWindow.script_Properties_diffuse(id type obj_id mDiffuse)<line_sep><pass><block_end><def_stmt>script_Properties_ambient id type obj_id mAmbient<block_start>MainWindow.script_Properties_ambient(id type obj_id mAmbient)<line_sep><pass><block_end><def_stmt>script_Properties_specularPower id type obj_id mSpecularPower<block_start>MainWindow.script_Properties_specularPower(id type obj_id mSpecularPower)<line_sep><pass><block_end><def_stmt>script_Properties_specularColor id type obj_id r g
b<block_start>MainWindow.script_Properties_specularColor(id type obj_id r g b)<line_sep><pass><block_end><def_stmt>script_Properties_solidColor id type obj_id r g b<block_start>MainWindow.script_Properties_solidColor(id type obj_id r g b)<line_sep><pass><block_end><def_stmt>script_Properties_edgeColor id type obj_id r g b<block_start>MainWindow.script_Properties_edgeColor(id type obj_id r g b)<line_sep><pass><block_end><def_stmt>script_Properties_interpolation id type obj_id m_enum_interpolationtype<block_start>MainWindow.script_Properties_interpolation(id type obj_id m_enum_interpolationtype)<line_sep><pass><block_end><def_stmt>script_Properties_Flag_scalarBar id type obj_id mColorColumnStyle<block_start>MainWindow.script_Properties_Flag_scalarBar(id type obj_id mColorColumnStyle)<line_sep><pass><block_end><def_stmt>script_Properties_EnableOpacityMap id type obj_id val<block_start>MainWindow.script_Properties_EnableOpacityMap(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_visible id type obj_id flag_show_actors<block_start>MainWindow.script_Properties_visible(id type obj_id flag_show_actors)<line_sep><pass><block_end><def_stmt>script_Properties_show_scalarBars id type obj_id mScalarBarVisible<block_start>MainWindow.script_Properties_show_scalarBars(id type obj_id mScalarBarVisible)<line_sep><pass><block_end><def_stmt>script_Properties_show_cubeAxes id type obj_id flag_cubeAxes<block_start>MainWindow.script_Properties_show_cubeAxes(id type obj_id flag_cubeAxes)<line_sep><pass><block_end><def_stmt>script_Properties_scalarBarPosition id type obj_id colName tep_orietation pos0 pos1 pos2 pos3<block_start>MainWindow.script_Properties_scalarBarPosition(id type obj_id colName tep_orietation pos0 pos1 pos2 pos3)<line_sep><pass><block_end><def_stmt>script_Clip id type obj_id<block_start>MainWindow.script_FilterClip(id type obj_id)<line_sep><pass><block_end><def_stmt>script_Slice id type obj_id<block_start>MainWindow.script_FilterSlice(id type obj_id)<line_sep><pass><block_end><def_stmt>script_Contour id type obj_id<block_start>MainWindow.script_FilterContour(id type obj_id)<line_sep><pass><block_end><def_stmt>script_Vector id type obj_id<block_start>MainWindow.script_FilterVector(id type obj_id)<line_sep><pass><block_end><def_stmt>script_Reflection id type obj_id<block_start>MainWindow.script_FilterReflection(id type obj_id)<line_sep><pass><block_end><def_stmt>script_Smooth id type obj_id<block_start>MainWindow.script_FilterSmooth(id type obj_id)<line_sep><pass><block_end><def_stmt>script_StreamLine id type obj_id<block_start>MainWindow.script_FilterStreamLine(id type obj_id)<line_sep><pass><block_end>################### <def_stmt>script_Vector_GlyphVector id type obj_id val<block_start>MainWindow.script_Properties_vector_GlyphVector(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_scalar id type obj_id val<block_start>MainWindow.script_Properties_vector_scalar(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_normal id type obj_id val<block_start>MainWindow.script_Properties_vector_normal(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_numPoints id type obj_id val<block_start>MainWindow.script_Properties_vector_numPoints(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_glyph_type id type obj_id val<block_start>MainWindow.script_Properties_vector_glyph_type(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_glyph_tipRes id type obj_id 
val<block_start>MainWindow.script_Properties_vector_glyph_tipRes(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_glyph_tipRad id type obj_id val<block_start>MainWindow.script_Properties_vector_glyph_tipRad(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_glyph_tipLen id type obj_id val<block_start>MainWindow.script_Properties_vector_glyph_tipLen(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_FilterStreamLine id type obj_id<block_start>MainWindow.script_FilterStreamLine(id type obj_id)<line_sep><pass><block_end><def_stmt>script_Vector_glyph_shaftRes id type obj_id val<block_start>MainWindow.script_Properties_vector_glyph_shaftRes(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Vector_glyph_shaftRad id type obj_id val<block_start>MainWindow.script_Properties_vector_glyph_shaftRad(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_view_backgroundType id type obj_id val<block_start>MainWindow.script_Properties_view_backgroundType(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_view_backgroundColor id type obj_id red green blue<block_start>MainWindow.script_Properties_view_backgroundColor(id type obj_id red green blue)<line_sep><pass><block_end><def_stmt>script_Properties_view_background2Color id type obj_id red green blue<block_start>MainWindow.script_Properties_view_background2Color(id type obj_id red green blue)<line_sep><pass><block_end><def_stmt>script_Properties_view_axesVisible id type a<block_start>MainWindow.script_Properties_view_axesVisible(id type a)<line_sep><pass><block_end><def_stmt>script_Properties_view_cameraParallel id type a<block_start>MainWindow.script_Properties_view_cameraParallel(id type a)<line_sep><pass><block_end><def_stmt>script_Properties_view_interaction id type a<block_start>MainWindow.script_Properties_view_interaction(id type a)<line_sep><pass><block_end><def_stmt>script_Properties_renderView id type<block_start>MainWindow.script_Properties_renderView(id type)<line_sep><pass><block_end><def_stmt>script_Camera_Position id type pos0 pos1 pos2<block_start>MainWindow.script_Camera_Position(id type pos0 pos1 pos2)<line_sep><pass><block_end><def_stmt>script_Camera_FocalPoint id type focalPoint0 focalPoint1 focalPoint2<block_start>MainWindow.script_Camera_FocalPoint(id type focalPoint0 focalPoint1 focalPoint2)<line_sep><pass><block_end><def_stmt>script_Camera_ClippingRange id type clippingRange0 clippingRange1<block_start>MainWindow.script_Camera_ClippingRange(id type clippingRange0 clippingRange1)<line_sep><pass><block_end><def_stmt>script_Camera_ViewUp id type viewup0 viewup1 viewup2<block_start>MainWindow.script_Camera_ViewUp(id type viewup0 viewup1 viewup2)<line_sep><pass><block_end><def_stmt>script_Camera_ViewAngle id type angle<block_start>MainWindow.script_Camera_ViewAngle(id type angle)<line_sep><pass><block_end><def_stmt>script_Camera_Zoom id type zoom<block_start>MainWindow.script_Camera_Zoom(id type zoom)<line_sep><pass><block_end><def_stmt>script_Camera_Reset id type <block_start>MainWindow.script_Camera_Reset(id type )<line_sep><pass><block_end><def_stmt>script_Properties_planeOrigin id type obj_id x y z<block_start>MainWindow.script_Properties_planeOrigin(id type obj_id x y z)<line_sep><pass><block_end><def_stmt>script_Properties_planeNormal id type obj_id x y z<block_start>MainWindow.script_Properties_planeNormal(id type obj_id x y z)<line_sep><pass><block_end><def_stmt>script_Properties_planeVisible id type obj_id 
a<block_start>MainWindow.script_Properties_planeVisible(id type obj_id a)<line_sep><pass><block_end><def_stmt>script_Properties_insideOut id type obj_id a<block_start>MainWindow.script_Properties_insideOut(id type obj_id a)<line_sep><pass><block_end><def_stmt>script_Contour_Column id type obj_id val<block_start>MainWindow.script_Properties_contourColumn(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Contour_value id type obj_id val<block_start>MainWindow.script_Properties_contourValue(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Contour_reflection id type obj_id aaa<block_start>MainWindow.script_Properties_contour_reflection(id type obj_id aaa)<line_sep><pass><block_end><def_stmt>script_Contour_reflectionAxes id type obj_id val<block_start>MainWindow.script_Properties_contour_reflectionAxes(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_reflectionAxes id type obj_id reflection_axis<block_start>MainWindow.script_Properties_reflectionAxes(id type obj_id reflection_axis)<line_sep><pass><block_end><def_stmt>Smooth_smooth id type obj_id smotype coef<block_start>MainWindow.script_Properties_smooth(id type obj_id smotype coef)<line_sep><pass><block_end><def_stmt>script_Streamline_vector id type obj_id val<block_start>MainWindow.script_Properties_streamline_vector(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_integration_type id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_type(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_integration_direction id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_direction(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_integration_stepUnit id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_stepUnit(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_streamline_integration_stepUnit id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_stepUnit(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_integration_initStepLen id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_initStepLen(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_integration_miniStepLen id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_miniStepLen(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_integration_maxiStepLen id type obj_id val<block_start>MainWindow.script_Properties_streamline_integration_maxiStepLen(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_stream_maxiSteps id type obj_id val<block_start>MainWindow.script_Properties_streamline_stream_maxiSteps(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_stream_maxiStreamLen id type obj_id val<block_start>MainWindow.script_Properties_streamline_stream_maxiStreamLen(id type obj_id val)<line_sep><pass><block_end>########### <def_stmt>script_Streamline_stream_terminalSpeed id type obj_id val<block_start>MainWindow.script_Properties_streamline_stream_terminalSpeed(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_stream_maxiError id type obj_id val<block_start>MainWindow.script_Properties_streamline_stream_maxiError(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_seeds_type id type obj_id
val<block_start>MainWindow.script_Properties_streamline_seeds_type(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_seeds_mPoint id type obj_id val0 val1 val2<block_start>MainWindow.script_Properties_streamline_seeds_mPoint(id type obj_id val0 val1 val2)<line_sep><pass><block_end><def_stmt>script_Streamline_seeds_num_points id type obj_id val<block_start>MainWindow.script_Properties_streamline_seeds_num_points(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_seeds_radius id type obj_id val<block_start>MainWindow.script_Properties_streamline_seeds_radius(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_vorticity id type obj_id val<block_start>MainWindow.script_Properties_streamline_vorticity(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_interpolatorType id type obj_id val<block_start>MainWindow.script_Properties_streamline_interpolatorType(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Streamline_surface_streamLines id type obj_id val<block_start>MainWindow.script_Properties_streamline_surface_streamLines(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_streamline_reflection id type obj_id val<block_start>MainWindow.script_Properties_streamline_reflection(id type obj_id val)<line_sep><pass><block_end><def_stmt>script_Properties_streamline_reflectionAxes id type obj_id val<block_start>MainWindow.script_Properties_streamline_reflectionAxes(id type obj_id val)<line_sep><pass><block_end>
""" To see if we have the right pipelines in place """<import_stmt>inspect<import_from_stmt>unittest TestCase<import_from_stmt>scrapy signals Field Item<import_from_stmt>mock patch mock_open Mock call<import_from_stmt>arachne.extensions ExportCSV ExportData ExportJSON<import_from_stmt>scrapy.contrib.exporter CsvItemExporter JsonItemExporter<class_stmt>ScrapyItem(Item)<block_start>field1=Field()<line_sep>field2=Field()<line_sep>field3=Field()<block_end><class_stmt>TestPipelines(TestCase)<block_start><def_stmt>test_cls_export_data self<block_start>cls=ExportData()<line_sep>self.assertTrue(inspect.ismethod(cls.from_crawler))<with_stmt>self.assertRaises(NotImplementedError)<block_start>cls.spider_opened('test')<block_end># TODO: test extension signals connect using `mock.assert_has_calls` crawler_mock=Mock()<line_sep>cls.from_crawler(crawler_mock)<assert_stmt>crawler_mock.signals.connect.called<line_sep>self.assertEquals(cls.files {})<line_sep>self.assertIsNone(cls.exporter)<block_end><def_stmt>test_export_cls self<block_start>test_classes=[{'cls':ExportJSON 'file_type':'json' 'exporter':JsonItemExporter} {'cls':ExportCSV 'file_type':'csv' 'exporter':CsvItemExporter}]<for_stmt>test_cls test_classes<block_start>cls=test_cls['cls']()<line_sep>mock_open_func=mock_open(read_data='Hello')<line_sep>spider=Mock()<line_sep>spider.name='abc'<with_stmt>patch('arachne.extensions.open' mock_open_func)<block_start>cls.spider_opened(spider)<line_sep>path='exports/%s/abc.%s'%(test_cls['file_type'] test_cls['file_type'])<line_sep>mock_open_func.assert_called_with(path 'w+b')<line_sep>self.assertIsInstance(cls.exporter test_cls['exporter'])<line_sep># test if cls.files is empty cls.spider_closed(spider)<line_sep>self.assertEquals(cls.files {})<line_sep># test exporter.export_item item=ScrapyItem()<line_sep>result=cls.item_scraped(item spider)<line_sep>self.assertEquals(item result)<block_end><block_end><block_end><block_end>
#//---------------------------------------------------------------------- #// Copyright 2010-2011 Mentor Graphics Corporation #// Copyright 2010-2011 Synopsys, Inc #// Copyright 2019-2020 <NAME> (tpoikela) #// All Rights Reserved Worldwide #// #// Licensed under the Apache License, Version 2.0 (the #// "License"); you may not use this file except in #// compliance with the License. You may obtain a copy of #// the License at #// #// http://www.apache.org/licenses/LICENSE-2.0 #// #// Unless required by applicable law or agreed to in #// writing, software distributed under the License is #// distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR #// CONDITIONS OF ANY KIND, either express or implied. See #// the License for the specific language governing #// permissions and limitations under the License. #//---------------------------------------------------------------------- <import_stmt>cocotb<import_from_stmt>uvm UVMComponent uvm_component_utils UVMTLMTime UVMTLMBInitiatorSocket <import_from_stmt>uvm.macros *<import_from_stmt>apb_rw apb_rw<class_stmt>initiator(UVMComponent)# uvm_tlm_b_initiator_socket#(apb_rw) sock <block_start><def_stmt>__init__ self name="initiator" parent=<none><block_start>super().__init__(name parent)<line_sep>self.sock=UVMTLMBInitiatorSocket("sock" self)<block_end># (apb_rw)("sock", self) # // # // Execute a simple read-modify-write # // <async_keyword><def_stmt>run_phase self phase<block_start>delay=UVMTLMTime()<line_sep>phase.raise_objection(self)<for_stmt>i range(10)<block_start>rw=apb_rw.type_id.create("rw" <none> self.get_full_name())<line_sep>rw.kind=apb_rw.READ<line_sep>rw.addr=0x0000FF00<line_sep>rw.data=i+1<line_sep><await>self.sock.b_transport(rw delay)<line_sep># Ok to reuse the same RW instance rw.kind=apb_rw.WRITE<line_sep>rw.data=~rw.data<line_sep><await>self.sock.b_transport(rw delay)<block_end>phase.drop_objection(self)<block_end><block_end>uvm_component_utils(initiator)<line_sep>
# -*- coding: utf-8 -*- # Import modules <import_stmt>pytest<import_from_stmt>google.cloud bigquery<import_from_stmt>tests.spells.base_test_spell BaseTestSpell SpellDB<line_sep># Import from package <import_from_stmt>geomancer.backend.settings SQLiteConfig<import_from_stmt>geomancer.spells LengthOf<line_sep>params=[SpellDB(spell=LengthOf(on="residential" within=50 source_table="gis_osm_roads_free_1" feature_name="len_residential" options=SQLiteConfig() ) dburl="sqlite:///tests/data/source.sqlite" )]<line_sep>@pytest.mark.slow<class_stmt>TestLengthOf(BaseTestSpell)<block_start>@pytest.fixture(params=params ids=["roads-sqlite"])<def_stmt>spelldb self request<block_start><return>request.param<block_end><block_end>
<import_stmt>pytest<import_stmt>os<import_stmt>time<import_stmt>CHIP_IO.PWM<as>PWM<import_stmt>CHIP_IO.OverlayManager<as>OM<import_stmt>CHIP_IO.Utilities<as>UT<def_stmt>setup_module module<block_start><if_stmt><not>UT.is_chip_pro()<block_start>OM.load("PWM0")<block_end><block_end><def_stmt>teardown_module module<block_start>PWM.cleanup()<if_stmt><not>UT.is_chip_pro()<block_start>OM.unload("PWM0")<block_end><block_end><class_stmt>TestPwmSetup<block_start><def_stmt>setup_method self test_method<block_start>time.sleep(0.5)<block_end><def_stmt>test_start_pwm self<block_start>PWM.start("PWM0" 0)<line_sep>pwm_test='/sys/class/pwm/pwmchip0/pwm0/'<assert_stmt>os.path.exists(pwm_test)<eq><true><line_sep>duty=open(pwm_test+'duty_cycle').readline().strip()<line_sep>period=open(pwm_test+'period').readline().strip()<assert_stmt>int(duty)<eq>0<assert_stmt>int(period)<eq>500000<block_end><def_stmt>test_start_pwm_with_polarity_one self<block_start>PWM.cleanup()<line_sep>PWM.start("PWM0" 0 2000 1)<line_sep>pwm_test='/sys/class/pwm/pwmchip0/pwm0/'<line_sep>duty=open(pwm_test+'duty_cycle').readline().strip()<line_sep>period=open(pwm_test+'period').readline().strip()<line_sep>polarity=open(pwm_test+'polarity').readline().strip()<assert_stmt>int(duty)<eq>0<assert_stmt>int(period)<eq>500000<assert_stmt>str(polarity)<eq>"inversed"<block_end><def_stmt>test_start_pwm_with_polarity_default self<block_start>PWM.cleanup()<line_sep>PWM.start("PWM0" 0 2000 0)<line_sep>pwm_test='/sys/class/pwm/pwmchip0/pwm0/'<line_sep>duty=open(pwm_test+'duty_cycle').readline().strip()<line_sep>period=open(pwm_test+'period').readline().strip()<line_sep>polarity=open(pwm_test+'polarity').readline().strip()<assert_stmt>int(duty)<eq>0<assert_stmt>int(period)<eq>500000<assert_stmt>str(polarity)<eq>"normal"<block_end><def_stmt>test_start_pwm_with_polarity_zero self<block_start>PWM.cleanup()<line_sep>PWM.start("PWM0" 0 2000 0)<line_sep>pwm_test='/sys/class/pwm/pwmchip0/pwm0/'<line_sep>duty=open(pwm_test+'duty_cycle').readline().strip()<line_sep>period=open(pwm_test+'period').readline().strip()<line_sep>polarity=open(pwm_test+'polarity').readline().strip()<assert_stmt>int(duty)<eq>0<assert_stmt>int(period)<eq>500000<assert_stmt>str(polarity)<eq>"normal"<block_end><def_stmt>test_pwm_start_invalid_pwm_key self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.start("P8_25" -1)<block_end><block_end><def_stmt>test_pwm_start_invalid_duty_cycle_negative self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.start("PWM0" -1)<block_end><block_end><def_stmt>test_pwm_start_valid_duty_cycle_min self#testing an exception isn't thrown <block_start>PWM.cleanup()<line_sep>PWM.start("PWM0" 0)<line_sep>PWM.cleanup()<block_end><def_stmt>test_pwm_start_valid_duty_cycle_max self#testing an exception isn't thrown <block_start>PWM.start("PWM0" 100)<line_sep>PWM.cleanup()<block_end><def_stmt>test_pwm_start_invalid_duty_cycle_high self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.start("PWM0" 101)<block_end><block_end><def_stmt>test_pwm_start_invalid_duty_cycle_string self<block_start><with_stmt>pytest.raises(TypeError)<block_start>PWM.start("PWM0" "1")<block_end><block_end><def_stmt>test_pwm_start_invalid_frequency_negative self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.start("PWM0" 0 -1)<block_end><block_end><def_stmt>test_pwm_start_invalid_frequency_string self<block_start><with_stmt>pytest.raises(TypeError)<block_start>PWM.start("PWM0" 0 
"1")<block_end><block_end><def_stmt>test_pwm_start_negative_polarity self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.start("PWM0" 0 100 -1)<block_end><block_end><def_stmt>test_pwm_start_invalid_positive_polarity self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.start("PWM0" 0 100 2)<block_end><block_end><def_stmt>test_pwm_start_invalid_polarity_type self<block_start><with_stmt>pytest.raises(TypeError)<block_start>PWM.start("PWM0" 0 100 "1")<block_end><block_end>@pytest.mark.xfail(reason="pwm cleanup is doing weirdness for this test")<def_stmt>test_pwm_duty_modified self<block_start>PWM.start("PWM0" 0)<line_sep>pwm_test='/sys/class/pwm/pwmchip0/pwm0/'<assert_stmt>os.path.exists(pwm_test)<eq><true><line_sep>duty=open(pwm_test+'duty_cycle').readline().strip()<line_sep>period=open(pwm_test+'period').readline().strip()<assert_stmt>int(duty)<eq>0<assert_stmt>int(period)<eq>500000<line_sep>PWM.set_duty_cycle("PWM0" 100)<line_sep>duty=open(pwm_test+'duty_cycle').readline().strip()<line_sep>period=open(pwm_test+'period').readline().strip()<assert_stmt>int(duty)<eq>500000<assert_stmt>int(period)<eq>500000<block_end><def_stmt>test_pwm_duty_cycle_non_setup_key self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.cleanup()<line_sep>PWM.set_duty_cycle("PWM0" 100)<block_end><block_end><def_stmt>test_pwm_duty_cycle_invalid_key self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.set_duty_cycle("P9_15" 100)<block_end><block_end><def_stmt>test_pwm_duty_cycle_invalid_value_high self<block_start>PWM.start("PWM0" 0)<with_stmt>pytest.raises(ValueError)<block_start>PWM.set_duty_cycle("PWM0" 101)<block_end>PWM.cleanup()<block_end><def_stmt>test_pwm_duty_cycle_invalid_value_negative self<block_start>PWM.start("PWM0" 0)<with_stmt>pytest.raises(ValueError)<block_start>PWM.set_duty_cycle("PWM0" -1)<block_end>PWM.cleanup()<block_end><def_stmt>test_pwm_duty_cycle_invalid_value_string self<block_start>PWM.start("PWM0" 0)<with_stmt>pytest.raises(TypeError)<block_start>PWM.set_duty_cycle("PWM0" "a")<block_end>PWM.cleanup()<block_end><def_stmt>test_pwm_frequency_invalid_value_negative self<block_start>PWM.start("PWM0" 0)<with_stmt>pytest.raises(ValueError)<block_start>PWM.set_frequency("PWM0" -1)<block_end>PWM.cleanup()<block_end><def_stmt>test_pwm_frequency_invalid_value_string self<block_start>PWM.start("PWM0" 0)<with_stmt>pytest.raises(TypeError)<block_start>PWM.set_frequency("PWM0" "11")<block_end>PWM.cleanup()<block_end><def_stmt>test_pwm_freq_non_setup_key self<block_start><with_stmt>pytest.raises(RuntimeError)<block_start>PWM.set_frequency("PWM0" 100)<block_end><block_end><def_stmt>test_pwm_freq_non_setup_key self<block_start><with_stmt>pytest.raises(ValueError)<block_start>PWM.set_frequency("P9_15" 100)<block_end><block_end><block_end>
<import_from_future_stmt> print_function unicode_literals<import_from_stmt>gratipay fake_data<import_from_stmt>gratipay.testing Harness<import_from_stmt>gratipay.cli.fake_data main<class_stmt>TestFakeData(Harness)<block_start><def_stmt>test_fake_data_cli self<block_start>num_participants=6<line_sep>num_tips=25<line_sep>num_teams=5<line_sep>num_packages=5<line_sep>main(self.db num_participants num_tips num_teams num_packages)<line_sep>participants=self.db.all("SELECT * FROM participants")<line_sep>teams=self.db.all("SELECT * FROM teams")<line_sep>packages=self.db.all("SELECT * FROM packages")<line_sep>payment_instructions=self.db.all("SELECT * FROM payment_instructions")<assert_stmt>len(participants)<eq>num_participants<assert_stmt>len(teams)<eq>num_teams+1# +1 for the fake Gratipay team. <assert_stmt>len(packages)<eq>num_packages<assert_stmt>len(payment_instructions)<eq>num_tips<block_end><def_stmt>test_fake_participant_identity self<block_start>crusher=self.make_participant('crusher' email_address='<EMAIL>')<line_sep>country_id=fake_data.fake_participant_identity(crusher)<assert_stmt>[x.country.id<for>x crusher.list_identity_metadata()]<eq>[country_id]<block_end><def_stmt>test_fake_team_doesnt_fail_for_name_with_apostrophe self<block_start>crusher=self.make_participant('crusher' email_address='<EMAIL>')<line_sep>team=fake_data.fake_team(self.db crusher "D'Amorebury")<assert_stmt>team.name<ne>"d-amorebury"<block_end><block_end>
<import_from_stmt>enum Enum<line_sep>SYNC_RESPONSE_SINK="robusta-synchronized-response-sink"<class_stmt>FindingType(Enum)<block_start>ISSUE="issue"<line_sep>CONF_CHANGE="configuration_change"<line_sep>HEALTH_CHECK="health_check"<line_sep>REPORT="report"<block_end># Finding sources <class_stmt>FindingSource(Enum)<block_start>NONE=<none># empty default KUBERNETES_API_SERVER="kubernetes_api_server"<line_sep>PROMETHEUS="prometheus"<line_sep>MANUAL="manual"<line_sep>CALLBACK="callback"<block_end># Finding subject types <class_stmt>FindingSubjectType(Enum)<block_start>TYPE_NONE=<none># empty default TYPE_DEPLOYMENT="deployment"<line_sep>TYPE_NODE="node"<line_sep>TYPE_POD="pod"<line_sep>TYPE_JOB="job"<line_sep>TYPE_DAEMONSET="daemonset"<line_sep>@staticmethod<def_stmt>from_kind kind:str<block_start><if_stmt>kind<eq>"deployment"<block_start><return>FindingSubjectType.TYPE_DEPLOYMENT<block_end><elif_stmt>kind<eq>"node"<block_start><return>FindingSubjectType.TYPE_NODE<block_end><elif_stmt>kind<eq>"pod"<block_start><return>FindingSubjectType.TYPE_POD<block_end><elif_stmt>kind<eq>"job"<block_start><return>FindingSubjectType.TYPE_JOB<block_end><elif_stmt>kind<eq>"daemonset"<block_start><return>FindingSubjectType.TYPE_DAEMONSET<block_end><return>FindingSubjectType.TYPE_NONE<block_end><block_end># Annotations <class_stmt>SlackAnnotations<block_start>UNFURL="unfurl"<line_sep>ATTACHMENT="attachment"<block_end>
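A quick sketch of the kind-to-subject mapping above; any kind outside the handled set falls through to TYPE_NONE.

assert FindingSubjectType.from_kind("pod") is FindingSubjectType.TYPE_POD
assert FindingSubjectType.from_kind("replicaset") is FindingSubjectType.TYPE_NONE  # unknown kind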
<import_stmt>copy<import_stmt>cv2<import_stmt>glob<import_stmt>json<import_stmt>numpy<as>np<import_stmt>os<import_from_stmt>.box_utils compute_box_3d boxes_to_corners_3d get_size<import_from_stmt>.rotation convert_angle_axis_to_matrix3<import_from_stmt>.taxonomy class_names ARKitDatasetConfig<def_stmt>TrajStringToMatrix traj_str<block_start>""" convert one line of a trajectory file into a timestamp and a 4x4 transformation matrix Args: traj_str: A space-delimited string, one line of the trajectory file, representing a camera position at a particular timestamp. The line has seven columns: * Column 1: timestamp * Columns 2-4: rotation (axis-angle representation in radians) * Columns 5-7: translation (usually in meters) Returns: ts: timestamp (str) Rt: 4x4 transformation matrix (inverse of the parsed camera extrinsics) """<line_sep># line=[float(x) for x in traj_str.split()] # ts = line[0]; # R = cv2.Rodrigues(np.array(line[1:4]))[0]; # t = np.array(line[4:7]); # Rt = np.concatenate((np.concatenate((R, t[:,np.newaxis]), axis=1), [[0.0,0.0,0.0,1.0]]), axis=0) tokens=traj_str.split()<assert_stmt>len(tokens)<eq>7<line_sep>ts=tokens[0]<line_sep># Rotation in angle axis angle_axis=[float(tokens[1]) float(tokens[2]) float(tokens[3])]<line_sep>r_w_to_p=convert_angle_axis_to_matrix3(np.asarray(angle_axis))<line_sep># Translation t_w_to_p=np.asarray([float(tokens[4]) float(tokens[5]) float(tokens[6])])<line_sep>extrinsics=np.eye(4 4)<line_sep>extrinsics[:3 :3]=r_w_to_p<line_sep>extrinsics[:3 -1]=t_w_to_p<line_sep>Rt=np.linalg.inv(extrinsics)<line_sep><return>(ts Rt)<block_end><def_stmt>st2_camera_intrinsics filename<block_start>w,h,fx,fy,hw,hh=np.loadtxt(filename)<line_sep><return>np.asarray([[fx 0 hw] [0 fy hh] [0 0 1]])<block_end><def_stmt>generate_point rgb_image depth_image intrinsic subsample=1 world_coordinate=<true> pose=<none> <block_start>"""Generate 3D point coordinates and related rgb feature Args: rgb_image: (h, w, 3) rgb depth_image: (h, w) depth intrinsic: (3, 3) subsample: int resize stride world_coordinate: bool pose: (4, 4) matrix transforming camera coordinates to world coordinates Returns: points: (N, 3) point cloud coordinates in world-coordinates if world_coordinate==True else in camera coordinates rgb_feat: (N, 3) rgb feature of each point """<line_sep>intrinsic_4x4=np.identity(4)<line_sep>intrinsic_4x4[:3 :3]=intrinsic<line_sep>u,v=np.meshgrid(range(0 depth_image.shape[1] subsample) range(0 depth_image.shape[0] subsample) )<line_sep>d=depth_image[v u]<line_sep>d_filter=d<ne>0<line_sep>mat=np.vstack((u[d_filter]<times>d[d_filter] v[d_filter]<times>d[d_filter] d[d_filter] np.ones_like(u[d_filter]) ))<line_sep>new_points_3d=np.dot(np.linalg.inv(intrinsic_4x4) mat)[:3]<if_stmt>world_coordinate<block_start>new_points_3d_padding=np.vstack((new_points_3d np.ones((1 new_points_3d.shape[1]))))<line_sep>world_coord_padding=np.dot(pose new_points_3d_padding)<line_sep>new_points_3d=world_coord_padding[:3]<block_end>rgb_feat=rgb_image[v u][d_filter]<line_sep><return>new_points_3d.T rgb_feat<block_end><def_stmt>extract_gt gt_fn<block_start>"""extract original label data Args: gt_fn: str (file name of "annotation.json") after loading, we got a dict with keys 'data', 'stats', 'comment', 'confirm', 'skipped' ['data']: a list of dict for bboxes, each dict has keys: 'uid', 'label', 'modelId', 'children', 'objectId', 'segments', 'hierarchy', 'isInGroup', 'labelType', 'attributes' 'label': str 'segments': dict for boxes 'centroid': list of float (x, y, z)? 'axesLengths': list of float (x, y, z)? 'normalizedAxes': list of float len()=9 'uid' 'comments': 'stats': ...
Returns: skipped: bool skipped or not boxes_corners: (n, 8, 3) box corners **world-coordinate** centers: (n, 3) **world-coordinate** sizes: (n, 3) full-sizes (no halving!) labels: list of str uids: list of str """<line_sep>gt=json.load(open(gt_fn "r"))<line_sep>skipped=gt['skipped']<if_stmt>len(gt)<eq>0<block_start>boxes_corners=np.zeros((0 8 3))<line_sep>centers=np.zeros((0 3))<line_sep>sizes=np.zeros((0 3))<line_sep>labels,uids=[] []<line_sep><return>skipped boxes_corners centers sizes labels uids<block_end>boxes_corners=[]<line_sep>centers=[]<line_sep>sizes=[]<line_sep>labels=[]<line_sep>uids=[]<for_stmt>data gt['data']<block_start>l=data["label"]<for_stmt>delimiter [" " "-" "/"]<block_start>l=l.replace(delimiter "_")<block_end><if_stmt>l<not><in>class_names<block_start>print("unknown category: %s"%l)<line_sep><continue><block_end>rotmat=np.array(data["segments"]["obbAligned"]["normalizedAxes"]).reshape(3 3)<line_sep>center=np.array(data["segments"]["obbAligned"]["centroid"]).reshape(-1 3)<line_sep>size=np.array(data["segments"]["obbAligned"]["axesLengths"]).reshape(-1 3)<line_sep>box3d=compute_box_3d(size.reshape(3).tolist() center rotmat)<line_sep>''' Box corner order that we return is of the format below: 6 -------- 7 /| /| 5 -------- 4 . | | | | . 2 -------- 3 |/ |/ 1 -------- 0 '''<line_sep>boxes_corners.append(box3d.reshape(1 8 3))<line_sep>size=np.array(get_size(box3d)).reshape(1 3)<line_sep>center=np.mean(box3d axis=0).reshape(1 3)<line_sep># boxes_corners.append(box3d.reshape(1, 8, 3)) centers.append(center)<line_sep>sizes.append(size)<line_sep># labels.append(l) labels.append(data["label"])<line_sep>uids.append(data["uid"])<block_end>centers=np.concatenate(centers axis=0)<line_sep>sizes=np.concatenate(sizes axis=0)<line_sep>boxes_corners=np.concatenate(boxes_corners axis=0)<line_sep><return>skipped boxes_corners centers sizes labels uids<block_end><class_stmt>TenFpsDataLoader(object)<block_start><def_stmt>__init__ self dataset_cfg class_names root_path=<none> gt_path=<none> logger=<none> frame_rate=1 with_color_image=<true> subsample=2 world_coordinate=<true> <block_start>""" Args: dataset_cfg: EasyDict() with key POINT_CLOUD_RANGE POINT_FEATURE_ENCODING DATA_PROCESSOR class_names: list of str root_path: path with all info for a scene_id color, color_2det, depth, label, vote, ... 
gt_path: xxx.json just to get correct floor height an2d_root: path to scene_id.json or None logger: frame_rate: int subsample: int world_coordinate: bool """<line_sep>self.root_path=root_path<line_sep># pipeline does box residual coding here self.num_class=len(class_names)<line_sep>self.dc=ARKitDatasetConfig()<line_sep>depth_folder=os.path.join(self.root_path "lowres_depth")<if_stmt><not>os.path.exists(depth_folder)<block_start>self.frame_ids=[]<block_end><else_stmt><block_start>depth_images=sorted(glob.glob(os.path.join(depth_folder "*.png")))<line_sep>self.frame_ids=[os.path.basename(x)<for>x depth_images]<line_sep>self.frame_ids=[x.split(".png")[0].split("_")[1]<for>x self.frame_ids]<line_sep>self.video_id=depth_folder.split('/')[-3]<line_sep>self.frame_ids.sort()<line_sep>self.intrinsics={}<block_end>traj_file=os.path.join(self.root_path 'lowres_wide.traj')<line_sep># convert traj to a {timestamp: 4x4 pose} dict; guard the read so a missing traj file yields empty poses instead of raising poses_from_traj={}<if_stmt>os.path.exists(traj_file)<block_start><with_stmt>open(traj_file)<as>f<block_start>self.traj=f.readlines()<block_end><for_stmt>line self.traj<block_start>traj_timestamp=line.split(" ")[0]<line_sep>poses_from_traj[f"{round(float(traj_timestamp) 3):.3f}"]=TrajStringToMatrix(line)[1].tolist()<block_end>self.poses=poses_from_traj<block_end><else_stmt><block_start>self.traj=[]<line_sep>self.poses={}<block_end># get intrinsics <for_stmt>frame_id self.frame_ids<block_start>intrinsic_fn=os.path.join(self.root_path "lowres_wide_intrinsics" f"{self.video_id}_{frame_id}.pincam")<if_stmt><not>os.path.exists(intrinsic_fn)<block_start>intrinsic_fn=os.path.join(self.root_path "lowres_wide_intrinsics" f"{self.video_id}_{float(frame_id)-0.001:.3f}.pincam")<block_end><if_stmt><not>os.path.exists(intrinsic_fn)<block_start>intrinsic_fn=os.path.join(self.root_path "lowres_wide_intrinsics" f"{self.video_id}_{float(frame_id)+0.001:.3f}.pincam")<block_end><if_stmt><not>os.path.exists(intrinsic_fn)<block_start>print("frame_id" frame_id)<line_sep>print(intrinsic_fn)<block_end>self.intrinsics[frame_id]=st2_camera_intrinsics(intrinsic_fn)<block_end># # intrinsic_fn = os.path.join(self.root_path, "camera.txt") # intrinsic_fn = os.path.join(self.root_path, "color.pincam") # if os.path.exists(intrinsic_fn): # self.intrinsics = st2_camera_intrinsics(intrinsic_fn) # else: # self.intrinsics = None self.frame_rate=frame_rate<line_sep>self.subsample=subsample<line_sep>self.with_color_image=with_color_image<line_sep>self.world_coordinate=world_coordinate<if_stmt>gt_path<is><not><none><and>os.path.exists(gt_path)<block_start>skipped,gt_corners,gt_centers,gt_sizes,_,_=extract_gt(gt_path)<line_sep>self.gt_corners=gt_corners<line_sep>self.gt_centers=gt_centers<line_sep>self.gt_sizes=gt_sizes<block_end><else_stmt><block_start>self.gt_corners=<none><line_sep>self.gt_centers=<none><line_sep>self.gt_sizes=<none><block_end><block_end><def_stmt>__iter__ self<block_start><return>self<block_end><def_stmt>__len__ self<block_start><return>len(self.frame_ids)<block_end><def_stmt>__getitem__ self idx<block_start>""" Returns: frame: a dict {frame_id}: str {depth}: (h, w) {image}: (h, w) {image_path}: str {intrinsics}: np.array 3x3 {pose}: np.array 4x4 {pcd}: np.array (n, 3) in world coordinate {color}: (n, 3) """<line_sep>frame_id=self.frame_ids[idx]<line_sep>frame={}<line_sep>frame["frame_id"]=frame_id<line_sep>fname="{}_{}.png".format(self.video_id frame_id)<line_sep># fname = "{}.png".format(frame_id) depth_image_path=os.path.join(self.root_path "lowres_depth"
fname)<if_stmt><not>os.path.exists(depth_image_path)<block_start>print(depth_image_path "does not exist")<block_end>image_path=os.path.join(self.root_path "lowres_wide" fname)<if_stmt><not>os.path.exists(image_path)<block_start>print(image_path "does not exist")<block_end>frame["depth"]=cv2.imread(depth_image_path -1)<line_sep>frame["image"]=cv2.imread(image_path)<line_sep>frame["image_path"]=image_path<line_sep>depth_height,depth_width=frame["depth"].shape<line_sep>im_height,im_width,im_channels=frame["image"].shape<line_sep>frame["intrinsics"]=copy.deepcopy(self.intrinsics[frame_id])<line_sep>frame_pose=<none><if_stmt>str(frame_id)<in>self.poses.keys()<block_start>frame_pose=np.array(self.poses[str(frame_id)])<block_end><else_stmt><block_start><for_stmt>my_key list(self.poses.keys())<block_start><if_stmt>abs(float(frame_id)-float(my_key))<l>0.005<block_start>frame_pose=np.array(self.poses[str(my_key)])<line_sep><break><block_end><block_end><block_end><assert_stmt>frame_pose<is><not><none> f"no pose found for frame {frame_id}"<line_sep>frame["pose"]=copy.deepcopy(frame_pose)<line_sep>im_height_scale=float(depth_height)/im_height<line_sep>im_width_scale=float(depth_width)/im_width<if_stmt>depth_height<ne>im_height<block_start>frame["image"]=np.zeros([depth_height depth_width 3])# 288, 384, 3 frame["image"][48:48+192 64:64+256 :]=cv2.imread(image_path)<block_end>(m n _)=frame["image"].shape<line_sep>depth_image=frame["depth"]/1000.0<line_sep>rgb_image=frame["image"]/255.0<line_sep>pcd,rgb_feat=generate_point(rgb_image depth_image frame["intrinsics"] self.subsample self.world_coordinate frame_pose )<line_sep>frame["pcd"]=pcd<line_sep>frame["color"]=rgb_feat<line_sep><return>frame<block_end><block_end>
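# --- Illustrative aside (not part of the original file): a minimal, self-contained
# sketch of the unprojection performed by generate_point above. The intrinsics and
# the constant 2 m depth map are made up for illustration; numpy is the only
# dependency. It stacks u*d, v*d, d exactly as the loader does, then applies the
# inverse intrinsics to recover camera-space points.
import numpy as np

K = np.array([[200.0, 0.0, 96.0],
              [0.0, 200.0, 72.0],
              [0.0, 0.0, 1.0]])                 # hypothetical pinhole intrinsics
depth = np.full((144, 192), 2.0)                # fake depth map: 2 m everywhere
u, v = np.meshgrid(range(depth.shape[1]), range(depth.shape[0]))
d = depth[v, u]
pix = np.vstack((u.ravel() * d.ravel(), v.ravel() * d.ravel(), d.ravel()))
pts = (np.linalg.inv(K) @ pix).T                # (N, 3) camera-space points
assert np.allclose(pts[:, 2], 2.0)              # every point lies on the z = 2 m plane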
''' Parse the MC_object database from the Habitat Stratus backup. There are still lots of unknowns: * Many objects have container 0x20202020. They appear to be unused, but it's unclear why. * Some address strings have unprintable characters. It's unclear if this was intentional or garbage data. * Matchbook (class 49): there are 3 objects of this type, but they appear to be overwritten or otherwise unused. * When combined with MC_regions, we find lots of orphaned objects. This may be because of broken relationships. Some appear to be pockets of avatars. '''<import_stmt>json struct sys<import_from_stmt>collections OrderedDict<line_sep>STRUCT_ITEMS=('id' 'class' 'container' 'contype' 'x_pos' 'y_pos' 'style' 'gr_state' 'orientation' 'gr_width' 'nitty_bits' 'prop_length' 'property_data' )<line_sep>FORMAT='> 3I 7H I 10x H 86s'<assert_stmt>struct.calcsize(FORMAT)<eq>128<line_sep>PARSERS={2:('>HI' ['magic_type' 'magic_data']) 129:('>H' ['state']) 6:('>HW' ['open_flags' 'key']) 130:('>H' ['open_flags']) 10:('>HIH' ['current_page' 'text_id' 'last_page']) 12:('>H' ['filled']) 13:('>HW' ['open_flags' 'key']) 131:('>HH' ['width' 'length']) 132:('>xxxxxxi' ['connection']) 158:('>H' ['open_flags']) 134:('>H' ['open_flags']) 135:('>HW' ['open_flags' 'key']) 136:('>I' ['take']) 137:('>H' ['open_flags']) 18:('>HW' ['open_flags' 'key']) # + whoput array 20:('>H' ['live']) 21:('>H' ['state']) 22:('>HWIH' ['open_flags' 'key' 'owner' 'locked']) 23:('>HWi' ['open_flags' 'key' 'connection']) 25:('>HH' ['count' 'effect']) 28:('>HI20s' ['state' 'take' 'address']) 26:('>H' ['charge']) 27:('>H' ['state']) 29:('>H' ['mass']) 30:('>H' ['on']) 93:('>H' ['flat_type']) 139:('>H' ['on']) 140:('>I' ['take']) 141:('>H' ['live']) 5:('>H' ['state']) 32:('>HW' ['open_flags' 'key']) 33:('>HI' ['magic_type' 'magic_data']) 98:('>HWHHHHHHHHHHHH' ['open_flags' 'key' 'x_offset_1' 'y_offset_1' 'x_offset_2' 'y_offset_2' 'x_offset_3' 'y_offset_3' 'x_offset_4' 'y_offset_4' 'x_offset_5' 'y_offset_5' 'x_offset_6' 'y_offset_6']) 35:('>H' ['pinpulled']) 38:('>H' ['state']) 88:('>HW' ['open_flags' 'key']) 40:('>H' ['instant_what']) 42:('>W' ['key_number']) 43:('>H' ['is_magic']) 45:('>HHxxxxH' ['lamp_state' 'wisher' 'live']) 46:('>HI' ['magic_type' 'magic_data']) 48:('>HI' ['mail_arrived' 'owner']) # XXX can't find valid example to decode varstring properly #49: ('>84s', ['mtext']), 52:('>H' ['on']) 54:('>I' ['text_id']) 96:('>HW' ['open_flags' 'key']) 152:('>HH' ['mass' 'picture']) 58:('>H' ['mass']) 55:('>HIH' ['current_page' 'text_id' 'last_page']) 60:('>HI' ['magic_type' 'magic_data']) 61:('>H' ['mass']) 149:('>HH' ['base' 'pattern']) 150:('>HW' ['open_flags' 'key']) 63:('>H' ['on']) 64:('>H' ['scan_type']) #56: short sign, handled below #57: sign, handled below 95:('>H' ['charge']) 70:('>HH' ['on' 'tape']) 153:('>HH' ['width' 'height']) 92:('>HHHHHHHH' ['trapezoid_type' 'upper_left_x' 'upper_right_x' 'lower_left_x' 'lower_right_x' 'height' 'pattern_x_size' 'pattern_y_size']) # + pattern array 97:('>HI' ['magic_type' 'magic_data']) 155:('>HW' ['open_flags' 'key']) 74:('>HI20s' ['state' 'take' 'address']) 75:('>H' ['event']) 76:('>W' ['denom']) 87:('>HHHHHH' ['trapezoid_type' 'upper_left_x' 'upper_right_x' 'lower_left_x' 'lower_right_x' 'height']) 85:('>HWHH' ['open_flags' 'key' 'item_price' 'display_item']) # + prices array 86:('>HW' ['open_flags' 'key']) 80:('>HH' ['length' 'height' 'pattern']) 82:('>H' ['wind_level']) }<def_stmt>decode_properties buf fmt keys<block_start>''' Parse the properties from the given byte buffer, using the 
format string and names of keys for each item in the format string. Returns a dict of name/value pairs for all keys. '''<line_sep>fat_words=[]<line_sep># Handle fatwords, which are 16-bits stored as 00 xx 00 yy. <if_stmt>'W'<in>fmt# Hack: our fatword handling doesn't count repeated format strings <block_start>idx=fmt.index('W')<if_stmt>fmt[:idx].isdigit()<block_start><raise>ValueError('cant handle format strings with numbers')<block_end>base=1<if><not>fmt[0].isalpha()<else>0<line_sep>fmt_chars=[]<for_stmt>i,c enumerate(fmt)<block_start><if_stmt>c<eq>'W'<block_start>c='I'<line_sep>fat_words.append(keys[i-base])<block_end>fmt_chars.append(c)<block_end>fmt=''.join(fmt_chars)<block_end>data=OrderedDict(zip(keys struct.unpack(fmt buf[:struct.calcsize(fmt)])))<line_sep># Replace each fat word with its actual value <for_stmt>name fat_words<block_start>data[name]=((data[name]<rshift>8)&0xff00)|(data[name]&0xff)<block_end><return>data<block_end><def_stmt>parse_array buf fmt count<block_start>''' Unpack a number of same-sized items into an array '''<line_sep>items=[]<line_sep>item_size=struct.calcsize(fmt)<for_stmt>i range(count)<block_start>items<augadd>struct.unpack(fmt buf[i<times>item_size:(i+1)<times>item_size])<block_end><return>items<block_end><def_stmt>decode_text buf<block_start>''' Decode a word-packed string (00 x 00 y ...), which is similar to a fatword but is a string instead of int. '''<line_sep><return>[buf[i]<for>i range(1 len(buf) 2)]<block_end><def_stmt>parse_properties cls property_data<block_start>''' Decode basic properties and then class-specific ones '''<line_sep>data=OrderedDict()<line_sep>args=PARSERS.get(cls)<if_stmt>args<block_start>data.update(decode_properties(property_data *args))<line_sep>remainder_off=struct.calcsize(args[0].replace('W' 'I'))<block_end># Special class decoders for those not fully handled above <if_stmt>cls<eq>56# short sign <block_start>data['text']=decode_text(property_data[:10<times>2])<block_end><elif_stmt>cls<eq>57# sign <block_start>data['text']=decode_text(property_data[:40<times>2])<block_end><elif_stmt>cls<eq>18# countertop: whoput = 5 ints <block_start>n=5<line_sep>data['whoput']=parse_array(property_data[remainder_off:remainder_off+n<times>4] '>I' n)<block_end><elif_stmt>cls<eq>92# super trapezoid: pattern = 32 halfwords <block_start>n=32<line_sep>data['pattern']=parse_array(property_data[remainder_off:remainder_off+n<times>4] '>H' n)<block_end><elif_stmt>cls<eq>85# vendo front: prices = 10 halfwords <block_start>n=10<line_sep>data['prices']=parse_array(property_data[remainder_off:remainder_off+n<times>4] '>H' n)<block_end><return>data<block_end><def_stmt>decode_row row<block_start>''' Parse a single row and return a dict of the items '''<line_sep>data=OrderedDict(zip(STRUCT_ITEMS struct.unpack(FORMAT row)))<line_sep>data.update(parse_properties(data['class'] data['property_data']))<line_sep># Debug-dump the Matchbook class #if data['class'] == 49: # print ' '.join('%02x' % ord(c) for c in row) # print data # These fields tend to be all padding for many objects. # Maybe these were deleted or superseded? 
data['deleted']=(data['container']<eq>0x20202020<and>data['contype']<eq>0x2020)<line_sep># Always remove the raw property bytes, which we've decoded <del_stmt>data['property_data']<line_sep># Clear text data if it's unprintable <if_stmt>'address'<in>data<block_start><if_stmt>any(c<ge>0x80<for>c data['address'])#print ' '.join('%02x' % ord(c) for c in row) #print data <block_start>data['address']=''<block_end><else_stmt><block_start>data['address']=data['address'].decode('ascii')<block_end><block_end><return>data<block_end><def_stmt>main <block_start>''' Read each row from database and then decode it, dumping output to JSON '''<line_sep>items=[]<with_stmt>open(sys.argv[1] "rb")<as>fp<block_start><while_stmt><true><block_start>row=fp.read(struct.calcsize(FORMAT))<if_stmt><not>row<block_start><break><block_end>items.append(decode_row(row))<block_end><block_end><with_stmt>open(sys.argv[2] 'w')<as>fp<block_start>json.dump(items fp indent=2)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>main()<block_end>
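# --- Illustrative aside (not part of the original file): a worked example of the
# fatword handling in decode_properties above. A 16-bit value 0x1234 is stored on
# disk as 00 12 00 34; the buffer below is made up for illustration.
import struct
from collections import OrderedDict

# one ordinary halfword (7) followed by the fatword 0x1234
buf = struct.pack('>H', 7) + bytes([0x00, 0x12, 0x00, 0x34])
data = decode_properties(buf, '>HW', ['open_flags', 'key'])
assert data == OrderedDict([('open_flags', 7), ('key', 0x1234)])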
<import_stmt>sys<import_stmt>ast<import_stmt>io<class_stmt>Visitor(ast.NodeVisitor)<block_start><def_stmt>__init__ self f<block_start>self.f=f<block_end><def_stmt>generic_visit self node<block_start>self.f.write(ast.dump(node))<line_sep>self.f.write("\n")<line_sep>super().generic_visit(node)<block_end><def_stmt>visit_Assign self node<block_start><for_stmt>n node.targets<block_start>self.visit(n)<line_sep>self.f.write(" = ")<block_end>self.visit(node.value)<line_sep>self.f.write("\n")<block_end><def_stmt>visit_Name self node<block_start>self.f.write(node.id)<block_end><def_stmt>visit_Num self node<block_start>self.f.write(str(node.n))<block_end><block_end>SRC="""\ a = 1 a = b = 1 """<line_sep>EXP="""\ Module(body=[Assign(targets=[Name(id='a', ctx=Store())], value=Num(n=1)), Assign(targets=[Name(id='a', ctx=Store()), Name(id='b', ctx=Store())], value=Num(n=1))]) a = 1 a = b = 1 """<line_sep>t=ast.parse(SRC)<line_sep>buf=io.StringIO()<line_sep>visitor=Visitor(buf)<line_sep>visitor.visit(t)<assert_stmt>buf.getvalue()<eq>EXP<line_sep>
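# --- Illustrative aside (not part of the original test): the visitor falls back to
# ast.dump for unhandled nodes, so extending it is just a matter of adding visit_*
# methods. A hedged sketch handling additions, under the same pre-3.8 ast.Num
# assumption the expected output above already makes:
class AddVisitor(Visitor):
    def visit_BinOp(self, node):
        self.visit(node.left)
        self.f.write(" + " if isinstance(node.op, ast.Add) else " <op> ")
        self.visit(node.right)

buf2 = io.StringIO()
AddVisitor(buf2).visit(ast.parse("c = 1 + 2").body[0])
assert buf2.getvalue() == "c = 1 + 2\n"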
<import_stmt>_sk_fail<line_sep>_sk_fail._("SocketServer")<line_sep>
<import_stmt>re<def_stmt>normalize_text text<block_start>result=text.lower()# lowercase the text (handles unicode input as well) result=re.sub(r'[^a-z0-9 -]' ' ' result flags=re.IGNORECASE|re.MULTILINE)<line_sep>result=re.sub(r'( +)' ' ' result flags=re.IGNORECASE|re.MULTILINE)<line_sep><return>result.strip()<block_end>
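# --- Illustrative aside (not part of the original file): the first regex keeps only
# a-z, digits, spaces and hyphens (everything else becomes a space); the second
# collapses runs of spaces. The values below follow directly from those two rules.
assert normalize_text("Héllo,  Wörld!") == "h llo w rld"
assert normalize_text("foo-bar  42") == "foo-bar 42"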
<import_from_stmt>sympy symbols FunctionMatrix MatrixExpr Lambda Matrix <def_stmt>test_funcmatrix <block_start>i,j=symbols('i,j')<line_sep>X=FunctionMatrix(3 3 Lambda((i j) i-j))<assert_stmt>X[1 1]<eq>0<assert_stmt>X[1 2]<eq>-1<assert_stmt>X.shape<eq>(3 3)<assert_stmt>X.rows<eq>X.cols<eq>3<assert_stmt>Matrix(X)<eq>Matrix(3 3 <lambda>i j:i-j)<assert_stmt>isinstance(X<times>X+X MatrixExpr)<block_end>
# View more python tutorials on my Youtube and Youku channel!!! # Youtube video tutorial: https://www.youtube.com/channel/UCdyjiB5H8Pu7aDTNVXTTpcg # Youku video tutorial: http://i.youku.com/pythontutorial # 12 - contours """ Please note, this script is for python3+. If you are using python2+, please modify it accordingly. Tutorial reference: http://www.scipy-lectures.org/intro/matplotlib/matplotlib.html """<import_stmt>matplotlib.pyplot<as>plt<import_stmt>numpy<as>np<def_stmt>f x y# the height function <block_start><return>(1-x/2+x<power>5+y<power>3)<times>np.exp(-x<power>2-y<power>2)<block_end>n=256<line_sep>x=np.linspace(-3 3 n)<line_sep>y=np.linspace(-3 3 n)<line_sep>X,Y=np.meshgrid(x y)<line_sep># use plt.contourf to fill the contours # X, Y and the value f(X, Y) for each (X, Y) point plt.contourf(X Y f(X Y) 8 alpha=.75 cmap=plt.cm.hot)<line_sep># use plt.contour to add contour lines (the keyword is linewidths, not linewidth) C=plt.contour(X Y f(X Y) 8 colors='black' linewidths=.5)<line_sep># add inline labels to the contour lines plt.clabel(C inline=<true> fontsize=10)<line_sep>plt.xticks(())<line_sep>plt.yticks(())<line_sep>plt.show()<line_sep>
<import_from_stmt>distutils.core setup<line_sep>setup(name='attention' version='0.1.0' author='tllake' author_email='<EMAIL>' packages=['attention'] description='An attention function for PyTorch.' long_description=open('README.md').read())<line_sep>
# This file is part of the Astrometry.net suite. # Licensed under a 3-clause BSD style license - see LICENSE <import_from_future_stmt> print_function<import_from_future_stmt> absolute_import<import_stmt>os<import_from_stmt>astrometry.util.fits fits_table<import_stmt>numpy<as>np<import_stmt>logging<import_stmt>tempfile<import_stmt>sys<line_sep>py3=(sys.version_info[0]<ge>3)<if_stmt>py3<block_start><import_from_stmt>urllib.parse urljoin<block_end><else_stmt><block_start><import_from_stmt>urlparse urljoin<block_end>fitsio=<none><try_stmt><block_start><import_stmt>fitsio<block_end><except_stmt><block_start><try_stmt><block_start><import_stmt>pyfits<block_end><except_stmt>ImportError<block_start><try_stmt><block_start><import_from_stmt>astropy.io fits<as>pyfits<block_end><except_stmt>ImportError<block_start><raise>ImportError("Cannot import either pyfits or astropy.io.fits")<block_end><block_end><block_end><import_from_stmt>.common *<import_from_stmt>.dr7 *<import_from_stmt>.yanny *<import_from_stmt>astrometry.util.run_command run_command<class_stmt>Frame(SdssFile)<block_start><def_stmt>__init__ self *args **kwargs<block_start>super(Frame self).__init__(*args **kwargs)<line_sep>self.filetype='frame'<line_sep>self.image=<none><line_sep>self.image_proxy=<none><block_end><def_stmt>getImageShape self<block_start><if_stmt>self.image_proxy<is><not><none># fitsio fits.FITSHDU object <block_start>H,W=self.image_proxy.get_info()['dims']<line_sep>H=int(H)<line_sep>W=int(W)<block_end><else_stmt><block_start>H,W=self.image.shape<block_end><return>H W<block_end><def_stmt>getImageSlice self slice<block_start><if_stmt>self.image_proxy<is><not><none>#print 'reading slice from image proxy:', slice <block_start><return>self.image_proxy[slice]<block_end><return>self.image[slice]<block_end>#def __str__(self): <def_stmt>getImage self<block_start><if_stmt>self.image<is><none><and>self.image_proxy<is><not><none><block_start>self.image=self.image_proxy.read()<line_sep>self.image_proxy=<none><block_end><return>self.image<block_end><def_stmt>getHeader self<block_start><return>self.header<block_end><def_stmt>getAsTrans self<block_start><return>self.astrans<block_end><def_stmt>getCalibVec self<block_start><return>self.calib<block_end><def_stmt>getSkyAt self x y<block_start>skyim=self.sky<line_sep>(sh sw)=skyim.shape<if_stmt>sw<ne>256<block_start>skyim=skyim.T<block_end>(sh sw)=skyim.shape<line_sep>xi=np.round(self.skyxi[x]).astype(int)<line_sep>yi=np.round(self.skyyi[y]).astype(int)<line_sep>yi=np.minimum(yi sh-1)<line_sep><return>skyim[yi xi]<block_end><def_stmt>getSky self<block_start>skyim=self.sky<line_sep>(sh sw)=skyim.shape<if_stmt>sw<ne>256<block_start>skyim=skyim.T<block_end>(sh sw)=skyim.shape<line_sep>xi=np.round(self.skyxi).astype(int)<line_sep>yi=np.round(self.skyyi).astype(int)<line_sep>yi=np.minimum(yi sh-1)<assert_stmt>(all(xi<ge>0)<and>all(xi<l>sw))<assert_stmt>(all(yi<ge>0)<and>all(yi<l>sh))<line_sep>XI,YI=np.meshgrid(xi yi)<line_sep># Nearest-neighbour interpolation -- we just need this # for approximate invvar. bigsky=skyim[YI XI]<line_sep><return>bigsky<block_end><def_stmt>getInvvar self psfield bandnum ignoreSourceFlux=<false> sourceFlux=<none> constantSkyAt=<none><block_start>''' If constantSkyAt = (x,y) (INTEGERS!), returns a scalar (rather than a np.array) of the invvar at that point. 
NOTE that this does NOT blank out masked pixels; use, eg, fpM = sdss.readFpM(run, camcol, field, bandname) for plane in [ 'INTERP', 'SATUR', 'CR', 'GHOST' ]: fpM.setMaskedPixels(plane, invvar, 0, roi=roi) '''<line_sep>calibvec=self.getCalibVec()<if_stmt>constantSkyAt<block_start>x,y=constantSkyAt<line_sep>calibvec=calibvec[x]<line_sep>sky=self.getSkyAt(x y)<if_stmt>ignoreSourceFlux<block_start>dn=sky<block_end><elif_stmt>sourceFlux<is><none><block_start>image=self.getImage()<line_sep>dn=(image[y x]/calibvec)+sky<block_end><else_stmt><block_start>dn=(sourceFlux/calibvec)+sky<block_end><block_end><else_stmt><block_start>bigsky=self.getSky()<if_stmt>ignoreSourceFlux<block_start>dn=bigsky<block_end><elif_stmt>sourceFlux<is><none><block_start>image=self.getImage()<line_sep>dn=(image/calibvec)+bigsky<block_end><else_stmt><block_start>dn=(sourceFlux/calibvec)+bigsky<block_end><block_end>gain=psfield.getGain(bandnum)<line_sep># Note, "darkvar" includes dark current *and* read noise. darkvar=psfield.getDarkVariance(bandnum)<line_sep>dnvar=(dn/gain)+darkvar<line_sep>invvar=1./(dnvar<times>calibvec<power>2)<line_sep><return>invvar<block_end><block_end><class_stmt>PhotoObj(SdssFile)<block_start><def_stmt>__init__ self *args **kwargs<block_start>super(PhotoObj self).__init__(*args **kwargs)<line_sep>self.filetype='photoObj'<line_sep>self.table=<none><block_end><def_stmt>getTable self<block_start><return>self.table<block_end><block_end><class_stmt>runlist(object)<block_start><pass><block_end><class_stmt>DR8(DR7)<block_start>_lup_to_mag_b=np.array([1.4e-10 0.9e-10 1.2e-10 1.8e-10 7.4e-10])<line_sep>_two_lup_to_mag_b=2.<times>_lup_to_mag_b<line_sep>_ln_lup_to_mag_b=np.log(_lup_to_mag_b)<line_sep>''' From http://data.sdss3.org/datamodel/glossary.html#asinh m = -(2.5/ln(10))*[asinh(f/2b)+ln(b)]. The parameter b is a softening parameter measured in maggies, and for the [u, g, r, i, z] bands has the values [1.4, 0.9, 1.2, 1.8, 7.4] x 1e-10 '''<line_sep>@staticmethod<def_stmt>luptitude_to_mag Lmag bandnum badmag=25<block_start><if_stmt>bandnum<is><none># assume Lmag is broadcastable to a 5-vector <block_start>twobi=DR8._two_lup_to_mag_b<line_sep>lnbi=DR8._ln_lup_to_mag_b<block_end><else_stmt><block_start>twobi=DR8._two_lup_to_mag_b[bandnum]<line_sep>lnbi=DR8._ln_lup_to_mag_b[bandnum]<block_end># MAGIC -1.08.... = -2.5/np.log(10.) 
f=np.sinh(Lmag/-1.0857362047581294-lnbi)<times>twobi<line_sep># prevent log10(-flux) mag=np.zeros_like(f)+badmag<line_sep>I=(f<g>0)<line_sep>mag[I]=-2.5<times>np.log10(f[I])<line_sep><return>mag<block_end>@staticmethod<def_stmt>nmgy_to_mag nmgy<block_start><return>22.5-2.5<times>np.log10(nmgy)<block_end><def_stmt>getDRNumber self<block_start><return>8<block_end><def_stmt>useLocalTree self photoObjs=<none> resolve=<none><block_start><if_stmt>photoObjs<is><none><block_start>photoObjs=os.environ['BOSS_PHOTOOBJ']<block_end>redux=os.environ['PHOTO_REDUX']<if_stmt>resolve<is><none><block_start>resolve=os.environ['PHOTO_RESOLVE']<block_end>self.filenames.update(photoObj=os.path.join(photoObjs '%(rerun)s' '%(run)i' '%(camcol)i' 'photoObj-%(run)06i-%(camcol)i-%(field)04i.fits') frame=os.path.join(photoObjs 'frames' '%(rerun)s' '%(run)i' '%(camcol)i' 'frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2') photoField=os.path.join(photoObjs '%(rerun)s' '%(run)i' 'photoField-%(run)06i-%(camcol)i.fits') psField=os.path.join(redux '%(rerun)s' '%(run)i' 'objcs' '%(camcol)i' 'psField-%(run)06i-%(camcol)i-%(field)04i.fit') fpM=os.path.join(redux '%(rerun)s' '%(run)i' 'objcs' '%(camcol)i' 'fpM-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.gz') window_flist=os.path.join(resolve 'window_flist.fits') )<line_sep># use fpM files compressed <try_stmt><block_start><del_stmt>self.dassuffix['fpM']<block_end><except_stmt><block_start><pass><block_end><try_stmt><block_start><del_stmt>self.processcmds['fpM']<block_end><except_stmt><block_start><pass><block_end><block_end><def_stmt>saveUnzippedFiles self basedir<block_start>self.unzip_dir=basedir<block_end><def_stmt>setFitsioReadBZ2 self to=<true><block_start>''' Call this if fitsio supports reading .bz2 files directly. '''<line_sep>self.readBz2=to<block_end><def_stmt>__init__ self **kwargs<block_start>''' Useful kwargs: basedir : (string) - local directory where data will be stored. '''<line_sep>DR7.__init__(self **kwargs)<line_sep>self.unzip_dir=<none><line_sep>self.readBz2=<false><line_sep># Local filenames self.filenames.update({'frame':'frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2' 'idR':'idR-%(run)06i-%(band)s-%(camcol)i-%(field)04i.fits' 'photoObj':'photoObj-%(run)06i-%(camcol)i-%(field)04i.fits' 'photoField':'photoField-%(run)06i-%(camcol)i.fits' 'window_flist':'window_flist.fits' })<line_sep># URLs on DAS server self.dasurl='http://data.sdss3.org/sas/dr8/groups/boss/'<line_sep>self.daspaths={'idR':'photo/data/%(run)i/fields/%(camcol)i/idR-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.Z' 'fpObjc':'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/fpObjc-%(run)06i-%(camcol)i-%(field)04i.fit' # DR8 frames are no longer available on DAS. 
'frame':'/sas/dr9/boss/photoObj/frames/%(rerun)s/%(run)i/%(camcol)i/frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2' #'frame': 'photoObj/frames/%(rerun)s/%(run)i/%(camcol)i/frame-%(band)s-%(run)06i-%(camcol)i-%(field)04i.fits.bz2', 'photoObj':'photoObj/%(rerun)s/%(run)i/%(camcol)i/photoObj-%(run)06i-%(camcol)i-%(field)04i.fits' 'psField':'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/psField-%(run)06i-%(camcol)i-%(field)04i.fit' 'photoField':'photoObj/%(rerun)s/%(run)i/photoField-%(run)06i-%(camcol)i.fits' 'fpM':'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/fpM-%(run)06i-%(band)s%(camcol)i-%(field)04i.fit.gz' 'fpAtlas':'photo/redux/%(rerun)s/%(run)i/objcs/%(camcol)i/fpAtlas-%(run)06i-%(camcol)i-%(field)04i.fit' 'window_flist':'resolve/2010-05-23/window_flist.fits' }<line_sep>self.dassuffix={#'frame': '.bz2', 'fpM':'.gz' 'idR':'.Z' }<line_sep># called in retrieve() self.processcmds={'fpM':'gunzip -cd %(input)s > %(output)s' 'idR':'gunzip -cd %(input)s > %(output)s' }<line_sep>self.postprocesscmds={'frame':'TMPFILE=$(mktemp %(output)s.tmp.XXXXXX) && bunzip2 -cd %(input)s > $TMPFILE && mv $TMPFILE %(output)s' }<line_sep>y=read_yanny(self._get_runlist_filename())<line_sep>y=y['RUNDATA']<line_sep>rl=runlist()<line_sep>rl.run=np.array(y['run'])<line_sep>rl.startfield=np.array(y['startfield'])<line_sep>rl.endfield=np.array(y['endfield'])<line_sep>rl.rerun=np.array(y['rerun'])<line_sep>#print 'Rerun type:', type(rl.rerun), rl.rerun.dtype self.runlist=rl<line_sep>self.logger=logging.getLogger('astrometry.sdss.DR%i'%self.getDRNumber())<line_sep>#self.logger.debug('debug test') #self.logger.info('info test') #self.logger.warning('warning test') <block_end><def_stmt>_unzip_frame self fn run camcol<block_start><if_stmt>self.readBz2<block_start><return><none> <true><block_end># No, PJM reported that pyfits failed on SDSS frame*.bz2 files # if not fitsio: # # pyfits can read .bz2 # return None,True tempfn=<none><line_sep>keep=<false><line_sep>filetype='frame'<if_stmt><not>(filetype<in>self.postprocesscmds<and>fn.endswith('.bz2'))<block_start><return><none> <true><block_end>cmd=self.postprocesscmds[filetype]<if_stmt>self.unzip_dir<is><not><none><block_start>udir=os.path.join(self.unzip_dir '%i'%run '%i'%camcol)<if_stmt><not>os.path.exists(udir)<block_start><try_stmt><block_start>os.makedirs(udir)<block_end><except_stmt><block_start><pass><block_end><block_end>tempfn=os.path.join(udir os.path.basename(fn).replace('.bz2' ''))<line_sep>#print 'Checking', tempfn <if_stmt>os.path.exists(tempfn)<block_start>print('File exists:' tempfn)<line_sep><return>tempfn <true><block_end><else_stmt><block_start>print('Saving to' tempfn)<line_sep>keep=<true><block_end><block_end><else_stmt><block_start>fid,tempfn=tempfile.mkstemp()<line_sep>os.close(fid)<block_end>cmd=cmd%dict(input=fn output=tempfn)<line_sep>self.logger.debug('cmd: %s'%cmd)<line_sep>print('command:' cmd)<line_sep>(rtn out err)=run_command(cmd)<if_stmt>rtn<block_start>print('Command failed: command' cmd)<line_sep>print('Output:' out)<line_sep>print('Error:' err)<line_sep>print('Return val:' rtn)<line_sep><raise>RuntimeError('Command failed (return val %i): %s'%(rtn cmd))<block_end>print(out)<line_sep>print(err)<line_sep><return>tempfn keep<block_end><def_stmt>_get_runlist_filename self<block_start><return>self._get_data_file('runList-dr8.par')<block_end># read a data file describing the DR8 data <def_stmt>_get_data_file self fn<block_start><return>os.path.join(os.path.dirname(__file__) fn)<block_end><def_stmt>get_rerun self run 
field=<none><block_start>I=(self.runlist.run<eq>run)<if_stmt>field<is><not><none><block_start>I<augmul>(self.runlist.startfield<le>field)<times>(self.runlist.endfield<ge>field)<block_end>I=np.flatnonzero(I)<line_sep>reruns=np.unique(self.runlist.rerun[I])<line_sep>#print 'Run', run, '-> reruns:', reruns <if_stmt>len(reruns)<eq>0<block_start><return><none><block_end><return>reruns[-1]<block_end><def_stmt>get_url self filetype run camcol field band=<none> rerun=<none><block_start><if_stmt>rerun<is><none><block_start>rerun=self.get_rerun(run field)<block_end>path=self.daspaths[filetype]<line_sep>url=urljoin(self.dasurl path%dict(run=run camcol=camcol field=field rerun=rerun band=band))<line_sep><return>url<block_end><def_stmt>retrieve self filetype run camcol field=<none> band=<none> skipExisting=<true> tempsuffix='.tmp' rerun=<none><block_start>outfn=self.getPath(filetype run camcol field band rerun=rerun)<line_sep>print('Checking for file' outfn)<if_stmt>outfn<is><none><block_start><return><none><block_end><if_stmt>skipExisting<and>os.path.exists(outfn)#print('Exists') <block_start><return>outfn<block_end>outdir=os.path.dirname(outfn)<if_stmt><not>os.path.exists(outdir)<block_start><try_stmt><block_start>os.makedirs(outdir)<block_end><except_stmt><block_start><pass><block_end><block_end>url=self.get_url(filetype run camcol field band=band rerun=rerun)<line_sep>#print 'Did not find file:', outfn print('Retrieving from URL:' url)<if_stmt>self.curl<block_start>cmd="curl -o '%(outfn)s' '%(url)s'"<block_end><else_stmt><block_start>cmd="wget --continue -nv -O %(outfn)s '%(url)s'"<block_end># suffix to add to the downloaded filename suff=self.dassuffix.get(filetype '')<line_sep>oo=outfn+suff<if_stmt>tempsuffix<is><not><none><block_start>oo<augadd>tempsuffix<block_end>cmd=cmd%dict(outfn=oo url=url)<line_sep>self.logger.debug('cmd: %s'%cmd)<line_sep>(rtn out err)=run_command(cmd)<if_stmt>rtn<block_start>print('Command failed: command' cmd)<line_sep>print('Output:' out)<line_sep>print('Error:' err)<line_sep>print('Return val:' rtn)<line_sep><return><none><block_end><if_stmt>tempsuffix<is><not><none># <block_start>self.logger.debug('Renaming %s to %s'%(oo outfn+suff))<line_sep>os.rename(oo outfn+suff)<block_end><if_stmt>filetype<in>self.processcmds<block_start>cmd=self.processcmds[filetype]<line_sep>cmd=cmd%dict(input=outfn+suff output=outfn)<line_sep>self.logger.debug('cmd: %s'%cmd)<line_sep>(rtn out err)=run_command(cmd)<if_stmt>rtn<block_start>print('Command failed: command' cmd)<line_sep>print('Output:' out)<line_sep>print('Error:' err)<line_sep>print('Return val:' rtn)<line_sep><return><none><block_end><block_end><return>outfn<block_end><def_stmt>readPhotoObj self run camcol field filename=<none><block_start>obj=PhotoObj(run camcol field)<if_stmt>filename<is><none><block_start>fn=self.getPath('photoObj' run camcol field)<block_end><else_stmt><block_start>fn=filename<block_end>obj.table=fits_table(fn)<line_sep><return>obj<block_end><def_stmt>readFrame self run camcol field band filename=<none><block_start>''' http://data.sdss3.org/datamodel/files/BOSS_PHOTOOBJ/frames/RERUN/RUN/CAMCOL/frame.html '''<line_sep>f=Frame(run camcol field band)<line_sep># ... <if_stmt>filename<is><none><block_start>fn=self.getPath('frame' run camcol field band)<block_end><else_stmt><block_start>fn=filename<block_end># optionally bunzip2 the frame file. 
tempfn,keep=self._unzip_frame(fn run camcol)<if_stmt>tempfn<is><not><none><block_start>fn=tempfn<block_end><if_stmt>fitsio<block_start>print('Frame filename' fn)<line_sep># eg /clusterfs/riemann/raid006/dr10/boss/photoObj/frames/301/2825/1/frame-u-002825-1-0126.fits.bz2 F=fitsio.FITS(fn lower=<true>)<line_sep>f.header=F[0].read_header()<line_sep># Allow later reading of just the pixels of interest. f.image_proxy=F[0]<line_sep>f.calib=F[1].read()<line_sep>sky=F[2].read_columns(['allsky' 'xinterp' 'yinterp'])<line_sep>#print 'sky', type(sky) # ... supposed to be a recarray, but it's not... f.sky,f.skyxi,f.skyyi=sky.tolist()[0]<line_sep>tab=fits_table(F[3].read())<if_stmt><not>keep<and>tempfn<is><not><none><block_start>os.remove(tempfn)<block_end><block_end><else_stmt><block_start>p=pyfits.open(fn)<line_sep># in nanomaggies f.image=p[0].data<line_sep>f.header=p[0].header<line_sep># converts counts -> nanomaggies f.calib=p[1].data<line_sep># table with val,x,y -- binned; use bilinear interpolation to expand sky=p[2].data<line_sep># table -- asTrans structure tab=fits_table(p[3].data)<line_sep>f.sky=sky.field('allsky')[0]<line_sep>f.skyxi=sky.field('xinterp')[0]<line_sep>f.skyyi=sky.field('yinterp')[0]<block_end>#print 'sky shape', f.sky.shape <if_stmt>len(f.sky.shape)<ne>2<block_start>f.sky=f.sky.reshape((-1 256))<block_end><assert_stmt>(len(tab)<eq>1)<line_sep>tab=tab[0]<line_sep># DR7 has NODE, INCL in radians... f.astrans=AsTrans(run camcol field band node=np.deg2rad(tab.node) incl=np.deg2rad(tab.incl) astrans=tab cut_to_band=<false>)<line_sep><return>f<block_end><block_end>
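# --- Illustrative aside (not part of the original file): a hedged usage sketch for
# the DR8 class above. The run/camcol/field values come from the example path quoted
# in readFrame's comments; retrieve() shells out to wget or curl and needs network
# access, so treat this as a sketch rather than a test.
sdss = DR8(basedir='/tmp/sdss')                      # basedir is the DR7-inherited kwarg noted in __init__
rerun = sdss.get_rerun(2825, field=126)
fn = sdss.retrieve('frame', 2825, 1, field=126, band='u', rerun=rerun)
if fn is not None:
    frame = sdss.readFrame(2825, 1, 126, 'u', filename=fn)
    print(frame.getImageShape())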
"""add locales table Revision ID: 2a8981379eba Revises: 438b950c4c9a Create Date: 2018-01-10 16:21:39.595957 """<import_from_stmt>alembic op<import_stmt>sqlalchemy<as>sa<line_sep># revision identifiers, used by Alembic. revision='2a8981379eba'<line_sep>down_revision='<PASSWORD>'<line_sep>branch_labels=<none><line_sep>depends_on=<none><def_stmt>upgrade <block_start>op.execute(""" CREATE TABLE locales ( id UUID PRIMARY KEY DEFAULT uuid_generate_v4(), ident TEXT NOT NULL, language TEXT NOT NULL, country TEXT NOT NULL, pairs hstore, project_id UUID REFERENCES projects (id) ON UPDATE CASCADE ON DELETE CASCADE, UNIQUE (ident, project_id) ); """)<block_end><def_stmt>downgrade <block_start>op.execute(""" DROP TABLE locales; """)<block_end>
<import_from_stmt>.mdaextractors MdaRecordingExtractor MdaSortingExtractor<line_sep>
<class_stmt>PointCloudObject(RhinoObject)# no doc <block_start><def_stmt>DuplicatePointCloudGeometry self<block_start>""" DuplicatePointCloudGeometry(self: PointCloudObject) -> PointCloud """<line_sep><pass><block_end>PointCloudGeometry=property(<lambda>self:object() <lambda>self v:<none> <lambda>self:<none>)<line_sep>"""Get: PointCloudGeometry(self: PointCloudObject) -> PointCloud """<block_end>
<import_stmt>itertools<import_stmt>regex<as>re<import_stmt>numpy<as>np<line_sep># seed is fixed for reproducibility np.random.seed(7)<import_from_stmt>tensorflow set_random_seed<line_sep>set_random_seed(7)<import_from_stmt>unidecode unidecode<import_from_stmt>delft.utilities.Tokenizer tokenizeAndFilterSimple<import_from_stmt>delft.utilities.bert.run_classifier_delft DataProcessor<import_stmt>delft.utilities.bert.tokenization<as>tokenization<import_from_stmt>delft.utilities.bert.run_classifier_delft InputExample<line_sep>special_character_removal=re.compile(r'[^A-Za-z\.\-\?\!\,\#\@\% ]' re.IGNORECASE)<def_stmt>to_vector_single text embeddings maxlen=300<block_start>""" Given a string, tokenize it, then convert it to a sequence of word embedding vectors with the provided embeddings, introducing <PAD> and <UNK> padding token vector when appropriate """<line_sep>tokens=tokenizeAndFilterSimple(clean_text(text))<line_sep>window=tokens[-maxlen:]<line_sep># TBD: use better initializers (uniform, etc.) x=np.zeros((maxlen embeddings.embed_size) )<line_sep># TBD: padding should be left and which vector do we use for padding? # and what about masking padding later for RNN? <for_stmt>i,word enumerate(window)<block_start>x[i :]=embeddings.get_word_vector(word).astype('float32')<block_end><return>x<block_end><def_stmt>to_vector_elmo tokens embeddings maxlen=300 lowercase=<false> num_norm=<false><block_start>""" Given a list of tokens convert it to a sequence of word embedding vectors based on ELMo contextualized embeddings """<line_sep>subtokens=[]<for_stmt>i range(0 len(tokens))<block_start>local_tokens=[]<for_stmt>j range(0 min(len(tokens[i]) maxlen))<block_start><if_stmt>lowercase<block_start>local_tokens.append(lower(tokens[i][j]))<block_end><else_stmt><block_start>local_tokens.append(tokens[i][j])<block_end><block_end>subtokens.append(local_tokens)<block_end><return>embeddings.get_sentence_vector_only_ELMo(subtokens)<line_sep>""" if use_token_dump: return embeddings.get_sentence_vector_ELMo_with_token_dump(tokens) """<block_end><def_stmt>to_vector_bert tokens embeddings maxlen=300 lowercase=<false> num_norm=<false><block_start>""" Given a list of tokens convert it to a sequence of word embedding vectors based on the BERT contextualized embeddings, introducing padding token when appropriate """<line_sep>subtokens=[]<for_stmt>i range(0 len(tokens))<block_start>local_tokens=[]<for_stmt>j range(0 min(len(tokens[i]) maxlen))<block_start><if_stmt>lowercase<block_start>local_tokens.append(lower(tokens[i][j]))<block_end><else_stmt><block_start>local_tokens.append(tokens[i][j])<block_end><block_end>subtokens.append(local_tokens)<block_end>vector=embeddings.get_sentence_vector_only_BERT(subtokens)<line_sep><return>vector<block_end><def_stmt>to_vector_simple_with_elmo tokens embeddings maxlen=300 lowercase=<false> num_norm=<false><block_start>""" Given a list of tokens convert it to a sequence of word embedding vectors based on the concatenation of the provided static embeddings and the ELMo contextualized embeddings, introducing <PAD> and <UNK> padding token vector when appropriate """<line_sep>subtokens=[]<for_stmt>i range(0 len(tokens))<block_start>local_tokens=[]<for_stmt>j range(0 min(len(tokens[i]) maxlen))<block_start><if_stmt>lowercase<block_start>local_tokens.append(lower(tokens[i][j]))<block_end><else_stmt><block_start>local_tokens.append(tokens[i][j])<block_end><block_end><if_stmt>len(tokens[i])<l>maxlen<block_start><for_stmt>i range(0 maxlen-len(tokens[i]))<block_start>local_tokens.append(" 
")<block_end><block_end>subtokens.append(local_tokens)<block_end><return>embeddings.get_sentence_vector_with_ELMo(subtokens)<block_end><def_stmt>to_vector_simple_with_bert tokens embeddings maxlen=300 lowercase=<false> num_norm=<false><block_start>""" Given a list of tokens convert it to a sequence of word embedding vectors based on the concatenation of the provided static embeddings and the BERT contextualized embeddings, introducing padding token vector when appropriate """<line_sep>subtokens=[]<for_stmt>i range(0 len(tokens))<block_start>local_tokens=[]<for_stmt>j range(0 min(len(tokens[i]) maxlen))<block_start><if_stmt>lowercase<block_start>local_tokens.append(lower(tokens[i][j]))<block_end><else_stmt><block_start>local_tokens.append(tokens[i][j])<block_end><block_end><if_stmt>len(tokens[i])<l>maxlen<block_start><for_stmt>i range(0 maxlen-len(tokens[i]))<block_start>local_tokens.append(" ")<block_end><block_end>subtokens.append(local_tokens)<block_end><return>embeddings.get_sentence_vector_with_BERT(subtokens)<block_end><def_stmt>clean_text text<block_start>x_ascii=unidecode(text)<line_sep>x_clean=special_character_removal.sub('' x_ascii)<line_sep><return>x_clean<block_end><def_stmt>lower word<block_start><return>word.lower()<block_end><def_stmt>normalize_num word<block_start><return>re.sub(r'[0-90123456789]' r'0' word)<block_end><class_stmt>BERT_classifier_processor(DataProcessor)<block_start>""" BERT data processor for classification """<def_stmt>__init__ self labels=<none> x_train=<none> y_train=<none> x_test=<none> y_test=<none><block_start>self.list_classes=labels<line_sep>self.x_train=x_train<line_sep>self.y_train=y_train<line_sep>self.x_test=x_test<line_sep>self.y_test=y_test<block_end><def_stmt>get_train_examples self x_train=<none> y_train=<none><block_start>"""See base class."""<if_stmt>x_train<is><not><none><block_start>self.x_train=x_train<block_end><if_stmt>y_train<is><not><none><block_start>self.y_train=y_train<block_end>examples,_=self.create_examples(self.x_train self.y_train)<line_sep><return>examples<block_end><def_stmt>get_labels self<block_start>"""See base class."""<line_sep><return>self.list_classes<block_end><def_stmt>get_test_examples self x_test=<none> y_test=<none><block_start>"""See base class."""<if_stmt>x_test<is><not><none><block_start>self.x_test=x_test<block_end><if_stmt>y_test<is><not><none><block_start>self.y_test=y_test<block_end>examples,results=self.create_examples(self.x_test self.y_test)<line_sep><return>examples results<block_end><def_stmt>create_examples self x_s y_s=<none><block_start>examples=[]<line_sep>valid_classes=np.zeros((y_s.shape[0] len(self.list_classes)))<line_sep>accumul=0<for_stmt>(i x) enumerate(x_s)<block_start>y=y_s[i]<line_sep>guid=i<line_sep>text_a=tokenization.convert_to_unicode(x)<line_sep>#the_class = self._rewrite_classes(y, i) ind,=np.where(y<eq>1)<line_sep>the_class=self.list_classes[ind[0]]<if_stmt>the_class<is><none>#print(text_a) <block_start><continue><block_end><if_stmt>the_class<not><in>self.list_classes#the_class = 'other' <block_start><continue><block_end>label=tokenization.convert_to_unicode(the_class)<line_sep>examples.append(InputExample(guid=guid text_a=text_a text_b=<none> label=label))<line_sep>valid_classes[accumul]=y<line_sep>accumul<augadd>1<block_end><return>examples valid_classes<block_end><def_stmt>create_inputs self x_s dummy_label='dummy'<block_start>examples=[]<line_sep># dummy label to avoid breaking the bert base code label=tokenization.convert_to_unicode(dummy_label)<for_stmt>(i x) 
enumerate(x_s)<block_start>guid=i<line_sep>text_a=tokenization.convert_to_unicode(x)<line_sep>examples.append(InputExample(guid=guid text_a=text_a text_b=<none> label=label))<block_end><return>examples<block_end><block_end>
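# --- Illustrative aside (not part of the original file): exercising to_vector_single
# above with a stand-in embeddings object. Only embed_size and get_word_vector are
# assumed from delft's real embeddings class; the vectors produced here are fake.
import numpy as np

class DummyEmbeddings:
    embed_size = 4
    def get_word_vector(self, word):
        # fake vector, stable within one process (hash() is seeded per run)
        rng = np.random.RandomState(abs(hash(word)) % (2 ** 32))
        return rng.rand(self.embed_size).astype('float32')

x = to_vector_single("Hello, world!", DummyEmbeddings(), maxlen=10)
assert x.shape == (10, 4)   # maxlen rows, embed_size columns, zero-padded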
<import_stmt>time<import_from_stmt>pynng Bus0 Timeout<line_sep>address='tcp://127.0.0.1:13131'<with_stmt>Bus0(listen=address recv_timeout=100)<as>s0 Bus0(dial=address recv_timeout=100)<as>s1 Bus0(dial=address recv_timeout=100)<as>s2# let all connections be established <block_start>time.sleep(0.05)<line_sep>s0.send(b'hello buddies')<line_sep>print(s1.recv())# prints b'hello buddies' print(s2.recv())# prints b'hello buddies' s1.send(b'hi s0')<line_sep>print(s0.recv())# prints b'hi s0' # s2 is not directly connected to s1. <try_stmt><block_start>s2.recv()<assert_stmt><false> "this is never reached"<block_end><except_stmt>Timeout<block_start>print('s2 is not connected directly to s1!')<block_end><block_end>
# Copyright (c) 2020 Graphcore Ltd. All rights reserved. <import_stmt>os<import_stmt>gc<import_stmt>pytest<import_stmt>shutil<import_stmt>torch<import_stmt>poptorch<import_stmt>popart<import_from_stmt>poptorch.optim SGD<import_stmt>import_helper<import_from_stmt>train TrainingModelWithLoss<import_stmt>datasets<import_stmt>models<import_from_stmt>utils get_train_accuracy get_test_accuracy run_script<line_sep>@pytest.mark.ipus(1)<def_stmt>test_recomputation_checkpoints <block_start>gc.collect()<line_sep># run the model with and without recomputation <def_stmt>train model recompute<block_start>input_data=torch.ones(1 3 224 224)<line_sep>labels_data=torch.ones(1).long()<line_sep>opts=poptorch.Options()<if_stmt>recompute<block_start>opts._Popart.set("autoRecomputation" int(popart.RecomputationType.Standard))<block_end>opts.outputMode(poptorch.OutputMode.All)<line_sep>opts.randomSeed(0)<line_sep>opts.Training.gradientAccumulation(1)<line_sep>opts.Precision.enableStochasticRounding(<false>)<line_sep>model_with_loss=TrainingModelWithLoss(model)<line_sep>optimizer=SGD(model_with_loss.parameters() lr=0.01 momentum=0. use_combined_accum=<true>)<line_sep>training_model=poptorch.trainingModel(model_with_loss opts optimizer=optimizer)<line_sep>predictions=[]<for_stmt>_ range(3)<block_start>preds,_,_=training_model(input_data labels_data)<line_sep>predictions.append(preds)<block_end>training_model.destroy()<line_sep><return>predictions<block_end><class_stmt>Options()<block_start><def_stmt>__init__ self<block_start>self.model="resnet18"<line_sep>self.precision="16.16"<line_sep>self.norm_type="group"<line_sep>self.norm_eps=1e-5<line_sep>self.norm_num_groups=32<line_sep>self.normalization_location="none"<line_sep>self.pipeline_splits=[]<line_sep>self.eight_bit_io=<false><line_sep>self.num_io_tiles=0<block_end><block_end>args=Options()<line_sep>torch.manual_seed(0)<line_sep>model=models.get_model(args datasets.datasets_info["cifar10"] pretrained=<true>)<line_sep>no_recompute_predictions=train(model <false>)<line_sep>args.recompute_checkpoints=["conv" "norm"]<line_sep>torch.manual_seed(0)<line_sep>model=models.get_model(args datasets.datasets_info["cifar10"] pretrained=<true>)<line_sep>recompute_predictions=train(model <true>)<for_stmt>pred1,pred2 zip(no_recompute_predictions recompute_predictions)<block_start><assert_stmt>torch.allclose(pred1 pred2 atol=1e-04)<block_end><block_end>@pytest.mark.ipus(4)<def_stmt>test_replicas_reduction <block_start>gc.collect()<def_stmt>common_opts <block_start>opts=poptorch.Options()<line_sep>opts.Training.accumulationAndReplicationReductionType(poptorch.ReductionType.Mean)<line_sep>opts.outputMode(poptorch.OutputMode.All)<line_sep>opts.randomSeed(0)<line_sep>opts.Training.gradientAccumulation(1)<line_sep><return>opts<block_end><def_stmt>run_model opts<block_start>input_data=torch.ones(4 1)<line_sep>labels_data=torch.ones(4).long()<line_sep>model=torch.nn.Linear(1 2 bias=<false>)<line_sep>model_with_loss=TrainingModelWithLoss(model 0.1)<line_sep>optimizer=SGD(model_with_loss.parameters() lr=0.1 momentum=0. 
use_combined_accum=<true>)<line_sep>training_model=poptorch.trainingModel(model_with_loss opts optimizer=optimizer)<for_stmt>_ range(3)<block_start>preds,loss,_=training_model(input_data labels_data)<block_end># return the weights of the model <return>list(model_with_loss.model.named_parameters())[0][1] loss<block_end># Single replica opts=common_opts()<line_sep>opts.replicationFactor(1)<line_sep>single_replica_weights,single_replica_loss=run_model(opts)<line_sep># 4 replica running gc.collect()<line_sep>opts=common_opts()<line_sep>opts.replicationFactor(4)<line_sep>replicated_weights,replicated_loss=run_model(opts)<assert_stmt>torch.allclose(single_replica_weights replicated_weights atol=1e-05)<assert_stmt>torch.allclose(single_replica_loss replicated_loss atol=1e-05)<block_end>@pytest.mark.ipus(1)<def_stmt>test_generated <block_start>gc.collect()<line_sep>run_script("train/train.py" f"--data generated --model resnet18 --epoch 1 --precision 16.16 --validation-mode none --optimizer sgd_combined --lr 0.001 --gradient-accumulation 128 --batch-size 1 --dataloader-worker 4 --seed 0")<block_end>@pytest.mark.ipus(1)@pytest.mark.parametrize("precision" ["16.16" "32.32"])<def_stmt>test_synthetic precision<block_start>gc.collect()<line_sep>run_script("train/train.py" f"--data synthetic --model resnet18 --epoch 1 --precision {precision} --validation-mode none --optimizer sgd_combined --lr 0.001 --gradient-accumulation 64 --batch-size 1 --dataloader-worker 4 --seed 0")<block_end>@pytest.mark.parametrize("label_smoothing" [0.0 1.0 0.1 0.5])<def_stmt>test_loss_function label_smoothing<block_start>torch.manual_seed(0)<line_sep>inp=torch.rand(4 10)<times>10-5# create random input between [-5,5) label=torch.ones(4).long()<line_sep># calculate the ground truth log_pred=torch.nn.functional.log_softmax(inp dim=-1)<line_sep>ground_truth=-torch.mean(torch.sum((label_smoothing/10.0)<times>log_pred dim=1)+(1.0-label_smoothing)<times>log_pred[: 1])<line_sep>model_with_loss=TrainingModelWithLoss(<lambda>x:x label_smoothing=label_smoothing)<line_sep>_,loss,_=model_with_loss(inp label)<assert_stmt>torch.allclose(ground_truth loss atol=1e-05)<block_end>@pytest.mark.ipus(1)<def_stmt>test_mixup <block_start>gc.collect()<line_sep>run_script("train/train.py" f"--mixup-alpha 0.1 --data generated --model resnet18 --epoch 1 --validation-mode none --optimizer sgd_combined --batch-size 3 --dataloader-worker 1 --seed 0")<block_end>@pytest.mark.ipus(1)<def_stmt>test_cutmix <block_start>gc.collect()<line_sep>run_script("train/train.py" f"--cutmix-lambda-low 0.0 --cutmix-lambda-high 1.0 --data generated --model resnet18 --epoch 1 --validation-mode none --optimizer sgd_combined --batch-size 3 --dataloader-worker 1 --seed 0")<block_end><class_stmt>TestSynthetic<block_start>@pytest.mark.ipus(2)@pytest.mark.ipu_version("ipu2")<def_stmt>test_synthetic_mixed_precision self<block_start>gc.collect()<line_sep>run_script("train/train.py" "--data synthetic --model resnet18 --epoch 1 --precision 16.32 --pipeline-splits layer4/0 "<concat>"--validation-mode none --optimizer sgd_combined --lr 0.001 --gradient-accumulation 64 --dataloader-worker 4 --seed 0")<block_end><block_end><class_stmt>TestTrainCIFAR10<block_start>@pytest.mark.ipus(1)<def_stmt>test_single_ipu_validation_groupnorm self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --model resnet18 --epoch 3 --precision 16.16 --optimizer sgd_combined --lr 0.1 --batch-size 2 --gradient-accumulation 32 "<concat>"--norm-type group --norm-num-groups 32 
--enable-stochastic-rounding --dataloader-worker 4 --seed 0")<line_sep>acc=get_test_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(1)@pytest.mark.ipu_version("ipu2")<def_stmt>test_single_ipu_validation_batchnorm self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --model resnet18 --epoch 2 --precision 16.16 --optimizer sgd_combined --lr 0.1 --gradient-accumulation 8 "<concat>"--norm-type batch --batch-size 16 --enable-stochastic-rounding --dataloader-worker 4 --seed 0")<line_sep>acc=get_test_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(2)<def_stmt>test_replicas self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --model resnet18 --epoch 2 --replicas 2 --precision 16.16 --validation-mode none --optimizer sgd_combined --lr 0.1 "<concat>"--gradient-accumulation 32 --enable-stochastic-rounding --dataloader-worker 4 --seed 0")<line_sep>acc=get_train_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(2)<def_stmt>test_efficient_net self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --epoch 4 --model efficientnet-b0 --precision 16.32 --validation-mode none --optimizer sgd_combined --lr 0.1 --gradient-accumulation 64 "<concat>"--pipeline-splits blocks/2/1 --norm-type group --norm-num-groups 4 --enable-stochastic-rounding --dataloader-worker 4 --seed 0")<line_sep>acc=get_train_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(1)<def_stmt>test_full_precision self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --epoch 2 --model resnet18 --precision 32.32 --optimizer sgd_combined --lr 0.1 --batch-size 1 --gradient-accumulation 64 --dataloader-worker 4 --seed 0")<line_sep>acc=get_train_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(2)@pytest.mark.ipu_version("ipu2")<def_stmt>test_mixed_precision self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --epoch 2 --model resnet18 --pipeline-splits layer4/0 --precision 16.32 --optimizer sgd_combined "<concat>"--lr 0.1 --batch-size 1 --gradient-accumulation 64 --validation-mode none --dataloader-worker 4 --seed 0")<line_sep>acc=get_train_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(1)<def_stmt>test_single_ipu_mobilenet_v3_small_validation_batchnorm self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --model mobilenet-v3-small --epoch 3 --precision 16.32 --optimizer sgd_combined --lr 0.1 --batch-size 2 --gradient-accumulation 32 "<concat>"--norm-type batch --enable-stochastic-rounding --dataloader-worker 4 --seed 0")<line_sep>acc=get_test_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(1)@pytest.mark.ipu_version("ipu2")<def_stmt>test_single_ipu_mobilenet_v3_large_validation_batchnorm self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --model mobilenet-v3-large --epoch 3 --precision 16.32 --optimizer sgd_combined --lr 0.1 --batch-size 2 --gradient-accumulation 32 "<concat>"--norm-type batch --enable-stochastic-rounding --dataloader-worker 4 --seed 0")<line_sep>acc=get_test_accuracy(out)<assert_stmt>acc<g>15.0<block_end>@pytest.mark.ipus(1)@pytest.mark.ipu_version("ipu2")<def_stmt>test_half_resolution_training self<block_start>gc.collect()<line_sep>out=run_script("train/train.py" "--data cifar10 --model resnet18 --epoch 1 --precision 16.32 --optimizer sgd_combined --lr 0.1 --batch-size 2 
--gradient-accumulation 32 "<concat>"--norm-type batch --dataloader-worker 4 --half-res-training --fine-tune-epoch 1 --fine-tune-first-trainable-layer layer3 --weight-avg-strategy exponential "<concat>"--weight-avg-exp-decay 0.97 --checkpoint-path test_half_resolution_training --seed 0")<line_sep>acc=get_test_accuracy(out)<assert_stmt>acc<g>15.0<line_sep># remove folder parent_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))<line_sep>shutil.rmtree(os.path.join(parent_dir "test_half_resolution_training"))<block_end><block_end><class_stmt>TestRestoreCheckpoint<block_start>@pytest.mark.ipus(1)<def_stmt>test_restore_train self<block_start>gc.collect()<line_sep># create a model out=run_script("train/train.py" "--data cifar10 --epoch 2 --model resnet18 --precision 16.16 --optimizer sgd_combined --lr 0.1 --batch-size 2 --gradient-accumulation 32 --seed 0 "<concat>"--validation-mode none --norm-type group --norm-num-groups 32 --checkpoint-path restore_test_path_test_restore_train --dataloader-worker 4")<line_sep>saved_train_acc=get_train_accuracy(out)<line_sep># reload the model out=run_script("train/restore.py" "--checkpoint-path restore_test_path_test_restore_train/resnet18_cifar10_1.pt")<line_sep>acc=get_train_accuracy(out)<assert_stmt>acc<g>saved_train_acc-5.0<line_sep># remove folder parent_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))<line_sep>shutil.rmtree(os.path.join(parent_dir "restore_test_path_test_restore_train"))<block_end>@pytest.mark.ipus(1)<def_stmt>test_validation self<block_start>gc.collect()<line_sep># create a model out=run_script("train/train.py" "--data cifar10 --epoch 1 --model resnet18 --precision 16.16 --optimizer sgd_combined --lr 0.1 --batch-size 2 --gradient-accumulation 32 --seed 0 "<concat>"--norm-type group --norm-num-groups 32 --checkpoint-path restore_test_path_test_validation --dataloader-worker 4")<line_sep>saved_test_acc=get_test_accuracy(out)<line_sep># validate the model out=run_script("train/validate.py" "--checkpoint-path restore_test_path_test_validation/resnet18_cifar10_1.pt")<line_sep>acc=get_test_accuracy(out)<line_sep># close enough <assert_stmt>abs(saved_test_acc-acc)<l>0.01<line_sep># remove folder parent_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))<line_sep>shutil.rmtree(os.path.join(parent_dir "restore_test_path_test_validation"))<block_end>@pytest.mark.ipus(1)<def_stmt>test_weight_avg self<block_start>gc.collect()<line_sep>parent_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))<line_sep>out1=run_script("train/train.py" "--data cifar10 --epoch 3 --model resnet18 --precision 16.16 --weight-avg-strategy mean --norm-type group "<concat>"--norm-num-groups 32 --optimizer sgd_combined --lr 0.1 --batch-size 2 --gradient-accumulation 32 --checkpoint-path restore_test_path_weight_avg "<concat>"--weight-avg-N 2 --dataloader-worker 4 --seed 0")<line_sep>os.remove(os.path.join(parent_dir "restore_test_path_weight_avg" "resnet18_cifar10_3_averaged.pt"))<line_sep>_=run_script("train/weight_avg.py" "--checkpoint-path restore_test_path_weight_avg --weight-avg-strategy mean --weight-avg-N 2")<line_sep>out2=run_script("train/validate.py" "--checkpoint-path restore_test_path_weight_avg/resnet18_cifar10_3_averaged.pt")<line_sep>acc1=get_test_accuracy(out1)<line_sep># compare against the regenerated averaged checkpoint's accuracy (out2), not out1 again acc2=get_test_accuracy(out2)<assert_stmt>acc1<g>15<assert_stmt>acc1<eq>acc2<line_sep>shutil.rmtree(os.path.join(parent_dir "restore_test_path_weight_avg"))<block_end>@pytest.mark.ipus(1)<def_stmt>test_mixup_cutmix_validation_weight_avg self#
Only make sure that checkpoint loading works with mixup model wrapper. <block_start>gc.collect()<line_sep>parent_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))<line_sep>run_script("train/train.py" f"--mixup-alpha 0.1 --cutmix-lambda-low 0.2 --cutmix-lambda-high 0.8 --data generated --checkpoint-path test_mixup_cutmix_validation_weight_avg --weight-avg-strategy exponential --weight-avg-exp-decay 0.97 --model resnet18 --epoch 2 --validation-mode after --optimizer sgd_combined --batch-size 4 --dataloader-worker 1 --seed 0")<line_sep>shutil.rmtree(os.path.join(parent_dir "test_mixup_cutmix_validation_weight_avg"))<block_end>@pytest.mark.ipus(1)@pytest.mark.ipu_version("ipu2")<def_stmt>test_mixup_cutmix_restore_train self# Only make sure that checkpoint loading works with mixup model wrapper. <block_start>gc.collect()<line_sep>parent_dir=os.path.dirname(os.path.dirname(os.path.abspath(__file__)))<line_sep>run_script("train/train.py" f"--mixup-alpha 0.1 --cutmix-lambda-low 0.5 --cutmix-lambda-high 0.5 --data generated --checkpoint-path test_mixup_cutmix_restore_train --model resnet18 --epoch 2 --validation-mode none --optimizer sgd_combined --batch-size 4 --dataloader-worker 1 --seed 0")<line_sep>run_script("train/restore.py" "--checkpoint-path test_mixup_cutmix_restore_train/resnet18_generated_1.pt")<line_sep>shutil.rmtree(os.path.join(parent_dir "test_mixup_cutmix_restore_train"))<block_end><block_end>
<import_from_future_stmt> division absolute_import print_function<import_from_stmt>.numpydoc setup<line_sep>
''' Given a collection of numbers that might contain duplicates, return all possible unique permutations. For example, [1,1,2] has the following unique permutations: [1,1,2], [1,2,1], and [2,1,1]. '''<class_stmt>Solution(object)<block_start><def_stmt>permuteUnique self nums<block_start>""" :type nums: List[int] :rtype: List[List[int]] """<line_sep>result=[]<line_sep>nums.sort()<line_sep>self.get_permute([] nums result)<line_sep><return>result<block_end><def_stmt>get_permute self current num result<block_start><if_stmt><not>num<block_start># append a copy, since current keeps being mutated result.append(current+[])<line_sep><return><block_end><for_stmt>i,v enumerate(num)<block_start># the input is sorted, so equal neighbours would rebuild the same branch; skip them <if_stmt>i-1<ge>0<and>num[i]<eq>num[i-1]<block_start><continue><block_end>current.append(num[i])<line_sep>self.get_permute(current num[:i]+num[i+1:] result)<line_sep>current.pop()<block_end><block_end><block_end><if_stmt>__name__<eq>"__main__"<block_start><assert_stmt>Solution().permuteUnique([1 2 1])<eq>[[1 1 2] [1 2 1] [2 1 1]]<block_end>
<import_from_stmt>skfda.representation.basis FDataBasis Monomial BSpline Fourier Constant VectorValued Tensor <import_stmt>unittest<import_stmt>numpy<as>np<class_stmt>TestBasisEvaluationFourier(unittest.TestCase)<block_start><def_stmt>test_evaluation_simple_fourier self<block_start>"""Test the evaluation of FDataBasis"""<line_sep>fourier=Fourier(domain_range=(0 2) n_basis=5)<line_sep>coefficients=np.array([[1 2 3 4 5] [6 7 8 9 10]])<line_sep>f=FDataBasis(fourier coefficients)<line_sep>t=np.linspace(0 2 11)<line_sep># Results in R package fda res=np.array([[8.71 9.66 1.84 -4.71 -2.80 2.71 2.45 -3.82 -6.66 -0.30 8.71] [22.24 26.48 10.57 -4.95 -3.58 6.24 5.31 -7.69 -13.32 1.13 22.24]])[<ellipsis> np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f(t).round(2) res)<line_sep>np.testing.assert_array_almost_equal(f.evaluate(t).round(2) res)<block_end><def_stmt>test_evaluation_point_fourier self<block_start>"""Test the evaluation of a single point FDataBasis"""<line_sep>fourier=Fourier(domain_range=(0 1) n_basis=3)<line_sep>coefficients=np.array([[0.00078238 0.48857741 0.63971985] [0.01778079 0.73440271 0.20148638]])<line_sep>f=FDataBasis(fourier coefficients)<line_sep># Test different ways of calling f with a point res=np.array([-0.903918107989282 -0.267163981229459]).reshape((2 1 1)).round(4)<line_sep>np.testing.assert_array_almost_equal(f([0.5]).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f((0.5 )).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f(0.5).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f(np.array([0.5])).round(4) res)<line_sep># Problematic case, should it be accepted or not? #np.testing.assert_array_almost_equal(f(np.array(0.5)).round(4), res) <block_end><def_stmt>test_evaluation_derivative_fourier self<block_start>"""Test the evaluation of the derivative of a FDataBasis"""<line_sep>fourier=Fourier(domain_range=(0 1) n_basis=3)<line_sep>coefficients=np.array([[0.00078238 0.48857741 0.63971985] [0.01778079 0.73440271 0.20148638]])<line_sep>f=FDataBasis(fourier coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res=np.array([4.34138447771721 -7.09352774867064 2.75214327095343 4.34138447771721 6.52573053999253 -4.81336320468984 -1.7123673353027 6.52573053999253]).reshape((2 4 1)).round(3)<line_sep>f_deriv=f.derivative()<line_sep>np.testing.assert_array_almost_equal(f_deriv(t).round(3) res)<block_end><def_stmt>test_evaluation_grid_fourier self<block_start>"""Test the evaluation of FDataBasis with the grid option set to true.
Nothing should change since the domain dimension is 1, but the grid syntax should still be accepted. """<line_sep>fourier=Fourier(domain_range=(0 1) n_basis=3)<line_sep>coefficients=np.array([[0.00078238 0.48857741 0.63971985] [0.01778079 0.73440271 0.20148638]])<line_sep>f=FDataBasis(fourier coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res_test=f(t)<line_sep># Different ways to pass the axes np.testing.assert_array_almost_equal(f(t grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f((t ) grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f([t] grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f(np.atleast_2d(t) grid=<true>) res_test)<line_sep># Number of axes different from the domain dimension (1) <with_stmt>np.testing.assert_raises(ValueError)<block_start>f((t t) grid=<true>)<block_end><block_end><def_stmt>test_evaluation_composed_fourier self<block_start>"""Test the evaluation of FDataBasis with a matrix of times instead of a list of times """<line_sep>fourier=Fourier(domain_range=(0 1) n_basis=3)<line_sep>coefficients=np.array([[0.00078238 0.48857741 0.63971985] [0.01778079 0.73440271 0.20148638]])<line_sep>f=FDataBasis(fourier coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep># Test same result as the standard evaluation np.testing.assert_array_almost_equal(f([1]) f([[1] [1]] aligned=<false>))<line_sep>np.testing.assert_array_almost_equal(f(t) f(np.vstack((t t)) aligned=<false>))<line_sep># Different evaluation times t_multiple=[[0 0.5] [0.2 0.7]]<line_sep>np.testing.assert_array_almost_equal(f(t_multiple[0])[0] f(t_multiple aligned=<false>)[0])<line_sep>np.testing.assert_array_almost_equal(f(t_multiple[1])[1] f(t_multiple aligned=<false>)[1])<block_end><def_stmt>test_domain_in_list_fourier self<block_start>"""Test the evaluation of FDataBasis"""<for_stmt>fourier (Fourier(domain_range=[(0 1)] n_basis=3) Fourier(domain_range=((0 1) ) n_basis=3) Fourier(domain_range=np.array((0 1)) n_basis=3) Fourier(domain_range=np.array([(0 1)]) n_basis=3))<block_start>coefficients=np.array([[0.00078238 0.48857741 0.63971985] [0.01778079 0.73440271 0.20148638]])<line_sep>f=FDataBasis(fourier coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res=np.array([0.905 0.147 -1.05 0.905 0.303 0.775 -1.024 0.303]).reshape((2 4 1))<line_sep>np.testing.assert_array_almost_equal(f(t).round(3) res)<line_sep>np.testing.assert_array_almost_equal(f.evaluate(t).round(3) res)<block_end><block_end><block_end><class_stmt>TestBasisEvaluationBSpline(unittest.TestCase)<block_start><def_stmt>test_evaluation_simple_bspline self<block_start>"""Test the evaluation of FDataBasis"""<line_sep>bspline=BSpline(domain_range=(0 2) n_basis=5)<line_sep>coefficients=np.array([[1 2 3 4 5] [6 7 8 9 10]])<line_sep>f=FDataBasis(bspline coefficients)<line_sep>t=np.linspace(0 2 11)<line_sep># Results in R package fda res=np.array([[1 1.54 1.99 2.37 2.7 3 3.3 3.63 4.01 4.46 5] [6 6.54 6.99 7.37 7.7 8 8.3 8.63 9.01 9.46 10]])[<ellipsis> np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f(t).round(2) res)<line_sep>np.testing.assert_array_almost_equal(f.evaluate(t).round(2) res)<block_end><def_stmt>test_evaluation_point_bspline self<block_start>"""Test the evaluation of a single point FDataBasis"""<line_sep>bspline=BSpline(domain_range=(0 1) n_basis=5 order=3)<line_sep>coefficients=[[0.00078238 0.48857741 0.63971985 0.23 0.33] [0.01778079 0.73440271 0.20148638 0.54 0.12]]<line_sep>f=FDataBasis(bspline coefficients)<line_sep># Test different ways of calling f with a point res=np.array([[0.5696]
[0.3104]])[<ellipsis> np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f([0.5]).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f((0.5 )).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f(0.5).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f(np.array([0.5])).round(4) res)<line_sep># Problematic case, should it be accepted or not? #np.testing.assert_array_almost_equal(f(np.array(0.5)).round(4), res) <block_end><def_stmt>test_evaluation_derivative_bspline self<block_start>"""Test the evaluation of the derivative of a FDataBasis"""<line_sep>bspline=BSpline(domain_range=(0 1) n_basis=5 order=3)<line_sep>coefficients=[[0.00078238 0.48857741 0.63971985 0.23 0.33] [0.01778079 0.73440271 0.20148638 0.54 0.12]]<line_sep>f=FDataBasis(bspline coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>f_deriv=f.derivative()<line_sep>np.testing.assert_array_almost_equal(f_deriv(t).round(3) np.array([[2.927 0.453 -1.229 0.6] [4.3 -1.599 1.016 -2.52]])[<ellipsis> np.newaxis])<block_end><def_stmt>test_evaluation_grid_bspline self<block_start>"""Test the evaluation of FDataBasis with the grid option set to true. Nothing should change since the domain dimension is 1, but the grid syntax should still be accepted. """<line_sep>bspline=BSpline(domain_range=(0 1) n_basis=5 order=3)<line_sep>coefficients=[[0.00078238 0.48857741 0.63971985 0.23 0.33] [0.01778079 0.73440271 0.20148638 0.54 0.12]]<line_sep>f=FDataBasis(bspline coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res_test=f(t)<line_sep># Different ways to pass the axes np.testing.assert_array_almost_equal(f(t grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f((t ) grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f([t] grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f(np.atleast_2d(t) grid=<true>) res_test)<line_sep># Number of axes different from the domain dimension (1) <with_stmt>np.testing.assert_raises(ValueError)<block_start>f((t t) grid=<true>)<block_end><block_end><def_stmt>test_evaluation_composed_bspline self<block_start>"""Test the evaluation of FDataBasis with a matrix of times instead of a list of times """<line_sep>bspline=BSpline(domain_range=(0 1) n_basis=5 order=3)<line_sep>coefficients=[[0.00078238 0.48857741 0.63971985 0.23 0.33] [0.01778079 0.73440271 0.20148638 0.54 0.12]]<line_sep>f=FDataBasis(bspline coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep># Test same result as the standard evaluation np.testing.assert_array_almost_equal(f([1]) f([[1] [1]] aligned=<false>))<line_sep>np.testing.assert_array_almost_equal(f(t) f(np.vstack((t t)) aligned=<false>))<line_sep># Different evaluation times t_multiple=[[0 0.5] [0.2 0.7]]<line_sep>np.testing.assert_array_almost_equal(f(t_multiple[0])[0] f(t_multiple aligned=<false>)[0])<line_sep>np.testing.assert_array_almost_equal(f(t_multiple[1])[1] f(t_multiple aligned=<false>)[1])<block_end><def_stmt>test_domain_in_list_bspline self<block_start>"""Test the evaluation of FDataBasis"""<for_stmt>bspline (BSpline(domain_range=[(0 1)] n_basis=5 order=3) BSpline(domain_range=((0 1) ) n_basis=5 order=3) BSpline(domain_range=np.array((0 1)) n_basis=5 order=3) BSpline(domain_range=np.array([(0 1)]) n_basis=5 order=3))<block_start>coefficients=[[0.00078238 0.48857741 0.63971985 0.23 0.33] [0.01778079 0.73440271 0.20148638 0.54 0.12]]<line_sep>f=FDataBasis(bspline coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res=np.array([[0.001 0.564 0.435 0.33] [0.018 0.468 0.371 0.12]])[<ellipsis>
np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f(t).round(3) res)<line_sep>np.testing.assert_array_almost_equal(f.evaluate(t).round(3) res)<block_end># Check error <with_stmt>np.testing.assert_raises(ValueError)<block_start>BSpline(domain_range=[(0 1) (0 1)])<block_end><block_end><block_end><class_stmt>TestBasisEvaluationMonomial(unittest.TestCase)<block_start><def_stmt>test_evaluation_simple_monomial self<block_start>"""Test the evaluation of FDataBasis"""<line_sep>monomial=Monomial(domain_range=(0 2) n_basis=5)<line_sep>coefficients=np.array([[1 2 3 4 5] [6 7 8 9 10]])<line_sep>f=FDataBasis(monomial coefficients)<line_sep>t=np.linspace(0 2 11)<line_sep># Results in R package fda res=np.array([[1.00 1.56 2.66 4.79 8.62 15.00 25.00 39.86 61.03 90.14 129.00] [6.00 7.81 10.91 16.32 25.42 40.00 62.21 94.59 140.08 201.98 284.00]])[<ellipsis> np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f(t).round(2) res)<line_sep>np.testing.assert_array_almost_equal(f.evaluate(t).round(2) res)<block_end><def_stmt>test_evaluation_point_monomial self<block_start>"""Test the evaluation of a single point FDataBasis"""<line_sep>monomial=Monomial(domain_range=(0 1) n_basis=3)<line_sep>coefficients=[[1 2 3] [0.5 1.4 1.3]]<line_sep>f=FDataBasis(monomial coefficients)<line_sep># Test different ways of calling f with a point res=np.array([[2.75] [1.525]])[<ellipsis> np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f([0.5]).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f((0.5 )).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f(0.5).round(4) res)<line_sep>np.testing.assert_array_almost_equal(f(np.array([0.5])).round(4) res)<line_sep># Problematic case, should it be accepted or not? #np.testing.assert_array_almost_equal(f(np.array(0.5)).round(4), res) <block_end><def_stmt>test_evaluation_derivative_monomial self<block_start>"""Test the evaluation of the derivative of a FDataBasis"""<line_sep>monomial=Monomial(domain_range=(0 1) n_basis=3)<line_sep>coefficients=[[1 2 3] [0.5 1.4 1.3]]<line_sep>f=FDataBasis(monomial coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>f_deriv=f.derivative()<line_sep>np.testing.assert_array_almost_equal(f_deriv(t).round(3) np.array([[2. 4. 6. 8.] [1.4 2.267 3.133 4.]])[<ellipsis> np.newaxis])<block_end><def_stmt>test_evaluation_grid_monomial self<block_start>"""Test the evaluation of FDataBasis with the grid option set to true.
Nothing should change since the domain dimension is 1, but the grid syntax should still be accepted. """<line_sep>monomial=Monomial(domain_range=(0 1) n_basis=3)<line_sep>coefficients=[[1 2 3] [0.5 1.4 1.3]]<line_sep>f=FDataBasis(monomial coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res_test=f(t)<line_sep># Different ways to pass the axes np.testing.assert_array_almost_equal(f(t grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f((t ) grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f([t] grid=<true>) res_test)<line_sep>np.testing.assert_array_almost_equal(f(np.atleast_2d(t) grid=<true>) res_test)<line_sep># Number of axes different from the domain dimension (1) <with_stmt>np.testing.assert_raises(ValueError)<block_start>f((t t) grid=<true>)<block_end><block_end><def_stmt>test_evaluation_composed_monomial self<block_start>"""Test the evaluation of FDataBasis with a matrix of times instead of a list of times """<line_sep>monomial=Monomial(domain_range=(0 1) n_basis=3)<line_sep>coefficients=[[1 2 3] [0.5 1.4 1.3]]<line_sep>f=FDataBasis(monomial coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep># Test same result as the standard evaluation np.testing.assert_array_almost_equal(f([1]) f([[1] [1]] aligned=<false>))<line_sep>np.testing.assert_array_almost_equal(f(t) f(np.vstack((t t)) aligned=<false>))<line_sep># Different evaluation times t_multiple=[[0 0.5] [0.2 0.7]]<line_sep>np.testing.assert_array_almost_equal(f(t_multiple[0])[0] f(t_multiple aligned=<false>)[0])<line_sep>np.testing.assert_array_almost_equal(f(t_multiple[1])[1] f(t_multiple aligned=<false>)[1])<block_end><def_stmt>test_domain_in_list_monomial self<block_start>"""Test the evaluation of FDataBasis"""<for_stmt>monomial (Monomial(domain_range=[(0 1)] n_basis=3) Monomial(domain_range=((0 1) ) n_basis=3) Monomial(domain_range=np.array((0 1)) n_basis=3) Monomial(domain_range=np.array([(0 1)]) n_basis=3))<block_start>coefficients=[[1 2 3] [0.5 1.4 1.3]]<line_sep>f=FDataBasis(monomial coefficients)<line_sep>t=np.linspace(0 1 4)<line_sep>res=np.array([[1. 2. 3.667 6.]
[0.5 1.111 2.011 3.2]])[<ellipsis> np.newaxis]<line_sep>np.testing.assert_array_almost_equal(f(t).round(3) res)<line_sep>np.testing.assert_array_almost_equal(f.evaluate(t).round(3) res)<block_end><block_end><block_end><class_stmt>TestBasisEvaluationVectorValued(unittest.TestCase)<block_start><def_stmt>test_vector_valued_constant self<block_start>basis_first=Constant()<line_sep>basis_second=Constant()<line_sep>basis=VectorValued([basis_first basis_second])<line_sep>fd=FDataBasis(basis=basis coefficients=[[1 2] [3 4]])<line_sep>self.assertEqual(fd.dim_codomain 2)<line_sep>res=np.array([[[1 2]] [[3 4]]])<line_sep>np.testing.assert_allclose(fd(0) res)<block_end><def_stmt>test_vector_valued_constant_monomial self<block_start>basis_first=Constant(domain_range=(0 5))<line_sep>basis_second=Monomial(n_basis=3 domain_range=(0 5))<line_sep>basis=VectorValued([basis_first basis_second])<line_sep>fd=FDataBasis(basis=basis coefficients=[[1 2 3 4] [3 4 5 6]])<line_sep>self.assertEqual(fd.dim_codomain 2)<line_sep>np.testing.assert_allclose(fd.domain_range[0] (0 5))<line_sep>res=np.array([[[1 2] [1 9] [1 24]] [[3 4] [3 15] [3 38]]])<line_sep>np.testing.assert_allclose(fd([0 1 2]) res)<block_end><block_end><class_stmt>TestBasisEvaluationTensor(unittest.TestCase)<block_start><def_stmt>test_tensor_monomial_constant self<block_start>basis=Tensor([Monomial(n_basis=2) Constant()])<line_sep>fd=FDataBasis(basis=basis coefficients=[1 1])<line_sep>self.assertEqual(fd.dim_domain 2)<line_sep>self.assertEqual(fd.dim_codomain 1)<line_sep>np.testing.assert_allclose(fd([0. 0.]) [[[1.]]])<line_sep>np.testing.assert_allclose(fd([0.5 0.5]) [[[1.5]]])<line_sep>np.testing.assert_allclose(fd([(0. 0.) (0.5 0.5)]) [[[1.0] [1.5]]])<line_sep>fd_grid=fd.to_grid()<line_sep>fd2=fd_grid.to_basis(basis)<line_sep>np.testing.assert_allclose(fd.coefficients fd2.coefficients)<block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>print()<line_sep>unittest.main()<block_end>
# Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Derived from the SimpleSQL Parser example in pyparsing, retrofitted to just handle the # where clause predicates # https://github.com/pyparsing/pyparsing/blob/master/examples/simpleSQL.py <import_stmt>logging<import_from_stmt>pyparsing alphanums alphas CaselessKeyword delimitedList Group infixNotation oneOf opAssoc pyparsing_common<as>ppc quotedString Word <line_sep>_logger=logging.getLogger(__name__)<line_sep>AND,OR,IN,IS,NOT,NULL,BETWEEN=map(CaselessKeyword "and or in is not null between".split())<line_sep>NOT_NULL=NOT+NULL<line_sep>ident=Word(alphas alphanums+"_$").setName("identifier")<line_sep>columnName=delimitedList(ident "." combine=<true>).setName("column name")<line_sep>binop=oneOf("= == != < > >= <= eq ne lt le gt ge <>" caseless=<false>)<line_sep>realNum=ppc.real()<line_sep>intNum=ppc.signed_integer()<line_sep>columnRval=(realNum|intNum|quotedString|columnName)<line_sep># need to add support for alg expressions whereCondition=Group((columnName+binop+columnRval)|(columnName+IN+Group("("+delimitedList(columnRval)+")"))|(columnName+IS+(NULL|NOT_NULL))|(columnName+BETWEEN+columnRval+AND+columnRval))<line_sep>whereExpression=infixNotation(Group(whereCondition|NOT+whereCondition|NOT+Group('('+whereCondition+')')|NOT+columnName) [(NOT 1 opAssoc.LEFT) (AND 2 opAssoc.LEFT) (OR 2 opAssoc.LEFT) (IS 2 opAssoc.LEFT)] )<line_sep>op_map={"=":"eq" "==":"eq" "eq":"eq" ">":"gt" "gt":"gt" ">=":"gte" "gte":"gte" "<":"lt" "lt":"lt" "<=":"lte" "lte":"lte" "!":"not" "not":"not" "!=":"neq" "<>":"neq" "neq":"neq" "||":"or" "or":"or" "&&":"and" "and":"and" "in":"in" "between":"between" "is":"is"}<def_stmt>get_expr_tree tokens<block_start><if_stmt>isinstance(tokens (str int))<block_start><return>tokens<block_end><if_stmt>len(tokens)<g>1<block_start><if_stmt>(tokens[0]<eq>"not")<block_start><return>{"not":get_expr_tree(tokens[1])}<block_end><if_stmt>(tokens[0]<eq>"("<and>tokens[-1]<eq>")")<block_start><return>get_expr_tree(tokens[1:-1])<block_end><block_end><else_stmt><block_start><return>get_expr_tree(tokens[0])<block_end>op=op_map[tokens[1]]<if_stmt>op<eq>"in"<block_start><return>{'in':[get_expr_tree(tokens[0]) [token<for>token tokens[2][1:-1]]]}<block_end><elif_stmt>op<eq>"between"<block_start><return>{'and':[{"gte":[get_expr_tree(tokens[0]) tokens[2]]} {"lte":[get_expr_tree(tokens[0]) tokens[4]]}]}<block_end><elif_stmt>op<eq>"is"<block_start><if_stmt>tokens[2]<eq>'null'<block_start><return>{"missing":tokens[0]}<block_end><else_stmt><block_start><return>{"exists":tokens[0]}<block_end><block_end><if_stmt>len(tokens)<g>3<block_start>binary_tuples=get_expr_tree(tokens[2:])<block_end><else_stmt><block_start>binary_tuples=get_expr_tree(tokens[2])<block_end><return>{op:[get_expr_tree(tokens[0]) binary_tuples]}<block_end><def_stmt>get_expr node 
expr_map<block_start><if_stmt>isinstance(node dict)<block_start># expression dicts carry a single operator key <for_stmt>i node.keys()<block_start>op=i<block_end><if_stmt>op<eq>"literal"<block_start><return>node["literal"]<block_end>mapped_op=expr_map.get(op expr_map)<if_stmt>len(mapped_op)<eq>1<block_start>mapped_op=mapped_op[0]<block_end><if_stmt>mapped_op<is><none><block_start><raise>RuntimeError("no mapping for op: %s"%op)<block_end><if_stmt>op<in>("not" "exists" "missing")<block_start><return>mapped_op(get_expr(node[op] expr_map))<block_end><return>mapped_op(*get_expr(node[op] expr_map))<block_end><elif_stmt>isinstance(node (list tuple))<block_start><return>(get_expr(item expr_map)<for>item node)<block_end><elif_stmt>isinstance(node (str int float))<block_start><return>node<block_end><else_stmt><block_start><raise>RuntimeError("unknown node type: %s"%node)<block_end><block_end><def_stmt>parse_expr_string predicate_string expr_map<block_start><import_from_stmt>pyparsing ParseException<try_stmt><block_start>expr=whereExpression.parseString(predicate_string parseAll=<true>)<line_sep>expr=get_expr_tree(expr)<line_sep><return>get_expr(expr expr_map)<block_end><except_stmt>ParseException<as>pe<block_start>_logger.error("Error parsing string expression into iceberg expression: %s"%str(pe))<line_sep><raise><block_end><block_end>
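# Hypothetical usage sketch, not part of the original module: the len()==1 unwrapping in get_expr implies that expr_map values are one-element lists of constructor callables keyed by op name; a real caller would presumably supply iceberg expression builders, so the tuple-building lambdas below are illustrative stand-ins. demo_expr_map={"eq":[<lambda>l r:("eq" l r)] "gt":[<lambda>l r:("gt" l r)] "and":[<lambda>l r:("and" l r)]}<line_sep># parse_expr_string("x = 3 and y > 2", demo_expr_map) then evaluates to ("and", ("eq", "x", 3), ("gt", "y", 2))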
<import_stmt>unittest<import_stmt>logging<import_stmt>sys<import_stmt>numpy<as>np<import_stmt>nltk<line_sep># import os # os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID" # os.environ["CUDA_VISIBLE_DEVICES"] = "" <import_from_stmt>rnnmorph.predictor RNNMorphPredictor<import_from_stmt>rnnmorph.tag_genres tag_ru_files tag_en_files<class_stmt>TestLSTMMorph(unittest.TestCase)<block_start>@classmethod<def_stmt>setUpClass cls<block_start>logging.basicConfig(stream=sys.stdout level=logging.DEBUG)<line_sep>nltk.download("wordnet")<line_sep>nltk.download('averaged_perceptron_tagger')<line_sep>nltk.download('universal_tagset')<line_sep>cls.en_predictor=RNNMorphPredictor(language="en")<line_sep>cls.ru_predictor=RNNMorphPredictor(language="ru")<block_end><def_stmt>__assert_parse self parse pos normal_form tag<block_start>self.assertEqual(parse.pos pos)<line_sep>self.assertEqual(parse.normal_form normal_form)<line_sep>self.assertEqual(parse.tag tag)<block_end><def_stmt>test_ru_sentence_analysis1 self<block_start>forms=self.ru_predictor.predict(["косил" "косой" "косой" "косой"])<line_sep>self.__assert_parse(forms[0] 'VERB' 'косить' 'Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act')<line_sep>self.assertIn(1 forms[0].vector)<block_end><def_stmt>test_empty_sentence self<block_start>forms=self.ru_predictor.predict([])<line_sep>self.assertEqual(forms [])<block_end><def_stmt>test_ru_sentence_analysis2 self<block_start>forms=self.ru_predictor.predict(["мама" "мыла" "раму"])<line_sep>self.__assert_parse(forms[0] 'NOUN' 'мама' 'Case=Nom|Gender=Fem|Number=Sing')<line_sep>self.__assert_parse(forms[1] 'VERB' 'мыть' 'Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act')<line_sep>self.__assert_parse(forms[2] 'NOUN' 'рама' 'Case=Acc|Gender=Fem|Number=Sing')<block_end><def_stmt>test_ru_sentences_analysis1 self<block_start>forms=self.ru_predictor.predict_sentences([["косил" "косой" "косой" "косой"] ["мама" "мыла" "раму"]])<line_sep>self.__assert_parse(forms[0][0] 'VERB' 'косить' 'Gender=Masc|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act')<line_sep>self.__assert_parse(forms[1][0] 'NOUN' 'мама' 'Case=Nom|Gender=Fem|Number=Sing')<line_sep>self.__assert_parse(forms[1][1] 'VERB' 'мыть' 'Gender=Fem|Mood=Ind|Number=Sing|Tense=Past|VerbForm=Fin|Voice=Act')<line_sep>self.__assert_parse(forms[1][2] 'NOUN' 'рама' 'Case=Acc|Gender=Fem|Number=Sing')<block_end><def_stmt>test_empty_sentences self<block_start>forms=self.ru_predictor.predict_sentences([[]])<line_sep>self.assertEqual(forms [[]])<block_end><def_stmt>test_ru_one_empty_sentence_in_sentences self<block_start>forms=self.ru_predictor.predict_sentences([["косил" "косой" "косой" "косой"] []])<line_sep>self.assertEqual(forms[1] [])<line_sep>self.assertNotEqual(forms[0] [])<block_end><def_stmt>test_ru_proba self<block_start>forms=self.ru_predictor.predict(["косил" "косой" "косой" "косой"] include_all_forms=<true>)<line_sep>self.assertEqual(len(forms[0].possible_forms) 252)<line_sep>indices=np.array([form.score<for>form forms[2].possible_forms]).argsort()[-5:][::-1]<line_sep>variants=[forms[2].possible_forms[i].tag<for>i indices]<line_sep>self.assertIn('Case=Nom|Degree=Pos|Gender=Masc|Number=Sing' variants)<block_end><def_stmt>test_ru_genres_accuracy self<block_start>quality=tag_ru_files(self.ru_predictor)<line_sep>self.assertGreater(quality['Lenta'].tag_accuracy 95)<line_sep>self.assertGreater(quality['Lenta'].sentence_accuracy 70)<line_sep>self.assertGreater(quality['VK'].tag_accuracy 
93)<line_sep>self.assertGreater(quality['VK'].sentence_accuracy 65)<line_sep>self.assertGreater(quality['JZ'].tag_accuracy 94)<line_sep>self.assertGreater(quality['JZ'].sentence_accuracy 70)<line_sep>print("Tag accuracy across all genres: %.2f%%"%(quality['All']['tag_accuracy']<times>100))<line_sep>print("PoS tag accuracy across all genres: %.2f%%"%(quality['All']['pos_accuracy']<times>100))<line_sep>print("Sentence accuracy across all genres: %.2f%%"%(quality['All']['sentence_accuracy']<times>100))<line_sep>self.assertGreater(quality['All']['tag_accuracy'] 0.95)<block_end><def_stmt>test_en_accuracy self<block_start>self.assertGreater(tag_en_files(self.en_predictor).tag_accuracy 85)<block_end><block_end>
<import_from_stmt>lib.test.vot20.stark_vot20lt run_vot_exp<import_stmt>os<line_sep>os.environ['CUDA_VISIBLE_DEVICES']='6'<line_sep>run_vot_exp('stark_st' 'baseline' vis=<false>)<line_sep>
# Copyright 2021 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Tests for jax_cfd.subgrid_models."""<import_stmt>functools<import_from_stmt>absl.testing absltest<import_from_stmt>absl.testing parameterized<import_stmt>jax.numpy<as>jnp<import_from_stmt>jax_cfd.base advection<import_from_stmt>jax_cfd.base boundaries<import_from_stmt>jax_cfd.base finite_differences<as>fd<import_from_stmt>jax_cfd.base funcutils<import_from_stmt>jax_cfd.base grids<import_from_stmt>jax_cfd.base pressure<import_from_stmt>jax_cfd.base subgrid_models<import_from_stmt>jax_cfd.base test_util<import_stmt>numpy<as>np<def_stmt>periodic_grid_variable data offset grid<block_start><return>grids.GridVariable(array=grids.GridArray(data offset grid) bc=boundaries.periodic_boundary_conditions(grid.ndim))<block_end><def_stmt>zero_velocity_field grid:grids.Grid<arrow>grids.GridVariableVector<block_start>"""Returns all-zero periodic velocity fields."""<line_sep><return>tuple(periodic_grid_variable(jnp.zeros(grid.shape) o grid)<for>o grid.cell_faces)<block_end><def_stmt>sinusoidal_velocity_field grid:grids.Grid<arrow>grids.GridVariableVector<block_start>"""Returns a divergence-free velocity flow on `grid`."""<line_sep>mesh_size=jnp.array(grid.shape)<times>jnp.array(grid.step)<line_sep>vs=tuple(jnp.sin(2.<times>np.pi<times>g/s)<for>g,s zip(grid.mesh() mesh_size))<line_sep><return>tuple(periodic_grid_variable(v o grid)<for>v,o zip(vs[1:]+vs[:1] grid.cell_faces))<block_end><def_stmt>gaussian_force_field grid:grids.Grid<arrow>grids.GridArrayVector<block_start>"""Returns a 'Gaussian-shaped' force field in the 'x' direction."""<line_sep>mesh=grid.mesh()<line_sep>mesh_size=jnp.array(grid.shape)<times>jnp.array(grid.step)<line_sep>offsets=grid.cell_faces<line_sep>v=[grids.GridArray(jnp.exp(-sum([jnp.square(x/s-.5)<for>x,s zip(mesh mesh_size)])<times>100.)
offsets[0] grid)]<for_stmt>j range(1 grid.ndim)<block_start>v.append(grids.GridArray(jnp.zeros(grid.shape) offsets[j] grid))<block_end><return>tuple(v)<block_end><def_stmt>gaussian_forcing v:grids.GridVariableVector<arrow>grids.GridArrayVector<block_start>"""Returns Gaussian field forcing."""<line_sep>grid=grids.consistent_grid(*v)<line_sep><return>gaussian_force_field(grid)<block_end><def_stmt>momentum v:grids.GridVariableVector density:float<block_start>"""Returns the momentum due to velocity field `v`."""<line_sep>grid=grids.consistent_grid(*v)<line_sep><return>jnp.array([u.data<for>u v]).sum()<times>density<times>jnp.array(grid.step).prod()<block_end><def_stmt>_convect_upwind v:grids.GridVariableVector<arrow>grids.GridArrayVector<block_start><return>tuple(advection.advect_upwind(u v)<for>u v)<block_end><class_stmt>SubgridModelsTest(test_util.TestCase)<block_start><def_stmt>test_smagorinsky_viscosity self<block_start>grid=grids.Grid((3 3))<line_sep>v=(periodic_grid_variable(jnp.zeros(grid.shape) (1 0.5) grid) periodic_grid_variable(jnp.zeros(grid.shape) (0.5 1) grid))<line_sep>c00=grids.GridArray(jnp.zeros(grid.shape) offset=(0 0) grid=grid)<line_sep>c01=grids.GridArray(jnp.zeros(grid.shape) offset=(0 1) grid=grid)<line_sep>c10=grids.GridArray(jnp.zeros(grid.shape) offset=(1 0) grid=grid)<line_sep>c11=grids.GridArray(jnp.zeros(grid.shape) offset=(1 1) grid=grid)<line_sep>s_ij=grids.GridArrayTensor(np.array([[c00 c01] [c10 c11]]))<line_sep>viscosity=subgrid_models.smagorinsky_viscosity(s_ij=s_ij v=v dt=0.1 cs=0.2)<line_sep>self.assertIsInstance(viscosity grids.GridArrayTensor)<line_sep>self.assertEqual(viscosity.shape (2 2))<line_sep>self.assertAllClose(viscosity[0 0] c00)<line_sep>self.assertAllClose(viscosity[0 1] c01)<line_sep>self.assertAllClose(viscosity[1 0] c10)<line_sep>self.assertAllClose(viscosity[1 1] c11)<block_end><def_stmt>test_evm_model self<block_start>grid=grids.Grid((3 3))<line_sep>v=(periodic_grid_variable(jnp.zeros(grid.shape) (1 0.5) grid) periodic_grid_variable(jnp.zeros(grid.shape) (0.5 1) grid))<line_sep>viscosity_fn=functools.partial(subgrid_models.smagorinsky_viscosity dt=1.0 cs=0.2)<line_sep>acceleration=subgrid_models.evm_model(v viscosity_fn)<line_sep>self.assertIsInstance(acceleration tuple)<line_sep>self.assertLen(acceleration 2)<line_sep>self.assertAllClose(acceleration[0] v[0].array)<line_sep>self.assertAllClose(acceleration[1] v[1].array)<block_end>@parameterized.named_parameters(dict(testcase_name='sinusoidal_velocity_base' cs=0.0 velocity=sinusoidal_velocity_field forcing=<none> shape=(100 100) step=(1. 1.) density=1. viscosity=1e-4 convect=advection.convect_linear pressure_solve=pressure.solve_cg dt=1e-3 time_steps=1000 divergence_atol=1e-3 momentum_atol=1e-3) dict(testcase_name='gaussian_force_upwind_with_subgrid_model' cs=0.12 velocity=zero_velocity_field forcing=gaussian_forcing shape=(40 40 40) step=(1. 1. 1.) density=1. viscosity=0 convect=_convect_upwind pressure_solve=pressure.solve_cg dt=1e-3 time_steps=100 divergence_atol=1e-4 momentum_atol=1e-4) dict(testcase_name='sinusoidal_velocity_with_subgrid_model' cs=0.12 velocity=sinusoidal_velocity_field forcing=<none> shape=(100 100) step=(1. 1.) density=1. 
viscosity=1e-4 convect=advection.convect_linear pressure_solve=pressure.solve_fast_diag dt=1e-3 time_steps=1000 divergence_atol=1e-3 momentum_atol=1e-3) )<def_stmt>test_divergence_and_momentum self cs velocity forcing shape step density viscosity convect pressure_solve dt time_steps divergence_atol momentum_atol <block_start>grid=grids.Grid(shape step)<line_sep>kwargs=dict(density=density viscosity=viscosity cs=cs dt=dt grid=grid convect=convect pressure_solve=pressure_solve forcing=forcing)<line_sep># Explicit and implicit navier-stokes solvers: explicit_eq=subgrid_models.explicit_smagorinsky_navier_stokes(**kwargs)<line_sep>implicit_eq=subgrid_models.implicit_smagorinsky_navier_stokes(**kwargs)<line_sep>v_initial=velocity(grid)<line_sep>v_final=funcutils.repeated(explicit_eq time_steps)(v_initial)<line_sep># TODO(dkochkov) consider adding more thorough tests for these models. <with_stmt>self.subTest('divergence free')<block_start>divergence=fd.divergence(v_final)<line_sep>self.assertLess(jnp.max(divergence.data) divergence_atol)<block_end><with_stmt>self.subTest('conservation of momentum')<block_start>initial_momentum=momentum(v_initial density)<line_sep>final_momentum=momentum(v_final density)<if_stmt>forcing<is><not><none><block_start>expected_change=(jnp.array([f.data<for>f forcing(v_initial)]).sum()<times>jnp.array(grid.step).prod()<times>dt<times>time_steps)<block_end><else_stmt><block_start>expected_change=0<block_end>expected_momentum=initial_momentum+expected_change<line_sep>self.assertAllClose(expected_momentum final_momentum atol=momentum_atol)<block_end><with_stmt>self.subTest('explicit-implicit consistency')<block_start>v_final_2=funcutils.repeated(implicit_eq time_steps)(v_initial)<for_stmt>axis range(grid.ndim)<block_start>self.assertAllClose(v_final[axis] v_final_2[axis] atol=1e-4 err_msg=f'axis={axis}')<block_end><block_end><block_end><block_end><if_stmt>__name__<eq>'__main__'<block_start>absltest.main()<block_end>
"""This problem was asked by Two Sigma. You’re tracking stock price at a given instance of time. Implement an API with the following functions: add(), update(), remove(), which adds/updates/removes a datapoint for the stock price you are tracking. The data is given as (timestamp, price), where timestamp is specified in unix epoch time. Also, provide max(), min(), and average() functions that give the max/min/average of all values seen thus far. """<line_sep>
<import_from_stmt>django.db models<class_stmt>SampleKeyword(models.Model)<block_start>"""An ontology term associated with a sample in our database"""<line_sep>name=models.ForeignKey("OntologyTerm" on_delete=models.CASCADE related_name="+")<line_sep>sample=models.ForeignKey("Sample" on_delete=models.CASCADE related_name="keywords")<line_sep>source=models.ForeignKey("Contribution" on_delete=models.CASCADE)<block_end>
# Copyright 2018 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Functions used to provision Fuchsia boot images."""<import_stmt>common<import_stmt>logging<import_stmt>os<import_stmt>subprocess<import_stmt>tempfile<import_stmt>time<import_stmt>uuid<line_sep>_SSH_CONFIG_TEMPLATE=""" Host * CheckHostIP no StrictHostKeyChecking no ForwardAgent no ForwardX11 no UserKnownHostsFile {known_hosts} User fuchsia IdentitiesOnly yes IdentityFile {identity} ServerAliveInterval 2 ServerAliveCountMax 5 ControlMaster auto ControlPersist 1m ControlPath /tmp/ssh-%r@%h:%p ConnectTimeout 5 """<line_sep>FVM_TYPE_QCOW='qcow'<line_sep>FVM_TYPE_SPARSE='sparse'<line_sep># Specifies boot files intended for use by an emulator. TARGET_TYPE_QEMU='qemu'<line_sep># Specifies boot files intended for use by anything (incl. physical devices). TARGET_TYPE_GENERIC='generic'<def_stmt>_GetPubKeyPath output_dir<block_start>"""Returns a path to the generated SSH public key."""<line_sep><return>os.path.join(output_dir 'id_ed25519.pub')<block_end><def_stmt>ProvisionSSH output_dir<block_start>"""Generates a keypair and config file for SSH."""<line_sep>host_key_path=os.path.join(output_dir 'ssh_key')<line_sep>host_pubkey_path=host_key_path+'.pub'<line_sep>id_key_path=os.path.join(output_dir 'id_ed25519')<line_sep>id_pubkey_path=_GetPubKeyPath(output_dir)<line_sep>known_hosts_path=os.path.join(output_dir 'known_hosts')<line_sep>ssh_config_path=os.path.join(output_dir 'ssh_config')<line_sep>logging.debug('Generating SSH credentials.')<if_stmt><not>os.path.isfile(host_key_path)<block_start>subprocess.check_call(['ssh-keygen' '-t' 'ed25519' '-h' '-f' host_key_path '-P' '' '-N' ''] stdout=open(os.devnull 'w'))<block_end><if_stmt><not>os.path.isfile(id_key_path)<block_start>subprocess.check_call(['ssh-keygen' '-t' 'ed25519' '-f' id_key_path '-P' '' '-N' ''] stdout=open(os.devnull 'w'))<block_end><with_stmt>open(ssh_config_path "w")<as>ssh_config<block_start>ssh_config.write(_SSH_CONFIG_TEMPLATE.format(identity=id_key_path known_hosts=known_hosts_path))<block_end><if_stmt>os.path.exists(known_hosts_path)<block_start>os.remove(known_hosts_path)<block_end><block_end><def_stmt>GetTargetFile filename target_arch target_type<block_start>"""Computes a path to |filename| in the Fuchsia boot image directory specific to |target_type| and |target_arch|."""<assert_stmt>target_type<eq>TARGET_TYPE_QEMU<or>target_type<eq>TARGET_TYPE_GENERIC<line_sep><return>os.path.join(common.IMAGES_ROOT target_arch target_type filename)<block_end><def_stmt>GetSSHConfigPath output_dir<block_start><return>output_dir+'/ssh_config'<block_end><def_stmt>GetBootImage output_dir target_arch target_type<block_start>"""Gets a path to the Zircon boot image, with the SSH client public key added."""<line_sep>ProvisionSSH(output_dir)<line_sep>pubkey_path=_GetPubKeyPath(output_dir)<line_sep>zbi_tool=common.GetHostToolPathFromPlatform('zbi')<line_sep>image_source_path=GetTargetFile('zircon-a.zbi' target_arch target_type)<line_sep>image_dest_path=os.path.join(output_dir 'gen' 'fuchsia-with-keys.zbi')<line_sep>cmd=[zbi_tool '-o' image_dest_path image_source_path '-e' 'data/ssh/authorized_keys='+pubkey_path]<line_sep>subprocess.check_call(cmd)<line_sep><return>image_dest_path<block_end><def_stmt>GetKernelArgs output_dir<block_start><return>['devmgr.epoch=%d'%time.time()]<block_end><def_stmt>AssertBootImagesExist arch platform<block_start><assert_stmt>os.path.exists(GetTargetFile('zircon-a.zbi'
arch platform)) 'This checkout is missing the files necessary for\n'<concat>'booting this configuration of Fuchsia.\n'<concat>'To check out the files, add this entry to the "custom_vars"\n'<concat>'section of your .gclient file:\n\n'<concat>' "checkout_fuchsia_boot_images": "%s.%s"\n\n'%(platform arch)<block_end>
""" A CPython inspired RPython parser. """<import_from_stmt>rpython.rlib.objectmodel not_rpython<class_stmt>Grammar(object)<block_start>""" Base Grammar object. Pass this to ParserGenerator.build_grammar to fill it with useful values for the Parser. """<def_stmt>__init__ self<block_start>self.symbol_ids={}<line_sep>self.symbol_names={}<line_sep>self.symbol_to_label={}<line_sep>self.keyword_ids={}<line_sep>self.token_to_error_string={}<line_sep>self.dfas=[]<line_sep>self.labels=[0]<line_sep>self.token_ids={}<line_sep>self.start=-1<block_end><def_stmt>shared_copy self<block_start>new=self.__class__()<line_sep>new.symbol_ids=self.symbol_ids<line_sep>new.symbols_names=self.symbol_names<line_sep>new.keyword_ids=self.keyword_ids<line_sep>new.token_to_error_string=self.token_to_error_string<line_sep>new.dfas=self.dfas<line_sep>new.labels=self.labels<line_sep>new.token_ids=self.token_ids<line_sep><return>new<block_end><def_stmt>classify self token<block_start>"""Find the label for a token."""<if_stmt>token.token_type<eq>self.KEYWORD_TOKEN<block_start>label_index=self.keyword_ids.get(token.value -1)<if_stmt>label_index<ne>-1<block_start><return>label_index<block_end><block_end>label_index=self.token_ids.get(token.token_type -1)<if_stmt>label_index<eq>-1<block_start><raise>ParseError("invalid token" token)<block_end><return>label_index<block_end><def_stmt>_freeze_ self# Remove some attributes not used in parsing. <block_start><try_stmt><block_start><del_stmt>self.symbol_to_label<del_stmt>self.symbol_names<del_stmt>self.symbol_ids<block_end><except_stmt>AttributeError<block_start><pass><block_end><return><true><block_end><block_end><class_stmt>DFA(object)<block_start><def_stmt>__init__ self grammar symbol_id states first<block_start>self.grammar=grammar<line_sep>self.symbol_id=symbol_id<line_sep>self.states=states<line_sep>self.first=self._first_to_string(first)<block_end><def_stmt>could_match_token self label_index<block_start>pos=label_index<rshift>3<line_sep>bit=1<lshift>(label_index&0b111)<line_sep><return>bool(ord(self.first[label_index<rshift>3])&bit)<block_end>@staticmethod@not_rpython<def_stmt>_first_to_string first<block_start>l=sorted(first.keys())<line_sep>b=bytearray(32)<for_stmt>label_index l<block_start>pos=label_index<rshift>3<line_sep>bit=1<lshift>(label_index&0b111)<line_sep>b[pos]<augor>bit<block_end><return>str(b)<block_end><block_end><class_stmt>Token(object)<block_start><def_stmt>__init__ self token_type value lineno column line<block_start>self.token_type=token_type<line_sep>self.value=value<line_sep>self.lineno=lineno<line_sep># 0-based offset self.column=column<line_sep>self.line=line<block_end><def_stmt>__repr__ self<block_start><return>"Token(%s, %s)"%(self.token_type self.value)<block_end><def_stmt>__eq__ self other# for tests <block_start><return>(self.token_type<eq>other.token_type<and>self.value<eq>other.value<and>self.lineno<eq>other.lineno<and>self.column<eq>other.column<and>self.line<eq>other.line)<block_end><def_stmt>__ne__ self other<block_start><return><not>self<eq>other<block_end><block_end><class_stmt>Node(object)<block_start>__slots__=("grammar" "type")<def_stmt>__init__ self grammar type<block_start><assert_stmt>grammar<is><none><or>isinstance(grammar Grammar)<assert_stmt>isinstance(type int)<line_sep>self.grammar=grammar<line_sep>self.type=type<block_end><def_stmt>__eq__ self other<block_start><raise>NotImplementedError("abstract base class")<block_end><def_stmt>__ne__ self other<block_start><return><not>self<eq>other<block_end><def_stmt>get_value 
self<block_start><return><none><block_end><def_stmt>get_child self i<block_start><raise>NotImplementedError("abstract base class")<block_end><def_stmt>num_children self<block_start><return>0<block_end><def_stmt>append_child self child<block_start><raise>NotImplementedError("abstract base class")<block_end><def_stmt>get_lineno self<block_start><raise>NotImplementedError("abstract base class")<block_end><def_stmt>get_column self<block_start><raise>NotImplementedError("abstract base class")<block_end><def_stmt>get_line self<block_start><raise>NotImplementedError("abstract base class")<block_end><def_stmt>view self<block_start><import_from_stmt>dotviewer graphclient<import_stmt>pytest<line_sep>r=["digraph G {"]<line_sep>self._dot(r)<line_sep>r.append("}")<line_sep>p=pytest.ensuretemp("pyparser").join("temp.dot")<line_sep>p.write("\n".join(r))<line_sep>graphclient.display_dot_file(str(p))<block_end><def_stmt>_dot self result<block_start><raise>NotImplementedError("abstract base class")<block_end><block_end><class_stmt>Terminal(Node)<block_start>__slots__=("value" "lineno" "column" "line")<def_stmt>__init__ self grammar type value lineno column line=<none><block_start>Node.__init__(self grammar type)<line_sep>self.value=value<line_sep>self.lineno=lineno<line_sep>self.column=column<line_sep>self.line=line<block_end>@staticmethod<def_stmt>fromtoken grammar token<block_start><return>Terminal(grammar token.token_type token.value token.lineno token.column token.line)<block_end><def_stmt>__repr__ self<block_start><return>"Terminal(type=%s, value=%r)"%(self.type self.value)<block_end><def_stmt>__eq__ self other# For tests. <block_start><return>(type(self)<eq>type(other)<and>self.type<eq>other.type<and>self.value<eq>other.value)<block_end><def_stmt>get_value self<block_start><return>self.value<block_end><def_stmt>get_lineno self<block_start><return>self.lineno<block_end><def_stmt>get_column self<block_start><return>self.column<block_end><def_stmt>get_line self<block_start><return>self.line<block_end><def_stmt>_dot self result<block_start>result.append('%s [label="%r", shape=box];'%(id(self) self.value))<block_end><block_end><class_stmt>AbstractNonterminal(Node)<block_start>__slots__=()<def_stmt>get_lineno self<block_start><return>self.get_child(0).get_lineno()<block_end><def_stmt>get_column self<block_start><return>self.get_child(0).get_column()<block_end><def_stmt>get_line self<block_start><return>self.get_child(0).get_line()<block_end><def_stmt>__eq__ self other# For tests. 
# grumble, annoying <block_start><if_stmt><not>isinstance(other AbstractNonterminal)<block_start><return><false><block_end><if_stmt>self.type<ne>other.type<block_start><return><false><block_end><if_stmt>self.num_children()<ne>other.num_children()<block_start><return><false><block_end><for_stmt>i range(self.num_children())<block_start><if_stmt>self.get_child(i)<ne>other.get_child(i)<block_start><return><false><block_end><block_end><return><true><block_end><def_stmt>_dot self result<block_start><for_stmt>i range(self.num_children())<block_start>child=self.get_child(i)<line_sep>result.append('%s [label=%s, shape=box]'%(id(self) self.grammar.symbol_names[self.type]))<line_sep>result.append('%s -> %s [label="%s"]'%(id(self) id(child) i))<line_sep>child._dot(result)<block_end><block_end><block_end><class_stmt>Nonterminal(AbstractNonterminal)<block_start>__slots__=("_children" )<def_stmt>__init__ self grammar type children=<none><block_start>Node.__init__(self grammar type)<if_stmt>children<is><none><block_start>children=[]<block_end>self._children=children<block_end><def_stmt>__repr__ self<block_start><return>"Nonterminal(type=%s, children=%r)"%(self.grammar.symbol_names[self.type]<if>self.grammar<is><not><none><else>self.type self._children)<block_end><def_stmt>get_child self i<block_start><assert_stmt>self._children<is><not><none><line_sep><return>self._children[i]<block_end><def_stmt>num_children self<block_start><return>len(self._children)<block_end><def_stmt>append_child self child<block_start>self._children.append(child)<block_end><block_end><class_stmt>Nonterminal1(AbstractNonterminal)<block_start>__slots__=("_child" )<def_stmt>__init__ self grammar type child<block_start>Node.__init__(self grammar type)<line_sep>self._child=child<block_end><def_stmt>__repr__ self<block_start><return>"Nonterminal(type=%s, children=[%r])"%(self.grammar.symbol_names[self.type]<if>self.grammar<is><not><none><else>self.type self._child)<block_end><def_stmt>get_child self i<block_start><assert_stmt>i<eq>0<or>i<eq>-1<line_sep><return>self._child<block_end><def_stmt>num_children self<block_start><return>1<block_end><def_stmt>append_child self child<block_start><assert_stmt>0 "should be unreachable"<block_end><block_end><class_stmt>ParseError(Exception)<block_start><def_stmt>__init__ self msg token expected=-1 expected_str=<none><block_start>self.msg=msg<line_sep>self.token=token<line_sep>self.expected=expected<line_sep>self.expected_str=expected_str<block_end><def_stmt>__str__ self<block_start><return>"ParserError(%s)"%(self.token )<block_end><block_end><class_stmt>StackEntry(object)<block_start><def_stmt>__init__ self next dfa state<block_start>self.next=next<line_sep>self.dfa=dfa<line_sep>self.state=state<line_sep>self.node=<none><block_end><def_stmt>push self dfa state<block_start><return>StackEntry(self dfa state)<block_end><def_stmt>pop self<block_start><return>self.next<block_end><def_stmt>node_append_child self child<block_start>node=self.node<if_stmt>node<is><none><block_start>self.node=Nonterminal1(self.dfa.grammar self.dfa.symbol_id child)<block_end><elif_stmt>isinstance(node Nonterminal1)<block_start>newnode=self.node=Nonterminal(self.dfa.grammar self.dfa.symbol_id [node._child child])<block_end><else_stmt><block_start>self.node.append_child(child)<block_end><block_end><def_stmt>view self<block_start><import_from_stmt>dotviewer graphclient<import_stmt>pytest<line_sep>r=["digraph G 
{"]<line_sep>self._dot(r)<line_sep>r.append("}")<line_sep>p=pytest.ensuretemp("pyparser").join("temp.dot")<line_sep>p.write("\n".join(r))<line_sep>graphclient.display_dot_file(str(p))<block_end><def_stmt>_dot self result<block_start>result.append('%s [label=%s, shape=box, color=white]'%(id(self) self.dfa.grammar.symbol_names[self.dfa.symbol_id]))<if_stmt>self.next<block_start>result.append('%s -> %s [label="next"]'%(id(self) id(self.next)))<line_sep>self.next._dot(result)<block_end><if_stmt>self.node<block_start>result.append('%s -> %s [label="node"]'%(id(self) id(self.node)))<line_sep>self.node._dot(result)<block_end><block_end><block_end><class_stmt>Parser(object)<block_start><def_stmt>__init__ self grammar<block_start>self.grammar=grammar<line_sep>self.root=<none><block_end><def_stmt>prepare self start=-1<block_start>"""Setup the parser for parsing. Takes the starting symbol as an argument. """<if_stmt>start<eq>-1<block_start>start=self.grammar.start<block_end>self.root=<none><line_sep>self.stack=StackEntry(<none> self.grammar.dfas[start-256] 0)<block_end><def_stmt>add_token self token<block_start>label_index=self.grammar.classify(token)<line_sep>sym_id=0# for the annotator <while_stmt><true><block_start>dfa=self.stack.dfa<line_sep>state_index=self.stack.state<line_sep>states=dfa.states<line_sep>arcs,is_accepting=states[state_index]<for_stmt>i,next_state arcs<block_start>sym_id=self.grammar.labels[i]<if_stmt>label_index<eq>i# We matched a non-terminal. <block_start>self.shift(next_state token)<line_sep>state=states[next_state]<line_sep># While the only possible action is to accept, pop nodes off # the stack. <while_stmt>state[1]<and><not>state[0]<block_start>self.pop()<if_stmt>self.stack<is><none># Parsing is done. <block_start><return><true><block_end>dfa=self.stack.dfa<line_sep>state_index=self.stack.state<line_sep>state=dfa.states[state_index]<block_end><return><false><block_end><elif_stmt>sym_id<ge>256<block_start>sub_node_dfa=self.grammar.dfas[sym_id-256]<line_sep># Check if this token can start a child node. <if_stmt>sub_node_dfa.could_match_token(label_index)<block_start>self.push(sub_node_dfa next_state sym_id)<line_sep><break><block_end><block_end><block_end><else_stmt># We failed to find any arcs to another state, so unless this # state is accepting, it's invalid input. <block_start><if_stmt>is_accepting<block_start>self.pop()<if_stmt>self.stack<is><none><block_start><raise>ParseError("too much input" token)<block_end><block_end><else_stmt># If only one possible input would satisfy, attach it to the # error. 
<block_start><if_stmt>len(arcs)<eq>1<block_start>expected=sym_id<line_sep>expected_str=self.grammar.token_to_error_string.get(arcs[0][0] <none>)<block_end><else_stmt><block_start>expected=-1<line_sep>expected_str=<none><block_end><raise>ParseError("bad input" token expected expected_str)<block_end><block_end><block_end><block_end><def_stmt>shift self next_state token<block_start>"""Shift a terminal and prepare for the next state."""<line_sep>new_node=Terminal.fromtoken(self.grammar token)<line_sep>self.stack.node_append_child(new_node)<line_sep>self.stack.state=next_state<block_end><def_stmt>push self next_dfa next_state node_type<block_start>"""Push a non-terminal and adjust the current state."""<line_sep>self.stack.state=next_state<line_sep>self.stack=self.stack.push(next_dfa 0)<block_end><def_stmt>pop self<block_start>"""Pop an entry off the stack and make its node a child of the last."""<line_sep>top=self.stack<line_sep>self.stack=top.pop()<line_sep>node=top.node<assert_stmt>node<is><not><none><if_stmt>self.stack<block_start>self.stack.node_append_child(node)<block_end><else_stmt><block_start>self.root=node<block_end><block_end><block_end>
<import_from_stmt>mpl_toolkits.axisartist.angle_helper *<line_sep>
<def_stmt>test_load_case case_obj adapter## GIVEN a database with no cases <block_start><assert_stmt>adapter.case_collection.find_one()<is><none><line_sep>## WHEN loading a case adapter._add_case(case_obj)<line_sep>## THEN assert that the case has been loaded with correct info <assert_stmt>adapter.case_collection.find_one()<block_end><def_stmt>test_load_case_rank_model_version case_obj adapter## GIVEN a database with no cases <block_start><assert_stmt>adapter.case_collection.find_one()<is><none><line_sep>## WHEN loading a case adapter._add_case(case_obj)<line_sep>## THEN assert that the case has been loaded with rank_model loaded_case=adapter.case_collection.find_one({"_id":case_obj["_id"]})<assert_stmt>loaded_case["rank_model_version"]<eq>case_obj["rank_model_version"]<assert_stmt>loaded_case["sv_rank_model_version"]<eq>case_obj["sv_rank_model_version"]<block_end><def_stmt>test_load_case_limsid case_obj adapter<block_start>"""Test loading a case with lims_id"""<line_sep>## GIVEN a database with no cases <assert_stmt>adapter.case_collection.find_one()<is><none><line_sep>## WHEN loading a case adapter._add_case(case_obj)<line_sep>## THEN assert that the case has been loaded with lims id loaded_case=adapter.case_collection.find_one({"_id":case_obj["_id"]})<assert_stmt>loaded_case["lims_id"]<eq>case_obj["lims_id"]<block_end>
load("@bazel_tools//tools/build_defs/repo:jvm.bzl" "jvm_maven_import_external")<line_sep>_default_server_urls=["https://repo.maven.apache.org/maven2/" "https://mvnrepository.com/artifact" "https://maven-central.storage.googleapis.com" "http://gitblit.github.io/gitblit-maven" "https://repository.mulesoft.org/nexus/content/repositories/public/" ]<def_stmt>safe_exodus_maven_import_external name artifact **kwargs<block_start><if_stmt>native.existing_rule(name)<eq><none><block_start>exodus_maven_import_external(name=name artifact=artifact **kwargs)<block_end><block_end><def_stmt>exodus_maven_import_external name artifact **kwargs<block_start>fetch_sources=kwargs.get("srcjar_sha256")<ne><none><line_sep>exodus_maven_import_external_sources(name artifact fetch_sources **kwargs)<block_end><def_stmt>exodus_snapshot_maven_import_external name artifact **kwargs<block_start>exodus_maven_import_external_sources(name artifact <true> **kwargs)<block_end><def_stmt>exodus_maven_import_external_sources name artifact fetch_sources **kwargs<block_start>jvm_maven_import_external(name=name artifact=artifact licenses=["notice"] # Apache 2.0 fetch_sources=fetch_sources server_urls=_default_server_urls **kwargs)<block_end>