Columns (name, dtype, observed range or distinct values):

column              dtype     range / classes
name                string    lengths 1 - 152
class_name          string    lengths 1 - 51
class_bases         string    lengths 0 - 159
is_member           bool      2 classes
args                string    lengths 0 - 804
class_docstr        string    lengths 4 - 8.19k
class_docstr_tok    string    lengths 2 - 11.6k
docstr              string    lengths 0 - 11.4k
docstr_tok          string    lengths 2 - 13.4k
returns             string    lengths 0 - 260
code                string    lengths 21 - 52.4k
code_tok            string    lengths 33 - 92.8k
lstart              int64     1 - 1.75k
lend                int64     5 - 1.75k
raises              string    16 distinct values
filename            string    lengths 5 - 66
file_path           string    lengths 12 - 161
imports             string    lengths 0 - 1.77k
total_objects       int64     15 - 15
num_classes         float64   1 - 7
num_imports         int64     0 - 14
num_functions       int64     0 - 15
num_all_bases       float64   0 - 9
num_methods         float64   1 - 14
num_bases           float64   1 - 7
label_desc          string    lengths 69 - 1.05k
label_desc_len      int64     69 - 1.05k
label_id            string    15 distinct values
__index_level_0__   int64     468 - 2.35M
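The records below can also be inspected programmatically. A minimal sketch, assuming the rows are published as a Hugging Face dataset; the dataset id used here is a hypothetical placeholder, not one given in this preview:

# Minimal sketch: inspect the schema and one record of a dataset with the
# columns listed above. The dataset id is a hypothetical placeholder.
from datasets import load_dataset

ds = load_dataset("some-org/python-code-graph-functions", split="train")  # hypothetical id
print(ds.column_names)          # should match the schema table above
row = ds[0]
print(row["name"], row["file_path"], row["label_id"])
print(row["code"])              # the raw source of the function or method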
_fit_sklearn_model_with_active_run
global
null
false
pandas_df
null
null
null
null
mlflow
def _fit_sklearn_model_with_active_run(pandas_df):
    run_id = mlflow.active_run().info.run_id
    _fit_sklearn(pandas_df)
    return mlflow.get_run(run_id)
["def","_fit_sklearn_model_with_active_run","(","pandas_df",")",":","run_id","=","mlflow.active_run","(",")",".info.run_id","_fit_sklearn","(","pandas_df",")","return","mlflow.get_run","(","run_id",")"]
32
35
null
test_spark_datasource_autologging_crossframework.py
mlflow/tests/spark/autologging/datasource/test_spark_datasource_autologging_crossframework.py
import time import numpy import pytest from sklearn.linear_model import LinearRegression import mlflow import mlflow.spark from tests.spark.autologging.utils import _assert_spark_data_logged
15
null
7
8
null
null
null
Use image node_id 3 for calling a global function with example usage: _fit_sklearn_model_with_active_run(pandas_df) and returns: mlflow
135
node_id 3
1,357,874
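The helper above relies on the standard MLflow pattern of reading the active run id and then fetching the finished run. A hedged sketch of that pattern; the `_fit_sklearn` helper from the record is internal to the MLflow test suite, so a placeholder log call stands in for it:

# Sketch of the active-run pattern used by _fit_sklearn_model_with_active_run.
import mlflow

with mlflow.start_run():
    run_id = mlflow.active_run().info.run_id
    mlflow.log_param("placeholder", 1)   # stands in for _fit_sklearn(pandas_df)

finished = mlflow.get_run(run_id)
print(finished.info.status, finished.data.params)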
test_super
TestGaussianNoise
unittest
true
self
A unittest class for testing the GaussianNoise postprocessor.
["A","unittest","class","for","testing","the","GaussianNoise","postprocessor","."]
null
null
null
def test_super(self):
    gan = GaussianNoise(scale=0.1)
    self.assertTrue(gan.is_fitted)
    self.assertFalse(gan._apply_fit)
    self.assertTrue(gan._apply_predict)
    gan.fit(preds=np.array([0.1, 0.2, 0.3]))
["def","test_super","(","self",")",":","gan","=","GaussianNoise","(","scale=0.1",")","self.assertTrue","(","gan.is_fitted",")","self.assertFalse","(","gan._apply_fit",")","self.assertTrue","(","gan._apply_predict",")","gan.fit","(","preds=np.array","(","[","0.1",",","0.2",",","0.3","]",")",")"]
104
109
null
test_gaussian_noise.py
adversarial-robustness-toolbox/tests/defences/test_gaussian_noise.py
import logging import unittest import numpy from art.defences.postprocessor import GaussianNoise from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 7 for calling the TestGaussianNoise obj's underlying member method code with example usage: obj.test_super() without return types
147
node_id 7
235,295
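The test above exercises the ART GaussianNoise postprocessor directly; a short sketch of the same calls outside of unittest, assuming adversarial-robustness-toolbox is installed:

# Sketch mirroring the test_super assertions without the unittest harness.
import numpy as np
from art.defences.postprocessor import GaussianNoise

gan = GaussianNoise(scale=0.1)
print(gan.is_fitted)                        # True: no fitting is required
gan.fit(preds=np.array([0.1, 0.2, 0.3]))    # effectively a no-op, as the test implies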
setUpClass
TestHighConfidence
unittest
true
cls
A unittest class for testing the HighConfidence postprocessor.
["A","unittest","class","for","testing","the","HighConfidence","postprocessor","."]
null
null
null
def setUpClass(cls):
    (x_train, y_train), (x_test, y_test), _, _ = load_dataset("mnist")
    cls.mnist = (x_train, y_train), (x_test, y_test)
["def","setUpClass","(","cls",")",":","(","x_train",",","y_train",")",",","(","x_test",",","y_test",")",",","_",",","_","=","load_dataset","(","``","mnist","''",")","cls.mnist","=","(","x_train",",","y_train",")",",","(","x_test",",","y_test",")"]
37
39
null
test_high_confidence.py
adversarial-robustness-toolbox/tests/defences/test_high_confidence.py
import logging import unittest import numpy from art.defences.postprocessor import HighConfidence from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 1 for calling the TestHighConfidence obj's underlying member method code with example usage: obj.setUpClass(cls) without return types
151
node_id 1
235,296
test_ThompsonSamplerInfeasible
ThompsonSamplerTest
TestCase
true
self
null
null
null
null
null
def test_ThompsonSamplerInfeasible(self) -> None:
    generator = ThompsonSampler(min_weight=0.9)
    generator.fit(
        # pyre-fixme[6]: For 1st param expected `List[List[List[Union[None,
        #  bool, float, int, str]]]]` but got `List[List[List[int]]]`.
        Xs=self.Xs,
        # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got
        #  `List[List[int]]`.
        Ys=self.Ys,
        # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got
        #  `List[List[int]]`.
        Yvars=self.Yvars,
        # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool,
        #  float, int, str]]]` but got `List[List[int]]`.
        parameter_values=self.parameter_values,
        outcome_names=self.outcome_names,
    )
    with self.assertRaises(ModelError):
        generator.gen(
            n=3,
            # pyre-fixme[6]: For 2nd param expected `List[List[Union[None, bool,
            #  float, int, str]]]` but got `List[List[int]]`.
            parameter_values=self.parameter_values,
            objective_weights=np.ones(1),
        )
["def","test_ThompsonSamplerInfeasible","(","self",")","-",">","None",":","generator","=","ThompsonSampler","(","min_weight=0.9",")","generator.fit","(","#","pyre-fixme","[","6","]",":","For","1st","param","expected","`","List","[","List","[","List","[","Union","[","None",",","#","bool",",","float",",","int",",","str","]","]","]","]","`","but","got","`","List","[","List","[","List","[","int","]","]","]","`",".","Xs=self.Xs",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","with","self.assertRaises","(","ModelError",")",":","generator.gen","(","n=3",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","objective_weights=np.ones","(","1",")",",",")"]
174
198
null
test_thompson.py
Ax/ax/models/tests/test_thompson.py
import numpy from ax.exceptions.model import ModelError from ax.models.discrete.thompson import ThompsonSampler from ax.utils.common.testutils import TestCase
15
1
4
0
1
9
1
Use image node_id 6 for calling the ThompsonSamplerTest obj's underlying member method code with example usage: obj.test_ThompsonSamplerInfeasible() without return types
169
node_id 6
9,540
_maybe_clause
global
null
false
clause
null
null
null
null
unknown
def _maybe_clause(clause: Optional[str]) -> Sequence[str]:
    return [clause] if clause is not None else []
["def","_maybe_clause","(","clause",":","Optional","[","str","]",")","-",">","Sequence","[","str","]",":","return","[","clause","]","if","clause","is","not","None","else","[","]"]
40
41
null
store_ext.py
tfx/tfx/orchestration/portable/mlmd/store_ext.py
import collections import itertools from typing import Callable, Mapping, Optional, Sequence, Union from tfx.dsl.compiler import compiler_utils from tfx.dsl.compiler import constants from tfx.orchestration.experimental.core import constants from tfx.orchestration.portable.mlmd import event_lib from tfx.orchestration.portable.mlmd import filter_query_builder import ml_metadata
15
null
9
6
null
null
null
Use image node_id 2 for calling a global function with example usage: _maybe_clause(clause) and returns: unknown
112
node_id 2
2,198,856
__init__
EventSievesConfiguration
SievesConfiguration
true
self
null
null
null
null
EventSievesConfiguration
def __init__(self):
    super(EventSievesConfiguration, self).__init__()
    self.run_evaluation = True
    self.sieves_order = [
        (RelationType.SAME_HEAD_LEMMA, 1.0),
        (RelationType.EXACT_STRING, 1.0),
        (RelationType.WIKIPEDIA_DISAMBIGUATION, 0.1),
        (RelationType.WORD_EMBEDDING_MATCH, 0.7),
        (RelationType.WIKIPEDIA_REDIRECT_LINK, 0.1),
        (RelationType.FUZZY_HEAD_FIT, 0.5),
        (RelationType.FUZZY_FIT, 1.0),
        (RelationType.WITHIN_DOC_COREF, 1.0),
        (RelationType.WIKIPEDIA_TITLE_PARENTHESIS, 0.1),
        (RelationType.WIKIPEDIA_BE_COMP, 0.1),
        (RelationType.WIKIPEDIA_CATEGORY, 0.1),
        (RelationType.VERBOCEAN_MATCH, 0.1),
        (RelationType.WORDNET_DERIVATIONALLY, 1.0),
    ]
["def","__init__","(","self",")",":","super","(","EventSievesConfiguration",",","self",")",".__init__","(",")","self.run_evaluation","=","True","self.sieves_order","=","[","(","RelationType.SAME_HEAD_LEMMA",",","1.0",")",",","(","RelationType.EXACT_STRING",",","1.0",")",",","(","RelationType.WIKIPEDIA_DISAMBIGUATION",",","0.1",")",",","(","RelationType.WORD_EMBEDDING_MATCH",",","0.7",")",",","(","RelationType.WIKIPEDIA_REDIRECT_LINK",",","0.1",")",",","(","RelationType.FUZZY_HEAD_FIT",",","0.5",")",",","(","RelationType.FUZZY_FIT",",","1.0",")",",","(","RelationType.WITHIN_DOC_COREF",",","1.0",")",",","(","RelationType.WIKIPEDIA_TITLE_PARENTHESIS",",","0.1",")",",","(","RelationType.WIKIPEDIA_BE_COMP",",","0.1",")",",","(","RelationType.WIKIPEDIA_CATEGORY",",","0.1",")",",","(","RelationType.VERBOCEAN_MATCH",",","0.1",")",",","(","RelationType.WORDNET_DERIVATIONALLY",",","1.0",")",",","]"]
59
78
null
sieves_config.py
nlp-architect/nlp_architect/models/cross_doc_coref/sieves_config.py
from typing import List, Tuple from nlp_architect.data.cdc_resources.relations.relation_types_enums import RelationType
15
3
2
0
3
1
1
Use image node_id 1 to create a new EventSievesConfiguration object from inherited base classes: SievesConfiguration with example: obj = EventSievesConfiguration()
163
node_id 1
1,443,098
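Since the constructor takes no arguments, the configuration can be instantiated and inspected directly. A short sketch, assuming nlp-architect is installed and using the import path implied by the record's file_path:

# Sketch: build the event sieves configuration and inspect its sieve ordering.
from nlp_architect.models.cross_doc_coref.sieves_config import EventSievesConfiguration

config = EventSievesConfiguration()
print(config.run_evaluation)              # True
for relation_type, threshold in config.sieves_order:
    print(relation_type, threshold)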
simple_segmentation_example
global
null
false
null
null
null
null
null
def simple_segmentation_example():
    "Perfect results!"
    parameters = legion_parameters()
    parameters.eps = 0.02
    parameters.alpha = 0.005
    parameters.betta = 0.1
    parameters.gamma = 7.0
    parameters.teta = 0.9
    parameters.lamda = 0.1
    parameters.teta_x = -0.5
    parameters.teta_p = 7.0
    parameters.Wz = 0.7
    parameters.mu = 0.01
    parameters.fi = 3.0
    parameters.teta_xz = 0.1
    parameters.teta_zx = 0.1
    parameters.ENABLE_POTENTIONAL = False
    template_dynamic_legion(
        81,
        2500,
        2500,
        conn_type=conn_type.GRID_FOUR,
        params=parameters,
        stimulus=[
            1, 1, 1, 0, 0, 0, 0, 0, 0,
            1, 1, 1, 0, 0, 1, 1, 1, 1,
            1, 1, 1, 0, 0, 1, 1, 1, 1,
            0, 0, 0, 0, 0, 0, 1, 1, 1,
            0, 0, 0, 0, 0, 0, 1, 1, 1,
            1, 1, 1, 1, 0, 0, 1, 1, 1,
            1, 1, 1, 1, 0, 0, 0, 0, 0,
            1, 1, 1, 1, 0, 0, 0, 0, 0,
            1, 1, 1, 1, 0, 0, 0, 0, 0,
        ],
        separate_repr=[
            [0, 1, 2, 9, 10, 11, 18, 19, 20],
            [14, 15, 16, 17, 23, 24, 25, 26, 33, 34, 35, 42, 43, 44, 51, 52, 53],
            [45, 46, 47, 48, 54, 55, 56, 57, 63, 64, 65, 66, 72, 73, 74, 75],
        ],
    )
["def","simple_segmentation_example","(",")",":","``","Perfect","results","!","''","parameters","=","legion_parameters","(",")","parameters.eps","=","0.02","parameters.alpha","=","0.005","parameters.betta","=","0.1","parameters.gamma","=","7.0","parameters.teta","=","0.9","parameters.lamda","=","0.1","parameters.teta_x","=","-0.5","parameters.teta_p","=","7.0","parameters.Wz","=","0.7","parameters.mu","=","0.01","parameters.fi","=","3.0","parameters.teta_xz","=","0.1","parameters.teta_zx","=","0.1","parameters.ENABLE_POTENTIONAL","=","False","template_dynamic_legion","(","81",",","2500",",","2500",",","conn_type=conn_type.GRID_FOUR",",","params=parameters",",","stimulus=","[","1",",","1",",","1",",","0",",","0",",","0",",","0",",","0",",","0",",","1",",","1",",","1",",","0",",","0",",","1",",","1",",","1",",","1",",","1",",","1",",","1",",","0",",","0",",","1",",","1",",","1",",","1",",","0",",","0",",","0",",","0",",","0",",","0",",","1",",","1",",","1",",","0",",","0",",","0",",","0",",","0",",","0",",","1",",","1",",","1",",","1",",","1",",","1",",","1",",","0",",","0",",","1",",","1",",","1",",","1",",","1",",","1",",","1",",","0",",","0",",","0",",","0",",","0",",","1",",","1",",","1",",","1",",","0",",","0",",","0",",","0",",","0",",","1",",","1",",","1",",","1",",","0",",","0",",","0",",","0",",","0",",","]",",","separate_repr=","[","[","0",",","1",",","2",",","9",",","10",",","11",",","18",",","19",",","20","]",",","[","14",",","15",",","16",",","17",",","23",",","24",",","25",",","26",",","33",",","34",",","35",",","42",",","43",",","44",",","51",",","52",",","53",",","]",",","[","45",",","46",",","47",",","48",",","54",",","55",",","56",",","57",",","63",",","64",",","65",",","66",",","72",",","73",",","74",",","75",",","]",",","]",",",")"]
94
126
null
legion_examples.py
pyclustering/pyclustering/nnet/examples/legion_examples.py
from pyclustering.utils import draw_dynamics from pyclustering.nnet.legion import legion_network, legion_parameters from pyclustering.nnet import *
15
null
3
12
null
null
null
Use image node_id 12 for calling a global function with example usage: simple_segmentation_example() without return types
121
node_id 12
1,634,375
forward
ConstantGate
torch.nn
true
self,inp
null
null
null
null
idx, score
def forward(self, inp):
    idx = torch.zeros(
        (inp.shape[0], self.top_k),
        dtype=torch.int64,
        device=inp.device,
    )
    score = (
        torch.ones((inp.shape[0], 1, self.top_k), device=inp.device) / 2
    )
    return idx, score
["def","forward","(","self",",","inp",")",":","idx","=","torch.zeros","(","(","inp.shape","[","0","]",",","self.top_k",")",",","dtype=torch.int64",",","device=inp.device",",",")","score","=","(","torch.ones","(","(","inp.shape","[","0","]",",","1",",","self.top_k",")",",","device=inp.device",")","\/","2",")","return","idx",",","score"]
16
20
null
test_zero.py
thu-pacman-faster-moe/tests/test_zero.py
import os import sys import json import torch from fmoe.layers import _fmoe_general_global_forward from fmoe import FMoETransformerMLP from test_ddp import _run_distributed
15
1
7
4
1
2
1
Use image node_id 2 for calling the ConstantGate obj's underlying member method code with example usage: obj.forward(inp) and returns: idx, score
146
node_id 2
2,201,994
__init__
ConstantGate
torch.nn
true
self,d_model,num_expert,world_size,top_k
null
null
null
null
ConstantGate
def __init__(self, d_model, num_expert, world_size, top_k=1):
    super().__init__()
    self.top_k = top_k
["def","__init__","(","self",",","d_model",",","num_expert",",","world_size",",","top_k=1",")",":","super","(",")",".__init__","(",")","self.top_k","=","top_k"]
12
14
null
test_zero.py
thu-pacman-faster-moe/tests/test_zero.py
import os import sys import json import torch from fmoe.layers import _fmoe_general_global_forward from fmoe import FMoETransformerMLP from test_ddp import _run_distributed
15
1
7
4
1
2
1
Use image node_id 1 to create a new ConstantGate object from inherited base classes: torch.nn with example: obj = ConstantGate(d_model, num_expert, world_size, top_k)
166
node_id 1
2,201,993
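ConstantGate is defined inside the test file rather than the fmoe package, so it cannot be imported directly; the two records above are enough to reproduce it as a standalone module. A sketch combining them, assuming only PyTorch:

# Standalone reconstruction of the ConstantGate test helper from the two
# records above; only torch is required.
import torch


class ConstantGate(torch.nn.Module):
    def __init__(self, d_model, num_expert, world_size, top_k=1):
        super().__init__()
        self.top_k = top_k

    def forward(self, inp):
        # every token is routed to expert 0 with a constant score of 0.5
        idx = torch.zeros((inp.shape[0], self.top_k), dtype=torch.int64, device=inp.device)
        score = torch.ones((inp.shape[0], 1, self.top_k), device=inp.device) / 2
        return idx, score


gate = ConstantGate(d_model=8, num_expert=4, world_size=1)
idx, score = gate(torch.randn(3, 8))
print(idx.shape, score.shape)    # torch.Size([3, 1]) torch.Size([3, 1, 1])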
test_GetLastPoint
RandomModelTest
TestCase
true
self
null
null
null
null
null
def test_GetLastPoint(self) -> None:
    generated_points = np.array([[1, 2, 3], [4, 5, 6]])
    RandomModelWithPoints = RandomModel(generated_points=generated_points)
    result = RandomModelWithPoints._get_last_point()
    expected = torch.tensor([[4], [5], [6]])
    comparison = result == expected
    # pyre-fixme[16]: `bool` has no attribute `any`.
    self.assertEqual(comparison.any(), True)
["def","test_GetLastPoint","(","self",")","-",">","None",":","generated_points","=","np.array","(","[","[","1",",","2",",","3","]",",","[","4",",","5",",","6","]","]",")","RandomModelWithPoints","=","RandomModel","(","generated_points=generated_points",")","result","=","RandomModelWithPoints._get_last_point","(",")","expected","=","torch.tensor","(","[","[","4","]",",","[","5","]",",","[","6","]","]",")","comparison","=","result","==","expected","#","pyre-fixme","[","16","]",":","`","bool","`","has","no","attribute","`","any","`",".","self.assertEqual","(","comparison.any","(",")",",","True",")"]
74
81
null
test_random.py
Ax/ax/models/tests/test_random.py
import numpy import torch from ax.models.random.base import RandomModel from ax.utils.common.testutils import TestCase from ax.utils.common.typeutils import not_none
15
1
5
0
1
8
1
Use image node_id 8 for calling the RandomModelTest obj's underlying member method code with example usage: obj.test_GetLastPoint() without return types
152
node_id 8
9,517
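The same generated_points round trip can be reproduced without the test harness. A hedged sketch, assuming Ax is installed and that RandomModel accepts generated_points exactly as the record shows:

# Sketch of the generated_points round trip from test_GetLastPoint.
import numpy as np
from ax.models.random.base import RandomModel

model = RandomModel(generated_points=np.array([[1, 2, 3], [4, 5, 6]]))
last = model._get_last_point()   # private helper exercised by the test
print(last)                      # expected: a column tensor holding 4, 5, 6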
get_config
global
null
false
null
null
null
null
config, args
def get_config():
    parser = argparse.ArgumentParser(
        "Global Config Argument Parser", allow_abbrev=False
    )
    parser.add_argument(
        "--config_yaml",
        required=True,
        type=str,
        help="the configuration file for this experiment.",
    )
    parser.add_argument(
        "--resume",
        type=str,
        help="a specified logging path to resume training.\
 It will fall back to run from initialization if no latest checkpoint are found.",
    )
    parser.add_argument(
        "--test", type=str, help="a specified logging path to test"
    )
    args, _ = parser.parse_known_args()
    config = get_user_config(args.config_yaml)
    add_cfg_to_argparser(config, parser)
    args = parser.parse_args()
    update_cfg_with_argparser(config, args)
    check_config_conflicts(config)
    # print(config)
    return config, args
["def","get_config","(",")",":","parser","=","argparse.ArgumentParser","(","``","Global","Config","Argument","Parser","''",",","allow_abbrev=False",")","parser.add_argument","(","``","--","config_yaml","''",",","required=True",",","type=str",",","help=","''","the","configuration","file","for","this","experiment",".","``",",",")","parser.add_argument","(","``","--","resume","''",",","type=str",",","help=","''","a","specified","logging","path","to","resume","training.\\","It","will","fall","back","to","run","from","initialization","if","no","latest","checkpoint","are","found",".","``",",",")","parser.add_argument","(","``","--","test","''",",","type=str",",","help=","''","a","specified","logging","path","to","test","''",")","args",",","_","=","parser.parse_known_args","(",")","config","=","get_user_config","(","args.config_yaml",")","add_cfg_to_argparser","(","config",",","parser",")","args","=","parser.parse_args","(",")","update_cfg_with_argparser","(","config",",","args",")","check_config_conflicts","(","config",")","#","print","(","config",")","return","config",",","args"]
123
138
null
config.py
OpenPrompt/openprompt/config.py
import argparse from yacs.config import CfgNode import sys from openprompt.utils.utils import check_config_conflicts from .default_config import get_default_config from openprompt.utils.logging import logger import os
15
null
7
8
null
null
null
Use image node_id 8 for calling a global function with example usage: get_config() and returns: config, args
109
node_id 8
152,524
save_config_to_yaml
global
null
false
config
null
null
null
null
null
def save_config_to_yaml(config):
    from contextlib import redirect_stdout

    saved_yaml_path = os.path.join(config.logging.path, "config.yaml")
    with open(saved_yaml_path, "w") as f:
        with redirect_stdout(f):
            print(config.dump())
    logger.info("Config saved as {}".format(saved_yaml_path))
["def","save_config_to_yaml","(","config",")",":","from","contextlib","import","redirect_stdout","saved_yaml_path","=","os.path.join","(","config.logging.path",",","``","config.yaml","''",")","with","open","(","saved_yaml_path",",","``","w","''",")","as","f",":","with","redirect_stdout","(","f",")",":","print","(","config.dump","(",")",")","logger.info","(","``","Config","saved","as","{","}","''",".format","(","saved_yaml_path",")",")"]
116
121
null
config.py
OpenPrompt/openprompt/config.py
import argparse from yacs.config import CfgNode import sys from openprompt.utils.utils import check_config_conflicts from .default_config import get_default_config from openprompt.utils.logging import logger import os
15
null
7
8
null
null
null
Use image node_id 7 for calling a global function with example usage: save_config_to_yaml(config) without return types
118
node_id 7
152,523
update_cfg_with_argparser
global
null
false
cfg,args,prefix
null
null
null
null
null
def update_cfg_with_argparser(cfg, args, prefix=None):
    r"""To support update cfg with command line"""
    for key in cfg:
        value = cfg[key]
        full_key_name = prefix + "." + key if prefix is not None else key
        if isinstance(value, CfgNode):
            update_cfg_with_argparser(value, args, prefix=full_key_name)
        else:
            v = getattr(args, full_key_name)
            if type(v) != type(value):
                raise TypeError
            if v != value:
                cfg[key] = v
                print(
                    "Update key {}, value {} -> {}".format(
                        full_key_name, value, v
                    )
                )
["def","update_cfg_with_argparser","(","cfg",",","args",",","prefix=None",")",":","r","''","''","''","To","support","update","cfg","with","command","line","''","''","''","for","key","in","cfg",":","value","=","cfg","[","key","]","full_key_name","=","(","prefix","+","``",".","''","+","key","if","prefix","is","not","None","else","key",")","if","isinstance","(","value",",","CfgNode",")",":","update_cfg_with_argparser","(","value",",","args",",","prefix=full_key_name",")","else",":","v","=","getattr","(","args",",","full_key_name",")","if","type","(","v",")","!","=","type","(","value",")",":","raise","TypeError","if","v","!","=","value",":","cfg","[","key","]","=","v","print","(","``","Update","key","{","}",",","value","{","}","-",">","{","}","''",".format","(","full_key_name",",","value",",","v",")",")"]
99
113
null
config.py
OpenPrompt/openprompt/config.py
import argparse from yacs.config import CfgNode import sys from openprompt.utils.utils import check_config_conflicts from .default_config import get_default_config from openprompt.utils.logging import logger import os
15
null
7
8
null
null
null
Use image node_id 6 for calling a global function with example usage: update_cfg_with_argparser(cfg, args, prefix) without return types
135
node_id 6
152,522
test_ConvertBounds
RandomModelTest
TestCase
true
self
null
null
null
null
null
def test_ConvertBounds(self) -> None:
    bounds = [(1.0, 2.0), (3.0, 4.0), (5.0, 6.0)]
    bounds_result = self.random_model._convert_bounds(bounds)
    bounds_expected = torch.tensor(
        [[1, 3, 5], [2, 4, 6]], dtype=torch.double
    )
    bounds_comparison = bounds_result == bounds_expected
    # pyre-fixme[16]: `bool` has no attribute `any`.
    self.assertEqual(bounds_comparison.any(), True)
    # pyre-fixme[6]: For 1st param expected `List[Tuple[float, float]]` but got
    #  `None`.
    self.assertEqual(self.random_model._convert_bounds(None), None)
["def","test_ConvertBounds","(","self",")","-",">","None",":","bounds","=","[","(","1.0",",","2.0",")",",","(","3.0",",","4.0",")",",","(","5.0",",","6.0",")","]","bounds_result","=","self.random_model._convert_bounds","(","bounds",")","bounds_expected","=","torch.tensor","(","[","[","1",",","3",",","5","]",",","[","2",",","4",",","6","]","]",",","dtype=torch.double",")","bounds_comparison","=","bounds_result","==","bounds_expected","#","pyre-fixme","[","16","]",":","`","bool","`","has","no","attribute","`","any","`",".","self.assertEqual","(","bounds_comparison.any","(",")",",","True",")","#","pyre-fixme","[","6","]",":","For","1st","param","expected","`","List","[","Tuple","[","float",",","float","]","]","`","but","got","#","`","None","`",".","self.assertEqual","(","self.random_model._convert_bounds","(","None",")",",","None",")"]
63
72
null
test_random.py
Ax/ax/models/tests/test_random.py
import numpy import torch from ax.models.random.base import RandomModel from ax.utils.common.testutils import TestCase from ax.utils.common.typeutils import not_none
15
1
5
0
1
8
1
Use image node_id 7 for calling the RandomModelTest obj's underlying member method code with example usage: obj.test_ConvertBounds() without return types
153
node_id 7
9,516
test_ConvertInequalityConstraints
RandomModelTest
TestCase
true
self
null
null
null
null
null
def test_ConvertInequalityConstraints(self) -> None:
    A = np.array([[1, 2], [3, 4]])
    b = np.array([[5], [6]])
    A_result, b_result = not_none(
        self.random_model._convert_inequality_constraints((A, b))
    )
    A_expected = torch.tensor([[1, 2], [3, 4]], dtype=torch.double)
    b_expected = torch.tensor([[5], [6]], dtype=torch.double)
    A_comparison = A_result == A_expected
    b_comparison = b_result == b_expected
    self.assertEqual(A_comparison.any(), True)
    self.assertEqual(b_comparison.any(), True)
    self.assertEqual(
        self.random_model._convert_inequality_constraints(None), None
    )
["def","test_ConvertInequalityConstraints","(","self",")","-",">","None",":","A","=","np.array","(","[","[","1",",","2","]",",","[","3",",","4","]","]",")","b","=","np.array","(","[","[","5","]",",","[","6","]","]",")","A_result",",","b_result","=","not_none","(","self.random_model._convert_inequality_constraints","(","(","A",",","b",")",")",")","A_expected","=","torch.tensor","(","[","[","1",",","2","]",",","[","3",",","4","]","]",",","dtype=torch.double",")","b_expected","=","torch.tensor","(","[","[","5","]",",","[","6","]","]",",","dtype=torch.double",")","A_comparison","=","A_result","==","A_expected","b_comparison","=","b_result","==","b_expected","self.assertEqual","(","A_comparison.any","(",")",",","True",")","self.assertEqual","(","b_comparison.any","(",")",",","True",")","self.assertEqual","(","self.random_model._convert_inequality_constraints","(","None",")",",","None",")"]
49
61
null
test_random.py
Ax/ax/models/tests/test_random.py
import numpy import torch from ax.models.random.base import RandomModel from ax.utils.common.testutils import TestCase from ax.utils.common.typeutils import not_none
15
1
5
0
1
8
1
Use image node_id 6 for calling the RandomModelTest obj's underlying member method code with example usage: obj.test_ConvertInequalityConstraints() without return types
168
node_id 6
9,515
test_ConvertEqualityConstraints
RandomModelTest
TestCase
true
self
null
null
null
null
null
def test_ConvertEqualityConstraints(self) -> None:
    fixed_features = {3: 0.7, 1: 0.5}
    d = 4
    C, c = not_none(
        self.random_model._convert_equality_constraints(d, fixed_features)
    )
    c_expected = torch.tensor([[0.5], [0.7]], dtype=torch.double)
    C_expected = torch.tensor(
        [[0, 1, 0, 0], [0, 0, 0, 1]], dtype=torch.double
    )
    c_comparison = c == c_expected
    C_comparison = C == C_expected
    self.assertEqual(c_comparison.any(), True)
    self.assertEqual(C_comparison.any(), True)
    self.assertEqual(
        self.random_model._convert_equality_constraints(d, None), None
    )
["def","test_ConvertEqualityConstraints","(","self",")","-",">","None",":","fixed_features","=","{","3",":","0.7",",","1",":","0.5","}","d","=","4","C",",","c","=","not_none","(","self.random_model._convert_equality_constraints","(","d",",","fixed_features",")",")","c_expected","=","torch.tensor","(","[","[","0.5","]",",","[","0.7","]","]",",","dtype=torch.double",")","C_expected","=","torch.tensor","(","[","[","0",",","1",",","0",",","0","]",",","[","0",",","0",",","0",",","1","]","]",",","dtype=torch.double",")","c_comparison","=","c","==","c_expected","C_comparison","=","C","==","C_expected","self.assertEqual","(","c_comparison.any","(",")",",","True",")","self.assertEqual","(","C_comparison.any","(",")",",","True",")","self.assertEqual","(","self.random_model._convert_equality_constraints","(","d",",","None",")",",","None",")"]
35
47
null
test_random.py
Ax/ax/models/tests/test_random.py
import numpy import torch from ax.models.random.base import RandomModel from ax.utils.common.testutils import TestCase from ax.utils.common.typeutils import not_none
15
1
5
0
1
8
1
Use image node_id 5 for calling the RandomModelTest obj's underlying member method code with example usage: obj.test_ConvertEqualityConstraints() without return types
166
node_id 5
9,514
add_cfg_to_argparser
global
null
false
cfg,parser,prefix
null
null
null
null
null
def add_cfg_to_argparser(cfg, parser, prefix=None):
    r"""To support argument parser style in addition to yaml style"""
    for key in cfg:
        value = cfg[key]
        full_key_name = prefix + "." + key if prefix is not None else key
        if isinstance(value, CfgNode):
            add_cfg_to_argparser(value, parser=parser, prefix=full_key_name)
        else:
            if type(value) in [str, int, float]:
                parser.add_argument(
                    "--" + full_key_name,
                    type=type(value),
                    default=value,
                )
            elif type(value) in [tuple, list]:
                parser.add_argument(
                    "--" + full_key_name,
                    type=type(value),
                    default=value,
                    nargs="+",
                )
            elif type(value) == bool:
                parser.add_argument(
                    "--" + full_key_name,
                    action="store_{}".format(not value).lower(),
                )
            elif type(value) == type(None):
                parser.add_argument("--" + full_key_name, default=None)
            else:
                raise NotImplementedError(
                    "The type of config value is not supported"
                )
["def","add_cfg_to_argparser","(","cfg",",","parser",",","prefix=None",")",":","r","''","''","''","To","support","argument","parser","style","in","addition","to","yaml","style","''","''","''","for","key","in","cfg",":","value","=","cfg","[","key","]","full_key_name","=","(","prefix","+","``",".","''","+","key","if","prefix","is","not","None","else","key",")","if","isinstance","(","value",",","CfgNode",")",":","add_cfg_to_argparser","(","value",",","parser=parser",",","prefix=full_key_name",")","else",":","if","type","(","value",")","in","[","str",",","int",",","float","]",":","parser.add_argument","(","``","--","''","+","full_key_name",",","type=type","(","value",")",",","default=value",",",")","elif","type","(","value",")","in","[","tuple",",","list","]",":","parser.add_argument","(","``","--","''","+","full_key_name",",","type=type","(","value",")",",","default=value",",","nargs=","''","+","''",",",")","elif","type","(","value",")","==","bool",":","parser.add_argument","(","``","--","''","+","full_key_name",",","action=","''","store_","{","}","''",".format","(","not","value",")",".lower","(",")",",",")","elif","type","(","value",")","==","type","(","None",")",":","parser.add_argument","(","``","--","''","+","full_key_name",",","default=None",")","else",":","raise","NotImplementedError","(","``","The","type","of","config","value","is","not","supported","''",")"]
78
96
null
config.py
OpenPrompt/openprompt/config.py
import argparse from yacs.config import CfgNode import sys from openprompt.utils.utils import check_config_conflicts from .default_config import get_default_config from openprompt.utils.logging import logger import os
15
null
7
8
null
null
null
Use image node_id 5 for calling a global function with example usage: add_cfg_to_argparser(cfg, parser, prefix) without return types
132
node_id 5
152,521
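The three config.py helpers above mirror every YAML key onto a command-line flag. A small self-contained sketch of the same pattern using only yacs and argparse; the config keys are made up for illustration:

# Self-contained sketch of the CfgNode-to-argparse mirroring pattern used by
# add_cfg_to_argparser / update_cfg_with_argparser. The keys are illustrative.
import argparse
from yacs.config import CfgNode

cfg = CfgNode()
cfg.train = CfgNode()
cfg.train.lr = 0.001
cfg.train.batch_size = 32

parser = argparse.ArgumentParser(allow_abbrev=False)
for section in cfg:
    for key, value in cfg[section].items():
        parser.add_argument("--" + section + "." + key, type=type(value), default=value)

args = parser.parse_args(["--train.lr", "0.01"])
cfg.train.lr = getattr(args, "train.lr")
print(cfg.train.lr)   # 0.01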
_get_node_live_artifacts
global
null
false
store
null
null
null
null
store
def _get_node_live_artifacts(
    store: mlmd.MetadataStore,
    *,
    pipeline_id: str,
    node_id: str,
    pipeline_run_id: Optional[str] = None,
) -> Sequence[mlmd.proto.Artifact]:
    """Gets all LIVE node artifacts.

    Args:
      store: A MetadataStore object.
      pipeline_id: The pipeline ID.
      node_id: The node ID.
      pipeline_run_id: The pipeline run ID that the node belongs to. Only
        artifacts from the specified pipeline run are returned if specified.

    Returns:
      A list of LIVE artifacts of the given pipeline node.
    """
    artifact_state_filter_query = (
        f"state = {mlmd.proto.Artifact.State.Name(mlmd.proto.Artifact.LIVE)}"
    )
    node_context_name = compiler_utils.node_context_name(pipeline_id, node_id)
    node_filter_query = q.And(
        [
            f'contexts_0.type = "{constants.NODE_CONTEXT_TYPE_NAME}"',
            f'contexts_0.name = "{node_context_name}"',
        ]
    )
    artifact_filter_query = q.And(
        [
            node_filter_query,
            artifact_state_filter_query,
        ]
    )
    if pipeline_run_id:
        artifact_filter_query.append(
            q.And(
                [
                    f'contexts_1.type = "{constants.PIPELINE_RUN_CONTEXT_TYPE_NAME}"',
                    f'contexts_1.name = "{pipeline_run_id}"',
                ]
            )
        )
    return store.get_artifacts(
        list_options=mlmd.ListOptions(
            filter_query=str(artifact_filter_query)
        )
    )
["def","_get_node_live_artifacts","(","store",":","mlmd.MetadataStore",",","*",",","pipeline_id",":","str",",","node_id",":","str",",","pipeline_run_id",":","Optional","[","str","]","=","None",",",")","-",">","Sequence","[","mlmd.proto.Artifact","]",":","``","''","''","Gets","all","LIVE","node","artifacts",".","Args",":","store",":","A","MetadataStore","object",".","pipeline_id",":","The","pipeline","ID",".","node_id",":","The","node","ID",".","pipeline_run_id",":","The","pipeline","run","ID","that","the","node","belongs","to",".","Only","artifacts","from","the","specified","pipeline","run","are","returned","if","specified",".","Returns",":","A","list","of","LIVE","artifacts","of","the","given","pipeline","node.","``","''","''","artifact_state_filter_query","=","f","''","state","=","{","mlmd.proto.Artifact.State.Name","(","mlmd.proto.Artifact.LIVE",")","}","''","node_context_name","=","compiler_utils.node_context_name","(","pipeline_id",",","node_id",")","node_filter_query","=","q.And","(","[","f'contexts_0.type","=","``","{","constants.NODE_CONTEXT_TYPE_NAME","}","''","'",",","f'contexts_0.name","=","``","{","node_context_name","}","''","'",",","]",")","artifact_filter_query","=","q.And","(","[","node_filter_query",",","artifact_state_filter_query",",","]",")","if","pipeline_run_id",":","artifact_filter_query.append","(","q.And","(","[","f'contexts_1.type","=","``","{","constants.PIPELINE_RUN_CONTEXT_TYPE_NAME","}","''","'",",","f'contexts_1.name","=","``","{","pipeline_run_id","}","''","'",",","]",")",")","return","store.get_artifacts","(","list_options=mlmd.ListOptions","(","filter_query=str","(","artifact_filter_query",")",")",")"]
44
87
null
store_ext.py
tfx/tfx/orchestration/portable/mlmd/store_ext.py
import collections import itertools from typing import Callable, Mapping, Optional, Sequence, Union from tfx.dsl.compiler import compiler_utils from tfx.dsl.compiler import constants from tfx.orchestration.experimental.core import constants from tfx.orchestration.portable.mlmd import event_lib from tfx.orchestration.portable.mlmd import filter_query_builder import ml_metadata
15
null
9
6
null
null
null
Use image node_id 3 for calling a global function with example usage: _get_node_live_artifacts(store) and returns: store
120
node_id 3
2,198,857
get_node_executions
global
null
false
store
null
null
null
null
store
def get_node_executions(
    store: mlmd.MetadataStore,
    *,
    pipeline_id: str,
    node_id: str,
    pipeline_run_id: Optional[str] = None,
    order_by: mlmd.OrderByField = mlmd.OrderByField.ID,
    is_asc: bool = True,
    execution_states: Optional[Sequence["mlmd.proto.Execution.State"]] = None,
    min_last_update_time_since_epoch: Optional[int] = None,
) -> Sequence[mlmd.proto.Execution]:
    """Gets all node executions.

    Args:
      store: A MetadataStore object.
      pipeline_id: The pipeline ID.
      node_id: The node ID.
      pipeline_run_id: The pipeline run ID that the node belongs to. Only
        executions from the specified pipeline run are returned if specified.
      order_by: The field of execution to order results by.
      is_asc: If True, the results will be returned in the ascending order. If
        False, the result will be returned in the descending order.
      execution_states: The MLMD execution state(s) to pull LIVE artifacts
        from. If not specified or is empty, will consider all MLMD execution
        states.
      min_last_update_time_since_epoch: The minimum update time of MLMD
        executions in the format of milliseconds since the unix epoch. If not
        specified, will consider all MLMD executions.

    Returns:
      A list of executions of the given pipeline node.
    """
    node_context_name = compiler_utils.node_context_name(pipeline_id, node_id)
    node_executions_filter_queries = []
    node_executions_filter_queries.append(
        q.And(
            [
                f'contexts_0.type = "{constants.NODE_CONTEXT_TYPE_NAME}"',
                f'contexts_0.name = "{node_context_name}"',
            ]
        )
    )
    if pipeline_run_id:
        node_executions_filter_queries.append(
            q.And(
                [
                    f'contexts_1.type = "{constants.PIPELINE_RUN_CONTEXT_TYPE_NAME}"',
                    f'contexts_1.name = "{pipeline_run_id}"',
                ]
            )
        )
    if execution_states:
        states_str = ",".join(
            [mlmd.proto.Execution.State.Name(state) for state in execution_states]
        )
        states_filter_query = f"last_known_state IN ({states_str})"
        node_executions_filter_queries.append(states_filter_query)
    if min_last_update_time_since_epoch:
        node_executions_filter_queries.append(
            f"last_update_time_since_epoch >= {min_last_update_time_since_epoch}"
        )
    return store.get_executions(
        list_options=mlmd.ListOptions(
            filter_query=str(q.And(node_executions_filter_queries)),
            order_by=order_by,
            is_asc=is_asc,
        )
    )
["def","get_node_executions","(","store",":","mlmd.MetadataStore",",","*",",","pipeline_id",":","str",",","node_id",":","str",",","pipeline_run_id",":","Optional","[","str","]","=","None",",","order_by",":","mlmd.OrderByField","=","mlmd.OrderByField.ID",",","is_asc",":","bool","=","True",",","execution_states",":","Optional","[","Sequence","[","``","mlmd.proto.Execution.State","''","]","]","=","None",",","min_last_update_time_since_epoch",":","Optional","[","int","]","=","None",",",")","-",">","Sequence","[","mlmd.proto.Execution","]",":","``","''","''","Gets","all","node","executions",".","Args",":","store",":","A","MetadataStore","object",".","pipeline_id",":","The","pipeline","ID",".","node_id",":","The","node","ID",".","pipeline_run_id",":","The","pipeline","run","ID","that","the","node","belongs","to",".","Only","executions","from","the","specified","pipeline","run","are","returned","if","specified",".","order_by",":","The","field","of","execution","to","order","results","by",".","is_asc",":","If","True",",","the","results","will","be","returned","in","the","ascending","order",".","If","False",",","the","result","will","be","returned","in","the","descending","order",".","execution_states",":","The","MLMD","execution","state","(","s",")","to","pull","LIVE","artifacts","from",".","If","not","specified","or","is","empty",",","will","consider","all","MLMD","execution","states",".","min_last_update_time_since_epoch",":","The","minimum","update","time","of","MLMD","executions","in","the","format","of","milliseconds","since","the","unix","epoch",".","If","not","specified",",","will","consider","all","MLMD","executions",".","Returns",":","A","list","of","executions","of","the","given","pipeline","node.","``","''","''","node_context_name","=","compiler_utils.node_context_name","(","pipeline_id",",","node_id",")","node_executions_filter_queries","=","[","]","node_executions_filter_queries.append","(","q.And","(","[","f'contexts_0.type","=","``","{","constants.NODE_CONTEXT_TYPE_NAME","}","''","'",",","f'contexts_0.name","=","``","{","node_context_name","}","''","'",",","]",")",")","if","pipeline_run_id",":","node_executions_filter_queries.append","(","q.And","(","[","f'contexts_1.type","=","``","{","constants.PIPELINE_RUN_CONTEXT_TYPE_NAME","}","''","'",",","f'contexts_1.name","=","``","{","pipeline_run_id","}","''","'",",","]",")",")","if","execution_states",":","states_str","=","``",",","''",".join","(","[","mlmd.proto.Execution.State.Name","(","state",")","for","state","in","execution_states","]",")","states_filter_query","=","f","''","last_known_state","IN","(","{","states_str","}",")","''","node_executions_filter_queries.append","(","states_filter_query",")","if","min_last_update_time_since_epoch",":","node_executions_filter_queries.append","(","f","''","last_update_time_since_epoch",">","=","{","min_last_update_time_since_epoch","}","''",")","return","store.get_executions","(","list_options=mlmd.ListOptions","(","filter_query=str","(","q.And","(","node_executions_filter_queries",")",")",",","order_by=order_by",",","is_asc=is_asc",",",")",")"]
90
154
null
store_ext.py
tfx/tfx/orchestration/portable/mlmd/store_ext.py
import collections import itertools from typing import Callable, Mapping, Optional, Sequence, Union from tfx.dsl.compiler import compiler_utils from tfx.dsl.compiler import constants from tfx.orchestration.experimental.core import constants from tfx.orchestration.portable.mlmd import event_lib from tfx.orchestration.portable.mlmd import filter_query_builder import ml_metadata
15
null
9
6
null
null
null
Use image node_id 4 for calling a global function with example usage: get_node_executions(store) and returns: store
115
node_id 4
2,198,858
__init__
Distribution
object
true
self,generator
Sampling distribution for mystic optimizers
["Sampling","distribution","for","mystic","optimizers"]
generate a sampling distribution with interface dist(size=None) input:: - generator: a 'distribution' method from scipy.stats or numpy.random - rng: a mystic.random_state object [default: random_state('numpy.random')] - args: positional arguments for the distribtution object - kwds: keyword arguments for the distribution object note:: this method only accepts numpy.random methods with the keyword 'size', and only accepts random_state objects built with module='numpy.random' note:: generator may be a method object or a string of 'module.object'; similarly, rng may be a random_state object or a string of 'module' note:: Distributions d1,d2 may be combined by adding data (i.e. d1(n) + d2(n)), or by adding probabilitiies as Distribution(d1,d2); the former uses the addition operator and produces a new unnormalized Distribution, while the latter produces a new Distribution which randomly chooses from the Distributions provided note:: a normalization factor can be incorporated through the multiplication or division operator, and is stored in the Distribution as 'norm'
["generate","a","sampling","distribution","with","interface","dist","(","size=None",")","input",":",":","-","generator",":","a","'distribution","'","method","from","scipy.stats","or","numpy.random","-","rng",":","a","mystic.random_state","object","[","default",":","random_state","(","'numpy.random","'",")","]","-","args",":","positional","arguments","for","the","distribtution","object","-","kwds",":","keyword","arguments","for","the","distribution","object","note",":",":","this","method","only","accepts","numpy.random","methods","with","the","keyword","'size","'",",","and","only","accepts","random_state","objects","built","with","module='numpy.random'","note",":",":","generator","may","be","a","method","object","or","a","string","of","'module.object","'",";","similarly",",","rng","may","be","a","random_state","object","or","a","string","of","'module'","note",":",":","Distributions","d1",",","d2","may","be","combined","by","adding","data","(","i.e",".","d1","(","n",")","+","d2","(","n",")",")",",","or","by","adding","probabilitiies","as","Distribution","(","d1",",","d2",")",";","the","former","uses","the","addition","operator","and","produces","a","new","unnormalized","Distribution",",","while","the","latter","produces","a","new","Distribution","which","randomly","chooses","from","the","Distributions","provided","note",":",":","a","normalization","factor","can","be","incorporated","through","the","multiplication","or","division","operator",",","and","is","stored","in","the","Distribution","as","'norm","'"]
Distribution
def __init__(self, generator=None, *args, **kwds):
    """
generate a sampling distribution with interface dist(size=None)

input::
    - generator: a 'distribution' method from scipy.stats or numpy.random
    - rng: a mystic.random_state object [default: random_state('numpy.random')]
    - args: positional arguments for the distribtution object
    - kwds: keyword arguments for the distribution object

note::
    this method only accepts numpy.random methods with the keyword 'size',
    and only accepts random_state objects built with module='numpy.random'

note::
    generator may be a method object or a string of 'module.object';
    similarly, rng may be a random_state object or a string of 'module'

note::
    Distributions d1,d2 may be combined by adding data (i.e. d1(n) + d2(n)),
    or by adding probabilitiies as Distribution(d1,d2); the former uses
    the addition operator and produces a new unnormalized Distribution,
    while the latter produces a new Distribution which randomly chooses
    from the Distributions provided

note::
    a normalization factor can be incorporated through the multiplication
    or division operator, and is stored in the Distribution as 'norm'
    """
    # XXX: generate Distribution from list of Distributions?
    self.norm = kwds.pop("norm", 1) + 0
    if isinstance(generator, Distribution):
        if kwds:
            msg = "keyword arguments are invalid with {0} instance".format(
                self.__class__.__name__
            )
            raise TypeError(msg)
        if not args:
            self._type = generator._type
            self.rvs = generator.rvs
            self.repr = generator.repr
            self.norm *= generator.norm
            return
        # args can only support additional distribution instances
        for arg in args:
            if not isinstance(arg, Distribution):  # raise TypeError
                generator += arg
        # use choice from multiple distributions
        import numpy as np

        generator = (generator,) + args
        rep = (
            lambda di: "{0}".format(di).split("(", 1)[-1][:-1]
            if di._type == "join"
            else "{0}".format(di)
        )
        sig = ", ".join(rep(i) for i in generator)
        self.repr = lambda cls, fac: (
            "{0}({1}".format(cls, sig)
            + (")" if fac == 1 else ", norm={0})".format(fac))
        )
        self.rvs = lambda size=None: np.choose(
            np.random.choice(range(len(generator)), size=size),
            tuple(d(size) for d in generator),
        )
        self._type = "join"
        return
    from mystic.tools import random_state

    rng = kwds.pop("rng", random_state(module="numpy.random"))
    if isinstance(rng, str):
        rng = random_state(module=rng)
    mod = "numpy.random"
    if generator is None:
        generator = rng.random
        mod = rng.__name__
    elif isinstance(generator, str):
        from importlib import import_module

        if "." in generator:
            mod, generator = generator.rsplit(".", 1)
            mod = import_module(mod)
        else:
            mod = rng
        generator = getattr(mod, generator)
        mod = mod.__name__
    if getattr(generator, "rvs", False):
        d = generator(*args, **kwds)
        self.rvs = lambda size=None: d.rvs(size=size, random_state=rng)
        name = getattr(generator, "name", None)  # XXX: also try __name__?
        mod = "scipy.stats"  # XXX: assumed due to 'd.rvs'
    else:
        d = getattr(rng, generator.__name__)
        self.rvs = lambda size=None: d(size=size, *args, **kwds)
        name = generator.__name__
        mod = getattr(rng, "__name__", "numpy.random")  # XXX: bad default?
    name = "'{0}.{1}'".format(mod, name) if name else ""
    sig = ", ".join(str(i) for i in args)
    kwd = ", ".join("{0}={1}".format(i, j) for i, j in kwds.items())
    # nrm = '' if self.norm == 1 else 'norm={0}'.format(self.norm)
    # kwd = '{0}, {1}'.format(kwd, nrm) if (kwd and nrm) else (kwd or nrm)
    sig = "{0}, {1}".format(sig, kwd) if (sig and kwd) else (sig or kwd)
    if name and sig:
        name += ", "
    # sig = ", rng='{0}')".format(rng.__name__)
    self.repr = lambda cls, fac: (
        "{0}({1}".format(cls, name)
        + sig
        + (
            ""
            if fac == 1
            else ((", " if (name or sig) else "") + "norm={0}".format(fac))
        )
        + ")"
    )
    self._type = "base"
    return
["def","__init__","(","self",",","generator=None",",","*","args",",","*","*","kwds",")",":","``","''","''","generate","a","sampling","distribution","with","interface","dist","(","size=None",")","input",":",":","-","generator",":","a","'distribution","'","method","from","scipy.stats","or","numpy.random","-","rng",":","a","mystic.random_state","object","[","default",":","random_state","(","'numpy.random","'",")","]","-","args",":","positional","arguments","for","the","distribtution","object","-","kwds",":","keyword","arguments","for","the","distribution","object","note",":",":","this","method","only","accepts","numpy.random","methods","with","the","keyword","'size","'",",","and","only","accepts","random_state","objects","built","with","module='numpy.random'","note",":",":","generator","may","be","a","method","object","or","a","string","of","'module.object","'",";","similarly",",","rng","may","be","a","random_state","object","or","a","string","of","'module'","note",":",":","Distributions","d1",",","d2","may","be","combined","by","adding","data","(","i.e",".","d1","(","n",")","+","d2","(","n",")",")",",","or","by","adding","probabilitiies","as","Distribution","(","d1",",","d2",")",";","the","former","uses","the","addition","operator","and","produces","a","new","unnormalized","Distribution",",","while","the","latter","produces","a","new","Distribution","which","randomly","chooses","from","the","Distributions","provided","note",":",":","a","normalization","factor","can","be","incorporated","through","the","multiplication","or","division","operator",",","and","is","stored","in","the","Distribution","as","'norm'","``","''","''","#","XXX",":","generate","Distribution","from","list","of","Distributions","?","self.norm","=","kwds.pop","(","``","norm","''",",","1",")","+","0","if","isinstance","(","generator",",","Distribution",")",":","if","kwds",":","msg","=","``","keyword","arguments","are","invalid","with","{","0","}","instance","''",".format","(","self.__class__.__name__",")","raise","TypeError","(","msg",")","if","not","args",":","self._type","=","generator._type","self.rvs","=","generator.rvs","self.repr","=","generator.repr","self.norm","*","=","generator.norm","return","#","args","can","only","support","additional","distribution","instances","for","arg","in","args",":","if","not","isinstance","(","arg",",","Distribution",")",":","#","raise","TypeError","generator","+=","arg","#","use","choice","from","multiple","distributions","import","numpy","as","np","generator","=","(","generator",",",")","+","args","rep","=","(","lambda","di",":","``","{","0","}","''",".format","(","di",")",".split","(","``","(","``",",","1",")","[","-1","]","[",":","-1","]","if","di._type","==","``","join","''","else","``","{","0","}","''",".format","(","di",")",")","sig","=","``",",","``",".join","(","rep","(","i",")","for","i","in","generator",")","self.repr","=","lambda","cls",",","fac",":","(","``","{","0","}","(","{","1","}","''",".format","(","cls",",","sig",")","+","(","``",")","''","if","fac","==","1","else","``",",","norm=","{","0","}",")","''",".format","(","fac",")",")",")","self.rvs","=","lambda","size=None",":","np.choose","(","np.random.choice","(","range","(","len","(","generator",")",")",",","size=size",")",",","tuple","(","d","(","size",")","for","d","in","generator",")",",",")","self._type","=","``","join","''","return","from","mystic.tools","import","random_state","rng","=","kwds.pop","(","``","rng","''",",","random_state","(","module=","''","numpy.random","''",")",")","if","isinstance","(","rng",",","
str",")",":","rng","=","random_state","(","module=rng",")","mod","=","``","numpy.random","''","if","generator","is","None",":","generator","=","rng.random","mod","=","rng.__name__","elif","isinstance","(","generator",",","str",")",":","from","importlib","import","import_module","if","``",".","''","in","generator",":","mod",",","generator","=","generator.rsplit","(","``",".","``",",","1",")","mod","=","import_module","(","mod",")","else",":","mod","=","rng","generator","=","getattr","(","mod",",","generator",")","mod","=","mod.__name__","if","getattr","(","generator",",","``","rvs","''",",","False",")",":","d","=","generator","(","*","args",",","*","*","kwds",")","self.rvs","=","lambda","size=None",":","d.rvs","(","size=size",",","random_state=rng",")","name","=","getattr","(","generator",",","``","name","''",",","None",")","#","XXX",":","also","try","__name__","?","mod","=","``","scipy.stats","''","#","XXX",":","assumed","due","to","'d.rvs'","else",":","d","=","getattr","(","rng",",","generator.__name__",")","self.rvs","=","lambda","size=None",":","d","(","size=size",",","*","args",",","*","*","kwds",")","name","=","generator.__name__","mod","=","getattr","(","rng",",","``","__name__","''",",","``","numpy.random","''",")","#","XXX",":","bad","default","?","name","=","``","'","{","0","}",".","{","1","}","'","''",".format","(","mod",",","name",")","if","name","else","``","''","sig","=","``",",","``",".join","(","str","(","i",")","for","i","in","args",")","kwd","=","``",",","``",".join","(","``","{","0","}","=","{","1","}","''",".format","(","i",",","j",")","for","i",",","j","in","kwds.items","(",")",")","#","nrm","=","``","if","self.norm","==","1","else","'norm=","{","0","}","'.format","(","self.norm",")","#","kwd","=","'","{","0","}",",","{","1","}","'.format","(","kwd",",","nrm",")","if","(","kwd","and","nrm",")","else","(","kwd","or","nrm",")","sig","=","(","``","{","0","}",",","{","1","}","''",".format","(","sig",",","kwd",")","if","(","sig","and","kwd",")","else","(","sig","or","kwd",")",")","if","name","and","sig",":","name","+=","``",",","``","#","sig","=","``",",","rng=","'","{","0","}","'",")","''",".format","(","rng.__name__",")","self.repr","=","lambda","cls",",","fac",":","(","``","{","0","}","(","{","1","}","''",".format","(","cls",",","name",")","+","sig","+","(","``","''","if","fac","==","1","else","(","(","``",",","``","if","(","name","or","sig",")","else","``","''",")","+","``","norm=","{","0","}","''",".format","(","fac",")",")",")","+","``",")","''",")","self._type","=","``","base","''","return"]
55
144
null
__init__.py
mystic/mystic/math/__init__.py
from .poly import polyeval, poly1d from .grid import gridpts, samplepts, fillpts from .approx import almostEqual, tolerance from .approx import approx_equal from .None import discrete from .None import distance
15
1
6
0
1
7
1
Use image node_id 1 to create a new Distribution object from inherited base classes: object with example: obj = Distribution(generator)
135
node_id 1
1,406,721
__call__
Distribution
object
true
self,size
Sampling distribution for mystic optimizers
["Sampling","distribution","for","mystic","optimizers"]
generate a sample of given size (tuple) from the distribution
["generate","a","sample","of","given","size","(","tuple",")","from","the","distribution"]
unknown
def __call__(self, size=None):
    """generate a sample of given size (tuple) from the distribution"""
    return self.norm * self.rvs(size)
["def","__call__","(","self",",","size=None",")",":","``","''","''","generate","a","sample","of","given","size","(","tuple",")","from","the","distribution","''","''","''","return","self.norm","*","self.rvs","(","size",")"]
145
147
null
__init__.py
mystic/mystic/math/__init__.py
from .poly import polyeval, poly1d from .grid import gridpts, samplepts, fillpts from .approx import almostEqual, tolerance from .approx import approx_equal from .None import discrete from .None import distance
15
1
6
0
1
7
1
Use image node_id 2 for calling the Distribution obj's underlying member method code with example usage: obj.__call__(size) and returns: unknown
144
node_id 2
1,406,722
__repr__
Distribution
object
true
self
Sampling distribution for mystic optimizers
["Sampling","distribution","for","mystic","optimizers"]
null
null
self
def __repr__(self):
    return self.repr(self.__class__.__name__, self.norm)
["def","__repr__","(","self",")",":","return","self.repr","(","self.__class__.__name__",",","self.norm",")"]
148
149
null
__init__.py
mystic/mystic/math/__init__.py
from .poly import polyeval, poly1d from .grid import gridpts, samplepts, fillpts from .approx import almostEqual, tolerance from .approx import approx_equal from .None import discrete from .None import distance
15
1
6
0
1
7
1
Use image node_id 3 for calling the Distribution obj's underlying member method code with example usage: obj.__repr__() and returns: self
137
node_id 3
1,406,723
__add__
Distribution
object
true
self,dist
Sampling distribution for mystic optimizers
["Sampling","distribution","for","mystic","optimizers"]
null
null
new
def __add__(self, dist):
    if not isinstance(dist, Distribution):
        msg = "unsupported operand type(s) for +: '{0}' and '{1}'".format(
            self.__class__.__name__, type(dist)
        )
        raise TypeError(msg)
    # add data from multiple distributions
    new = Distribution()
    first = "{0}".format(self)
    second = "{0}".format(dist)
    if self._type == "add":
        first = first.split("(", 1)[-1][:-1]
    if dist._type == "add":
        second = second.split("(", 1)[-1][:-1]
    new.repr = lambda cls, fac: (
        "{0}({1} + {2}".format(cls, first, second)
        + (")" if fac == 1 else ", norm={0})".format(fac))
    )
    new.rvs = lambda size=None: (self(size) + dist(size))
    new._type = "add"
    new.norm = 1
    return new
["def","__add__","(","self",",","dist",")",":","if","not","isinstance","(","dist",",","Distribution",")",":","msg","=","``","unsupported","operand","type","(","s",")","for","+",":","'","{","0","}","'","and","'","{","1","}","'","''",".format","(","self.__class__.__name__",",","type","(","dist",")",")","raise","TypeError","(","msg",")","#","add","data","from","multiple","distributions","new","=","Distribution","(",")","first","=","``","{","0","}","''",".format","(","self",")","second","=","``","{","0","}","''",".format","(","dist",")","if","self._type","==","``","add","''",":","first","=","first.split","(","``","(","``",",","1",")","[","-1","]","[",":","-1","]","if","dist._type","==","``","add","''",":","second","=","second.split","(","``","(","``",",","1",")","[","-1","]","[",":","-1","]","new.repr","=","lambda","cls",",","fac",":","(","``","{","0","}","(","{","1","}","+","{","2","}","''",".format","(","cls",",","first",",","second",")","+","(","``",")","''","if","fac","==","1","else","``",",","norm=","{","0","}",")","''",".format","(","fac",")",")",")","new.rvs","=","lambda","size=None",":","(","self","(","size",")","+","dist","(","size",")",")","new._type","=","``","add","''","new.norm","=","1","return","new"]
150
164
null
__init__.py
mystic/mystic/math/__init__.py
from .poly import polyeval, poly1d from .grid import gridpts, samplepts, fillpts from .approx import almostEqual, tolerance from .approx import approx_equal from .None import discrete from .None import distance
15
1
6
0
1
7
1
Use image node_id 4 for calling the Distribution obj's underlying member method code with example usage: obj.__add__(dist) and returns: new
139
node_id 4
1,406,724
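A hedged sketch of combining two distributions through the __add__ overload above; the constructor calls are again assumptions, while the '+' and repr behavior follow the recorded code.
# Hedged sketch: d1/d2 construction is assumed; '+' semantics come from __add__ above
import scipy.stats as ss
from mystic.math import Distribution

d1 = Distribution(ss.norm, 0.0, 1.0)
d2 = Distribution(ss.uniform, 0.0, 2.0)
d3 = d1 + d2          # samples from d3 are d1(size) + d2(size)
print(d3)             # repr reads like "Distribution(<first> + <second>)"
x = d3(size=5)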
convert_cfg_to_dict
global
null
false
cfg_node,key_list
null
null
null
null
cfg_node,cfg_dict
def convert_cfg_to_dict(cfg_node, key_list=[]): """Convert a config node to dictionary""" if not isinstance(cfg_node, CfgNode): if type(cfg_node) not in _VALID_TYPES: print( "Key {} with value {} is not a valid type; valid types: {}".format( ".".join(key_list), type(cfg_node), _VALID_TYPES ), ) return cfg_node else: cfg_dict = dict(cfg_node) for k, v in cfg_dict.items(): cfg_dict[k] = convert_cfg_to_dict(v, key_list + [k]) return cfg_dict
["def","convert_cfg_to_dict","(","cfg_node",",","key_list=","[","]",")",":","``","''","''","Convert","a","config","node","to","dictionary","''","''","''","if","not","isinstance","(","cfg_node",",","CfgNode",")",":","if","type","(","cfg_node",")","not","in","_VALID_TYPES",":","print","(","``","Key","{","}","with","value","{","}","is","not","a","valid","type",";","valid","types",":","{","}","''",".format","(","``",".","``",".join","(","key_list",")",",","type","(","cfg_node",")",",","_VALID_TYPES",")",",",")","return","cfg_node","else",":","cfg_dict","=","dict","(","cfg_node",")","for","k",",","v","in","cfg_dict.items","(",")",":","cfg_dict","[","k","]","=","convert_cfg_to_dict","(","v",",","key_list","+","[","k","]",")","return","cfg_dict"]
65
76
null
config.py
OpenPrompt/openprompt/config.py
import argparse from yacs.config import CfgNode import sys from openprompt.utils.utils import check_config_conflicts from .default_config import get_default_config from openprompt.utils.logging import logger import os
15
null
7
8
null
null
null
Use image node_id 4 for calling a global function with example usage: convert_cfg_to_dict(cfg_node, key_list) and returns: cfg_node, cfg_dict
141
node_id 4
152,520
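A minimal sketch of calling convert_cfg_to_dict on a nested yacs CfgNode; the import location and config keys are illustrative assumptions.
# Hedged sketch: import path and keys are assumptions; recursion handles nested CfgNodes
from yacs.config import CfgNode
from openprompt.config import convert_cfg_to_dict   # assumed import location

cfg = CfgNode()
cfg.MODEL = CfgNode()
cfg.MODEL.NAME = "bert"
cfg.TRAIN = CfgNode()
cfg.TRAIN.LR = 1e-4
flat = convert_cfg_to_dict(cfg)   # {'MODEL': {'NAME': 'bert'}, 'TRAIN': {'LR': 0.0001}}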
get_live_output_artifacts_of_node_by_output_key
global
null
false
store
null
null
null
null
output_artifacts_by_output_key,dict,dict
def get_live_output_artifacts_of_node_by_output_key( store: mlmd.MetadataStore, *, pipeline_id: str, node_id: str, pipeline_run_id: Optional[str] = None, execution_states: Optional[ Sequence["mlmd.proto.Execution.State"] ] = None, ) -> Mapping[str, Sequence[Sequence[mlmd.proto.Artifact]]]: """Get LIVE output artifacts of the given node grouped by output key. The LIVE output artifacts associated with an output key are represented as a list of a list of artifacts. 1. The outer list represents artifacts generated across all executions. 2. The inner list represents artifacts generated by one execution. 3. Elements in the outer list are returned in descending order of the creation time of the execution associated with them. 4. Elements in the inner list have no order guarantee. 5. If no LIVE output artifacts found for one execution, an empty list will be returned. Args: store: A MetadataStore object. pipeline_id: A pipeline ID. node_id: A node ID. pipeline_run_id: The pipeline run ID that the node belongs to. Only artifacts from the specified pipeline run are returned if specified. execution_states: The MLMD execution state(s) to pull LIVE artifacts from. If not specified or is empty, will consider MLMD execution states in [COMPLETE, CACHED]. Returns: A mapping from output key to all output artifacts from the given node. """ # Step 1: Get LIVE artifacts attributed to node with `node_id`. live_artifacts = _get_node_live_artifacts( store, pipeline_id=pipeline_id, node_id=node_id, pipeline_run_id=pipeline_run_id, ) if not live_artifacts: return {} # Step 2: Get executions associated with node that created `live_artifacts` # ordered by execution creation time in descending order. # These executions should satisfy the constraint: # min (execution update time) >= min (artifact create time) min_live_artifact_create_time = min( [a.create_time_since_epoch for a in live_artifacts], default=0 ) # Within one transaction that updates both artifacts and execution, the # timestamp of execution is larger or equal than that of the artifacts. # Apply time skew for the artifacts created before cl/574333630 is rolled out. # TODO(b/275231956): Remove the following 2 lines if we are sure that there # are no more artifacts older than the timestamp. if ( min_live_artifact_create_time < orchestration_constants.TIME_SKEW_DATE ): min_live_artifact_create_time -= 24 * 3600 * 1000 executions_ordered_by_desc_creation_time = get_node_executions( store, pipeline_id=pipeline_id, node_id=node_id, pipeline_run_id=pipeline_run_id, order_by=mlmd.OrderByField.CREATE_TIME, is_asc=False, execution_states=execution_states, min_last_update_time_since_epoch=min_live_artifact_create_time, ) if not executions_ordered_by_desc_creation_time: return {} # Step 3: Get output events by executions obtained in step 2. events_by_executions = store.get_events_by_execution_ids( _ids(executions_ordered_by_desc_creation_time) ) output_events = [ e for e in events_by_executions if event_lib.is_valid_output_event(e) ] # Step 4: Construct and return `output_artifacts_by_output_key` from events. # # Create a mapping from execution_id to an empty list first to make sure # iteration orders of output_events_by_execution_id and # output_artifacts_map_by_execution_id are both in desc order of execution's # creation time. # # The desc order is guaranteed by execution_ids and dict is guaranteed to be # iterated in the insertion order of keys. output_events_by_execution_id = { execution.id: [] for execution in executions_ordered_by_desc_creation_time } for event in output_events: output_events_by_execution_id[event.execution_id].append( event ) artifact_ids_by_output_key_map_by_execution_id = {} for exec_id, events in output_events_by_execution_id.items(): output_artifacts_map = ( event_lib.reconstruct_artifact_id_multimap(events) ) artifact_ids_by_output_key_map_by_execution_id[ exec_id ] = output_artifacts_map output_artifacts_by_output_key = collections.defaultdict(list) # Keep only LIVE output artifacts when constructing the result. live_artifacts_by_id = {a.id: a for a in live_artifacts} for ( artifact_ids_by_output_key ) in artifact_ids_by_output_key_map_by_execution_id.values(): for ( output_key, artifact_ids, ) in artifact_ids_by_output_key.items(): live_output_artifacts = [ live_artifacts_by_id[artifact_id] for artifact_id in artifact_ids if artifact_id in live_artifacts_by_id ] output_artifacts_by_output_key[output_key].append( live_output_artifacts ) return output_artifacts_by_output_key
["def","get_live_output_artifacts_of_node_by_output_key","(","store",":","mlmd.MetadataStore",",","*",",","pipeline_id",":","str",",","node_id",":","str",",","pipeline_run_id",":","Optional","[","str","]","=","None",",","execution_states",":","Optional","[","Sequence","[","``","mlmd.proto.Execution.State","''","]","]","=","None",",",")","-",">","Mapping","[","str",",","Sequence","[","Sequence","[","mlmd.proto.Artifact","]","]","]",":","``","''","''","Get","LIVE","output","artifacts","of","the","given","node","grouped","by","output","key",".","The","LIVE","output","artifacts","associated","with","an","output","key","are","represented","as","a","list","of","a","list","of","artifacts",".","1",".","The","outer","list","represents","artifacts","generated","across","all","executions",".","2",".","The","inner","list","represents","artifacts","generated","by","one","execution",".","3",".","Elements","in","the","outer","list","are","returned","in","descending","order","of","the","creation","time","of","the","execution","associated","with","them",".","4",".","Elements","in","the","inner","list","have","no","order","guarantee",".","5",".","If","no","LIVE","output","artifacts","found","for","one","execution",",","an","empty","list","will","be","returned",".","Args",":","store",":","A","MetadataStore","object",".","pipeline_id",":","A","pipeline","ID",".","node_id",":","A","node","ID",".","pipeline_run_id",":","The","pipeline","run","ID","that","the","node","belongs","to",".","Only","artifacts","from","the","specified","pipeline","run","are","returned","if","specified",".","execution_states",":","The","MLMD","execution","state","(","s",")","to","pull","LIVE","artifacts","from",".","If","not","specified","or","is","empty",",","will","consider","MLMD","execution","states","in","[","COMPLETE",",","CACHED","]",".","Returns",":","A","mapping","from","output","key","to","all","output","artifacts","from","the","given","node.","``","''","''","#","Step","1",":","Get","LIVE","artifacts","attributed","to","node","with","`","node_id","`",".","live_artifacts","=","_get_node_live_artifacts","(","store",",","pipeline_id=pipeline_id",",","node_id=node_id",",","pipeline_run_id=pipeline_run_id",",",")","if","not","live_artifacts",":","return","{","}","#","Step","2",":","Get","executions","associated","with","node","that","created","`","live_artifacts","`","#","ordered","by","execution","creation","time","in","descending","order",".","#","These","executions","should","satisfy","the","constraint",":","#","min","(","execution","update","time",")",">","=","min","(","artifact","create","time",")","min_live_artifact_create_time","=","min","(","[","a.create_time_since_epoch","for","a","in","live_artifacts","]",",","default=0",")","#","Within","one","transaction","that","updates","both","artifacts","and","execution",",","the","#","timestamp","of","execution","is","larger","or","equal","than","that","of","the","artifacts",".","#","Apply","time","skew","for","the","artifacts","created","before","cl\/574333630","is","rolled","out",".","#","TODO","(","b\/275231956",")",":","Remove","the","following","2","lines","if","we","are","sure","that","there","#","are","no","more","artifacts","older","than","the","timestamp",".","if","(","min_live_artifact_create_time","<","orchestration_constants.TIME_SKEW_DATE",")",":","min_live_artifact_create_time","-=","24","*","3600","*","1000","executions_ordered_by_desc_creation_time","=","get_node_executions","(","store",",","pipeline_id=pipeline_id",",","node_id=node_id",",","pipeline_run_id=pipeline
_run_id",",","order_by=mlmd.OrderByField.CREATE_TIME",",","is_asc=False",",","execution_states=execution_states",",","min_last_update_time_since_epoch=min_live_artifact_create_time",",",")","if","not","executions_ordered_by_desc_creation_time",":","return","{","}","#","Step","3",":","Get","output","events","by","executions","obtained","in","step","2.","events_by_executions","=","store.get_events_by_execution_ids","(","_ids","(","executions_ordered_by_desc_creation_time",")",")","output_events","=","[","e","for","e","in","events_by_executions","if","event_lib.is_valid_output_event","(","e",")","]","#","Step","4",":","Construct","and","return","`","output_artifacts_by_output_key","`","from","events",".","#","#","Create","a","mapping","from","execution_id","to","an","empty","list","first","to","make","sure","#","iteration","orders","of","output_events_by_execution_id","and","#","output_artifacts_map_by_execution_id","are","both","in","desc","order","of","execution's","#","creation","time",".","#","#","The","desc","order","is","guaranteed","by","execution_ids","and","dict","is","guaranteed","to","be","#","iterated","in","the","insertion","order","of","keys",".","output_events_by_execution_id","=","{","execution.id",":","[","]","for","execution","in","executions_ordered_by_desc_creation_time","}","for","event","in","output_events",":","output_events_by_execution_id","[","event.execution_id","]",".append","(","event",")","artifact_ids_by_output_key_map_by_execution_id","=","{","}","for","exec_id",",","events","in","output_events_by_execution_id.items","(",")",":","output_artifacts_map","=","(","event_lib.reconstruct_artifact_id_multimap","(","events",")",")","artifact_ids_by_output_key_map_by_execution_id","[","exec_id","]","=","output_artifacts_map","output_artifacts_by_output_key","=","collections.defaultdict","(","list",")","#","Keep","only","LIVE","output","artifacts","when","constructing","the","result",".","live_artifacts_by_id","=","{","a.id",":","a","for","a","in","live_artifacts","}","for","(","artifact_ids_by_output_key",")","in","artifact_ids_by_output_key_map_by_execution_id.values","(",")",":","for","(","output_key",",","artifact_ids",",",")","in","artifact_ids_by_output_key.items","(",")",":","live_output_artifacts","=","[","live_artifacts_by_id","[","artifact_id","]","for","artifact_id","in","artifact_ids","if","artifact_id","in","live_artifacts_by_id","]","output_artifacts_by_output_key","[","output_key","]",".append","(","live_output_artifacts",")","return","output_artifacts_by_output_key"]
157
273
null
store_ext.py
tfx/tfx/orchestration/portable/mlmd/store_ext.py
import collections import itertools from typing import Callable, Mapping, Optional, Sequence, Union from tfx.dsl.compiler import compiler_utils from tfx.dsl.compiler import constants from tfx.orchestration.experimental.core import constants from tfx.orchestration.portable.mlmd import event_lib from tfx.orchestration.portable.mlmd import filter_query_builder import ml_metadata
15
null
9
6
null
null
null
Use image node_id 5 for calling a global function with example usage: get_live_output_artifacts_of_node_by_output_key(store) and returns: output_artifacts_by_output_key, dict, dict
180
node_id 5
2,198,859
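A hedged call sketch for the MLMD helper above; the store configuration, import path, and pipeline/node identifiers are placeholders, not values from this record.
# Hedged sketch: connection config and identifiers are placeholders
import ml_metadata as mlmd
from ml_metadata.proto import metadata_store_pb2
from tfx.orchestration.portable.mlmd import store_ext   # assumed import location

connection_config = metadata_store_pb2.ConnectionConfig()   # fill in a real backend
store = mlmd.MetadataStore(connection_config)
artifacts_by_key = store_ext.get_live_output_artifacts_of_node_by_output_key(
    store,
    pipeline_id="my_pipeline",
    node_id="Trainer",
)
for output_key, per_execution in artifacts_by_key.items():
    print(output_key, [len(batch) for batch in per_execution])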
test_zero_fwd
global
null
false
num_expert,batch_size,d_hidden,world_size
null
null
null
null
null
def test_zero_fwd( num_expert=2, batch_size=4, d_hidden=8, world_size=1 ): _run_distributed( "_test_zero_fwd", 1, { "num_expert": num_expert, "batch_size": batch_size, "d_hidden": d_hidden, }, script=__file__, )
["def","test_zero_fwd","(","num_expert=2",",","batch_size=4",",","d_hidden=8",",","world_size=1",")",":","_run_distributed","(","``","_test_zero_fwd","''",",","1",",","{","``","num_expert","''",":","num_expert",",","``","batch_size","''",":","batch_size",",","``","d_hidden","''",":","d_hidden",",","}",",","script=__file__",",",")"]
23
32
null
test_zero.py
thu-pacman-faster-moe/tests/test_zero.py
import os import sys import json import torch from fmoe.layers import _fmoe_general_global_forward from fmoe import FMoETransformerMLP from test_ddp import _run_distributed
15
null
7
4
null
null
null
Use image node_id 1 for calling a global function with example usage: test_zero_fwd(num_expert, batch_size, d_hidden, world_size) without return types
150
node_id 1
2,201,995
_test_zero_fwd
global
null
false
num_expert,batch_size,d_hidden,world_size
null
null
null
null
null
def _test_zero_fwd( num_expert=2, batch_size=4, d_hidden=8, world_size=1 ): inp = torch.rand(batch_size, d_hidden).cuda() gate = torch.zeros(batch_size, dtype=torch.int64).cuda() x = _fmoe_general_global_forward( inp, gate, lambda x, y: x, num_expert, world_size )
["def","_test_zero_fwd","(","num_expert=2",",","batch_size=4",",","d_hidden=8",",","world_size=1",")",":","inp","=","torch.rand","(","batch_size",",","d_hidden",")",".cuda","(",")","gate","=","torch.zeros","(","batch_size",",","dtype=torch.int64",")",".cuda","(",")","x","=","_fmoe_general_global_forward","(","inp",",","gate",",","lambda","x",",","y",":","x",",","num_expert",",","world_size",")"]
34
38
null
test_zero.py
thu-pacman-faster-moe/tests/test_zero.py
import os import sys import json import torch from fmoe.layers import _fmoe_general_global_forward from fmoe import FMoETransformerMLP from test_ddp import _run_distributed
15
null
7
4
null
null
null
Use image node_id 2 for calling a global function with example usage: _test_zero_fwd(num_expert, batch_size, d_hidden, world_size) without return types
151
node_id 2
2,201,996
test_zero_transformer
global
null
false
num_expert,batch_size,d_hidden,world_size
null
null
null
null
null
def test_zero_transformer( num_expert=2, batch_size=4, d_hidden=8, world_size=1 ): _run_distributed( "_test_zero_transformer", 1, { "num_expert": num_expert, "batch_size": batch_size, "d_hidden": d_hidden, }, script=__file__, )
["def","test_zero_transformer","(","num_expert=2",",","batch_size=4",",","d_hidden=8",",","world_size=1",")",":","_run_distributed","(","``","_test_zero_transformer","''",",","1",",","{","``","num_expert","''",":","num_expert",",","``","batch_size","''",":","batch_size",",","``","d_hidden","''",":","d_hidden",",","}",",","script=__file__",",",")"]
41
50
null
test_zero.py
thu-pacman-faster-moe/tests/test_zero.py
import os import sys import json import torch from fmoe.layers import _fmoe_general_global_forward from fmoe import FMoETransformerMLP from test_ddp import _run_distributed
15
null
7
4
null
null
null
Use image node_id 3 for calling a global function with example usage: test_zero_transformer(num_expert, batch_size, d_hidden, world_size) without return types
158
node_id 3
2,201,997
_test_zero_transformer
global
null
false
num_expert,batch_size,d_hidden,world_size
null
null
null
null
null
def _test_zero_transformer( num_expert=2, batch_size=4, d_hidden=8, world_size=1 ): inp = torch.rand(batch_size, d_hidden).cuda() mask = torch.zeros(inp.shape[0], dtype=torch.long) mask[1] = 1 mask_dict = {1: torch.zeros(d_hidden).cuda()} model = FMoETransformerMLP( num_expert, d_hidden, d_hidden * 4, world_size, gate=ConstantGate, mask=mask, mask_dict=mask_dict, ).cuda() oup = model(inp)
["def","_test_zero_transformer","(","num_expert=2",",","batch_size=4",",","d_hidden=8",",","world_size=1",")",":","inp","=","torch.rand","(","batch_size",",","d_hidden",")",".cuda","(",")","mask","=","torch.zeros","(","inp.shape","[","0","]",",","dtype=torch.long",")","mask","[","1","]","=","1","mask_dict","=","{","1",":","torch.zeros","(","d_hidden",")",".cuda","(",")","}","model","=","FMoETransformerMLP","(","num_expert",",","d_hidden",",","d_hidden","*","4",",","world_size",",","gate=ConstantGate",",","mask=mask",",","mask_dict=mask_dict",",",")",".cuda","(",")","oup","=","model","(","inp",")"]
52
61
null
test_zero.py
thu-pacman-faster-moe/tests/test_zero.py
import os import sys import json import torch from fmoe.layers import _fmoe_general_global_forward from fmoe import FMoETransformerMLP from test_ddp import _run_distributed
15
null
7
4
null
null
null
Use image node_id 4 for calling a global function with example usage: _test_zero_transformer(num_expert, batch_size, d_hidden, world_size) without return types
159
node_id 4
2,201,998
setUp
TestHighConfidence
unittest
true
self
A unittest class for testing the HighConfidence postprocessor.
["A","unittest","class","for","testing","the","HighConfidence","postprocessor","."]
null
null
null
def setUp(self): master_seed(seed=1234)
["def","setUp","(","self",")",":","master_seed","(","seed=1234",")"]
41
42
null
test_high_confidence.py
adversarial-robustness-toolbox/tests/defences/test_high_confidence.py
import logging import unittest import numpy from art.defences.postprocessor import HighConfidence from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 2 for calling the TestHighConfidence obj's underlying member method code with example usage: obj.setUp() without return types
143
node_id 2
235,297
test_difference
TestIntervalIndex
null
true
self,closed,sort
null
null
null
null
null
def test_difference(self, closed, sort): index = IntervalIndex.from_arrays( [1, 0, 3, 2], [1, 2, 3, 4], closed=closed ) result = index.difference(index[:1], sort=sort) expected = index[1:] if sort is None: expected = expected.sort_values() tm.assert_index_equal(result, expected) # GH 19101: empty result, same dtype result = index.difference(index, sort=sort) expected = empty_index(dtype="int64", closed=closed) tm.assert_index_equal(result, expected) # GH 19101: empty result, different dtypes other = IntervalIndex.from_arrays( index.left.astype("float64"), index.right, closed=closed ) result = index.difference(other, sort=sort) tm.assert_index_equal(result, expected)
["def","test_difference","(","self",",","closed",",","sort",")",":","index","=","IntervalIndex.from_arrays","(","[","1",",","0",",","3",",","2","]",",","[","1",",","2",",","3",",","4","]",",","closed=closed",")","result","=","index.difference","(","index","[",":1","]",",","sort=sort",")","expected","=","index","[","1",":","]","if","sort","is","None",":","expected","=","expected.sort_values","(",")","tm.assert_index_equal","(","result",",","expected",")","#","GH","19101",":","empty","result",",","same","dtype","result","=","index.difference","(","index",",","sort=sort",")","expected","=","empty_index","(","dtype=","''","int64","''",",","closed=closed",")","tm.assert_index_equal","(","result",",","expected",")","#","GH","19101",":","empty","result",",","different","dtypes","other","=","IntervalIndex.from_arrays","(","index.left.astype","(","``","float64","''",")",",","index.right",",","closed=closed",")","result","=","index.difference","(","other",",","sort=sort",")","tm.assert_index_equal","(","result",",","expected",")"]
131
149
null
test_setops.py
pandas/pandas/tests/indexes/interval/test_setops.py
import numpy import pytest from pandas import Index, IntervalIndex, Timestamp, interval_range import pandas._testing
15
1
4
2
0
8
null
Use image node_id 6 for calling the TestIntervalIndex obj's underlying member method code with example usage: obj.test_difference(closed, sort) without return types
164
node_id 6
1,514,638
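The set-operation behavior exercised by this test can be reproduced standalone with the public pandas API; a quick illustration:
# Standalone illustration of IntervalIndex.difference, mirroring the test above
import pandas as pd

idx = pd.IntervalIndex.from_arrays([1, 0, 3, 2], [1, 2, 3, 4], closed="right")
print(idx.difference(idx[:1]))   # drops the first interval; result is sorted by default
print(idx.difference(idx))       # empty IntervalIndex with the same closed side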
test_ThompsonSamplerUniformWeights
ThompsonSamplerTest
TestCase
true
self
null
null
null
null
null
def test_ThompsonSamplerUniformWeights(self) -> None: generator = ThompsonSampler(min_weight=0.0, uniform_weights=True) generator.fit( # pyre-fixme[6]: For 1st param expected `List[List[List[Union[None, # bool, float, int, str]]]]` but got `List[List[List[int]]]`. Xs=self.Xs, # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got # `List[List[int]]`. Ys=self.Ys, # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got # `List[List[int]]`. Yvars=self.Yvars, # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, outcome_names=self.outcome_names, ) arms, weights, _ = generator.gen( n=3, # pyre-fixme[6]: For 2nd param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, objective_weights=np.ones(1), ) self.assertEqual(arms, [[4, 4], [3, 3], [2, 2]]) for weight, expected_weight in zip(weights, [1.0, 1.0, 1.0]): self.assertAlmostEqual(weight, expected_weight, 1)
["def","test_ThompsonSamplerUniformWeights","(","self",")","-",">","None",":","generator","=","ThompsonSampler","(","min_weight=0.0",",","uniform_weights=True",")","generator.fit","(","#","pyre-fixme","[","6","]",":","For","1st","param","expected","`","List","[","List","[","List","[","Union","[","None",",","#","bool",",","float",",","int",",","str","]","]","]","]","`","but","got","`","List","[","List","[","List","[","int","]","]","]","`",".","Xs=self.Xs",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","arms",",","weights",",","_","=","generator.gen","(","n=3",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","objective_weights=np.ones","(","1",")",",",")","self.assertEqual","(","arms",",","[","[","4",",","4","]",",","[","3",",","3","]",",","[","2",",","2","]","]",")","for","weight",",","expected_weight","in","zip","(","weights",",","[","1.0",",","1.0",",","1.0","]",")",":","self.assertAlmostEqual","(","weight",",","expected_weight",",","1",")"]
146
172
null
test_thompson.py
Ax/ax/models/tests/test_thompson.py
import numpy from ax.exceptions.model import ModelError from ax.models.discrete.thompson import ThompsonSampler from ax.utils.common.testutils import TestCase
15
1
4
0
1
9
1
Use image node_id 5 for calling the ThompsonSamplerTest obj's underlying member method code with example usage: obj.test_ThompsonSamplerUniformWeights() without return types
173
node_id 5
9,539
test_ThompsonSamplerMinWeight
ThompsonSamplerTest
TestCase
true
self
null
null
null
null
null
def test_ThompsonSamplerMinWeight(self) -> None: generator = ThompsonSampler(min_weight=0.01) generator.fit( # pyre-fixme[6]: For 1st param expected `List[List[List[Union[None, # bool, float, int, str]]]]` but got `List[List[List[int]]]`. Xs=self.Xs, # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got # `List[List[int]]`. Ys=self.Ys, # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got # `List[List[int]]`. Yvars=self.Yvars, # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, outcome_names=self.outcome_names, ) arms, weights, _ = generator.gen( n=5, # pyre-fixme[6]: For 2nd param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, objective_weights=np.ones(1), ) self.assertEqual(arms, [[4, 4], [3, 3], [2, 2]]) for weight, expected_weight in zip( weights, [3 * i for i in [0.725, 0.225, 0.05]] ): self.assertAlmostEqual(weight, expected_weight, 1)
["def","test_ThompsonSamplerMinWeight","(","self",")","-",">","None",":","generator","=","ThompsonSampler","(","min_weight=0.01",")","generator.fit","(","#","pyre-fixme","[","6","]",":","For","1st","param","expected","`","List","[","List","[","List","[","Union","[","None",",","#","bool",",","float",",","int",",","str","]","]","]","]","`","but","got","`","List","[","List","[","List","[","int","]","]","]","`",".","Xs=self.Xs",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","arms",",","weights",",","_","=","generator.gen","(","n=5",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","objective_weights=np.ones","(","1",")",",",")","self.assertEqual","(","arms",",","[","[","4",",","4","]",",","[","3",",","3","]",",","[","2",",","2","]","]",")","for","weight",",","expected_weight","in","zip","(","weights",",","[","3","*","i","for","i","in","[","0.725",",","0.225",",","0.05","]","]",")",":","self.assertAlmostEqual","(","weight",",","expected_weight",",","1",")"]
116
144
null
test_thompson.py
Ax/ax/models/tests/test_thompson.py
import numpy from ax.exceptions.model import ModelError from ax.models.discrete.thompson import ThompsonSampler from ax.utils.common.testutils import TestCase
15
1
4
0
1
9
1
Use image node_id 4 for calling the ThompsonSamplerTest obj's underlying member method code with example usage: obj.test_ThompsonSamplerMinWeight() without return types
168
node_id 4
9,538
test_ThompsonSamplerValidation
ThompsonSamplerTest
TestCase
true
self
null
null
null
null
null
def test_ThompsonSamplerValidation(self) -> None: generator = ThompsonSampler(min_weight=0.01) # all Xs are not the same with self.assertRaises(ValueError): generator.fit( Xs=[ [[1, 1], [2, 2], [3, 3], [4, 4]], [[1, 1], [2, 2], [4, 4]], ], # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got # `List[List[int]]`. Ys=self.Ys, # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got # `List[List[int]]`. Yvars=self.Yvars, # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, outcome_names=self.outcome_names, ) # multiple observations per parameterization with self.assertRaises(ValueError): generator.fit( Xs=[[[1, 1], [2, 2], [2, 2]]], # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got # `List[List[int]]`. Ys=self.Ys, # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got # `List[List[int]]`. Yvars=self.Yvars, # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, outcome_names=self.outcome_names, ) # these are not the same observations, so should not error generator.fit( Xs=[[[1, 1], [2.0, 2], [2, 2]]], # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got # `List[List[int]]`. Ys=self.Ys, # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got # `List[List[int]]`. Yvars=self.Yvars, # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, outcome_names=self.outcome_names, ) # requires objective weights with self.assertRaises(ValueError): # pyre-fixme[6]: For 2nd param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. generator.gen( 5, self.parameter_values, objective_weights=None )
["def","test_ThompsonSamplerValidation","(","self",")","-",">","None",":","generator","=","ThompsonSampler","(","min_weight=0.01",")","#","all","Xs","are","not","the","same","with","self.assertRaises","(","ValueError",")",":","generator.fit","(","Xs=","[","[","[","1",",","1","]",",","[","2",",","2","]",",","[","3",",","3","]",",","[","4",",","4","]","]",",","[","[","1",",","1","]",",","[","2",",","2","]",",","[","4",",","4","]","]",",","]",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","#","multiple","observations","per","parameterization","with","self.assertRaises","(","ValueError",")",":","generator.fit","(","Xs=","[","[","[","1",",","1","]",",","[","2",",","2","]",",","[","2",",","2","]","]","]",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","#","these","are","not","the","same","observations",",","so","should","not","error","generator.fit","(","Xs=","[","[","[","1",",","1","]",",","[","2.0",",","2","]",",","[","2",",","2","]","]","]",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","#","requires","objective","weights","with","self.assertRaises","(","ValueError",")",":","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","generator.gen","(","5",",","self.parameter_values",",","objective_weights=None",")"]
60
114
null
test_thompson.py
Ax/ax/models/tests/test_thompson.py
import numpy from ax.exceptions.model import ModelError from ax.models.discrete.thompson import ThompsonSampler from ax.utils.common.testutils import TestCase
15
1
4
0
1
9
1
Use image node_id 3 for calling the ThompsonSamplerTest obj's underlying member method code with example usage: obj.test_ThompsonSamplerValidation() without return types
169
node_id 3
9,537
test_ThompsonSampler
ThompsonSamplerTest
TestCase
true
self
null
null
null
null
null
def test_ThompsonSampler(self) -> None: generator = ThompsonSampler(min_weight=0.0) generator.fit( # pyre-fixme[6]: For 1st param expected `List[List[List[Union[None, # bool, float, int, str]]]]` but got `List[List[List[int]]]`. Xs=self.Xs, # pyre-fixme[6]: For 2nd param expected `List[List[float]]` but got # `List[List[int]]`. Ys=self.Ys, # pyre-fixme[6]: For 3rd param expected `List[List[float]]` but got # `List[List[int]]`. Yvars=self.Yvars, # pyre-fixme[6]: For 4th param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, outcome_names=self.outcome_names, ) arms, weights, gen_metadata = generator.gen( n=3, # pyre-fixme[6]: For 2nd param expected `List[List[Union[None, bool, # float, int, str]]]` but got `List[List[int]]`. parameter_values=self.parameter_values, objective_weights=np.ones(1), ) self.assertEqual(arms, [[4, 4], [3, 3], [2, 2]]) for weight, expected_weight in zip( weights, [3 * i for i in [0.725, 0.225, 0.05]] ): self.assertAlmostEqual(weight, expected_weight, 1) self.assertEqual(len(gen_metadata["arms_to_weights"]), 4)
["def","test_ThompsonSampler","(","self",")","-",">","None",":","generator","=","ThompsonSampler","(","min_weight=0.0",")","generator.fit","(","#","pyre-fixme","[","6","]",":","For","1st","param","expected","`","List","[","List","[","List","[","Union","[","None",",","#","bool",",","float",",","int",",","str","]","]","]","]","`","but","got","`","List","[","List","[","List","[","int","]","]","]","`",".","Xs=self.Xs",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Ys=self.Ys",",","#","pyre-fixme","[","6","]",":","For","3rd","param","expected","`","List","[","List","[","float","]","]","`","but","got","#","`","List","[","List","[","int","]","]","`",".","Yvars=self.Yvars",",","#","pyre-fixme","[","6","]",":","For","4th","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","outcome_names=self.outcome_names",",",")","arms",",","weights",",","gen_metadata","=","generator.gen","(","n=3",",","#","pyre-fixme","[","6","]",":","For","2nd","param","expected","`","List","[","List","[","Union","[","None",",","bool",",","#","float",",","int",",","str","]","]","]","`","but","got","`","List","[","List","[","int","]","]","`",".","parameter_values=self.parameter_values",",","objective_weights=np.ones","(","1",")",",",")","self.assertEqual","(","arms",",","[","[","4",",","4","]",",","[","3",",","3","]",",","[","2",",","2","]","]",")","for","weight",",","expected_weight","in","zip","(","weights",",","[","3","*","i","for","i","in","[","0.725",",","0.225",",","0.05","]","]",")",":","self.assertAlmostEqual","(","weight",",","expected_weight",",","1",")","self.assertEqual","(","len","(","gen_metadata","[","``","arms_to_weights","''","]",")",",","4",")"]
29
58
null
test_thompson.py
Ax/ax/models/tests/test_thompson.py
import numpy from ax.exceptions.model import ModelError from ax.models.discrete.thompson import ThompsonSampler from ax.utils.common.testutils import TestCase
15
1
4
0
1
9
1
Use image node_id 2 for calling the ThompsonSamplerTest obj's underlying member method code with example usage: obj.test_ThompsonSampler() without return types
159
node_id 2
9,536
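A condensed, hedged sketch of the fit/gen pattern the ThompsonSampler tests above exercise; the toy data simply mirrors the recorded fixtures.
# Hedged sketch: toy data mirrors the test fixtures above
import numpy as np
from ax.models.discrete.thompson import ThompsonSampler

ts = ThompsonSampler(min_weight=0.0)
ts.fit(
    Xs=[[[1, 1], [2, 2], [3, 3], [4, 4]]],          # 4 arms, each of dimensionality 2
    Ys=[[1, 2, 3, 4]],
    Yvars=[[1, 1, 1, 1]],
    parameter_values=[[1, 2, 3, 4], [1, 2, 3, 4]],
    outcome_names=["x", "y"],
)
arms, weights, gen_metadata = ts.gen(
    n=3,
    parameter_values=[[1, 2, 3, 4], [1, 2, 3, 4]],
    objective_weights=np.ones(1),
)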
setUp
ThompsonSamplerTest
TestCase
true
self
null
null
null
null
null
def setUp(self) -> None: self.Xs = [ [[1, 1], [2, 2], [3, 3], [4, 4]] ] # 4 arms, each of dimensionality 2 self.Ys = [[1, 2, 3, 4]] self.Yvars = [[1, 1, 1, 1]] self.parameter_values = [[1, 2, 3, 4], [1, 2, 3, 4]] self.outcome_names = ["x", "y"] # not used for regular TS self.multiple_metrics_Xs = [ [[1, 1], [2, 2], [3, 3], [4, 4]], [[1, 1], [2, 2], [3, 3], [4, 4]], ] # 2 metrics, 4 arms, each of dimensionality 2 self.multiple_metrics_Ys = [[1, 2, 3, 4], [0, 0, 0, 1]] self.multiple_metrics_Yvars = [[1, 1, 1, 1], [1, 1, 1, 1]]
["def","setUp","(","self",")","-",">","None",":","self.Xs","=","[","[","[","1",",","1","]",",","[","2",",","2","]",",","[","3",",","3","]",",","[","4",",","4","]","]","]","#","4","arms",",","each","of","dimensionality","2","self.Ys","=","[","[","1",",","2",",","3",",","4","]","]","self.Yvars","=","[","[","1",",","1",",","1",",","1","]","]","self.parameter_values","=","[","[","1",",","2",",","3",",","4","]",",","[","1",",","2",",","3",",","4","]","]","self.outcome_names","=","[","``","x","''",",","``","y","''","]","#","not","used","for","regular","TS","self.multiple_metrics_Xs","=","[","[","[","1",",","1","]",",","[","2",",","2","]",",","[","3",",","3","]",",","[","4",",","4","]","]",",","[","[","1",",","1","]",",","[","2",",","2","]",",","[","3",",","3","]",",","[","4",",","4","]","]",",","]","#","2","metrics",",","4","arms",",","each","of","dimensionality","2","self.multiple_metrics_Ys","=","[","[","1",",","2",",","3",",","4","]",",","[","0",",","0",",","0",",","1","]","]","self.multiple_metrics_Yvars","=","[","[","1",",","1",",","1",",","1","]",",","[","1",",","1",",","1",",","1","]","]"]
15
27
null
test_thompson.py
Ax/ax/models/tests/test_thompson.py
import numpy from ax.exceptions.model import ModelError from ax.models.discrete.thompson import ThompsonSampler from ax.utils.common.testutils import TestCase
15
1
4
0
1
9
1
Use image node_id 1 for calling the ThompsonSamplerTest obj's underlying member method code with example usage: obj.setUp() without return types
144
node_id 1
9,535
test_decimals_0_1
TestHighConfidence
unittest
true
self
A unittest class for testing the HighConfidence postprocessor.
["A","unittest","class","for","testing","the","HighConfidence","postprocessor","."]
Test with cutoff of 0.1.
["Test","with","cutoff","of","0.1","."]
null
def test_decimals_0_1(self): """ Test with cutoff of 0.1. """ (_, _), (x_test, _) = self.mnist classifier = get_image_classifier_kr_tf() preds = classifier.predict(x_test[0:1]) postprocessor = HighConfidence(cutoff=0.1) post_preds = postprocessor(preds=preds) classifier_prediction_expected = np.asarray( [ [ 0.12109935, 0.0498215, 0.0993958, 0.06410096, 0.11366928, 0.04645343, 0.06419807, 0.30685693, 0.07616714, 0.05823757, ] ], dtype=np.float32, ) post_classifier_prediction_expected = np.asarray( [ [ 0.12109935, 0.0, 0.0, 0.0, 0.11366928, 0.0, 0.0, 0.30685693, 0.0, 0.0, ] ], dtype=np.float32, ) np.testing.assert_array_almost_equal( preds, classifier_prediction_expected, decimal=4 ) np.testing.assert_array_almost_equal( post_preds, post_classifier_prediction_expected, decimal=4 )
["def","test_decimals_0_1","(","self",")",":","``","''","''","Test","with","cutoff","of","0.1.","``","''","''","(","_",",","_",")",",","(","x_test",",","_",")","=","self.mnist","classifier","=","get_image_classifier_kr_tf","(",")","preds","=","classifier.predict","(","x_test","[","0:1","]",")","postprocessor","=","HighConfidence","(","cutoff=0.1",")","post_preds","=","postprocessor","(","preds=preds",")","classifier_prediction_expected","=","np.asarray","(","[","[","0.12109935",",","0.0498215",",","0.0993958",",","0.06410096",",","0.11366928",",","0.04645343",",","0.06419807",",","0.30685693",",","0.07616714",",","0.05823757",",","]","]",",","dtype=np.float32",",",")","post_classifier_prediction_expected","=","np.asarray","(","[","[","0.12109935",",","0.0",",","0.0",",","0.0",",","0.11366928",",","0.0",",","0.0",",","0.30685693",",","0.0",",","0.0",",","]","]",",","dtype=np.float32",",",")","np.testing.assert_array_almost_equal","(","preds",",","classifier_prediction_expected",",","decimal=4",")","np.testing.assert_array_almost_equal","(","post_preds",",","post_classifier_prediction_expected",",","decimal=4",")"]
44
76
null
test_high_confidence.py
adversarial-robustness-toolbox/tests/defences/test_high_confidence.py
import logging import unittest import numpy from art.defences.postprocessor import HighConfidence from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 3 for calling the TestHighConfidence obj's underlying member method code with example usage: obj.test_decimals_0_1() without return types
155
node_id 3
235,298
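A minimal, hedged sketch of the postprocessor call pattern these HighConfidence tests use; the prediction values below are made up.
# Hedged sketch: prediction values are invented; cutoff semantics follow the tests above
import numpy as np
from art.defences.postprocessor import HighConfidence

preds = np.array([[0.12, 0.05, 0.31, 0.52]], dtype=np.float32)
post = HighConfidence(cutoff=0.2)
print(post(preds=preds))   # entries below the cutoff are zeroed out, the rest kept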
test_decimals_0_2
TestHighConfidence
unittest
true
self
A unittest class for testing the HighConfidence postprocessor.
["A","unittest","class","for","testing","the","HighConfidence","postprocessor","."]
Test with cutoff of 0.2.
["Test","with","cutoff","of","0.2","."]
null
def test_decimals_0_2(self): """ Test with cutoff of 0.2. """ (_, _), (x_test, _) = self.mnist classifier = get_image_classifier_kr_tf() preds = classifier.predict(x_test[0:1]) postprocessor = HighConfidence(cutoff=0.2) post_preds = postprocessor(preds=preds) classifier_prediction_expected = np.asarray( [ [ 0.12109935, 0.0498215, 0.0993958, 0.06410096, 0.11366928, 0.04645343, 0.06419807, 0.30685693, 0.07616714, 0.05823757, ] ], dtype=np.float32, ) post_classifier_prediction_expected = np.asarray( [[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.30685693, 0.0, 0.0]], dtype=np.float32, ) np.testing.assert_array_almost_equal( preds, classifier_prediction_expected, decimal=4 ) np.testing.assert_array_almost_equal( post_preds, post_classifier_prediction_expected, decimal=4 )
["def","test_decimals_0_2","(","self",")",":","``","''","''","Test","with","cutoff","of","0.2.","``","''","''","(","_",",","_",")",",","(","x_test",",","_",")","=","self.mnist","classifier","=","get_image_classifier_kr_tf","(",")","preds","=","classifier.predict","(","x_test","[","0:1","]",")","postprocessor","=","HighConfidence","(","cutoff=0.2",")","post_preds","=","postprocessor","(","preds=preds",")","classifier_prediction_expected","=","np.asarray","(","[","[","0.12109935",",","0.0498215",",","0.0993958",",","0.06410096",",","0.11366928",",","0.04645343",",","0.06419807",",","0.30685693",",","0.07616714",",","0.05823757",",","]","]",",","dtype=np.float32",",",")","post_classifier_prediction_expected","=","np.asarray","(","[","[","0.0",",","0.0",",","0.0",",","0.0",",","0.0",",","0.0",",","0.0",",","0.30685693",",","0.0",",","0.0","]","]",",","dtype=np.float32",",",")","np.testing.assert_array_almost_equal","(","preds",",","classifier_prediction_expected",",","decimal=4",")","np.testing.assert_array_almost_equal","(","post_preds",",","post_classifier_prediction_expected",",","decimal=4",")"]
78
110
null
test_high_confidence.py
adversarial-robustness-toolbox/tests/defences/test_high_confidence.py
import logging import unittest import numpy from art.defences.postprocessor import HighConfidence from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 4 for calling the TestHighConfidence obj's underlying member method code with example usage: obj.test_decimals_0_2() without return types
155
node_id 4
235,299
test_binary_decimals_0_5
TestHighConfidence
unittest
true
self
A unittest class for testing the HighConfidence postprocessor.
["A","unittest","class","for","testing","the","HighConfidence","postprocessor","."]
Test with cutoff of 0.5 for binary classifier.
["Test","with","cutoff","of","0.5","for","binary","classifier","."]
null
def test_binary_decimals_0_5(self): """ Test with cutoff of 0.5 for binary classifier. """ (_, _), (x_test, _) = self.mnist classifier = get_image_classifier_kr_tf_binary() preds = classifier.predict(x_test[0:1]) postprocessor = HighConfidence(cutoff=0.5) post_preds = postprocessor(preds=preds) classifier_prediction_expected = np.asarray( [[0.5301345]], dtype=np.float32 ) post_classifier_prediction_expected = np.asarray( [[0.5301345]], dtype=np.float32 ) np.testing.assert_array_almost_equal( preds, classifier_prediction_expected, decimal=4 ) np.testing.assert_array_almost_equal( post_preds, post_classifier_prediction_expected, decimal=4 )
["def","test_binary_decimals_0_5","(","self",")",":","``","''","''","Test","with","cutoff","of","0.5","for","binary","classifier.","``","''","''","(","_",",","_",")",",","(","x_test",",","_",")","=","self.mnist","classifier","=","get_image_classifier_kr_tf_binary","(",")","preds","=","classifier.predict","(","x_test","[","0:1","]",")","postprocessor","=","HighConfidence","(","cutoff=0.5",")","post_preds","=","postprocessor","(","preds=preds",")","classifier_prediction_expected","=","np.asarray","(","[","[","0.5301345","]","]",",","dtype=np.float32",")","post_classifier_prediction_expected","=","np.asarray","(","[","[","0.5301345","]","]",",","dtype=np.float32",")","np.testing.assert_array_almost_equal","(","preds",",","classifier_prediction_expected",",","decimal=4",")","np.testing.assert_array_almost_equal","(","post_preds",",","post_classifier_prediction_expected",",","decimal=4",")"]
112
126
null
test_high_confidence.py
adversarial-robustness-toolbox/tests/defences/test_high_confidence.py
import logging import unittest import numpy from art.defences.postprocessor import HighConfidence from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 5 for calling the TestHighConfidence obj's underlying member method code with example usage: obj.test_binary_decimals_0_5() without return types
162
node_id 5
235,300
test_RandomModelGenSamples
RandomModelTest
TestCase
true
self
null
null
null
null
null
def test_RandomModelGenSamples(self) -> None: with self.assertRaises(NotImplementedError): self.random_model._gen_samples(n=1, tunable_d=1)
["def","test_RandomModelGenSamples","(","self",")","-",">","None",":","with","self.assertRaises","(","NotImplementedError",")",":","self.random_model._gen_samples","(","n=1",",","tunable_d=1",")"]
25
27
null
test_random.py
Ax/ax/models/tests/test_random.py
import numpy import torch from ax.models.random.base import RandomModel from ax.utils.common.testutils import TestCase from ax.utils.common.typeutils import not_none
15
1
5
0
1
8
1
Use image node_id 3 for calling the RandomModelTest obj's underlying member method code with example usage: obj.test_RandomModelGenSamples() without return types
161
node_id 3
9,512
test_binary_decimals_0_6
TestHighConfidence
unittest
true
self
A unittest class for testing the HighConfidence postprocessor.
["A","unittest","class","for","testing","the","HighConfidence","postprocessor","."]
Test with cutoff of 0.6 for binary classifier.
["Test","with","cutoff","of","0.6","for","binary","classifier","."]
null
def test_binary_decimals_0_6(self): """ Test with cutoff of 0.6 for binary classifier. """ (_, _), (x_test, _) = self.mnist classifier = get_image_classifier_kr_tf_binary() preds = classifier.predict(x_test[0:1]) postprocessor = HighConfidence(cutoff=0.6) post_preds = postprocessor(preds=preds) classifier_prediction_expected = np.asarray( [[0.5301345]], dtype=np.float32 ) post_classifier_prediction_expected = np.asarray( [[0.0]], dtype=np.float32 ) np.testing.assert_array_almost_equal( preds, classifier_prediction_expected, decimal=4 ) np.testing.assert_array_almost_equal( post_preds, post_classifier_prediction_expected, decimal=4 )
["def","test_binary_decimals_0_6","(","self",")",":","``","''","''","Test","with","cutoff","of","0.6","for","binary","classifier.","``","''","''","(","_",",","_",")",",","(","x_test",",","_",")","=","self.mnist","classifier","=","get_image_classifier_kr_tf_binary","(",")","preds","=","classifier.predict","(","x_test","[","0:1","]",")","postprocessor","=","HighConfidence","(","cutoff=0.6",")","post_preds","=","postprocessor","(","preds=preds",")","classifier_prediction_expected","=","np.asarray","(","[","[","0.5301345","]","]",",","dtype=np.float32",")","post_classifier_prediction_expected","=","np.asarray","(","[","[","0.0","]","]",",","dtype=np.float32",")","np.testing.assert_array_almost_equal","(","preds",",","classifier_prediction_expected",",","decimal=4",")","np.testing.assert_array_almost_equal","(","post_preds",",","post_classifier_prediction_expected",",","decimal=4",")"]
128
142
null
test_high_confidence.py
adversarial-robustness-toolbox/tests/defences/test_high_confidence.py
import logging import unittest import numpy from art.defences.postprocessor import HighConfidence from art.utils import load_dataset from tests.utils import master_seed, get_image_classifier_kr_tf, get_image_classifier_kr_tf_binary
15
1
6
0
1
7
1
Use image node_id 6 for calling the TestHighConfidence obj's underlying member method code with example usage: obj.test_binary_decimals_0_6() without return types
162
node_id 6
235,301
convert
Converter
ImageConverter
true
self,_from,_to
null
null
Converts the image from SVG to PDF using chrome.
["Converts","the","image","from","SVG","to","PDF","using","chrome","."]
True
def convert(self, _from: str, _to: str) -> bool: """Converts the image from SVG to PDF using chrome.""" with open(_from, "r") as f: svg = f.read() HTML = ( "<html><head><style>body {margin: 0; }</style><script>function init() {const element = document.querySelector('svg');const positionInfo = element.getBoundingClientRect();const height = positionInfo.height;const width = positionInfo.width;const style = document.createElement('style');style.innerHTML = `@page {margin: 0; size: ${width}px ${height}px}`;document.head.appendChild(style); }window.onload = init;</script></head><body>%s</body></html>" % (svg) ) temp_name = f"{_from}.html" with open(temp_name, "w") as f: f.write(HTML) chromium = self.chromium_command() code = self.command_runner(chromium, _to, temp_name) if code != 0: chrome = self.chrome_command() code = self.command_runner(chrome, _to, temp_name) if code != 0: logger.error( "Fail to convert svg to pdf. Make sure Chromium or Chrome is installed." ) exit(1) return True
["def","convert","(","self",",","_from",":","str",",","_to",":","str",")","-",">","bool",":","``","''","''","Converts","the","image","from","SVG","to","PDF","using","chrome",".","''","''","''","with","open","(","_from",",","``","r","''",")","as","f",":","svg","=","f.read","(",")","HTML","=","(","``","<","html",">","<","head",">","<","style",">","body","{","margin",":","0",";","}","<","\/style",">","<","script",">","function","init","(",")","{","const","element","=","document.querySelector","(","'svg","'",")",";","const","positionInfo","=","element.getBoundingClientRect","(",")",";","const","height","=","positionInfo.height",";","const","width","=","positionInfo.width",";","const","style","=","document.createElement","(","'style","'",")",";","style.innerHTML","=","`","@","page","{","margin",":","0",";","size",":","$","{","width","}","px","$","{","height","}","px","}","`",";","document.head.appendChild","(","style",")",";","}","window.onload","=","init",";","<","\/script",">","<","\/head",">","<","body",">","%","s","<","\/body",">","<","\/html",">","''","%","(","svg",")",")","temp_name","=","f","''","{","_from","}",".html","''","with","open","(","temp_name",",","``","w","''",")","as","f",":","f.write","(","HTML",")","chromium","=","self.chromium_command","(",")","code","=","self.command_runner","(","chromium",",","_to",",","temp_name",")","if","code","!","=","0",":","chrome","=","self.chrome_command","(",")","code","=","self.command_runner","(","chrome",",","_to",",","temp_name",")","if","code","!","=","0",":","logger.error","(","``","Fail","to","convert","svg","to","pdf",".","Make","sure","Chromium","or","Chrome","is","installed",".","''",")","exit","(","1",")","return","True"]
71
89
null
convert-svg-to-pdf.py
sympy/doc/ext/convert-svg-to-pdf.py
from __future__ import annotations from sphinx.transforms.post_transforms.images import ImageConverter from sphinx.util import logging import os import platform from typing import Any from sphinx.application import Sphinx
15
1
7
1
1
5
1
Use image node_id 5 for calling the Converter obj's underlying member method code with example usage: obj.convert(_from, _to) and returns: True
143
node_id 5
2,029,278
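A hedged sketch of the wrapping step convert() performs before handing the page to a headless browser; the SVG content and file names are placeholders, and the page-sizing script from the recorded HTML is omitted.
# Hedged sketch: SVG content and paths are placeholders; the sizing <script> is omitted
svg = "<svg xmlns='http://www.w3.org/2000/svg' width='10' height='10'></svg>"
html = "<html><head><style>body {margin: 0; }</style></head><body>%s</body></html>" % svg
with open("figure.svg.html", "w") as f:
    f.write(html)
# Chromium/Chrome is then invoked headlessly, roughly:
#   chromium --headless --disable-gpu --print-to-pdf=figure.pdf figure.svg.html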
_setup
RunnerTrafficMetricsMiddleware
null
true
self,metrics_client
null
null
null
null
null
def _setup( self, metrics_client: "PrometheusClient" = Provide[ BentoMLContainer.metrics_client ], ): self.metrics_client = metrics_client self.metrics_request_duration = metrics_client.Histogram( namespace=self.namespace, name="request_duration_seconds", documentation="runner RPC duration in seconds", labelnames=[ "endpoint", "service_name", "service_version", "http_response_code", "runner_name", ], ) self.metrics_request_total = metrics_client.Counter( namespace=self.namespace, name="request_total", documentation="Total number of runner RPC", labelnames=[ "endpoint", "service_name", "service_version", "http_response_code", "runner_name", ], ) self.metrics_request_in_progress = metrics_client.Gauge( namespace=self.namespace, name="request_in_progress", documentation="Total number of runner RPC in progress now", labelnames=[ "endpoint", "service_name", "service_version", "runner_name", ], multiprocess_mode="livesum", ) self._is_setup = True
["def","_setup","(","self",",","metrics_client",":","``","PrometheusClient","''","=","Provide","[","BentoMLContainer.metrics_client","]",",",")",":","self.metrics_client","=","metrics_client","self.metrics_request_duration","=","metrics_client.Histogram","(","namespace=self.namespace",",","name=","''","request_duration_seconds","''",",","documentation=","''","runner","RPC","duration","in","seconds","''",",","labelnames=","[","``","endpoint","''",",","``","service_name","''",",","``","service_version","''",",","``","http_response_code","''",",","``","runner_name","''",",","]",",",")","self.metrics_request_total","=","metrics_client.Counter","(","namespace=self.namespace",",","name=","''","request_total","''",",","documentation=","''","Total","number","of","runner","RPC","''",",","labelnames=","[","``","endpoint","''",",","``","service_name","''",",","``","service_version","''",",","``","http_response_code","''",",","``","runner_name","''",",","]",",",")","self.metrics_request_in_progress","=","metrics_client.Gauge","(","namespace=self.namespace",",","name=","''","request_in_progress","''",",","documentation=","''","Total","number","of","runner","RPC","in","progress","now","''",",","labelnames=","[","``","endpoint","''",",","``","service_name","''",",","``","service_version","''",",","``","runner_name","''",",","]",",","multiprocess_mode=","''","livesum","''",",",")","self._is_setup","=","True"]
150
187
null
instruments.py
BentoML/src/bentoml/_internal/server/http/instruments.py
from __future__ import annotations import contextvars import logging from timeit import default_timer from typing import TYPE_CHECKING from simple_di import Provide from simple_di import inject from ...configuration.containers import BentoMLContainer from ...context import component_context
15
2
9
0
0
2
null
Use image node_id 2 for calling the RunnerTrafficMetricsMiddleware obj's underlying member method code with example usage: obj._setup(metrics_client) without return types
170
node_id 2
14,515
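For readers unfamiliar with the metrics client BentoML injects here, the sketch below sets up the same three instruments directly with `prometheus_client`; the `bentoml_runner` namespace value is an assumption, and only the metric names, documentation strings and label names are taken from the record above.

```python
from prometheus_client import Counter, Gauge, Histogram

NAMESPACE = "bentoml_runner"  # hypothetical namespace; BentoML derives its own
LABELS = ["endpoint", "service_name", "service_version", "http_response_code", "runner_name"]

request_duration = Histogram(
    name="request_duration_seconds",
    documentation="runner RPC duration in seconds",
    namespace=NAMESPACE,
    labelnames=LABELS,
)
request_total = Counter(
    name="request_total",
    documentation="Total number of runner RPC",
    namespace=NAMESPACE,
    labelnames=LABELS,
)
request_in_progress = Gauge(
    name="request_in_progress",
    documentation="Total number of runner RPC in progress now",
    namespace=NAMESPACE,
    labelnames=LABELS[:3] + ["runner_name"],
    multiprocess_mode="livesum",  # only takes effect in multiprocess mode
)

# Typical use inside a request handler (label values are placeholders):
# with request_in_progress.labels("run", "svc", "1.0", "runner").track_inprogress():
#     with request_duration.labels("run", "svc", "1.0", "200", "runner").time():
#         ...
```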
command_runner
Converter
ImageConverter
true
self,chrome,_to,temp_name
null
null
null
null
os,int
def command_runner( self, chrome: str | None, _to: str, temp_name: str ) -> int: if not chrome: return 1 command = f"{chrome} --headless --disable-gpu --disable-software-rasterizer --print-to-pdf={_to} {temp_name}" logger.error(command) return os.system(command)
["def","command_runner","(","self",",","chrome",":","str","|","None",",","_to",":","str",",","temp_name",":","str",")","-",">","int",":","if","not","chrome",":","return","1","command","=","f","''","{","chrome","}","--","headless","--","disable-gpu","--","disable-software-rasterizer","--","print-to-pdf=","{","_to","}","{","temp_name","}","''","logger.error","(","command",")","return","os.system","(","command",")"]
64
69
null
convert-svg-to-pdf.py
sympy/doc/ext/convert-svg-to-pdf.py
from __future__ import annotations from sphinx.transforms.post_transforms.images import ImageConverter from sphinx.util import logging import os import platform from typing import Any from sphinx.application import Sphinx
15
1
7
1
1
5
1
Use image node_id 4 for calling the Converter obj's underlying member method code with example usage: obj.command_runner(chrome, _to, temp_name) and returns: os, int
165
node_id 4
2,029,277
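A variation on the helper above, sketched with `subprocess.run` so that paths containing spaces need no manual quoting; the flags are the same Chrome/Chromium flags, while the binary name and file names in the trailing comment are placeholders.

```python
from __future__ import annotations

import subprocess


def run_print_to_pdf(chrome: str | None, pdf_path: str, html_path: str) -> int:
    # Mirrors command_runner: report failure (1) when no browser binary was
    # found, otherwise return the exit status of the headless print call.
    if not chrome:
        return 1
    cmd = [
        chrome,
        "--headless",
        "--disable-gpu",
        "--disable-software-rasterizer",
        f"--print-to-pdf={pdf_path}",
        html_path,
    ]
    try:
        return subprocess.run(cmd, check=False).returncode
    except FileNotFoundError:
        return 1


# e.g. run_print_to_pdf("chromium", "figure.pdf", "figure.svg.html")
```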
chromium_command
Converter
ImageConverter
true
self
null
null
null
null
None,None,str,str,str+path+str,path,str
def chromium_command(self) -> str | None: if platform.win32_ver()[0]: if os.system("where chromium") == 0: return "chromium" path = os.path.join( os.environ["PROGRAMW6432"], "Chromium\\Application\\chrome.exe", ) if os.path.exists(path): return f'"{path}"' return None if os.system("chromium --version") == 0: return "chromium" if platform.mac_ver()[0]: path = "/Applications/Chromium.app/Contents/MacOS/Chromium" if os.path.exists(path): return path elif platform.libc_ver()[0]: if os.system("chromium-browser --version") == 0: return "chromium-browser" return None
["def","chromium_command","(","self",")","-",">","str","|","None",":","if","platform.win32_ver","(",")","[","0","]",":","if","os.system","(","``","where","chromium","''",")","==","0",":","return","``","chromium","''","path","=","os.path.join","(","os.environ","[","``","PROGRAMW6432","''","]",",","``","Chromium\\\\Application\\\\chrome.exe","''",",",")","if","os.path.exists","(","path",")",":","return","f","'","''","{","path","}","''","'","return","None","if","os.system","(","``","chromium","--","version","''",")","==","0",":","return","``","chromium","''","if","platform.mac_ver","(",")","[","0","]",":","path","=","``","\/Applications\/Chromium.app\/Contents\/MacOS\/Chromium","''","if","os.path.exists","(","path",")",":","return","path","elif","platform.libc_ver","(",")","[","0","]",":","if","os.system","(","``","chromium-browser","--","version","''",")","==","0",":","return","``","chromium-browser","''","return","None"]
44
61
null
convert-svg-to-pdf.py
sympy/doc/ext/convert-svg-to-pdf.py
from __future__ import annotations from sphinx.transforms.post_transforms.images import ImageConverter from sphinx.util import logging import os import platform from typing import Any from sphinx.application import Sphinx
15
1
7
1
1
5
1
Use image node_id 3 for calling the Converter obj's underlying member method code with example usage: obj.chromium_command() and returns: None, None, str, str, str, path, str, path, str
185
node_id 3
2,029,276
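The lookup above probes several well-known locations per platform. The sketch below is a simplified, hypothetical variant that leans on `shutil.which` for PATH lookups and keeps the same hard-coded macOS and Windows locations; it is not the extension's actual logic.

```python
import os
import platform
import shutil
from typing import Optional


def find_chromium() -> Optional[str]:
    # PATH lookup first, then the well-known per-platform install locations.
    for name in ("chromium", "chromium-browser", "chrome"):
        path = shutil.which(name)
        if path:
            return path
    if platform.mac_ver()[0]:
        path = "/Applications/Chromium.app/Contents/MacOS/Chromium"
        if os.path.exists(path):
            return path
    if platform.win32_ver()[0]:
        path = os.path.join(os.environ.get("PROGRAMW6432", ""),
                            "Chromium", "Application", "chrome.exe")
        if os.path.exists(path):
            return f'"{path}"'  # quoted because the path may contain spaces
    return None
```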
_test_texture_as_input
TestShapeShifter
TestBase
true
self,sign_gradients,use_spectral,soft_clip
null
null
null
null
background,image_frame,y_,current_image
def _test_texture_as_input( self, sign_gradients, use_spectral, soft_clip ): # We must start a new graph tf.reset_default_graph() # Only import if object detection module is available from art.estimators.object_detection.tensorflow_faster_rcnn import ( TensorFlowFasterRCNN, ) from art.attacks.evasion.shapeshifter import ShapeShifter # Define object detector images = tf.Variable( initial_value=np.zeros([1, 28, 28, 1]), dtype=tf.float32 ) obj_dec = TensorFlowFasterRCNN(images=images) # Create labels result = obj_dec.predict(self.x_test_mnist[:1].astype(np.float32)) groundtruth_boxes_list = [result[i]["boxes"] for i in range(1)] groundtruth_classes_list = [result[i]["labels"] for i in range(1)] groundtruth_weights_list = [ np.ones_like(r) for r in groundtruth_classes_list ] y = { "groundtruth_boxes_list": groundtruth_boxes_list, "groundtruth_classes_list": groundtruth_classes_list, "groundtruth_weights_list": groundtruth_weights_list, } # Define random transform def random_transform(x): background = np.random.rand(*x.shape) image_frame = np.random.rand(*(list(x.shape[:-1]) + [4])) y_ = y.copy() y_["groundtruth_boxes_list"][0] = ( y_["groundtruth_boxes_list"][0] + np.random.rand() ) y_["groundtruth_weights_list"][0] = ( y_["groundtruth_weights_list"][0] + np.random.rand() ) return background, image_frame, y_ # Define attack attack = ShapeShifter( estimator=obj_dec, random_transform=random_transform, box_classifier_weight=1.0, box_localizer_weight=1.0, rpn_classifier_weight=1.0, rpn_localizer_weight=1.0, box_iou_threshold=0.3, box_victim_weight=1.0, box_target_weight=1.0, box_victim_cw_weight=1.0, box_victim_cw_confidence=1.0, box_target_cw_weight=1.0, box_target_cw_confidence=1.0, rpn_iou_threshold=0.3, rpn_background_weight=1.0, rpn_foreground_weight=1.0, rpn_cw_weight=1.0, rpn_cw_confidence=1.0, similarity_weight=1.0, learning_rate=0.1, optimizer="MomentumOptimizer", momentum=0.01, decay=0.01, sign_gradients=sign_gradients, random_size=2, max_iter=2, texture_as_input=True, use_spectral=use_spectral, soft_clip=soft_clip, ) # Define rendering function def rendering_function( background_phd, image_frame_phd, current_texture ): current_image = background_phd + current_texture current_image = tf.clip_by_value(current_image, 0, 1) return current_image # Targeted attack adv_x = attack.generate( x=self.x_test_mnist[:1].astype(np.float32), label=y, target_class=2, victim_class=5, rendering_function=rendering_function, ) self.assertTrue(adv_x.shape == (1, 28, 28, 1)) # Untargeted attack adv_x = attack.generate( x=self.x_test_mnist[:1].astype(np.float32), label=y, target_class=8, victim_class=8, rendering_function=rendering_function, ) self.assertTrue(adv_x.shape == (1, 28, 28, 1))
["def","_test_texture_as_input","(","self",",","sign_gradients",",","use_spectral",",","soft_clip",")",":","#","We","must","start","a","new","graph","tf.reset_default_graph","(",")","#","Only","import","if","object","detection","module","is","available","from","art.estimators.object_detection.tensorflow_faster_rcnn","import","(","TensorFlowFasterRCNN",",",")","from","art.attacks.evasion.shapeshifter","import","ShapeShifter","#","Define","object","detector","images","=","tf.Variable","(","initial_value=np.zeros","(","[","1",",","28",",","28",",","1","]",")",",","dtype=tf.float32",")","obj_dec","=","TensorFlowFasterRCNN","(","images=images",")","#","Create","labels","result","=","obj_dec.predict","(","self.x_test_mnist","[",":1","]",".astype","(","np.float32",")",")","groundtruth_boxes_list","=","[","result","[","i","]","[","``","boxes","''","]","for","i","in","range","(","1",")","]","groundtruth_classes_list","=","[","result","[","i","]","[","``","labels","''","]","for","i","in","range","(","1",")","]","groundtruth_weights_list","=","[","np.ones_like","(","r",")","for","r","in","groundtruth_classes_list","]","y","=","{","``","groundtruth_boxes_list","''",":","groundtruth_boxes_list",",","``","groundtruth_classes_list","''",":","groundtruth_classes_list",",","``","groundtruth_weights_list","''",":","groundtruth_weights_list",",","}","#","Define","random","transform","def","random_transform","(","x",")",":","background","=","np.random.rand","(","*","x.shape",")","image_frame","=","np.random.rand","(","*","(","list","(","x.shape","[",":","-1","]",")","+","[","4","]",")",")","y_","=","y.copy","(",")","y_","[","``","groundtruth_boxes_list","''","]","[","0","]","=","(","y_","[","``","groundtruth_boxes_list","''","]","[","0","]","+","np.random.rand","(",")",")","y_","[","``","groundtruth_weights_list","''","]","[","0","]","=","(","y_","[","``","groundtruth_weights_list","''","]","[","0","]","+","np.random.rand","(",")",")","return","background",",","image_frame",",","y_","#","Define","attack","attack","=","ShapeShifter","(","estimator=obj_dec",",","random_transform=random_transform",",","box_classifier_weight=1.0",",","box_localizer_weight=1.0",",","rpn_classifier_weight=1.0",",","rpn_localizer_weight=1.0",",","box_iou_threshold=0.3",",","box_victim_weight=1.0",",","box_target_weight=1.0",",","box_victim_cw_weight=1.0",",","box_victim_cw_confidence=1.0",",","box_target_cw_weight=1.0",",","box_target_cw_confidence=1.0",",","rpn_iou_threshold=0.3",",","rpn_background_weight=1.0",",","rpn_foreground_weight=1.0",",","rpn_cw_weight=1.0",",","rpn_cw_confidence=1.0",",","similarity_weight=1.0",",","learning_rate=0.1",",","optimizer=","''","MomentumOptimizer","''",",","momentum=0.01",",","decay=0.01",",","sign_gradients=sign_gradients",",","random_size=2",",","max_iter=2",",","texture_as_input=True",",","use_spectral=use_spectral",",","soft_clip=soft_clip",",",")","#","Define","rendering","function","def","rendering_function","(","background_phd",",","image_frame_phd",",","current_texture",")",":","current_image","=","background_phd","+","current_texture","current_image","=","tf.clip_by_value","(","current_image",",","0",",","1",")","return","current_image","#","Targeted","attack","adv_x","=","attack.generate","(","x=self.x_test_mnist","[",":1","]",".astype","(","np.float32",")",",","label=y",",","target_class=2",",","victim_class=5",",","rendering_function=rendering_function",",",")","self.assertTrue","(","adv_x.shape","==","(","1",",","28",",","28",",","1",")",")","#","Untargeted","attack","adv_x","=","a
ttack.generate","(","x=self.x_test_mnist","[",":1","]",".astype","(","np.float32",")",",","label=y",",","target_class=8",",","victim_class=8",",","rendering_function=rendering_function",",",")","self.assertTrue","(","adv_x.shape","==","(","1",",","28",",","28",",","1",")",")"]
139
235
null
test_shapeshifter.py
adversarial-robustness-toolbox/tests/attacks/test_shapeshifter.py
from __future__ import absolute_import, division, print_function, unicode_literals import logging import unittest import importlib import tensorflow import numpy from tests.utils import TestBase, master_seed
15
1
7
0
1
6
1
Use image node_id 5 for calling the TestShapeShifter obj's underlying member method code with example usage: obj._test_texture_as_input(sign_gradients, use_spectral, soft_clip) and returns: background, image_frame, y_, current_image
234
node_id 5
234,965
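The test above wires two callbacks into ShapeShifter when `texture_as_input=True`: a random transform that jitters the scene and labels, and a rendering function that composites the adversarial texture. The numpy-only sketch below illustrates just that callback contract; shapes, the label jitter and all names are placeholders, and the real test operates on TensorFlow tensors.

```python
import numpy as np


def random_transform(x: np.ndarray, y: dict) -> tuple:
    # Produce a random background, a 4-channel image frame and jittered labels.
    background = np.random.rand(*x.shape)
    image_frame = np.random.rand(*(list(x.shape[:-1]) + [4]))
    y_jittered = dict(y)
    y_jittered["groundtruth_boxes_list"] = [y["groundtruth_boxes_list"][0] + np.random.rand()]
    y_jittered["groundtruth_weights_list"] = [y["groundtruth_weights_list"][0] + np.random.rand()]
    return background, image_frame, y_jittered


def rendering_function(background: np.ndarray, texture: np.ndarray) -> np.ndarray:
    # Composite the texture over the background and clip to [0, 1], matching
    # the tf.clip_by_value call in the test's rendering_function.
    return np.clip(background + texture, 0.0, 1.0)


x = np.zeros((1, 28, 28, 1), dtype=np.float32)
y = {
    "groundtruth_boxes_list": [np.zeros((1, 4), dtype=np.float32)],
    "groundtruth_classes_list": [np.zeros((1,), dtype=np.int64)],
    "groundtruth_weights_list": [np.ones((1,), dtype=np.float32)],
}
background, frame, y_jittered = random_transform(x, y)
rendered = rendering_function(background, np.random.rand(*x.shape))
assert rendered.shape == x.shape
```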
test_check_params
TestShapeShifter
TestBase
true
self
null
null
null
null
null
def test_check_params(self): from art.estimators.object_detection import TensorFlowFasterRCNN from art.attacks.evasion import ShapeShifter images = tf.Variable( initial_value=np.zeros([1, 28, 28, 1]), dtype=tf.float32 ) obj_dec = TensorFlowFasterRCNN(images=images) with self.assertRaises(ValueError): _ = ShapeShifter(obj_dec, random_transform="1") with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_classifier_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_classifier_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_localizer_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_localizer_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_classifier_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_classifier_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_localizer_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_localizer_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_iou_threshold=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_iou_threshold=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_victim_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_victim_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_target_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_target_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_victim_cw_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_victim_cw_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_victim_cw_confidence=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_victim_cw_confidence=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_target_cw_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_target_cw_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_target_cw_confidence=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, box_target_cw_confidence=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_iou_threshold=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_iou_threshold=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, 
rpn_background_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_background_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_foreground_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_foreground_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_cw_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_cw_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_cw_confidence=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, rpn_cw_confidence=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, similarity_weight=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, similarity_weight=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, learning_rate=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, learning_rate=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, optimizer="test", ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, optimizer="MomentumOptimizer", momentum=1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, optimizer="MomentumOptimizer", momentum=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, optimizer="RMSPropOptimizer", momentum=0.5, decay="1", ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, optimizer="RMSPropOptimizer", momentum=0.5, decay=-1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, optimizer="RMSPropOptimizer", momentum=0.5, decay=2.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, sign_gradients="true", ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, random_size=1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, random_size=-1, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, max_iter=1.0, ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, max_iter=-1 ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, texture_as_input="true", ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, use_spectral="true", ) with self.assertRaises(ValueError): _ = ShapeShifter( obj_dec, random_transform=lambda x: x + 1e-10, soft_clip="true", )
["def","test_check_params","(","self",")",":","from","art.estimators.object_detection","import","TensorFlowFasterRCNN","from","art.attacks.evasion","import","ShapeShifter","images","=","tf.Variable","(","initial_value=np.zeros","(","[","1",",","28",",","28",",","1","]",")",",","dtype=tf.float32",")","obj_dec","=","TensorFlowFasterRCNN","(","images=images",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=","''","1","''",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_classifier_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_classifier_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_localizer_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_localizer_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_classifier_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_classifier_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_localizer_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_localizer_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_iou_threshold=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_iou_threshold=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_victim_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_victim_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_target_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_target_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_victim_cw_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_victim_cw_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_victim_cw_confidence
=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_victim_cw_confidence=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_target_cw_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_target_cw_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_target_cw_confidence=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","box_target_cw_confidence=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_iou_threshold=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_iou_threshold=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_background_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_background_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_foreground_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_foreground_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_cw_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_cw_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_cw_confidence=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","rpn_cw_confidence=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","similarity_weight=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","similarity_weight=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","learning_rate=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","learning_rate=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",
":","x","+","1e-10",",","optimizer=","''","test","''",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","optimizer=","''","MomentumOptimizer","''",",","momentum=1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","optimizer=","''","MomentumOptimizer","''",",","momentum=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","optimizer=","''","RMSPropOptimizer","''",",","momentum=0.5",",","decay=","''","1","''",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","optimizer=","''","RMSPropOptimizer","''",",","momentum=0.5",",","decay=-1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","optimizer=","''","RMSPropOptimizer","''",",","momentum=0.5",",","decay=2.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","sign_gradients=","''","true","''",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","random_size=1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","random_size=-1",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","max_iter=1.0",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","max_iter=-1",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","texture_as_input=","''","true","''",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","use_spectral=","''","true","''",",",")","with","self.assertRaises","(","ValueError",")",":","_","=","ShapeShifter","(","obj_dec",",","random_transform=lambda","x",":","x","+","1e-10",",","soft_clip=","''","true","''",",",")"]
237
380
null
test_shapeshifter.py
adversarial-robustness-toolbox/tests/attacks/test_shapeshifter.py
from __future__ import absolute_import, division, print_function, unicode_literals import logging import unittest import importlib import tensorflow import numpy from tests.utils import TestBase, master_seed
15
1
7
0
1
6
1
Use image node_id 6 for calling the TestShapeShifter obj's underlying member method code with example usage: obj.test_check_params() without return types
153
node_id 6
234,966
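The test above exercises one validation rule per `assertRaises` block. As a compact illustration of that pattern, here is a hypothetical toy class with two validated parameters and the corresponding unittest checks; it stands in for the attack only to show the structure of the test.

```python
import unittest


class ToyAttack:
    """Hypothetical stand-in with ShapeShifter-style parameter validation."""

    def __init__(self, box_iou_threshold: float = 0.3, learning_rate: float = 0.1):
        if not isinstance(box_iou_threshold, float) or not 0.0 <= box_iou_threshold <= 1.0:
            raise ValueError("box_iou_threshold must be a float in [0, 1].")
        if not isinstance(learning_rate, float) or learning_rate <= 0.0:
            raise ValueError("learning_rate must be a positive float.")


class TestToyAttackParams(unittest.TestCase):
    def test_check_params(self):
        with self.assertRaises(ValueError):
            ToyAttack(box_iou_threshold=1)      # int instead of float
        with self.assertRaises(ValueError):
            ToyAttack(box_iou_threshold=-1.0)   # outside [0, 1]
        with self.assertRaises(ValueError):
            ToyAttack(learning_rate=-1.0)       # not positive


if __name__ == "__main__":
    unittest.main()
```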
do_measure
global
null
false
opts
null
null
null
null
null
def do_measure(opts): name = opts["-d"] _log.info("reading data %s", name) test = pd.read_parquet(f"data/{name}-test.parquet") recs = pd.read_parquet(f"data/{name}-recs.parquet") _log.info("setting up analysis") rla = RecListAnalysis() rla.add_metric(ndcg) rla.add_metric(recip_rank) timer = Stopwatch() results = rla.compute(recs, test, include_missing=True) _log.info("analyzed in %s", timer) results = results.fillna(0) a_res = results.groupby("Algorithm").mean() a_res["count"] = results.groupby("Algorithm")["nrecs"].count() _log.info("finished") print(a_res) print(results.groupby("Algorithm")["recip_rank"].describe())
["def","do_measure","(","opts",")",":","name","=","opts","[","``","-d","''","]","_log.info","(","``","reading","data","%","s","''",",","name",")","test","=","pd.read_parquet","(","f","''","data\/","{","name","}","-test.parquet","''",")","recs","=","pd.read_parquet","(","f","''","data\/","{","name","}","-recs.parquet","''",")","_log.info","(","``","setting","up","analysis","''",")","rla","=","RecListAnalysis","(",")","rla.add_metric","(","ndcg",")","rla.add_metric","(","recip_rank",")","timer","=","Stopwatch","(",")","results","=","rla.compute","(","recs",",","test",",","include_missing=True",")","_log.info","(","``","analyzed","in","%","s","''",",","timer",")","results","=","results.fillna","(","0",")","a_res","=","results.groupby","(","``","Algorithm","''",")",".mean","(",")","a_res","[","``","count","''","]","=","results.groupby","(","``","Algorithm","''",")","[","``","nrecs","''","]",".count","(",")","_log.info","(","``","finished","''",")","print","(","a_res",")","print","(","results.groupby","(","``","Algorithm","''",")","[","``","recip_rank","''","]",".describe","(",")",")"]
65
86
null
rla-perf.py
lkpy/utils/rla-perf.py
import sys import logging import tqdm from docopt import docopt import pandas from lenskit.datasets import MovieLens from lenskit.util import Stopwatch from lenskit.batch import recommend from lenskit.crossfold import sample_users, SampleN from lenskit.algorithms.basic import Popular from lenskit.algorithms import Recommender from lenskit.algorithms.als import ImplicitMF from lenskit.topn import RecListAnalysis, ndcg, recip_rank
15
null
13
2
null
null
null
Use image node_id 2 for calling a global function with example usage: do_measure(opts) without return types
107
node_id 2
1,270,200
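The pandas-only sketch below reproduces the aggregation step at the end of `do_measure` on a small made-up metric frame; the RecListAnalysis computation itself is assumed to have already produced per-list `ndcg` and `recip_rank` columns.

```python
import pandas as pd

# Hypothetical per-list results of the shape RecListAnalysis.compute() returns.
results = pd.DataFrame({
    "Algorithm": ["Pop", "Pop", "ALS", "ALS"],
    "user": [1, 2, 1, 2],
    "nrecs": [100, 100, 100, 100],
    "ndcg": [0.12, 0.08, 0.21, 0.18],
    "recip_rank": [0.50, 0.00, 1.00, 0.33],
}).fillna(0)

# Same aggregation as do_measure: per-algorithm means plus a list count.
a_res = results.groupby("Algorithm")[["ndcg", "recip_rank"]].mean()
a_res["count"] = results.groupby("Algorithm")["nrecs"].count()
print(a_res)
print(results.groupby("Algorithm")["recip_rank"].describe())
```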
__init__
EntitySievesConfiguration
SievesConfiguration
true
self
null
null
null
null
EntitySievesConfiguration
def __init__(self): super(EntitySievesConfiguration, self).__init__() self.run_evaluation = True self.sieves_order = [ (RelationType.SAME_HEAD_LEMMA, 1.0), (RelationType.EXACT_STRING, 1.0), (RelationType.FUZZY_FIT, 1.0), (RelationType.WIKIPEDIA_REDIRECT_LINK, 0.1), (RelationType.WIKIPEDIA_DISAMBIGUATION, 0.1), (RelationType.WORD_EMBEDDING_MATCH, 0.7), (RelationType.WORDNET_PARTIAL_SYNSET_MATCH, 0.1), (RelationType.FUZZY_HEAD_FIT, 0.5), (RelationType.WIKIPEDIA_CATEGORY, 0.1), (RelationType.WITHIN_DOC_COREF, 1.0), (RelationType.WIKIPEDIA_BE_COMP, 0.1), (RelationType.WIKIPEDIA_TITLE_PARENTHESIS, 0.1), (RelationType.WORDNET_SAME_SYNSET, 1.0), (RelationType.REFERENT_DICT, 0.5), ]
["def","__init__","(","self",")",":","super","(","EntitySievesConfiguration",",","self",")",".__init__","(",")","self.run_evaluation","=","True","self.sieves_order","=","[","(","RelationType.SAME_HEAD_LEMMA",",","1.0",")",",","(","RelationType.EXACT_STRING",",","1.0",")",",","(","RelationType.FUZZY_FIT",",","1.0",")",",","(","RelationType.WIKIPEDIA_REDIRECT_LINK",",","0.1",")",",","(","RelationType.WIKIPEDIA_DISAMBIGUATION",",","0.1",")",",","(","RelationType.WORD_EMBEDDING_MATCH",",","0.7",")",",","(","RelationType.WORDNET_PARTIAL_SYNSET_MATCH",",","0.1",")",",","(","RelationType.FUZZY_HEAD_FIT",",","0.5",")",",","(","RelationType.WIKIPEDIA_CATEGORY",",","0.1",")",",","(","RelationType.WITHIN_DOC_COREF",",","1.0",")",",","(","RelationType.WIKIPEDIA_BE_COMP",",","0.1",")",",","(","RelationType.WIKIPEDIA_TITLE_PARENTHESIS",",","0.1",")",",","(","RelationType.WORDNET_SAME_SYNSET",",","1.0",")",",","(","RelationType.REFERENT_DICT",",","0.5",")",",","]"]
82
102
null
sieves_config.py
nlp-architect/nlp_architect/models/cross_doc_coref/sieves_config.py
from typing import List, Tuple from nlp_architect.data.cdc_resources.relations.relation_types_enums import RelationType
15
3
2
0
3
1
1
Use image node_id 1 to create a new EntitySievesConfiguration object from inherited base classes: SievesConfiguration with example: obj = EntitySievesConfiguration()
165
node_id 1
1,443,099
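The configuration above is essentially an ordered list of (relation type, confidence) pairs that the coreference sieves run through in sequence. A framework-free sketch of that structure, with an invented enum standing in for RelationType and illustrative weights:

```python
from enum import Enum, auto


class Relation(Enum):
    # Hypothetical subset of RelationType, just to show the shape of the data.
    SAME_HEAD_LEMMA = auto()
    EXACT_STRING = auto()
    WORD_EMBEDDING_MATCH = auto()
    REFERENT_DICT = auto()


# Sieves run in list order; the float is the confidence attached to each sieve.
sieves_order = [
    (Relation.SAME_HEAD_LEMMA, 1.0),
    (Relation.EXACT_STRING, 1.0),
    (Relation.WORD_EMBEDDING_MATCH, 0.7),
    (Relation.REFERENT_DICT, 0.5),
]

for relation, confidence in sieves_order:
    print(f"apply sieve {relation.name} with confidence {confidence}")
```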
get_boutiques_output_from_inp
global
null
false
inputs,inp_spec,inp_name
null
null
null
null
output
def get_boutiques_output_from_inp(inputs, inp_spec, inp_name): """ Takes a Nipype input representing an output file and generates a Boutiques output for it """ output = {} output["name"] = inp_name.replace("_", " ").capitalize() output["id"] = inp_name output["optional"] = True output["description"] = get_description_from_spec( inputs, inp_name, inp_spec ) if not (hasattr(inp_spec, "mandatory") and inp_spec.mandatory): output["optional"] = True else: output["optional"] = False if inp_spec.usedefault: output["default-value"] = inp_spec.default_value()[1] if isinstance(inp_spec.name_source, list): source = inp_spec.name_source[0] else: source = inp_spec.name_source output["path-template"] = inp_spec.name_template.replace( "%s", "[" + source.upper() + "]" ) output["value-key"] = "[" + inp_name.upper() + "]" flag, flag_sep = get_command_line_flag(inp_spec) if flag is not None: output["command-line-flag"] = flag if flag_sep is not None: output["command-line-flag-separator"] = flag_sep return output
["def","get_boutiques_output_from_inp","(","inputs",",","inp_spec",",","inp_name",")",":","``","''","''","Takes","a","Nipype","input","representing","an","output","file","and","generates","a","Boutiques","output","for","it","``","''","''","output","=","{","}","output","[","``","name","''","]","=","inp_name.replace","(","``","_","''",",","``","``",")",".capitalize","(",")","output","[","``","id","''","]","=","inp_name","output","[","``","optional","''","]","=","True","output","[","``","description","''","]","=","get_description_from_spec","(","inputs",",","inp_name",",","inp_spec",")","if","not","(","hasattr","(","inp_spec",",","``","mandatory","''",")","and","inp_spec.mandatory",")",":","output","[","``","optional","''","]","=","True","else",":","output","[","``","optional","''","]","=","False","if","inp_spec.usedefault",":","output","[","``","default-value","''","]","=","inp_spec.default_value","(",")","[","1","]","if","isinstance","(","inp_spec.name_source",",","list",")",":","source","=","inp_spec.name_source","[","0","]","else",":","source","=","inp_spec.name_source","output","[","``","path-template","''","]","=","inp_spec.name_template.replace","(","``","%","s","''",",","``","[","``","+","source.upper","(",")","+","``","]","''",")","output","[","``","value-key","''","]","=","``","[","``","+","inp_name.upper","(",")","+","``","]","''","flag",",","flag_sep","=","get_command_line_flag","(","inp_spec",")","if","flag","is","not","None",":","output","[","``","command-line-flag","''","]","=","flag","if","flag_sep","is","not","None",":","output","[","``","command-line-flag-separator","''","]","=","flag_sep","return","output"]
637
666
null
nipype2boutiques.py
nipype/nipype/utils/nipype2boutiques.py
import os import sys import simplejson from ..scripts.instance import import_module
15
null
4
11
null
null
null
Use image node_id 11 for calling a global function with example usage: get_boutiques_output_from_inp(inputs, inp_spec, inp_name) and returns: output
148
node_id 11
1,442,053
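To make the field-by-field translation easier to follow, the sketch below walks a mock, Nipype-like spec through the same steps and prints the resulting Boutiques output dict; the spec values and the `out_file` name are invented.

```python
from types import SimpleNamespace

# Hypothetical output-file input spec with a name_source/name_template pair.
inp_name = "out_file"
inp_spec = SimpleNamespace(
    mandatory=False,
    usedefault=False,
    name_source=["in_file"],
    name_template="%s_brain.nii.gz",
)

output = {
    "name": inp_name.replace("_", " ").capitalize(),
    "id": inp_name,
    "optional": not inp_spec.mandatory,
    "value-key": "[" + inp_name.upper() + "]",
    # "%s" in the template is replaced by the upper-cased source input id.
    "path-template": inp_spec.name_template.replace(
        "%s", "[" + inp_spec.name_source[0].upper() + "]"
    ),
}
print(output)  # path-template becomes "[IN_FILE]_brain.nii.gz"
```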
_ids
global
null
false
values
null
null
null
null
unknown
def _ids(values: Sequence[_Metadata]) -> Sequence[int]: return [v.id for v in values]
["def","_ids","(","values",":","Sequence","[","_Metadata","]",")","-",">","Sequence","[","int","]",":","return","[","v.id","for","v","in","values","]"]
36
37
null
store_ext.py
tfx/tfx/orchestration/portable/mlmd/store_ext.py
import collections import itertools from typing import Callable, Mapping, Optional, Sequence, Union from tfx.dsl.compiler import compiler_utils from tfx.dsl.compiler import constants from tfx.orchestration.experimental.core import constants from tfx.orchestration.portable.mlmd import event_lib from tfx.orchestration.portable.mlmd import filter_query_builder import ml_metadata
15
null
9
6
null
null
null
Use image node_id 1 for calling a global function with example usage: _ids(values) and returns: unknown
103
node_id 1
2,198,855
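A tiny usage sketch for the helper above, with a dataclass standing in for the MLMD metadata objects (only the `id` attribute matters):

```python
from dataclasses import dataclass
from typing import Sequence


@dataclass
class FakeMetadata:
    id: int


def ids(values: Sequence[FakeMetadata]) -> Sequence[int]:
    return [v.id for v in values]


print(ids([FakeMetadata(3), FakeMetadata(7)]))  # [3, 7]
```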
setup
global
null
false
app
null
null
null
null
dict
def setup(app: Sphinx) -> dict[str, Any]: app.add_post_transform(Converter) return { "version": "builtin", "parallel_read_safe": True, "parallel_write_safe": True, }
["def","setup","(","app",":","Sphinx",")","-",">","dict","[","str",",","Any","]",":","app.add_post_transform","(","Converter",")","return","{","``","version","''",":","``","builtin","''",",","``","parallel_read_safe","''",":","True",",","``","parallel_write_safe","''",":","True",",","}"]
92
99
null
convert-svg-to-pdf.py
sympy/doc/ext/convert-svg-to-pdf.py
from __future__ import annotations from sphinx.transforms.post_transforms.images import ImageConverter from sphinx.util import logging import os import platform from typing import Any from sphinx.application import Sphinx
15
null
7
1
null
null
null
Use image node_id 1 for calling a global function with example usage: setup(app) and returns: dict
98
node_id 1
2,029,279
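Below is a hedged, self-contained sketch of the same registration pattern: a do-nothing ImageConverter subclass (an assumption, not the real converter) registered through `setup`, which returns the usual parallel-safety metadata dict.

```python
from __future__ import annotations

from typing import Any

from sphinx.application import Sphinx
from sphinx.transforms.post_transforms.images import ImageConverter


class NoopConverter(ImageConverter):
    # Placeholder converter used only to show the registration pattern.
    conversion_rules = [("image/svg+xml", "application/pdf")]

    def is_available(self) -> bool:
        return False  # never selected, so convert() is never called


def setup(app: Sphinx) -> dict[str, Any]:
    app.add_post_transform(NoopConverter)
    return {
        "version": "builtin",
        "parallel_read_safe": True,
        "parallel_write_safe": True,
    }
```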
__init__
SearchSpaceToChoice
Transform
true
self,search_space,observations,modelbridge,config
Replaces the search space with a single choice parameter, whose values are the signatures of the arms observed in the data. This transform is meant to be used with ThompsonSampler. Choice parameter will be unordered unless config["use_ordered"] specifies otherwise. Transform is done in-place.
["Replaces","the","search","space","with","a","single","choice","parameter",",","whose","values","are","the","signatures","of","the","arms","observed","in","the","data",".","This","transform","is","meant","to","be","used","with","ThompsonSampler",".","Choice","parameter","will","be","unordered","unless","config","[","``","use_ordered","''","]","specifies","otherwise",".","Transform","is","done","in-place","."]
null
null
SearchSpaceToChoice
def __init__( self, search_space: Optional[SearchSpace] = None, observations: Optional[List[Observation]] = None, modelbridge: Optional[ "modelbridge_module.base.ModelBridge" ] = None, config: Optional[TConfig] = None, ) -> None: assert ( search_space is not None ), "SearchSpaceToChoice requires search space" assert ( observations is not None ), "SeachSpaceToChoice requires observations" super().__init__( search_space=search_space, observations=observations, config=config, ) if any(p.is_fidelity for p in search_space.parameters.values()): raise ValueError( "Cannot perform SearchSpaceToChoice conversion if fidelity " "parameters are present" ) if isinstance(search_space, RobustSearchSpace): raise UnsupportedError( "SearchSpaceToChoice transform is not supported for RobustSearchSpace." ) self.parameter_name = "arms" # pyre-fixme[4]: Attribute must be annotated. self.signature_to_parameterization = { Arm( parameters=obs.features.parameters ).signature: obs.features.parameters for obs in observations }
["def","__init__","(","self",",","search_space",":","Optional","[","SearchSpace","]","=","None",",","observations",":","Optional","[","List","[","Observation","]","]","=","None",",","modelbridge",":","Optional","[","``","modelbridge_module.base.ModelBridge","''","]","=","None",",","config",":","Optional","[","TConfig","]","=","None",",",")","-",">","None",":","assert","(","search_space","is","not","None",")",",","``","SearchSpaceToChoice","requires","search","space","''","assert","(","observations","is","not","None",")",",","``","SeachSpaceToChoice","requires","observations","''","super","(",")",".__init__","(","search_space=search_space",",","observations=observations",",","config=config",",",")","if","any","(","p.is_fidelity","for","p","in","search_space.parameters.values","(",")",")",":","raise","ValueError","(","``","Can","not","perform","SearchSpaceToChoice","conversion","if","fidelity","``","``","parameters","are","present","''",")","if","isinstance","(","search_space",",","RobustSearchSpace",")",":","raise","UnsupportedError","(","``","SearchSpaceToChoice","transform","is","not","supported","for","RobustSearchSpace",".","''",")","self.parameter_name","=","``","arms","''","#","pyre-fixme","[","4","]",":","Attribute","must","be","annotated",".","self.signature_to_parameterization","=","{","Arm","(","parameters=obs.features.parameters",")",".signature",":","obs.features.parameters","for","obs","in","observations","}"]
35
63
null
search_space_to_choice.py
Ax/ax/modelbridge/transforms/search_space_to_choice.py
from typing import List, Optional, TYPE_CHECKING from ax.core.arm import Arm from ax.core.observation import Observation, ObservationFeatures from ax.core.parameter import ChoiceParameter, FixedParameter, ParameterType from ax.core.search_space import RobustSearchSpace, SearchSpace from ax.exceptions.core import UnsupportedError from ax.modelbridge.transforms.base import Transform from ax.models.types import TConfig from ax.utils.common.typeutils import checked_cast
15
1
9
0
1
4
1
Use image node_id 1 to create a new SearchSpaceToChoice object from inherited base classes: Transform with example: obj = SearchSpaceToChoice(search_space, observations, modelbridge, config)
190
node_id 1
9,096
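Stripped of the Ax machinery, the core bookkeeping in this transform is a map from arm signatures to parameterizations, plus a single choice over those signatures. The sketch below imitates that with plain dicts; the sorted-JSON hash is only a stand-in for Ax's own arm signature.

```python
import hashlib
import json

# Hypothetical observed arms (parameterizations).
observed_arms = [
    {"x1": 0.1, "activation": "relu"},
    {"x1": 0.7, "activation": "tanh"},
]


def signature(parameters: dict) -> str:
    # Stand-in for Arm(...).signature: a stable hash of the parameters.
    return hashlib.md5(json.dumps(parameters, sort_keys=True).encode()).hexdigest()


signature_to_parameterization = {signature(p): p for p in observed_arms}

# The transformed search space is one unordered choice over the signatures;
# "untransforming" a chosen signature recovers the original parameters.
choice_values = list(signature_to_parameterization)
chosen = choice_values[0]
print(signature_to_parameterization[chosen])
```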
cs_diff
global
null
false
x,a,b,period,_cache
null
null
null
null
convolve,unknown,int,unknown
def cs_diff(x, a, b, period=None, _cache=_cache): """ Return (a,b)-cosh/sinh pseudo-derivative of a periodic sequence. If ``x_j`` and ``y_j`` are Fourier coefficients of periodic functions x and y, respectively, then:: y_j = -sqrt(-1)*cosh(j*a*2*pi/period)/sinh(j*b*2*pi/period) * x_j y_0 = 0 Parameters ---------- x : array_like The array to take the pseudo-derivative from. a, b : float Defines the parameters of the cosh/sinh pseudo-differential operator. period : float, optional The period of the sequence. Default period is ``2*pi``. Returns ------- cs_diff : ndarray Pseudo-derivative of periodic sequence `x`. Notes ----- For even len(`x`), the Nyquist mode of `x` is taken as zero. """ tmp = asarray(x) if iscomplexobj(tmp): return cs_diff(tmp.real, a, b, period) + 1j * cs_diff( tmp.imag, a, b, period ) if period is not None: a = a * 2 * pi / period b = b * 2 * pi / period n = len(x) omega = _cache.get((n, a, b)) if omega is None: if len(_cache) > 20: while _cache: _cache.popitem() def kernel(k, a=a, b=b): if k: return -cosh(a * k) / sinh(b * k) return 0 omega = convolve.init_convolution_kernel(n, kernel, d=1) _cache[(n, a, b)] = omega overwrite_x = _datacopied(tmp, x) return convolve.convolve( tmp, omega, swap_real_imag=1, overwrite_x=overwrite_x )
["def","cs_diff","(","x",",","a",",","b",",","period=None",",","_cache=_cache",")",":","``","''","''","Return","(","a",",","b",")","-cosh\/sinh","pseudo-derivative","of","a","periodic","sequence",".","If","``","x_j","``","and","``","y_j","``","are","Fourier","coefficients","of","periodic","functions","x","and","y",",","respectively",",","then",":",":","y_j","=","-sqrt","(","-1",")","*","cosh","(","j","*","a","*","2","*","pi\/period",")","\/sinh","(","j","*","b","*","2","*","pi\/period",")","*","x_j","y_0","=","0","Parameters","--","--","--","--","--","x",":","array_like","The","array","to","take","the","pseudo-derivative","from",".","a",",","b",":","float","Defines","the","parameters","of","the","cosh\/sinh","pseudo-differential","operator",".","period",":","float",",","optional","The","period","of","the","sequence",".","Default","period","is","``","2","*","pi","``",".","Returns","--","--","--","-","cs_diff",":","ndarray","Pseudo-derivative","of","periodic","sequence","`","x","`",".","Notes","--","--","-","For","even","len","(","`","x","`",")",",","the","Nyquist","mode","of","`","x","`","is","taken","as","zero.","``","''","''","tmp","=","asarray","(","x",")","if","iscomplexobj","(","tmp",")",":","return","cs_diff","(","tmp.real",",","a",",","b",",","period",")","+","1j","*","cs_diff","(","tmp.imag",",","a",",","b",",","period",")","if","period","is","not","None",":","a","=","a","*","2","*","pi","\/","period","b","=","b","*","2","*","pi","\/","period","n","=","len","(","x",")","omega","=","_cache.get","(","(","n",",","a",",","b",")",")","if","omega","is","None",":","if","len","(","_cache",")",">","20",":","while","_cache",":","_cache.popitem","(",")","def","kernel","(","k",",","a=a",",","b=b",")",":","if","k",":","return","-cosh","(","a","*","k",")","\/","sinh","(","b","*","k",")","return","0","omega","=","convolve.init_convolution_kernel","(","n",",","kernel",",","d=1",")","_cache","[","(","n",",","a",",","b",")","]","=","omega","overwrite_x","=","_datacopied","(","tmp",",","x",")","return","convolve.convolve","(","tmp",",","omega",",","swap_real_imag=1",",","overwrite_x=overwrite_x",")"]
282
333
null
pseudo_diffs.py
catboost/contrib/python/scipy/py2/scipy/fftpack/pseudo_diffs.py
from __future__ import division, print_function, absolute_import from numpy import pi, asarray, sin, cos, sinh, cosh, tanh, iscomplexobj from .None import convolve from scipy.fftpack.basic import _datacopied import atexit
15
null
5
10
null
null
null
Use image node_id 6 for calling a global function with example usage: cs_diff(x, a, b, period, _cache) and returns: convolve, unknown, int, unknown
147
node_id 6
523,402
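As an illustration of the multiplier defined in the docstring, the numpy sketch below applies y_j = -i*cosh(j*a*2*pi/period)/sinh(j*b*2*pi/period)*x_j with y_0 = 0 directly via the FFT; it deliberately omits scipy's kernel caching and its special handling of the Nyquist mode for even-length inputs.

```python
import numpy as np


def cs_diff_fft(x, a, b, period=2 * np.pi):
    x = np.asarray(x, dtype=float)
    n = len(x)
    j = np.fft.fftfreq(n, d=1.0 / n)          # integer mode numbers 0, 1, ..., -1
    scale = 2 * np.pi / period
    mult = np.zeros(n, dtype=complex)
    nz = j != 0
    mult[nz] = -1j * np.cosh(j[nz] * a * scale) / np.sinh(j[nz] * b * scale)
    return np.fft.ifft(mult * np.fft.fft(x)).real


t = np.linspace(0, 2 * np.pi, 9, endpoint=False)  # odd length avoids the Nyquist caveat
print(cs_diff_fft(np.sin(t), a=1.0, b=1.0))
```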
hilbert
global
null
false
x,_cache
null
null
null
null
convolve,unknown,int,int,unknown
def hilbert(x, _cache=_cache): """ Return Hilbert transform of a periodic sequence x. If x_j and y_j are Fourier coefficients of periodic functions x and y, respectively, then:: y_j = sqrt(-1)*sign(j) * x_j y_0 = 0 Parameters ---------- x : array_like The input array, should be periodic. _cache : dict, optional Dictionary that contains the kernel used to do a convolution with. Returns ------- y : ndarray The transformed input. See Also -------- scipy.signal.hilbert : Compute the analytic signal, using the Hilbert transform. Notes ----- If ``sum(x, axis=0) == 0`` then ``hilbert(ihilbert(x)) == x``. For even len(x), the Nyquist mode of x is taken zero. The sign of the returned transform does not have a factor -1 that is more often than not found in the definition of the Hilbert transform. Note also that `scipy.signal.hilbert` does have an extra -1 factor compared to this function. """ tmp = asarray(x) if iscomplexobj(tmp): return hilbert(tmp.real) + 1j * hilbert(tmp.imag) n = len(x) omega = _cache.get(n) if omega is None: if len(_cache) > 20: while _cache: _cache.popitem() def kernel(k): if k > 0: return 1.0 elif k < 0: return -1.0 return 0.0 omega = convolve.init_convolution_kernel(n, kernel, d=1) _cache[n] = omega overwrite_x = _datacopied(tmp, x) return convolve.convolve( tmp, omega, swap_real_imag=1, overwrite_x=overwrite_x )
["def","hilbert","(","x",",","_cache=_cache",")",":","``","''","''","Return","Hilbert","transform","of","a","periodic","sequence","x",".","If","x_j","and","y_j","are","Fourier","coefficients","of","periodic","functions","x","and","y",",","respectively",",","then",":",":","y_j","=","sqrt","(","-1",")","*","sign","(","j",")","*","x_j","y_0","=","0","Parameters","--","--","--","--","--","x",":","array_like","The","input","array",",","should","be","periodic",".","_cache",":","dict",",","optional","Dictionary","that","contains","the","kernel","used","to","do","a","convolution","with",".","Returns","--","--","--","-","y",":","ndarray","The","transformed","input",".","See","Also","--","--","--","--","scipy.signal.hilbert",":","Compute","the","analytic","signal",",","using","the","Hilbert","transform",".","Notes","--","--","-","If","``","sum","(","x",",","axis=0",")","==","0","``","then","``","hilbert","(","ihilbert","(","x",")",")","==","x","``",".","For","even","len","(","x",")",",","the","Nyquist","mode","of","x","is","taken","zero",".","The","sign","of","the","returned","transform","does","not","have","a","factor","-1","that","is","more","often","than","not","found","in","the","definition","of","the","Hilbert","transform",".","Note","also","that","`","scipy.signal.hilbert","`","does","have","an","extra","-1","factor","compared","to","this","function.","``","''","''","tmp","=","asarray","(","x",")","if","iscomplexobj","(","tmp",")",":","return","hilbert","(","tmp.real",")","+","1j","*","hilbert","(","tmp.imag",")","n","=","len","(","x",")","omega","=","_cache.get","(","n",")","if","omega","is","None",":","if","len","(","_cache",")",">","20",":","while","_cache",":","_cache.popitem","(",")","def","kernel","(","k",")",":","if","k",">","0",":","return","1.0","elif","k","<","0",":","return","-1.0","return","0.0","omega","=","convolve.init_convolution_kernel","(","n",",","kernel",",","d=1",")","_cache","[","n","]","=","omega","overwrite_x","=","_datacopied","(","tmp",",","x",")","return","convolve.convolve","(","tmp",",","omega",",","swap_real_imag=1",",","overwrite_x=overwrite_x",")"]
201
259
null
pseudo_diffs.py
catboost/contrib/python/scipy/py2/scipy/fftpack/pseudo_diffs.py
from __future__ import division, print_function, absolute_import from numpy import pi, asarray, sin, cos, sinh, cosh, tanh, iscomplexobj from .None import convolve from scipy.fftpack.basic import _datacopied import atexit
15
null
5
10
null
null
null
Use image node_id 4 for calling a global function with example usage: hilbert(x, _cache) and returns: convolve, unknown, int, int, unknown
138
node_id 4
523,400
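The same FFT trick illustrates this transform as well: the docstring's definition y_j = i*sign(j)*x_j with y_0 = 0 becomes a one-line multiplier in Fourier space. Again this is only a sketch and skips the caching and even-length Nyquist handling of the real implementation.

```python
import numpy as np


def hilbert_fft(x):
    x = np.asarray(x, dtype=float)
    n = len(x)
    j = np.fft.fftfreq(n, d=1.0 / n)          # integer mode numbers
    return np.fft.ifft(1j * np.sign(j) * np.fft.fft(x)).real


t = np.linspace(0, 2 * np.pi, 9, endpoint=False)
print(np.round(hilbert_fft(np.sin(t)), 6))    # approximately cos(t) under this sign convention
```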
price
InterestRateSwap
null
true
self,valuation_date,market,model,pricing_context,name
Represents a batch of Interest Rate Swaps (IRS). An Interest rate swap (IRS) is a contract between two counterparties for an exchange of a series of payments over a period of time. The payments are made periodically (for example quarterly or semi-annually) where the last payment is made at the maturity (or termination) of the contract. In the case of fixed-for-floating IRS, one counterparty pays a fixed rate while the other counterparty's payments are linked to a floating index, most commonly the LIBOR rate. On the other hand, in the case of interest rate basis swap, the payments of both counterparties are linked to a floating index. Typically, the floating rate is observed (or fixed) at the beginning of each period while the payments are made at the end of each period [1]. For example, consider a vanilla swap with the starting date T_0 and maturity date T_n and equally spaced coupon payment dates T_1, T_2, ..., T_n such that T_0 < T_1 < T_2 < ... < T_n and dt_i = T_(i+1) - T_i (A) The floating rate is fixed on T_0, T_1, ..., T_(n-1) and both the fixed and floating payments are made on T_1, T_2, ..., T_n (payment dates). The InterestRateSwap class can be used to create and price multiple IRS simultaneously. The class supports vanilla fixed-for-floating swaps as well as basis swaps. However all IRS within an IRS object must be priced using a common reference and discount curve. #### Example (non batch): The following example illustrates the construction of an IRS instrument and calculating its price. ```python import numpy as np import tensorflow as tf import tf_quant_finance as tff dates = tff.datetime instruments = tff.experimental.instruments dtype = np.float64 start_date = dates.convert_to_date_tensor([(2020, 2, 8)]) maturity_date = dates.convert_to_date_tensor([(2022, 2, 8)]) valuation_date = dates.convert_to_date_tensor([(2020, 2, 8)]) period_3m = dates.periods.months(3) period_6m = dates.periods.months(6) fix_spec = instruments.FixedCouponSpecs( coupon_frequency=period_6m, currency='usd', notional=1., coupon_rate=0.03134, daycount_convention=instruments.DayCountConvention.ACTUAL_365, businessday_rule=dates.BusinessDayConvention.NONE) flt_spec = instruments.FloatCouponSpecs( coupon_frequency=period_3m, reference_rate_term=period_3m, reset_frequency=period_3m, currency='usd', notional=1., businessday_rule=dates.BusinessDayConvention.NONE, coupon_basis=0., coupon_multiplier=1., daycount_convention=instruments.DayCountConvention.ACTUAL_365) swap = instruments.InterestRateSwap([(2020,2,2)], [(2023,2,2)], [fix_spec], [flt_spec], dtype=np.float64) curve_dates = valuation_date + dates.periods.years([1, 2, 3, 5, 7, 10, 30]) reference_curve = instruments.RateCurve( curve_dates, np.array([ 0.02834814, 0.03077457, 0.03113739, 0.03130794, 0.03160892, 0.03213901, 0.03257991 ], dtype=dtype), valuation_date=valuation_date, dtype=dtype) market = instruments.InterestRateMarket( reference_curve=reference_curve, discount_curve=reference_curve) price = swap.price(valuation_date, market) # Expected result: 1e-7 ``` #### Example (batch): The following example illustrates the construction and pricing of IRS using batches. 
```python import numpy as np import tensorflow as tf import tf_quant_finance as tff dates = tff.datetime instruments = tff.experimental.instruments dtype = np.float64 notional = 1.0 maturity_date = dates.convert_to_date_tensor([(2023, 2, 8), (2027, 2, 8)]) start_date = dates.convert_to_date_tensor([(2020, 2, 8), (2020, 2, 8)]) valuation_date = dates.convert_to_date_tensor([(2020, 2, 8)]) period3m = dates.periods.months([3, 3]) period6m = dates.periods.months([6, 6]) fix_spec = instruments.FixedCouponSpecs( coupon_frequency=period6m, currency='usd', notional=notional, coupon_rate=[0.03134, 0.03181], daycount_convention=instruments.DayCountConvention.ACTUAL_365, businessday_rule=dates.BusinessDayConvention.NONE) flt_spec = instruments.FloatCouponSpecs( coupon_frequency=period3m, reference_rate_term=period3m, reset_frequency=period3m, currency='usd', notional=notional, businessday_rule=dates.BusinessDayConvention.NONE, coupon_basis=0.0, coupon_multiplier=1.0, daycount_convention=instruments.DayCountConvention.ACTUAL_365) swap = instruments.InterestRateSwap(start_date, maturity_date, fix_spec, flt_spec, dtype=dtype) curve_dates = valuation_date + dates.periods.years([1, 2, 3, 5, 7, 10, 30]) reference_curve = instruments.RateCurve( curve_dates, np.array([ 0.02834814, 0.03077457, 0.03113739, 0.03130794, 0.03160892, 0.03213901, 0.03257991 ], dtype=dtype), valuation_date=valuation_date, dtype=dtype) market = instruments.InterestRateMarket( reference_curve=reference_curve, discount_curve=reference_curve) price = swap.price(valuation_date, market) # Expected result: [1.0e-7, 1.0e-7] ``` #### References: [1]: Leif B.G. Andersen and Vladimir V. Piterbarg. Interest Rate Modeling, Volume I: Foundations and Vanilla Models. Chapter 5. 2010.
["Represents","a","batch","of","Interest","Rate","Swaps","(","IRS",")",".","An","Interest","rate","swap","(","IRS",")","is","a","contract","between","two","counterparties","for","an","exchange","of","a","series","of","payments","over","a","period","of","time",".","The","payments","are","made","periodically","(","for","example","quarterly","or","semi-annually",")","where","the","last","payment","is","made","at","the","maturity","(","or","termination",")","of","the","contract",".","In","the","case","of","fixed-for-floating","IRS",",","one","counterparty","pays","a","fixed","rate","while","the","other","counterparty","'s","payments","are","linked","to","a","floating","index",",","most","commonly","the","LIBOR","rate",".","On","the","other","hand",",","in","the","case","of","interest","rate","basis","swap",",","the","payments","of","both","counterparties","are","linked","to","a","floating","index",".","Typically",",","the","floating","rate","is","observed","(","or","fixed",")","at","the","beginning","of","each","period","while","the","payments","are","made","at","the","end","of","each","period","[","1","]",".","For","example",",","consider","a","vanilla","swap","with","the","starting","date","T_0","and","maturity","date","T_n","and","equally","spaced","coupon","payment","dates","T_1",",","T_2",",","...",",","T_n","such","that","T_0","<","T_1","<","T_2","<","...","<","T_n","and","dt_i","=","T_","(","i+1",")","-","T_i","(","A",")","The","floating","rate","is","fixed","on","T_0",",","T_1",",","...",",","T_","(","n-1",")","and","both","the","fixed","and","floating","payments","are","made","on","T_1",",","T_2",",","...",",","T_n","(","payment","dates",")",".","The","InterestRateSwap","class","can","be","used","to","create","and","price","multiple","IRS","simultaneously",".","The","class","supports","vanilla","fixed-for-floating","swaps","as","well","as","basis","swaps",".","However","all","IRS","within","an","IRS","object","must","be","priced","using","a","common","reference","and","discount","curve",".","#","#","#","#","Example","(","non","batch",")",":","The","following","example","illustrates","the","construction","of","an","IRS","instrument","and","calculating","its","price",".","``","`","python","import","numpy","as","np","import","tensorflow","as","tf","import","tf_quant_finance","as","tff","dates","=","tff.datetime","instruments","=","tff.experimental.instruments","dtype","=","np.float64","start_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")","]",")","maturity_date","=","dates.convert_to_date_tensor","(","[","(","2022",",","2",",","8",")","]",")","valuation_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")","]",")","period_3m","=","dates.periods.months","(","3",")","period_6m","=","dates.periods.months","(","6",")","fix_spec","=","instruments.FixedCouponSpecs","(","coupon_frequency=period_6m",",","currency='usd","'",",","notional=1.",",","coupon_rate=0.03134",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",",","businessday_rule=dates.BusinessDayConvention.NONE",")","flt_spec","=","instruments.FloatCouponSpecs","(","coupon_frequency=period_3m",",","reference_rate_term=period_3m",",","reset_frequency=period_3m",",","currency='usd","'",",","notional=1.",",","businessday_rule=dates.BusinessDayConvention.NONE",",","coupon_basis=0.",",","coupon_multiplier=1.",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",")","swap","=","instruments.InterestRateSwap","(","[","(","2020,2,2",")","]",",","[","(","2023
,2,2",")","]",",","[","fix_spec","]",",","[","flt_spec","]",",","dtype=np.float64",")","curve_dates","=","valuation_date","+","dates.periods.years","(","[","1",",","2",",","3",",","5",",","7",",","10",",","30","]",")","reference_curve","=","instruments.RateCurve","(","curve_dates",",","np.array","(","[","0.02834814",",","0.03077457",",","0.03113739",",","0.03130794",",","0.03160892",",","0.03213901",",","0.03257991","]",",","dtype=dtype",")",",","valuation_date=valuation_date",",","dtype=dtype",")","market","=","instruments.InterestRateMarket","(","reference_curve=reference_curve",",","discount_curve=reference_curve",")","price","=","swap.price","(","valuation_date",",","market",")","#","Expected","result",":","1e-7","``","`","#","#","#","#","Example","(","batch",")",":","The","following","example","illustrates","the","construction","and","pricing","of","IRS","using","batches",".","``","`","python","import","numpy","as","np","import","tensorflow","as","tf","import","tf_quant_finance","as","tff","dates","=","tff.datetime","instruments","=","tff.experimental.instruments","dtype","=","np.float64","notional","=","1.0","maturity_date","=","dates.convert_to_date_tensor","(","[","(","2023",",","2",",","8",")",",","(","2027",",","2",",","8",")","]",")","start_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")",",","(","2020",",","2",",","8",")","]",")","valuation_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")","]",")","period3m","=","dates.periods.months","(","[","3",",","3","]",")","period6m","=","dates.periods.months","(","[","6",",","6","]",")","fix_spec","=","instruments.FixedCouponSpecs","(","coupon_frequency=period6m",",","currency='usd","'",",","notional=notional",",","coupon_rate=","[","0.03134",",","0.03181","]",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",",","businessday_rule=dates.BusinessDayConvention.NONE",")","flt_spec","=","instruments.FloatCouponSpecs","(","coupon_frequency=period3m",",","reference_rate_term=period3m",",","reset_frequency=period3m",",","currency='usd","'",",","notional=notional",",","businessday_rule=dates.BusinessDayConvention.NONE",",","coupon_basis=0.0",",","coupon_multiplier=1.0",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",")","swap","=","instruments.InterestRateSwap","(","start_date",",","maturity_date",",","fix_spec",",","flt_spec",",","dtype=dtype",")","curve_dates","=","valuation_date","+","dates.periods.years","(","[","1",",","2",",","3",",","5",",","7",",","10",",","30","]",")","reference_curve","=","instruments.RateCurve","(","curve_dates",",","np.array","(","[","0.02834814",",","0.03077457",",","0.03113739",",","0.03130794",",","0.03160892",",","0.03213901",",","0.03257991","]",",","dtype=dtype",")",",","valuation_date=valuation_date",",","dtype=dtype",")","market","=","instruments.InterestRateMarket","(","reference_curve=reference_curve",",","discount_curve=reference_curve",")","price","=","swap.price","(","valuation_date",",","market",")","#","Expected","result",":","[","1.0e-7",",","1.0e-7","]","``","`","#","#","#","#","References",":","[","1","]",":","Leif","B.G",".","Andersen","and","Vladimir","V.","Piterbarg",".","Interest","Rate","Modeling",",","Volume","I",":","Foundations","and","Vanilla","Models",".","Chapter","5",".","2010","."]
Returns the present value of the instrument on the valuation date. Args: valuation_date: A scalar `DateTensor` specifying the date on which valuation is being desired. market: A namedtuple of type `InterestRateMarket` which contains the necessary information for pricing the interest rate swap. model: Reserved for future use. pricing_context: Additional context relevant for pricing. name: Python str. The name to give to the ops created by this function. Default value: `None` which maps to 'price'. Returns: A Rank 1 `Tensor` of real type containing the modeled price of each IRS contract based on the input market data.
["Returns","the","present","value","of","the","instrument","on","the","valuation","date",".","Args",":","valuation_date",":","A","scalar","`","DateTensor","`","specifying","the","date","on","which","valuation","is","being","desired",".","market",":","A","namedtuple","of","type","`","InterestRateMarket","`","which","contains","the","necessary","information","for","pricing","the","interest","rate","swap",".","model",":","Reserved","for","future","use",".","pricing_context",":","Additional","context","relevant","for","pricing",".","name",":","Python","str",".","The","name","to","give","to","the","ops","created","by","this","function",".","Default","value",":","`","None","`","which","maps","to","'price","'",".","Returns",":","A","Rank","1","`","Tensor","`","of","real","type","containing","the","modeled","price","of","each","IRS","contract","based","on","the","input","market","data","."]
unknown
def price( self, valuation_date, market, model=None, pricing_context=None, name=None, ): """Returns the present value of the instrument on the valuation date. Args: valuation_date: A scalar `DateTensor` specifying the date on which valuation is being desired. market: A namedtuple of type `InterestRateMarket` which contains the necessary information for pricing the interest rate swap. model: Reserved for future use. pricing_context: Additional context relevant for pricing. name: Python str. The name to give to the ops created by this function. Default value: `None` which maps to 'price'. Returns: A Rank 1 `Tensor` of real type containing the modeled price of each IRS contract based on the input market data. """ name = name or (self._name + "_price") with tf.name_scope(name): valuation_date = dates.convert_to_date_tensor(valuation_date) pay_cf = self._pay_leg.price( valuation_date, market, model, pricing_context ) receive_cf = self._receive_leg.price( valuation_date, market, model, pricing_context ) return receive_cf - pay_cf
["def","price","(","self",",","valuation_date",",","market",",","model=None",",","pricing_context=None",",","name=None",",",")",":","``","''","''","Returns","the","present","value","of","the","instrument","on","the","valuation","date",".","Args",":","valuation_date",":","A","scalar","`","DateTensor","`","specifying","the","date","on","which","valuation","is","being","desired",".","market",":","A","namedtuple","of","type","`","InterestRateMarket","`","which","contains","the","necessary","information","for","pricing","the","interest","rate","swap",".","model",":","Reserved","for","future","use",".","pricing_context",":","Additional","context","relevant","for","pricing",".","name",":","Python","str",".","The","name","to","give","to","the","ops","created","by","this","function",".","Default","value",":","`","None","`","which","maps","to","'price","'",".","Returns",":","A","Rank","1","`","Tensor","`","of","real","type","containing","the","modeled","price","of","each","IRS","contract","based","on","the","input","market","data.","``","''","''","name","=","name","or","(","self._name","+","``","_price","''",")","with","tf.name_scope","(","name",")",":","valuation_date","=","dates.convert_to_date_tensor","(","valuation_date",")","pay_cf","=","self._pay_leg.price","(","valuation_date",",","market",",","model",",","pricing_context",")","receive_cf","=","self._receive_leg.price","(","valuation_date",",","market",",","model",",","pricing_context",")","return","receive_cf","-","pay_cf"]
213
239
null
interest_rate_swap.py
tf-quant-finance/tf_quant_finance/experimental/instruments/interest_rate_swap.py
import tensorflow.compat.v2 from tf_quant_finance import datetime from tf_quant_finance.experimental.instruments import cashflow_stream from tf_quant_finance.experimental.instruments import rates_common
15
1
4
0
0
10
null
Use image node_id 2 for calling the InterestRateSwap obj's underlying member method code with example usage: obj.price(valuation_date, market, model, pricing_context, name) and returns: unknown
193
node_id 2
2,191,435
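To make the return value of `price` concrete, here is a small conceptual sketch of the receive-minus-pay relationship the method computes. The leg PV numbers are made up; in the real instrument they come from the cashflow-stream pricers on the two legs.

```python
# Conceptual sketch only: price() returns receive-leg PV minus pay-leg PV for
# every IRS in the batch. The PV numbers below are hypothetical placeholders.
import numpy as np

receive_leg_pv = np.array([1.0203, 0.9987])  # hypothetical receive-leg PVs
pay_leg_pv = np.array([1.0203, 0.9985])      # hypothetical pay-leg PVs
swap_price = receive_leg_pv - pay_leg_pv     # rank-1 array, one price per contract
print(swap_price.shape)                      # (2,)
```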
create_attn
global
null
false
attn_type,channels
null
null
null
null
None,module_cls
def create_attn(attn_type, channels, **kwargs): module_cls = get_attn(attn_type) if module_cls is not None: # NOTE: it's expected the first (positional) argument of all attention layers is the # input channels return module_cls(channels, **kwargs) return None
["def","create_attn","(","attn_type",",","channels",",","*","*","kwargs",")",":","module_cls","=","get_attn","(","attn_type",")","if","module_cls","is","not","None",":","#","NOTE",":","it","'s","expected","the","first","(","positional",")","argument","of","all","attention","layers","is","the","#","input","channels","return","module_cls","(","channels",",","*","*","kwargs",")","return","None"]
84
89
null
create_attn.py
pytorch-image-models/timm/layers/create_attn.py
import torch from functools import partial from .bottleneck_attn import BottleneckAttn from .cbam import CbamModule, LightCbamModule from .eca import EcaModule, CecaModule from .gather_excite import GatherExcite from .global_context import GlobalContext from .halo_attn import HaloAttn from .lambda_layer import LambdaLayer from .non_local_attn import NonLocalAttn, BatNonLocalAttn from .selective_kernel import SelectiveKernel from .split_attn import SplitAttn from .squeeze_excite import SEModule, EffectiveSEModule
15
null
13
2
null
null
null
Use image node_id 2 for calling a global function with example usage: create_attn(attn_type, channels) and returns: None, module_cls
132
node_id 2
1,692,280
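A short usage sketch for the `create_attn` factory above. It assumes the `timm` package is importable and that `'se'` is one of the registered attention types; both are assumptions drawn from the surrounding module rather than guaranteed by this record.

```python
# Hedged usage sketch for create_attn: the attn_type string selects a module
# class via get_attn, and the channel count is passed as the first argument.
import torch
from timm.layers import create_attn

attn = create_attn('se', 64)        # squeeze-and-excitation block for 64 channels
x = torch.randn(2, 64, 8, 8)
y = attn(x)                          # attention modules preserve the input shape
print(y.shape)                       # torch.Size([2, 64, 8, 8])

print(create_attn(None, 64))         # a falsy attn_type yields None
```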
test_unstructured
TestRetrieveUtils
null
true
self
null
null
null
null
null
def test_unstructured(self): pdf_file_path = os.path.join(test_dir, "example.pdf") txt_file_path = os.path.join(test_dir, "example.txt") word_file_path = os.path.join(test_dir, "example.docx") chunks = split_files_to_chunks( [pdf_file_path, txt_file_path, word_file_path] ) assert all( isinstance(chunk, str) and "AutoGen is an advanced tool designed to assist developers" in chunk.strip() for chunk in chunks )
["def","test_unstructured","(","self",")",":","pdf_file_path","=","os.path.join","(","test_dir",",","``","example.pdf","''",")","txt_file_path","=","os.path.join","(","test_dir",",","``","example.txt","''",")","word_file_path","=","os.path.join","(","test_dir",",","``","example.docx","''",")","chunks","=","split_files_to_chunks","(","[","pdf_file_path",",","txt_file_path",",","word_file_path","]",")","assert","all","(","isinstance","(","chunk",",","str",")","and","``","AutoGen","is","an","advanced","tool","designed","to","assist","developers","''","in","chunk.strip","(",")","for","chunk","in","chunks",")"]
226
234
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 12 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_unstructured() without return types
155
node_id 12
319,457
on
TextLogger
NullLogger
true
self
null
null
null
null
True
def on(self): return True
["def","on","(","self",")",":","return","True"]
45
46
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
4
1
Use image node_id 4 for calling the TextLogger obj's underlying member method code with example usage: obj.on() and returns: True
129
node_id 4
2,276,657
normalize
ImageGPTFeatureExtractor
FeatureExtractionMixin,ImageFeatureExtractionMixin
true
self,image
Constructs an ImageGPT feature extractor. This feature extractor can be used to resize images to a smaller resolution (such as 32x32 or 64x64), normalize them and finally color quantize them to obtain sequences of "pixel values" (color clusters). This feature extractor inherits from [`FeatureExtractionMixin`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: clusters (`np.ndarray`): The color clusters to use, as a `np.ndarray` of shape `(n_clusters, 3)`. do_resize (`bool`, *optional*, defaults to `True`): Whether to resize the input to a certain `size`. size (`int` or `Tuple(int)`, *optional*, defaults to 32): Resize the input to the given size. If a tuple is provided, it should be (width, height). If only an integer is provided, then the input will be resized to (size, size). Only has an effect if `do_resize` is set to `True`. resample (`int`, *optional*, defaults to `PIL.Image.Resampling.BILINEAR`): An optional resampling filter. This can be one of `PIL.Image.Resampling.NEAREST`, `PIL.Image.Resampling.BOX`, `PIL.Image.Resampling.BILINEAR`, `PIL.Image.Resampling.HAMMING`, `PIL.Image.Resampling.BICUBIC` or `PIL.Image.Resampling.LANCZOS`. Only has an effect if `do_resize` is set to `True`. do_normalize (`bool`, *optional*, defaults to `True`): Whether or not to normalize the input to the range between -1 and +1.
["Constructs","an","ImageGPT","feature","extractor",".","This","feature","extractor","can","be","used","to","resize","images","to","a","smaller","resolution","(","such","as","32x32","or","64x64",")",",","normalize","them","and","finally","color","quantize","them","to","obtain","sequences","of","``","pixel","values","''","(","color","clusters",")",".","This","feature","extractor","inherits","from","[","`","FeatureExtractionMixin","`","]","which","contains","most","of","the","main","methods",".","Users","should","refer","to","this","superclass","for","more","information","regarding","those","methods",".","Args",":","clusters","(","`","np.ndarray","`",")",":","The","color","clusters","to","use",",","as","a","`","np.ndarray","`","of","shape","`","(","n_clusters",",","3",")","`",".","do_resize","(","`","bool","`",",","*","optional","*",",","defaults","to","`","True","`",")",":","Whether","to","resize","the","input","to","a","certain","`","size","`",".","size","(","`","int","`","or","`","Tuple","(","int",")","`",",","*","optional","*",",","defaults","to","32",")",":","Resize","the","input","to","the","given","size",".","If","a","tuple","is","provided",",","it","should","be","(","width",",","height",")",".","If","only","an","integer","is","provided",",","then","the","input","will","be","resized","to","(","size",",","size",")",".","Only","has","an","effect","if","`","do_resize","`","is","set","to","`","True","`",".","resample","(","`","int","`",",","*","optional","*",",","defaults","to","`","PIL.Image.Resampling.BILINEAR","`",")",":","An","optional","resampling","filter",".","This","can","be","one","of","`","PIL.Image.Resampling.NEAREST","`",",","`","PIL.Image.Resampling.BOX","`",",","`","PIL.Image.Resampling.BILINEAR","`",",","`","PIL.Image.Resampling.HAMMING","`",",","`","PIL.Image.Resampling.BICUBIC","`","or","`","PIL.Image.Resampling.LANCZOS","`",".","Only","has","an","effect","if","`","do_resize","`","is","set","to","`","True","`",".","do_normalize","(","`","bool","`",",","*","optional","*",",","defaults","to","`","True","`",")",":","Whether","or","not","to","normalize","the","input","to","the","range","between","-1","and","+1","."]
Normalizes `image` into the range -1 to +1. Args: image (`PIL.Image.Image` or `np.ndarray` or `torch.Tensor`): The image to normalize. Returns: `np.ndarray`: The normalized image.
["Normalizes","`","image","`","into","the","range","-1","to","+1",".","Args",":","image","(","`","PIL.Image.Image","`","or","`","np.ndarray","`","or","`","torch.Tensor","`",")",":","The","image","to","normalize",".","Returns",":","`","np.ndarray","`",":","The","normalized","image","."]
unknown
def normalize(self, image): """ Normalizes `image` into the range -1 to +1. Args: image (`PIL.Image.Image` or `np.ndarray` or `torch.Tensor`): The image to normalize. Returns: `np.ndarray`: The normalized image. """ image = self.to_numpy_array( image, rescale=False, channel_first=False ) return image / 127.5 - 1
["def","normalize","(","self",",","image",")",":","``","''","''","Normalizes","`","image","`","into","the","range","-1","to","+1",".","Args",":","image","(","`","PIL.Image.Image","`","or","`","np.ndarray","`","or","`","torch.Tensor","`",")",":","The","image","to","normalize",".","Returns",":","`","np.ndarray","`",":","The","normalized","image.","``","''","''","image","=","self.to_numpy_array","(","image",",","rescale=False",",","channel_first=False",")","return","image","\/","127.5","-","1"]
86
99
null
feature_extraction_imagegpt.py
H2O/h2o_flexgen/benchmark/third_party/transformers/src/transformers/models/imagegpt/feature_extraction_imagegpt.py
from typing import List, Optional, Union import numpy from PIL import Image from transformers.image_utils import PILImageResampling from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin from ...image_utils import ImageFeatureExtractionMixin, is_torch_tensor from ...utils import TensorType, logging
15
1
7
2
2
3
2
Use image node_id 2 for calling the ImageGPTFeatureExtractor obj's underlying member method code with example usage: obj.normalize(image) and returns: unknown
158
node_id 2
95,363
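Constructing the full feature extractor requires a `clusters` array, so the sketch below only applies the same arithmetic the method uses (`image / 127.5 - 1`) to a NumPy array. Treat it as an illustration of the transform, not a call into the real class.

```python
# Illustration of the normalization used above: 8-bit pixel values in [0, 255]
# are mapped linearly into [-1, 1] via image / 127.5 - 1.
import numpy as np

image = np.array([[0, 127.5, 255]], dtype=np.float64)  # a 1x3 toy "image"
normalized = image / 127.5 - 1
print(normalized)  # [[-1.  0.  1.]]
```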
test_retrieve_utils
TestRetrieveUtils
null
true
self
null
null
null
null
null
def test_retrieve_utils(self): client = chromadb.PersistentClient(path="/tmp/chromadb") create_vector_db_from_dir( dir_path="./website/docs", client=client, collection_name="autogen-docs", custom_text_types=["txt", "md", "rtf", "rst"], get_or_create=True, ) results = query_vector_db( query_texts=[ "How can I use AutoGen UserProxyAgent and AssistantAgent to do code generation?", ], n_results=4, client=client, collection_name="autogen-docs", search_string="AutoGen", ) print(results["ids"][0]) assert len(results["ids"][0]) == 4
["def","test_retrieve_utils","(","self",")",":","client","=","chromadb.PersistentClient","(","path=","''","\/tmp\/chromadb","''",")","create_vector_db_from_dir","(","dir_path=","''",".\/website\/docs","''",",","client=client",",","collection_name=","''","autogen-docs","''",",","custom_text_types=","[","``","txt","''",",","``","md","''",",","``","rtf","''",",","``","rst","''","]",",","get_or_create=True",",",")","results","=","query_vector_db","(","query_texts=","[","``","How","can","I","use","AutoGen","UserProxyAgent","and","AssistantAgent","to","do","code","generation","?","``",",","]",",","n_results=4",",","client=client",",","collection_name=","''","autogen-docs","''",",","search_string=","''","AutoGen","''",",",")","print","(","results","[","``","ids","''","]","[","0","]",")","assert","len","(","results","[","``","ids","''","]","[","0","]",")","==","4"]
201
220
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 11 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_retrieve_utils() without return types
157
node_id 11
319,456
_maximum_given_samples
global
null
false
f,pts,map
null
null
null
null
max
def _maximum_given_samples(f, pts, map=None): """ use given sample pts to calculate maximum for function f Inputs: f -- a function that returns a single value, given a list of inputs pts -- a list of sample points map -- the mapping function [Default is builtins.map]""" if map is None: from builtins import map from numpy import transpose, atleast_2d return max(list(map(f, atleast_2d(transpose(pts)).tolist())))
["def","_maximum_given_samples","(","f",",","pts",",","map=None",")",":","``","''","''","use","given","sample","pts","to","calculate","maximum","for","function","f","Inputs",":","f","--","a","function","that","returns","a","single","value",",","given","a","list","of","inputs","pts","--","a","list","of","sample","points","map","--","the","mapping","function","[","Default","is","builtins.map","]","''","''","''","if","map","is","None",":","from","builtins","import","map","from","numpy","import","transpose",",","atleast_2d","return","max","(","list","(","map","(","f",",","atleast_2d","(","transpose","(","pts",")",")",".tolist","(",")",")",")",")"]
226
238
null
samples.py
mystic/mystic/math/samples.py
15
null
0
15
null
null
null
Use image node_id 11 for calling a global function with example usage: _maximum_given_samples(f, pts, map) and returns: max
123
node_id 11
1,407,031
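The helper above is private, so this sketch re-creates its core line with illustrative names to show how the sample points are laid out (coordinates as rows, points as columns) before `f` is mapped over them.

```python
# Re-creation of the core of _maximum_given_samples with made-up data:
# pts holds coordinates as rows, so transpose() turns columns into points.
from numpy import transpose, atleast_2d

def f(x):                      # toy objective: squared distance from the origin
    return sum(xi ** 2 for xi in x)

pts = [[0.0, 1.0, 2.0],        # x-coordinates of three sample points
       [0.0, 1.0, -1.0]]       # y-coordinates of the same points
values = list(map(f, atleast_2d(transpose(pts)).tolist()))
print(max(values))             # 5.0, reached at the point (2.0, -1.0)
```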
test_custom_text_split_function
TestRetrieveUtils
null
true
self
null
null
null
null
list
def test_custom_text_split_function(self): def custom_text_split_function(text): return [text[: len(text) // 2], text[len(text) // 2 :]] db_path = "/tmp/test_retrieve_utils_chromadb.db" client = chromadb.PersistentClient(path=db_path) create_vector_db_from_dir( os.path.join(test_dir, "example.txt"), client=client, collection_name="mytestcollection", custom_text_split_function=custom_text_split_function, get_or_create=True, recursive=False, ) results = query_vector_db( ["autogen"], client=client, collection_name="mytestcollection", n_results=1, ) assert ( "AutoGen is an advanced tool designed to assist developers in harnessing the capabilities" in results.get("documents")[0][0] )
["def","test_custom_text_split_function","(","self",")",":","def","custom_text_split_function","(","text",")",":","return","[","text","[",":","len","(","text",")","\/\/","2","]",",","text","[","len","(","text",")","\/\/","2",":","]","]","db_path","=","``","\/tmp\/test_retrieve_utils_chromadb.db","''","client","=","chromadb.PersistentClient","(","path=db_path",")","create_vector_db_from_dir","(","os.path.join","(","test_dir",",","``","example.txt","''",")",",","client=client",",","collection_name=","''","mytestcollection","''",",","custom_text_split_function=custom_text_split_function",",","get_or_create=True",",","recursive=False",",",")","results","=","query_vector_db","(","[","``","autogen","''","]",",","client=client",",","collection_name=","''","mytestcollection","''",",","n_results=1",",",")","assert","(","``","AutoGen","is","an","advanced","tool","designed","to","assist","developers","in","harnessing","the","capabilities","''","in","results.get","(","``","documents","''",")","[","0","]","[","0","]",")"]
181
199
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 10 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_custom_text_split_function() and returns: list
166
node_id 10
319,455
_ptp_given_samples
global
null
false
f,pts,map
null
null
null
null
ptp
def _ptp_given_samples(f, pts, map=None): """ use given sample pts to calculate spread for function f Inputs: f -- a function that returns a single value, given a list of inputs pts -- a list of sample points map -- the mapping function [Default is builtins.map]""" if map is None: from builtins import map from numpy import transpose, ptp, atleast_2d return ptp(list(map(f, atleast_2d(transpose(pts)).tolist())))
["def","_ptp_given_samples","(","f",",","pts",",","map=None",")",":","``","''","''","use","given","sample","pts","to","calculate","spread","for","function","f","Inputs",":","f","--","a","function","that","returns","a","single","value",",","given","a","list","of","inputs","pts","--","a","list","of","sample","points","map","--","the","mapping","function","[","Default","is","builtins.map","]","''","''","''","if","map","is","None",":","from","builtins","import","map","from","numpy","import","transpose",",","ptp",",","atleast_2d","return","ptp","(","list","(","map","(","f",",","atleast_2d","(","transpose","(","pts",")",")",".tolist","(",")",")",")",")"]
241
253
null
samples.py
mystic/mystic/math/samples.py
15
null
0
15
null
null
null
Use image node_id 12 for calling a global function with example usage: _ptp_given_samples(f, pts, map) and returns: ptp
119
node_id 12
1,407,032
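The spread helper differs from the maximum helper only in the final aggregate, so this companion sketch (same made-up point layout as above) shows `ptp`, i.e. max minus min, over the mapped values.

```python
# Companion sketch: same layout as above, but the aggregate is ptp (max - min).
from numpy import transpose, ptp, atleast_2d

def f(x):
    return x[0] - x[1]         # toy function yielding both zero and positive values

pts = [[0.0, 1.0, 2.0],
       [0.0, 1.0, -1.0]]
values = list(map(f, atleast_2d(transpose(pts)).tolist()))
print(ptp(values))             # 3.0 = max(0, 0, 3) - min(0, 0, 3)
```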
test_custom_vector_db
TestRetrieveUtils
null
true
self
null
null
null
null
dict
def test_custom_vector_db(self): try: import lancedb except ImportError: return from autogen.agentchat.contrib.retrieve_user_proxy_agent import ( RetrieveUserProxyAgent, ) db_path = "/tmp/lancedb" def create_lancedb(): db = lancedb.connect(db_path) data = [ { "vector": [1.1, 1.2], "id": 1, "documents": "This is a test document spark", }, { "vector": [0.2, 1.8], "id": 2, "documents": "This is another test document", }, { "vector": [0.1, 0.3], "id": 3, "documents": "This is a third test document spark", }, { "vector": [0.5, 0.7], "id": 4, "documents": "This is a fourth test document", }, { "vector": [2.1, 1.3], "id": 5, "documents": "This is a fifth test document spark", }, { "vector": [5.1, 8.3], "id": 6, "documents": "This is a sixth test document", }, ] try: db.create_table("my_table", data) except OSError: pass class MyRetrieveUserProxyAgent(RetrieveUserProxyAgent): def query_vector_db( self, query_texts, n_results=10, search_string="", ): if query_texts: vector = [0.1, 0.3] db = lancedb.connect(db_path) table = db.open_table("my_table") query = ( table.search(vector) .where(f"documents LIKE '%{search_string}%'") .limit(n_results) .to_df() ) return { "ids": [query["id"].tolist()], "documents": [query["documents"].tolist()], } def retrieve_docs( self, problem: str, n_results: int = 20, search_string: str = "", ): results = self.query_vector_db( query_texts=[problem], n_results=n_results, search_string=search_string, ) self._results = results print("doc_ids: ", results["ids"]) ragragproxyagent = MyRetrieveUserProxyAgent( name="ragproxyagent", human_input_mode="NEVER", max_consecutive_auto_reply=2, retrieve_config={ "task": "qa", "chunk_token_size": 2000, "client": "__", "embedding_model": "all-mpnet-base-v2", }, ) create_lancedb() ragragproxyagent.retrieve_docs( "This is a test document spark", n_results=10, search_string="spark", ) assert ragragproxyagent._results["ids"] == [[3, 1, 5]]
["def","test_custom_vector_db","(","self",")",":","try",":","import","lancedb","except","ImportError",":","return","from","autogen.agentchat.contrib.retrieve_user_proxy_agent","import","(","RetrieveUserProxyAgent",",",")","db_path","=","``","\/tmp\/lancedb","''","def","create_lancedb","(",")",":","db","=","lancedb.connect","(","db_path",")","data","=","[","{","``","vector","''",":","[","1.1",",","1.2","]",",","``","id","''",":","1",",","``","documents","''",":","``","This","is","a","test","document","spark","''",",","}",",","{","``","vector","''",":","[","0.2",",","1.8","]",",","``","id","''",":","2",",","``","documents","''",":","``","This","is","another","test","document","''",",","}",",","{","``","vector","''",":","[","0.1",",","0.3","]",",","``","id","''",":","3",",","``","documents","''",":","``","This","is","a","third","test","document","spark","''",",","}",",","{","``","vector","''",":","[","0.5",",","0.7","]",",","``","id","''",":","4",",","``","documents","''",":","``","This","is","a","fourth","test","document","''",",","}",",","{","``","vector","''",":","[","2.1",",","1.3","]",",","``","id","''",":","5",",","``","documents","''",":","``","This","is","a","fifth","test","document","spark","''",",","}",",","{","``","vector","''",":","[","5.1",",","8.3","]",",","``","id","''",":","6",",","``","documents","''",":","``","This","is","a","sixth","test","document","''",",","}",",","]","try",":","db.create_table","(","``","my_table","''",",","data",")","except","OSError",":","pass","class","MyRetrieveUserProxyAgent","(","RetrieveUserProxyAgent",")",":","def","query_vector_db","(","self",",","query_texts",",","n_results=10",",","search_string=","''","''",",",")",":","if","query_texts",":","vector","=","[","0.1",",","0.3","]","db","=","lancedb.connect","(","db_path",")","table","=","db.open_table","(","``","my_table","''",")","query","=","(","table.search","(","vector",")",".where","(","f","''","documents","LIKE","'","%","{","search_string","}","%","'","''",")",".limit","(","n_results",")",".to_df","(",")",")","return","{","``","ids","''",":","[","query","[","``","id","''","]",".tolist","(",")","]",",","``","documents","''",":","[","query","[","``","documents","''","]",".tolist","(",")","]",",","}","def","retrieve_docs","(","self",",","problem",":","str",",","n_results",":","int","=","20",",","search_string",":","str","=","``","''",",",")",":","results","=","self.query_vector_db","(","query_texts=","[","problem","]",",","n_results=n_results",",","search_string=search_string",",",")","self._results","=","results","print","(","``","doc_ids",":","``",",","results","[","``","ids","''","]",")","ragragproxyagent","=","MyRetrieveUserProxyAgent","(","name=","''","ragproxyagent","''",",","human_input_mode=","''","NEVER","''",",","max_consecutive_auto_reply=2",",","retrieve_config=","{","``","task","''",":","``","qa","''",",","``","chunk_token_size","''",":","2000",",","``","client","''",":","``","__","''",",","``","embedding_model","''",":","``","all-mpnet-base-v2","''",",","}",",",")","create_lancedb","(",")","ragragproxyagent.retrieve_docs","(","``","This","is","a","test","document","spark","''",",","n_results=10",",","search_string=","''","spark","''",",",")","assert","ragragproxyagent._results","[","``","ids","''","]","==","[","[","3",",","1",",","5","]","]"]
117
179
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 9 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_custom_vector_db() and returns: dict
155
node_id 9
319,454
test_query_vector_db
TestRetrieveUtils
null
true
self
null
null
null
null
null
def test_query_vector_db(self): db_path = "/tmp/test_retrieve_utils_chromadb.db" if os.path.exists(db_path): client = chromadb.PersistentClient(path=db_path) else: # If the database does not exist, create it first client = chromadb.PersistentClient(path=db_path) create_vector_db_from_dir(test_dir, client=client) results = query_vector_db(["autogen"], client=client) assert isinstance(results, dict) and any( "autogen" in res[0].lower() for res in results.get("documents", []) )
["def","test_query_vector_db","(","self",")",":","db_path","=","``","\/tmp\/test_retrieve_utils_chromadb.db","''","if","os.path.exists","(","db_path",")",":","client","=","chromadb.PersistentClient","(","path=db_path",")","else",":","#","If","the","database","does","not","exist",",","create","it","first","client","=","chromadb.PersistentClient","(","path=db_path",")","create_vector_db_from_dir","(","test_dir",",","client=client",")","results","=","query_vector_db","(","[","``","autogen","''","]",",","client=client",")","assert","isinstance","(","results",",","dict",")","and","any","(","``","autogen","''","in","res","[","0","]",".lower","(",")","for","res","in","results.get","(","``","documents","''",",","[","]",")",")"]
106
115
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 8 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_query_vector_db() without return types
157
node_id 8
319,453
get_boxes
global
null
false
model,val_set,num_images,image_dir,score_threshold
null
null
null
null
all_boxes, all_gt_boxes
def get_boxes( model, val_set, num_images=0, image_dir=None, score_threshold=0.6 ): n = 0 all_boxes = [] all_gt_boxes = [] with tqdm(total=val_set.ndata) as pbar: # progress bar for img, ( gt_boxes, gt_classes, num_gt_boxes, difficult, im_shape, ) in val_set: outputs = model.fprop(img, inference=True) for k, boxes in enumerate(outputs): pbar.update(1) all_boxes.append(boxes) ngt = num_gt_boxes[0, k] gtb = gt_boxes[:, k].reshape((-1, 4)) # retrieve gt boxes # we add a extra column to track detections during the AP calculation detected = np.array([False] * ngt) gtb = np.hstack( [ gtb[:ngt], gt_classes[:ngt, k][:, np.newaxis], difficult[:ngt, k][:, np.newaxis], detected[:, np.newaxis], ] ) all_gt_boxes.append(gtb) # plot images if needed if n < num_images: gt_boxes = np.copy( gt_boxes.reshape((-1, 4, val_set.be.bsz)) ) boxes = np.copy(boxes) ngt = num_gt_boxes[0, k] img = plot_image( img=img[:, k].get(), im_shape=im_shape[:, k], gt_boxes=gt_boxes[:ngt, :, k], boxes=boxes, score_threshold=score_threshold, ) file_name = os.path.join( image_dir, "image_{}.jpg".format(n) ) img.save(file_name) n = n + 1 return (all_boxes, all_gt_boxes)
["def","get_boxes","(","model",",","val_set",",","num_images=0",",","image_dir=None",",","score_threshold=0.6",")",":","n","=","0","all_boxes","=","[","]","all_gt_boxes","=","[","]","with","tqdm","(","total=val_set.ndata",")","as","pbar",":","#","progress","bar","for","img",",","(","gt_boxes",",","gt_classes",",","num_gt_boxes",",","difficult",",","im_shape",",",")","in","val_set",":","outputs","=","model.fprop","(","img",",","inference=True",")","for","k",",","boxes","in","enumerate","(","outputs",")",":","pbar.update","(","1",")","all_boxes.append","(","boxes",")","ngt","=","num_gt_boxes","[","0",",","k","]","gtb","=","gt_boxes","[",":",",","k","]",".reshape","(","(","-1",",","4",")",")","#","retrieve","gt","boxes","#","we","add","a","extra","column","to","track","detections","during","the","AP","calculation","detected","=","np.array","(","[","False","]","*","ngt",")","gtb","=","np.hstack","(","[","gtb","[",":","ngt","]",",","gt_classes","[",":","ngt",",","k","]","[",":",",","np.newaxis","]",",","difficult","[",":","ngt",",","k","]","[",":",",","np.newaxis","]",",","detected","[",":",",","np.newaxis","]",",","]",")","all_gt_boxes.append","(","gtb",")","#","plot","images","if","needed","if","n","<","num_images",":","gt_boxes","=","np.copy","(","gt_boxes.reshape","(","(","-1",",","4",",","val_set.be.bsz",")",")",")","boxes","=","np.copy","(","boxes",")","ngt","=","num_gt_boxes","[","0",",","k","]","img","=","plot_image","(","img=img","[",":",",","k","]",".get","(",")",",","im_shape=im_shape","[",":",",","k","]",",","gt_boxes=gt_boxes","[",":","ngt",",",":",",","k","]",",","boxes=boxes",",","score_threshold=score_threshold",",",")","file_name","=","os.path.join","(","image_dir",",","``","image_","{","}",".jpg","''",".format","(","n",")",")","img.save","(","file_name",")","n","=","n","+","1","return","(","all_boxes",",","all_gt_boxes",")"]
41
83
null
inference.py
neon/examples/ssd/inference.py
from neon.backends import gen_backend from neon.util.argparser import NeonArgparser from neon.models.model import Model import numpy from ssd_container import SSD from util.voc_eval import voc_eval from tqdm import tqdm import json from ssd_dataloader import build_dataloader import pickle import os from sys import exit from util.util import plot_image from collections import OrderedDict
15
null
14
1
null
null
null
Use image node_id 1 for calling a global function with example usage: get_boxes(model, val_set, num_images, image_dir, score_threshold) and returns: all_boxes, all_gt_boxes
173
node_id 1
1,411,035
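Running `get_boxes` end to end needs a trained neon SSD model and a data loader, so the sketch below only reproduces the ground-truth packing step with fabricated arrays: each row becomes `[x1, y1, x2, y2, class, difficult, detected]`, where the trailing flag is filled in later during AP matching.

```python
# Illustration of the gt-box packing inside get_boxes, with made-up values.
import numpy as np

ngt = 2
gtb = np.array([[10., 10., 50., 50.],
                [20., 30., 40., 60.]])          # [x1, y1, x2, y2] per gt box
gt_classes = np.array([3., 7.])                  # class id per gt box
difficult = np.array([0., 1.])                   # VOC "difficult" flag
detected = np.array([False] * ngt)               # updated during AP matching

packed = np.hstack([
    gtb[:ngt],
    gt_classes[:ngt, np.newaxis],
    difficult[:ngt, np.newaxis],
    detected[:, np.newaxis],
])
print(packed.shape)   # (2, 7)
```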
test_create_vector_db_from_dir
TestRetrieveUtils
null
true
self
null
null
null
null
null
def test_create_vector_db_from_dir(self): db_path = "/tmp/test_retrieve_utils_chromadb.db" if os.path.exists(db_path): client = chromadb.PersistentClient(path=db_path) else: client = chromadb.PersistentClient(path=db_path) create_vector_db_from_dir(test_dir, client=client) assert client.get_collection("all-my-documents")
["def","test_create_vector_db_from_dir","(","self",")",":","db_path","=","``","\/tmp\/test_retrieve_utils_chromadb.db","''","if","os.path.exists","(","db_path",")",":","client","=","chromadb.PersistentClient","(","path=db_path",")","else",":","client","=","chromadb.PersistentClient","(","path=db_path",")","create_vector_db_from_dir","(","test_dir",",","client=client",")","assert","client.get_collection","(","``","all-my-documents","''",")"]
96
104
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 7 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_create_vector_db_from_dir() without return types
167
node_id 7
319,452
test_is_url
TestRetrieveUtils
null
true
self
null
null
null
null
null
def test_is_url(self): assert is_url("https://www.example.com") assert not is_url("not_a_url")
["def","test_is_url","(","self",")",":","assert","is_url","(","``","https",":","\/\/www.example.com","''",")","assert","not","is_url","(","``","not_a_url","''",")"]
92
94
null
test_retrieve_utils.py
autogen/test/test_retrieve_utils.py
import pytest import os
15
1
2
0
0
12
null
Use image node_id 6 for calling the TestRetrieveUtils obj's underlying member method code with example usage: obj.test_is_url() without return types
148
node_id 6
319,451
test_download_all
TestDatasets
null
true
self
null
null
null
null
null
def test_download_all(self): # This fixture requires INTERNET CONNECTION # test_setup phase download_all() yield
["def","test_download_all","(","self",")",":","#","This","fixture","requires","INTERNET","CONNECTION","#","test_setup","phase","download_all","(",")","yield"]
30
36
null
test_data.py
scipy/scipy/datasets/tests/test_data.py
from scipy.datasets._registry import registry from scipy.datasets._fetchers import data_fetcher from scipy.datasets._utils import _clear_cache from scipy.datasets import ascent, face, electrocardiogram, download_all from numpy.testing import assert_equal, assert_almost_equal import os import pytest
15
1
7
2
0
5
null
Use image node_id 1 for calling the TestDatasets obj's underlying member method code with example usage: obj.test_download_all() without return types
149
node_id 1
1,884,887
difference
global
null
false
ctx,s,n
null
null
null
null
d
def difference(ctx, s, n): r""" Given a sequence `(s_k)` containing at least `n+1` items, returns the `n`-th forward difference, .. math :: \Delta^n = \sum_{k=0}^{\infty} (-1)^{k+n} {n \choose k} s_k. """ n = int(n) d = ctx.zero b = (-1) ** (n & 1) for k in xrange(n + 1): d += b * s[k] b = (b * (k - n)) // (k + 1) return d
["def","difference","(","ctx",",","s",",","n",")",":","r","''","''","''","Given","a","sequence","`","(","s_k",")","`","containing","at","least","`","n+1","`","items",",","returns","the","`","n","`","-th","forward","difference",",","..","math",":",":","\\Delta^n","=","\\sum_","{","k=0","}","^","{","\\infty","}","(","-1",")","^","{","k+n","}","{","n","\\choose","k","}","s_k.","``","''","''","n","=","int","(","n",")","d","=","ctx.zero","b","=","(","-1",")","*","*","(","n","&","1",")","for","k","in","xrange","(","n","+","1",")",":","d","+=","b","*","s","[","k","]","b","=","(","b","*","(","k","-","n",")",")","\/\/","(","k","+","1",")","return","d"]
14
29
null
differentiation.py
catboost/contrib/python/mpmath/py3/mpmath/calculus/differentiation.py
from ..libmp.backend import xrange from .calculus import defun
15
null
2
13
null
null
null
Use image node_id 1 for calling a global function with example usage: difference(ctx, s, n) and returns: d
106
node_id 1
407,212
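A short worked example of the forward-difference formula, run through mpmath's top-level `difference` wrapper. The assumption here is that the `@defun`-registered context method shown above is exposed under that top-level name; the arithmetic itself follows directly from the formula.

```python
# n-th forward difference of s_k = (k+1)^2: the first difference of a quadratic
# is linear, the second is constant, and the third vanishes.
from mpmath import mp, difference

mp.dps = 15
s = [1, 4, 9, 16, 25]
print(difference(s, 1))   # 3.0   = s_1 - s_0
print(difference(s, 2))   # 2.0   = s_2 - 2*s_1 + s_0
print(difference(s, 3))   # 0.0
```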
test_existence_all
TestDatasets
null
true
self
null
null
null
null
null
def test_existence_all(self): assert len(os.listdir(data_dir)) >= len(registry)
["def","test_existence_all","(","self",")",":","assert","len","(","os.listdir","(","data_dir",")",")",">","=","len","(","registry",")"]
38
39
null
test_data.py
scipy/scipy/datasets/tests/test_data.py
from scipy.datasets._registry import registry from scipy.datasets._fetchers import data_fetcher from scipy.datasets._utils import _clear_cache from scipy.datasets import ascent, face, electrocardiogram, download_all from numpy.testing import assert_equal, assert_almost_equal import os import pytest
15
1
7
2
0
5
null
Use image node_id 2 for calling the TestDatasets obj's underlying member method code with example usage: obj.test_existence_all() without return types
150
node_id 2
1,884,888
hsteps
global
null
false
ctx,f,x,n,prec
null
null
null
null
values, norm, workprec
def hsteps(ctx, f, x, n, prec, **options): singular = options.get("singular") addprec = options.get("addprec", 10) direction = options.get("direction", 0) workprec = (prec + 2 * addprec) * (n + 1) orig = ctx.prec try: ctx.prec = workprec h = options.get("h") if h is None: if options.get("relative"): hextramag = int(ctx.mag(x)) else: hextramag = 0 h = ctx.ldexp(1, -prec - addprec - hextramag) else: h = ctx.convert(h) # Directed: steps x, x+h, ... x+n*h direction = options.get("direction", 0) if direction: h *= ctx.sign(direction) steps = xrange(n + 1) norm = h # Central: steps x-n*h, x-(n-2)*h ..., x, ..., x+(n-2)*h, x+n*h else: steps = xrange(-n, n + 1, 2) norm = 2 * h # Perturb if singular: x += 0.5 * h values = [f(x + k * h) for k in steps] return values, norm, workprec finally: ctx.prec = orig
["def","hsteps","(","ctx",",","f",",","x",",","n",",","prec",",","*","*","options",")",":","singular","=","options.get","(","``","singular","''",")","addprec","=","options.get","(","``","addprec","''",",","10",")","direction","=","options.get","(","``","direction","''",",","0",")","workprec","=","(","prec","+","2","*","addprec",")","*","(","n","+","1",")","orig","=","ctx.prec","try",":","ctx.prec","=","workprec","h","=","options.get","(","``","h","''",")","if","h","is","None",":","if","options.get","(","``","relative","''",")",":","hextramag","=","int","(","ctx.mag","(","x",")",")","else",":","hextramag","=","0","h","=","ctx.ldexp","(","1",",","-prec","-","addprec","-","hextramag",")","else",":","h","=","ctx.convert","(","h",")","#","Directed",":","steps","x",",","x+h",",","...","x+n","*","h","direction","=","options.get","(","``","direction","''",",","0",")","if","direction",":","h","*","=","ctx.sign","(","direction",")","steps","=","xrange","(","n","+","1",")","norm","=","h","#","Central",":","steps","x-n","*","h",",","x-","(","n-2",")","*","h","...",",","x",",","...",",","x+","(","n-2",")","*","h",",","x+n","*","h","else",":","steps","=","xrange","(","-n",",","n","+","1",",","2",")","norm","=","2","*","h","#","Perturb","if","singular",":","x","+=","0.5","*","h","values","=","[","f","(","x","+","k","*","h",")","for","k","in","steps","]","return","values",",","norm",",","workprec","finally",":","ctx.prec","=","orig"]
31
64
null
differentiation.py
catboost/contrib/python/mpmath/py3/mpmath/calculus/differentiation.py
from ..libmp.backend import xrange from .calculus import defun
15
null
2
13
null
null
null
Use image node_id 2 for calling a global function with example usage: hsteps(ctx, f, x, n, prec) and returns: values, norm, workprec
134
node_id 2
407,213
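The two step layouts chosen by `hsteps` are easy to see in plain Python; the sketch below simply enumerates them for n = 3 and a fixed step size, leaving out the precision bookkeeping that the rest of the helper handles.

```python
# Step layouts used by hsteps for n = 3 and step size h:
# directed differences sample x, x+h, ..., x+n*h (norm h),
# central differences sample x-n*h, x-(n-2)*h, ..., x+n*h (norm 2*h).
n, h = 3, 0.125

directed_offsets = [k * h for k in range(n + 1)]
central_offsets = [k * h for k in range(-n, n + 1, 2)]

print(directed_offsets)  # [0.0, 0.125, 0.25, 0.375]
print(central_offsets)   # [-0.375, -0.125, 0.125, 0.375]
```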
diff
global
null
false
ctx,f,x,n
null
null
null
null
unknown,_partial_diff,f,unknown
def diff(ctx, f, x, n=1, **options): r""" Numerically computes the derivative of `f`, `f'(x)`, or generally for an integer `n \ge 0`, the `n`-th derivative `f^{(n)}(x)`. A few basic examples are:: >>> from mpmath import * >>> mp.dps = 15; mp.pretty = True >>> diff(lambda x: x**2 + x, 1.0) 3.0 >>> diff(lambda x: x**2 + x, 1.0, 2) 2.0 >>> diff(lambda x: x**2 + x, 1.0, 3) 0.0 >>> nprint([diff(exp, 3, n) for n in range(5)]) # exp'(x) = exp(x) [20.0855, 20.0855, 20.0855, 20.0855, 20.0855] Even more generally, given a tuple of arguments `(x_1, \ldots, x_k)` and order `(n_1, \ldots, n_k)`, the partial derivative `f^{(n_1,\ldots,n_k)}(x_1,\ldots,x_k)` is evaluated. For example:: >>> diff(lambda x,y: 3*x*y + 2*y - x, (0.25, 0.5), (0,1)) 2.75 >>> diff(lambda x,y: 3*x*y + 2*y - x, (0.25, 0.5), (1,1)) 3.0 **Options** The following optional keyword arguments are recognized: ``method`` Supported methods are ``'step'`` or ``'quad'``: derivatives may be computed using either a finite difference with a small step size `h` (default), or numerical quadrature. ``direction`` Direction of finite difference: can be -1 for a left difference, 0 for a central difference (default), or +1 for a right difference; more generally can be any complex number. ``addprec`` Extra precision for `h` used to account for the function's sensitivity to perturbations (default = 10). ``relative`` Choose `h` relative to the magnitude of `x`, rather than an absolute value; useful for large or tiny `x` (default = False). ``h`` As an alternative to ``addprec`` and ``relative``, manually select the step size `h`. ``singular`` If True, evaluation exactly at the point `x` is avoided; this is useful for differentiating functions with removable singularities. Default = False. ``radius`` Radius of integration contour (with ``method = 'quad'``). Default = 0.25. A larger radius typically is faster and more accurate, but it must be chosen so that `f` has no singularities within the radius from the evaluation point. A finite difference requires `n+1` function evaluations and must be performed at `(n+1)` times the target precision. Accordingly, `f` must support fast evaluation at high precision. With integration, a larger number of function evaluations is required, but not much extra precision is required. For high order derivatives, this method may thus be faster if f is very expensive to evaluate at high precision. **Further examples** The direction option is useful for computing left- or right-sided derivatives of nonsmooth functions:: >>> diff(abs, 0, direction=0) 0.0 >>> diff(abs, 0, direction=1) 1.0 >>> diff(abs, 0, direction=-1) -1.0 More generally, if the direction is nonzero, a right difference is computed where the step size is multiplied by sign(direction). 
For example, with direction=+j, the derivative from the positive imaginary direction will be computed:: >>> diff(abs, 0, direction=j) (0.0 - 1.0j) With integration, the result may have a small imaginary part even even if the result is purely real:: >>> diff(sqrt, 1, method='quad') # doctest:+ELLIPSIS (0.5 - 4.59...e-26j) >>> chop(_) 0.5 Adding precision to obtain an accurate value:: >>> diff(cos, 1e-30) 0.0 >>> diff(cos, 1e-30, h=0.0001) -9.99999998328279e-31 >>> diff(cos, 1e-30, addprec=100) -1.0e-30 """ partial = False try: orders = list(n) x = list(x) partial = True except TypeError: pass if partial: x = [ctx.convert(_) for _ in x] return _partial_diff(ctx, f, x, orders, options) method = options.get("method", "step") if n == 0 and method != "quad" and not options.get("singular"): return f(ctx.convert(x)) prec = ctx.prec try: if method == "step": values, norm, workprec = hsteps( ctx, f, x, n, prec, **options ) ctx.prec = workprec v = ctx.difference(values, n) / norm**n elif method == "quad": ctx.prec += 10 radius = ctx.convert(options.get("radius", 0.25)) def g(t): rei = radius * ctx.expj(t) z = x + rei return f(z) / rei**n d = ctx.quadts(g, [0, 2 * ctx.pi]) v = d * ctx.factorial(n) / (2 * ctx.pi) else: raise ValueError("unknown method: %r" % method) finally: ctx.prec = prec return +v
["def","diff","(","ctx",",","f",",","x",",","n=1",",","*","*","options",")",":","r","''","''","''","Numerically","computes","the","derivative","of","`","f","`",",","`","f","'","(","x",")","`",",","or","generally","for","an","integer","`","n","\\ge","0","`",",","the","`","n","`","-th","derivative","`","f^","{","(","n",")","}","(","x",")","`",".","A","few","basic","examples","are",":",":",">",">",">","from","mpmath","import","*",">",">",">","mp.dps","=","15",";","mp.pretty","=","True",">",">",">","diff","(","lambda","x",":","x","*","*","2","+","x",",","1.0",")","3.0",">",">",">","diff","(","lambda","x",":","x","*","*","2","+","x",",","1.0",",","2",")","2.0",">",">",">","diff","(","lambda","x",":","x","*","*","2","+","x",",","1.0",",","3",")","0.0",">",">",">","nprint","(","[","diff","(","exp",",","3",",","n",")","for","n","in","range","(","5",")","]",")","#","exp","'","(","x",")","=","exp","(","x",")","[","20.0855",",","20.0855",",","20.0855",",","20.0855",",","20.0855","]","Even","more","generally",",","given","a","tuple","of","arguments","`","(","x_1",",","\\ldots",",","x_k",")","`","and","order","`","(","n_1",",","\\ldots",",","n_k",")","`",",","the","partial","derivative","`","f^","{","(","n_1",",","\\ldots",",","n_k",")","}","(","x_1",",","\\ldots",",","x_k",")","`","is","evaluated",".","For","example",":",":",">",">",">","diff","(","lambda","x",",","y",":","3","*","x","*","y","+","2","*","y","-","x",",","(","0.25",",","0.5",")",",","(","0,1",")",")","2.75",">",">",">","diff","(","lambda","x",",","y",":","3","*","x","*","y","+","2","*","y","-","x",",","(","0.25",",","0.5",")",",","(","1,1",")",")","3.0","*","*","Options","*","*","The","following","optional","keyword","arguments","are","recognized",":","``","method","``","Supported","methods","are","``","'step","'","``","or","``","'quad","'","``",":","derivatives","may","be","computed","using","either","a","finite","difference","with","a","small","step","size","`","h","`","(","default",")",",","or","numerical","quadrature",".","``","direction","``","Direction","of","finite","difference",":","can","be","-1","for","a","left","difference",",","0","for","a","central","difference","(","default",")",",","or","+1","for","a","right","difference",";","more","generally","can","be","any","complex","number",".","``","addprec","``","Extra","precision","for","`","h","`","used","to","account","for","the","function's","sensitivity","to","perturbations","(","default","=","10",")",".","``","relative","``","Choose","`","h","`","relative","to","the","magnitude","of","`","x","`",",","rather","than","an","absolute","value",";","useful","for","large","or","tiny","`","x","`","(","default","=","False",")",".","``","h","``","As","an","alternative","to","``","addprec","``","and","``","relative","``",",","manually","select","the","step","size","`","h","`",".","``","singular","``","If","True",",","evaluation","exactly","at","the","point","`","x","`","is","avoided",";","this","is","useful","for","differentiating","functions","with","removable","singularities",".","Default","=","False",".","``","radius","``","Radius","of","integration","contour","(","with","``","method","=","'quad","'","``",")",".","Default","=","0.25",".","A","larger","radius","typically","is","faster","and","more","accurate",",","but","it","must","be","chosen","so","that","`","f","`","has","no","singularities","within","the","radius","from","the","evaluation","point",".","A","finite","difference","requires","`","n+1","`","function","evaluations","and","must","be","performed","at","`","(","n+1",")","`
","times","the","target","precision",".","Accordingly",",","`","f","`","must","support","fast","evaluation","at","high","precision",".","With","integration",",","a","larger","number","of","function","evaluations","is","required",",","but","not","much","extra","precision","is","required",".","For","high","order","derivatives",",","this","method","may","thus","be","faster","if","f","is","very","expensive","to","evaluate","at","high","precision",".","*","*","Further","examples","*","*","The","direction","option","is","useful","for","computing","left-","or","right-sided","derivatives","of","nonsmooth","functions",":",":",">",">",">","diff","(","abs",",","0",",","direction=0",")","0.0",">",">",">","diff","(","abs",",","0",",","direction=1",")","1.0",">",">",">","diff","(","abs",",","0",",","direction=-1",")","-1.0","More","generally",",","if","the","direction","is","nonzero",",","a","right","difference","is","computed","where","the","step","size","is","multiplied","by","sign","(","direction",")",".","For","example",",","with","direction=+j",",","the","derivative","from","the","positive","imaginary","direction","will","be","computed",":",":",">",">",">","diff","(","abs",",","0",",","direction=j",")","(","0.0","-","1.0j",")","With","integration",",","the","result","may","have","a","small","imaginary","part","even","even","if","the","result","is","purely","real",":",":",">",">",">","diff","(","sqrt",",","1",",","method='quad","'",")","#","doctest",":","+ELLIPSIS","(","0.5","-","4.59","...","e-26j",")",">",">",">","chop","(","_",")","0.5","Adding","precision","to","obtain","an","accurate","value",":",":",">",">",">","diff","(","cos",",","1e-30",")","0.0",">",">",">","diff","(","cos",",","1e-30",",","h=0.0001",")","-9.99999998328279e-31",">",">",">","diff","(","cos",",","1e-30",",","addprec=100",")","-1.0e-30","``","''","''","partial","=","False","try",":","orders","=","list","(","n",")","x","=","list","(","x",")","partial","=","True","except","TypeError",":","pass","if","partial",":","x","=","[","ctx.convert","(","_",")","for","_","in","x","]","return","_partial_diff","(","ctx",",","f",",","x",",","orders",",","options",")","method","=","options.get","(","``","method","''",",","``","step","''",")","if","n","==","0","and","method","!","=","``","quad","''","and","not","options.get","(","``","singular","''",")",":","return","f","(","ctx.convert","(","x",")",")","prec","=","ctx.prec","try",":","if","method","==","``","step","''",":","values",",","norm",",","workprec","=","hsteps","(","ctx",",","f",",","x",",","n",",","prec",",","*","*","options",")","ctx.prec","=","workprec","v","=","ctx.difference","(","values",",","n",")","\/","norm","*","*","n","elif","method","==","``","quad","''",":","ctx.prec","+=","10","radius","=","ctx.convert","(","options.get","(","``","radius","''",",","0.25",")",")","def","g","(","t",")",":","rei","=","radius","*","ctx.expj","(","t",")","z","=","x","+","rei","return","f","(","z",")","\/","rei","*","*","n","d","=","ctx.quadts","(","g",",","[","0",",","2","*","ctx.pi","]",")","v","=","d","*","ctx.factorial","(","n",")","\/","(","2","*","ctx.pi",")","else",":","raise","ValueError","(","``","unknown","method",":","%","r","''","%","method",")","finally",":","ctx.prec","=","prec","return","+v"]
68
204
null
differentiation.py
catboost/contrib/python/mpmath/py3/mpmath/calculus/differentiation.py
from ..libmp.backend import xrange from .calculus import defun
15
null
2
13
null
null
null
Use image node_id 3 for calling a global function with example usage: diff(ctx, f, x, n) and returns: unknown, _partial_diff, f, unknown
136
node_id 3
407,214
test_extra_tags_paddle_autolog
global
null
false
null
null
null
null
null
def test_extra_tags_paddle_autolog(): mlflow.paddle.autolog(extra_tags={"test_tag": "paddle_autolog"}) train_model() run = mlflow.last_active_run() assert run.data.tags["test_tag"] == "paddle_autolog" assert ( run.data.tags[mlflow.utils.mlflow_tags.MLFLOW_AUTOLOGGING] == "paddle" )
["def","test_extra_tags_paddle_autolog","(",")",":","mlflow.paddle.autolog","(","extra_tags=","{","``","test_tag","''",":","``","paddle_autolog","''","}",")","train_model","(",")","run","=","mlflow.last_active_run","(",")","assert","run.data.tags","[","``","test_tag","''","]","==","``","paddle_autolog","''","assert","(","run.data.tags","[","mlflow.utils.mlflow_tags.MLFLOW_AUTOLOGGING","]","==","``","paddle","''",")"]
107
113
null
test_paddle_autolog.py
mlflow/tests/paddle/test_paddle_autolog.py
import paddle import pytest import mlflow from mlflow import MlflowClient
15
null
4
7
null
null
null
Use image node_id 7 for calling a global function with example usage: test_extra_tags_paddle_autolog() without return types
123
node_id 7
1,356,432
_partial_diff
global
null
false
ctx,f,xs,orders,options
null
null
null
null
_partial_diff,f,f,ctx,f
def _partial_diff(ctx, f, xs, orders, options): if not orders: return f() if not sum(orders): return f(*xs) i = 0 for i in range(len(orders)): if orders[i]: break order = orders[i] def fdiff_inner(*f_args): def inner(t): return f(*(f_args[:i] + (t,) + f_args[i + 1 :])) return ctx.diff(inner, f_args[i], order, **options) orders[i] = 0 return _partial_diff(ctx, fdiff_inner, xs, orders, options)
["def","_partial_diff","(","ctx",",","f",",","xs",",","orders",",","options",")",":","if","not","orders",":","return","f","(",")","if","not","sum","(","orders",")",":","return","f","(","*","xs",")","i","=","0","for","i","in","range","(","len","(","orders",")",")",":","if","orders","[","i","]",":","break","order","=","orders","[","i","]","def","fdiff_inner","(","*","f_args",")",":","def","inner","(","t",")",":","return","f","(","*","(","f_args","[",":","i","]","+","(","t",",",")","+","f_args","[","i","+","1",":","]",")",")","return","ctx.diff","(","inner",",","f_args","[","i","]",",","order",",","*","*","options",")","orders","[","i","]","=","0","return","_partial_diff","(","ctx",",","fdiff_inner",",","xs",",","orders",",","options",")"]
206
221
null
differentiation.py
catboost/contrib/python/mpmath/py3/mpmath/calculus/differentiation.py
from ..libmp.backend import xrange from .calculus import defun
15
null
2
13
null
null
null
Use image node_id 4 for calling a global function with example usage: _partial_diff(ctx, f, xs, orders, options) and returns: _partial_diff, f, f, ctx, f
153
node_id 4
407,215
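The recursion above is what the public `diff` entry point falls back to when it receives tuples of points and orders. One extra mixed-partial check, using a different function from the docstring examples, makes the order handling concrete.

```python
# Mixed partial via the public interface that dispatches to _partial_diff:
# d^2/(dx dy) of x**2 * y is 2*x, i.e. 6 at (x, y) = (3, 2).
from mpmath import mp, diff

mp.dps = 15
val = diff(lambda x, y: x ** 2 * y, (3, 2), (1, 1))
print(val)   # approximately 6.0 (finite-difference result, so a tiny error is possible)
```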
__init__
AudioInput
null
true
self,channels_first,channels,sample_rate,batch_size
Create audio batch.
["Create","audio","batch","."]
null
null
AudioInput
def __init__( self, channels_first, channels, sample_rate=44100, batch_size=2 ): self.channels_first = channels_first self.channels = channels self.sample_rate = sample_rate self.batch_size = batch_size
["def","__init__","(","self",",","channels_first",",","channels",",","sample_rate=44100",",","batch_size=2",")",":","self.channels_first","=","channels_first","self.channels","=","channels","self.sample_rate","=","sample_rate","self.batch_size","=","batch_size"]
38
42
null
test_mp3_compression_pytorch.py
adversarial-robustness-toolbox/tests/defences/preprocessor/test_mp3_compression_pytorch.py
from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy import pytest from numpy.testing import assert_array_equal from art.defences.preprocessor import Mp3CompressionPyTorch from tests.utils import ARTTestException
15
1
7
5
0
2
null
Use image node_id 1 to create a new AudioInput object with example: obj = AudioInput(channels_first, channels, sample_rate, batch_size)
136
node_id 1
235,203
test_autolog_registering_model
global
null
false
null
null
null
null
null
def test_autolog_registering_model():
    registered_model_name = "test_autolog_registered_model"
    mlflow.paddle.autolog(registered_model_name=registered_model_name)
    with mlflow.start_run():
        train_model()

    registered_model = MlflowClient().get_registered_model(registered_model_name)
    assert registered_model.name == registered_model_name
["def","test_autolog_registering_model","(",")",":","registered_model_name","=","``","test_autolog_registered_model","''","mlflow.paddle.autolog","(","registered_model_name=registered_model_name",")","with","mlflow.start_run","(",")",":","train_model","(",")","registered_model","=","MlflowClient","(",")",".get_registered_model","(","registered_model_name",")","assert","registered_model.name","==","registered_model_name"]
96
104
null
test_paddle_autolog.py
mlflow/tests/paddle/test_paddle_autolog.py
import paddle import pytest import mlflow from mlflow import MlflowClient
15
null
4
7
null
null
null
Use image node_id 6 for calling a global function with example usage: test_autolog_registering_model() without return types
123
node_id 6
1,356,431
get_data
AudioInput
null
true
self
Create audio batch.
["Create","audio","batch","."]
null
null
np
def get_data(self):
    if self.channels_first:
        shape = (self.batch_size, self.channels, self.sample_rate)
    else:
        shape = (self.batch_size, self.sample_rate, self.channels)
    return np.zeros(shape, dtype=np.int16)
["def","get_data","(","self",")",":","if","self.channels_first",":","shape","=","(","self.batch_size",",","self.channels",",","self.sample_rate",")","else",":","shape","=","(","self.batch_size",",","self.sample_rate",",","self.channels",")","return","np.zeros","(","shape",",","dtype=np.int16",")"]
44
49
null
test_mp3_compression_pytorch.py
adversarial-robustness-toolbox/tests/defences/preprocessor/test_mp3_compression_pytorch.py
from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy import pytest from numpy.testing import assert_array_equal from art.defences.preprocessor import Mp3CompressionPyTorch from tests.utils import ARTTestException
15
1
7
5
0
2
null
Use image node_id 2 for calling the AudioInput obj's underlying member method code with example usage: obj.get_data() and returns: np
133
node_id 2
235,204
audio_batch
global
null
false
request,channels_first
null
null
null
null
test_input, test_output, audio_input
def audio_batch(request, channels_first):
    """
    Audio fixtures of shape `(batch_size, channels, samples)`
    or `(batch_size, samples, channels)`.
    """
    channels = request.param
    audio_input = AudioInput(channels_first, channels)
    test_input = audio_input.get_data()
    test_output = test_input.copy()
    return test_input, test_output, audio_input.sample_rate
["def","audio_batch","(","request",",","channels_first",")",":","``","''","''","Audio","fixtures","of","shape","`","(","batch_size",",","channels",",","samples",")","`","or","`","(","batch_size",",","samples",",","channels",")","`",".","``","''","''","channels","=","request.param","audio_input","=","AudioInput","(","channels_first",",","channels",")","test_input","=","audio_input.get_data","(",")","test_output","=","test_input.copy","(",")","return","test_input",",","test_output",",","audio_input.sample_rate"]
53
61
null
test_mp3_compression_pytorch.py
adversarial-robustness-toolbox/tests/defences/preprocessor/test_mp3_compression_pytorch.py
from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy import pytest from numpy.testing import assert_array_equal from art.defences.preprocessor import Mp3CompressionPyTorch from tests.utils import ARTTestException
15
null
7
5
null
null
null
Use image node_id 1 for calling a global function with example usage: audio_batch(request, channels_first) and returns: test_input, test_output, audio_input
158
node_id 1
235,205
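Read together, the `AudioInput` records and the `audio_batch` fixture above describe how this test file builds its audio batches. The following sketch is illustrative only (it is not part of the dataset) and assumes the `AudioInput` class from the records is in scope; it simply instantiates the helper to make the resulting shapes concrete.

```python
import numpy as np

# Using the AudioInput helper exactly as defined in the records above.
channels_first = AudioInput(channels_first=True, channels=1)
channels_last = AudioInput(channels_first=False, channels=2)

x_cf = channels_first.get_data()
x_cl = channels_last.get_data()

print(x_cf.shape)  # (2, 1, 44100) -> (batch_size, channels, samples)
print(x_cl.shape)  # (2, 44100, 2) -> (batch_size, samples, channels)
print(x_cf.dtype)  # int16, matching the zeros created in get_data()
```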
log
NullLogger
null
true
self,source_name
null
null
null
null
null
def log(self, source_name, *args):
    if self.on() and self.interesting(source_name):
        self.do_log(self.indent_)
        for i in args:
            self.do_log(i)
        self.do_log("\n")
["def","log","(","self",",","source_name",",","*","args",")",":","if","self.on","(",")","and","self.interesting","(","source_name",")",":","self.do_log","(","self.indent_",")","for","i","in","args",":","self.do_log","(","i",")","self.do_log","(","``","\\n","''",")"]
11
16
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
7
null
Use image node_id 2 for calling the NullLogger obj's underlying member method code with example usage: obj.log(source_name) without return types
144
node_id 2
2,276,648
increase_indent
NullLogger
null
true
self
null
null
null
null
null
def increase_indent(self):
    if self.on():
        self.indent_ += "    "
["def","increase_indent","(","self",")",":","if","self.on","(",")",":","self.indent_","+=","``","``"]
18
20
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
7
null
Use image node_id 3 for calling the NullLogger obj's underlying member method code with example usage: obj.increase_indent() without return types
145
node_id 3
2,276,649
_pof_given_samples
global
null
false
f,pts,map
null
null
null
null
pof
def _pof_given_samples(f, pts, map=None):
    """
    use given sample pts to calculate probability of failure for function f

    Inputs:
        f -- a function that returns True for 'success' and False for 'failure'
        pts -- a list of sample points
        map -- the mapping function [Default is builtins.map]
    """
    if map is None:
        from builtins import map
    from numpy import transpose, atleast_2d

    results = list(map(f, atleast_2d(transpose(pts)).tolist()))
    pof = float(results.count(False)) / float(len(results))
    return pof
["def","_pof_given_samples","(","f",",","pts",",","map=None",")",":","``","''","''","use","given","sample","pts","to","calculate","probability","of","failure","for","function","f","Inputs",":","f","--","a","function","that","returns","True","for","'success","'","and","False","for","'failure'","pts","--","a","list","of","sample","points","map","--","the","mapping","function","[","Default","is","builtins.map","]","''","''","''","if","map","is","None",":","from","builtins","import","map","from","numpy","import","transpose",",","atleast_2d","results","=","list","(","map","(","f",",","atleast_2d","(","transpose","(","pts",")",")",".tolist","(",")",")",")","pof","=","float","(","results.count","(","False",")",")","\/","float","(","len","(","results",")",")","return","pof"]
164
178
null
samples.py
mystic/mystic/math/samples.py
15
null
0
15
null
null
null
Use image node_id 7 for calling a global function with example usage: _pof_given_samples(f, pts, map) and returns: pof
118
node_id 7
1,407,027
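As a quick illustration of what `_pof_given_samples` computes, the hedged sketch below reproduces its core logic with plain NumPy; it is not taken from mystic itself, and the success test is an arbitrary stand-in chosen only to make the arithmetic checkable.

```python
import numpy as np

# Stand-in "success" test: a point succeeds when its first coordinate is >= 0.
def is_success(point):
    return point[0] >= 0.0

# pts follows the helper's convention: one row per parameter, one column per sample.
pts = np.array([[-1.0, -0.5, 0.5, 1.0],
                [ 0.0,  1.0, 2.0, 3.0]])

results = [is_success(p) for p in np.atleast_2d(np.transpose(pts)).tolist()]
pof = results.count(False) / len(results)
print(pof)  # 0.5 -> two of the four sampled points fail
```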
_minimum_given_samples
global
null
false
f,pts,map
null
null
null
null
min
def _minimum_given_samples(f, pts, map=None):
    """
    use given sample pts to calculate minimum for function f

    Inputs:
        f -- a function that returns a single value, given a list of inputs
        pts -- a list of sample points
        map -- the mapping function [Default is builtins.map]
    """
    if map is None:
        from builtins import map
    from numpy import transpose, atleast_2d

    return min(list(map(f, atleast_2d(transpose(pts)).tolist())))
["def","_minimum_given_samples","(","f",",","pts",",","map=None",")",":","``","''","''","use","given","sample","pts","to","calculate","minimum","for","function","f","Inputs",":","f","--","a","function","that","returns","a","single","value",",","given","a","list","of","inputs","pts","--","a","list","of","sample","points","map","--","the","mapping","function","[","Default","is","builtins.map","]","''","''","''","if","map","is","None",":","from","builtins","import","map","from","numpy","import","transpose",",","atleast_2d","return","min","(","list","(","map","(","f",",","atleast_2d","(","transpose","(","pts",")",")",".tolist","(",")",")",")",")"]
181
193
null
samples.py
mystic/mystic/math/samples.py
15
null
0
15
null
null
null
Use image node_id 8 for calling a global function with example usage: _minimum_given_samples(f, pts, map) and returns: min
122
node_id 8
1,407,028
to_json_file
Config
null
true
self,json_file
Hold some config params to control generation and report procedure.
["Hold","some","config","params","to","control","generation","and","report","procedure","."]
Serializes this instance to a JSON file.
["Serializes","this","instance","to","a","JSON","file","."]
null
def to_json_file(self, json_file):
    r"""
    Serializes this instance to a JSON file.
    """
    with open(json_file, "w+", encoding="utf-8") as writer:
        json.dump(self.to_dict(), writer, indent=2, ensure_ascii=False)
["def","to_json_file","(","self",",","json_file",")",":","r","''","''","''","Serializes","this","instance","to","a","JSON","file.","``","''","''","with","open","(","json_file",",","``","w+","''",",","encoding=","''","utf-8","''",")","as","writer",":","json.dump","(","self.to_dict","(",")",",","writer",",","indent=2",",","ensure_ascii=False",")"]
233
244
null
config.py
textflint/textflint/input/config/config.py
import os import six import json import copy from ...common.utils.logger import logger from ...common.settings import NLP_TASK_MAP, ALLOWED_TRANSFORMATIONS, TRANSFORM_FIELDS, ALLOWED_SUBPOPULATIONS, ALLOWED_VALIDATORS, ALLOWED_cn_TRANSFORMATIONS
15
1
6
0
0
8
null
Use image node_id 8 for calling the Config obj's underlying member method code with example usage: obj.to_json_file(json_file) without return types
147
node_id 8
2,188,355
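The `to_json_file` record is a thin wrapper around `json.dump`. Since the rest of the `Config` class (its constructor and `to_dict`) is not shown in this excerpt, the sketch below uses a plain dictionary as a hypothetical stand-in for `to_dict()` to show the serialization pattern the method relies on; the keys are invented for illustration.

```python
import json

# Hypothetical stand-in for Config.to_dict(); real keys depend on the task setup.
config_dict = {"task": "UT", "out_dir": "./out", "trans_methods": ["Ocr"]}

with open("config_demo.json", "w+", encoding="utf-8") as writer:
    json.dump(config_dict, writer, indent=2, ensure_ascii=False)
```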
_expectation_given_samples
global
null
false
f,pts,map
null
null
null
null
mean
def _expectation_given_samples(f, pts, map=None):
    """
    use given sample pts to calculate expected value for function f

    Inputs:
        f -- a function that returns a single value, given a list of inputs
        pts -- a list of sample points
        map -- the mapping function [Default is builtins.map]
    """
    if map is None:
        from builtins import map
    from numpy import transpose, mean, atleast_2d

    return mean(list(map(f, atleast_2d(transpose(pts)).tolist())))
["def","_expectation_given_samples","(","f",",","pts",",","map=None",")",":","``","''","''","use","given","sample","pts","to","calculate","expected","value","for","function","f","Inputs",":","f","--","a","function","that","returns","a","single","value",",","given","a","list","of","inputs","pts","--","a","list","of","sample","points","map","--","the","mapping","function","[","Default","is","builtins.map","]","''","''","''","if","map","is","None",":","from","builtins","import","map","from","numpy","import","transpose",",","mean",",","atleast_2d","return","mean","(","list","(","map","(","f",",","atleast_2d","(","transpose","(","pts",")",")",".tolist","(",")",")",")",")"]
196
208
null
samples.py
mystic/mystic/math/samples.py
15
null
0
15
null
null
null
Use image node_id 9 for calling a global function with example usage: _expectation_given_samples(f, pts, map) and returns: mean
127
node_id 9
1,407,029
test_import_vispy
global
null
false
null
null
null
null
null
def test_import_vispy():
    """Importing vispy should only pull in other vispy.util submodule."""
    modnames = loaded_vispy_modules("vispy", 2)
    assert_equal(modnames, set(_min_modules))
["def","test_import_vispy","(",")",":","``","''","''","Importing","vispy","should","only","pull","in","other","vispy.util","submodule",".","''","''","''","modnames","=","loaded_vispy_modules","(","``","vispy","''",",","2",")","assert_equal","(","modnames",",","set","(","_min_modules",")",")"]
58
61
null
test_import.py
vispy/vispy/util/tests/test_import.py
import sys import os from vispy.testing import assert_in, assert_not_in, requires_pyopengl, run_tests_if_main, assert_equal from vispy.util import run_subprocess import vispy
15
null
5
10
null
null
null
Use image node_id 3 for calling a global function with example usage: test_import_vispy() without return types
110
node_id 3
2,320,261
test_import_vispy_util
global
null
false
null
null
null
null
null
def test_import_vispy_util():
    """Importing vispy.util should not pull in other vispy submodules."""
    modnames = loaded_vispy_modules("vispy.util", 2)
    assert_equal(modnames, set(_min_modules))
["def","test_import_vispy_util","(",")",":","``","''","''","Importing","vispy.util","should","not","pull","in","other","vispy","submodules",".","''","''","''","modnames","=","loaded_vispy_modules","(","``","vispy.util","''",",","2",")","assert_equal","(","modnames",",","set","(","_min_modules",")",")"]
64
67
null
test_import.py
vispy/vispy/util/tests/test_import.py
import sys import os from vispy.testing import assert_in, assert_not_in, requires_pyopengl, run_tests_if_main, assert_equal from vispy.util import run_subprocess import vispy
15
null
5
10
null
null
null
Use image node_id 4 for calling a global function with example usage: test_import_vispy_util() without return types
115
node_id 4
2,320,262
_variance_given_samples
global
null
false
f,pts,map
null
null
null
null
var
def _variance_given_samples(f, pts, map=None):
    """
    use given sample pts to calculate expected variance for function f

    Inputs:
        f -- a function that returns a single value, given a list of inputs
        pts -- a list of sample points
        map -- the mapping function [Default is builtins.map]
    """
    if map is None:
        from builtins import map
    from numpy import transpose, var, atleast_2d

    return var(list(map(f, atleast_2d(transpose(pts)).tolist())))
["def","_variance_given_samples","(","f",",","pts",",","map=None",")",":","``","''","''","use","given","sample","pts","to","calculate","expected","variance","for","function","f","Inputs",":","f","--","a","function","that","returns","a","single","value",",","given","a","list","of","inputs","pts","--","a","list","of","sample","points","map","--","the","mapping","function","[","Default","is","builtins.map","]","''","''","''","if","map","is","None",":","from","builtins","import","map","from","numpy","import","transpose",",","var",",","atleast_2d","return","var","(","list","(","map","(","f",",","atleast_2d","(","transpose","(","pts",")",")",".tolist","(",")",")",")",")"]
211
223
null
samples.py
mystic/mystic/math/samples.py
15
null
0
15
null
null
null
Use image node_id 10 for calling a global function with example usage: _variance_given_samples(f, pts, map) and returns: var
124
node_id 10
1,407,030
__init__
InterestRateSwap
null
true
self,start_date,maturity_date,pay_leg,receive_leg,holiday_calendar,dtype,name
Represents a batch of Interest Rate Swaps (IRS). An Interest rate swap (IRS) is a contract between two counterparties for an exchange of a series of payments over a period of time. The payments are made periodically (for example quarterly or semi-annually) where the last payment is made at the maturity (or termination) of the contract. In the case of fixed-for-floating IRS, one counterparty pays a fixed rate while the other counterparty's payments are linked to a floating index, most commonly the LIBOR rate. On the other hand, in the case of interest rate basis swap, the payments of both counterparties are linked to a floating index. Typically, the floating rate is observed (or fixed) at the beginning of each period while the payments are made at the end of each period [1]. For example, consider a vanilla swap with the starting date T_0 and maturity date T_n and equally spaced coupon payment dates T_1, T_2, ..., T_n such that T_0 < T_1 < T_2 < ... < T_n and dt_i = T_(i+1) - T_i (A) The floating rate is fixed on T_0, T_1, ..., T_(n-1) and both the fixed and floating payments are made on T_1, T_2, ..., T_n (payment dates). The InterestRateSwap class can be used to create and price multiple IRS simultaneously. The class supports vanilla fixed-for-floating swaps as well as basis swaps. However all IRS within an IRS object must be priced using a common reference and discount curve. #### Example (non batch): The following example illustrates the construction of an IRS instrument and calculating its price. ```python import numpy as np import tensorflow as tf import tf_quant_finance as tff dates = tff.datetime instruments = tff.experimental.instruments dtype = np.float64 start_date = dates.convert_to_date_tensor([(2020, 2, 8)]) maturity_date = dates.convert_to_date_tensor([(2022, 2, 8)]) valuation_date = dates.convert_to_date_tensor([(2020, 2, 8)]) period_3m = dates.periods.months(3) period_6m = dates.periods.months(6) fix_spec = instruments.FixedCouponSpecs( coupon_frequency=period_6m, currency='usd', notional=1., coupon_rate=0.03134, daycount_convention=instruments.DayCountConvention.ACTUAL_365, businessday_rule=dates.BusinessDayConvention.NONE) flt_spec = instruments.FloatCouponSpecs( coupon_frequency=period_3m, reference_rate_term=period_3m, reset_frequency=period_3m, currency='usd', notional=1., businessday_rule=dates.BusinessDayConvention.NONE, coupon_basis=0., coupon_multiplier=1., daycount_convention=instruments.DayCountConvention.ACTUAL_365) swap = instruments.InterestRateSwap([(2020,2,2)], [(2023,2,2)], [fix_spec], [flt_spec], dtype=np.float64) curve_dates = valuation_date + dates.periods.years([1, 2, 3, 5, 7, 10, 30]) reference_curve = instruments.RateCurve( curve_dates, np.array([ 0.02834814, 0.03077457, 0.03113739, 0.03130794, 0.03160892, 0.03213901, 0.03257991 ], dtype=dtype), valuation_date=valuation_date, dtype=dtype) market = instruments.InterestRateMarket( reference_curve=reference_curve, discount_curve=reference_curve) price = swap.price(valuation_date, market) # Expected result: 1e-7 ``` #### Example (batch): The following example illustrates the construction and pricing of IRS using batches. 
```python import numpy as np import tensorflow as tf import tf_quant_finance as tff dates = tff.datetime instruments = tff.experimental.instruments dtype = np.float64 notional = 1.0 maturity_date = dates.convert_to_date_tensor([(2023, 2, 8), (2027, 2, 8)]) start_date = dates.convert_to_date_tensor([(2020, 2, 8), (2020, 2, 8)]) valuation_date = dates.convert_to_date_tensor([(2020, 2, 8)]) period3m = dates.periods.months([3, 3]) period6m = dates.periods.months([6, 6]) fix_spec = instruments.FixedCouponSpecs( coupon_frequency=period6m, currency='usd', notional=notional, coupon_rate=[0.03134, 0.03181], daycount_convention=instruments.DayCountConvention.ACTUAL_365, businessday_rule=dates.BusinessDayConvention.NONE) flt_spec = instruments.FloatCouponSpecs( coupon_frequency=period3m, reference_rate_term=period3m, reset_frequency=period3m, currency='usd', notional=notional, businessday_rule=dates.BusinessDayConvention.NONE, coupon_basis=0.0, coupon_multiplier=1.0, daycount_convention=instruments.DayCountConvention.ACTUAL_365) swap = instruments.InterestRateSwap(start_date, maturity_date, fix_spec, flt_spec, dtype=dtype) curve_dates = valuation_date + dates.periods.years([1, 2, 3, 5, 7, 10, 30]) reference_curve = instruments.RateCurve( curve_dates, np.array([ 0.02834814, 0.03077457, 0.03113739, 0.03130794, 0.03160892, 0.03213901, 0.03257991 ], dtype=dtype), valuation_date=valuation_date, dtype=dtype) market = instruments.InterestRateMarket( reference_curve=reference_curve, discount_curve=reference_curve) price = swap.price(valuation_date, market) # Expected result: [1.0e-7, 1.0e-7] ``` #### References: [1]: Leif B.G. Andersen and Vladimir V. Piterbarg. Interest Rate Modeling, Volume I: Foundations and Vanilla Models. Chapter 5. 2010.
["Represents","a","batch","of","Interest","Rate","Swaps","(","IRS",")",".","An","Interest","rate","swap","(","IRS",")","is","a","contract","between","two","counterparties","for","an","exchange","of","a","series","of","payments","over","a","period","of","time",".","The","payments","are","made","periodically","(","for","example","quarterly","or","semi-annually",")","where","the","last","payment","is","made","at","the","maturity","(","or","termination",")","of","the","contract",".","In","the","case","of","fixed-for-floating","IRS",",","one","counterparty","pays","a","fixed","rate","while","the","other","counterparty","'s","payments","are","linked","to","a","floating","index",",","most","commonly","the","LIBOR","rate",".","On","the","other","hand",",","in","the","case","of","interest","rate","basis","swap",",","the","payments","of","both","counterparties","are","linked","to","a","floating","index",".","Typically",",","the","floating","rate","is","observed","(","or","fixed",")","at","the","beginning","of","each","period","while","the","payments","are","made","at","the","end","of","each","period","[","1","]",".","For","example",",","consider","a","vanilla","swap","with","the","starting","date","T_0","and","maturity","date","T_n","and","equally","spaced","coupon","payment","dates","T_1",",","T_2",",","...",",","T_n","such","that","T_0","<","T_1","<","T_2","<","...","<","T_n","and","dt_i","=","T_","(","i+1",")","-","T_i","(","A",")","The","floating","rate","is","fixed","on","T_0",",","T_1",",","...",",","T_","(","n-1",")","and","both","the","fixed","and","floating","payments","are","made","on","T_1",",","T_2",",","...",",","T_n","(","payment","dates",")",".","The","InterestRateSwap","class","can","be","used","to","create","and","price","multiple","IRS","simultaneously",".","The","class","supports","vanilla","fixed-for-floating","swaps","as","well","as","basis","swaps",".","However","all","IRS","within","an","IRS","object","must","be","priced","using","a","common","reference","and","discount","curve",".","#","#","#","#","Example","(","non","batch",")",":","The","following","example","illustrates","the","construction","of","an","IRS","instrument","and","calculating","its","price",".","``","`","python","import","numpy","as","np","import","tensorflow","as","tf","import","tf_quant_finance","as","tff","dates","=","tff.datetime","instruments","=","tff.experimental.instruments","dtype","=","np.float64","start_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")","]",")","maturity_date","=","dates.convert_to_date_tensor","(","[","(","2022",",","2",",","8",")","]",")","valuation_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")","]",")","period_3m","=","dates.periods.months","(","3",")","period_6m","=","dates.periods.months","(","6",")","fix_spec","=","instruments.FixedCouponSpecs","(","coupon_frequency=period_6m",",","currency='usd","'",",","notional=1.",",","coupon_rate=0.03134",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",",","businessday_rule=dates.BusinessDayConvention.NONE",")","flt_spec","=","instruments.FloatCouponSpecs","(","coupon_frequency=period_3m",",","reference_rate_term=period_3m",",","reset_frequency=period_3m",",","currency='usd","'",",","notional=1.",",","businessday_rule=dates.BusinessDayConvention.NONE",",","coupon_basis=0.",",","coupon_multiplier=1.",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",")","swap","=","instruments.InterestRateSwap","(","[","(","2020,2,2",")","]",",","[","(","2023
,2,2",")","]",",","[","fix_spec","]",",","[","flt_spec","]",",","dtype=np.float64",")","curve_dates","=","valuation_date","+","dates.periods.years","(","[","1",",","2",",","3",",","5",",","7",",","10",",","30","]",")","reference_curve","=","instruments.RateCurve","(","curve_dates",",","np.array","(","[","0.02834814",",","0.03077457",",","0.03113739",",","0.03130794",",","0.03160892",",","0.03213901",",","0.03257991","]",",","dtype=dtype",")",",","valuation_date=valuation_date",",","dtype=dtype",")","market","=","instruments.InterestRateMarket","(","reference_curve=reference_curve",",","discount_curve=reference_curve",")","price","=","swap.price","(","valuation_date",",","market",")","#","Expected","result",":","1e-7","``","`","#","#","#","#","Example","(","batch",")",":","The","following","example","illustrates","the","construction","and","pricing","of","IRS","using","batches",".","``","`","python","import","numpy","as","np","import","tensorflow","as","tf","import","tf_quant_finance","as","tff","dates","=","tff.datetime","instruments","=","tff.experimental.instruments","dtype","=","np.float64","notional","=","1.0","maturity_date","=","dates.convert_to_date_tensor","(","[","(","2023",",","2",",","8",")",",","(","2027",",","2",",","8",")","]",")","start_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")",",","(","2020",",","2",",","8",")","]",")","valuation_date","=","dates.convert_to_date_tensor","(","[","(","2020",",","2",",","8",")","]",")","period3m","=","dates.periods.months","(","[","3",",","3","]",")","period6m","=","dates.periods.months","(","[","6",",","6","]",")","fix_spec","=","instruments.FixedCouponSpecs","(","coupon_frequency=period6m",",","currency='usd","'",",","notional=notional",",","coupon_rate=","[","0.03134",",","0.03181","]",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",",","businessday_rule=dates.BusinessDayConvention.NONE",")","flt_spec","=","instruments.FloatCouponSpecs","(","coupon_frequency=period3m",",","reference_rate_term=period3m",",","reset_frequency=period3m",",","currency='usd","'",",","notional=notional",",","businessday_rule=dates.BusinessDayConvention.NONE",",","coupon_basis=0.0",",","coupon_multiplier=1.0",",","daycount_convention=instruments.DayCountConvention.ACTUAL_365",")","swap","=","instruments.InterestRateSwap","(","start_date",",","maturity_date",",","fix_spec",",","flt_spec",",","dtype=dtype",")","curve_dates","=","valuation_date","+","dates.periods.years","(","[","1",",","2",",","3",",","5",",","7",",","10",",","30","]",")","reference_curve","=","instruments.RateCurve","(","curve_dates",",","np.array","(","[","0.02834814",",","0.03077457",",","0.03113739",",","0.03130794",",","0.03160892",",","0.03213901",",","0.03257991","]",",","dtype=dtype",")",",","valuation_date=valuation_date",",","dtype=dtype",")","market","=","instruments.InterestRateMarket","(","reference_curve=reference_curve",",","discount_curve=reference_curve",")","price","=","swap.price","(","valuation_date",",","market",")","#","Expected","result",":","[","1.0e-7",",","1.0e-7","]","``","`","#","#","#","#","References",":","[","1","]",":","Leif","B.G",".","Andersen","and","Vladimir","V.","Piterbarg",".","Interest","Rate","Modeling",",","Volume","I",":","Foundations","and","Vanilla","Models",".","Chapter","5",".","2010","."]
Initialize a batch of IRS contracts. Args: start_date: A rank 1 `DateTensor` specifying the dates for the inception (start of the accrual) of the swap contracts. The shape of the input correspond to the number of instruments being created. maturity_date: A rank 1 `DateTensor` specifying the maturity dates for each contract. The shape of the input should be the same as that of `start_date`. pay_leg: A scalar or a list of either `FixedCouponSpecs` or `FloatCouponSpecs` specifying the coupon payments for the payment leg of the swap. If specified as a list then the length of the list should be the same as the number of instruments being created. If specified as a scalar, then the elements of the namedtuple must be of the same shape as (or compatible to) the shape of `start_date`. receive_leg: A scalar or a list of either `FixedCouponSpecs` or `FloatCouponSpecs` specifying the coupon payments for the receiving leg of the swap. If specified as a list then the length of the list should be the same as the number of instruments being created. If specified as a scalar, then the elements of the namedtuple must be of the same shape as (or compatible with) the shape of `start_date`. holiday_calendar: An instance of `dates.HolidayCalendar` to specify weekends and holidays. Default value: None in which case a holiday calendar would be created with Saturday and Sunday being the holidays. dtype: `tf.Dtype`. If supplied the dtype for the real variables or ops either supplied to the IRS object or created by the IRS object. Default value: None which maps to the default dtype inferred by TensorFlow. name: Python str. The name to give to the ops created by this class. Default value: `None` which maps to 'interest_rate_swap'.
["Initialize","a","batch","of","IRS","contracts",".","Args",":","start_date",":","A","rank","1","`","DateTensor","`","specifying","the","dates","for","the","inception","(","start","of","the","accrual",")","of","the","swap","contracts",".","The","shape","of","the","input","correspond","to","the","number","of","instruments","being","created",".","maturity_date",":","A","rank","1","`","DateTensor","`","specifying","the","maturity","dates","for","each","contract",".","The","shape","of","the","input","should","be","the","same","as","that","of","`","start_date","`",".","pay_leg",":","A","scalar","or","a","list","of","either","`","FixedCouponSpecs","`","or","`","FloatCouponSpecs","`","specifying","the","coupon","payments","for","the","payment","leg","of","the","swap",".","If","specified","as","a","list","then","the","length","of","the","list","should","be","the","same","as","the","number","of","instruments","being","created",".","If","specified","as","a","scalar",",","then","the","elements","of","the","namedtuple","must","be","of","the","same","shape","as","(","or","compatible","to",")","the","shape","of","`","start_date","`",".","receive_leg",":","A","scalar","or","a","list","of","either","`","FixedCouponSpecs","`","or","`","FloatCouponSpecs","`","specifying","the","coupon","payments","for","the","receiving","leg","of","the","swap",".","If","specified","as","a","list","then","the","length","of","the","list","should","be","the","same","as","the","number","of","instruments","being","created",".","If","specified","as","a","scalar",",","then","the","elements","of","the","namedtuple","must","be","of","the","same","shape","as","(","or","compatible","with",")","the","shape","of","`","start_date","`",".","holiday_calendar",":","An","instance","of","`","dates.HolidayCalendar","`","to","specify","weekends","and","holidays",".","Default","value",":","None","in","which","case","a","holiday","calendar","would","be","created","with","Saturday","and","Sunday","being","the","holidays",".","dtype",":","`","tf.Dtype","`",".","If","supplied","the","dtype","for","the","real","variables","or","ops","either","supplied","to","the","IRS","object","or","created","by","the","IRS","object",".","Default","value",":","None","which","maps","to","the","default","dtype","inferred","by","TensorFlow",".","name",":","Python","str",".","The","name","to","give","to","the","ops","created","by","this","class",".","Default","value",":","`","None","`","which","maps","to","'interest_rate_swap","'","."]
InterestRateSwap
def __init__(self,
             start_date,
             maturity_date,
             pay_leg,
             receive_leg,
             holiday_calendar=None,
             dtype=None,
             name=None):
    """Initialize a batch of IRS contracts.

    Args:
      start_date: A rank 1 `DateTensor` specifying the dates for the inception
        (start of the accrual) of the swap contracts. The shape of the input
        correspond to the number of instruments being created.
      maturity_date: A rank 1 `DateTensor` specifying the maturity dates for
        each contract. The shape of the input should be the same as that of
        `start_date`.
      pay_leg: A scalar or a list of either `FixedCouponSpecs` or
        `FloatCouponSpecs` specifying the coupon payments for the payment leg
        of the swap. If specified as a list then the length of the list should
        be the same as the number of instruments being created. If specified as
        a scalar, then the elements of the namedtuple must be of the same shape
        as (or compatible to) the shape of `start_date`.
      receive_leg: A scalar or a list of either `FixedCouponSpecs` or
        `FloatCouponSpecs` specifying the coupon payments for the receiving leg
        of the swap. If specified as a list then the length of the list should
        be the same as the number of instruments being created. If specified as
        a scalar, then the elements of the namedtuple must be of the same shape
        as (or compatible with) the shape of `start_date`.
      holiday_calendar: An instance of `dates.HolidayCalendar` to specify
        weekends and holidays.
        Default value: None in which case a holiday calendar would be created
        with Saturday and Sunday being the holidays.
      dtype: `tf.Dtype`. If supplied the dtype for the real variables or ops
        either supplied to the IRS object or created by the IRS object.
        Default value: None which maps to the default dtype inferred by
        TensorFlow.
      name: Python str. The name to give to the ops created by this class.
        Default value: `None` which maps to 'interest_rate_swap'.
    """
    self._name = name or "interest_rate_swap"

    if holiday_calendar is None:
        holiday_calendar = dates.create_holiday_calendar(
            weekend_mask=dates.WeekendMask.SATURDAY_SUNDAY)

    with tf.name_scope(self._name):
        self._dtype = dtype
        self._start_date = dates.convert_to_date_tensor(start_date)
        self._maturity_date = dates.convert_to_date_tensor(maturity_date)
        self._holiday_calendar = holiday_calendar
        self._floating_leg = None
        self._fixed_leg = None
        self._pay_leg = self._setup_leg(pay_leg)
        self._receive_leg = self._setup_leg(receive_leg)
        self._is_payer = isinstance(self._pay_leg, cs.FixedCashflowStream)
["def","__init__","(","self",",","start_date",",","maturity_date",",","pay_leg",",","receive_leg",",","holiday_calendar=None",",","dtype=None",",","name=None",",",")",":","``","''","''","Initialize","a","batch","of","IRS","contracts",".","Args",":","start_date",":","A","rank","1","`","DateTensor","`","specifying","the","dates","for","the","inception","(","start","of","the","accrual",")","of","the","swap","contracts",".","The","shape","of","the","input","correspond","to","the","number","of","instruments","being","created",".","maturity_date",":","A","rank","1","`","DateTensor","`","specifying","the","maturity","dates","for","each","contract",".","The","shape","of","the","input","should","be","the","same","as","that","of","`","start_date","`",".","pay_leg",":","A","scalar","or","a","list","of","either","`","FixedCouponSpecs","`","or","`","FloatCouponSpecs","`","specifying","the","coupon","payments","for","the","payment","leg","of","the","swap",".","If","specified","as","a","list","then","the","length","of","the","list","should","be","the","same","as","the","number","of","instruments","being","created",".","If","specified","as","a","scalar",",","then","the","elements","of","the","namedtuple","must","be","of","the","same","shape","as","(","or","compatible","to",")","the","shape","of","`","start_date","`",".","receive_leg",":","A","scalar","or","a","list","of","either","`","FixedCouponSpecs","`","or","`","FloatCouponSpecs","`","specifying","the","coupon","payments","for","the","receiving","leg","of","the","swap",".","If","specified","as","a","list","then","the","length","of","the","list","should","be","the","same","as","the","number","of","instruments","being","created",".","If","specified","as","a","scalar",",","then","the","elements","of","the","namedtuple","must","be","of","the","same","shape","as","(","or","compatible","with",")","the","shape","of","`","start_date","`",".","holiday_calendar",":","An","instance","of","`","dates.HolidayCalendar","`","to","specify","weekends","and","holidays",".","Default","value",":","None","in","which","case","a","holiday","calendar","would","be","created","with","Saturday","and","Sunday","being","the","holidays",".","dtype",":","`","tf.Dtype","`",".","If","supplied","the","dtype","for","the","real","variables","or","ops","either","supplied","to","the","IRS","object","or","created","by","the","IRS","object",".","Default","value",":","None","which","maps","to","the","default","dtype","inferred","by","TensorFlow",".","name",":","Python","str",".","The","name","to","give","to","the","ops","created","by","this","class",".","Default","value",":","`","None","`","which","maps","to","'interest_rate_swap'.","``","''","''","self._name","=","name","or","``","interest_rate_swap","''","if","holiday_calendar","is","None",":","holiday_calendar","=","dates.create_holiday_calendar","(","weekend_mask=dates.WeekendMask.SATURDAY_SUNDAY",")","with","tf.name_scope","(","self._name",")",":","self._dtype","=","dtype","self._start_date","=","dates.convert_to_date_tensor","(","start_date",")","self._maturity_date","=","dates.convert_to_date_tensor","(","maturity_date",")","self._holiday_calendar","=","holiday_calendar","self._floating_leg","=","None","self._fixed_leg","=","None","self._pay_leg","=","self._setup_leg","(","pay_leg",")","self._receive_leg","=","self._setup_leg","(","receive_leg",")","self._is_payer","=","isinstance","(","self._pay_leg",",","cs.FixedCashflowStream",")"]
156
211
null
interest_rate_swap.py
tf-quant-finance/tf_quant_finance/experimental/instruments/interest_rate_swap.py
import tensorflow.compat.v2 from tf_quant_finance import datetime from tf_quant_finance.experimental.instruments import cashflow_stream from tf_quant_finance.experimental.instruments import rates_common
15
1
4
0
0
10
null
Use image node_id 1 to create a new InterestRateSwap object with example: obj = InterestRateSwap(start_date, maturity_date, pay_leg, receive_leg, holiday_calendar, dtype, name)
177
node_id 1
2,191,434
__call__
ImageGPTFeatureExtractor
FeatureExtractionMixin,ImageFeatureExtractionMixin
true
self,images,return_tensors
Constructs an ImageGPT feature extractor. This feature extractor can be used to resize images to a smaller resolution (such as 32x32 or 64x64), normalize them and finally color quantize them to obtain sequences of "pixel values" (color clusters). This feature extractor inherits from [`FeatureExtractionMixin`] which contains most of the main methods. Users should refer to this superclass for more information regarding those methods. Args: clusters (`np.ndarray`): The color clusters to use, as a `np.ndarray` of shape `(n_clusters, 3)`. do_resize (`bool`, *optional*, defaults to `True`): Whether to resize the input to a certain `size`. size (`int` or `Tuple(int)`, *optional*, defaults to 32): Resize the input to the given size. If a tuple is provided, it should be (width, height). If only an integer is provided, then the input will be resized to (size, size). Only has an effect if `do_resize` is set to `True`. resample (`int`, *optional*, defaults to `PIL.Image.Resampling.BILINEAR`): An optional resampling filter. This can be one of `PIL.Image.Resampling.NEAREST`, `PIL.Image.Resampling.BOX`, `PIL.Image.Resampling.BILINEAR`, `PIL.Image.Resampling.HAMMING`, `PIL.Image.Resampling.BICUBIC` or `PIL.Image.Resampling.LANCZOS`. Only has an effect if `do_resize` is set to `True`. do_normalize (`bool`, *optional*, defaults to `True`): Whether or not to normalize the input to the range between -1 and +1.
["Constructs","an","ImageGPT","feature","extractor",".","This","feature","extractor","can","be","used","to","resize","images","to","a","smaller","resolution","(","such","as","32x32","or","64x64",")",",","normalize","them","and","finally","color","quantize","them","to","obtain","sequences","of","``","pixel","values","''","(","color","clusters",")",".","This","feature","extractor","inherits","from","[","`","FeatureExtractionMixin","`","]","which","contains","most","of","the","main","methods",".","Users","should","refer","to","this","superclass","for","more","information","regarding","those","methods",".","Args",":","clusters","(","`","np.ndarray","`",")",":","The","color","clusters","to","use",",","as","a","`","np.ndarray","`","of","shape","`","(","n_clusters",",","3",")","`",".","do_resize","(","`","bool","`",",","*","optional","*",",","defaults","to","`","True","`",")",":","Whether","to","resize","the","input","to","a","certain","`","size","`",".","size","(","`","int","`","or","`","Tuple","(","int",")","`",",","*","optional","*",",","defaults","to","32",")",":","Resize","the","input","to","the","given","size",".","If","a","tuple","is","provided",",","it","should","be","(","width",",","height",")",".","If","only","an","integer","is","provided",",","then","the","input","will","be","resized","to","(","size",",","size",")",".","Only","has","an","effect","if","`","do_resize","`","is","set","to","`","True","`",".","resample","(","`","int","`",",","*","optional","*",",","defaults","to","`","PIL.Image.Resampling.BILINEAR","`",")",":","An","optional","resampling","filter",".","This","can","be","one","of","`","PIL.Image.Resampling.NEAREST","`",",","`","PIL.Image.Resampling.BOX","`",",","`","PIL.Image.Resampling.BILINEAR","`",",","`","PIL.Image.Resampling.HAMMING","`",",","`","PIL.Image.Resampling.BICUBIC","`","or","`","PIL.Image.Resampling.LANCZOS","`",".","Only","has","an","effect","if","`","do_resize","`","is","set","to","`","True","`",".","do_normalize","(","`","bool","`",",","*","optional","*",",","defaults","to","`","True","`",")",":","Whether","or","not","to","normalize","the","input","to","the","range","between","-1","and","+1","."]
Main method to prepare for the model one or several image(s). <Tip warning={true}> NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to pass PIL images. </Tip> Args: images (`PIL.Image.Image`, `np.ndarray`, `torch.Tensor`, `List[PIL.Image.Image]`, `List[np.ndarray]`, `List[torch.Tensor]`): The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a number of channels, H and W are image height and width. return_tensors (`str` or [`~utils.TensorType`], *optional*, defaults to `'np'`): If set, will return tensors of a particular framework. Acceptable values are: - `'tf'`: Return TensorFlow `tf.constant` objects. - `'pt'`: Return PyTorch `torch.Tensor` objects. - `'np'`: Return NumPy `np.ndarray` objects. - `'jax'`: Return JAX `jnp.ndarray` objects. Returns: [`BatchFeature`]: A [`BatchFeature`] with the following fields: - **input_ids** -- Input IDs to be fed to a model, of shape `(batch_size, height * width)`.
["Main","method","to","prepare","for","the","model","one","or","several","image","(","s",")",".","<","Tip","warning=","{","true","}",">","NumPy","arrays","and","PyTorch","tensors","are","converted","to","PIL","images","when","resizing",",","so","the","most","efficient","is","to","pass","PIL","images",".","<","\/Tip",">","Args",":","images","(","`","PIL.Image.Image","`",",","`","np.ndarray","`",",","`","torch.Tensor","`",",","`","List","[","PIL.Image.Image","]","`",",","`","List","[","np.ndarray","]","`",",","`","List","[","torch.Tensor","]","`",")",":","The","image","or","batch","of","images","to","be","prepared",".","Each","image","can","be","a","PIL","image",",","NumPy","array","or","PyTorch","tensor",".","In","case","of","a","NumPy","array\/PyTorch","tensor",",","each","image","should","be","of","shape","(","C",",","H",",","W",")",",","where","C","is","a","number","of","channels",",","H","and","W","are","image","height","and","width",".","return_tensors","(","`","str","`","or","[","`","~utils.TensorType","`","]",",","*","optional","*",",","defaults","to","`","'np","'","`",")",":","If","set",",","will","return","tensors","of","a","particular","framework",".","Acceptable","values","are",":","-","`","'tf","'","`",":","Return","TensorFlow","`","tf.constant","`","objects",".","-","`","'pt","'","`",":","Return","PyTorch","`","torch.Tensor","`","objects",".","-","`","'np","'","`",":","Return","NumPy","`","np.ndarray","`","objects",".","-","`","'jax","'","`",":","Return","JAX","`","jnp.ndarray","`","objects",".","Returns",":","[","`","BatchFeature","`","]",":","A","[","`","BatchFeature","`","]","with","the","following","fields",":","-","*","*","input_ids","*","*","--","Input","IDs","to","be","fed","to","a","model",",","of","shape","`","(","batch_size",",","height","*","width",")","`","."]
encoded_inputs
def __call__(
    self,
    images: Union[
        Image.Image,
        np.ndarray,
        "torch.Tensor",
        List[Image.Image],
        List[np.ndarray],
        List["torch.Tensor"],  # noqa
    ],
    return_tensors: Optional[Union[str, TensorType]] = None,
    **kwargs,
) -> BatchFeature:
    """
    Main method to prepare for the model one or several image(s).

    <Tip warning={true}>

    NumPy arrays and PyTorch tensors are converted to PIL images when resizing, so the most efficient is to pass
    PIL images.

    </Tip>

    Args:
        images (`PIL.Image.Image`, `np.ndarray`, `torch.Tensor`, `List[PIL.Image.Image]`, `List[np.ndarray]`, `List[torch.Tensor]`):
            The image or batch of images to be prepared. Each image can be a PIL image, NumPy array or PyTorch
            tensor. In case of a NumPy array/PyTorch tensor, each image should be of shape (C, H, W), where C is a
            number of channels, H and W are image height and width.

        return_tensors (`str` or [`~utils.TensorType`], *optional*, defaults to `'np'`):
            If set, will return tensors of a particular framework. Acceptable values are:

            - `'tf'`: Return TensorFlow `tf.constant` objects.
            - `'pt'`: Return PyTorch `torch.Tensor` objects.
            - `'np'`: Return NumPy `np.ndarray` objects.
            - `'jax'`: Return JAX `jnp.ndarray` objects.

    Returns:
        [`BatchFeature`]: A [`BatchFeature`] with the following fields:

        - **input_ids** -- Input IDs to be fed to a model, of shape `(batch_size, height * width)`.
    """
    # Input type checking for clearer error
    valid_images = False

    # Check that images has a valid type
    if isinstance(images, (Image.Image, np.ndarray)) or is_torch_tensor(images):
        valid_images = True
    elif isinstance(images, (list, tuple)):
        if len(images) == 0 or isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]):
            valid_images = True

    if not valid_images:
        raise ValueError(
            "Images must of type `PIL.Image.Image`, `np.ndarray` or `torch.Tensor` (single example), "
            "`List[PIL.Image.Image]`, `List[np.ndarray]` or `List[torch.Tensor]` (batch of examples)."
        )

    is_batched = bool(
        isinstance(images, (list, tuple))
        and (isinstance(images[0], (Image.Image, np.ndarray)) or is_torch_tensor(images[0]))
    )

    if not is_batched:
        images = [images]

    # transformations (resizing + normalization)
    if self.do_resize and self.size is not None:
        images = [self.resize(image, size=self.size, resample=self.resample) for image in images]
    if self.do_normalize:
        images = [self.normalize(image) for image in images]

    # color quantize from (batch_size, height, width, 3) to (batch_size, height, width)
    images = np.array(images)
    images = color_quantize(images, self.clusters).reshape(images.shape[:-1])

    # flatten to (batch_size, height*width)
    batch_size = images.shape[0]
    images = images.reshape(batch_size, -1)

    # return as BatchFeature
    data = {"input_ids": images}
    encoded_inputs = BatchFeature(data=data, tensor_type=return_tensors)

    return encoded_inputs
["def","__call__","(","self",",","images",":","Union","[","Image.Image",",","np.ndarray",",","``","torch.Tensor","''",",","List","[","Image.Image","]",",","List","[","np.ndarray","]",",","List","[","``","torch.Tensor","''","]",",","#","noqa","]",",","return_tensors",":","Optional","[","Union","[","str",",","TensorType","]","]","=","None",",","*","*","kwargs",",",")","-",">","BatchFeature",":","``","''","''","Main","method","to","prepare","for","the","model","one","or","several","image","(","s",")",".","<","Tip","warning=","{","true","}",">","NumPy","arrays","and","PyTorch","tensors","are","converted","to","PIL","images","when","resizing",",","so","the","most","efficient","is","to","pass","PIL","images",".","<","\/Tip",">","Args",":","images","(","`","PIL.Image.Image","`",",","`","np.ndarray","`",",","`","torch.Tensor","`",",","`","List","[","PIL.Image.Image","]","`",",","`","List","[","np.ndarray","]","`",",","`","List","[","torch.Tensor","]","`",")",":","The","image","or","batch","of","images","to","be","prepared",".","Each","image","can","be","a","PIL","image",",","NumPy","array","or","PyTorch","tensor",".","In","case","of","a","NumPy","array\/PyTorch","tensor",",","each","image","should","be","of","shape","(","C",",","H",",","W",")",",","where","C","is","a","number","of","channels",",","H","and","W","are","image","height","and","width",".","return_tensors","(","`","str","`","or","[","`","~utils.TensorType","`","]",",","*","optional","*",",","defaults","to","`","'np","'","`",")",":","If","set",",","will","return","tensors","of","a","particular","framework",".","Acceptable","values","are",":","-","`","'tf","'","`",":","Return","TensorFlow","`","tf.constant","`","objects",".","-","`","'pt","'","`",":","Return","PyTorch","`","torch.Tensor","`","objects",".","-","`","'np","'","`",":","Return","NumPy","`","np.ndarray","`","objects",".","-","`","'jax","'","`",":","Return","JAX","`","jnp.ndarray","`","objects",".","Returns",":","[","`","BatchFeature","`","]",":","A","[","`","BatchFeature","`","]","with","the","following","fields",":","-","*","*","input_ids","*","*","--","Input","IDs","to","be","fed","to","a","model",",","of","shape","`","(","batch_size",",","height","*","width",")","`",".","``","''","''","#","Input","type","checking","for","clearer","error","valid_images","=","False","#","Check","that","images","has","a","valid","type","if","isinstance","(","images",",","(","Image.Image",",","np.ndarray",")",")","or","is_torch_tensor","(","images",")",":","valid_images","=","True","elif","isinstance","(","images",",","(","list",",","tuple",")",")",":","if","(","len","(","images",")","==","0","or","isinstance","(","images","[","0","]",",","(","Image.Image",",","np.ndarray",")",")","or","is_torch_tensor","(","images","[","0","]",")",")",":","valid_images","=","True","if","not","valid_images",":","raise","ValueError","(","``","Images","must","of","type","`","PIL.Image.Image","`",",","`","np.ndarray","`","or","`","torch.Tensor","`","(","single","example",")",",","``","``","`","List","[","PIL.Image.Image","]","`",",","`","List","[","np.ndarray","]","`","or","`","List","[","torch.Tensor","]","`","(","batch","of","examples",")",".","''",")","is_batched","=","bool","(","isinstance","(","images",",","(","list",",","tuple",")",")","and","(","isinstance","(","images","[","0","]",",","(","Image.Image",",","np.ndarray",")",")","or","is_torch_tensor","(","images","[","0","]",")",")",")","if","not","is_batched",":","images","=","[","images","]","#","transformations","(","resizing","+","normalization",")","if",
"self.do_resize","and","self.size","is","not","None",":","images","=","[","self.resize","(","image",",","size=self.size",",","resample=self.resample",")","for","image","in","images","]","if","self.do_normalize",":","images","=","[","self.normalize","(","image",")","for","image","in","images","]","#","color","quantize","from","(","batch_size",",","height",",","width",",","3",")","to","(","batch_size",",","height",",","width",")","images","=","np.array","(","images",")","images","=","color_quantize","(","images",",","self.clusters",")",".reshape","(","images.shape","[",":","-1","]",")","#","flatten","to","(","batch_size",",","height","*","width",")","batch_size","=","images.shape","[","0","]","images","=","images.reshape","(","batch_size",",","-1",")","#","return","as","BatchFeature","data","=","{","``","input_ids","''",":","images","}","encoded_inputs","=","BatchFeature","(","data=data",",","tensor_type=return_tensors",")","return","encoded_inputs"]
101
181
null
feature_extraction_imagegpt.py
H2O/h2o_flexgen/benchmark/third_party/transformers/src/transformers/models/imagegpt/feature_extraction_imagegpt.py
from typing import List, Optional, Union import numpy from PIL import Image from transformers.image_utils import PILImageResampling from ...feature_extraction_utils import BatchFeature, FeatureExtractionMixin from ...image_utils import ImageFeatureExtractionMixin, is_torch_tensor from ...utils import TensorType, logging
15
1
7
2
2
3
2
Use image node_id 3 for calling the ImageGPTFeatureExtractor obj's underlying member method code with example usage: obj.__call__(images, return_tensors) and returns: encoded_inputs
181
node_id 3
95,364
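To make the `__call__` record above concrete, here is a hedged usage sketch. It is not from the transformers code base; the cluster array is random stand-in data (real checkpoints ship their own learned color clusters, typically loaded via `from_pretrained`), so the resulting input IDs are meaningless but shape-correct.

```python
import numpy as np
from PIL import Image

# Assumption: ImageGPTFeatureExtractor is importable from the module in this record.
# 16 random color clusters stand in for a checkpoint's learned palette; the class
# docstring above only requires an array of shape (n_clusters, 3).
clusters = np.random.rand(16, 3) * 2 - 1  # clusters live in the normalized [-1, 1] space
extractor = ImageGPTFeatureExtractor(clusters=clusters, do_resize=True, size=32, do_normalize=True)

image = Image.fromarray(np.uint8(np.random.rand(64, 64, 3) * 255))
encoding = extractor(images=image, return_tensors="np")

print(encoding["input_ids"].shape)  # (1, 1024) -> one image flattened to 32 * 32 cluster ids
```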
test_sample_rate_error
global
null
false
art_warning
null
null
null
null
null
def test_sample_rate_error(art_warning):
    try:
        exc_msg = "Sample rate be must a positive integer."
        with pytest.raises(ValueError, match=exc_msg):
            Mp3CompressionPyTorch(sample_rate=0)
    except ARTTestException as e:
        art_warning(e)
["def","test_sample_rate_error","(","art_warning",")",":","try",":","exc_msg","=","``","Sample","rate","be","must","a","positive","integer",".","''","with","pytest.raises","(","ValueError",",","match=exc_msg",")",":","Mp3CompressionPyTorch","(","sample_rate=0",")","except","ARTTestException","as","e",":","art_warning","(","e",")"]
65
71
null
test_mp3_compression_pytorch.py
adversarial-robustness-toolbox/tests/defences/preprocessor/test_mp3_compression_pytorch.py
from __future__ import absolute_import, division, print_function, unicode_literals import logging import numpy import pytest from numpy.testing import assert_array_equal from art.defences.preprocessor import Mp3CompressionPyTorch from tests.utils import ARTTestException
15
null
7
5
null
null
null
Use image node_id 2 for calling a global function with example usage: test_sample_rate_error(art_warning) without return types
126
node_id 2
235,206
_mean_square_error
global
null
false
y,y_pred,w
null
null
null
null
np
def _mean_square_error(y, y_pred, w):
    """Calculate the mean square error."""
    return np.average(((y_pred - y) ** 2), weights=w)
["def","_mean_square_error","(","y",",","y_pred",",","w",")",":","``","''","''","Calculate","the","mean","square","error",".","''","''","''","return","np.average","(","(","(","y_pred","-","y",")","*","*","2",")",",","weights=w",")"]
130
132
null
fitness.py
gplearn/gplearn/fitness.py
import numbers import numpy from joblib import wrap_non_picklable_objects from scipy.stats import rankdata
15
null
4
7
null
null
null
Use image node_id 5 for calling a global function with example usage: _mean_square_error(y, y_pred, w) and returns: np
118
node_id 5
1,106,034
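A tiny worked example of the weighted mean square error the record above computes (illustrative only, not taken from gplearn; the inputs are arbitrary):

```python
import numpy as np

y = np.array([1.0, 2.0, 3.0])
y_pred = np.array([1.5, 2.0, 2.0])
w = np.array([1.0, 1.0, 2.0])

# Same formula as the record: weighted average of squared residuals.
mse = np.average((y_pred - y) ** 2, weights=w)
print(mse)  # (1*0.25 + 1*0.0 + 2*1.0) / 4 = 0.5625
```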
_root_mean_square_error
global
null
false
y,y_pred,w
null
null
null
null
np
def _root_mean_square_error(y, y_pred, w):
    """Calculate the root mean square error."""
    return np.sqrt(np.average(((y_pred - y) ** 2), weights=w))
["def","_root_mean_square_error","(","y",",","y_pred",",","w",")",":","``","''","''","Calculate","the","root","mean","square","error",".","''","''","''","return","np.sqrt","(","np.average","(","(","(","y_pred","-","y",")","*","*","2",")",",","weights=w",")",")"]
135
137
null
fitness.py
gplearn/gplearn/fitness.py
import numbers import numpy from joblib import wrap_non_picklable_objects from scipy.stats import rankdata
15
null
4
7
null
null
null
Use image node_id 6 for calling a global function with example usage: _root_mean_square_error(y, y_pred, w) and returns: np
123
node_id 6
1,106,035
decrease_indent
NullLogger
null
true
self
null
null
null
null
null
def decrease_indent(self):
    if self.on() and len(self.indent_) > 4:
        self.indent_ = self.indent_[-4:]
["def","decrease_indent","(","self",")",":","if","self.on","(",")","and","len","(","self.indent_",")",">","4",":","self.indent_","=","self.indent_","[","-4",":","]"]
22
24
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
7
null
Use image node_id 4 for calling the NullLogger obj's underlying member method code with example usage: obj.decrease_indent() without return types
145
node_id 4
2,276,650
do_log
NullLogger
null
true
self
null
null
null
null
null
def do_log(self, *args):
    pass
["def","do_log","(","self",",","*","args",")",":","pass"]
26
27
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
7
null
Use image node_id 5 for calling the NullLogger obj's underlying member method code with example usage: obj.do_log() without return types
136
node_id 5
2,276,651
_log_loss
global
null
false
y,y_pred,w
null
null
null
null
np
def _log_loss(y, y_pred, w):
    """Calculate the log loss."""
    eps = 1e-15
    inv_y_pred = np.clip(1 - y_pred, eps, 1 - eps)
    y_pred = np.clip(y_pred, eps, 1 - eps)
    score = y * np.log(y_pred) + (1 - y) * np.log(inv_y_pred)
    return np.average(-score, weights=w)
["def","_log_loss","(","y",",","y_pred",",","w",")",":","``","''","''","Calculate","the","log","loss",".","''","''","''","eps","=","1e-15","inv_y_pred","=","np.clip","(","1","-","y_pred",",","eps",",","1","-","eps",")","y_pred","=","np.clip","(","y_pred",",","eps",",","1","-","eps",")","score","=","y","*","np.log","(","y_pred",")","+","(","1","-","y",")","*","np.log","(","inv_y_pred",")","return","np.average","(","-score",",","weights=w",")"]
140
146
null
fitness.py
gplearn/gplearn/fitness.py
import numbers import numpy from joblib import wrap_non_picklable_objects from scipy.stats import rankdata
15
null
4
7
null
null
null
Use image node_id 7 for calling a global function with example usage: _log_loss(y, y_pred, w) and returns: np
109
node_id 7
1,106,036
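The `_log_loss` record clips predictions away from 0 and 1 before applying the binary cross-entropy formula. The short example below (not from gplearn, inputs chosen only for checkable arithmetic) reproduces that computation for one well-behaved case:

```python
import numpy as np

y = np.array([1.0, 0.0])
y_pred = np.array([0.9, 0.2])
w = np.array([1.0, 1.0])

eps = 1e-15
inv_y_pred = np.clip(1 - y_pred, eps, 1 - eps)
y_pred = np.clip(y_pred, eps, 1 - eps)
score = y * np.log(y_pred) + (1 - y) * np.log(inv_y_pred)
print(np.average(-score, weights=w))  # ~0.164: mean of -ln(0.9) and -ln(0.8)
```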
interesting
NullLogger
null
true
self,source_name
null
null
null
null
False
def interesting(self, source_name):
    return False
["def","interesting","(","self",",","source_name",")",":","return","False"]
29
30
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
7
null
Use image node_id 6 for calling the NullLogger obj's underlying member method code with example usage: obj.interesting(source_name) and returns: False
150
node_id 6
2,276,652
on
NullLogger
null
true
self
null
null
null
null
True
def on(self):
    return True
["def","on","(","self",")",":","return","True"]
32
33
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
7
null
Use image node_id 7 for calling the NullLogger obj's underlying member method code with example usage: obj.on() and returns: True
129
node_id 7
2,276,653
__init__
TextLogger
NullLogger
true
self
null
null
null
null
TextLogger
def __init__(self):
    NullLogger.__init__(self)
["def","__init__","(","self",")",":","NullLogger.__init__","(","self",")"]
36
37
null
logger.py
turicreate/src/external/boost/boost_1_68_0/tools/build/src/util/logger.py
import sys
15
2
1
0
1
4
1
Use image node_id 1 to create a new TextLogger object from inherited base classes: NullLogger with example: obj = TextLogger()
126
node_id 1
2,276,654
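Taken together, the `NullLogger` and `TextLogger` records sketch a small indentation-aware logger. The following is an illustrative subclass, not boost's own code: it assumes `indent_` starts as an empty string (the `NullLogger.__init__` record is not part of this excerpt) and overrides `do_log` and `interesting` so that `log()` actually emits output.

```python
import sys

class DemoLogger(NullLogger):  # NullLogger as defined in the records above
    def __init__(self):
        self.indent_ = ""  # assumption: the base __init__ initializes this field

    def do_log(self, *args):
        for a in args:
            sys.stdout.write(str(a))

    def interesting(self, source_name):
        return True  # log every source instead of filtering everything out

logger = DemoLogger()
logger.log("demo", "top-level message")
logger.increase_indent()
logger.log("demo", "indented message")
logger.decrease_indent()
```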