name
stringlengths 1
94
| class_name
stringlengths 1
56
| class_bases
stringlengths 0
73
⌀ | is_member
bool 2
classes | args
stringlengths 0
1.04k
| class_docstr
stringlengths 0
13k
⌀ | class_docstr_tok
stringlengths 2
18.2k
⌀ | docstr
stringlengths 0
6.72k
⌀ | docstr_tok
stringlengths 13
9.01k
⌀ | returns
stringlengths 1
279
⌀ | code
stringlengths 20
27.8k
⌀ | code_tok
stringlengths 32
38.4k
⌀ | lstart
int64 1
1.03k
| lend
int64 2
1.18k
| raises
stringclasses 9
values | filename
stringlengths 5
81
| file_path
stringlengths 14
129
| imports
stringlengths 0
786
| total_objects
int64 7
10
| num_classes
float64 1
4
⌀ | num_imports
int64 0
9
| num_all_bases
float64 0
4
⌀ | num_bases
float64 1
4
⌀ | num_functions
int64 0
10
| num_methods
float64 1
8
⌀ | label_desc
stringlengths 70
1.25k
| label_desc_len
int64 70
1.25k
| label_id
stringclasses 10
values | __index_level_0__
int64 0
514k
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
test_encrypted_provided | TestNeo4jHookConn | null | true | self,mock_graph_database,conn_extra,should_provide_encrypted,expected_encrypted | null | null | null | null | null | def test_encrypted_provided(
self,
mock_graph_database,
conn_extra,
should_provide_encrypted,
expected_encrypted,
):
connection = Connection(
conn_type="neo4j",
login="login",
password="password",
host="host",
schema="schema",
extra=conn_extra,
)
with mock.patch.dict(
"os.environ", AIRFLOW_CONN_NEO4J_DEFAULT=connection.get_uri()
):
neo4j_hook = Neo4jHook()
with neo4j_hook.get_conn():
if should_provide_encrypted:
assert (
"encrypted"
in mock_graph_database.call_args.kwargs
)
assert (
mock_graph_database.call_args.kwargs["encrypted"]
== expected_encrypted
)
else:
assert (
"encrypted"
not in mock_graph_database.call_args.kwargs
)
| ["def","test_encrypted_provided","(","self",",","mock_graph_database",",","conn_extra",",","should_provide_encrypted",",","expected_encrypted",",",")",":","connection","=","Connection","(","conn_type=","''","neo4j","''",",","login=","''","login","''",",","password=","''","password","''",",","host=","''","host","''",",","schema=","''","schema","''",",","extra=conn_extra",",",")","with","mock.patch.dict","(","``","os.environ","''",",","AIRFLOW_CONN_NEO4J_DEFAULT=connection.get_uri","(",")",")",":","neo4j_hook","=","Neo4jHook","(",")","with","neo4j_hook.get_conn","(",")",":","if","should_provide_encrypted",":","assert","(","``","encrypted","''","in","mock_graph_database.call_args.kwargs",")","assert","(","mock_graph_database.call_args.kwargs","[","``","encrypted","''","]","==","expected_encrypted",")","else",":","assert","(","``","encrypted","''","not","in","mock_graph_database.call_args.kwargs",")"] | 123 | 141 | null | test_neo4j.py | airflow/tests/providers/neo4j/hooks/test_neo4j.py | from __future__ import annotations
from unittest import mock
import pytest
from airflow.models import Connection
from airflow.providers.neo4j.hooks.neo4j import Neo4jHook | 10 | 1 | 5 | 0 | null | 0 | 4 | Use image node_id 4 for calling the TestNeo4jHookConn obj's underlying member method code with example usage: obj.test_encrypted_provided(mock_graph_database, conn_extra, should_provide_encrypted, expected_encrypted) without return types | 237 | node_id 4 | 25,714 |
test_submodule_customized_state_dict_methods | TestModuleAPIs | JitTestCase | true | self | null | null | Tests that customized state dict methods on submodules are in effect | ["Tests","that","customized","state","dict","methods","on","submodules","are","in","effect"] | x,dict,self | def test_submodule_customized_state_dict_methods(self):
"""Tests that customized state dict methods on submodules are in effect"""
class CustomStateDictModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.conv = torch.nn.Conv2d(6, 16, 5)
self.fc = torch.nn.Linear(16 * 5 * 5, 120)
self.customized_save_state_dict_called: bool = False
self.customized_load_state_dict_called: bool = False
def forward(self, x):
x = self.conv(x)
x = self.fc(x)
return x
@torch.jit.export
def _save_to_state_dict(
self,
destination: Dict[str, torch.Tensor],
prefix: str,
keep_vars: bool,
):
self.customized_save_state_dict_called = True
return {"dummy": torch.ones(1)}
@torch.jit.export
def _load_from_state_dict(
self,
state_dict: Dict[str, torch.Tensor],
prefix: str,
local_metadata: Any,
strict: bool,
missing_keys: List[str],
unexpected_keys: List[str],
error_msgs: List[str],
):
self.customized_load_state_dict_called = True
return
class ParentModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.sub = CustomStateDictModule()
def forward(self, x):
return self.sub(x)
m1 = torch.jit.script(ParentModule())
self.assertFalse(m1.sub.customized_save_state_dict_called)
state_dict = m1.state_dict()
self.assertTrue(m1.sub.customized_save_state_dict_called)
m2 = torch.jit.script(ParentModule())
self.assertFalse(m2.sub.customized_load_state_dict_called)
m2.load_state_dict(state_dict)
self.assertTrue(m2.sub.customized_load_state_dict_called)
| ["def","test_submodule_customized_state_dict_methods","(","self",")",":","``","''","''","Tests","that","customized","state","dict","methods","on","submodules","are","in","effect","''","''","''","class","CustomStateDictModule","(","torch.nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.conv","=","torch.nn.Conv2d","(","6",",","16",",","5",")","self.fc","=","torch.nn.Linear","(","16","*","5","*","5",",","120",")","self.customized_save_state_dict_called",":","bool","=","False","self.customized_load_state_dict_called",":","bool","=","False","def","forward","(","self",",","x",")",":","x","=","self.conv","(","x",")","x","=","self.fc","(","x",")","return","x","@","torch.jit.export","def","_save_to_state_dict","(","self",",","destination",":","Dict","[","str",",","torch.Tensor","]",",","prefix",":","str",",","keep_vars",":","bool",",",")",":","self.customized_save_state_dict_called","=","True","return","{","``","dummy","''",":","torch.ones","(","1",")","}","@","torch.jit.export","def","_load_from_state_dict","(","self",",","state_dict",":","Dict","[","str",",","torch.Tensor","]",",","prefix",":","str",",","local_metadata",":","Any",",","strict",":","bool",",","missing_keys",":","List","[","str","]",",","unexpected_keys",":","List","[","str","]",",","error_msgs",":","List","[","str","]",",",")",":","self.customized_load_state_dict_called","=","True","return","class","ParentModule","(","torch.nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.sub","=","CustomStateDictModule","(",")","def","forward","(","self",",","x",")",":","return","self.sub","(","x",")","m1","=","torch.jit.script","(","ParentModule","(",")",")","self.assertFalse","(","m1.sub.customized_save_state_dict_called",")","state_dict","=","m1.state_dict","(",")","self.assertTrue","(","m1.sub.customized_save_state_dict_called",")","m2","=","torch.jit.script","(","ParentModule","(",")",")","self.assertFalse","(","m2.sub.cus
tomized_load_state_dict_called",")","m2.load_state_dict","(","state_dict",")","self.assertTrue","(","m2.sub.customized_load_state_dict_called",")"] | 80 | 128 | null | test_module_apis.py | pytorch/test/jit/test_module_apis.py | import torch
import os
import sys
from torch.testing._internal.jit_utils import JitTestCase
from typing import Dict, Any, List | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 3 for calling the TestModuleAPIs obj's underlying member method code with example usage: obj.test_submodule_customized_state_dict_methods() and returns: x, dict, self | 184 | node_id 3 | 287,395 |
test_customized_state_dict_methods | TestModuleAPIs | JitTestCase | true | self | null | null | Tests that customized state dict methods are in effect | ["Tests","that","customized","state","dict","methods","are","in","effect"] | x,dict | def test_customized_state_dict_methods(self):
"""Tests that customized state dict methods are in effect"""
class CustomStateDictModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.conv = torch.nn.Conv2d(6, 16, 5)
self.fc = torch.nn.Linear(16 * 5 * 5, 120)
self.customized_save_state_dict_called: bool = False
self.customized_load_state_dict_called: bool = False
def forward(self, x):
x = self.conv(x)
x = self.fc(x)
return x
@torch.jit.export
def _save_to_state_dict(
self,
destination: Dict[str, torch.Tensor],
prefix: str,
keep_vars: bool,
):
self.customized_save_state_dict_called = True
return {"dummy": torch.ones(1)}
@torch.jit.export
def _load_from_state_dict(
self,
state_dict: Dict[str, torch.Tensor],
prefix: str,
local_metadata: Any,
strict: bool,
missing_keys: List[str],
unexpected_keys: List[str],
error_msgs: List[str],
):
self.customized_load_state_dict_called = True
return
m1 = torch.jit.script(CustomStateDictModule())
self.assertFalse(m1.customized_save_state_dict_called)
state_dict = m1.state_dict()
self.assertTrue(m1.customized_save_state_dict_called)
m2 = torch.jit.script(CustomStateDictModule())
self.assertFalse(m2.customized_load_state_dict_called)
m2.load_state_dict(state_dict)
self.assertTrue(m2.customized_load_state_dict_called)
| ["def","test_customized_state_dict_methods","(","self",")",":","``","''","''","Tests","that","customized","state","dict","methods","are","in","effect","''","''","''","class","CustomStateDictModule","(","torch.nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.conv","=","torch.nn.Conv2d","(","6",",","16",",","5",")","self.fc","=","torch.nn.Linear","(","16","*","5","*","5",",","120",")","self.customized_save_state_dict_called",":","bool","=","False","self.customized_load_state_dict_called",":","bool","=","False","def","forward","(","self",",","x",")",":","x","=","self.conv","(","x",")","x","=","self.fc","(","x",")","return","x","@","torch.jit.export","def","_save_to_state_dict","(","self",",","destination",":","Dict","[","str",",","torch.Tensor","]",",","prefix",":","str",",","keep_vars",":","bool",",",")",":","self.customized_save_state_dict_called","=","True","return","{","``","dummy","''",":","torch.ones","(","1",")","}","@","torch.jit.export","def","_load_from_state_dict","(","self",",","state_dict",":","Dict","[","str",",","torch.Tensor","]",",","prefix",":","str",",","local_metadata",":","Any",",","strict",":","bool",",","missing_keys",":","List","[","str","]",",","unexpected_keys",":","List","[","str","]",",","error_msgs",":","List","[","str","]",",",")",":","self.customized_load_state_dict_called","=","True","return","m1","=","torch.jit.script","(","CustomStateDictModule","(",")",")","self.assertFalse","(","m1.customized_save_state_dict_called",")","state_dict","=","m1.state_dict","(",")","self.assertTrue","(","m1.customized_save_state_dict_called",")","m2","=","torch.jit.script","(","CustomStateDictModule","(",")",")","self.assertFalse","(","m2.customized_load_state_dict_called",")","m2.load_state_dict","(","state_dict",")","self.assertTrue","(","m2.customized_load_state_dict_called",")"] | 38 | 78 | null | test_module_apis.py | pytorch/test/jit/test_module_apis.py | import torch
import os
import sys
from torch.testing._internal.jit_utils import JitTestCase
from typing import Dict, Any, List | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 2 for calling the TestModuleAPIs obj's underlying member method code with example usage: obj.test_customized_state_dict_methods() and returns: x, dict | 168 | node_id 2 | 287,394 |
test_default_state_dict_methods | TestModuleAPIs | JitTestCase | true | self | null | null | Tests that default state dict methods are automatically available | ["Tests","that","default","state","dict","methods","are","automatically","available"] | x | def test_default_state_dict_methods(self):
"""Tests that default state dict methods are automatically available"""
class DefaultStateDictModule(torch.nn.Module):
def __init__(self):
super().__init__()
self.conv = torch.nn.Conv2d(6, 16, 5)
self.fc = torch.nn.Linear(16 * 5 * 5, 120)
def forward(self, x):
x = self.conv(x)
x = self.fc(x)
return x
m1 = torch.jit.script(DefaultStateDictModule())
m2 = torch.jit.script(DefaultStateDictModule())
state_dict = m1.state_dict()
m2.load_state_dict(state_dict)
| ["def","test_default_state_dict_methods","(","self",")",":","``","''","''","Tests","that","default","state","dict","methods","are","automatically","available","''","''","''","class","DefaultStateDictModule","(","torch.nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.conv","=","torch.nn.Conv2d","(","6",",","16",",","5",")","self.fc","=","torch.nn.Linear","(","16","*","5","*","5",",","120",")","def","forward","(","self",",","x",")",":","x","=","self.conv","(","x",")","x","=","self.fc","(","x",")","return","x","m1","=","torch.jit.script","(","DefaultStateDictModule","(",")",")","m2","=","torch.jit.script","(","DefaultStateDictModule","(",")",")","state_dict","=","m1.state_dict","(",")","m2.load_state_dict","(","state_dict",")"] | 19 | 36 | null | test_module_apis.py | pytorch/test/jit/test_module_apis.py | import torch
import os
import sys
from torch.testing._internal.jit_utils import JitTestCase
from typing import Dict, Any, List | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 1 for calling the TestModuleAPIs obj's underlying member method code with example usage: obj.test_default_state_dict_methods() and returns: x | 159 | node_id 1 | 287,393 |
__init__ | Arrayterator | null | true | self,var,buf_size | Buffered iterator for big arrays.
`Arrayterator` creates a buffered iterator for reading big arrays in small
contiguous blocks. The class is useful for objects stored in the
file system. It allows iteration over the object *without* reading
everything in memory; instead, small blocks are read and iterated over.
`Arrayterator` can be used with any object that supports multidimensional
slices. This includes NumPy arrays, but also variables from
Scientific.IO.NetCDF or pynetcdf for example.
Parameters
----------
var : array_like
The object to iterate over.
buf_size : int, optional
The buffer size. If `buf_size` is supplied, the maximum amount of
data that will be read into memory is `buf_size` elements.
Default is None, which will read as many element as possible
into memory.
Attributes
----------
var
buf_size
start
stop
step
shape
flat
See Also
--------
numpy.ndenumerate : Multidimensional array iterator.
numpy.flatiter : Flat array iterator.
numpy.memmap : Create a memory-map to an array stored
in a binary file on disk.
Notes
-----
The algorithm works by first finding a "running dimension", along which
the blocks will be extracted. Given an array of dimensions
``(d1, d2, ..., dn)``, e.g. if `buf_size` is smaller than ``d1``, the
first dimension will be used. If, on the other hand,
``d1 < buf_size < d1*d2`` the second dimension will be used, and so on.
Blocks are extracted along this dimension, and when the last block is
returned the process continues from the next dimension, until all
elements have been read.
Examples
--------
>>> a = np.arange(3 * 4 * 5 * 6).reshape(3, 4, 5, 6)
>>> a_itor = np.lib.Arrayterator(a, 2)
>>> a_itor.shape
(3, 4, 5, 6)
Now we can iterate over ``a_itor``, and it will return arrays of size
two. Since `buf_size` was smaller than any dimension, the first
dimension will be iterated over first:
>>> for subarr in a_itor:
... if not subarr.all():
... print(subarr, subarr.shape) # doctest: +SKIP
>>> # [[[[0 1]]]] (1, 1, 1, 2) | ["Buffered","iterator","for","big","arrays",".","`","Arrayterator","`","creates","a","buffered","iterator","for","reading","big","arrays","in","small","contiguous","blocks",".","The","class","is","useful","for","objects","stored","in","the","file","system",".","It","allows","iteration","over","the","object","*","without","*","reading","everything","in","memory",";","instead",",","small","blocks","are","read","and","iterated","over",".","`","Arrayterator","`","can","be","used","with","any","object","that","supports","multidimensional","slices",".","This","includes","NumPy","arrays",",","but","also","variables","from","Scientific.IO.NetCDF","or","pynetcdf","for","example",".","Parameters","--","--","--","--","--","var",":","array_like","The","object","to","iterate","over",".","buf_size",":","int",",","optional","The","buffer","size",".","If","`","buf_size","`","is","supplied",",","the","maximum","amount","of","data","that","will","be","read","into","memory","is","`","buf_size","`","elements",".","Default","is","None",",","which","will","read","as","many","element","as","possible","into","memory",".","Attributes","--","--","--","--","--","var","buf_size","start","stop","step","shape","flat","See","Also","--","--","--","--","numpy.ndenumerate",":","Multidimensional","array","iterator",".","numpy.flatiter",":","Flat","array","iterator",".","numpy.memmap",":","Create","a","memory-map","to","an","array","stored","in","a","binary","file","on","disk",".","Notes","--","--","-","The","algorithm","works","by","first","finding","a","``","running","dimension","''",",","along","which","the","blocks","will","be","extracted",".","Given","an","array","of","dimensions","``","(","d1",",","d2",",","...",",","dn",")","``",",","e.g",".","if","`","buf_size","`","is","smaller","than","``","d1","``",",","the","first","dimension","will","be","used",".","If",",","on","the","other","hand",",","``","d1","<","buf_size","<","d1","*","d2","``","the","second","dimens
ion","will","be","used",",","and","so","on",".","Blocks","are","extracted","along","this","dimension",",","and","when","the","last","block","is","returned","the","process","continues","from","the","next","dimension",",","until","all","elements","have","been","read",".","Examples","--","--","--","--",">",">",">","a","=","np.arange","(","3","*","4","*","5","*","6",")",".reshape","(","3",",","4",",","5",",","6",")",">",">",">","a_itor","=","np.lib.Arrayterator","(","a",",","2",")",">",">",">","a_itor.shape","(","3",",","4",",","5",",","6",")","Now","we","can","iterate","over","``","a_itor","``",",","and","it","will","return","arrays","of","size","two",".","Since","`","buf_size","`","was","smaller","than","any","dimension",",","the","first","dimension","will","be","iterated","over","first",":",">",">",">","for","subarr","in","a_itor",":","...","if","not","subarr.all","(",")",":","...","print","(","subarr",",","subarr.shape",")","#","doctest",":","+SKIP",">",">",">","#","[","[","[","[","0","1","]","]","]","]","(","1",",","1",",","1",",","2",")"] | null | null | Arrayterator | def __init__(self, var, buf_size=None):
self.var = var
self.buf_size = buf_size
self.start = [0 for dim in var.shape]
self.stop = [dim for dim in var.shape]
self.step = [1 for dim in var.shape]
| ["def","__init__","(","self",",","var",",","buf_size=None",")",":","self.var","=","var","self.buf_size","=","buf_size","self.start","=","[","0","for","dim","in","var.shape","]","self.stop","=","[","dim","for","dim","in","var.shape","]","self.step","=","[","1","for","dim","in","var.shape","]"] | 85 | 91 | null | _arrayterator_impl.py | numpy/numpy/lib/_arrayterator_impl.py | from operator import mul
from functools import reduce | 10 | 1 | 2 | 0 | null | 0 | 7 | Use image node_id 1 to create a new Arrayterator object with example: obj = Arrayterator(var, buf_size) | 104 | node_id 1 | 209,161 |
__getattr__ | Arrayterator | null | true | self,attr | Buffered iterator for big arrays.
`Arrayterator` creates a buffered iterator for reading big arrays in small
contiguous blocks. The class is useful for objects stored in the
file system. It allows iteration over the object *without* reading
everything in memory; instead, small blocks are read and iterated over.
`Arrayterator` can be used with any object that supports multidimensional
slices. This includes NumPy arrays, but also variables from
Scientific.IO.NetCDF or pynetcdf for example.
Parameters
----------
var : array_like
The object to iterate over.
buf_size : int, optional
The buffer size. If `buf_size` is supplied, the maximum amount of
data that will be read into memory is `buf_size` elements.
Default is None, which will read as many element as possible
into memory.
Attributes
----------
var
buf_size
start
stop
step
shape
flat
See Also
--------
numpy.ndenumerate : Multidimensional array iterator.
numpy.flatiter : Flat array iterator.
numpy.memmap : Create a memory-map to an array stored
in a binary file on disk.
Notes
-----
The algorithm works by first finding a "running dimension", along which
the blocks will be extracted. Given an array of dimensions
``(d1, d2, ..., dn)``, e.g. if `buf_size` is smaller than ``d1``, the
first dimension will be used. If, on the other hand,
``d1 < buf_size < d1*d2`` the second dimension will be used, and so on.
Blocks are extracted along this dimension, and when the last block is
returned the process continues from the next dimension, until all
elements have been read.
Examples
--------
>>> a = np.arange(3 * 4 * 5 * 6).reshape(3, 4, 5, 6)
>>> a_itor = np.lib.Arrayterator(a, 2)
>>> a_itor.shape
(3, 4, 5, 6)
Now we can iterate over ``a_itor``, and it will return arrays of size
two. Since `buf_size` was smaller than any dimension, the first
dimension will be iterated over first:
>>> for subarr in a_itor:
... if not subarr.all():
... print(subarr, subarr.shape) # doctest: +SKIP
>>> # [[[[0 1]]]] (1, 1, 1, 2) | ["Buffered","iterator","for","big","arrays",".","`","Arrayterator","`","creates","a","buffered","iterator","for","reading","big","arrays","in","small","contiguous","blocks",".","The","class","is","useful","for","objects","stored","in","the","file","system",".","It","allows","iteration","over","the","object","*","without","*","reading","everything","in","memory",";","instead",",","small","blocks","are","read","and","iterated","over",".","`","Arrayterator","`","can","be","used","with","any","object","that","supports","multidimensional","slices",".","This","includes","NumPy","arrays",",","but","also","variables","from","Scientific.IO.NetCDF","or","pynetcdf","for","example",".","Parameters","--","--","--","--","--","var",":","array_like","The","object","to","iterate","over",".","buf_size",":","int",",","optional","The","buffer","size",".","If","`","buf_size","`","is","supplied",",","the","maximum","amount","of","data","that","will","be","read","into","memory","is","`","buf_size","`","elements",".","Default","is","None",",","which","will","read","as","many","element","as","possible","into","memory",".","Attributes","--","--","--","--","--","var","buf_size","start","stop","step","shape","flat","See","Also","--","--","--","--","numpy.ndenumerate",":","Multidimensional","array","iterator",".","numpy.flatiter",":","Flat","array","iterator",".","numpy.memmap",":","Create","a","memory-map","to","an","array","stored","in","a","binary","file","on","disk",".","Notes","--","--","-","The","algorithm","works","by","first","finding","a","``","running","dimension","''",",","along","which","the","blocks","will","be","extracted",".","Given","an","array","of","dimensions","``","(","d1",",","d2",",","...",",","dn",")","``",",","e.g",".","if","`","buf_size","`","is","smaller","than","``","d1","``",",","the","first","dimension","will","be","used",".","If",",","on","the","other","hand",",","``","d1","<","buf_size","<","d1","*","d2","``","the","second","dimens
ion","will","be","used",",","and","so","on",".","Blocks","are","extracted","along","this","dimension",",","and","when","the","last","block","is","returned","the","process","continues","from","the","next","dimension",",","until","all","elements","have","been","read",".","Examples","--","--","--","--",">",">",">","a","=","np.arange","(","3","*","4","*","5","*","6",")",".reshape","(","3",",","4",",","5",",","6",")",">",">",">","a_itor","=","np.lib.Arrayterator","(","a",",","2",")",">",">",">","a_itor.shape","(","3",",","4",",","5",",","6",")","Now","we","can","iterate","over","``","a_itor","``",",","and","it","will","return","arrays","of","size","two",".","Since","`","buf_size","`","was","smaller","than","any","dimension",",","the","first","dimension","will","be","iterated","over","first",":",">",">",">","for","subarr","in","a_itor",":","...","if","not","subarr.all","(",")",":","...","print","(","subarr",",","subarr.shape",")","#","doctest",":","+SKIP",">",">",">","#","[","[","[","[","0","1","]","]","]","]","(","1",",","1",",","1",",","2",")"] | null | null | getattr | def __getattr__(self, attr):
return getattr(self.var, attr)
| ["def","__getattr__","(","self",",","attr",")",":","return","getattr","(","self.var",",","attr",")"] | 93 | 94 | null | _arrayterator_impl.py | numpy/numpy/lib/_arrayterator_impl.py | from operator import mul
from functools import reduce | 10 | 1 | 2 | 0 | null | 0 | 7 | Use image node_id 2 for calling the Arrayterator obj's underlying member method code with example usage: obj.__getattr__(attr) and returns: getattr | 147 | node_id 2 | 209,162 |
test_add_out_ignorable_args | TestIgnorableArgs | JitTestCase | true | self | null | null | null | null | null | def test_add_out_ignorable_args(self):
@torch.jit.script
def fn(x: torch.Tensor, y: torch.Tensor):
torch.add(x, y, out=y)
FileCheck().check("torch.add(x, y, out=y)").run(fn.code)
| ["def","test_add_out_ignorable_args","(","self",")",":","@","torch.jit.script","def","fn","(","x",":","torch.Tensor",",","y",":","torch.Tensor",")",":","torch.add","(","x",",","y",",","out=y",")","FileCheck","(",")",".check","(","``","torch.add","(","x",",","y",",","out=y",")","''",")",".run","(","fn.code",")"] | 50 | 54 | null | test_ignorable_args.py | pytorch/test/jit/test_ignorable_args.py | import os
import sys
import torch
from torch._C import parse_ir
from torch.testing import FileCheck
from torch.testing._internal.jit_utils import JitTestCase | 10 | 1 | 6 | 1 | 1 | 0 | 2 | Use image node_id 2 for calling the TestIgnorableArgs obj's underlying member method code with example usage: obj.test_add_out_ignorable_args() without return types | 164 | node_id 2 | 287,134 |
test_slice_ignorable_args_for_slice | TestIgnorableArgs | JitTestCase | true | self | null | null | null | null | null | def test_slice_ignorable_args_for_slice(self):
graph_str = """graph():
%13 : int = prim::Constant[value=0]()
%10 : bool = prim::Constant[value=0]()
%8 : NoneType = prim::Constant()
%0 : int = prim::Constant[value=1]()
%1 : int = prim::Constant[value=2]()
%2 : int = prim::Constant[value=3]()
%3 : int = prim::Constant[value=4]()
%4 : int = prim::Constant[value=9]()
%5 : int[] = prim::ListConstruct(%0, %1, %2, %3, %4, %4)
%6 : int[] = prim::ListConstruct(%0, %1, %2, %3, %4, %4)
%7 : int[][] = prim::ListConstruct(%5, %6)
%val.1 : Tensor = aten::tensor(%7, %8, %8, %10)
%16 : Tensor = aten::slice(%val.1, %13, %1, %8, %0)
%20 : Tensor = aten::slice(%16, %0, %8, %0, %0)
return (%20)"""
graph = parse_ir(graph_str)
function = self.createFunctionFromGraph(graph)
function_copy = self.getExportImportCopy(function)
src = str(function.code)
# For a signature:
# aten::slice(Tensor self, int dim, int start, int end, int step) -> Tensor
# We ignore trailing arguments after start=2 for dim 0
# and after end=1 for dim 1
# because in %16, %15 and %0 are default values for the schema.
FileCheck().check(
"torch.slice(torch.slice(torch.tensor(_0), 0, 2), 1, None, 1)"
).run(src)
self.assertEqual(function(), function_copy())
| ["def","test_slice_ignorable_args_for_slice","(","self",")",":","graph_str","=","``","''","''","graph","(",")",":","%","13",":","int","=","prim",":",":Constant","[","value=0","]","(",")","%","10",":","bool","=","prim",":",":Constant","[","value=0","]","(",")","%","8",":","NoneType","=","prim",":",":Constant","(",")","%","0",":","int","=","prim",":",":Constant","[","value=1","]","(",")","%","1",":","int","=","prim",":",":Constant","[","value=2","]","(",")","%","2",":","int","=","prim",":",":Constant","[","value=3","]","(",")","%","3",":","int","=","prim",":",":Constant","[","value=4","]","(",")","%","4",":","int","=","prim",":",":Constant","[","value=9","]","(",")","%","5",":","int","[","]","=","prim",":",":ListConstruct","(","%","0",",","%","1",",","%","2",",","%","3",",","%","4",",","%","4",")","%","6",":","int","[","]","=","prim",":",":ListConstruct","(","%","0",",","%","1",",","%","2",",","%","3",",","%","4",",","%","4",")","%","7",":","int","[","]","[","]","=","prim",":",":ListConstruct","(","%","5",",","%","6",")","%","val.1",":","Tensor","=","aten",":",":tensor","(","%","7",",","%","8",",","%","8",",","%","10",")","%","16",":","Tensor","=","aten",":",":slice","(","%","val.1",",","%","13",",","%","1",",","%","8",",","%","0",")","%","20",":","Tensor","=","aten",":",":slice","(","%","16",",","%","0",",","%","8",",","%","0",",","%","0",")","return","(","%","20",")","''","''","''","graph","=","parse_ir","(","graph_str",")","function","=","self.createFunctionFromGraph","(","graph",")","function_copy","=","self.getExportImportCopy","(","function",")","src","=","str","(","function.code",")","#","For","a","signature",":","#","aten",":",":slice","(","Tensor","self",",","int","dim",",","int","start",",","int","end",",","int","step",")","-",">","Tensor","#","We","ignore","trailing","arguments","after","start=2","for","dim","0","#","and","after","end=1","for","dim","1","#","because","in","%","16",",","%","15","and","%","0","are","default","values","for","the","schema",".
","FileCheck","(",")",".check","(","``","torch.slice","(","torch.slice","(","torch.tensor","(","_0",")",",","0",",","2",")",",","1",",","None",",","1",")","''",")",".run","(","src",")","self.assertEqual","(","function","(",")",",","function_copy","(",")",")"] | 21 | 48 | null | test_ignorable_args.py | pytorch/test/jit/test_ignorable_args.py | import os
import sys
import torch
from torch._C import parse_ir
from torch.testing import FileCheck
from torch.testing._internal.jit_utils import JitTestCase | 10 | 1 | 6 | 1 | 1 | 0 | 2 | Use image node_id 1 for calling the TestIgnorableArgs obj's underlying member method code with example usage: obj.test_slice_ignorable_args_for_slice() without return types | 172 | node_id 1 | 287,133 |
__getitem__ | Arrayterator | null | true | self,index | Buffered iterator for big arrays.
`Arrayterator` creates a buffered iterator for reading big arrays in small
contiguous blocks. The class is useful for objects stored in the
file system. It allows iteration over the object *without* reading
everything in memory; instead, small blocks are read and iterated over.
`Arrayterator` can be used with any object that supports multidimensional
slices. This includes NumPy arrays, but also variables from
Scientific.IO.NetCDF or pynetcdf for example.
Parameters
----------
var : array_like
The object to iterate over.
buf_size : int, optional
The buffer size. If `buf_size` is supplied, the maximum amount of
data that will be read into memory is `buf_size` elements.
Default is None, which will read as many element as possible
into memory.
Attributes
----------
var
buf_size
start
stop
step
shape
flat
See Also
--------
numpy.ndenumerate : Multidimensional array iterator.
numpy.flatiter : Flat array iterator.
numpy.memmap : Create a memory-map to an array stored
in a binary file on disk.
Notes
-----
The algorithm works by first finding a "running dimension", along which
the blocks will be extracted. Given an array of dimensions
``(d1, d2, ..., dn)``, e.g. if `buf_size` is smaller than ``d1``, the
first dimension will be used. If, on the other hand,
``d1 < buf_size < d1*d2`` the second dimension will be used, and so on.
Blocks are extracted along this dimension, and when the last block is
returned the process continues from the next dimension, until all
elements have been read.
Examples
--------
>>> a = np.arange(3 * 4 * 5 * 6).reshape(3, 4, 5, 6)
>>> a_itor = np.lib.Arrayterator(a, 2)
>>> a_itor.shape
(3, 4, 5, 6)
Now we can iterate over ``a_itor``, and it will return arrays of size
two. Since `buf_size` was smaller than any dimension, the first
dimension will be iterated over first:
>>> for subarr in a_itor:
... if not subarr.all():
... print(subarr, subarr.shape) # doctest: +SKIP
>>> # [[[[0 1]]]] (1, 1, 1, 2) | ["Buffered","iterator","for","big","arrays",".","`","Arrayterator","`","creates","a","buffered","iterator","for","reading","big","arrays","in","small","contiguous","blocks",".","The","class","is","useful","for","objects","stored","in","the","file","system",".","It","allows","iteration","over","the","object","*","without","*","reading","everything","in","memory",";","instead",",","small","blocks","are","read","and","iterated","over",".","`","Arrayterator","`","can","be","used","with","any","object","that","supports","multidimensional","slices",".","This","includes","NumPy","arrays",",","but","also","variables","from","Scientific.IO.NetCDF","or","pynetcdf","for","example",".","Parameters","--","--","--","--","--","var",":","array_like","The","object","to","iterate","over",".","buf_size",":","int",",","optional","The","buffer","size",".","If","`","buf_size","`","is","supplied",",","the","maximum","amount","of","data","that","will","be","read","into","memory","is","`","buf_size","`","elements",".","Default","is","None",",","which","will","read","as","many","element","as","possible","into","memory",".","Attributes","--","--","--","--","--","var","buf_size","start","stop","step","shape","flat","See","Also","--","--","--","--","numpy.ndenumerate",":","Multidimensional","array","iterator",".","numpy.flatiter",":","Flat","array","iterator",".","numpy.memmap",":","Create","a","memory-map","to","an","array","stored","in","a","binary","file","on","disk",".","Notes","--","--","-","The","algorithm","works","by","first","finding","a","``","running","dimension","''",",","along","which","the","blocks","will","be","extracted",".","Given","an","array","of","dimensions","``","(","d1",",","d2",",","...",",","dn",")","``",",","e.g",".","if","`","buf_size","`","is","smaller","than","``","d1","``",",","the","first","dimension","will","be","used",".","If",",","on","the","other","hand",",","``","d1","<","buf_size","<","d1","*","d2","``","the","second","dimens
def __getitem__(self, index):
    """Return a new arrayterator restricted to ``index``.

    The requested slice is composed with this iterator's current
    (start, stop, step) extent, producing a fresh iterator over the
    same underlying variable.
    """
    # Normalise the index: wrap a bare index in a tuple, expand any
    # Ellipsis to full slices, and promote integers to 1-long slices.
    if not isinstance(index, tuple):
        index = (index,)
    expanded = []
    given, dims = len(index), self.ndim
    for item in index:
        if item is Ellipsis:
            expanded.extend([slice(None)] * (dims - given + 1))
            given = len(expanded)
        elif isinstance(item, int):
            expanded.append(slice(item, item + 1, 1))
        else:
            expanded.append(item)
    index = tuple(expanded)
    # Pad any trailing unspecified dimensions with full slices.
    if len(index) < dims:
        index += (slice(None),) * (dims - len(index))

    # Compose the slice with the current extent into a new arrayterator.
    out = self.__class__(self.var, self.buf_size)
    for dim, (begin, end, stride, item) in enumerate(
        zip(self.start, self.stop, self.step, index)
    ):
        # NOTE: `or`-based defaults mean a slice start/stop/step of 0 is
        # treated the same as an absent (None) value.
        out.start[dim] = begin + (item.start or 0)
        out.step[dim] = stride * (item.step or 1)
        out.stop[dim] = begin + (item.stop or end - begin)
        out.stop[dim] = min(end, out.stop[dim])
    return out
| ["def","__getitem__","(","self",",","index",")",":","``","''","''","Return","a","new","arrayterator.","``","''","''","#","Fix","index",",","handling","ellipsis","and","incomplete","slices",".","if","not","isinstance","(","index",",","tuple",")",":","index","=","(","index",",",")","fixed","=","[","]","length",",","dims","=","len","(","index",")",",","self.ndim","for","slice_","in","index",":","if","slice_","is","Ellipsis",":","fixed.extend","(","[","slice","(","None",")","]","*","(","dims","-","length","+","1",")",")","length","=","len","(","fixed",")","elif","isinstance","(","slice_",",","int",")",":","fixed.append","(","slice","(","slice_",",","slice_","+","1",",","1",")",")","else",":","fixed.append","(","slice_",")","index","=","tuple","(","fixed",")","if","len","(","index",")","<","dims",":","index","+=","(","slice","(","None",")",",",")","*","(","dims","-","len","(","index",")",")","#","Return","a","new","arrayterator","object",".","out","=","self.__class__","(","self.var",",","self.buf_size",")","for","i",",","(","start",",","stop",",","step",",","slice_",")","in","enumerate","(","zip","(","self.start",",","self.stop",",","self.step",",","index",")",")",":","out.start","[","i","]","=","start","+","(","slice_.start","or","0",")","out.step","[","i","]","=","step","*","(","slice_.step","or","1",")","out.stop","[","i","]","=","start","+","(","slice_.stop","or","stop","-","start",")","out.stop","[","i","]","=","min","(","stop",",","out.stop","[","i","]",")","return","out"] | 96 | 126 | null | _arrayterator_impl.py | numpy/numpy/lib/_arrayterator_impl.py | from operator import mul
from functools import reduce | 10 | 1 | 2 | 0 | null | 0 | 7 | Use image node_id 3 for calling the Arrayterator obj's underlying member method code with example usage: obj.__getitem__(index) and returns: out | 144 | node_id 3 | 209,163 |
__array__ | Arrayterator | null | true | self | Buffered iterator for big arrays.
`Arrayterator` creates a buffered iterator for reading big arrays in small
contiguous blocks. The class is useful for objects stored in the
file system. It allows iteration over the object *without* reading
everything in memory; instead, small blocks are read and iterated over.
`Arrayterator` can be used with any object that supports multidimensional
slices. This includes NumPy arrays, but also variables from
Scientific.IO.NetCDF or pynetcdf for example.
Parameters
----------
var : array_like
The object to iterate over.
buf_size : int, optional
The buffer size. If `buf_size` is supplied, the maximum amount of
data that will be read into memory is `buf_size` elements.
Default is None, which will read as many element as possible
into memory.
Attributes
----------
var
buf_size
start
stop
step
shape
flat
See Also
--------
numpy.ndenumerate : Multidimensional array iterator.
numpy.flatiter : Flat array iterator.
numpy.memmap : Create a memory-map to an array stored
in a binary file on disk.
Notes
-----
The algorithm works by first finding a "running dimension", along which
the blocks will be extracted. Given an array of dimensions
``(d1, d2, ..., dn)``, e.g. if `buf_size` is smaller than ``d1``, the
first dimension will be used. If, on the other hand,
``d1 < buf_size < d1*d2`` the second dimension will be used, and so on.
Blocks are extracted along this dimension, and when the last block is
returned the process continues from the next dimension, until all
elements have been read.
Examples
--------
>>> a = np.arange(3 * 4 * 5 * 6).reshape(3, 4, 5, 6)
>>> a_itor = np.lib.Arrayterator(a, 2)
>>> a_itor.shape
(3, 4, 5, 6)
Now we can iterate over ``a_itor``, and it will return arrays of size
two. Since `buf_size` was smaller than any dimension, the first
dimension will be iterated over first:
>>> for subarr in a_itor:
... if not subarr.all():
... print(subarr, subarr.shape) # doctest: +SKIP
>>> # [[[[0 1]]]] (1, 1, 1, 2) | ["Buffered","iterator","for","big","arrays",".","`","Arrayterator","`","creates","a","buffered","iterator","for","reading","big","arrays","in","small","contiguous","blocks",".","The","class","is","useful","for","objects","stored","in","the","file","system",".","It","allows","iteration","over","the","object","*","without","*","reading","everything","in","memory",";","instead",",","small","blocks","are","read","and","iterated","over",".","`","Arrayterator","`","can","be","used","with","any","object","that","supports","multidimensional","slices",".","This","includes","NumPy","arrays",",","but","also","variables","from","Scientific.IO.NetCDF","or","pynetcdf","for","example",".","Parameters","--","--","--","--","--","var",":","array_like","The","object","to","iterate","over",".","buf_size",":","int",",","optional","The","buffer","size",".","If","`","buf_size","`","is","supplied",",","the","maximum","amount","of","data","that","will","be","read","into","memory","is","`","buf_size","`","elements",".","Default","is","None",",","which","will","read","as","many","element","as","possible","into","memory",".","Attributes","--","--","--","--","--","var","buf_size","start","stop","step","shape","flat","See","Also","--","--","--","--","numpy.ndenumerate",":","Multidimensional","array","iterator",".","numpy.flatiter",":","Flat","array","iterator",".","numpy.memmap",":","Create","a","memory-map","to","an","array","stored","in","a","binary","file","on","disk",".","Notes","--","--","-","The","algorithm","works","by","first","finding","a","``","running","dimension","''",",","along","which","the","blocks","will","be","extracted",".","Given","an","array","of","dimensions","``","(","d1",",","d2",",","...",",","dn",")","``",",","e.g",".","if","`","buf_size","`","is","smaller","than","``","d1","``",",","the","first","dimension","will","be","used",".","If",",","on","the","other","hand",",","``","d1","<","buf_size","<","d1","*","d2","``","the","second","dimens
def __array__(self):
    """Return the data currently selected by this iterator's extent."""
    # Materialise one slice object per dimension from (start, stop, step),
    # then index the wrapped variable in a single operation.
    index = []
    for begin, end, stride in zip(self.start, self.stop, self.step):
        index.append(slice(begin, end, stride))
    return self.var[tuple(index)]
| ["def","__array__","(","self",")",":","``","''","''","Return","corresponding","data.","``","''","''","slice_","=","tuple","(","slice","(","*","t",")","for","t","in","zip","(","self.start",",","self.stop",",","self.step",")",")","return","self.var","[","slice_","]"] | 128 | 135 | null | _arrayterator_impl.py | numpy/numpy/lib/_arrayterator_impl.py | from operator import mul
from functools import reduce | 10 | 1 | 2 | 0 | null | 0 | 7 | Use image node_id 4 for calling the Arrayterator obj's underlying member method code with example usage: obj.__array__() and returns: self | 138 | node_id 4 | 209,164 |
def _prettyprint(self):
    """Readable multi-line repr for dataclasses, usable as __str__ or __repr__.

    Behaves like ``pprint.pformat`` except that any field whose formatted
    value spans ten or more lines is elided: lists become ``[...]``, dicts
    become ``{...}``, and ``ClassName(...)`` reprs keep only the class
    name.  (Binding ``__str__ = pprint.pformat`` directly would recurse,
    hence this helper.)
    """
    obj_repr = r"^(\w+)\(.*\)$"  # matches reprs shaped like "SomeClass(...)"
    parts = []
    for field in dataclasses.fields(self):
        text = pprint.pformat(getattr(self, field.name))
        if text.count("\n") >= 10:
            # Too long: collapse to a placeholder that keeps the shape visible.
            if text.startswith("["):
                text = "[...]"
            elif text.startswith("{"):
                text = "{...}"
            elif re.match(obj_repr, text, flags=re.DOTALL):
                text = re.sub(obj_repr, r"\1(...)", text, flags=re.DOTALL)
            else:
                text = "..."
        else:
            # Align continuation lines under the "name=" prefix.
            pad = " " * (len(field.name) + 1)
            text = textwrap.indent(text, pad).lstrip(" ")
        parts.append(f"{field.name}={text}")
    cls_name = self.__class__.__name__
    body = ",\n".join(parts)
    body = textwrap.indent(body, " " * (len(cls_name) + 1)).lstrip(" ")
    return f"{cls_name}({body})"
| ["def","_prettyprint","(","self",")",":","``","''","''","A","dataclass","prettyprint","function","you","can","use","in","__str__or","__repr__",".","Note",":","You","ca","n't","set","`","__str__","=","pprint.pformat","`","because","it","causes","a","recursion","error",".","Mostly","identical","to","pprint","but",":","*","This","will","contract","long","lists","and","dicts","(",">","10lines",")","to","[","...","]","and","{","...","}",".","*","This","will","contract","long","object","reprs","to","ClassName","(","...",")",".","``","''","''","fields","=","[","]","for","f","in","dataclasses.fields","(","self",")",":","s","=","pprint.pformat","(","getattr","(","self",",","f.name",")",")","class_re","=","r","''","^","(","\\w+",")","\\","(",".","*","\\",")","$","''","if","s.count","(","``","\\n","''",")",">","=","10",":","if","s.startswith","(","``","[","``",")",":","s","=","``","[","...","]","''","elif","s.startswith","(","``","{","``",")",":","s","=","``","{","...","}","''","elif","re.match","(","class_re",",","s",",","flags=re.DOTALL",")",":","s","=","re.sub","(","class_re",",","r","''","\\1","(","...",")","''",",","s",",","flags=re.DOTALL",")","else",":","s","=","``","...","''","else",":","width","=","len","(","f.name",")","+","1","s","=","textwrap.indent","(","s",",","``","``","*","width",")",".lstrip","(","``","``",")","fields.append","(","f","''","{","f.name","}","=","{","s","}","''",")","attrs","=","``",",","\\n","''",".join","(","fields",")","name","=","self.__class__.__name__","width","=","len","(","name",")","+","1","attrs","=","textwrap.indent","(","attrs",",","``","``","*","width",")",".lstrip","(","``","``",")","return","f","''","{","name","}","(","{","attrs","}",")","''"] | 51 | 84 | null | string_utils.py | generative-ai-python/google/generativeai/string_utils.py | from __future__ import annotations
import dataclasses
import pprint
import re
import reprlib
import textwrap | 10 | null | 6 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: _prettyprint() and returns: name, str, attrs, str | 119 | node_id 4 | 146,246 |
def parse_args():
    """Build and parse the CLI arguments for GPT-3 based QA evaluation."""
    parser = argparse.ArgumentParser(
        description="question-answer-generation-using-gpt-3"
    )
    # Required input/output locations.
    parser.add_argument("--pred_path", required=True,
                        help="The path to file containing prediction.")
    parser.add_argument("--output_dir", required=True,
                        help="The path to save annotation json files.")
    parser.add_argument("--output_json", required=True,
                        help="The path to save annotation final combined json file.")
    # Parallelism / sharding controls.
    parser.add_argument("--num_tasks", required=True, type=int,
                        help="Number of splits.")
    parser.add_argument("--num_chunks", default=1, type=int,
                        help="Result splits")
    # OpenAI credentials.
    parser.add_argument("--api_key", required=True, type=str,
                        help="OpenAI API key")
    parser.add_argument("--api_base", default=None, type=str,
                        help="OpenAI API base")
    return parser.parse_args()
| ["def","parse_args","(",")",":","parser","=","argparse.ArgumentParser","(","description=","''","question-answer-generation-using-gpt-3","''",")","parser.add_argument","(","``","--","pred_path","''",",","required=True",",","help=","''","The","path","to","file","containing","prediction",".","``",",",")","parser.add_argument","(","``","--","output_dir","''",",","required=True",",","help=","''","The","path","to","save","annotation","json","files",".","``",",",")","parser.add_argument","(","``","--","output_json","''",",","required=True",",","help=","''","The","path","to","save","annotation","final","combined","json","file",".","``",",",")","parser.add_argument","(","``","--","num_tasks","''",",","required=True",",","type=int",",","help=","''","Number","of","splits",".","``",",",")","parser.add_argument","(","``","--","num_chunks","''",",","default=1",",","type=int",",","help=","''","Result","splits","''",")","parser.add_argument","(","``","--","api_key","''",",","required=True",",","type=str",",","help=","''","OpenAI","API","key","''",")","parser.add_argument","(","``","--","api_base","''",",","default=None",",","type=str",",","help=","''","OpenAI","API","base","''",")","args","=","parser.parse_args","(",")","return","args"] | 10 | 20 | null | eval_activitynet_qa.py | llama-vid/llamavid/eval/eval_activitynet_qa.py | import openai
import os
import argparse
import json
import ast
from multiprocessing.pool import Pool
from tqdm import tqdm | 10 | null | 7 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: parse_args() and returns: args | 100 | node_id 1 | 167,039 |
|
print_new_cast_table | global | null | false | null | null | null | null | indx, dtype | def print_new_cast_table(*, can_cast=True, legacy=False, flags=False):
"""Prints new casts, the values given are default "can-cast" values, not
actual ones.
"""
from numpy._core._multiarray_tests import get_all_cast_information
cast_table = {
-1: " ",
0: "#", # No cast (classify as equivalent here)
1: "#", # equivalent casting
2: "=", # safe casting
3: "~", # same-kind casting
4: ".", # unsafe casting
}
flags_table = {
0: "▗",
7: "█",
1: "▚",
2: "▐",
4: "▄",
3: "▜",
5: "▙",
6: "▟",
}
cast_info = namedtuple(
"cast_info", ["can_cast", "legacy", "flags"]
)
no_cast_info = cast_info(" ", " ", " ")
casts = get_all_cast_information()
table = {}
dtypes = set()
for cast in casts:
dtypes.add(cast["from"])
dtypes.add(cast["to"])
if cast["from"] not in table:
table[cast["from"]] = {}
to_dict = table[cast["from"]]
can_cast = cast_table[cast["casting"]]
legacy = "L" if cast["legacy"] else "."
flags = 0
if cast["requires_pyapi"]:
flags |= 1
if cast["supports_unaligned"]:
flags |= 2
if cast["no_floatingpoint_errors"]:
flags |= 4
flags = flags_table[flags]
to_dict[cast["to"]] = cast_info(
can_cast=can_cast, legacy=legacy, flags=flags
)
# The np.dtype(x.type) is a bit strange, because dtype classes do
# not expose much yet.
types = np.typecodes["All"]
def sorter(x):
# This is a bit weird hack, to get a table as close as possible to
# the one printing all typecodes (but expecting user-dtypes).
dtype = np.dtype(x.type)
try:
indx = types.index(dtype.char)
except ValueError:
indx = np.inf
return (indx, dtype.char)
dtypes = sorted(dtypes, key=sorter)
def print_table(field="can_cast"):
print("X", end=" ")
for dt in dtypes:
print(np.dtype(dt.type).char, end=" ")
print()
for from_dt in dtypes:
print(np.dtype(from_dt.type).char, end=" ")
row = table.get(from_dt, {})
for to_dt in dtypes:
print(
getattr(row.get(to_dt, no_cast_info), field),
end=" ",
)
print()
if can_cast:
# Print the actual table:
print()
print(
"Casting: # is equivalent, = is safe, ~ is same-kind, and . is unsafe"
)
print()
print_table("can_cast")
if legacy:
print()
print("L denotes a legacy cast . a non-legacy one.")
print()
print_table("legacy")
if flags:
print()
print(
f"{flags_table[0]}: no flags, {flags_table[1]}: PyAPI, "
f"{flags_table[2]}: supports unaligned, {flags_table[4]}: no-float-errors"
)
print()
print_table("flags")
| ["def","print_new_cast_table","(","*",",","can_cast=True",",","legacy=False",",","flags=False",")",":","``","''","''","Prints","new","casts",",","the","values","given","are","default","``","can-cast","''","values",",","not","actual","ones.","``","''","''","from","numpy._core._multiarray_tests","import","get_all_cast_information","cast_table","=","{","-1",":","``","``",",","0",":","``","#","''",",","#","No","cast","(","classify","as","equivalent","here",")","1",":","``","#","''",",","#","equivalent","casting","2",":","``","=","''",",","#","safe","casting","3",":","``","~","''",",","#","same-kind","casting","4",":","``",".","``",",","#","unsafe","casting","}","flags_table","=","{","0",":","``","\u2597","''",",","7",":","``","\u2588","''",",","1",":","``","\u259a","''",",","2",":","``","\u2590","''",",","4",":","``","\u2584","''",",","3",":","``","\u259c","''",",","5",":","``","\u2599","''",",","6",":","``","\u259f","''",",","}","cast_info","=","namedtuple","(","``","cast_info","''",",","[","``","can_cast","''",",","``","legacy","''",",","``","flags","''","]",")","no_cast_info","=","cast_info","(","``","``",",","``","``",",","``","``",")","casts","=","get_all_cast_information","(",")","table","=","{","}","dtypes","=","set","(",")","for","cast","in","casts",":","dtypes.add","(","cast","[","``","from","''","]",")","dtypes.add","(","cast","[","``","to","''","]",")","if","cast","[","``","from","''","]","not","in","table",":","table","[","cast","[","``","from","''","]","]","=","{","}","to_dict","=","table","[","cast","[","``","from","''","]","]","can_cast","=","cast_table","[","cast","[","``","casting","''","]","]","legacy","=","``","L","''","if","cast","[","``","legacy","''","]","else","``",".","''","flags","=","0","if","cast","[","``","requires_pyapi","''","]",":","flags","|=","1","if","cast","[","``","supports_unaligned","''","]",":","flags","|=","2","if","cast","[","``","no_floatingpoint_errors","''","]",":","flags","|=","4","flags","=","flags_table","[","flags","]","
to_dict","[","cast","[","``","to","''","]","]","=","cast_info","(","can_cast=can_cast",",","legacy=legacy",",","flags=flags",")","#","The","np.dtype","(","x.type",")","is","a","bit","strange",",","because","dtype","classes","do","#","not","expose","much","yet",".","types","=","np.typecodes","[","``","All","''","]","def","sorter","(","x",")",":","#","This","is","a","bit","weird","hack",",","to","get","a","table","as","close","as","possible","to","#","the","one","printing","all","typecodes","(","but","expecting","user-dtypes",")",".","dtype","=","np.dtype","(","x.type",")","try",":","indx","=","types.index","(","dtype.char",")","except","ValueError",":","indx","=","np.inf","return","(","indx",",","dtype.char",")","dtypes","=","sorted","(","dtypes",",","key=sorter",")","def","print_table","(","field=","''","can_cast","''",")",":","print","(","``","X","''",",","end=","''","``",")","for","dt","in","dtypes",":","print","(","np.dtype","(","dt.type",")",".char",",","end=","''","``",")","print","(",")","for","from_dt","in","dtypes",":","print","(","np.dtype","(","from_dt.type",")",".char",",","end=","''","``",")","row","=","table.get","(","from_dt",",","{","}",")","for","to_dt","in","dtypes",":","print","(","getattr","(","row.get","(","to_dt",",","no_cast_info",")",",","field",")",",","end=","''","``",",",")","print","(",")","if","can_cast",":","#","Print","the","actual","table",":","print","(",")","print","(","``","Casting",":","#","is","equivalent",",","=","is","safe",",","~","is","same-kind",",","and",".","is","unsafe","''",")","print","(",")","print_table","(","``","can_cast","''",")","if","legacy",":","print","(",")","print","(","``","L","denotes","a","legacy","cast",".","a","non-legacy","one",".","''",")","print","(",")","print_table","(","``","legacy","''",")","if","flags",":","print","(",")","print","(","f","''","{","flags_table","[","0","]","}",":","no","flags",",","{","flags_table","[","1","]","}",":","PyAPI",",","``","f","''","{","flags_table","[","2","]","}",":",
"supports","unaligned",",","{","flags_table","[","4","]","}",":","no-float-errors","''",")","print","(",")","print_table","(","``","flags","''",")"] | 84 | 177 | null | print_coercion_tables.py | numpy/numpy/testing/print_coercion_tables.py | import numpy
from numpy._core.numerictypes import obj2sctype
from collections import namedtuple | 10 | null | 3 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: print_new_cast_table() and returns: indx, dtype | 118 | node_id 3 | 213,865 |
|
def test_do_bench_using_profiling(self):
    # Benchmark the shared fixture and sanity-check that a positive
    # timing comes back; log it so the measured value shows up in CI output.
    elapsed = do_bench_using_profiling(self._bench_fn)
    log.warning("do_bench_using_profiling result: %s", elapsed)
    self.assertGreater(elapsed, 0)
| ["def","test_do_bench_using_profiling","(","self",")",":","res","=","do_bench_using_profiling","(","self._bench_fn",")","log.warning","(","``","do_bench_using_profiling","result",":","%","s","''",",","res",")","self.assertGreater","(","res",",","0",")"] | 29 | 32 | null | test_inductor_utils.py | pytorch/test/inductor/test_inductor_utils.py | import functools
import logging
import torch
from torch._dynamo.test_case import run_tests, TestCase
from torch._inductor.utils import do_bench, do_bench_using_profiling | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 3 for calling the TestBench obj's underlying member method code with example usage: obj.test_do_bench_using_profiling() without return types | 158 | node_id 3 | 285,714 |
def test_tensor_autograd(self):
    # Export succeeds only when Tensor.requires_grad_ is a no-op for the
    # given sample input; flipping the autograd state during tracing errors.

    class EnableGrad(torch.nn.Module):
        # dynamo errors when Tensor.requires_grad_ changes the autograd state
        def forward(self, a):
            a.requires_grad_(True)
            out = a + a
            return out

    class DisableGrad(torch.nn.Module):
        # dynamo errors when Tensor.requires_grad_ changes the autograd state
        def forward(self, a):
            a.requires_grad_(False)
            out = a + a
            return out

    class AssignGrad(torch.nn.Module):
        # dynamo always errors on direct assignment to Tensor.requires_grad
        def forward(self, a):
            a.requires_grad = False
            out = a + a
            return out

    enable_mod = EnableGrad()
    disable_mod = DisableGrad()
    assign_mod = AssignGrad()

    # No-op transitions export fine.
    export(enable_mod, (torch.randn(10, requires_grad=True),))
    export(disable_mod, (torch.randn(10, requires_grad=False),))

    # State-changing transitions are rejected.
    with self.assertRaises(RuntimeError):
        export(enable_mod, (torch.randn(10, requires_grad=False),))
    with self.assertRaises(RuntimeError):
        export(disable_mod, (torch.randn(10, requires_grad=True),))
    with self.assertRaises(RuntimeError):
        export(assign_mod, (torch.randn(10, requires_grad=False),))
| ["def","test_tensor_autograd","(","self",")",":","#","dynamo","errors","when","Tensor.requires_grad_","change","the","autograd","state","class","F1","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","a.requires_grad_","(","True",")","b","=","a","+","a","return","b","f1","=","F1","(",")","#","dynamo","errors","when","Tensor.requires_grad_","change","the","autograd","state","class","F2","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","a.requires_grad_","(","False",")","b","=","a","+","a","return","b","f2","=","F2","(",")","#","dynamo","always","errors","on","Tensor.requires_grad","class","F3","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","a.requires_grad","=","False","b","=","a","+","a","return","b","f3","=","F3","(",")","export","(","f1",",","(","torch.randn","(","10",",","requires_grad=True",")",",",")",")","export","(","f2",",","(","torch.randn","(","10",",","requires_grad=False",")",",",")",")","with","self.assertRaises","(","RuntimeError",")",":","export","(","f1",",","(","torch.randn","(","10",",","requires_grad=False",")",",",")",")","with","self.assertRaises","(","RuntimeError",")",":","export","(","f2",",","(","torch.randn","(","10",",","requires_grad=True",")",",",")",")","with","self.assertRaises","(","RuntimeError",")",":","export","(","f3",",","(","torch.randn","(","10",",","requires_grad=False",")",",",")",")"] | 68 | 104 | null | test_safeguard.py | pytorch/test/export/test_safeguard.py | import unittest
import torch
import torch._dynamo
from torch.export import export
from torch.testing._internal.common_utils import run_tests, TestCase | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 2 for calling the TestSafeguard obj's underlying member method code with example usage: obj.test_tensor_autograd() and returns: b, b, b | 153 | node_id 2 | 283,747 |
def test_global_autograd(self):
    # Export is allowed when an autograd context manager used inside the
    # module agrees with the ambient grad mode; a mismatch raises.

    class UnderNoGrad(torch.nn.Module):
        def forward(self, a):
            with torch.no_grad():
                out = a + a
            return out

    class UnderEnableGrad(torch.nn.Module):
        def forward(self, a):
            with torch.enable_grad():
                out = a + a
            return out

    class GradSetOff(torch.nn.Module):
        def forward(self, a):
            with torch.set_grad_enabled(False):
                out = a + a
            return out

    class GradSetOn(torch.nn.Module):
        def forward(self, a):
            with torch.set_grad_enabled(True):
                out = a + a
            return out

    no_grad_mod = UnderNoGrad()
    enable_mod = UnderEnableGrad()
    set_off_mod = GradSetOff()
    set_on_mod = GradSetOn()
    sample = torch.randn(10)

    # Under a no-grad ambient state all four variants export.
    with torch.no_grad():
        export(no_grad_mod, (sample,))
        export(enable_mod, (sample,))
        export(set_off_mod, (sample,))
        export(set_on_mod, (sample,))

    # Under grad-enabled ambient state, only grad-enabling managers are
    # accepted; grad-disabling managers raise.
    with torch.enable_grad():
        export(enable_mod, (sample,))
        export(set_on_mod, (sample,))

        with self.assertRaisesRegex(
            RuntimeError, "Encountered autograd state manager op.*"
        ):
            export(no_grad_mod, (sample,))

        with self.assertRaisesRegex(
            RuntimeError, "Encountered autograd state manager op.*"
        ):
            export(set_off_mod, (sample,))
| ["def","test_global_autograd","(","self",")",":","class","F1","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","with","torch.no_grad","(",")",":","b","=","a","+","a","return","b","f1","=","F1","(",")","class","F2","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","with","torch.enable_grad","(",")",":","b","=","a","+","a","return","b","f2","=","F2","(",")","class","F3","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","with","torch.set_grad_enabled","(","False",")",":","b","=","a","+","a","return","b","f3","=","F3","(",")","class","F4","(","torch.nn.Module",")",":","def","forward","(","self",",","a",")",":","with","torch.set_grad_enabled","(","True",")",":","b","=","a","+","a","return","b","f4","=","F4","(",")","a","=","torch.randn","(","10",")","with","torch.no_grad","(",")",":","export","(","f1",",","(","a",",",")",")","export","(","f2",",","(","a",",",")",")","export","(","f3",",","(","a",",",")",")","export","(","f4",",","(","a",",",")",")","with","torch.enable_grad","(",")",":","export","(","f2",",","(","a",",",")",")","export","(","f4",",","(","a",",",")",")","with","self.assertRaisesRegex","(","RuntimeError",",","``","Encountered","autograd","state","manager","op",".","*","''",")",":","export","(","f1",",","(","a",",",")",")","with","self.assertRaisesRegex","(","RuntimeError",",","``","Encountered","autograd","state","manager","op",".","*","''",")",":","export","(","f3",",","(","a",",",")",")"] | 14 | 66 | null | test_safeguard.py | pytorch/test/export/test_safeguard.py | import unittest
import torch
import torch._dynamo
from torch.export import export
from torch.testing._internal.common_utils import run_tests, TestCase | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 1 for calling the TestSafeguard obj's underlying member method code with example usage: obj.test_global_autograd() and returns: b, b, b, b | 156 | node_id 1 | 283,746 |
get_branch_diff | DiffLinter | null | true | self,uncommitted | null | null | Determine the first common ancestor commit.
Find diff between branch and FCA commit.
Note: if `uncommitted` is set, check only
def get_branch_diff(self, uncommitted=False):
    """Return the diff of Python files between this branch and its base.

    The base is the first common ancestor of ``self.branch`` and
    ``self.head``; exits the process with status 1 if the branch is
    unknown.  When ``uncommitted`` is true, only uncommitted changes
    are diffed.
    """
    try:
        base_commit = self.repo.merge_base(self.branch, self.head)[0]
    except exc.GitCommandError:
        print(f"Branch with name `{self.branch}` does not exist")
        sys.exit(1)

    # Pathspec: all Python files, minus the configured exclusions.
    excluded = [f":(exclude){path}" for path in EXCLUDE]
    if uncommitted:
        return self.repo.git.diff(
            self.head, "--unified=0", "***.py", *excluded
        )
    return self.repo.git.diff(
        base_commit, self.head, "--unified=0", "***.py", *excluded
    )
| ["def","get_branch_diff","(","self",",","uncommitted=False",")",":","``","''","''","Determine","the","first","common","ancestor","commit",".","Find","diff","between","branch","and","FCA","commit",".","Note",":","if","`","uncommitted","`","is","set",",","check","only","uncommitted","changes","``","''","''","try",":","commit","=","self.repo.merge_base","(","self.branch",",","self.head",")","[","0","]","except","exc.GitCommandError",":","print","(","f","''","Branch","with","name","`","{","self.branch","}","`","does","not","exist","''",")","sys.exit","(","1",")","exclude","=","[","f","''",":","(","exclude",")","{","i","}","''","for","i","in","EXCLUDE","]","if","uncommitted",":","diff","=","self.repo.git.diff","(","self.head",",","``","--","unified=0","''",",","``","*","*","*",".py","''",",","*","exclude",")","else",":","diff","=","self.repo.git.diff","(","commit",",","self.head",",","``","--","unified=0","''",",","``","*","*","*",".py","''",",","*","exclude",")","return","diff"] | 29 | 51 | null | linter.py | numpy/tools/linter.py | import os
import sys
import subprocess
from argparse import ArgumentParser
from git import Repo, exc | 10 | 1 | 5 | 0 | null | 0 | 4 | Use image node_id 2 for calling the DiffLinter obj's underlying member method code with example usage: obj.get_branch_diff(uncommitted) and returns: diff | 153 | node_id 2 | 214,143 |
run_pycodestyle | DiffLinter | null | true | self,diff | null | null | Original Author: Josh Wilson (@person142)
Source:
https://github.com/scipy/scipy/blob/main/tools/lint_diff.py
Run pycodestyle on the given diff. | ["Original","Author",":","Josh","Wilson","(","@","person142",")","Source",":","https",":","\/\/github.com\/scipy\/scipy\/blob\/main\/tools\/lint_diff.py","Run","pycodestyle","on","the","given","diff","."] | res, res | def run_pycodestyle(self, diff):
"""
Original Author: Josh Wilson (@person142)
Source:
https://github.com/scipy/scipy/blob/main/tools/lint_diff.py
Run pycodestyle on the given diff.
"""
res = subprocess.run(
["pycodestyle", "--diff", "--config", CONFIG],
input=diff,
stdout=subprocess.PIPE,
encoding="utf-8",
)
return res.returncode, res.stdout
| ["def","run_pycodestyle","(","self",",","diff",")",":","``","''","''","Original","Author",":","Josh","Wilson","(","@","person142",")","Source",":","https",":","\/\/github.com\/scipy\/scipy\/blob\/main\/tools\/lint_diff.py","Run","pycodestyle","on","the","given","diff.","``","''","''","res","=","subprocess.run","(","[","``","pycodestyle","''",",","``","--","diff","''",",","``","--","config","''",",","CONFIG","]",",","input=diff",",","stdout=subprocess.PIPE",",","encoding=","''","utf-8","''",",",")","return","res.returncode",",","res.stdout"] | 53 | 66 | null | linter.py | numpy/tools/linter.py | import os
import sys
import subprocess
from argparse import ArgumentParser
from git import Repo, exc | 10 | 1 | 5 | 0 | null | 0 | 4 | Use image node_id 3 for calling the DiffLinter obj's underlying member method code with example usage: obj.run_pycodestyle(diff) and returns: res, res | 151 | node_id 3 | 214,144 |
run_lint | DiffLinter | null | true | self,uncommitted | null | null | null | null | null | def run_lint(self, uncommitted):
diff = self.get_branch_diff(uncommitted)
retcode, errors = self.run_pycodestyle(diff)
errors and print(errors)
sys.exit(retcode)
| ["def","run_lint","(","self",",","uncommitted",")",":","diff","=","self.get_branch_diff","(","uncommitted",")","retcode",",","errors","=","self.run_pycodestyle","(","diff",")","errors","and","print","(","errors",")","sys.exit","(","retcode",")"] | 68 | 74 | null | linter.py | numpy/tools/linter.py | import os
import sys
import subprocess
from argparse import ArgumentParser
from git import Repo, exc | 10 | 1 | 5 | 0 | null | 0 | 4 | Use image node_id 4 for calling the DiffLinter obj's underlying member method code with example usage: obj.run_lint(uncommitted) without return types | 149 | node_id 4 | 214,145 |
create_dummy_module_and_function | global | null | false | null | null | null | null | module, module | def create_dummy_module_and_function():
module = types.ModuleType("dummy_module")
module.__spec__ = importlib.machinery.ModuleSpec(
"dummy_module", None, origin=os.path.abspath(__file__)
)
exec(module_code, module.__dict__)
sys.modules["dummy_module"] = module
# Need to override the original function since its __code__.co_filename is not a regular python file name,
# and the skipfiles rules use filename when checking SKIP_DIRS.
module.add = add
return module, module.add
| ["def","create_dummy_module_and_function","(",")",":","module","=","types.ModuleType","(","``","dummy_module","''",")","module.__spec__","=","importlib.machinery.ModuleSpec","(","``","dummy_module","''",",","None",",","origin=os.path.abspath","(","__file__",")",")","exec","(","module_code",",","module.__dict__",")","sys.modules","[","``","dummy_module","''","]","=","module","#","Need","to","override","the","original","function","since","its","__code__.co_filename","is","not","a","regular","python","file","name",",","#","and","the","skipfiles","rules","use","filename","when","checking","SKIP_DIRS",".","module.add","=","add","return","module",",","module.add"] | 40 | 50 | null | utils.py | pytorch/test/dynamo/utils.py | import importlib
import os
import sys
import types
import torch
import torch._dynamo | 10 | null | 6 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: create_dummy_module_and_function() and returns: module, module | 133 | node_id 4 | 283,620 |
|
add | global | null | false | x | null | null | null | null | unknown | def add(x):
return x + 1
| ["def","add","(","x",")",":","return","x","+","1"] | 36 | 37 | null | utils.py | pytorch/test/dynamo/utils.py | import importlib
import os
import sys
import types
import torch
import torch._dynamo | 10 | null | 6 | null | null | 4 | null | Use image node_id 3 for calling a global function with example usage: add(x) and returns: unknown | 97 | node_id 3 | 283,619 |
outer_func | global | null | false | func | null | null | null | null | wrapped,torch, inner_func | def outer_func(func):
def wrapped(*args):
a = func(*args)
torch._dynamo.graph_break()
return torch.sin(a + 1), inner_func()
return wrapped
| ["def","outer_func","(","func",")",":","def","wrapped","(","*","args",")",":","a","=","func","(","*","args",")","torch._dynamo.graph_break","(",")","return","torch.sin","(","a","+","1",")",",","inner_func","(",")","return","wrapped"] | 20 | 26 | null | utils.py | pytorch/test/dynamo/utils.py | import importlib
import os
import sys
import types
import torch
import torch._dynamo | 10 | null | 6 | null | null | 4 | null | Use image node_id 2 for calling a global function with example usage: outer_func(func) and returns: wrapped, torch, inner_func | 127 | node_id 2 | 283,618 |
inner_func | global | null | false | null | null | null | null | torch | def inner_func():
return torch.is_grad_enabled()
| ["def","inner_func","(",")",":","return","torch.is_grad_enabled","(",")"] | 16 | 17 | null | utils.py | pytorch/test/dynamo/utils.py | import importlib
import os
import sys
import types
import torch
import torch._dynamo | 10 | null | 6 | null | null | 4 | null | Use image node_id 1 for calling a global function with example usage: inner_func() and returns: torch | 101 | node_id 1 | 283,617 |
|
get_dataset | global | null | false | dataset_config,tokenizer,csv_name | null | null | null | null | dataset | def get_dataset(dataset_config, tokenizer, csv_name=None):
"""cover function for handling loading the working dataset"""
"""dataset loading"""
if csv_name is None:
currPath = (
Path.cwd() / "datasets_grammar" / "grammar_train.csv"
)
print(f"Loading dataset {currPath}")
csv_name = str(currPath)
dataset = grammar(
tokenizer=tokenizer,
csv_name=csv_name,
)
return dataset
| ["def","get_dataset","(","dataset_config",",","tokenizer",",","csv_name=None",")",":","``","''","''","cover","function","for","handling","loading","the","working","dataset","''","''","''","``","''","''","dataset","loading","''","''","''","if","csv_name","is","None",":","currPath","=","(","Path.cwd","(",")","\/","``","datasets_grammar","''","\/","``","grammar_train.csv","''",")","print","(","f","''","Loading","dataset","{","currPath","}","''",")","csv_name","=","str","(","currPath",")","dataset","=","grammar","(","tokenizer=tokenizer",",","csv_name=csv_name",",",")","return","dataset"] | 66 | 80 | null | grammar_dataset.py | llama-recipes/src/llama_recipes/datasets/grammar_dataset/grammar_dataset.py | from datasets import load_dataset
from pathlib import Path
from torch.utils.data import Dataset | 10 | null | 3 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: get_dataset(dataset_config, tokenizer, csv_name) and returns: dataset | 139 | node_id 1 | 166,928 |
test_global_autograd_exempt_predispatch | TestSafeguard | TestCase | true | self | null | null | null | null | b,b,b,b | def test_global_autograd_exempt_predispatch(self):
def f1(a):
with torch.no_grad():
b = a + a
return b
def f2(a):
with torch.enable_grad():
b = a + a
return b
def f3(a):
with torch.set_grad_enabled(False):
b = a + a
return b
def f4(a):
with torch.set_grad_enabled(True):
b = a + a
return b
a = torch.randn(10)
from torch.export._trace import _export
with torch.no_grad():
_export(f1, (a,), pre_dispatch=True)
_export(f2, (a,), pre_dispatch=True)
_export(f3, (a,), pre_dispatch=True)
_export(f4, (a,), pre_dispatch=True)
with torch.enable_grad():
_export(f1, (a,), pre_dispatch=True)
_export(f2, (a,), pre_dispatch=True)
_export(f3, (a,), pre_dispatch=True)
_export(f4, (a,), pre_dispatch=True)
| ["def","test_global_autograd_exempt_predispatch","(","self",")",":","def","f1","(","a",")",":","with","torch.no_grad","(",")",":","b","=","a","+","a","return","b","def","f2","(","a",")",":","with","torch.enable_grad","(",")",":","b","=","a","+","a","return","b","def","f3","(","a",")",":","with","torch.set_grad_enabled","(","False",")",":","b","=","a","+","a","return","b","def","f4","(","a",")",":","with","torch.set_grad_enabled","(","True",")",":","b","=","a","+","a","return","b","a","=","torch.randn","(","10",")","from","torch.export._trace","import","_export","with","torch.no_grad","(",")",":","_export","(","f1",",","(","a",",",")",",","pre_dispatch=True",")","_export","(","f2",",","(","a",",",")",",","pre_dispatch=True",")","_export","(","f3",",","(","a",",",")",",","pre_dispatch=True",")","_export","(","f4",",","(","a",",",")",",","pre_dispatch=True",")","with","torch.enable_grad","(",")",":","_export","(","f1",",","(","a",",",")",",","pre_dispatch=True",")","_export","(","f2",",","(","a",",",")",",","pre_dispatch=True",")","_export","(","f3",",","(","a",",",")",",","pre_dispatch=True",")","_export","(","f4",",","(","a",",",")",",","pre_dispatch=True",")"] | 106 | 141 | null | test_safeguard.py | pytorch/test/export/test_safeguard.py | import unittest
import torch
import torch._dynamo
from torch.export import export
from torch.testing._internal.common_utils import run_tests, TestCase | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 3 for calling the TestSafeguard obj's underlying member method code with example usage: obj.test_global_autograd_exempt_predispatch() and returns: b, b, b, b | 175 | node_id 3 | 283,748 |
__getitem__ | grammar | Dataset | true | self,index | null | null | null | null | self | def __getitem__(self, index):
return self.convert_to_features(self.dataset["train"][int(index)])
| ["def","__getitem__","(","self",",","index",")",":","return","self.convert_to_features","(","self.dataset","[","``","train","''","]","[","int","(","index",")","]",")"] | 62 | 63 | null | grammar_dataset.py | llama-recipes/src/llama_recipes/datasets/grammar_dataset/grammar_dataset.py | from datasets import load_dataset
from pathlib import Path
from torch.utils.data import Dataset | 10 | 1 | 3 | 1 | 1 | 1 | 4 | Use image node_id 4 for calling the grammar obj's underlying member method code with example usage: obj.__getitem__(index) and returns: self | 140 | node_id 4 | 166,927 |
__len__ | grammar | Dataset | true | self | null | null | null | null | self | def __len__(self):
return self.dataset["train"].shape[0]
| ["def","__len__","(","self",")",":","return","self.dataset","[","``","train","''","]",".shape","[","0","]"] | 37 | 38 | null | grammar_dataset.py | llama-recipes/src/llama_recipes/datasets/grammar_dataset/grammar_dataset.py | from datasets import load_dataset
from pathlib import Path
from torch.utils.data import Dataset | 10 | 1 | 3 | 1 | 1 | 1 | 4 | Use image node_id 2 for calling the grammar obj's underlying member method code with example usage: obj.__len__() and returns: self | 131 | node_id 2 | 166,925 |
__init__ | grammar | Dataset | true | self,tokenizer,csv_name | null | null | null | null | grammar | def __init__(
self,
tokenizer,
csv_name=None,
):
try:
self.dataset = load_dataset(
"csv",
data_files={
"train": [csv_name]
}, # "eval": "grammar_validation.csv"},
delimiter=",",
)
except Exception as e:
print(
"Loading of grammar dataset failed! Please see recipes/ft_datasets/grammar_dataset/grammar_dataset_process.ipynb for details on how to download the dataset."
)
raise e
# self.dataset = load_dataset("wikihow", "all", data_dir="data/", split=type_path)
# if num_samples:
# self.dataset = self.dataset.select(list(range(0, num_samples)))
self.tokenizer = tokenizer
self.print_text = False
| ["def","__init__","(","self",",","tokenizer",",","csv_name=None",",",")",":","try",":","self.dataset","=","load_dataset","(","``","csv","''",",","data_files=","{","``","train","''",":","[","csv_name","]","}",",","#","``","eval","''",":","``","grammar_validation.csv","''","}",",","delimiter=","''",",","''",",",")","except","Exception","as","e",":","print","(","``","Loading","of","grammar","dataset","failed","!","Please","see","recipes\/ft_datasets\/grammar_dataset\/grammar_dataset_process.ipynb","for","details","on","how","to","download","the","dataset",".","''",")","raise","e","#","self.dataset","=","load_dataset","(","``","wikihow","''",",","``","all","''",",","data_dir=","''","data\/","''",",","split=type_path",")","#","if","num_samples",":","#","self.dataset","=","self.dataset.select","(","list","(","range","(","0",",","num_samples",")",")",")","self.tokenizer","=","tokenizer","self.print_text","=","False"] | 15 | 35 | null | grammar_dataset.py | llama-recipes/src/llama_recipes/datasets/grammar_dataset/grammar_dataset.py | from datasets import load_dataset
from pathlib import Path
from torch.utils.data import Dataset | 10 | 1 | 3 | 1 | 1 | 1 | 4 | Use image node_id 1 to create a new grammar object from inherited base classes: Dataset with example: obj = grammar(tokenizer, csv_name) | 136 | node_id 1 | 166,924 |
main | global | null | false | model_name,peft_model,quantization,max_new_tokens,prompt_file,seed,do_sample,min_length,use_cache,top_p,temperature,top_k,repetition_penalty,length_penalty,enable_azure_content_safety,enable_sensitive_topics,enable_salesforce_content_safety,enable_llamaguard_content_safety,max_padding_length,use_fast_kernels | null | null | null | null | null | def main(
model_name,
peft_model: str = None,
quantization: bool = False,
max_new_tokens=100, # The maximum numbers of tokens to generate
prompt_file: str = None,
seed: int = 42, # seed value for reproducibility
do_sample: bool = True, # Whether or not to use sampling ; use greedy decoding otherwise.
min_length: int = None, # The minimum length of the sequence to be generated, input prompt + min_new_tokens
use_cache: bool = True, # [optional] Whether or not the model should use the past last key/values attentions Whether or not the model should use the past last key/values attentions (if applicable to the model) to speed up decoding.
top_p: float = 1.0, # [optional] If set to float < 1, only the smallest set of most probable tokens with probabilities that add up to top_p or higher are kept for generation.
temperature: float = 1.0, # [optional] The value used to modulate the next token probabilities.
top_k: int = 50, # [optional] The number of highest probability vocabulary tokens to keep for top-k-filtering.
repetition_penalty: float = 1.0, # The parameter for repetition penalty. 1.0 means no penalty.
length_penalty: int = 1, # [optional] Exponential penalty to the length that is used with beam-based generation.
enable_azure_content_safety: bool = False, # Enable safety check with Azure content safety api
enable_sensitive_topics: bool = False, # Enable check for sensitive topics using AuditNLG APIs
enable_salesforce_content_safety: bool = True, # Enable safety check with Salesforce safety flan t5
enable_llamaguard_content_safety: bool = False,
max_padding_length: int = None, # the max padding length to be used with tokenizer padding the prompts.
use_fast_kernels: bool = False, # Enable using SDPA from PyTroch Accelerated Transformers, make use Flash Attention and Xformer memory-efficient kernels
**kwargs,
):
if prompt_file is not None:
assert os.path.exists(
prompt_file
), f"Provided Prompt file does not exist {prompt_file}"
with open(prompt_file, "r") as f:
user_prompt = "\n".join(f.readlines())
elif not sys.stdin.isatty():
user_prompt = "\n".join(sys.stdin.readlines())
else:
print("No user prompt provided. Exiting.")
sys.exit(1)
safety_checker = get_safety_checker(
enable_azure_content_safety,
enable_sensitive_topics,
enable_salesforce_content_safety,
enable_llamaguard_content_safety,
)
# Safety check of the user prompt
safety_results = [check(user_prompt) for check in safety_checker]
are_safe = all([r[1] for r in safety_results])
if are_safe:
print("User prompt deemed safe.")
print(f"User prompt:\n{user_prompt}")
else:
print("User prompt deemed unsafe.")
for method, is_safe, report in safety_results:
if not is_safe:
print(method)
print(report)
print("Skipping the inference as the prompt is not safe.")
sys.exit(1) # Exit the program with an error status
# Set the seeds for reproducibility
if is_xpu_available():
torch.xpu.manual_seed(seed)
else:
torch.cuda.manual_seed(seed)
torch.manual_seed(seed)
model = load_model(model_name, quantization)
if peft_model:
model = load_peft_model(model, peft_model)
model.eval()
if use_fast_kernels:
"""
Setting 'use_fast_kernels' will enable
using of Flash Attention or Xformer memory-efficient kernels
based on the hardware being used. This would speed up inference when used for batched inputs.
"""
try:
from optimum.bettertransformer import BetterTransformer
model = BetterTransformer.transform(model)
except ImportError:
print(
"Module 'optimum' not found. Please install 'optimum' it before proceeding."
)
tokenizer = LlamaTokenizer.from_pretrained(model_name)
tokenizer.pad_token = tokenizer.eos_token
batch = tokenizer(
user_prompt,
padding="max_length",
truncation=True,
max_length=max_padding_length,
return_tensors="pt",
)
if is_xpu_available():
batch = {k: v.to("xpu") for k, v in batch.items()}
else:
batch = {k: v.to("cuda") for k, v in batch.items()}
start = time.perf_counter()
with torch.no_grad():
outputs = model.generate(
**batch,
max_new_tokens=max_new_tokens,
do_sample=do_sample,
top_p=top_p,
temperature=temperature,
min_length=min_length,
use_cache=use_cache,
top_k=top_k,
repetition_penalty=repetition_penalty,
length_penalty=length_penalty,
**kwargs,
)
e2e_inference_time = (time.perf_counter() - start) * 1000
print(f"the inference time is {e2e_inference_time} ms")
output_text = tokenizer.decode(
outputs[0], skip_special_tokens=True
)
# Safety check of the model output
safety_results = [
check(
output_text,
agent_type=AgentType.AGENT,
user_prompt=user_prompt,
)
for check in safety_checker
]
are_safe = all([r[1] for r in safety_results])
if are_safe:
print("User input and model output deemed safe.")
print(f"Model output:\n{output_text}")
else:
print("Model output deemed unsafe.")
for method, is_safe, report in safety_results:
if not is_safe:
print(method)
print(report)
| ["def","main","(","model_name",",","peft_model",":","str","=","None",",","quantization",":","bool","=","False",",","max_new_tokens=100",",","#","The","maximum","numbers","of","tokens","to","generate","prompt_file",":","str","=","None",",","seed",":","int","=","42",",","#","seed","value","for","reproducibility","do_sample",":","bool","=","True",",","#","Whether","or","not","to","use","sampling",";","use","greedy","decoding","otherwise",".","min_length",":","int","=","None",",","#","The","minimum","length","of","the","sequence","to","be","generated",",","input","prompt","+","min_new_tokens","use_cache",":","bool","=","True",",","#","[","optional","]","Whether","or","not","the","model","should","use","the","past","last","key\/values","attentions","Whether","or","not","the","model","should","use","the","past","last","key\/values","attentions","(","if","applicable","to","the","model",")","to","speed","up","decoding",".","top_p",":","float","=","1.0",",","#","[","optional","]","If","set","to","float","<","1",",","only","the","smallest","set","of","most","probable","tokens","with","probabilities","that","add","up","to","top_p","or","higher","are","kept","for","generation",".","temperature",":","float","=","1.0",",","#","[","optional","]","The","value","used","to","modulate","the","next","token","probabilities",".","top_k",":","int","=","50",",","#","[","optional","]","The","number","of","highest","probability","vocabulary","tokens","to","keep","for","top-k-filtering",".","repetition_penalty",":","float","=","1.0",",","#","The","parameter","for","repetition","penalty",".","1.0","means","no","penalty",".","length_penalty",":","int","=","1",",","#","[","optional","]","Exponential","penalty","to","the","length","that","is","used","with","beam-based","generation",".","enable_azure_content_safety",":","bool","=","False",",","#","Enable","safety","check","with","Azure","content","safety","api","enable_sensitive_topics",":","bool","=","False",",","#","Enable","check","for","sens
itive","topics","using","AuditNLG","APIs","enable_salesforce_content_safety",":","bool","=","True",",","#","Enable","safety","check","with","Salesforce","safety","flan","t5","enable_llamaguard_content_safety",":","bool","=","False",",","max_padding_length",":","int","=","None",",","#","the","max","padding","length","to","be","used","with","tokenizer","padding","the","prompts",".","use_fast_kernels",":","bool","=","False",",","#","Enable","using","SDPA","from","PyTroch","Accelerated","Transformers",",","make","use","Flash","Attention","and","Xformer","memory-efficient","kernels","*","*","kwargs",",",")",":","if","prompt_file","is","not","None",":","assert","os.path.exists","(","prompt_file",")",",","f","''","Provided","Prompt","file","does","not","exist","{","prompt_file","}","''","with","open","(","prompt_file",",","``","r","''",")","as","f",":","user_prompt","=","``","\\n","''",".join","(","f.readlines","(",")",")","elif","not","sys.stdin.isatty","(",")",":","user_prompt","=","``","\\n","''",".join","(","sys.stdin.readlines","(",")",")","else",":","print","(","``","No","user","prompt","provided",".","Exiting",".","''",")","sys.exit","(","1",")","safety_checker","=","get_safety_checker","(","enable_azure_content_safety",",","enable_sensitive_topics",",","enable_salesforce_content_safety",",","enable_llamaguard_content_safety",",",")","#","Safety","check","of","the","user","prompt","safety_results","=","[","check","(","user_prompt",")","for","check","in","safety_checker","]","are_safe","=","all","(","[","r","[","1","]","for","r","in","safety_results","]",")","if","are_safe",":","print","(","``","User","prompt","deemed","safe",".","''",")","print","(","f","''","User","prompt",":","\\n","{","user_prompt","}","''",")","else",":","print","(","``","User","prompt","deemed","unsafe",".","''",")","for","method",",","is_safe",",","report","in","safety_results",":","if","not","is_safe",":","print","(","method",")","print","(","report",")","print","(","``","Skipping","the","inf
erence","as","the","prompt","is","not","safe",".","''",")","sys.exit","(","1",")","#","Exit","the","program","with","an","error","status","#","Set","the","seeds","for","reproducibility","if","is_xpu_available","(",")",":","torch.xpu.manual_seed","(","seed",")","else",":","torch.cuda.manual_seed","(","seed",")","torch.manual_seed","(","seed",")","model","=","load_model","(","model_name",",","quantization",")","if","peft_model",":","model","=","load_peft_model","(","model",",","peft_model",")","model.eval","(",")","if","use_fast_kernels",":","``","''","''","Setting","'use_fast_kernels","'","will","enable","using","of","Flash","Attention","or","Xformer","memory-efficient","kernels","based","on","the","hardware","being","used",".","This","would","speed","up","inference","when","used","for","batched","inputs.","``","''","''","try",":","from","optimum.bettertransformer","import","BetterTransformer","model","=","BetterTransformer.transform","(","model",")","except","ImportError",":","print","(","``","Module","'optimum","'","not","found",".","Please","install","'optimum","'","it","before","proceeding",".","''",")","tokenizer","=","LlamaTokenizer.from_pretrained","(","model_name",")","tokenizer.pad_token","=","tokenizer.eos_token","batch","=","tokenizer","(","user_prompt",",","padding=","''","max_length","''",",","truncation=True",",","max_length=max_padding_length",",","return_tensors=","''","pt","''",",",")","if","is_xpu_available","(",")",":","batch","=","{","k",":","v.to","(","``","xpu","''",")","for","k",",","v","in","batch.items","(",")","}","else",":","batch","=","{","k",":","v.to","(","``","cuda","''",")","for","k",",","v","in","batch.items","(",")","}","start","=","time.perf_counter","(",")","with","torch.no_grad","(",")",":","outputs","=","model.generate","(","*","*","batch",",","max_new_tokens=max_new_tokens",",","do_sample=do_sample",",","top_p=top_p",",","temperature=temperature",",","min_length=min_length",",","use_cache=use_cache",",","top_k=top_k",",","repeti
tion_penalty=repetition_penalty",",","length_penalty=length_penalty",",","*","*","kwargs",",",")","e2e_inference_time","=","(","time.perf_counter","(",")","-","start",")","*","1000","print","(","f","''","the","inference","time","is","{","e2e_inference_time","}","ms","''",")","output_text","=","tokenizer.decode","(","outputs","[","0","]",",","skip_special_tokens=True",")","#","Safety","check","of","the","model","output","safety_results","=","[","check","(","output_text",",","agent_type=AgentType.AGENT",",","user_prompt=user_prompt",",",")","for","check","in","safety_checker","]","are_safe","=","all","(","[","r","[","1","]","for","r","in","safety_results","]",")","if","are_safe",":","print","(","``","User","input","and","model","output","deemed","safe",".","''",")","print","(","f","''","Model","output",":","\\n","{","output_text","}","''",")","else",":","print","(","``","Model","output","deemed","unsafe",".","''",")","for","method",",","is_safe",",","report","in","safety_results",":","if","not","is_safe",":","print","(","method",")","print","(","report",")"] | 19 | 139 | null | inference.py | llama-recipes/examples/inference.py | import fire
import os
import sys
import time
import torch
from transformers import LlamaTokenizer
from llama_recipes.inference.safety_utils import get_safety_checker, AgentType
from llama_recipes.inference.model_utils import load_model, load_peft_model
from accelerate.utils import is_xpu_available | 10 | null | 9 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: main(model_name, peft_model, quantization, max_new_tokens, prompt_file, seed, do_sample, min_length, use_cache, top_p, temperature, top_k, repetition_penalty, length_penalty, enable_azure_content_safety, enable_sensitive_topics, enable_salesforce_content_safety, enable_llamaguard_content_safety, max_padding_length, use_fast_kernels) without return types | 425 | node_id 1 | 166,891 |
RunAndVerify | GTestListTestsUnitTest | gtest_test_utils | true | self,flag_value,expected_output_re,other_flag | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Run googletest-list-tests-unittest_ and verify the output.
Runs googletest-list-tests-unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag; None if the flag
should not be present.
expected_output_re: regular expression that matches the expected output
after running command;
other_flag: a different flag to be passed to command along with
gtest_list_tests; None if the flag should not be present. | ["Run","googletest-list-tests-unittest_","and","verify","the","output",".","Runs","googletest-list-tests-unittest_","and","verifies","that","it","prints","the","correct","tests",".","Args",":","flag_value",":","value","of","the","--","gtest_list_tests","flag",";","None","if","the","flag","should","not","be","present",".","expected_output_re",":","regular","expression","that","matches","the","expected","output","after","running","command",";","other_flag",":","a","different","flag","to","be","passed","to","command","along","with","gtest_list_tests",";","None","if","the","flag","should","not","be","present","."] | null | def RunAndVerify(self, flag_value, expected_output_re, other_flag):
"""Run googletest-list-tests-unittest_ and verify the output.
Runs googletest-list-tests-unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag; None if the flag
should not be present.
expected_output_re: regular expression that matches the expected output
after running command;
other_flag: a different flag to be passed to command along with
gtest_list_tests; None if the flag should not be present.
"""
if flag_value is None:
flag = ""
flag_expression = "not set"
elif flag_value == "0":
flag = "--%s=0" % LIST_TESTS_FLAG
flag_expression = "0"
else:
flag = "--%s" % LIST_TESTS_FLAG
flag_expression = "1"
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assertTrue(
expected_output_re.match(output),
'when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"'
% (
LIST_TESTS_FLAG,
flag_expression,
" ".join(args),
output,
expected_output_re.pattern,
),
)
else:
self.assertTrue(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
'when %s is %s, the output of "%s" is "%s"'
% (
LIST_TESTS_FLAG,
flag_expression,
" ".join(args),
output,
),
)
| ["def","RunAndVerify","(","self",",","flag_value",",","expected_output_re",",","other_flag",")",":","``","''","''","Run","googletest-list-tests-unittest_","and","verify","the","output",".","Runs","googletest-list-tests-unittest_","and","verifies","that","it","prints","the","correct","tests",".","Args",":","flag_value",":","value","of","the","--","gtest_list_tests","flag",";","None","if","the","flag","should","not","be","present",".","expected_output_re",":","regular","expression","that","matches","the","expected","output","after","running","command",";","other_flag",":","a","different","flag","to","be","passed","to","command","along","with","gtest_list_tests",";","None","if","the","flag","should","not","be","present.","``","''","''","if","flag_value","is","None",":","flag","=","``","''","flag_expression","=","``","not","set","''","elif","flag_value","==","``","0","''",":","flag","=","``","--","%","s=0","''","%","LIST_TESTS_FLAG","flag_expression","=","``","0","''","else",":","flag","=","``","--","%","s","''","%","LIST_TESTS_FLAG","flag_expression","=","``","1","''","args","=","[","flag","]","if","other_flag","is","not","None",":","args","+=","[","other_flag","]","output","=","Run","(","args",")","if","expected_output_re",":","self.assertTrue","(","expected_output_re.match","(","output",")",",","'when","%","s","is","%","s",",","the","output","of","``","%","s","''","is","``","%","s","''",",","\\n'","'which","does","not","match","regex","``","%","s","''","'","%","(","LIST_TESTS_FLAG",",","flag_expression",",","``","``",".join","(","args",")",",","output",",","expected_output_re.pattern",",",")",",",")","else",":","self.assertTrue","(","not","EXPECTED_OUTPUT_NO_FILTER_RE.match","(","output",")",",","'when","%","s","is","%","s",",","the","output","of","``","%","s","''","is","``","%","s","''","'","%","(","LIST_TESTS_FLAG",",","flag_expression",",","``","``",".join","(","args",")",",","output",",",")",",",")"] | 134 | 184 | null | googletest-list-tests-unittest.py | 
open-asset-importer/contrib/googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 1 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.RunAndVerify(flag_value, expected_output_re, other_flag) without return types | 196 | node_id 1 | 214,593 |
testDefaultBehavior | GTestListTestsUnitTest | gtest_test_utils | true | self | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Tests the behavior of the default mode. | ["Tests","the","behavior","of","the","default","mode","."] | null | def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(
flag_value=None, expected_output_re=None, other_flag=None
)
| ["def","testDefaultBehavior","(","self",")",":","``","''","''","Tests","the","behavior","of","the","default","mode",".","''","''","''","self.RunAndVerify","(","flag_value=None",",","expected_output_re=None",",","other_flag=None",")"] | 186 | 189 | null | googletest-list-tests-unittest.py | open-asset-importer/contrib/googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 2 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.testDefaultBehavior() without return types | 161 | node_id 2 | 214,594 |
testFlag | GTestListTestsUnitTest | gtest_test_utils | true | self | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Tests using the --gtest_list_tests flag. | ["Tests","using","the","--","gtest_list_tests","flag","."] | null | def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
self.RunAndVerify(
flag_value="0", expected_output_re=None, other_flag=None
)
self.RunAndVerify(
flag_value="1",
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None,
)
| ["def","testFlag","(","self",")",":","``","''","''","Tests","using","the","--","gtest_list_tests","flag",".","''","''","''","self.RunAndVerify","(","flag_value=","''","0","''",",","expected_output_re=None",",","other_flag=None",")","self.RunAndVerify","(","flag_value=","''","1","''",",","expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE",",","other_flag=None",",",")"] | 191 | 199 | null | googletest-list-tests-unittest.py | open-asset-importer/contrib/googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 3 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.testFlag() without return types | 150 | node_id 3 | 214,595 |
test_none_skip | global | null | false | setup_rpc | null | null | null | null | input,input | def test_none_skip(setup_rpc):
@skippable(stash=["none"])
class Stash(nn.Module):
def forward(self, input):
yield stash("none", None)
return input # noqa: B901
@skippable(pop=["none"])
class Pop(nn.Module):
def forward(self, input):
none = yield pop("none")
assert none is None
return input
model = nn.Sequential(Stash(), Pop())
model = Pipe(model, chunks=5)
input = torch.rand(10, requires_grad=True)
output = model(input)
def assert_grad_fn_is_not_portal(grad_fn, visited=None):
if visited is None:
visited = set()
if grad_fn in visited or grad_fn is None:
return
assert not isinstance(grad_fn, PortalBlue._backward_cls)
assert not isinstance(grad_fn, PortalCopy._backward_cls)
assert not isinstance(grad_fn, PortalOrange._backward_cls)
visited.add(grad_fn)
for next_grad_fn, _ in grad_fn.next_functions:
assert_grad_fn_is_not_portal(next_grad_fn, visited)
assert_grad_fn_is_not_portal(output.local_value().grad_fn)
output.local_value().sum().backward()
assert input.grad.mean().item() == 1
| ["def","test_none_skip","(","setup_rpc",")",":","@","skippable","(","stash=","[","``","none","''","]",")","class","Stash","(","nn.Module",")",":","def","forward","(","self",",","input",")",":","yield","stash","(","``","none","''",",","None",")","return","input","#","noqa",":","B901","@","skippable","(","pop=","[","``","none","''","]",")","class","Pop","(","nn.Module",")",":","def","forward","(","self",",","input",")",":","none","=","yield","pop","(","``","none","''",")","assert","none","is","None","return","input","model","=","nn.Sequential","(","Stash","(",")",",","Pop","(",")",")","model","=","Pipe","(","model",",","chunks=5",")","input","=","torch.rand","(","10",",","requires_grad=True",")","output","=","model","(","input",")","def","assert_grad_fn_is_not_portal","(","grad_fn",",","visited=None",")",":","if","visited","is","None",":","visited","=","set","(",")","if","grad_fn","in","visited","or","grad_fn","is","None",":","return","assert","not","isinstance","(","grad_fn",",","PortalBlue._backward_cls",")","assert","not","isinstance","(","grad_fn",",","PortalCopy._backward_cls",")","assert","not","isinstance","(","grad_fn",",","PortalOrange._backward_cls",")","visited.add","(","grad_fn",")","for","next_grad_fn",",","_","in","grad_fn.next_functions",":","assert_grad_fn_is_not_portal","(","next_grad_fn",",","visited",")","assert_grad_fn_is_not_portal","(","output.local_value","(",")",".grad_fn",")","output.local_value","(",")",".sum","(",")",".backward","(",")","assert","input.grad.mean","(",")",".item","(",")","==","1"] | 74 | 111 | null | test_gpipe.py | pytorch/test/distributed/pipeline/sync/skip/test_gpipe.py | import pytest
import torch
from torch import nn
from torch.distributed.pipeline.sync import Pipe
from torch.distributed.pipeline.sync.skip import pop, skippable, stash
from torch.distributed.pipeline.sync.skip.portal import PortalBlue, PortalCopy, PortalOrange
from torch.distributed.pipeline.sync.utils import partition_model
from torch.testing._internal.common_utils import run_tests | 10 | null | 8 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: test_none_skip(setup_rpc) and returns: input, input | 121 | node_id 2 | 279,979 |
test_1to3 | global | null | false | balance,checkpoint,setup_rpc | null | null | null | null | output,output,output | def test_1to3(balance, checkpoint, setup_rpc):
if torch.cuda.device_count() < len(balance):
pytest.skip(
"at least %d cuda devices required" % len(balance)
)
@skippable(stash=["1to3"])
class Layer1(nn.Module):
def __init__(self):
super().__init__()
self.conv = nn.Conv2d(3, 3, 1)
def forward(self, input):
yield stash("1to3", input)
output = self.conv(input)
return output # noqa: B901
class Layer2(nn.Module):
def __init__(self):
super().__init__()
self.conv = nn.Conv2d(3, 3, 1)
def forward(self, input):
output = self.conv(input)
return output
@skippable(pop=["1to3"])
class Layer3(nn.Module):
def __init__(self):
super().__init__()
self.conv = nn.Conv2d(3, 3, 1)
def forward(self, input):
skip_1to3 = yield pop("1to3")
output = self.conv(input) + skip_1to3
return output
model = nn.Sequential(Layer1(), Layer2(), Layer3())
model = partition_model(model, balance)
model = Pipe(model, chunks=3, checkpoint=checkpoint)
in_device = model.devices[0]
out_device = model.devices[-1]
input = torch.rand(
30, 3, 224, 224, device=in_device, requires_grad=True
)
output = model(input)
loss = output.local_value().mean()
loss.backward()
assert torch.allclose(
output.local_value().norm(),
torch.tensor(1039.0, device=out_device),
atol=6e-1,
)
assert torch.allclose(
input.grad.norm(),
torch.tensor(0.0004533053, device=in_device),
)
| ["def","test_1to3","(","balance",",","checkpoint",",","setup_rpc",")",":","if","torch.cuda.device_count","(",")","<","len","(","balance",")",":","pytest.skip","(","``","at","least","%","d","cuda","devices","required","''","%","len","(","balance",")",")","@","skippable","(","stash=","[","``","1to3","''","]",")","class","Layer1","(","nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.conv","=","nn.Conv2d","(","3",",","3",",","1",")","def","forward","(","self",",","input",")",":","yield","stash","(","``","1to3","''",",","input",")","output","=","self.conv","(","input",")","return","output","#","noqa",":","B901","class","Layer2","(","nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.conv","=","nn.Conv2d","(","3",",","3",",","1",")","def","forward","(","self",",","input",")",":","output","=","self.conv","(","input",")","return","output","@","skippable","(","pop=","[","``","1to3","''","]",")","class","Layer3","(","nn.Module",")",":","def","__init__","(","self",")",":","super","(",")",".__init__","(",")","self.conv","=","nn.Conv2d","(","3",",","3",",","1",")","def","forward","(","self",",","input",")",":","skip_1to3","=","yield","pop","(","``","1to3","''",")","output","=","self.conv","(","input",")","+","skip_1to3","return","output","model","=","nn.Sequential","(","Layer1","(",")",",","Layer2","(",")",",","Layer3","(",")",")","model","=","partition_model","(","model",",","balance",")","model","=","Pipe","(","model",",","chunks=3",",","checkpoint=checkpoint",")","in_device","=","model.devices","[","0","]","out_device","=","model.devices","[","-1","]","input","=","torch.rand","(","30",",","3",",","224",",","224",",","device=in_device",",","requires_grad=True",")","output","=","model","(","input",")","loss","=","output.local_value","(",")",".mean","(",")","loss.backward","(",")","assert","torch.allclose","(","output.local_value","(",")",".norm","(",")",",","torch.tensor","(","1039.0
",",","device=out_device",")",",","atol=6e-1",",",")","assert","torch.allclose","(","input.grad.norm","(",")",",","torch.tensor","(","0.0004533053",",","device=in_device",")",",",")"] | 23 | 71 | null | test_gpipe.py | pytorch/test/distributed/pipeline/sync/skip/test_gpipe.py | import pytest
import torch
from torch import nn
from torch.distributed.pipeline.sync import Pipe
from torch.distributed.pipeline.sync.skip import pop, skippable, stash
from torch.distributed.pipeline.sync.skip.portal import PortalBlue, PortalCopy, PortalOrange
from torch.distributed.pipeline.sync.utils import partition_model
from torch.testing._internal.common_utils import run_tests | 10 | null | 8 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: test_1to3(balance, checkpoint, setup_rpc) and returns: output, output, output | 147 | node_id 1 | 279,978 |
view | global | null | false | filepath,quiet | null | null | null | null | null | def view(
filepath: typing.Union[os.PathLike, str], quiet: bool = False
) -> None:
"""Open filepath with its default viewing application (platform-specific).
Args:
filepath: Path to the file to open in viewer.
quiet: Suppress ``stderr`` output
from the viewer process (ineffective on Windows).
Raises:
RuntimeError: If the current platform is not supported.
Note:
There is no option to wait for the application to close,
and no way to retrieve the application's exit status.
"""
try:
view_func = getattr(view, PLATFORM)
except AttributeError:
raise RuntimeError(f"platform {PLATFORM!r} not supported")
view_func(filepath, quiet=quiet)
| ["def","view","(","filepath",":","typing.Union","[","os.PathLike",",","str","]",",","quiet",":","bool","=","False",")","-",">","None",":","``","''","''","Open","filepath","with","its","default","viewing","application","(","platform-specific",")",".","Args",":","filepath",":","Path","to","the","file","to","open","in","viewer",".","quiet",":","Suppress","``","stderr","``","output","from","the","viewer","process","(","ineffective","on","Windows",")",".","Raises",":","RuntimeError",":","If","the","current","platform","is","not","supported",".","Note",":","There","is","no","option","to","wait","for","the","application","to","close",",","and","no","way","to","retrieve","the","application","'s","exit","status.","``","''","''","try",":","view_func","=","getattr","(","view",",","PLATFORM",")","except","AttributeError",":","raise","RuntimeError","(","f","''","platform","{","PLATFORM","!","r","}","not","supported","''",")","view_func","(","filepath",",","quiet=quiet",")"] | 20 | 40 | null | viewing.py | graphviz/graphviz/backend/viewing.py | import logging
import os
import platform
import subprocess
import typing
from ..None import _tools | 10 | null | 6 | null | null | 4 | null | Use image node_id 1 for calling a global function with example usage: view(filepath, quiet) without return types | 112 | node_id 1 | 147,160 |
view_darwin | global | null | false | filepath | null | null | null | null | null | def view_darwin(
filepath: typing.Union[os.PathLike, str], *, quiet: bool
) -> None:
"""Open filepath with its default application (mac)."""
cmd = ["open", filepath]
log.debug("view: %r", cmd)
kwargs = {"stderr": subprocess.DEVNULL} if quiet else {}
subprocess.Popen(cmd, **kwargs)
| ["def","view_darwin","(","filepath",":","typing.Union","[","os.PathLike",",","str","]",",","*",",","quiet",":","bool",")","-",">","None",":","``","''","''","Open","filepath","with","its","default","application","(","mac",")",".","''","''","''","cmd","=","[","``","open","''",",","filepath","]","log.debug","(","``","view",":","%","r","''",",","cmd",")","kwargs","=","{","``","stderr","''",":","subprocess.DEVNULL","}","if","quiet","else","{","}","subprocess.Popen","(","cmd",",","*","*","kwargs",")"] | 44 | 50 | null | viewing.py | graphviz/graphviz/backend/viewing.py | import logging
import os
import platform
import subprocess
import typing
from ..None import _tools | 10 | null | 6 | null | null | 4 | null | Use image node_id 2 for calling a global function with example usage: view_darwin(filepath) without return types | 112 | node_id 2 | 147,161 |
convert_to_features | grammar | Dataset | true | self,example_batch | null | null | null | null | sample | def convert_to_features(self, example_batch):
# Create prompt and tokenize contexts and questions
if self.print_text:
print("Input Text: ", self.clean_text(example_batch["text"]))
input_ = example_batch["input"]
target_ = example_batch["target"]
prompt = f"Correct this to standard English: {input_}\n---\nCorrected: "
prompt_ids = self.tokenizer.encode(
self.tokenizer.bos_token + prompt, add_special_tokens=False
)
label_ids = self.tokenizer.encode(
target_ + self.tokenizer.eos_token, add_special_tokens=False
)
sample = {
"input_ids": prompt_ids + label_ids,
"attention_mask": [1] * len(prompt_ids + label_ids),
"labels": [-100] * len(prompt_ids) + label_ids,
}
return sample
| ["def","convert_to_features","(","self",",","example_batch",")",":","#","Create","prompt","and","tokenize","contexts","and","questions","if","self.print_text",":","print","(","``","Input","Text",":","``",",","self.clean_text","(","example_batch","[","``","text","''","]",")",")","input_","=","example_batch","[","``","input","''","]","target_","=","example_batch","[","``","target","''","]","prompt","=","f","''","Correct","this","to","standard","English",":","{","input_","}","\\n","--","-\\nCorrected",":","``","prompt_ids","=","self.tokenizer.encode","(","self.tokenizer.bos_token","+","prompt",",","add_special_tokens=False",")","label_ids","=","self.tokenizer.encode","(","target_","+","self.tokenizer.eos_token",",","add_special_tokens=False",")","sample","=","{","``","input_ids","''",":","prompt_ids","+","label_ids",",","``","attention_mask","''",":","[","1","]","*","len","(","prompt_ids","+","label_ids",")",",","``","labels","''",":","[","-100","]","*","len","(","prompt_ids",")","+","label_ids",",","}","return","sample"] | 40 | 60 | null | grammar_dataset.py | llama-recipes/src/llama_recipes/datasets/grammar_dataset/grammar_dataset.py | from datasets import load_dataset
from pathlib import Path
from torch.utils.data import Dataset | 10 | 1 | 3 | 1 | 1 | 1 | 4 | Use image node_id 3 for calling the grammar obj's underlying member method code with example usage: obj.convert_to_features(example_batch) and returns: sample | 158 | node_id 3 | 166,926 |
__init__ | DiffLinter | null | true | self,branch | null | null | null | null | DiffLinter | def __init__(self, branch):
self.branch = branch
self.repo = Repo(".")
self.head = self.repo.head.commit
| ["def","__init__","(","self",",","branch",")",":","self.branch","=","branch","self.repo","=","Repo","(","``",".","''",")","self.head","=","self.repo.head.commit"] | 24 | 27 | null | linter.py | numpy/tools/linter.py | import os
import sys
import subprocess
from argparse import ArgumentParser
from git import Repo, exc | 10 | 1 | 5 | 0 | null | 0 | 4 | Use image node_id 1 to create a new DiffLinter object with example: obj = DiffLinter(branch) | 93 | node_id 1 | 214,142 |
func3 | global | null | false | a,b | null | null | null | null | int | def func3(a: str, b: int) -> int:
return 0
| ["def","func3","(","a",":","str",",","b",":","int",")","-",">","int",":","return","0"] | 14 | 15 | null | ufunc_config.py | numpy/numpy/typing/tests/data/pass/ufunc_config.py | import numpy | 10 | null | 1 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: func3(a, b) and returns: int | 98 | node_id 3 | 214,082 |
Run | global | null | false | args | null | null | null | null | gtest_test_utils | def Run(args):
"""Runs googletest-list-tests-unittest_ and returns the list of tests printed."""
return gtest_test_utils.Subprocess(
[EXE_PATH] + args, capture_stderr=False
).output
| ["def","Run","(","args",")",":","``","''","''","Runs","googletest-list-tests-unittest_","and","returns","the","list","of","tests","printed",".","''","''","''","return","gtest_test_utils.Subprocess","(","[","EXE_PATH","]","+","args",",","capture_stderr=False",")",".output"] | 120 | 125 | null | googletest-list-tests-unittest.py | googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | null | 2 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: Run(args) and returns: gtest_test_utils | 109 | node_id 1 | 146,885 |
test_do_bench | TestBench | TestCase | true | self | null | null | null | null | null | def test_do_bench(self):
res = do_bench(self._bench_fn)
log.warning("do_bench result: %s", res)
self.assertGreater(res, 0)
| ["def","test_do_bench","(","self",")",":","res","=","do_bench","(","self._bench_fn",")","log.warning","(","``","do_bench","result",":","%","s","''",",","res",")","self.assertGreater","(","res",",","0",")"] | 24 | 27 | null | test_inductor_utils.py | pytorch/test/inductor/test_inductor_utils.py | import functools
import logging
import torch
from torch._dynamo.test_case import run_tests, TestCase
from torch._inductor.utils import do_bench, do_bench_using_profiling | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 2 for calling the TestBench obj's underlying member method code with example usage: obj.test_do_bench() without return types | 142 | node_id 2 | 285,713 |
setUpClass | TestBench | TestCase | true | cls | null | null | null | null | null | def setUpClass(cls):
super().setUpClass()
x = torch.rand(1024, 10).cuda().half()
w = torch.rand(512, 10).cuda().half()
cls._bench_fn = functools.partial(
torch.nn.functional.linear, x, w
)
| ["def","setUpClass","(","cls",")",":","super","(",")",".setUpClass","(",")","x","=","torch.rand","(","1024",",","10",")",".cuda","(",")",".half","(",")","w","=","torch.rand","(","512",",","10",")",".cuda","(",")",".half","(",")","cls._bench_fn","=","functools.partial","(","torch.nn.functional.linear",",","x",",","w",")"] | 18 | 22 | null | test_inductor_utils.py | pytorch/test/inductor/test_inductor_utils.py | import functools
import logging
import torch
from torch._dynamo.test_case import run_tests, TestCase
from torch._inductor.utils import do_bench, do_bench_using_profiling | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 1 for calling the TestBench obj's underlying member method code with example usage: obj.setUpClass(cls) without return types | 142 | node_id 1 | 285,712 |
get_suggested_xfails | global | null | false | base,tests | null | null | null | null | result | def get_suggested_xfails(base, tests):
result = []
tests = [
test[len(base) :]
for test in tests
if belongs_to_base(test, base)
]
base_tests = {remove_device_dtype(test) for test in tests}
tests = set(tests)
for base in base_tests:
cpu_variant = base + "_cpu_float32"
cuda_variant = base + "_cuda_float32"
namespace, api, variant = parse_base(base)
if namespace is None:
api = api
else:
api = f"{namespace}.{api}"
if cpu_variant in tests and cuda_variant in tests:
result.append(f"xfail('{api}', '{variant}'),")
continue
if cpu_variant in tests:
result.append(
f"xfail('{api}', '{variant}', device_type='cpu'),"
)
continue
if cuda_variant in tests:
result.append(
f"xfail('{api}', '{variant}', device_type='cuda'),"
)
continue
result.append(f"skip('{api}', '{variant}',")
return result
| ["def","get_suggested_xfails","(","base",",","tests",")",":","result","=","[","]","tests","=","[","test","[","len","(","base",")",":","]","for","test","in","tests","if","belongs_to_base","(","test",",","base",")","]","base_tests","=","{","remove_device_dtype","(","test",")","for","test","in","tests","}","tests","=","set","(","tests",")","for","base","in","base_tests",":","cpu_variant","=","base","+","``","_cpu_float32","''","cuda_variant","=","base","+","``","_cuda_float32","''","namespace",",","api",",","variant","=","parse_base","(","base",")","if","namespace","is","None",":","api","=","api","else",":","api","=","f","''","{","namespace","}",".","{","api","}","''","if","cpu_variant","in","tests","and","cuda_variant","in","tests",":","result.append","(","f","''","xfail","(","'","{","api","}","'",",","'","{","variant","}","'",")",",","''",")","continue","if","cpu_variant","in","tests",":","result.append","(","f","''","xfail","(","'","{","api","}","'",",","'","{","variant","}","'",",","device_type='cpu","'",")",",","''",")","continue","if","cuda_variant","in","tests",":","result.append","(","f","''","xfail","(","'","{","api","}","'",",","'","{","variant","}","'",",","device_type='cuda","'",")",",","''",")","continue","result.append","(","f","''","skip","(","'","{","api","}","'",",","'","{","variant","}","'",",","''",")","return","result"] | 112 | 137 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 8 for calling a global function with example usage: get_suggested_xfails(base, tests) and returns: result | 123 | node_id 8 | 284,917 |
any_starts_with | global | null | false | strs,thing | null | null | null | null | False,True | def any_starts_with(strs, thing):
for s in strs:
if s.startswith(thing):
return True
return False
| ["def","any_starts_with","(","strs",",","thing",")",":","for","s","in","strs",":","if","s.startswith","(","thing",")",":","return","True","return","False"] | 105 | 109 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 7 for calling a global function with example usage: any_starts_with(strs, thing) and returns: False, True | 123 | node_id 7 | 284,916 |
parse_base | global | null | false | base | null | null | null | null | namespace, api, variant | def parse_base(base):
namespace, rest = parse_namespace(base)
apis = dir(get_torch_module(namespace))
apis = sorted(apis, key=lambda x: -len(x))
api = rest
variant = ""
for candidate in apis:
if rest.startswith(candidate):
api = candidate
variant = rest[len(candidate) + 1 :]
break
print(base, namespace, api, variant)
return namespace, api, variant
| ["def","parse_base","(","base",")",":","namespace",",","rest","=","parse_namespace","(","base",")","apis","=","dir","(","get_torch_module","(","namespace",")",")","apis","=","sorted","(","apis",",","key=lambda","x",":","-len","(","x",")",")","api","=","rest","variant","=","``","''","for","candidate","in","apis",":","if","rest.startswith","(","candidate",")",":","api","=","candidate","variant","=","rest","[","len","(","candidate",")","+","1",":","]","break","print","(","base",",","namespace",",","api",",","variant",")","return","namespace",",","api",",","variant"] | 88 | 102 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 6 for calling a global function with example usage: parse_base(base) and returns: namespace, api, variant | 125 | node_id 6 | 284,915 |
get_torch_module | global | null | false | namespace | null | null | null | null | getattr,torch,torch | def get_torch_module(namespace):
if namespace is None:
return torch
if namespace == "nn.functional":
return torch.nn.functional
return getattr(torch, namespace)
| ["def","get_torch_module","(","namespace",")",":","if","namespace","is","None",":","return","torch","if","namespace","==","``","nn.functional","''",":","return","torch.nn.functional","return","getattr","(","torch",",","namespace",")"] | 80 | 85 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 5 for calling a global function with example usage: get_torch_module(namespace) and returns: getattr, torch, torch | 132 | node_id 5 | 284,914 |
parse_namespace | global | null | false | base | null | null | null | null | None, base,mappings, base | def parse_namespace(base):
mappings = {
"nn_functional_": "nn.functional",
"fft_": "fft",
"linalg_": "linalg",
"_masked_": "_masked",
"sparse_": "sparse",
"special_": "special",
}
for heading in mappings.keys():
if base.startswith(heading):
return mappings[heading], base[len(heading) :]
return None, base
| ["def","parse_namespace","(","base",")",":","mappings","=","{","``","nn_functional_","''",":","``","nn.functional","''",",","``","fft_","''",":","``","fft","''",",","``","linalg_","''",":","``","linalg","''",",","``","_masked_","''",":","``","_masked","''",",","``","sparse_","''",":","``","sparse","''",",","``","special_","''",":","``","special","''",",","}","for","heading","in","mappings.keys","(",")",":","if","base.startswith","(","heading",")",":","return","mappings","[","heading","]",",","base","[","len","(","heading",")",":","]","return","None",",","base"] | 65 | 77 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 4 for calling a global function with example usage: parse_namespace(base) and returns: None, base, mappings, base | 133 | node_id 4 | 284,913 |
belongs_to_base | global | null | false | test,base | null | null | null | null | True,False,False | def belongs_to_base(test, base):
if not test.startswith(base):
return False
candidates = [
try_base
for try_base in base_names
if len(try_base) > len(base)
]
for candidate in candidates:
if test.startswith(candidate):
return False
return True
| ["def","belongs_to_base","(","test",",","base",")",":","if","not","test.startswith","(","base",")",":","return","False","candidates","=","[","try_base","for","try_base","in","base_names","if","len","(","try_base",")",">","len","(","base",")","]","for","candidate","in","candidates",":","if","test.startswith","(","candidate",")",":","return","False","return","True"] | 55 | 62 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 3 for calling a global function with example usage: belongs_to_base(test, base) and returns: True, False, False | 129 | node_id 3 | 284,912 |
remove_device_dtype | global | null | false | test | null | null | null | null | str | def remove_device_dtype(test):
return "_".join(test.split("_")[:-2])
| ["def","remove_device_dtype","(","test",")",":","return","``","_","''",".join","(","test.split","(","``","_","''",")","[",":","-2","]",")"] | 51 | 52 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 2 for calling a global function with example usage: remove_device_dtype(test) and returns: str | 112 | node_id 2 | 284,911 |
get_failed_test | global | null | false | line | null | null | null | null | m,None | def get_failed_test(line):
m = p.match(line)
if m is None:
return None
return m.group(1)
| ["def","get_failed_test","(","line",")",":","m","=","p.match","(","line",")","if","m","is","None",":","return","None","return","m.group","(","1",")"] | 18 | 22 | null | xfail_suggester.py | pytorch/test/functorch/xfail_suggester.py | import re
import torch | 10 | null | 2 | null | null | 8 | null | Use image node_id 1 for calling a global function with example usage: get_failed_test(line) and returns: m, None | 112 | node_id 1 | 284,910 |
find_f2py_commands | global | null | false | null | null | null | null | list,list,list | def find_f2py_commands():
if sys.platform == "win32":
exe_dir = dirname(sys.executable)
if exe_dir.endswith("Scripts"): # virtualenv
return [os.path.join(exe_dir, "f2py")]
else:
return [os.path.join(exe_dir, "Scripts", "f2py")]
else:
# Three scripts are installed in Unix-like systems:
# 'f2py', 'f2py{major}', and 'f2py{major.minor}'. For example,
# if installed with python3.9 the scripts would be named
# 'f2py', 'f2py3', and 'f2py3.9'.
version = sys.version_info
major = str(version.major)
minor = str(version.minor)
return ["f2py", "f2py" + major, "f2py" + major + "." + minor]
| ["def","find_f2py_commands","(",")",":","if","sys.platform","==","``","win32","''",":","exe_dir","=","dirname","(","sys.executable",")","if","exe_dir.endswith","(","``","Scripts","''",")",":","#","virtualenv","return","[","os.path.join","(","exe_dir",",","``","f2py","''",")","]","else",":","return","[","os.path.join","(","exe_dir",",","``","Scripts","''",",","``","f2py","''",")","]","else",":","#","Three","scripts","are","installed","in","Unix-like","systems",":","#","'f2py","'",",","'f2py","{","major","}","'",",","and","'f2py","{","major.minor","}","'",".","For","example",",","#","if","installed","with","python3.9","the","scripts","would","be","named","#","'f2py","'",",","'f2py3","'",",","and","'f2py3.9","'",".","version","=","sys.version_info","major","=","str","(","version.major",")","minor","=","str","(","version.minor",")","return","[","``","f2py","''",",","``","f2py","''","+","major",",","``","f2py","''","+","major","+","``",".","''","+","minor","]"] | 17 | 32 | null | test_scripts.py | numpy/numpy/tests/test_scripts.py | import sys
import os
import pytest
from os.path import join, isfile, dirname
import subprocess
import numpy
from numpy.testing import assert_equal, IS_WASM | 10 | null | 7 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: find_f2py_commands() and returns: list, list, list | 120 | node_id 1 | 214,045 |
|
test_f2py | global | null | false | f2py_cmd | null | null | null | null | null | def test_f2py(f2py_cmd):
# test that we can run f2py script
stdout = subprocess.check_output([f2py_cmd, "-v"])
assert_equal(stdout.strip(), np.__version__.encode("ascii"))
| ["def","test_f2py","(","f2py_cmd",")",":","#","test","that","we","can","run","f2py","script","stdout","=","subprocess.check_output","(","[","f2py_cmd",",","``","-v","''","]",")","assert_equal","(","stdout.strip","(",")",",","np.__version__.encode","(","``","ascii","''",")",")"] | 38 | 41 | null | test_scripts.py | numpy/numpy/tests/test_scripts.py | import sys
import os
import pytest
from os.path import join, isfile, dirname
import subprocess
import numpy
from numpy.testing import assert_equal, IS_WASM | 10 | null | 7 | null | null | 3 | null | Use image node_id 2 for calling a global function with example usage: test_f2py(f2py_cmd) without return types | 110 | node_id 2 | 214,046 |
test_pep338 | global | null | false | null | null | null | null | null | def test_pep338():
stdout = subprocess.check_output(
[sys.executable, "-mnumpy.f2py", "-v"]
)
assert_equal(stdout.strip(), np.__version__.encode("ascii"))
| ["def","test_pep338","(",")",":","stdout","=","subprocess.check_output","(","[","sys.executable",",","``","-mnumpy.f2py","''",",","``","-v","''","]",")","assert_equal","(","stdout.strip","(",")",",","np.__version__.encode","(","``","ascii","''",")",")"] | 45 | 47 | null | test_scripts.py | numpy/numpy/tests/test_scripts.py | import sys
import os
import pytest
from os.path import join, isfile, dirname
import subprocess
import numpy
from numpy.testing import assert_equal, IS_WASM | 10 | null | 7 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: test_pep338() without return types | 104 | node_id 3 | 214,047 |
|
write | Write1 | null | true | self,a | null | null | null | null | None | def write(self, a: str) -> None:
return None
| ["def","write","(","self",",","a",":","str",")","-",">","None",":","return","None"] | 19 | 20 | null | ufunc_config.py | numpy/numpy/typing/tests/data/pass/ufunc_config.py | import numpy | 10 | 3 | 1 | 0 | null | 3 | 1 | Use image node_id 1 for calling the Write1 obj's underlying member method code with example usage: obj.write(a) and returns: None | 129 | node_id 1 | 214,077 |
write | Write2 | null | true | self,a,b | null | null | null | null | None | def write(self, a: str, b: int = 1) -> None:
return None
| ["def","write","(","self",",","a",":","str",",","b",":","int","=","1",")","-",">","None",":","return","None"] | 24 | 25 | null | ufunc_config.py | numpy/numpy/typing/tests/data/pass/ufunc_config.py | import numpy | 10 | 3 | 1 | 0 | null | 3 | 1 | Use image node_id 1 for calling the Write2 obj's underlying member method code with example usage: obj.write(a, b) and returns: None | 132 | node_id 1 | 214,078 |
write | Write3 | null | true | self,a | null | null | null | null | int | def write(self, a: str) -> int:
return 0
| ["def","write","(","self",",","a",":","str",")","-",">","int",":","return","0"] | 29 | 30 | null | ufunc_config.py | numpy/numpy/typing/tests/data/pass/ufunc_config.py | import numpy | 10 | 3 | 1 | 0 | null | 3 | 1 | Use image node_id 1 for calling the Write3 obj's underlying member method code with example usage: obj.write(a) and returns: int | 128 | node_id 1 | 214,079 |
func1 | global | null | false | a,b | null | null | null | null | None | def func1(a: str, b: int) -> None:
return None
| ["def","func1","(","a",":","str",",","b",":","int",")","-",">","None",":","return","None"] | 6 | 7 | null | ufunc_config.py | numpy/numpy/typing/tests/data/pass/ufunc_config.py | import numpy | 10 | null | 1 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: func1(a, b) and returns: None | 99 | node_id 1 | 214,080 |
func2 | global | null | false | a,b,c | null | null | null | null | None | def func2(a: str, b: int, c: float = 1.0) -> None:
return None
| ["def","func2","(","a",":","str",",","b",":","int",",","c",":","float","=","1.0",")","-",">","None",":","return","None"] | 10 | 11 | null | ufunc_config.py | numpy/numpy/typing/tests/data/pass/ufunc_config.py | import numpy | 10 | null | 1 | null | null | 3 | null | Use image node_id 2 for calling a global function with example usage: func2(a, b, c) and returns: None | 102 | node_id 2 | 214,081 |
RunAndVerify | GTestListTestsUnitTest | gtest_test_utils | true | self,flag_value,expected_output_re,other_flag | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Run googletest-list-tests-unittest_ and verify the output.
Runs googletest-list-tests-unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag; None if the flag
should not be present.
expected_output_re: regular expression that matches the expected output
after running command;
other_flag: a different flag to be passed to command along with
gtest_list_tests; None if the flag should not be present. | ["Run","googletest-list-tests-unittest_","and","verify","the","output",".","Runs","googletest-list-tests-unittest_","and","verifies","that","it","prints","the","correct","tests",".","Args",":","flag_value",":","value","of","the","--","gtest_list_tests","flag",";","None","if","the","flag","should","not","be","present",".","expected_output_re",":","regular","expression","that","matches","the","expected","output","after","running","command",";","other_flag",":","a","different","flag","to","be","passed","to","command","along","with","gtest_list_tests",";","None","if","the","flag","should","not","be","present","."] | null | def RunAndVerify(self, flag_value, expected_output_re, other_flag):
"""Run googletest-list-tests-unittest_ and verify the output.
Runs googletest-list-tests-unittest_ and verifies that it prints
the correct tests.
Args:
flag_value: value of the --gtest_list_tests flag; None if the flag
should not be present.
expected_output_re: regular expression that matches the expected output
after running command;
other_flag: a different flag to be passed to command along with
gtest_list_tests; None if the flag should not be present.
"""
if flag_value is None:
flag = ""
flag_expression = "not set"
elif flag_value == "0":
flag = "--%s=0" % LIST_TESTS_FLAG
flag_expression = "0"
else:
flag = "--%s" % LIST_TESTS_FLAG
flag_expression = "1"
args = [flag]
if other_flag is not None:
args += [other_flag]
output = Run(args)
if expected_output_re:
self.assertTrue(
expected_output_re.match(output),
'when %s is %s, the output of "%s" is "%s",\n'
'which does not match regex "%s"'
% (
LIST_TESTS_FLAG,
flag_expression,
" ".join(args),
output,
expected_output_re.pattern,
),
)
else:
self.assertTrue(
not EXPECTED_OUTPUT_NO_FILTER_RE.match(output),
'when %s is %s, the output of "%s" is "%s"'
% (
LIST_TESTS_FLAG,
flag_expression,
" ".join(args),
output,
),
)
| ["def","RunAndVerify","(","self",",","flag_value",",","expected_output_re",",","other_flag",")",":","``","''","''","Run","googletest-list-tests-unittest_","and","verify","the","output",".","Runs","googletest-list-tests-unittest_","and","verifies","that","it","prints","the","correct","tests",".","Args",":","flag_value",":","value","of","the","--","gtest_list_tests","flag",";","None","if","the","flag","should","not","be","present",".","expected_output_re",":","regular","expression","that","matches","the","expected","output","after","running","command",";","other_flag",":","a","different","flag","to","be","passed","to","command","along","with","gtest_list_tests",";","None","if","the","flag","should","not","be","present.","``","''","''","if","flag_value","is","None",":","flag","=","``","''","flag_expression","=","``","not","set","''","elif","flag_value","==","``","0","''",":","flag","=","``","--","%","s=0","''","%","LIST_TESTS_FLAG","flag_expression","=","``","0","''","else",":","flag","=","``","--","%","s","''","%","LIST_TESTS_FLAG","flag_expression","=","``","1","''","args","=","[","flag","]","if","other_flag","is","not","None",":","args","+=","[","other_flag","]","output","=","Run","(","args",")","if","expected_output_re",":","self.assertTrue","(","expected_output_re.match","(","output",")",",","'when","%","s","is","%","s",",","the","output","of","``","%","s","''","is","``","%","s","''",",","\\n'","'which","does","not","match","regex","``","%","s","''","'","%","(","LIST_TESTS_FLAG",",","flag_expression",",","``","``",".join","(","args",")",",","output",",","expected_output_re.pattern",",",")",",",")","else",":","self.assertTrue","(","not","EXPECTED_OUTPUT_NO_FILTER_RE.match","(","output",")",",","'when","%","s","is","%","s",",","the","output","of","``","%","s","''","is","``","%","s","''","'","%","(","LIST_TESTS_FLAG",",","flag_expression",",","``","``",".join","(","args",")",",","output",",",")",",",")"] | 134 | 184 | null | googletest-list-tests-unittest.py | 
googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 1 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.RunAndVerify(flag_value, expected_output_re, other_flag) without return types | 196 | node_id 1 | 146,880 |
testDefaultBehavior | GTestListTestsUnitTest | gtest_test_utils | true | self | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Tests the behavior of the default mode. | ["Tests","the","behavior","of","the","default","mode","."] | null | def testDefaultBehavior(self):
"""Tests the behavior of the default mode."""
self.RunAndVerify(
flag_value=None, expected_output_re=None, other_flag=None
)
| ["def","testDefaultBehavior","(","self",")",":","``","''","''","Tests","the","behavior","of","the","default","mode",".","''","''","''","self.RunAndVerify","(","flag_value=None",",","expected_output_re=None",",","other_flag=None",")"] | 186 | 189 | null | googletest-list-tests-unittest.py | googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 2 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.testDefaultBehavior() without return types | 161 | node_id 2 | 146,881 |
testFlag | GTestListTestsUnitTest | gtest_test_utils | true | self | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Tests using the --gtest_list_tests flag. | ["Tests","using","the","--","gtest_list_tests","flag","."] | null | def testFlag(self):
"""Tests using the --gtest_list_tests flag."""
self.RunAndVerify(
flag_value="0", expected_output_re=None, other_flag=None
)
self.RunAndVerify(
flag_value="1",
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag=None,
)
| ["def","testFlag","(","self",")",":","``","''","''","Tests","using","the","--","gtest_list_tests","flag",".","''","''","''","self.RunAndVerify","(","flag_value=","''","0","''",",","expected_output_re=None",",","other_flag=None",")","self.RunAndVerify","(","flag_value=","''","1","''",",","expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE",",","other_flag=None",",",")"] | 191 | 199 | null | googletest-list-tests-unittest.py | googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 3 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.testFlag() without return types | 150 | node_id 3 | 146,882 |
testOverrideNonFilterFlags | GTestListTestsUnitTest | gtest_test_utils | true | self | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Tests that --gtest_list_tests overrides the non-filter flags. | ["Tests","that","--","gtest_list_tests","overrides","the","non-filter","flags","."] | null | def testOverrideNonFilterFlags(self):
"""Tests that --gtest_list_tests overrides the non-filter flags."""
self.RunAndVerify(
flag_value="1",
expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE,
other_flag="--gtest_break_on_failure",
)
| ["def","testOverrideNonFilterFlags","(","self",")",":","``","''","''","Tests","that","--","gtest_list_tests","overrides","the","non-filter","flags",".","''","''","''","self.RunAndVerify","(","flag_value=","''","1","''",",","expected_output_re=EXPECTED_OUTPUT_NO_FILTER_RE",",","other_flag=","''","--","gtest_break_on_failure","''",",",")"] | 201 | 208 | null | googletest-list-tests-unittest.py | googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 4 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.testOverrideNonFilterFlags() without return types | 168 | node_id 4 | 146,883 |
testWithFilterFlags | GTestListTestsUnitTest | gtest_test_utils | true | self | Tests using the --gtest_list_tests flag to list all tests. | ["Tests","using","the","--","gtest_list_tests","flag","to","list","all","tests","."] | Tests that --gtest_list_tests takes into account the filter flags.
Tests that --gtest_list_tests takes into account the
--gtest_filter flag. | ["Tests","that","--","gtest_list_tests","takes","into","account","the","filter","flags",".","Tests","that","--","gtest_list_tests","takes","into","account","the","--","gtest_filter","flag","."] | null | def testWithFilterFlags(self):
"""Tests that --gtest_list_tests takes into account the filter flags.
Tests that --gtest_list_tests takes into account the
--gtest_filter flag.
"""
self.RunAndVerify(
flag_value="1",
expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE,
other_flag="--gtest_filter=Foo*",
)
| ["def","testWithFilterFlags","(","self",")",":","``","''","''","Tests","that","--","gtest_list_tests","takes","into","account","the","filter","flags",".","Tests","that","--","gtest_list_tests","takes","into","account","the","--","gtest_filter","flag.","``","''","''","self.RunAndVerify","(","flag_value=","''","1","''",",","expected_output_re=EXPECTED_OUTPUT_FILTER_FOO_RE",",","other_flag=","''","--","gtest_filter=Foo","*","''",",",")"] | 210 | 221 | null | googletest-list-tests-unittest.py | googletest/googletest/test/googletest-list-tests-unittest.py | import re
from googletest.test import gtest_test_utils | 10 | 1 | 2 | 1 | 1 | 1 | 5 | Use image node_id 5 for calling the GTestListTestsUnitTest obj's underlying member method code with example usage: obj.testWithFilterFlags() without return types | 161 | node_id 5 | 146,884 |
test_xnnpack_backend_add | TestXNNPackBackend | unittest | true | self | null | null | null | null | z | def test_xnnpack_backend_add(self):
class AddModule(torch.nn.Module):
def forward(self, x, y):
z = x + y
z = z + x
z = z + x
return z
add_module = AddModule()
sample_inputs = (
torch.rand(1, 512, 512, 3),
torch.rand(1, 512, 512, 3),
)
sample_output = torch.zeros(1, 512, 512, 3)
add_module = torch.jit.script(add_module)
expected_output = add_module(sample_inputs[0], sample_inputs[1])
lowered_add_module = torch._C._jit_to_backend(
"xnnpack",
add_module,
{
"forward": {
"inputs": [
sample_inputs[0].clone(),
sample_inputs[1].clone(),
],
"outputs": [sample_output],
}
},
)
actual_output = lowered_add_module.forward(
sample_inputs[0], sample_inputs[1]
)
self.assertTrue(
torch.allclose(
actual_output, expected_output, atol=1e-03, rtol=1e-03
)
)
| ["def","test_xnnpack_backend_add","(","self",")",":","class","AddModule","(","torch.nn.Module",")",":","def","forward","(","self",",","x",",","y",")",":","z","=","x","+","y","z","=","z","+","x","z","=","z","+","x","return","z","add_module","=","AddModule","(",")","sample_inputs","=","(","torch.rand","(","1",",","512",",","512",",","3",")",",","torch.rand","(","1",",","512",",","512",",","3",")",",",")","sample_output","=","torch.zeros","(","1",",","512",",","512",",","3",")","add_module","=","torch.jit.script","(","add_module",")","expected_output","=","add_module","(","sample_inputs","[","0","]",",","sample_inputs","[","1","]",")","lowered_add_module","=","torch._C._jit_to_backend","(","``","xnnpack","''",",","add_module",",","{","``","forward","''",":","{","``","inputs","''",":","[","sample_inputs","[","0","]",".clone","(",")",",","sample_inputs","[","1","]",".clone","(",")",",","]",",","``","outputs","''",":","[","sample_output","]",",","}","}",",",")","actual_output","=","lowered_add_module.forward","(","sample_inputs","[","0","]",",","sample_inputs","[","1","]",")","self.assertTrue","(","torch.allclose","(","actual_output",",","expected_output",",","atol=1e-03",",","rtol=1e-03",")",")"] | 96 | 123 | null | test_xnnpack_delegate.py | pytorch/test/jit/xnnpack/test_xnnpack_delegate.py | import unittest
import torch
import torch._C | 10 | 1 | 3 | 1 | 1 | 0 | 5 | Use image node_id 3 for calling the TestXNNPackBackend obj's underlying member method code with example usage: obj.test_xnnpack_backend_add() and returns: z | 156 | node_id 3 | 288,134 |
ReferToA1 | TableInC | object | true | self | null | null | null | null | None,obj | def ReferToA1(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(
self._tab.Offset(4)
)
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .TableInFirstNS import TableInFirstNS
obj = TableInFirstNS()
obj.Init(self._tab.Bytes, x)
return obj
return None
| ["def","ReferToA1","(","self",")",":","o","=","flatbuffers.number_types.UOffsetTFlags.py_type","(","self._tab.Offset","(","4",")",")","if","o","!","=","0",":","x","=","self._tab.Indirect","(","o","+","self._tab.Pos",")","from",".TableInFirstNS","import","TableInFirstNS","obj","=","TableInFirstNS","(",")","obj.Init","(","self._tab.Bytes",",","x",")","return","obj","return","None"] | 15 | 23 | null | TableInC.py | flatbuffers/java/src/test/java/NamespaceA/TableInC.py | import flatbuffers | 10 | 1 | 1 | 1 | 1 | 4 | 3 | Use image node_id 2 for calling the TableInC obj's underlying member method code with example usage: obj.ReferToA1() and returns: None, obj | 139 | node_id 2 | 142,891 |
test_run_with_schema | TestNeo4jHookConn | null | true | self,mock_graph_database | null | null | null | null | null | def test_run_with_schema(self, mock_graph_database):
connection = Connection(
conn_type="neo4j",
login="login",
password="password",
host="host",
schema="schema",
)
mock_sql = mock.MagicMock(name="sql")
# Use the environment variable mocking to test saving the configuration as a URI and
# to avoid mocking Airflow models class
with mock.patch.dict(
"os.environ", AIRFLOW_CONN_NEO4J_DEFAULT=connection.get_uri()
):
neo4j_hook = Neo4jHook()
op_result = neo4j_hook.run(mock_sql)
mock_graph_database.assert_has_calls(
[
mock.call.driver(
"bolt://host:7687",
auth=("login", "password"),
encrypted=False,
),
mock.call.driver().session(database="schema"),
mock.call.driver().session().__enter__(),
mock.call.driver()
.session()
.__enter__()
.run(mock_sql),
mock.call.driver().session().__enter__().run().data(),
mock.call.driver()
.session()
.__exit__(None, None, None),
]
)
session = (
mock_graph_database.driver.return_value.session.return_value.__enter__.return_value
)
assert op_result == session.run.return_value.data.return_value
| ["def","test_run_with_schema","(","self",",","mock_graph_database",")",":","connection","=","Connection","(","conn_type=","''","neo4j","''",",","login=","''","login","''",",","password=","''","password","''",",","host=","''","host","''",",","schema=","''","schema","''",",",")","mock_sql","=","mock.MagicMock","(","name=","''","sql","''",")","#","Use","the","environment","variable","mocking","to","test","saving","the","configuration","as","a","URI","and","#","to","avoid","mocking","Airflow","models","class","with","mock.patch.dict","(","``","os.environ","''",",","AIRFLOW_CONN_NEO4J_DEFAULT=connection.get_uri","(",")",")",":","neo4j_hook","=","Neo4jHook","(",")","op_result","=","neo4j_hook.run","(","mock_sql",")","mock_graph_database.assert_has_calls","(","[","mock.call.driver","(","``","bolt",":","\/\/host:7687","''",",","auth=","(","``","login","''",",","``","password","''",")",",","encrypted=False",",",")",",","mock.call.driver","(",")",".session","(","database=","''","schema","''",")",",","mock.call.driver","(",")",".session","(",")",".__enter__","(",")",",","mock.call.driver","(",")",".session","(",")",".__enter__","(",")",".run","(","mock_sql",")",",","mock.call.driver","(",")",".session","(",")",".__enter__","(",")",".run","(",")",".data","(",")",",","mock.call.driver","(",")",".session","(",")",".__exit__","(","None",",","None",",","None",")",",","]",")","session","=","(","mock_graph_database.driver.return_value.session.return_value.__enter__.return_value",")","assert","op_result","==","session.run.return_value.data.return_value"] | 58 | 80 | null | test_neo4j.py | airflow/tests/providers/neo4j/hooks/test_neo4j.py | from __future__ import annotations
from unittest import mock
import pytest
from airflow.models import Connection
from airflow.providers.neo4j.hooks.neo4j import Neo4jHook | 10 | 1 | 5 | 0 | null | 0 | 4 | Use image node_id 2 for calling the TestNeo4jHookConn obj's underlying member method code with example usage: obj.test_run_with_schema(mock_graph_database) without return types | 176 | node_id 2 | 25,712 |
mock_get_conn | global | null | false | null | null | null | null | conn_mock | def mock_get_conn():
commit_mock = mock.MagicMock()
cursor_mock = mock.MagicMock(
execute=[],
fetchall=[["1", "2", "3"]],
description=["a", "b", "c"],
iterate=[["1", "2", "3"]],
)
conn_mock = mock.MagicMock(
commit=commit_mock,
cursor=cursor_mock,
)
return conn_mock
| ["def","mock_get_conn","(",")",":","commit_mock","=","mock.MagicMock","(",")","cursor_mock","=","mock.MagicMock","(","execute=","[","]",",","fetchall=","[","[","``","1","''",",","``","2","''",",","``","3","''","]","]",",","description=","[","``","a","''",",","``","b","''",",","``","c","''","]",",","iterate=","[","[","``","1","''",",","``","2","''",",","``","3","''","]","]",",",")","conn_mock","=","mock.MagicMock","(","commit=commit_mock",",","cursor=cursor_mock",",",")","return","conn_mock"] | 33 | 45 | null | test_vertica_to_mysql.py | airflow/tests/providers/mysql/transfers/test_vertica_to_mysql.py | from __future__ import annotations
import datetime
from unittest import mock
import pytest
from airflow.models.dag import DAG | 10 | null | 5 | null | null | 1 | null | Use image node_id 1 for calling a global function with example usage: mock_get_conn() and returns: conn_mock | 108 | node_id 1 | 25,710 |
|
renew_resnet_paths | global | null | false | old_list,n_shave_prefix_segments | null | null | null | null | mapping | def renew_resnet_paths(old_list, n_shave_prefix_segments=0):
mapping = []
for old_item in old_list:
new_item = old_item
new_item = new_item.replace("block.", "resnets.")
new_item = new_item.replace("conv_shorcut", "conv1")
new_item = new_item.replace("in_shortcut", "conv_shortcut")
new_item = new_item.replace("temb_proj", "time_emb_proj")
new_item = shave_segments(
new_item, n_shave_prefix_segments=n_shave_prefix_segments
)
mapping.append({"old": old_item, "new": new_item})
return mapping
| ["def","renew_resnet_paths","(","old_list",",","n_shave_prefix_segments=0",")",":","mapping","=","[","]","for","old_item","in","old_list",":","new_item","=","old_item","new_item","=","new_item.replace","(","``","block",".","``",",","``","resnets",".","''",")","new_item","=","new_item.replace","(","``","conv_shorcut","''",",","``","conv1","''",")","new_item","=","new_item.replace","(","``","in_shortcut","''",",","``","conv_shortcut","''",")","new_item","=","new_item.replace","(","``","temb_proj","''",",","``","time_emb_proj","''",")","new_item","=","shave_segments","(","new_item",",","n_shave_prefix_segments=n_shave_prefix_segments",")","mapping.append","(","{","``","old","''",":","old_item",",","``","new","''",":","new_item","}",")","return","mapping"] | 19 | 32 | null | convert_ddpm_original_checkpoint_to_diffusers.py | llmga/llmga/diffusers/scripts/convert_ddpm_original_checkpoint_to_diffusers.py | import argparse
import json
import torch
from diffusers import AutoencoderKL, DDPMPipeline, DDPMScheduler, UNet2DModel, VQModel | 10 | null | 4 | null | null | 6 | null | Use image node_id 2 for calling a global function with example usage: renew_resnet_paths(old_list, n_shave_prefix_segments) and returns: mapping | 144 | node_id 2 | 169,070 |
shave_segments | global | null | false | path,n_shave_prefix_segments | null | null | null | null | str,str | def shave_segments(path, n_shave_prefix_segments=1):
"""
Removes segments. Positive values shave the first segments, negative shave the last segments.
"""
if n_shave_prefix_segments >= 0:
return ".".join(path.split(".")[n_shave_prefix_segments:])
else:
return ".".join(path.split(".")[:n_shave_prefix_segments])
| ["def","shave_segments","(","path",",","n_shave_prefix_segments=1",")",":","``","''","''","Removes","segments",".","Positive","values","shave","the","first","segments",",","negative","shave","the","last","segments.","``","''","''","if","n_shave_prefix_segments",">","=","0",":","return","``",".","''",".join","(","path.split","(","``",".","``",")","[","n_shave_prefix_segments",":","]",")","else",":","return","``",".","''",".join","(","path.split","(","``",".","``",")","[",":","n_shave_prefix_segments","]",")"] | 9 | 16 | null | convert_ddpm_original_checkpoint_to_diffusers.py | llmga/llmga/diffusers/scripts/convert_ddpm_original_checkpoint_to_diffusers.py | import argparse
import json
import torch
from diffusers import AutoencoderKL, DDPMPipeline, DDPMScheduler, UNet2DModel, VQModel | 10 | null | 4 | null | null | 6 | null | Use image node_id 1 for calling a global function with example usage: shave_segments(path, n_shave_prefix_segments) and returns: str, str | 137 | node_id 1 | 169,069 |
Init | TableInC | object | true | self,buf,pos | null | null | null | null | null | def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
| ["def","Init","(","self",",","buf",",","pos",")",":","self._tab","=","flatbuffers.table.Table","(","buf",",","pos",")"] | 11 | 12 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | 1 | 1 | 1 | 1 | 4 | 3 | Use image node_id 1 for calling the TableInC obj's underlying member method code with example usage: obj.Init(buf, pos) without return types | 140 | node_id 1 | 144,849 |
ReferToA1 | TableInC | object | true | self | null | null | null | null | None,obj | def ReferToA1(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(
self._tab.Offset(4)
)
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .TableInFirstNS import TableInFirstNS
obj = TableInFirstNS()
obj.Init(self._tab.Bytes, x)
return obj
return None
| ["def","ReferToA1","(","self",")",":","o","=","flatbuffers.number_types.UOffsetTFlags.py_type","(","self._tab.Offset","(","4",")",")","if","o","!","=","0",":","x","=","self._tab.Indirect","(","o","+","self._tab.Pos",")","from",".TableInFirstNS","import","TableInFirstNS","obj","=","TableInFirstNS","(",")","obj.Init","(","self._tab.Bytes",",","x",")","return","obj","return","None"] | 15 | 23 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | 1 | 1 | 1 | 1 | 4 | 3 | Use image node_id 2 for calling the TableInC obj's underlying member method code with example usage: obj.ReferToA1() and returns: None, obj | 139 | node_id 2 | 144,850 |
ReferToA2 | TableInC | object | true | self | null | null | null | null | None,obj | def ReferToA2(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(
self._tab.Offset(6)
)
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .SecondTableInA import SecondTableInA
obj = SecondTableInA()
obj.Init(self._tab.Bytes, x)
return obj
return None
| ["def","ReferToA2","(","self",")",":","o","=","flatbuffers.number_types.UOffsetTFlags.py_type","(","self._tab.Offset","(","6",")",")","if","o","!","=","0",":","x","=","self._tab.Indirect","(","o","+","self._tab.Pos",")","from",".SecondTableInA","import","SecondTableInA","obj","=","SecondTableInA","(",")","obj.Init","(","self._tab.Bytes",",","x",")","return","obj","return","None"] | 26 | 34 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | 1 | 1 | 1 | 1 | 4 | 3 | Use image node_id 3 for calling the TableInC obj's underlying member method code with example usage: obj.ReferToA2() and returns: None, obj | 139 | node_id 3 | 144,851 |
TableInCStart | global | null | false | builder | null | null | null | null | null | def TableInCStart(builder):
builder.StartObject(2)
| ["def","TableInCStart","(","builder",")",":","builder.StartObject","(","2",")"] | 36 | 36 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 1 for calling a global function with example usage: TableInCStart(builder) without return types | 113 | node_id 1 | 144,852 |
TableInCAddReferToA1 | global | null | false | builder,referToA1 | null | null | null | null | null | def TableInCAddReferToA1(builder, referToA1):
builder.PrependUOffsetTRelativeSlot(
0,
flatbuffers.number_types.UOffsetTFlags.py_type(referToA1),
0,
)
| ["def","TableInCAddReferToA1","(","builder",",","referToA1",")",":","builder.PrependUOffsetTRelativeSlot","(","0",",","flatbuffers.number_types.UOffsetTFlags.py_type","(","referToA1",")",",","0",",",")"] | 37 | 37 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 2 for calling a global function with example usage: TableInCAddReferToA1(builder, referToA1) without return types | 131 | node_id 2 | 144,853 |
TableInCAddReferToA2 | global | null | false | builder,referToA2 | null | null | null | null | null | def TableInCAddReferToA2(builder, referToA2):
builder.PrependUOffsetTRelativeSlot(
1,
flatbuffers.number_types.UOffsetTFlags.py_type(referToA2),
0,
)
| ["def","TableInCAddReferToA2","(","builder",",","referToA2",")",":","builder.PrependUOffsetTRelativeSlot","(","1",",","flatbuffers.number_types.UOffsetTFlags.py_type","(","referToA2",")",",","0",",",")"] | 38 | 38 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 3 for calling a global function with example usage: TableInCAddReferToA2(builder, referToA2) without return types | 131 | node_id 3 | 144,854 |
TableInCEnd | global | null | false | builder | null | null | null | null | builder | def TableInCEnd(builder):
return builder.EndObject()
| ["def","TableInCEnd","(","builder",")",":","return","builder.EndObject","(",")"] | 39 | 39 | null | TableInC.py | flatbuffers/tests/namespace_test/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: TableInCEnd(builder) and returns: builder | 111 | node_id 4 | 144,855 |
generate | global | null | false | prompt,negative_prompt,seed,guidance_scale,num_inference_steps | null | null | null | null | pipeline | def generate(
prompt,
negative_prompt,
seed=default_seed,
guidance_scale=default_guidance_scale,
num_inference_steps=default_num_steps,
):
prompt_ids, neg_prompt_ids = tokenize_prompt(
prompt, negative_prompt
)
prompt_ids, neg_prompt_ids, rng = replicate_all(
prompt_ids, neg_prompt_ids, seed
)
images = pipeline(
prompt_ids,
p_params,
rng,
num_inference_steps=num_inference_steps,
neg_prompt_ids=neg_prompt_ids,
guidance_scale=guidance_scale,
jit=True,
).images
# convert the images to PIL
images = images.reshape(
(images.shape[0] * images.shape[1],) + images.shape[-3:]
)
return pipeline.numpy_to_pil(np.array(images))
| ["def","generate","(","prompt",",","negative_prompt",",","seed=default_seed",",","guidance_scale=default_guidance_scale",",","num_inference_steps=default_num_steps",",",")",":","prompt_ids",",","neg_prompt_ids","=","tokenize_prompt","(","prompt",",","negative_prompt",")","prompt_ids",",","neg_prompt_ids",",","rng","=","replicate_all","(","prompt_ids",",","neg_prompt_ids",",","seed",")","images","=","pipeline","(","prompt_ids",",","p_params",",","rng",",","num_inference_steps=num_inference_steps",",","neg_prompt_ids=neg_prompt_ids",",","guidance_scale=guidance_scale",",","jit=True",",",")",".images","#","convert","the","images","to","PIL","images","=","images.reshape","(","(","images.shape","[","0","]","*","images.shape","[","1","]",",",")","+","images.shape","[","-3",":","]",")","return","pipeline.numpy_to_pil","(","np.array","(","images",")",")"] | 67 | 88 | null | sdxl_single.py | llmga/llmga/diffusers/examples/research_projects/sdxl_flax/sdxl_single.py | import time
import jax
import jax.numpy
import numpy
from flax.jax_utils import replicate
from jax.experimental.compilation_cache import compilation_cache
from diffusers import FlaxStableDiffusionXLPipeline | 10 | null | 7 | null | null | 3 | null | Use image node_id 3 for calling a global function with example usage: generate(prompt, negative_prompt, seed, guidance_scale, num_inference_steps) and returns: pipeline | 168 | node_id 3 | 168,820 |
renew_attention_paths | global | null | false | old_list,n_shave_prefix_segments,in_mid | null | null | null | null | mapping | def renew_attention_paths(
old_list, n_shave_prefix_segments=0, in_mid=False
):
mapping = []
for old_item in old_list:
new_item = old_item
# In `model.mid`, the layer is called `attn`.
if not in_mid:
new_item = new_item.replace("attn", "attentions")
new_item = new_item.replace(".k.", ".key.")
new_item = new_item.replace(".v.", ".value.")
new_item = new_item.replace(".q.", ".query.")
new_item = new_item.replace("proj_out", "proj_attn")
new_item = new_item.replace("norm", "group_norm")
new_item = shave_segments(
new_item, n_shave_prefix_segments=n_shave_prefix_segments
)
mapping.append({"old": old_item, "new": new_item})
return mapping
| ["def","renew_attention_paths","(","old_list",",","n_shave_prefix_segments=0",",","in_mid=False",")",":","mapping","=","[","]","for","old_item","in","old_list",":","new_item","=","old_item","#","In","`","model.mid","`",",","the","layer","is","called","`","attn","`",".","if","not","in_mid",":","new_item","=","new_item.replace","(","``","attn","''",",","``","attentions","''",")","new_item","=","new_item.replace","(","``",".k",".","``",",","``",".key",".","''",")","new_item","=","new_item.replace","(","``",".v",".","``",",","``",".value",".","''",")","new_item","=","new_item.replace","(","``",".q",".","``",",","``",".query",".","''",")","new_item","=","new_item.replace","(","``","proj_out","''",",","``","proj_attn","''",")","new_item","=","new_item.replace","(","``","norm","''",",","``","group_norm","''",")","new_item","=","shave_segments","(","new_item",",","n_shave_prefix_segments=n_shave_prefix_segments",")","mapping.append","(","{","``","old","''",":","old_item",",","``","new","''",":","new_item","}",")","return","mapping"] | 35 | 53 | null | convert_ddpm_original_checkpoint_to_diffusers.py | llmga/llmga/diffusers/scripts/convert_ddpm_original_checkpoint_to_diffusers.py | import argparse
import json
import torch
from diffusers import AutoencoderKL, DDPMPipeline, DDPMScheduler, UNet2DModel, VQModel | 10 | null | 4 | null | null | 6 | null | Use image node_id 3 for calling a global function with example usage: renew_attention_paths(old_list, n_shave_prefix_segments, in_mid) and returns: mapping | 155 | node_id 3 | 169,071 |
replicate_all | global | null | false | prompt_ids,neg_prompt_ids,seed | null | null | null | null | p_prompt_ids, p_neg_prompt_ids, rng | def replicate_all(prompt_ids, neg_prompt_ids, seed):
p_prompt_ids = replicate(prompt_ids)
p_neg_prompt_ids = replicate(neg_prompt_ids)
rng = jax.random.PRNGKey(seed)
rng = jax.random.split(rng, NUM_DEVICES)
return p_prompt_ids, p_neg_prompt_ids, rng
| ["def","replicate_all","(","prompt_ids",",","neg_prompt_ids",",","seed",")",":","p_prompt_ids","=","replicate","(","prompt_ids",")","p_neg_prompt_ids","=","replicate","(","neg_prompt_ids",")","rng","=","jax.random.PRNGKey","(","seed",")","rng","=","jax.random.split","(","rng",",","NUM_DEVICES",")","return","p_prompt_ids",",","p_neg_prompt_ids",",","rng"] | 58 | 63 | null | sdxl_single.py | llmga/llmga/diffusers/examples/research_projects/sdxl_flax/sdxl_single.py | import time
import jax
import jax.numpy
import numpy
from flax.jax_utils import replicate
from jax.experimental.compilation_cache import compilation_cache
from diffusers import FlaxStableDiffusionXLPipeline | 10 | null | 7 | null | null | 3 | null | Use image node_id 2 for calling a global function with example usage: replicate_all(prompt_ids, neg_prompt_ids, seed) and returns: p_prompt_ids, p_neg_prompt_ids, rng | 168 | node_id 2 | 168,819 |
__init__ | Dataset | data | true | self,data_folder,transform_lr,transform_hr,stage | Characterizes a dataset for PyTorch | ["Characterizes","a","dataset","for","PyTorch"] | Initialization | ["Initialization"] | Dataset | def __init__(self, data_folder, transform_lr, transform_hr, stage):
"Initialization"
file_list = glob.glob("{}/*".format(data_folder))
n = len(file_list)
train_size = np.floor(n * 0.8).astype(np.int)
self.images = (
file_list[:train_size]
if stage is "train"
else file_list[train_size:]
)
self.transform_lr = transforms.Compose(transform_lr)
self.transform_hr = transforms.Compose(transform_hr)
| ["def","__init__","(","self",",","data_folder",",","transform_lr",",","transform_hr",",","stage",")",":","``","Initialization","''","file_list","=","glob.glob","(","``","{","}","\/","*","''",".format","(","data_folder",")",")","n","=","len","(","file_list",")","train_size","=","np.floor","(","n","*","0.8",")",".astype","(","np.int",")","self.images","=","(","file_list","[",":","train_size","]","if","stage","is","``","train","''","else","file_list","[","train_size",":","]",")","self.transform_lr","=","transforms.Compose","(","transform_lr",")","self.transform_hr","=","transforms.Compose","(","transform_hr",")"] | 9 | 17 | null | datasets.py | gans-with-pytorch/srgan/datasets.py | from PIL import Image
from torch.utils import data
import glob
import torchvision.transforms
import numpy | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 1 to create a new Dataset object from inherited base classes: data with example: obj = Dataset(data_folder, transform_lr, transform_hr, stage) | 160 | node_id 1 | 145,345 |
__len__ | Dataset | data | true | self | Characterizes a dataset for PyTorch | ["Characterizes","a","dataset","for","PyTorch"] | Denotes the total number of samples | ["Denotes","the","total","number","of","samples"] | len | def __len__(self):
"Denotes the total number of samples"
return len(self.images)
| ["def","__len__","(","self",")",":","``","Denotes","the","total","number","of","samples","''","return","len","(","self.images",")"] | 19 | 21 | null | datasets.py | gans-with-pytorch/srgan/datasets.py | from PIL import Image
from torch.utils import data
import glob
import torchvision.transforms
import numpy | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 2 for calling the Dataset obj's underlying member method code with example usage: obj.__len__() and returns: len | 130 | node_id 2 | 145,346 |
__getitem__ | Dataset | data | true | self,index | Characterizes a dataset for PyTorch | ["Characterizes","a","dataset","for","PyTorch"] | Generates one sample of data | ["Generates","one","sample","of","data"] | self, self | def __getitem__(self, index):
"Generates one sample of data"
# Select sample
# Load data and get label
hr = Image.open(self.images[index])
return self.transform_lr(hr), self.transform_hr(hr)
| ["def","__getitem__","(","self",",","index",")",":","``","Generates","one","sample","of","data","''","#","Select","sample","#","Load","data","and","get","label","hr","=","Image.open","(","self.images","[","index","]",")","return","self.transform_lr","(","hr",")",",","self.transform_hr","(","hr",")"] | 23 | 30 | null | datasets.py | gans-with-pytorch/srgan/datasets.py | from PIL import Image
from torch.utils import data
import glob
import torchvision.transforms
import numpy | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 3 for calling the Dataset obj's underlying member method code with example usage: obj.__getitem__(index) and returns: self, self | 147 | node_id 3 | 145,347 |
parse_args | global | null | false | null | null | null | null | parser | def parse_args():
parser = argparse.ArgumentParser("", add_help=False)
parser.add_argument(
"--class_prompt",
help="text prompt to retrieve images",
required=True,
type=str,
)
parser.add_argument(
"--class_data_dir",
help="path to save images",
required=True,
type=str,
)
parser.add_argument(
"--num_class_images",
help="number of images to download",
default=200,
type=int,
)
return parser.parse_args()
| ["def","parse_args","(",")",":","parser","=","argparse.ArgumentParser","(","``","''",",","add_help=False",")","parser.add_argument","(","``","--","class_prompt","''",",","help=","''","text","prompt","to","retrieve","images","''",",","required=True",",","type=str",",",")","parser.add_argument","(","``","--","class_data_dir","''",",","help=","''","path","to","save","images","''",",","required=True",",","type=str",",",")","parser.add_argument","(","``","--","num_class_images","''",",","help=","''","number","of","images","to","download","''",",","default=200",",","type=int",",",")","return","parser.parse_args","(",")"] | 77 | 82 | null | retrieve.py | llmga/llmga/diffusers/examples/custom_diffusion/retrieve.py | import argparse
import os
from io import BytesIO
from pathlib import Path
import requests
from clip_retrieval.clip_client import ClipClient
from PIL import Image
from tqdm import tqdm | 10 | null | 8 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: parse_args() and returns: parser | 102 | node_id 2 | 168,562 |
|
retrieve | global | null | false | class_prompt,class_data_dir,num_class_images | null | null | null | null | null | def retrieve(class_prompt, class_data_dir, num_class_images):
factor = 1.5
num_images = int(factor * num_class_images)
client = ClipClient(
url="https://knn.laion.ai/knn-service",
indice_name="laion_400m",
num_images=num_images,
aesthetic_weight=0.1,
)
os.makedirs(f"{class_data_dir}/images", exist_ok=True)
if (
len(list(Path(f"{class_data_dir}/images").iterdir()))
>= num_class_images
):
return
while True:
class_images = client.query(text=class_prompt)
if (
len(class_images) >= factor * num_class_images
or num_images > 1e4
):
break
else:
num_images = int(factor * num_images)
client = ClipClient(
url="https://knn.laion.ai/knn-service",
indice_name="laion_400m",
num_images=num_images,
aesthetic_weight=0.1,
)
count = 0
total = 0
pbar = tqdm(
desc="downloading real regularization images",
total=num_class_images,
)
with open(f"{class_data_dir}/caption.txt", "w") as f1, open(
f"{class_data_dir}/urls.txt", "w"
) as f2, open(f"{class_data_dir}/images.txt", "w") as f3:
while total < num_class_images:
images = class_images[count]
count += 1
try:
img = requests.get(images["url"], timeout=30)
if img.status_code == 200:
_ = Image.open(BytesIO(img.content))
with open(
f"{class_data_dir}/images/{total}.jpg", "wb"
) as f:
f.write(img.content)
f1.write(images["caption"] + "\n")
f2.write(images["url"] + "\n")
f3.write(
f"{class_data_dir}/images/{total}.jpg" + "\n"
)
total += 1
pbar.update(1)
else:
continue
except Exception:
continue
return
| ["def","retrieve","(","class_prompt",",","class_data_dir",",","num_class_images",")",":","factor","=","1.5","num_images","=","int","(","factor","*","num_class_images",")","client","=","ClipClient","(","url=","''","https",":","\/\/knn.laion.ai\/knn-service","''",",","indice_name=","''","laion_400m","''",",","num_images=num_images",",","aesthetic_weight=0.1",",",")","os.makedirs","(","f","''","{","class_data_dir","}","\/images","''",",","exist_ok=True",")","if","(","len","(","list","(","Path","(","f","''","{","class_data_dir","}","\/images","''",")",".iterdir","(",")",")",")",">","=","num_class_images",")",":","return","while","True",":","class_images","=","client.query","(","text=class_prompt",")","if","(","len","(","class_images",")",">","=","factor","*","num_class_images","or","num_images",">","1e4",")",":","break","else",":","num_images","=","int","(","factor","*","num_images",")","client","=","ClipClient","(","url=","''","https",":","\/\/knn.laion.ai\/knn-service","''",",","indice_name=","''","laion_400m","''",",","num_images=num_images",",","aesthetic_weight=0.1",",",")","count","=","0","total","=","0","pbar","=","tqdm","(","desc=","''","downloading","real","regularization","images","''",",","total=num_class_images",",",")","with","open","(","f","''","{","class_data_dir","}","\/caption.txt","''",",","``","w","''",")","as","f1",",","open","(","f","''","{","class_data_dir","}","\/urls.txt","''",",","``","w","''",")","as","f2",",","open","(","f","''","{","class_data_dir","}","\/images.txt","''",",","``","w","''",")","as","f3",":","while","total","<","num_class_images",":","images","=","class_images","[","count","]","count","+=","1","try",":","img","=","requests.get","(","images","[","``","url","''","]",",","timeout=30",")","if","img.status_code","==","200",":","_","=","Image.open","(","BytesIO","(","img.content",")",")","with","open","(","f","''","{","class_data_dir","}","\/images\/","{","total","}",".jpg","''",",","``","wb","''",")","as","f",":","f.write","(","i
mg.content",")","f1.write","(","images","[","``","caption","''","]","+","``","\\n","''",")","f2.write","(","images","[","``","url","''","]","+","``","\\n","''",")","f3.write","(","f","''","{","class_data_dir","}","\/images\/","{","total","}",".jpg","''","+","``","\\n","''",")","total","+=","1","pbar.update","(","1",")","else",":","continue","except","Exception",":","continue","return"] | 25 | 74 | null | retrieve.py | llmga/llmga/diffusers/examples/custom_diffusion/retrieve.py | import argparse
import os
from io import BytesIO
from pathlib import Path
import requests
from clip_retrieval.clip_client import ClipClient
from PIL import Image
from tqdm import tqdm | 10 | null | 8 | null | null | 2 | null | Use image node_id 1 for calling a global function with example usage: retrieve(class_prompt, class_data_dir, num_class_images) without return types | 147 | node_id 1 | 168,561 |
init | global | null | false | wish_host,wish_port | null | null | null | null | null | def init(wish_host, wish_port):
global host, port, listener
host = wish_host
port = wish_port
listener.bind((host, port))
listener.listen()
listener.settimeout(0)
| ["def","init","(","wish_host",",","wish_port",")",":","global","host",",","port",",","listener","host","=","wish_host","port","=","wish_port","listener.bind","(","(","host",",","port",")",")","listener.listen","(",")","listener.settimeout","(","0",")"] | 26 | 32 | null | network_gui.py | gaussian-splatting/gaussian_renderer/network_gui.py | import torch
import traceback
import socket
import json
from scene.cameras import MiniCam | 10 | null | 5 | null | null | 5 | null | Use image node_id 1 for calling a global function with example usage: init(wish_host, wish_port) without return types | 117 | node_id 1 | 145,390 |
try_connect | global | null | false | null | null | null | null | null | def try_connect():
global conn, addr, listener
try:
conn, addr = listener.accept()
print(f"\nConnected by {addr}")
conn.settimeout(None)
except Exception as inst:
pass
| ["def","try_connect","(",")",":","global","conn",",","addr",",","listener","try",":","conn",",","addr","=","listener.accept","(",")","print","(","f","''","\\nConnected","by","{","addr","}","''",")","conn.settimeout","(","None",")","except","Exception","as","inst",":","pass"] | 34 | 41 | null | network_gui.py | gaussian-splatting/gaussian_renderer/network_gui.py | import torch
import traceback
import socket
import json
from scene.cameras import MiniCam | 10 | null | 5 | null | null | 5 | null | Use image node_id 2 for calling a global function with example usage: try_connect() without return types | 104 | node_id 2 | 145,391 |
|
read | global | null | false | null | null | null | null | json | def read():
global conn
messageLength = conn.recv(4)
messageLength = int.from_bytes(messageLength, "little")
message = conn.recv(messageLength)
return json.loads(message.decode("utf-8"))
| ["def","read","(",")",":","global","conn","messageLength","=","conn.recv","(","4",")","messageLength","=","int.from_bytes","(","messageLength",",","``","little","''",")","message","=","conn.recv","(","messageLength",")","return","json.loads","(","message.decode","(","``","utf-8","''",")",")"] | 43 | 48 | null | network_gui.py | gaussian-splatting/gaussian_renderer/network_gui.py | import torch
import traceback
import socket
import json
from scene.cameras import MiniCam | 10 | null | 5 | null | null | 5 | null | Use image node_id 3 for calling a global function with example usage: read() and returns: json | 94 | node_id 3 | 145,392 |
|
send | global | null | false | message_bytes,verify | null | null | null | null | null | def send(message_bytes, verify):
global conn
if message_bytes != None:
conn.sendall(message_bytes)
conn.sendall(len(verify).to_bytes(4, "little"))
conn.sendall(bytes(verify, "ascii"))
| ["def","send","(","message_bytes",",","verify",")",":","global","conn","if","message_bytes","!","=","None",":","conn.sendall","(","message_bytes",")","conn.sendall","(","len","(","verify",")",".to_bytes","(","4",",","``","little","''",")",")","conn.sendall","(","bytes","(","verify",",","``","ascii","''",")",")"] | 50 | 55 | null | network_gui.py | gaussian-splatting/gaussian_renderer/network_gui.py | import torch
import traceback
import socket
import json
from scene.cameras import MiniCam | 10 | null | 5 | null | null | 5 | null | Use image node_id 4 for calling a global function with example usage: send(message_bytes, verify) without return types | 118 | node_id 4 | 145,393 |
receive | global | null | false | null | null | null | null | custom_cam, do_training, do_shs_python, do_rot_scale_python, keep_alive, scaling_modifier,None, None, None, None, None, None | def receive():
message = read()
width = message["resolution_x"]
height = message["resolution_y"]
if width != 0 and height != 0:
try:
do_training = bool(message["train"])
fovy = message["fov_y"]
fovx = message["fov_x"]
znear = message["z_near"]
zfar = message["z_far"]
do_shs_python = bool(message["shs_python"])
do_rot_scale_python = bool(message["rot_scale_python"])
keep_alive = bool(message["keep_alive"])
scaling_modifier = message["scaling_modifier"]
world_view_transform = torch.reshape(
torch.tensor(message["view_matrix"]), (4, 4)
).cuda()
world_view_transform[:, 1] = -world_view_transform[:, 1]
world_view_transform[:, 2] = -world_view_transform[:, 2]
full_proj_transform = torch.reshape(
torch.tensor(message["view_projection_matrix"]),
(4, 4),
).cuda()
full_proj_transform[:, 1] = -full_proj_transform[:, 1]
custom_cam = MiniCam(
width,
height,
fovy,
fovx,
znear,
zfar,
world_view_transform,
full_proj_transform,
)
except Exception as e:
print("")
traceback.print_exc()
raise e
return (
custom_cam,
do_training,
do_shs_python,
do_rot_scale_python,
keep_alive,
scaling_modifier,
)
else:
return None, None, None, None, None, None
| ["def","receive","(",")",":","message","=","read","(",")","width","=","message","[","``","resolution_x","''","]","height","=","message","[","``","resolution_y","''","]","if","width","!","=","0","and","height","!","=","0",":","try",":","do_training","=","bool","(","message","[","``","train","''","]",")","fovy","=","message","[","``","fov_y","''","]","fovx","=","message","[","``","fov_x","''","]","znear","=","message","[","``","z_near","''","]","zfar","=","message","[","``","z_far","''","]","do_shs_python","=","bool","(","message","[","``","shs_python","''","]",")","do_rot_scale_python","=","bool","(","message","[","``","rot_scale_python","''","]",")","keep_alive","=","bool","(","message","[","``","keep_alive","''","]",")","scaling_modifier","=","message","[","``","scaling_modifier","''","]","world_view_transform","=","torch.reshape","(","torch.tensor","(","message","[","``","view_matrix","''","]",")",",","(","4",",","4",")",")",".cuda","(",")","world_view_transform","[",":",",","1","]","=","-world_view_transform","[",":",",","1","]","world_view_transform","[",":",",","2","]","=","-world_view_transform","[",":",",","2","]","full_proj_transform","=","torch.reshape","(","torch.tensor","(","message","[","``","view_projection_matrix","''","]",")",",","(","4",",","4",")",",",")",".cuda","(",")","full_proj_transform","[",":",",","1","]","=","-full_proj_transform","[",":",",","1","]","custom_cam","=","MiniCam","(","width",",","height",",","fovy",",","fovx",",","znear",",","zfar",",","world_view_transform",",","full_proj_transform",",",")","except","Exception","as","e",":","print","(","``","''",")","traceback.print_exc","(",")","raise","e","return","(","custom_cam",",","do_training",",","do_shs_python",",","do_rot_scale_python",",","keep_alive",",","scaling_modifier",",",")","else",":","return","None",",","None",",","None",",","None",",","None",",","None"] | 57 | 86 | null | network_gui.py | gaussian-splatting/gaussian_renderer/network_gui.py | import torch
import traceback
import socket
import json
from scene.cameras import MiniCam | 10 | null | 5 | null | null | 5 | null | Use image node_id 5 for calling a global function with example usage: receive() and returns: custom_cam, do_training, do_shs_python, do_rot_scale_python, keep_alive, scaling_modifier, None, None, None, None, None, None | 228 | node_id 5 | 145,394 |
|
tokenize_prompt | global | null | false | prompt,neg_prompt | null | null | null | null | prompt_ids, neg_prompt_ids | def tokenize_prompt(prompt, neg_prompt):
prompt_ids = pipeline.prepare_inputs(prompt)
neg_prompt_ids = pipeline.prepare_inputs(neg_prompt)
return prompt_ids, neg_prompt_ids
| ["def","tokenize_prompt","(","prompt",",","neg_prompt",")",":","prompt_ids","=","pipeline.prepare_inputs","(","prompt",")","neg_prompt_ids","=","pipeline.prepare_inputs","(","neg_prompt",")","return","prompt_ids",",","neg_prompt_ids"] | 44 | 47 | null | sdxl_single.py | llmga/llmga/diffusers/examples/research_projects/sdxl_flax/sdxl_single.py | import time
import jax
import jax.numpy
import numpy
from flax.jax_utils import replicate
from jax.experimental.compilation_cache import compilation_cache
from diffusers import FlaxStableDiffusionXLPipeline | 10 | null | 7 | null | null | 3 | null | Use image node_id 1 for calling a global function with example usage: tokenize_prompt(prompt, neg_prompt) and returns: prompt_ids, neg_prompt_ids | 146 | node_id 1 | 168,818 |
prepare_ptq_linear | global | null | false | qconfig | null | null | null | null | prepared_model | def prepare_ptq_linear(qconfig):
qconfig_dict = {"object_type": [(torch.nn.Linear, qconfig)]}
prepared_model = prepare_qat_fx(
copy.deepcopy(float_model), qconfig_dict
) # fuse modules and insert observers
calibrate(
prepared_model, data_loader_test
) # run calibration on sample data
return prepared_model
| ["def","prepare_ptq_linear","(","qconfig",")",":","qconfig_dict","=","{","``","object_type","''",":","[","(","torch.nn.Linear",",","qconfig",")","]","}","prepared_model","=","prepare_qat_fx","(","copy.deepcopy","(","float_model",")",",","qconfig_dict",")","#","fuse","modules","and","insert","observers","calibrate","(","prepared_model",",","data_loader_test",")","#","run","calibration","on","sample","data","return","prepared_model"] | 57 | 61 | null | apot_fx_graph_mode_ptq.py | pytorch/test/quantization/core/experimental/apot_fx_graph_mode_ptq.py | import torch
import torch.nn
import torch.ao.quantization
from torchvision.models.quantization.resnet import resnet18
from torch.ao.quantization.experimental.quantization_helper import evaluate, prepare_data_loaders
import copy
from torch.ao.quantization.quantize_fx import prepare_qat_fx
from torch.ao.quantization.experimental.qconfig import uniform_qconfig_8bit, apot_weights_qconfig_8bit, apot_qconfig_8bit, uniform_qconfig_4bit, apot_weights_qconfig_4bit, apot_qconfig_4bit | 10 | null | 8 | null | null | 2 | null | Use image node_id 2 for calling a global function with example usage: prepare_ptq_linear(qconfig) and returns: prepared_model | 125 | node_id 2 | 291,215 |
assign_to_checkpoint | global | null | false | paths,checkpoint,old_checkpoint,attention_paths_to_split,additional_replacements,config | null | null | null | null | null | def assign_to_checkpoint(
paths,
checkpoint,
old_checkpoint,
attention_paths_to_split=None,
additional_replacements=None,
config=None,
):
assert isinstance(
paths, list
), "Paths should be a list of dicts containing 'old' and 'new' keys."
if attention_paths_to_split is not None:
if config is None:
raise ValueError(
"Please specify the config if setting 'attention_paths_to_split' to 'True'."
)
for path, path_map in attention_paths_to_split.items():
old_tensor = old_checkpoint[path]
channels = old_tensor.shape[0] // 3
target_shape = (
(-1, channels) if len(old_tensor.shape) == 3 else (-1)
)
num_heads = (
old_tensor.shape[0]
// config.get("num_head_channels", 1)
// 3
)
old_tensor = old_tensor.reshape(
(num_heads, 3 * channels // num_heads)
+ old_tensor.shape[1:]
)
query, key, value = old_tensor.split(
channels // num_heads, dim=1
)
checkpoint[path_map["query"]] = query.reshape(
target_shape
).squeeze()
checkpoint[path_map["key"]] = key.reshape(
target_shape
).squeeze()
checkpoint[path_map["value"]] = value.reshape(
target_shape
).squeeze()
for path in paths:
new_path = path["new"]
if (
attention_paths_to_split is not None
and new_path in attention_paths_to_split
):
continue
new_path = new_path.replace("down.", "down_blocks.")
new_path = new_path.replace("up.", "up_blocks.")
if additional_replacements is not None:
for replacement in additional_replacements:
new_path = new_path.replace(
replacement["old"], replacement["new"]
)
if "attentions" in new_path:
checkpoint[new_path] = old_checkpoint[
path["old"]
].squeeze()
else:
checkpoint[new_path] = old_checkpoint[path["old"]]
| ["def","assign_to_checkpoint","(","paths",",","checkpoint",",","old_checkpoint",",","attention_paths_to_split=None",",","additional_replacements=None",",","config=None",",",")",":","assert","isinstance","(","paths",",","list",")",",","``","Paths","should","be","a","list","of","dicts","containing","'old","'","and","'new","'","keys",".","''","if","attention_paths_to_split","is","not","None",":","if","config","is","None",":","raise","ValueError","(","``","Please","specify","the","config","if","setting","'attention_paths_to_split","'","to","'True","'",".","''",")","for","path",",","path_map","in","attention_paths_to_split.items","(",")",":","old_tensor","=","old_checkpoint","[","path","]","channels","=","old_tensor.shape","[","0","]","\/\/","3","target_shape","=","(","(","-1",",","channels",")","if","len","(","old_tensor.shape",")","==","3","else","(","-1",")",")","num_heads","=","(","old_tensor.shape","[","0","]","\/\/","config.get","(","``","num_head_channels","''",",","1",")","\/\/","3",")","old_tensor","=","old_tensor.reshape","(","(","num_heads",",","3","*","channels","\/\/","num_heads",")","+","old_tensor.shape","[","1",":","]",")","query",",","key",",","value","=","old_tensor.split","(","channels","\/\/","num_heads",",","dim=1",")","checkpoint","[","path_map","[","``","query","''","]","]","=","query.reshape","(","target_shape",")",".squeeze","(",")","checkpoint","[","path_map","[","``","key","''","]","]","=","key.reshape","(","target_shape",")",".squeeze","(",")","checkpoint","[","path_map","[","``","value","''","]","]","=","value.reshape","(","target_shape",")",".squeeze","(",")","for","path","in","paths",":","new_path","=","path","[","``","new","''","]","if","(","attention_paths_to_split","is","not","None","and","new_path","in","attention_paths_to_split",")",":","continue","new_path","=","new_path.replace","(","``","down",".","``",",","``","down_blocks",".","''",")","new_path","=","new_path.replace","(","``","up",".","``",",","``","up_blocks",".","''",")","if
","additional_replacements","is","not","None",":","for","replacement","in","additional_replacements",":","new_path","=","new_path.replace","(","replacement","[","``","old","''","]",",","replacement","[","``","new","''","]",")","if","``","attentions","''","in","new_path",":","checkpoint","[","new_path","]","=","old_checkpoint","[","path","[","``","old","''","]","]",".squeeze","(",")","else",":","checkpoint","[","new_path","]","=","old_checkpoint","[","path","[","``","old","''","]","]"] | 56 | 96 | null | convert_ddpm_original_checkpoint_to_diffusers.py | llmga/llmga/diffusers/scripts/convert_ddpm_original_checkpoint_to_diffusers.py | import argparse
import json
import torch
from diffusers import AutoencoderKL, DDPMPipeline, DDPMScheduler, UNet2DModel, VQModel | 10 | null | 4 | null | null | 6 | null | Use image node_id 4 for calling a global function with example usage: assign_to_checkpoint(paths, checkpoint, old_checkpoint, attention_paths_to_split, additional_replacements, config) without return types | 205 | node_id 4 | 169,072 |
convert_vq_autoenc_checkpoint | global | null | false | checkpoint,config | null | null | null | null | new_checkpoint | def convert_vq_autoenc_checkpoint(checkpoint, config):
"""
Takes a state dict and a config, and returns a converted checkpoint.
"""
new_checkpoint = {}
new_checkpoint["encoder.conv_norm_out.weight"] = checkpoint[
"encoder.norm_out.weight"
]
new_checkpoint["encoder.conv_norm_out.bias"] = checkpoint[
"encoder.norm_out.bias"
]
new_checkpoint["encoder.conv_in.weight"] = checkpoint[
"encoder.conv_in.weight"
]
new_checkpoint["encoder.conv_in.bias"] = checkpoint[
"encoder.conv_in.bias"
]
new_checkpoint["encoder.conv_out.weight"] = checkpoint[
"encoder.conv_out.weight"
]
new_checkpoint["encoder.conv_out.bias"] = checkpoint[
"encoder.conv_out.bias"
]
new_checkpoint["decoder.conv_norm_out.weight"] = checkpoint[
"decoder.norm_out.weight"
]
new_checkpoint["decoder.conv_norm_out.bias"] = checkpoint[
"decoder.norm_out.bias"
]
new_checkpoint["decoder.conv_in.weight"] = checkpoint[
"decoder.conv_in.weight"
]
new_checkpoint["decoder.conv_in.bias"] = checkpoint[
"decoder.conv_in.bias"
]
new_checkpoint["decoder.conv_out.weight"] = checkpoint[
"decoder.conv_out.weight"
]
new_checkpoint["decoder.conv_out.bias"] = checkpoint[
"decoder.conv_out.bias"
]
num_down_blocks = len(
{
".".join(layer.split(".")[:3])
for layer in checkpoint
if "down" in layer
}
)
down_blocks = {
layer_id: [
key for key in checkpoint if f"down.{layer_id}" in key
]
for layer_id in range(num_down_blocks)
}
num_up_blocks = len(
{
".".join(layer.split(".")[:3])
for layer in checkpoint
if "up" in layer
}
)
up_blocks = {
layer_id: [
key for key in checkpoint if f"up.{layer_id}" in key
]
for layer_id in range(num_up_blocks)
}
for i in range(num_down_blocks):
block_id = (i - 1) // (config["layers_per_block"] + 1)
if any("downsample" in layer for layer in down_blocks[i]):
new_checkpoint[
f"encoder.down_blocks.{i}.downsamplers.0.conv.weight"
] = checkpoint[f"encoder.down.{i}.downsample.conv.weight"]
new_checkpoint[
f"encoder.down_blocks.{i}.downsamplers.0.conv.bias"
] = checkpoint[f"encoder.down.{i}.downsample.conv.bias"]
if any("block" in layer for layer in down_blocks[i]):
num_blocks = len(
{
".".join(shave_segments(layer, 3).split(".")[:3])
for layer in down_blocks[i]
if "block" in layer
}
)
blocks = {
layer_id: [
key
for key in down_blocks[i]
if f"block.{layer_id}" in key
]
for layer_id in range(num_blocks)
}
if num_blocks > 0:
for j in range(config["layers_per_block"]):
paths = renew_resnet_paths(blocks[j])
assign_to_checkpoint(
paths, new_checkpoint, checkpoint
)
if any("attn" in layer for layer in down_blocks[i]):
num_attn = len(
{
".".join(shave_segments(layer, 3).split(".")[:3])
for layer in down_blocks[i]
if "attn" in layer
}
)
attns = {
layer_id: [
key
for key in down_blocks[i]
if f"attn.{layer_id}" in key
]
for layer_id in range(num_blocks)
}
if num_attn > 0:
for j in range(config["layers_per_block"]):
paths = renew_attention_paths(attns[j])
assign_to_checkpoint(
paths,
new_checkpoint,
checkpoint,
config=config,
)
mid_block_1_layers = [
key for key in checkpoint if "mid.block_1" in key
]
mid_block_2_layers = [
key for key in checkpoint if "mid.block_2" in key
]
mid_attn_1_layers = [
key for key in checkpoint if "mid.attn_1" in key
]
# Mid new 2
paths = renew_resnet_paths(mid_block_1_layers)
assign_to_checkpoint(
paths,
new_checkpoint,
checkpoint,
additional_replacements=[
{"old": "mid.", "new": "mid_new_2."},
{"old": "block_1", "new": "resnets.0"},
],
)
paths = renew_resnet_paths(mid_block_2_layers)
assign_to_checkpoint(
paths,
new_checkpoint,
checkpoint,
additional_replacements=[
{"old": "mid.", "new": "mid_new_2."},
{"old": "block_2", "new": "resnets.1"},
],
)
paths = renew_attention_paths(mid_attn_1_layers, in_mid=True)
assign_to_checkpoint(
paths,
new_checkpoint,
checkpoint,
additional_replacements=[
{"old": "mid.", "new": "mid_new_2."},
{"old": "attn_1", "new": "attentions.0"},
],
)
for i in range(num_up_blocks):
block_id = num_up_blocks - 1 - i
if any("upsample" in layer for layer in up_blocks[i]):
new_checkpoint[
f"decoder.up_blocks.{block_id}.upsamplers.0.conv.weight"
] = checkpoint[f"decoder.up.{i}.upsample.conv.weight"]
new_checkpoint[
f"decoder.up_blocks.{block_id}.upsamplers.0.conv.bias"
] = checkpoint[f"decoder.up.{i}.upsample.conv.bias"]
if any("block" in layer for layer in up_blocks[i]):
num_blocks = len(
{
".".join(shave_segments(layer, 3).split(".")[:3])
for layer in up_blocks[i]
if "block" in layer
}
)
blocks = {
layer_id: [
key
for key in up_blocks[i]
if f"block.{layer_id}" in key
]
for layer_id in range(num_blocks)
}
if num_blocks > 0:
for j in range(config["layers_per_block"] + 1):
replace_indices = {
"old": f"up_blocks.{i}",
"new": f"up_blocks.{block_id}",
}
paths = renew_resnet_paths(blocks[j])
assign_to_checkpoint(
paths,
new_checkpoint,
checkpoint,
additional_replacements=[replace_indices],
)
if any("attn" in layer for layer in up_blocks[i]):
num_attn = len(
{
".".join(shave_segments(layer, 3).split(".")[:3])
for layer in up_blocks[i]
if "attn" in layer
}
)
attns = {
layer_id: [
key
for key in up_blocks[i]
if f"attn.{layer_id}" in key
]
for layer_id in range(num_blocks)
}
if num_attn > 0:
for j in range(config["layers_per_block"] + 1):
replace_indices = {
"old": f"up_blocks.{i}",
"new": f"up_blocks.{block_id}",
}
paths = renew_attention_paths(attns[j])
assign_to_checkpoint(
paths,
new_checkpoint,
checkpoint,
additional_replacements=[replace_indices],
)
new_checkpoint = {
k.replace("mid_new_2", "mid_block"): v
for k, v in new_checkpoint.items()
}
new_checkpoint["quant_conv.weight"] = checkpoint[
"quant_conv.weight"
]
new_checkpoint["quant_conv.bias"] = checkpoint["quant_conv.bias"]
if "quantize.embedding.weight" in checkpoint:
new_checkpoint["quantize.embedding.weight"] = checkpoint[
"quantize.embedding.weight"
]
new_checkpoint["post_quant_conv.weight"] = checkpoint[
"post_quant_conv.weight"
]
new_checkpoint["post_quant_conv.bias"] = checkpoint[
"post_quant_conv.bias"
]
return new_checkpoint
| ["def","convert_vq_autoenc_checkpoint","(","checkpoint",",","config",")",":","``","''","''","Takes","a","state","dict","and","a","config",",","and","returns","a","converted","checkpoint.","``","''","''","new_checkpoint","=","{","}","new_checkpoint","[","``","encoder.conv_norm_out.weight","''","]","=","checkpoint","[","``","encoder.norm_out.weight","''","]","new_checkpoint","[","``","encoder.conv_norm_out.bias","''","]","=","checkpoint","[","``","encoder.norm_out.bias","''","]","new_checkpoint","[","``","encoder.conv_in.weight","''","]","=","checkpoint","[","``","encoder.conv_in.weight","''","]","new_checkpoint","[","``","encoder.conv_in.bias","''","]","=","checkpoint","[","``","encoder.conv_in.bias","''","]","new_checkpoint","[","``","encoder.conv_out.weight","''","]","=","checkpoint","[","``","encoder.conv_out.weight","''","]","new_checkpoint","[","``","encoder.conv_out.bias","''","]","=","checkpoint","[","``","encoder.conv_out.bias","''","]","new_checkpoint","[","``","decoder.conv_norm_out.weight","''","]","=","checkpoint","[","``","decoder.norm_out.weight","''","]","new_checkpoint","[","``","decoder.conv_norm_out.bias","''","]","=","checkpoint","[","``","decoder.norm_out.bias","''","]","new_checkpoint","[","``","decoder.conv_in.weight","''","]","=","checkpoint","[","``","decoder.conv_in.weight","''","]","new_checkpoint","[","``","decoder.conv_in.bias","''","]","=","checkpoint","[","``","decoder.conv_in.bias","''","]","new_checkpoint","[","``","decoder.conv_out.weight","''","]","=","checkpoint","[","``","decoder.conv_out.weight","''","]","new_checkpoint","[","``","decoder.conv_out.bias","''","]","=","checkpoint","[","``","decoder.conv_out.bias","''","]","num_down_blocks","=","len","(","{","``",".","''",".join","(","layer.split","(","``",".","``",")","[",":3","]",")","for","layer","in","checkpoint","if","``","down","''","in","layer","}",")","down_blocks","=","{","layer_id",":","[","key","for","key","in","checkpoint","if","f","''","down",".","{","layer_id","}","''
","in","key","]","for","layer_id","in","range","(","num_down_blocks",")","}","num_up_blocks","=","len","(","{","``",".","''",".join","(","layer.split","(","``",".","``",")","[",":3","]",")","for","layer","in","checkpoint","if","``","up","''","in","layer","}",")","up_blocks","=","{","layer_id",":","[","key","for","key","in","checkpoint","if","f","''","up",".","{","layer_id","}","''","in","key","]","for","layer_id","in","range","(","num_up_blocks",")","}","for","i","in","range","(","num_down_blocks",")",":","block_id","=","(","i","-","1",")","\/\/","(","config","[","``","layers_per_block","''","]","+","1",")","if","any","(","``","downsample","''","in","layer","for","layer","in","down_blocks","[","i","]",")",":","new_checkpoint","[","f","''","encoder.down_blocks",".","{","i","}",".downsamplers.0.conv.weight","''","]","=","checkpoint","[","f","''","encoder.down",".","{","i","}",".downsample.conv.weight","''","]","new_checkpoint","[","f","''","encoder.down_blocks",".","{","i","}",".downsamplers.0.conv.bias","''","]","=","checkpoint","[","f","''","encoder.down",".","{","i","}",".downsample.conv.bias","''","]","if","any","(","``","block","''","in","layer","for","layer","in","down_blocks","[","i","]",")",":","num_blocks","=","len","(","{","``",".","``",".join","(","shave_segments","(","layer",",","3",")",".split","(","``",".","``",")","[",":3","]",")","for","layer","in","down_blocks","[","i","]","if","``","block","''","in","layer","}",")","blocks","=","{","layer_id",":","[","key","for","key","in","down_blocks","[","i","]","if","f","''","block",".","{","layer_id","}","''","in","key","]","for","layer_id","in","range","(","num_blocks",")","}","if","num_blocks",">","0",":","for","j","in","range","(","config","[","``","layers_per_block","''","]",")",":","paths","=","renew_resnet_paths","(","blocks","[","j","]",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",")","if","any","(","``","attn","''","in","layer","for","layer","in","down_blocks","[","i","]",
")",":","num_attn","=","len","(","{","``",".","``",".join","(","shave_segments","(","layer",",","3",")",".split","(","``",".","``",")","[",":3","]",")","for","layer","in","down_blocks","[","i","]","if","``","attn","''","in","layer","}",")","attns","=","{","layer_id",":","[","key","for","key","in","down_blocks","[","i","]","if","f","''","attn",".","{","layer_id","}","''","in","key","]","for","layer_id","in","range","(","num_blocks",")","}","if","num_attn",">","0",":","for","j","in","range","(","config","[","``","layers_per_block","''","]",")",":","paths","=","renew_attention_paths","(","attns","[","j","]",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",",","config=config",",",")","mid_block_1_layers","=","[","key","for","key","in","checkpoint","if","``","mid.block_1","''","in","key","]","mid_block_2_layers","=","[","key","for","key","in","checkpoint","if","``","mid.block_2","''","in","key","]","mid_attn_1_layers","=","[","key","for","key","in","checkpoint","if","``","mid.attn_1","''","in","key","]","#","Mid","new","2","paths","=","renew_resnet_paths","(","mid_block_1_layers",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",",","additional_replacements=","[","{","``","old","''",":","``","mid",".","``",",","``","new","''",":","``","mid_new_2",".","``","}",",","{","``","old","''",":","``","block_1","''",",","``","new","''",":","``","resnets.0","''","}",",","]",",",")","paths","=","renew_resnet_paths","(","mid_block_2_layers",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",",","additional_replacements=","[","{","``","old","''",":","``","mid",".","``",",","``","new","''",":","``","mid_new_2",".","``","}",",","{","``","old","''",":","``","block_2","''",",","``","new","''",":","``","resnets.1","''","}",",","]",",",")","paths","=","renew_attention_paths","(","mid_attn_1_layers",",","in_mid=True",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",",","additional_rep
lacements=","[","{","``","old","''",":","``","mid",".","``",",","``","new","''",":","``","mid_new_2",".","``","}",",","{","``","old","''",":","``","attn_1","''",",","``","new","''",":","``","attentions.0","''","}",",","]",",",")","for","i","in","range","(","num_up_blocks",")",":","block_id","=","num_up_blocks","-","1","-","i","if","any","(","``","upsample","''","in","layer","for","layer","in","up_blocks","[","i","]",")",":","new_checkpoint","[","f","''","decoder.up_blocks",".","{","block_id","}",".upsamplers.0.conv.weight","''","]","=","checkpoint","[","f","''","decoder.up",".","{","i","}",".upsample.conv.weight","''","]","new_checkpoint","[","f","''","decoder.up_blocks",".","{","block_id","}",".upsamplers.0.conv.bias","''","]","=","checkpoint","[","f","''","decoder.up",".","{","i","}",".upsample.conv.bias","''","]","if","any","(","``","block","''","in","layer","for","layer","in","up_blocks","[","i","]",")",":","num_blocks","=","len","(","{","``",".","``",".join","(","shave_segments","(","layer",",","3",")",".split","(","``",".","``",")","[",":3","]",")","for","layer","in","up_blocks","[","i","]","if","``","block","''","in","layer","}",")","blocks","=","{","layer_id",":","[","key","for","key","in","up_blocks","[","i","]","if","f","''","block",".","{","layer_id","}","''","in","key","]","for","layer_id","in","range","(","num_blocks",")","}","if","num_blocks",">","0",":","for","j","in","range","(","config","[","``","layers_per_block","''","]","+","1",")",":","replace_indices","=","{","``","old","''",":","f","''","up_blocks",".","{","i","}","''",",","``","new","''",":","f","''","up_blocks",".","{","block_id","}","''",",","}","paths","=","renew_resnet_paths","(","blocks","[","j","]",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",",","additional_replacements=","[","replace_indices","]",",",")","if","any","(","``","attn","''","in","layer","for","layer","in","up_blocks","[","i","]",")",":","num_attn","=","len","(","{","``",".","``",".join","(",
"shave_segments","(","layer",",","3",")",".split","(","``",".","``",")","[",":3","]",")","for","layer","in","up_blocks","[","i","]","if","``","attn","''","in","layer","}",")","attns","=","{","layer_id",":","[","key","for","key","in","up_blocks","[","i","]","if","f","''","attn",".","{","layer_id","}","''","in","key","]","for","layer_id","in","range","(","num_blocks",")","}","if","num_attn",">","0",":","for","j","in","range","(","config","[","``","layers_per_block","''","]","+","1",")",":","replace_indices","=","{","``","old","''",":","f","''","up_blocks",".","{","i","}","''",",","``","new","''",":","f","''","up_blocks",".","{","block_id","}","''",",","}","paths","=","renew_attention_paths","(","attns","[","j","]",")","assign_to_checkpoint","(","paths",",","new_checkpoint",",","checkpoint",",","additional_replacements=","[","replace_indices","]",",",")","new_checkpoint","=","{","k.replace","(","``","mid_new_2","''",",","``","mid_block","''",")",":","v","for","k",",","v","in","new_checkpoint.items","(",")","}","new_checkpoint","[","``","quant_conv.weight","''","]","=","checkpoint","[","``","quant_conv.weight","''","]","new_checkpoint","[","``","quant_conv.bias","''","]","=","checkpoint","[","``","quant_conv.bias","''","]","if","``","quantize.embedding.weight","''","in","checkpoint",":","new_checkpoint","[","``","quantize.embedding.weight","''","]","=","checkpoint","[","``","quantize.embedding.weight","''","]","new_checkpoint","[","``","post_quant_conv.weight","''","]","=","checkpoint","[","``","post_quant_conv.weight","''","]","new_checkpoint","[","``","post_quant_conv.bias","''","]","=","checkpoint","[","``","post_quant_conv.bias","''","]","return","new_checkpoint"] | 235 | 380 | null | convert_ddpm_original_checkpoint_to_diffusers.py | llmga/llmga/diffusers/scripts/convert_ddpm_original_checkpoint_to_diffusers.py | import argparse
import json
import torch
from diffusers import AutoencoderKL, DDPMPipeline, DDPMScheduler, UNet2DModel, VQModel | 10 | null | 4 | null | null | 6 | null | Use image node_id 6 for calling a global function with example usage: convert_vq_autoenc_checkpoint(checkpoint, config) and returns: new_checkpoint | 147 | node_id 6 | 169,074 |
TableInCStart | global | null | false | builder | null | null | null | null | null | def TableInCStart(builder):
builder.StartObject(2)
| ["def","TableInCStart","(","builder",")",":","builder.StartObject","(","2",")"] | 36 | 36 | null | TableInC.py | flatbuffers/java/src/test/java/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 1 for calling a global function with example usage: TableInCStart(builder) without return types | 113 | node_id 1 | 142,893 |
TableInCAddReferToA1 | global | null | false | builder,referToA1 | null | null | null | null | null | def TableInCAddReferToA1(builder, referToA1):
builder.PrependUOffsetTRelativeSlot(
0,
flatbuffers.number_types.UOffsetTFlags.py_type(referToA1),
0,
)
| ["def","TableInCAddReferToA1","(","builder",",","referToA1",")",":","builder.PrependUOffsetTRelativeSlot","(","0",",","flatbuffers.number_types.UOffsetTFlags.py_type","(","referToA1",")",",","0",",",")"] | 37 | 37 | null | TableInC.py | flatbuffers/java/src/test/java/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 2 for calling a global function with example usage: TableInCAddReferToA1(builder, referToA1) without return types | 131 | node_id 2 | 142,894 |
TableInCAddReferToA2 | global | null | false | builder,referToA2 | null | null | null | null | null | def TableInCAddReferToA2(builder, referToA2):
builder.PrependUOffsetTRelativeSlot(
1,
flatbuffers.number_types.UOffsetTFlags.py_type(referToA2),
0,
)
| ["def","TableInCAddReferToA2","(","builder",",","referToA2",")",":","builder.PrependUOffsetTRelativeSlot","(","1",",","flatbuffers.number_types.UOffsetTFlags.py_type","(","referToA2",")",",","0",",",")"] | 38 | 38 | null | TableInC.py | flatbuffers/java/src/test/java/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 3 for calling a global function with example usage: TableInCAddReferToA2(builder, referToA2) without return types | 131 | node_id 3 | 142,895 |
TableInCEnd | global | null | false | builder | null | null | null | null | builder | def TableInCEnd(builder):
return builder.EndObject()
| ["def","TableInCEnd","(","builder",")",":","return","builder.EndObject","(",")"] | 39 | 39 | null | TableInC.py | flatbuffers/java/src/test/java/NamespaceA/TableInC.py | import flatbuffers | 10 | null | 1 | null | null | 4 | null | Use image node_id 4 for calling a global function with example usage: TableInCEnd(builder) and returns: builder | 111 | node_id 4 | 142,896 |
test_distinfo_license | TestLicense | TestCase | true | self | null | null | If run when pytorch is installed via a wheel, the license will be in
site-package/torch-*dist-info/LICENSE. Make sure it contains the third
party bundle of licenses | ["If","run","when","pytorch","is","installed","via","a","wheel",",","the","license","will","be","in","site-package\/torch-","*","dist-info\/LICENSE",".","Make","sure","it","contains","the","third","party","bundle","of","licenses"] | null | def test_distinfo_license(self):
"""If run when pytorch is installed via a wheel, the license will be in
site-package/torch-*dist-info/LICENSE. Make sure it contains the third
party bundle of licenses"""
if len(distinfo) > 1:
raise AssertionError(
'Found too many "torch-*dist-info" directories '
f'in "{site_packages}, expected only one'
)
with open(
os.path.join(os.path.join(distinfo[0], "LICENSE"))
) as fid:
txt = fid.read()
self.assertTrue(starting_txt in txt)
| ["def","test_distinfo_license","(","self",")",":","``","''","''","If","run","when","pytorch","is","installed","via","a","wheel",",","the","license","will","be","in","site-package\/torch-","*","dist-info\/LICENSE",".","Make","sure","it","contains","the","third","party","bundle","of","licenses","''","''","''","if","len","(","distinfo",")",">","1",":","raise","AssertionError","(","'Found","too","many","``","torch-","*","dist-info","''","directories","'","f'in","``","{","site_packages","}",",","expected","only","one'",")","with","open","(","os.path.join","(","os.path.join","(","distinfo","[","0","]",",","``","LICENSE","''",")",")",")","as","fid",":","txt","=","fid.read","(",")","self.assertTrue","(","starting_txt","in","txt",")"] | 37 | 47 | null | test_license.py | pytorch/test/test_license.py | import glob
import io
import os
import unittest
import torch
from torch.testing._internal.common_utils import TestCase, run_tests | 10 | 1 | 6 | 1 | 1 | 0 | 2 | Use image node_id 2 for calling the TestLicense obj's underlying member method code with example usage: obj.test_distinfo_license() without return types | 152 | node_id 2 | 296,344 |
test_license_for_wheel | TestLicense | TestCase | true | self | null | null | null | null | null | def test_license_for_wheel(self):
current = io.StringIO()
create_bundled("third_party", current)
with open(license_file) as fid:
src_tree = fid.read()
if not src_tree == current.getvalue():
raise AssertionError(
f'the contents of "{license_file}" do not '
"match the current state of the third_party files. Use "
'"python third_party/build_bundled.py" to regenerate it'
)
| ["def","test_license_for_wheel","(","self",")",":","current","=","io.StringIO","(",")","create_bundled","(","``","third_party","''",",","current",")","with","open","(","license_file",")","as","fid",":","src_tree","=","fid.read","(",")","if","not","src_tree","==","current.getvalue","(",")",":","raise","AssertionError","(","f'the","contents","of","``","{","license_file","}","''","do","not","'","``","match","the","current","state","of","the","third_party","files",".","Use","``","'","''","python","third_party\/build_bundled.py","''","to","regenerate","it'",")"] | 25 | 34 | null | test_license.py | pytorch/test/test_license.py | import glob
import io
import os
import unittest
import torch
from torch.testing._internal.common_utils import TestCase, run_tests | 10 | 1 | 6 | 1 | 1 | 0 | 2 | Use image node_id 1 for calling the TestLicense obj's underlying member method code with example usage: obj.test_license_for_wheel() without return types | 153 | node_id 1 | 296,343 |
test_slow_tasks | TestFunctionalAutogradBenchmark | TestCase | true | self | null | null | null | null | null | def test_slow_tasks(self):
slow_tasks = ["fcn_resnet", "detr"]
# deepspeech is voluntarily excluded as it takes too long to run without
# proper tuning of the number of threads it should use.
for task in slow_tasks:
# Disable GPU for slow test as the CI GPU don't have enough memory
self._test_runner(task, disable_gpu=True)
| ["def","test_slow_tasks","(","self",")",":","slow_tasks","=","[","``","fcn_resnet","''",",","``","detr","''","]","#","deepspeech","is","voluntarily","excluded","as","it","takes","too","long","to","run","without","#","proper","tuning","of","the","number","of","threads","it","should","use",".","for","task","in","slow_tasks",":","#","Disable","GPU","for","slow","test","as","the","CI","GPU","do","n't","have","enough","memory","self._test_runner","(","task",",","disable_gpu=True",")"] | 52 | 59 | null | test_functional_autograd_benchmark.py | pytorch/test/test_functional_autograd_benchmark.py | from torch.testing._internal.common_utils import TestCase, run_tests, slowTest, IS_WINDOWS
import subprocess
import tempfile
import os
import unittest | 10 | 1 | 5 | 1 | 1 | 0 | 3 | Use image node_id 3 for calling the TestFunctionalAutogradBenchmark obj's underlying member method code with example usage: obj.test_slow_tasks() without return types | 166 | node_id 3 | 294,576 |