| language | repo | path | class_span | source | target |
|---|---|---|---|---|---|
python
|
run-llama__llama_index
|
llama-index-core/tests/agent/workflow/test_events.py
|
{
"start": 1202,
"end": 1273
}
|
class ____(BaseModel):
operation: str
result: str
|
WrongMathResult
|
python
|
kamyu104__LeetCode-Solutions
|
Python/steps-to-make-array-non-decreasing.py
|
{
"start": 46,
"end": 539
}
|
class ____(object):
def totalSteps(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
dp = [0]*len(nums) # dp[i]: number of rounds for nums[i] to remove all the covered elements
stk = []
for i in reversed(xrange(len(nums))):
while stk and nums[stk[-1]] < nums[i]:
dp[i] = max(dp[i]+1, dp[stk.pop()])
stk.append(i)
return max(dp)
# Time: O(n)
# Space: O(n)
# mono stack, dp
|
Solution
|
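The `source` above is Python 2 (`xrange`). A minimal Python 3 sketch of the same monotonic-stack DP, checked against the published LeetCode 2289 example (the function name here is illustrative):

```python
def total_steps(nums):
    # dp[i]: number of rounds for nums[i] to remove all elements it covers
    dp = [0] * len(nums)
    stk = []
    for i in reversed(range(len(nums))):
        while stk and nums[stk[-1]] < nums[i]:
            dp[i] = max(dp[i] + 1, dp[stk.pop()])
        stk.append(i)
    return max(dp)

print(total_steps([5, 3, 4, 4, 7, 3, 6, 11, 8, 5, 11]))  # 3
```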
python
|
pydantic__pydantic
|
pydantic/v1/errors.py
|
{
"start": 7308,
"end": 7403
}
|
class ____(PydanticTypeError):
msg_template = 'value is not a valid frozenset'
|
FrozenSetError
|
python
|
prompt-toolkit__python-prompt-toolkit
|
src/prompt_toolkit/shortcuts/choice_input.py
|
{
"start": 1224,
"end": 10523
}
|
class ____(Generic[_T]):
"""
Input selection prompt. Ask the user to choose among a set of options.
Example usage::
input_selection = ChoiceInput(
message="Please select a dish:",
options=[
("pizza", "Pizza with mushrooms"),
("salad", "Salad with tomatoes"),
("sushi", "Sushi"),
],
default="pizza",
)
result = input_selection.prompt()
:param message: Plain text or formatted text to be shown before the options.
:param options: Sequence of ``(value, label)`` tuples. The labels can be
formatted text.
:param default: Default value. If none is given, the first option is
considered the default.
:param mouse_support: Enable mouse support.
:param style: :class:`.Style` instance for the color scheme.
:param symbol: Symbol to be displayed in front of the selected choice.
:param bottom_toolbar: Formatted text or callable that returns formatted
text to be displayed at the bottom of the screen.
:param show_frame: `bool` or
:class:`~prompt_toolkit.filters.Filter`. When True, surround the input
with a frame.
:param enable_interrupt: `bool` or
:class:`~prompt_toolkit.filters.Filter`. When True, raise
the ``interrupt_exception`` (``KeyboardInterrupt`` by default) when
control-c has been pressed.
:param interrupt_exception: The exception type that will be raised when
there is a keyboard interrupt (control-c keypress).
"""
def __init__(
self,
*,
message: AnyFormattedText,
options: Sequence[tuple[_T, AnyFormattedText]],
default: _T | None = None,
mouse_support: bool = False,
style: BaseStyle | None = None,
symbol: str = ">",
bottom_toolbar: AnyFormattedText = None,
show_frame: FilterOrBool = False,
enable_suspend: FilterOrBool = False,
enable_interrupt: FilterOrBool = True,
interrupt_exception: type[BaseException] = KeyboardInterrupt,
key_bindings: KeyBindingsBase | None = None,
) -> None:
if style is None:
style = create_default_choice_input_style()
self.message = message
self.default = default
self.options = options
self.mouse_support = mouse_support
self.style = style
self.symbol = symbol
self.show_frame = show_frame
self.enable_suspend = enable_suspend
self.interrupt_exception = interrupt_exception
self.enable_interrupt = enable_interrupt
self.bottom_toolbar = bottom_toolbar
self.key_bindings = key_bindings
def _create_application(self) -> Application[_T]:
radio_list = RadioList(
values=self.options,
default=self.default,
select_on_focus=True,
open_character="",
select_character=self.symbol,
close_character="",
show_cursor=False,
show_numbers=True,
container_style="class:input-selection",
default_style="class:option",
selected_style="",
checked_style="class:selected-option",
number_style="class:number",
show_scrollbar=False,
)
container: AnyContainer = HSplit(
[
Box(
Label(text=self.message, dont_extend_height=True),
padding_top=0,
padding_left=1,
padding_right=1,
padding_bottom=0,
),
Box(
radio_list,
padding_top=0,
padding_left=3,
padding_right=1,
padding_bottom=0,
),
]
)
@Condition
def show_frame_filter() -> bool:
return to_filter(self.show_frame)()
show_bottom_toolbar = (
Condition(lambda: self.bottom_toolbar is not None)
& ~is_done
& renderer_height_is_known
)
container = ConditionalContainer(
Frame(container),
alternative_content=container,
filter=show_frame_filter,
)
bottom_toolbar = ConditionalContainer(
Window(
FormattedTextControl(
lambda: self.bottom_toolbar, style="class:bottom-toolbar.text"
),
style="class:bottom-toolbar",
dont_extend_height=True,
height=Dimension(min=1),
),
filter=show_bottom_toolbar,
)
layout = Layout(
HSplit(
[
container,
# Add an empty window between the selection input and the
# bottom toolbar, if the bottom toolbar is visible, in
# order to allow the bottom toolbar to be displayed at the
# bottom of the screen.
ConditionalContainer(Window(), filter=show_bottom_toolbar),
bottom_toolbar,
]
),
focused_element=radio_list,
)
kb = KeyBindings()
@kb.add("enter", eager=True)
def _accept_input(event: E) -> None:
"Accept input when enter has been pressed."
event.app.exit(result=radio_list.current_value, style="class:accepted")
@Condition
def enable_interrupt() -> bool:
return to_filter(self.enable_interrupt)()
@kb.add("c-c", filter=enable_interrupt)
@kb.add("<sigint>", filter=enable_interrupt)
def _keyboard_interrupt(event: E) -> None:
"Abort when Control-C has been pressed."
event.app.exit(exception=self.interrupt_exception(), style="class:aborting")
suspend_supported = Condition(suspend_to_background_supported)
@Condition
def enable_suspend() -> bool:
return to_filter(self.enable_suspend)()
@kb.add("c-z", filter=suspend_supported & enable_suspend)
def _suspend(event: E) -> None:
"""
Suspend process to background.
"""
event.app.suspend_to_background()
return Application(
layout=layout,
full_screen=False,
mouse_support=self.mouse_support,
key_bindings=merge_key_bindings(
[kb, DynamicKeyBindings(lambda: self.key_bindings)]
),
style=self.style,
)
def prompt(self) -> _T:
return self._create_application().run()
async def prompt_async(self) -> _T:
return await self._create_application().run_async()
def choice(
message: AnyFormattedText,
*,
options: Sequence[tuple[_T, AnyFormattedText]],
default: _T | None = None,
mouse_support: bool = False,
style: BaseStyle | None = None,
symbol: str = ">",
bottom_toolbar: AnyFormattedText = None,
show_frame: bool = False,
enable_suspend: FilterOrBool = False,
enable_interrupt: FilterOrBool = True,
interrupt_exception: type[BaseException] = KeyboardInterrupt,
key_bindings: KeyBindingsBase | None = None,
) -> _T:
"""
Choice selection prompt. Ask the user to choose among a set of options.
Example usage::
result = choice(
message="Please select a dish:",
options=[
("pizza", "Pizza with mushrooms"),
("salad", "Salad with tomatoes"),
("sushi", "Sushi"),
],
default="pizza",
)
:param message: Plain text or formatted text to be shown before the options.
:param options: Sequence of ``(value, label)`` tuples. The labels can be
formatted text.
:param default: Default value. If none is given, the first option is
considered the default.
:param mouse_support: Enable mouse support.
:param style: :class:`.Style` instance for the color scheme.
:param symbol: Symbol to be displayed in front of the selected choice.
:param bottom_toolbar: Formatted text or callable that returns formatted
text to be displayed at the bottom of the screen.
:param show_frame: `bool` or
:class:`~prompt_toolkit.filters.Filter`. When True, surround the input
with a frame.
:param enable_interrupt: `bool` or
:class:`~prompt_toolkit.filters.Filter`. When True, raise
the ``interrupt_exception`` (``KeyboardInterrupt`` by default) when
control-c has been pressed.
:param interrupt_exception: The exception type that will be raised when
there is a keyboard interrupt (control-c keypress).
"""
return ChoiceInput[_T](
message=message,
options=options,
default=default,
mouse_support=mouse_support,
style=style,
symbol=symbol,
bottom_toolbar=bottom_toolbar,
show_frame=show_frame,
enable_suspend=enable_suspend,
enable_interrupt=enable_interrupt,
interrupt_exception=interrupt_exception,
key_bindings=key_bindings,
).prompt()
|
ChoiceInput
|
python
|
viewflow__viewflow
|
tests/workflow/test_nodes__view.py
|
{
"start": 3153,
"end": 3681
}
|
class ____(flow.Flow): # noqa: D101
start = flow.StartHandle().Next(this.approve)
approve = (
flow.View(views.UpdateProcessView.as_view(fields=[]))
.Permission(auto_create=True)
.onCreate(this.toggle_on_create)
.Next(this.end)
)
end = flow.End()
def toggle_on_create(self, activation):
assert not hasattr(self.__class__, "_on_create_executed")
self.__class__._on_create_executed = True
urlpatterns = [path("", FlowAppViewset(TestWorkflow).urls)]
|
TestWorkflow
|
python
|
getsentry__sentry
|
src/sentry/integrations/vsts/integration.py
|
{
"start": 26962,
"end": 29857
}
|
class ____:
def dispatch(self, request: HttpRequest, pipeline: IntegrationPipeline) -> HttpResponseBase:
with IntegrationPipelineViewEvent(
IntegrationPipelineViewType.ACCOUNT_CONFIG,
IntegrationDomain.SOURCE_CODE_MANAGEMENT,
VstsIntegrationProvider.key,
).capture() as lifecycle:
account_id = request.POST.get("account")
if account_id is not None:
state_accounts: Sequence[Mapping[str, Any]] | None = pipeline.fetch_state(
key="accounts"
)
account = self.get_account_from_id(account_id, state_accounts or [])
if account is not None:
pipeline.bind_state("account", account)
return pipeline.next_step()
state: Mapping[str, Any] | None = pipeline.fetch_state(key="identity")
access_token = (state or {}).get("data", {}).get("access_token")
user = get_user_info(access_token)
accounts = self.get_accounts(access_token, user["uuid"])
extra = {
"organization_id": pipeline.organization.id if pipeline.organization else None,
"user_id": request.user.id,
"accounts": accounts,
}
if not accounts or not accounts.get("value"):
lifecycle.record_halt(IntegrationPipelineHaltReason.NO_ACCOUNTS, extra=extra)
return render_to_response(
template="sentry/integrations/vsts-config.html",
context={"no_accounts": True},
request=request,
)
accounts = accounts["value"]
pipeline.bind_state("accounts", accounts)
account_form = AccountForm(accounts)
return render_to_response(
template="sentry/integrations/vsts-config.html",
context={"form": account_form, "no_accounts": False},
request=request,
)
def get_account_from_id(
self, account_id: str, accounts: Sequence[Mapping[str, Any]]
) -> Mapping[str, Any] | None:
for account in accounts:
if account["accountId"] == account_id:
return account
return None
def get_accounts(self, access_token: str, user_id: int) -> Any | None:
url = (
f"https://app.vssps.visualstudio.com/_apis/accounts?memberId={user_id}&api-version=4.1"
)
with http.build_session() as session:
response = session.get(
url,
headers={
"Content-Type": "application/json",
"Authorization": f"Bearer {access_token}",
},
)
if response.status_code == 200:
return response.json()
return None
|
AccountConfigView
|
python
|
pytorch__pytorch
|
test/distributed/_shard/sharded_tensor/ops/test_init.py
|
{
"start": 635,
"end": 4036
}
|
class ____(ShardedTensorTestBase):
"""Testing torch.nn.init functions for ShardedTensor"""
@with_comms
@skip_if_lt_x_gpu(4)
@requires_nccl()
def test_init_sharded_tensor_with_uniform(self):
"""Test torch.nn.init.uniform_(ShardedTensor, a, b)"""
spec = ChunkShardingSpec(
dim=0,
placements=[
"rank:0/cuda:0",
"rank:1/cuda:1",
"rank:2/cuda:2",
"rank:3/cuda:3",
],
)
h, w = 8, 2
a, b = 10, 20
seed = 1234
dtype = torch.double
st = sharded_tensor.empty(spec, h, w, dtype=dtype)
self.assertEqual(1, len(st.local_shards()))
# Clone local tensor to ensure torch.nn.init starts from the same input
local_tensor_clone = torch.clone(st.local_shards()[0].tensor)
torch.manual_seed(seed)
torch.nn.init.uniform_(st, a=a, b=b)
torch.manual_seed(seed)
torch.nn.init.uniform_(local_tensor_clone, a=a, b=b)
self.assertEqual(local_tensor_clone, st.local_shards()[0].tensor)
@with_comms
@skip_if_lt_x_gpu(4)
@requires_nccl()
def test_init_sharded_tensor_with_normal(self):
"""Test torch.nn.init.normal_(ShardedTensor, mean, std)"""
spec = ChunkShardingSpec(
dim=0,
placements=[
"rank:0/cuda:0",
"rank:1/cuda:1",
"rank:2/cuda:2",
"rank:3/cuda:3",
],
)
h, w = 8, 2
mean, std = 10, 5
seed = 1234
dtype = torch.double
st = sharded_tensor.empty(spec, h, w, dtype=dtype)
self.assertEqual(1, len(st.local_shards()))
# Clone local tensor to ensure torch.nn.init starts from the same input
local_tensor_clone = torch.clone(st.local_shards()[0].tensor)
torch.manual_seed(seed)
torch.nn.init.normal_(st, mean=mean, std=std)
torch.manual_seed(seed)
torch.nn.init.normal_(local_tensor_clone, mean=mean, std=std)
self.assertEqual(local_tensor_clone, st.local_shards()[0].tensor)
@with_comms
@skip_if_lt_x_gpu(4)
@requires_nccl()
def test_init_sharded_tensor_with_kaiming_uniform(self):
"""Test torch.nn.init.kaiming_uniform_(ShardedTensor, a, mode, nonlinearit)"""
spec = ChunkShardingSpec(
dim=0,
placements=[
"rank:0/cuda:0",
"rank:1/cuda:1",
"rank:2/cuda:2",
"rank:3/cuda:3",
],
)
h, w = 8, 2
a, mode, nonlinearity = 0, "fan_in", "leaky_relu"
seed = 1234
dtype = torch.double
st = sharded_tensor.empty(spec, h, w, dtype=dtype)
self.assertEqual(1, len(st.local_shards()))
# Clone local tensor to ensure torch.nn.init starts from the same input
local_tensor_clone = torch.clone(st.local_shards()[0].tensor)
torch.manual_seed(seed)
torch.nn.init.kaiming_uniform_(st, a=a, mode=mode, nonlinearity=nonlinearity)
torch.manual_seed(seed)
torch.nn.init.kaiming_uniform_(
local_tensor_clone, a=a, mode=mode, nonlinearity=nonlinearity
)
self.assertEqual(local_tensor_clone, st.local_shards()[0].tensor)
if __name__ == "__main__":
run_tests()
|
TestShardedTensorNNInit
|
python
|
psf__black
|
src/black/mode.py
|
{
"start": 7858,
"end": 7982
}
|
class ____(UserWarning):
"""Visible deprecation warning."""
_MAX_CACHE_KEY_PART_LENGTH: Final = 32
@dataclass
|
Deprecated
|
python
|
PrefectHQ__prefect
|
src/integrations/prefect-github/prefect_github/schemas/graphql_schema.py
|
{
"start": 229315,
"end": 229636
}
|
class ____(sgqlc.types.Type):
"""
See source code for more info.
"""
__schema__ = graphql_schema
__field_names__ = ("cursor", "node")
cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor")
node = sgqlc.types.Field("CommitComment", graphql_name="node")
|
CommitCommentEdge
|
python
|
Pylons__pyramid
|
docs/tutorials/wiki2/src/authentication/tutorial/models/page.py
|
{
"start": 137,
"end": 524
}
|
class ____(Base):
""" The SQLAlchemy declarative model class for a Page object. """
__tablename__ = 'pages'
id: Mapped[int] = mapped_column(primary_key=True)
name: Mapped[str] = mapped_column(unique=True)
data: Mapped[str]
creator_id: Mapped[int] = mapped_column(ForeignKey('users.id'))
creator: Mapped['User'] = relationship(back_populates='created_pages')
|
Page
|
python
|
bottlepy__bottle
|
test/test_auth.py
|
{
"start": 73,
"end": 345
}
|
class ____(ServerTestBase):
def test__header(self):
@bottle.route('/')
@bottle.auth_basic(lambda x, y: False)
def test(): return {}
self.assertStatus(401)
self.assertHeader('Www-Authenticate', 'Basic realm="private"')
|
TestBasicAuth
|
python
|
airbytehq__airbyte
|
airbyte-integrations/connectors/source-github/source_github/github_schema.py
|
{
"start": 856659,
"end": 857069
}
|
class ____(sgqlc.types.Type):
"""An edge in a connection."""
__schema__ = github_schema
__field_names__ = ("cursor", "node")
cursor = sgqlc.types.Field(sgqlc.types.non_null(String), graphql_name="cursor")
"""A cursor for use in pagination."""
node = sgqlc.types.Field("ProjectV2ItemFieldValue", graphql_name="node")
"""The item at the end of the edge."""
|
ProjectV2ItemFieldValueEdge
|
python
|
keon__algorithms
|
tests/test_array.py
|
{
"start": 4244,
"end": 4584
}
|
class ____(unittest.TestCase):
def test_garage(self):
initial = [1, 2, 3, 0, 4]
final = [0, 3, 2, 1, 4]
steps, seq = garage(initial, final)
self.assertEqual(steps, 4)
self.assertListEqual(
seq, [[0, 2, 3, 1, 4], [2, 0, 3, 1, 4], [2, 3, 0, 1, 4], [0, 3, 2, 1, 4]]
)
|
TestGarage
|
python
|
sqlalchemy__sqlalchemy
|
test/orm/dml/test_bulk_statements.py
|
{
"start": 81566,
"end": 87442
}
|
class ____(
fixtures.DeclarativeMappedTest, testing.AssertsExecutionResults
):
run_inserts = "each"
__requires__ = ("insert_returning",)
@classmethod
def setup_classes(cls):
Base = cls.DeclarativeBasic
class A(Base):
__tablename__ = "a"
id: Mapped[int] = mapped_column(
Integer, Identity(), primary_key=True
)
cs = relationship("C")
class B(Base):
__tablename__ = "b"
id: Mapped[int] = mapped_column(
Integer, Identity(), primary_key=True
)
a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
a = relationship("A")
class C(Base):
__tablename__ = "c"
id: Mapped[int] = mapped_column(
Integer, Identity(), primary_key=True
)
a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
@classmethod
def insert_data(cls, connection):
A = cls.classes.A
C = cls.classes.C
with Session(connection) as sess:
sess.add_all(
[
A(id=1, cs=[C(id=1), C(id=2)]),
A(id=2),
A(id=3, cs=[C(id=3), C(id=4)]),
]
)
sess.commit()
@testing.fixture
def fixture_with_loader_opt(self):
def go(lazy):
class Base(DeclarativeBase):
pass
class A(Base):
__tablename__ = "a"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
class B(Base):
__tablename__ = "b"
id: Mapped[int] = mapped_column(Integer, primary_key=True)
a_id: Mapped[int] = mapped_column(ForeignKey("a.id"))
a = relationship("A", lazy=lazy)
return A, B
return go
@testing.combinations(
(selectinload,),
(immediateload,),
)
def test_insert_supported(self, loader):
A, B = self.classes("A", "B")
sess = fixture_session()
result = sess.execute(
insert(B).returning(B).options(loader(B.a)),
[
{"id": 1, "a_id": 1},
{"id": 2, "a_id": 1},
{"id": 3, "a_id": 2},
{"id": 4, "a_id": 3},
{"id": 5, "a_id": 3},
],
).scalars()
for b in result:
assert "a" in b.__dict__
@testing.combinations(
(joinedload,),
(subqueryload,),
)
def test_insert_not_supported(self, loader):
"""test #11853"""
A, B = self.classes("A", "B")
sess = fixture_session()
stmt = insert(B).returning(B).options(loader(B.a))
with expect_deprecated(
f"The {loader.__name__} loader option is not compatible "
"with DML statements",
):
sess.execute(stmt, [{"id": 1, "a_id": 1}])
@testing.combinations(
(joinedload,),
(subqueryload,),
(selectinload,),
(immediateload,),
)
def test_secondary_opt_ok(self, loader):
A, B = self.classes("A", "B")
sess = fixture_session()
opt = selectinload(B.a)
opt = getattr(opt, loader.__name__)(A.cs)
result = sess.execute(
insert(B).returning(B).options(opt),
[
{"id": 1, "a_id": 1},
{"id": 2, "a_id": 1},
{"id": 3, "a_id": 2},
{"id": 4, "a_id": 3},
{"id": 5, "a_id": 3},
],
).scalars()
for b in result:
assert "a" in b.__dict__
assert "cs" in b.a.__dict__
@testing.combinations(
("joined",),
("select",),
("subquery",),
("selectin",),
("immediate",),
argnames="lazy_opt",
)
def test_insert_handles_implicit(self, fixture_with_loader_opt, lazy_opt):
"""test #11853"""
A, B = fixture_with_loader_opt(lazy_opt)
sess = fixture_session()
for b_obj in sess.execute(
insert(B).returning(B),
[
{"id": 1, "a_id": 1},
{"id": 2, "a_id": 1},
{"id": 3, "a_id": 2},
{"id": 4, "a_id": 3},
{"id": 5, "a_id": 3},
],
).scalars():
if lazy_opt in ("select", "joined", "subquery"):
# these aren't supported by DML
assert "a" not in b_obj.__dict__
else:
# the other three are
assert "a" in b_obj.__dict__
@testing.combinations(
(lazyload,), (selectinload,), (immediateload,), argnames="loader_opt"
)
@testing.combinations(
(joinedload,),
(subqueryload,),
(selectinload,),
(immediateload,),
(lazyload,),
argnames="secondary_opt",
)
def test_secondary_w_criteria_caching(self, loader_opt, secondary_opt):
"""test #11855"""
A, B, C = self.classes("A", "B", "C")
for i in range(3):
with fixture_session() as sess:
opt = loader_opt(B.a)
opt = getattr(opt, secondary_opt.__name__)(
A.cs.and_(C.a_id == 1)
)
stmt = insert(B).returning(B).options(opt)
b1 = sess.scalar(stmt, [{"a_id": 1}])
eq_({c.id for c in b1.a.cs}, {1, 2})
opt = loader_opt(B.a)
opt = getattr(opt, secondary_opt.__name__)(
A.cs.and_(C.a_id == 3)
)
stmt = insert(B).returning(B).options(opt)
b3 = sess.scalar(stmt, [{"a_id": 3}])
eq_({c.id for c in b3.a.cs}, {3, 4})
|
EagerLoadTest
|
python
|
doocs__leetcode
|
solution/0900-0999/0979.Distribute Coins in Binary Tree/Solution.py
|
{
"start": 192,
"end": 575
}
|
class ____:
def distributeCoins(self, root: Optional[TreeNode]) -> int:
def dfs(root):
if root is None:
return 0
left, right = dfs(root.left), dfs(root.right)
nonlocal ans
ans += abs(left) + abs(right)
return left + right + root.val - 1
ans = 0
dfs(root)
return ans
|
Solution
|
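For local experimentation, the snippet needs the `TreeNode` class that LeetCode normally injects. A self-contained sketch with a minimal stand-in, checked against the published `[3,0,0] -> 2` example:

```python
class TreeNode:
    # Minimal stand-in for LeetCode's injected TreeNode (assumption).
    def __init__(self, val=0, left=None, right=None):
        self.val, self.left, self.right = val, left, right

def distribute_coins(root):
    ans = 0
    def dfs(node):
        nonlocal ans
        if node is None:
            return 0
        left, right = dfs(node.left), dfs(node.right)
        ans += abs(left) + abs(right)  # coins crossing this node's edges
        return left + right + node.val - 1  # surplus passed to the parent
    dfs(root)
    return ans

print(distribute_coins(TreeNode(3, TreeNode(0), TreeNode(0))))  # 2
```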
python
|
kamyu104__LeetCode-Solutions
|
Python/smallest-subarrays-with-maximum-bitwise-or.py
|
{
"start": 52,
"end": 512
}
|
class ____(object):
def smallestSubarrays(self, nums):
"""
:type nums: List[int]
:rtype: List[int]
"""
result = [0]*len(nums)
lookup = [-1]*max(max(nums).bit_length(), 1)
for i in reversed(xrange(len(nums))):
for bit in xrange(len(lookup)):
if nums[i]&(1<<bit):
lookup[bit] = i
result[i] = max(max(lookup)-i+1, 1)
return result
|
Solution
|
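Again Python 2 (`xrange`). A Python 3 sketch of the same per-bit bookkeeping, checked against the published LeetCode 2411 example:

```python
def smallest_subarrays(nums):
    result = [0] * len(nums)
    # lookup[bit]: smallest index >= i whose value has this bit set
    lookup = [-1] * max(max(nums).bit_length(), 1)
    for i in reversed(range(len(nums))):
        for bit in range(len(lookup)):
            if nums[i] & (1 << bit):
                lookup[bit] = i
        result[i] = max(max(lookup) - i + 1, 1)
    return result

print(smallest_subarrays([1, 0, 2, 1, 3]))  # [3, 3, 2, 2, 1]
```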
python
|
kamyu104__LeetCode-Solutions
|
Python/minimum-operations-to-equalize-binary-string.py
|
{
"start": 36,
"end": 862
}
|
class ____(object):
def minOperations(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
def ceil_divide(a, b):
return (a+b-1)//b
zero = s.count('0')
if len(s) == k:
return 0 if zero == 0 else 1 if zero == len(s) else -1
result = float("inf")
if (k&1) == (zero&1):
i = max(ceil_divide(zero, k), ceil_divide(len(s)-zero, len(s)-k))
if (i&1) == 0:
i += 1
result = min(result, i)
if (zero&1) == 0:
i = max(ceil_divide(zero, k), ceil_divide(zero, len(s)-k))
if (i&1) == 1:
i += 1
result = min(result, i)
return result if result != float("inf") else -1
# Time: O(n)
# Space: O(1)
# math
|
Solution
|
python
|
pytorch__pytorch
|
torch/_inductor/codegen/cpp.py
|
{
"start": 183985,
"end": 184179
}
|
class ____(Enum):
SAME_VARS_REDUCE = "same_vars_reduce"
COMPATIBLE_REDUCTION = "compatible_reduction"
COMPATIBLE_RANGES_NO_REDUCTION = "compatible_ranges_no_reduction"
|
ReasonFusedNodes
|
python
|
keon__algorithms
|
tests/test_linkedlist.py
|
{
"start": 350,
"end": 676
}
|
class ____(object):
def __init__(self, x):
self.val = x
self.next = None
# Convert from linked list Node to list for testing
def convert(head):
ret = []
if head:
current = head
while current:
ret.append(current.val)
current = current.next
return ret
|
Node
|
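A quick sanity check of the `convert` helper (restating the masked class as `Node`, per the `target` column):

```python
class Node(object):
    def __init__(self, x):
        self.val = x
        self.next = None

def convert(head):
    ret = []
    current = head
    while current:
        ret.append(current.val)
        current = current.next
    return ret

head = Node(1)
head.next = Node(2)
head.next.next = Node(3)
print(convert(head))  # [1, 2, 3]
```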
python
|
walkccc__LeetCode
|
solutions/156. Binary Tree Upside Down/156-2.py
|
{
"start": 0,
"end": 415
}
|
class ____:
def upsideDownBinaryTree(self, root: TreeNode | None) -> TreeNode | None:
prevRoot = None
prevRightChild = None
while root:
nextRoot = root.left # Cache the next root.
root.left = prevRightChild
prevRightChild = root.right
root.right = prevRoot
prevRoot = root # Record the previous root.
root = nextRoot # Update the root.
return prevRoot
|
Solution
|
python
|
django__django
|
django/contrib/gis/gdal/prototypes/generation.py
|
{
"start": 445,
"end": 4888
}
|
class ____(c_char_p):
pass
def bool_output(func, argtypes, errcheck=None):
"""Generate a ctypes function that returns a boolean value."""
func.argtypes = argtypes
func.restype = c_bool
if errcheck:
func.errcheck = errcheck
return func
def double_output(func, argtypes, errcheck=False, strarg=False, cpl=False):
"Generate a ctypes function that returns a double value."
func.argtypes = argtypes
func.restype = c_double
if errcheck:
func.errcheck = partial(check_arg_errcode, cpl=cpl)
if strarg:
func.errcheck = check_str_arg
return func
def geom_output(func, argtypes, offset=None):
"""
Generate a function that returns a Geometry either by reference
(when ``offset`` is given) or directly.
"""
# Setting the argument types
func.argtypes = argtypes
if not offset:
# When a geometry pointer is directly returned.
func.restype = c_void_p
func.errcheck = check_geom
else:
# Error code returned, geometry is returned by-reference.
func.restype = c_int
def geomerrcheck(result, func, cargs):
return check_geom_offset(result, func, cargs, offset)
func.errcheck = geomerrcheck
return func
def int_output(func, argtypes, errcheck=None):
"Generate a ctypes function that returns an integer value."
func.argtypes = argtypes
func.restype = c_int
if errcheck:
func.errcheck = errcheck
return func
def int64_output(func, argtypes):
"Generate a ctypes function that returns a 64-bit integer value."
func.argtypes = argtypes
func.restype = c_int64
return func
def srs_output(func, argtypes):
"""
Generate a ctypes prototype for the given function with
the given C arguments that returns a pointer to an OGR
Spatial Reference System.
"""
func.argtypes = argtypes
func.restype = c_void_p
func.errcheck = check_srs
return func
def const_string_output(func, argtypes, offset=None, decoding=None, cpl=False):
func.argtypes = argtypes
if offset:
func.restype = c_int
else:
func.restype = c_char_p
def _check_const(result, func, cargs):
res = check_const_string(result, func, cargs, offset=offset, cpl=cpl)
if res and decoding:
res = res.decode(decoding)
return res
func.errcheck = _check_const
return func
def string_output(func, argtypes, offset=-1, str_result=False, decoding=None):
"""
Generate a ctypes prototype for the given function with the
given argument types that returns a string from a GDAL pointer.
The `const` flag indicates whether the allocated pointer should
be freed via the GDAL library routine VSIFree -- but this applies
only when `str_result` is True.
"""
func.argtypes = argtypes
if str_result:
# Use subclass of c_char_p so the error checking routine
# can free the memory at the pointer's address.
func.restype = gdal_char_p
else:
# Error code is returned
func.restype = c_int
# Dynamically defining our error-checking function with the
# given offset.
def _check_str(result, func, cargs):
res = check_string(result, func, cargs, offset=offset, str_result=str_result)
if res and decoding:
res = res.decode(decoding)
return res
func.errcheck = _check_str
return func
def void_output(func, argtypes, errcheck=True, cpl=False):
"""
For functions that return only an error code that needs to be
examined, or that return nothing at all.
"""
if argtypes:
func.argtypes = argtypes
if errcheck:
# `errcheck` keyword may be set to False for routines that
# return void, rather than a status code.
func.restype = c_int
func.errcheck = partial(check_errcode, cpl=cpl)
else:
func.restype = None
return func
def voidptr_output(func, argtypes, errcheck=True):
"For functions that return c_void_p."
func.argtypes = argtypes
func.restype = c_void_p
if errcheck:
func.errcheck = check_pointer
return func
def chararray_output(func, argtypes, errcheck=True):
"""For functions that return a c_char_p array."""
func.argtypes = argtypes
func.restype = POINTER(c_char_p)
if errcheck:
func.errcheck = check_pointer
return func
|
gdal_char_p
|
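All of these generators share one ctypes idiom: assign `argtypes`, assign `restype`, optionally hook `errcheck`. A library-agnostic sketch of the same pattern against the C math library (assumes a Unix-like system where `libm` resolves; `cbrt` is standard C99):

```python
import ctypes
import ctypes.util
from ctypes import c_double

libm = ctypes.CDLL(ctypes.util.find_library("m"))  # assumption: libm resolves

def double_output(func, argtypes):
    # Same shape as the generators above: set the prototype, return the func.
    func.argtypes = argtypes
    func.restype = c_double
    return func

cbrt = double_output(libm.cbrt, [c_double])
print(cbrt(27.0))  # 3.0
```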
python
|
sqlalchemy__sqlalchemy
|
test/ext/test_associationproxy.py
|
{
"start": 93288,
"end": 93489
}
|
class ____(
ScalarRemoveTest, fixtures.DeclarativeMappedTest
):
run_create_tables = None
useobject = True
cascade_scalar_deletes = True
uselist = False
|
ScalarRemoveScalarObjectCascade
|
python
|
dask__dask
|
dask/dataframe/dask_expr/_concat.py
|
{
"start": 12484,
"end": 12764
}
|
class ____(ConcatUnindexed):
@staticmethod
def operation(*args, ignore_order, _kwargs, axis, join):
return methods.concat(args, ignore_order=ignore_order, axis=axis, join=join)
def _broadcast_dep(self, dep: Expr):
return dep.npartitions == 1
|
ConcatIndexed
|
python
|
PyCQA__pylint
|
tests/functional/a/assigning/assigning_non_slot.py
|
{
"start": 4919,
"end": 4980
}
|
class ____:
__slots__ = ()
attr2 = MyDescriptor()
|
Base
|
python
|
pandas-dev__pandas
|
asv_bench/benchmarks/hash_functions.py
|
{
"start": 42,
"end": 251
}
|
class ____:
def setup(self):
lst = [x << 32 for x in range(5000)]
self.arr = np.array(lst, dtype=np.object_)
def time_unique(self):
pd.unique(self.arr)
|
UniqueForLargePyObjectInts
|
python
|
openai__openai-python
|
src/openai/types/realtime/realtime_session_create_response.py
|
{
"start": 8191,
"end": 8384
}
|
class ____(BaseModel):
input: Optional[AudioInput] = None
output: Optional[AudioOutput] = None
ToolChoice: TypeAlias = Union[ToolChoiceOptions, ToolChoiceFunction, ToolChoiceMcp]
|
Audio
|
python
|
PyCQA__pylint
|
tests/functional/p/postponed/postponed_evaluation_pep585.py
|
{
"start": 1510,
"end": 1594
}
|
class ____:
my_var: list[int]
@my_decorator
@dataclasses.dataclass
|
CustomDataClass3
|
python
|
sqlalchemy__sqlalchemy
|
lib/sqlalchemy/inspection.py
|
{
"start": 1902,
"end": 2279
}
|
class ____(Generic[_T]):
"""define a class as inspectable.
This allows typing to set up a linkage between an object that
can be inspected and the type of inspection it returns.
Unfortunately we cannot at the moment get all classes that are
returned by inspection to suit this interface as we get into
MRO issues.
"""
__slots__ = ()
|
Inspectable
|
python
|
getsentry__sentry
|
tests/sentry/api/serializers/test_base.py
|
{
"start": 390,
"end": 511
}
|
class ____(Serializer):
def serialize(self, obj, attrs, user, **kwargs):
raise Exception
|
FailingChildSerializer
|
python
|
pypa__pipenv
|
pipenv/vendor/click/types.py
|
{
"start": 14251,
"end": 16933
}
|
class ____(_NumberParamTypeBase):
def __init__(
self,
min: t.Optional[float] = None,
max: t.Optional[float] = None,
min_open: bool = False,
max_open: bool = False,
clamp: bool = False,
) -> None:
self.min = min
self.max = max
self.min_open = min_open
self.max_open = max_open
self.clamp = clamp
def to_info_dict(self) -> t.Dict[str, t.Any]:
info_dict = super().to_info_dict()
info_dict.update(
min=self.min,
max=self.max,
min_open=self.min_open,
max_open=self.max_open,
clamp=self.clamp,
)
return info_dict
def convert(
self, value: t.Any, param: t.Optional["Parameter"], ctx: t.Optional["Context"]
) -> t.Any:
import operator
rv = super().convert(value, param, ctx)
lt_min: bool = self.min is not None and (
operator.le if self.min_open else operator.lt
)(rv, self.min)
gt_max: bool = self.max is not None and (
operator.ge if self.max_open else operator.gt
)(rv, self.max)
if self.clamp:
if lt_min:
return self._clamp(self.min, 1, self.min_open) # type: ignore
if gt_max:
return self._clamp(self.max, -1, self.max_open) # type: ignore
if lt_min or gt_max:
self.fail(
_("{value} is not in the range {range}.").format(
value=rv, range=self._describe_range()
),
param,
ctx,
)
return rv
def _clamp(self, bound: float, dir: "te.Literal[1, -1]", open: bool) -> float:
"""Find the valid value to clamp to bound in the given
direction.
:param bound: The boundary value.
:param dir: 1 or -1 indicating the direction to move.
:param open: If true, the range does not include the bound.
"""
raise NotImplementedError
def _describe_range(self) -> str:
"""Describe the range for use in help text."""
if self.min is None:
op = "<" if self.max_open else "<="
return f"x{op}{self.max}"
if self.max is None:
op = ">" if self.min_open else ">="
return f"x{op}{self.min}"
lop = "<" if self.min_open else "<="
rop = "<" if self.max_open else "<="
return f"{self.min}{lop}x{rop}{self.max}"
def __repr__(self) -> str:
clamp = " clamped" if self.clamp else ""
return f"<{type(self).__name__} {self._describe_range()}{clamp}>"
|
_NumberRangeBase
|
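In upstream click this base class is concretized by `IntRange` and `FloatRange`. A hedged usage sketch (assuming click >= 8.0, where `clamp` and the open-bound flags exist on the public range types):

```python
import click

@click.command()
@click.option("--volume", type=click.IntRange(0, 11, clamp=True), default=5)
def set_volume(volume):
    # --volume 100 clamps to 11; --volume -3 clamps to 0.
    click.echo(f"volume={volume}")

if __name__ == "__main__":
    set_volume()
```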
python
|
pytest-dev__pytest-django
|
tests/test_unittest.py
|
{
"start": 474,
"end": 1197
}
|
class ____(TestCase):
def setUp(self) -> None:
"""setUp should be called after starting a transaction"""
assert Item.objects.count() == 0
Item.objects.create(name="Some item")
Item.objects.create(name="Some item again")
def test_count(self) -> None:
self.assertEqual(Item.objects.count(), 2) # noqa: PT009
assert Item.objects.count() == 2
Item.objects.create(name="Foo")
self.assertEqual(Item.objects.count(), 3) # noqa: PT009
def test_count_again(self) -> None:
self.test_count()
def tearDown(self) -> None:
"""tearDown should be called before rolling back the database"""
assert Item.objects.count() == 3
|
TestSetup
|
python
|
numba__numba
|
numba/tests/test_parfors.py
|
{
"start": 82140,
"end": 90140
}
|
class ____(TestParforsBase):
def test_parfor_slice1(self):
def test_impl(a):
(n,) = a.shape
b = a[0:n-2] + a[1:n-1]
return b
self.check(test_impl, np.ones(10))
def test_parfor_slice2(self):
def test_impl(a, m):
(n,) = a.shape
b = a[0:n-2] + a[1:m]
return b
# runtime assertion should succeed
self.check(test_impl, np.ones(10), 9)
# next we expect failure
with self.assertRaises(AssertionError) as raises:
njit(parallel=True)(test_impl)(np.ones(10),10)
self.assertIn("do not match", str(raises.exception))
def test_parfor_slice3(self):
def test_impl(a):
(m,n) = a.shape
b = a[0:m-1,0:n-1] + a[1:m,1:n]
return b
self.check(test_impl, np.ones((4,3)))
def test_parfor_slice4(self):
def test_impl(a):
(m,n) = a.shape
b = a[:,0:n-1] + a[:,1:n]
return b
self.check(test_impl, np.ones((4,3)))
def test_parfor_slice5(self):
def test_impl(a):
(m,n) = a.shape
b = a[0:m-1,:] + a[1:m,:]
return b
self.check(test_impl, np.ones((4,3)))
def test_parfor_slice6(self):
def test_impl(a):
b = a.transpose()
c = a[1,:] + b[:,1]
return c
self.check(test_impl, np.ones((4,3)))
def test_parfor_slice7(self):
def test_impl(a):
b = a.transpose()
c = a[1,:] + b[1,:]
return c
# runtime check should succeed
self.check(test_impl, np.ones((3,3)))
# next we expect failure
with self.assertRaises(AssertionError) as raises:
njit(parallel=True)(test_impl)(np.ones((3,4)))
self.assertIn("do not match", str(raises.exception))
@disabled_test
def test_parfor_slice8(self):
def test_impl(a):
(m,n) = a.shape
b = a.transpose()
b[1:m,1:n] = a[1:m,1:n]
return b
self.check(test_impl, np.arange(9).reshape((3,3)))
@disabled_test
def test_parfor_slice9(self):
def test_impl(a):
(m,n) = a.shape
b = a.transpose()
b[1:n,1:m] = a[:,1:m]
return b
self.check(test_impl, np.arange(12).reshape((3,4)))
@disabled_test
def test_parfor_slice10(self):
def test_impl(a):
(m,n) = a.shape
b = a.transpose()
b[2,1:m] = a[2,1:m]
return b
self.check(test_impl, np.arange(9).reshape((3,3)))
def test_parfor_slice11(self):
def test_impl(a):
(m,n,l) = a.shape
b = a.copy()
b[:,1,1:l] = a[:,2,1:l]
return b
self.check(test_impl, np.arange(27).reshape((3,3,3)))
def test_parfor_slice12(self):
def test_impl(a):
(m,n) = a.shape
b = a.copy()
b[1,1:-1] = a[0,:-2]
return b
self.check(test_impl, np.arange(12).reshape((3,4)))
def test_parfor_slice13(self):
def test_impl(a):
(m,n) = a.shape
b = a.copy()
c = -1
b[1,1:c] = a[0,-n:c-1]
return b
self.check(test_impl, np.arange(12).reshape((3,4)))
def test_parfor_slice14(self):
def test_impl(a):
(m,n) = a.shape
b = a.copy()
b[1,:-1] = a[0,-3:4]
return b
self.check(test_impl, np.arange(12).reshape((3,4)))
def test_parfor_slice15(self):
def test_impl(a):
(m,n) = a.shape
b = a.copy()
b[1,-(n-1):] = a[0,-3:4]
return b
self.check(test_impl, np.arange(12).reshape((3,4)))
@disabled_test
def test_parfor_slice16(self):
""" This test is disabled because if n is larger than the array size
then n and n-1 will both be the end of the array and thus the
slices will in fact be of different sizes and unable to fuse.
"""
def test_impl(a, b, n):
assert(a.shape == b.shape)
a[1:n] = 10
b[0:(n-1)] = 10
return a * b
self.check(test_impl, np.ones(10), np.zeros(10), 8)
args = (numba.float64[:], numba.float64[:], numba.int64)
self.assertEqual(countParfors(test_impl, args), 2)
def test_parfor_slice17(self):
def test_impl(m, A):
B = np.zeros(m)
n = len(A)
B[-n:] = A
return B
self.check(test_impl, 10, np.ones(10))
def test_parfor_slice18(self):
# issue 3534
def test_impl():
a = np.zeros(10)
a[1:8] = np.arange(0, 7)
y = a[3]
return y
self.check(test_impl)
def test_parfor_slice19(self):
# issues #3561 and #3554, empty slice binop
def test_impl(X):
X[:0] += 1
return X
self.check(test_impl, np.ones(10))
def test_parfor_slice20(self):
# issue #4075, slice size
def test_impl():
a = np.ones(10)
c = a[1:]
s = len(c)
return s
self.check(test_impl, check_scheduling=False)
def test_parfor_slice21(self):
def test_impl(x1, x2):
x1 = x1.reshape(x1.size, 1)
x2 = x2.reshape(x2.size, 1)
return x1 >= x2[:-1, :]
x1 = np.random.rand(5)
x2 = np.random.rand(6)
self.check(test_impl, x1, x2)
def test_parfor_slice22(self):
def test_impl(x1, x2):
b = np.zeros((10,))
for i in prange(1):
b += x1[:, x2]
return b
x1 = np.zeros((10,7))
x2 = np.array(4)
self.check(test_impl, x1, x2)
def test_parfor_slice23(self):
# issue #4630
def test_impl(x):
x[:0] = 2
return x
self.check(test_impl, np.ones(10))
def test_parfor_slice24(self):
def test_impl(m, A, n):
B = np.zeros(m)
C = B[n:]
C = A[:len(C)]
return B
for i in range(-15, 15):
self.check(test_impl, 10, np.ones(10), i)
def test_parfor_slice25(self):
def test_impl(m, A, n):
B = np.zeros(m)
C = B[:n]
C = A[:len(C)]
return B
for i in range(-15, 15):
self.check(test_impl, 10, np.ones(10), i)
def test_parfor_slice26(self):
def test_impl(a):
(n,) = a.shape
b = a.copy()
b[-(n-1):] = a[-3:4]
return b
self.check(test_impl, np.arange(4))
def test_parfor_slice27(self):
# issue5601: tests array analysis of the slice with
# n_valid_vals of unknown size.
def test_impl(a):
n_valid_vals = 0
for i in prange(a.shape[0]):
if a[i] != 0:
n_valid_vals += 1
if n_valid_vals:
unused = a[:n_valid_vals]
return 0
self.check(test_impl, np.arange(3))
def test_parfor_array_access_lower_slice(self):
for ts in [slice(1, 3, None), slice(2, None, None), slice(None, 2, -1),
slice(None, None, None), slice(None, None, -2)]:
def test_impl(n):
X = np.arange(n * 4).reshape((n, 4))
y = 0
for i in numba.prange(n):
y += X[i, ts].sum()
return y
n = 10
self.check(test_impl, n)
X = np.arange(n * 4).reshape((n, 4))
def test_impl(X):
y = 0
for i in numba.prange(X.shape[0]):
y += X[i, ts].sum()
return y
self.check(test_impl, X)
@skip_parfors_unsupported
|
TestParforsSlice
|
python
|
pyinstaller__pyinstaller
|
PyInstaller/fake-modules/_pyi_rth_utils/_win32.py
|
{
"start": 1488,
"end": 11564
}
|
class ____(ctypes.Structure):
_fields_ = [
("nLength", ctypes.wintypes.DWORD),
("lpSecurityDescriptor", PSECURITY_DESCRIPTOR),
("bInheritHandle", ctypes.wintypes.BOOL),
]
# win32 API functions, bound via ctypes.
# NOTE: we do not use ctypes.windll.<dll_name> to avoid modifying its (global) function prototypes, which might affect
# user's code.
advapi32 = ctypes.WinDLL("advapi32")
kernel32 = ctypes.WinDLL("kernel32")
advapi32.ConvertSidToStringSidW.restype = ctypes.wintypes.BOOL
advapi32.ConvertSidToStringSidW.argtypes = (
PSID, # [in] PSID Sid
ctypes.POINTER(ctypes.wintypes.LPWSTR), # [out] LPWSTR *StringSid
)
advapi32.ConvertStringSecurityDescriptorToSecurityDescriptorW.restype = ctypes.wintypes.BOOL
advapi32.ConvertStringSecurityDescriptorToSecurityDescriptorW.argtypes = (
ctypes.wintypes.LPCWSTR, # [in] LPCWSTR StringSecurityDescriptor
ctypes.wintypes.DWORD, # [in] DWORD StringSDRevision
ctypes.POINTER(PSECURITY_DESCRIPTOR), # [out] PSECURITY_DESCRIPTOR *SecurityDescriptor
ctypes.wintypes.PULONG, # [out] PULONG SecurityDescriptorSize
)
advapi32.GetTokenInformation.restype = ctypes.wintypes.BOOL
advapi32.GetTokenInformation.argtypes = (
ctypes.wintypes.HANDLE, # [in] HANDLE TokenHandle
ctypes.c_int, # [in] TOKEN_INFORMATION_CLASS TokenInformationClass
ctypes.wintypes.LPVOID, # [out, optional] LPVOID TokenInformation
ctypes.wintypes.DWORD, # [in] DWORD TokenInformationLength
ctypes.wintypes.PDWORD, # [out] PDWORD ReturnLength
)
kernel32.CloseHandle.restype = ctypes.wintypes.BOOL
kernel32.CloseHandle.argtypes = (
ctypes.wintypes.HANDLE, # [in] HANDLE hObject
)
kernel32.CreateDirectoryW.restype = ctypes.wintypes.BOOL
kernel32.CreateDirectoryW.argtypes = (
ctypes.wintypes.LPCWSTR, # [in] LPCWSTR lpPathName
ctypes.POINTER(SECURITY_ATTRIBUTES), # [in, optional] LPSECURITY_ATTRIBUTES lpSecurityAttributes
)
kernel32.FormatMessageW.restype = ctypes.wintypes.DWORD
kernel32.FormatMessageW.argtypes = (
ctypes.wintypes.DWORD, # [in] DWORD dwFlags
ctypes.wintypes.LPCVOID, # [in, optional] LPCVOID lpSource
ctypes.wintypes.DWORD, # [in] DWORD dwMessageId
ctypes.wintypes.DWORD, # [in] DWORD dwLanguageId
ctypes.wintypes.LPWSTR, # [out] LPWSTR lpBuffer
ctypes.wintypes.DWORD, # [in] DWORD nSize
ctypes.wintypes.LPVOID, # [in, optional] va_list *Arguments
)
kernel32.GetCurrentProcess.restype = ctypes.wintypes.HANDLE
# kernel32.GetCurrentProcess has no arguments
kernel32.GetLastError.restype = ctypes.wintypes.DWORD
# kernel32.GetLastError has no arguments
kernel32.LocalFree.restype = ctypes.wintypes.BOOL
kernel32.LocalFree.argtypes = (
ctypes.wintypes.HLOCAL, # [in] _Frees_ptr_opt_ HLOCAL hMem
)
kernel32.OpenProcessToken.restype = ctypes.wintypes.BOOL
kernel32.OpenProcessToken.argtypes = (
ctypes.wintypes.HANDLE, # [in] HANDLE ProcessHandle
ctypes.wintypes.DWORD, # [in] DWORD DesiredAccess
ctypes.wintypes.PHANDLE, # [out] PHANDLE TokenHandle
)
def _win_error_to_message(error_code):
"""
Convert win32 error code to message.
"""
message_wstr = ctypes.wintypes.LPWSTR(None)
ret = kernel32.FormatMessageW(
FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
None, # lpSource
error_code, # dwMessageId
0x400, # dwLanguageId = MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT)
ctypes.cast(
ctypes.byref(message_wstr),
ctypes.wintypes.LPWSTR,
), # pointer to LPWSTR due to FORMAT_MESSAGE_ALLOCATE_BUFFER; needs to be cast to LPWSTR
64, # due to FORMAT_MESSAGE_ALLOCATE_BUFFER, this is minimum number of characters to allocate
None,
)
if ret == 0:
return None
message = message_wstr.value
kernel32.LocalFree(message_wstr)
# Strip trailing CR/LF.
if message:
message = message.strip()
return message
def _get_process_sid(token_information_class):
"""
Obtain the SID from the current process by the given token information class.
Args:
token_information_class: Token information class identifying the SID that we're
interested in. Only TokenUser and TokenAppContainerSid are supported.
Returns: SID (if it could be fetched) or None if not available or on error.
"""
process_token = ctypes.wintypes.HANDLE(INVALID_HANDLE)
try:
# Get access token for the current process
ret = kernel32.OpenProcessToken(
kernel32.GetCurrentProcess(),
TOKEN_QUERY,
ctypes.pointer(process_token),
)
if ret == 0:
error_code = kernel32.GetLastError()
raise RuntimeError(f"Failed to open process token! Error code: 0x{error_code:X}")
# Query buffer size for sid
token_info_size = ctypes.wintypes.DWORD(0)
ret = advapi32.GetTokenInformation(
process_token,
token_information_class,
None,
0,
ctypes.byref(token_info_size),
)
# We expect this call to fail with ERROR_INSUFFICIENT_BUFFER
if ret == 0:
error_code = kernel32.GetLastError()
if error_code != ERROR_INSUFFICIENT_BUFFER:
raise RuntimeError(f"Failed to query token information buffer size! Error code: 0x{error_code:X}")
else:
raise RuntimeError("Unexpected return value from GetTokenInformation!")
# Allocate buffer
token_info = ctypes.create_string_buffer(token_info_size.value)
ret = advapi32.GetTokenInformation(
process_token,
token_information_class,
token_info,
token_info_size,
ctypes.byref(token_info_size),
)
if ret == 0:
error_code = kernel32.GetLastError()
raise RuntimeError(f"Failed to query token information! Error code: 0x{error_code:X}")
# Convert SID to string
# Technically, when UserToken is used, we need to pass user_info->User.Sid,
# but as they are at the beginning of the buffer, just pass the buffer instead...
sid_wstr = ctypes.wintypes.LPWSTR(None)
if token_information_class == TokenUser:
sid = ctypes.cast(token_info, PTOKEN_USER).contents.User.Sid
elif token_information_class == TokenAppContainerSid:
sid = ctypes.cast(token_info, PTOKEN_APPCONTAINER_INFORMATION).contents.TokenAppContainer
else:
raise ValueError(f"Unexpected token information class: {token_information_class}")
ret = advapi32.ConvertSidToStringSidW(sid, ctypes.pointer(sid_wstr))
if ret == 0:
error_code = kernel32.GetLastError()
raise RuntimeError(f"Failed to convert SID to string! Error code: 0x{error_code:X}")
sid = sid_wstr.value
kernel32.LocalFree(sid_wstr)
except Exception:
sid = None
finally:
# Close the process token
if process_token.value != INVALID_HANDLE:
kernel32.CloseHandle(process_token)
return sid
# Get and cache current user's SID
_user_sid = _get_process_sid(TokenUser)
# Get and cache current app container's SID (if any)
_app_container_sid = _get_process_sid(TokenAppContainerSid)
def secure_mkdir(dir_name):
"""
Replacement for mkdir that limits the access to created directory to current user.
"""
# Create security descriptor
# Prefer actual user SID over SID S-1-3-4 (current owner), because at the time of writing, Wine does not properly
# support the latter.
user_sid = _user_sid or "S-1-3-4"
# DACL descriptor (D):
# ace_type;ace_flags;rights;object_guid;inherit_object_guid;account_sid;(resource_attribute)
# - ace_type = SDDL_ACCESS_ALLOWED (A)
# - rights = SDDL_FILE_ALL (FA)
# - account_sid = current user (queried SID)
security_desc_str = f"D:(A;;FA;;;{user_sid})"
# If the app is running within an AppContainer, the app container SID has to be added to the DACL.
# Otherwise our process will not have access to the temp dir.
#
# Quoting https://learn.microsoft.com/en-us/windows/win32/secauthz/implementing-an-appcontainer:
# "The AppContainer SID is a persistent unique identifier for the appcontainer. ...
# To allow a single AppContainer to access a resource, add its AppContainerSID to the ACL for that resource."
if _app_container_sid:
security_desc_str += f"(A;;FA;;;{_app_container_sid})"
security_desc = ctypes.wintypes.LPVOID(None)
ret = advapi32.ConvertStringSecurityDescriptorToSecurityDescriptorW(
security_desc_str,
SDDL_REVISION1,
ctypes.byref(security_desc),
None,
)
if ret == 0:
error_code = kernel32.GetLastError()
raise RuntimeError(
f"Failed to create security descriptor! Error code: 0x{error_code:X}, "
f"message: {_win_error_to_message(error_code)}"
)
security_attr = SECURITY_ATTRIBUTES()
security_attr.nLength = ctypes.sizeof(SECURITY_ATTRIBUTES)
security_attr.lpSecurityDescriptor = security_desc
security_attr.bInheritHandle = False
# Create directory
ret = kernel32.CreateDirectoryW(
dir_name,
security_attr,
)
if ret == 0:
# Call failed; store error code immediately, to avoid it being overwritten in cleanup below.
error_code = kernel32.GetLastError()
# Free security descriptor
kernel32.LocalFree(security_desc)
# Exit on success
if ret != 0:
return
# Construct OSError from win error code
error_message = _win_error_to_message(error_code)
# Strip trailing dot to match error message from os.mkdir().
if error_message and error_message[-1] == '.':
error_message = error_message[:-1]
raise OSError(
None, # errno
error_message, # strerror
dir_name, # filename
error_code, # winerror
None, # filename2
)
|
SECURITY_ATTRIBUTES
|
python
|
sympy__sympy
|
sympy/sets/sets.py
|
{
"start": 80353,
"end": 81750
}
|
class ____(Kind):
"""
SetKind is kind for all Sets
Every instance of Set will have kind ``SetKind`` parametrised by the kind
of the elements of the ``Set``. The kind of the elements might be
``NumberKind``, or ``TupleKind`` or something else. When not all elements
have the same kind then the kind of the elements will be given as
``UndefinedKind``.
Parameters
==========
element_kind: Kind (optional)
The kind of the elements of the set. In a well-defined set all elements
will have the same kind. Otherwise the kind should be
:class:`sympy.core.kind.UndefinedKind`. The ``element_kind`` argument is optional but
should only be omitted in the case of ``EmptySet`` whose kind is simply
``SetKind()``
Examples
========
>>> from sympy import Interval
>>> Interval(1, 2).kind
SetKind(NumberKind)
>>> Interval(1,2).kind.element_kind
NumberKind
See Also
========
sympy.core.kind.NumberKind
sympy.matrices.kind.MatrixKind
sympy.core.containers.TupleKind
"""
def __new__(cls, element_kind=None):
obj = super().__new__(cls, element_kind)
obj.element_kind = element_kind
return obj
def __repr__(self):
if not self.element_kind:
return "SetKind()"
else:
return "SetKind(%s)" % self.element_kind
|
SetKind
|
python
|
anthropics__anthropic-sdk-python
|
src/anthropic/types/messages/batch_list_params.py
|
{
"start": 196,
"end": 691
}
|
class ____(TypedDict, total=False):
after_id: str
"""ID of the object to use as a cursor for pagination.
When provided, returns the page of results immediately after this object.
"""
before_id: str
"""ID of the object to use as a cursor for pagination.
When provided, returns the page of results immediately before this object.
"""
limit: int
"""Number of items to return per page.
Defaults to `20`. Ranges from `1` to `1000`.
"""
|
BatchListParams
|
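Since this is a `TypedDict` with `total=False`, every key is optional and a plain dict literal type-checks. A minimal construction sketch (the cursor value is hypothetical):

```python
from anthropic.types.messages.batch_list_params import BatchListParams

params: BatchListParams = {
    "limit": 50,
    "after_id": "msgbatch_abc123",  # hypothetical cursor
}
print(params)
```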
python
|
run-llama__llama_index
|
llama-index-core/llama_index/core/instrumentation/events/agent.py
|
{
"start": 2738,
"end": 3062
}
|
class ____(BaseEvent):
"""
AgentToolCallEvent.
Args:
arguments (str): Arguments.
tool (ToolMetadata): Tool metadata.
"""
arguments: str
tool: ToolMetadata
@classmethod
def class_name(cls) -> str:
"""Class name."""
return "AgentToolCallEvent"
|
AgentToolCallEvent
|
python
|
spyder-ide__spyder
|
spyder/plugins/variableexplorer/widgets/objectexplorer/toggle_column_mixin.py
|
{
"start": 5687,
"end": 6084
}
|
class ____(QTreeWidget, ToggleColumnMixIn):
"""
A QTreeWidget where right clicking on the header allows the user to
show/hide columns.
"""
def _horizontal_header(self):
"""
Returns the horizontal header (of type QHeaderView).
Override this if the horizontalHeader() function does not exist.
"""
return self.header()
|
ToggleColumnTreeWidget
|
python
|
kamyu104__LeetCode-Solutions
|
Python/tree-of-coprimes.py
|
{
"start": 1907,
"end": 3034
}
|
class ____(object):
def getCoprimes(self, nums, edges):
"""
:type nums: List[int]
:type edges: List[List[int]]
:rtype: List[int]
"""
def dfs(nums, adj, prev, node, depth, path, result):
max_d = -1
for x in path.iterkeys():
if fractions.gcd(nums[node], x) != 1:
continue
if path[x][-1][1] > max_d:
max_d = path[x][-1][1]
result[node] = path[x][-1][0]
path[nums[node]].append((node, depth))
for nei in adj[node]:
if nei == prev:
continue
dfs(nums, adj, node, nei, depth+1, path, result)
path[nums[node]].pop()
if not path[nums[node]]:
path.pop(nums[node])
adj = collections.defaultdict(list)
for u, v in edges:
adj[u].append(v)
adj[v].append(u)
result = [-1]*len(nums)
path = collections.defaultdict(list)
dfs(nums, adj, -1, 0, 0, path, result)
return result
|
Solution2
|
python
|
ray-project__ray
|
python/ray/llm/_internal/serve/core/configs/openai_api_models.py
|
{
"start": 1828,
"end": 1957
}
|
class ____(vLLMErrorResponse):
model_config = ConfigDict(arbitrary_types_allowed=True)
# TODO (Kourosh): Upstream
|
ErrorResponse
|
python
|
doocs__leetcode
|
solution/1200-1299/1235.Maximum Profit in Job Scheduling/Solution.py
|
{
"start": 0,
"end": 456
}
|
class ____:
def jobScheduling(
self, startTime: List[int], endTime: List[int], profit: List[int]
) -> int:
@cache
def dfs(i):
if i >= n:
return 0
_, e, p = jobs[i]
j = bisect_left(jobs, e, lo=i + 1, key=lambda x: x[0])
return max(dfs(i + 1), p + dfs(j))
jobs = sorted(zip(startTime, endTime, profit))
n = len(profit)
return dfs(0)
|
Solution
|
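The snippet leans on names LeetCode injects (`List`, `cache`, `bisect_left`); note that `bisect_left(..., key=...)` needs Python 3.10+. A self-contained run, checked against the published LeetCode 1235 example:

```python
from bisect import bisect_left
from functools import cache
from typing import List

def job_scheduling(startTime: List[int], endTime: List[int], profit: List[int]) -> int:
    jobs = sorted(zip(startTime, endTime, profit))
    n = len(jobs)

    @cache
    def dfs(i: int) -> int:
        if i >= n:
            return 0
        _, e, p = jobs[i]
        # First job starting at or after this job's end time.
        j = bisect_left(jobs, e, lo=i + 1, key=lambda x: x[0])
        return max(dfs(i + 1), p + dfs(j))

    return dfs(0)

print(job_scheduling([1, 2, 3, 3], [3, 4, 5, 6], [50, 10, 40, 70]))  # 120
```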
python
|
cython__cython
|
Cython/Compiler/Scanning.py
|
{
"start": 1419,
"end": 3247
}
|
class ____:
def __init__(self, outer=None):
self.entries = {}
self.outer = outer
def declare(self, name, value):
self.entries[name] = value
def update(self, other):
self.entries.update(other)
def lookup_here(self, name):
return self.entries[name]
def __contains__(self, name):
return name in self.entries
def lookup(self, name):
try:
return self.lookup_here(name)
except KeyError:
outer = self.outer
if outer:
return outer.lookup(name)
else:
raise
def initial_compile_time_env():
benv = CompileTimeScope()
names = ('UNAME_SYSNAME', 'UNAME_NODENAME', 'UNAME_RELEASE', 'UNAME_VERSION', 'UNAME_MACHINE')
for name, value in zip(names, platform.uname()):
benv.declare(name, value)
import builtins
names = (
'False', 'True',
'abs', 'all', 'any', 'ascii', 'bin', 'bool', 'bytearray', 'bytes',
'chr', 'complex', 'dict', 'divmod', 'enumerate', 'filter',
'float', 'format', 'frozenset', 'hash', 'hex', 'int', 'len',
'list', 'map', 'max', 'min', 'next', 'oct', 'ord', 'pow', 'range',
'repr', 'reversed', 'round', 'set', 'slice', 'sorted', 'str',
'sum', 'tuple', 'zip',
### defined below in a platform independent way
# 'long', 'unicode', 'reduce', 'xrange'
)
for name in names:
benv.declare(name, getattr(builtins, name))
# legacy Py2 names
from functools import reduce
benv.declare('reduce', reduce)
benv.declare('unicode', str)
benv.declare('long', int)
benv.declare('xrange', range)
denv = CompileTimeScope(benv)
return denv
#------------------------------------------------------------------
|
CompileTimeScope
|
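The scope chain is ordinary lexical resolution: check the local table, then walk `outer`. A self-contained sketch of the same lookup discipline (class restated here so it runs standalone):

```python
class Scope:
    def __init__(self, outer=None):
        self.entries = {}
        self.outer = outer

    def declare(self, name, value):
        self.entries[name] = value

    def lookup(self, name):
        if name in self.entries:
            return self.entries[name]
        if self.outer is not None:
            return self.outer.lookup(name)  # fall through to enclosing scope
        raise KeyError(name)

builtin_scope = Scope()
builtin_scope.declare("xrange", range)  # legacy alias, as in the snippet
module_scope = Scope(outer=builtin_scope)
module_scope.declare("DEBUG", True)
print(module_scope.lookup("DEBUG"), module_scope.lookup("xrange"))
```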
python
|
matplotlib__matplotlib
|
lib/mpl_toolkits/axes_grid1/axes_size.py
|
{
"start": 699,
"end": 1612
}
|
class ____:
def __rmul__(self, other):
return self * other
def __mul__(self, other):
if not isinstance(other, Real):
return NotImplemented
return Fraction(other, self)
def __div__(self, other):
return (1 / other) * self
def __add__(self, other):
if isinstance(other, _Base):
return Add(self, other)
else:
return Add(self, Fixed(other))
def __neg__(self):
return -1 * self
def __radd__(self, other):
# other cannot be a _Base instance, because A + B would trigger
# A.__add__(B) first.
return Add(self, Fixed(other))
def __sub__(self, other):
return self + (-other)
def get_size(self, renderer):
"""
Return two-float tuple with relative and absolute sizes.
"""
raise NotImplementedError("Subclasses must implement")
|
_Base
|
python
|
scipy__scipy
|
scipy/stats/tests/test_qmc.py
|
{
"start": 27223,
"end": 30482
}
|
class ____(QMCEngineTests):
qmce = qmc.LatinHypercube
can_scramble = True
def test_continuing(self, *args):
pytest.skip("Not applicable: not a sequence.")
def test_fast_forward(self, *args):
pytest.skip("Not applicable: not a sequence.")
def test_sample(self, *args):
pytest.skip("Not applicable: the value of reference sample is"
" implementation dependent.")
@pytest.mark.parametrize("strength", [1, 2])
@pytest.mark.parametrize("scramble", [False, True])
@pytest.mark.parametrize("optimization", [None, "random-CD"])
def test_sample_stratified(self, optimization, scramble, strength):
rng = np.random.default_rng(37511836202578819870665127532742111260)
p = 5
n = p**2
d = 6
engine = qmc.LatinHypercube(d=d, scramble=scramble,
strength=strength,
optimization=optimization,
rng=rng)
sample = engine.random(n=n)
assert sample.shape == (n, d)
assert engine.num_generated == n
# centering stratifies samples in the middle of equal segments:
# * inter-sample distance is constant in 1D sub-projections
# * after ordering, columns are equal
expected1d = (np.arange(n) + 0.5) / n
expected = np.broadcast_to(expected1d, (d, n)).T
assert np.any(sample != expected)
sorted_sample = np.sort(sample, axis=0)
tol = 0.5 / n if scramble else 0
assert_allclose(sorted_sample, expected, atol=tol)
assert np.any(sample - expected > tol)
if strength == 2 and optimization is None:
unique_elements = np.arange(p)
desired = set(product(unique_elements, unique_elements))
for i, j in combinations(range(engine.d), 2):
samples_2d = sample[:, [i, j]]
res = (samples_2d * p).astype(int)
res_set = {tuple(row) for row in res}
assert_equal(res_set, desired)
def test_optimizer_1d(self):
# discrepancy measures are invariant under permuting factors and runs
engine = self.engine(d=1, scramble=False)
sample_ref = engine.random(n=64)
optimal_ = self.engine(d=1, scramble=False, optimization="random-CD")
sample_ = optimal_.random(n=64)
assert_array_equal(sample_ref, sample_)
def test_raises(self):
message = r"not a valid strength"
with pytest.raises(ValueError, match=message):
qmc.LatinHypercube(1, strength=3)
message = r"n is not the square of a prime number"
with pytest.raises(ValueError, match=message):
engine = qmc.LatinHypercube(d=2, strength=2)
engine.random(16)
message = r"n is not the square of a prime number"
with pytest.raises(ValueError, match=message):
engine = qmc.LatinHypercube(d=2, strength=2)
engine.random(5) # because int(sqrt(5)) would result in 2
message = r"n is too small for d"
with pytest.raises(ValueError, match=message):
engine = qmc.LatinHypercube(d=5, strength=2)
engine.random(9)
|
TestLHS
|
python
|
ApeWorX__ape
|
src/ape_ethereum/transactions.py
|
{
"start": 4753,
"end": 5466
}
|
class ____(BaseTransaction):
"""
Transactions that are pre-EIP-1559 and use the ``gasPrice`` field.
"""
gas_price: Optional[HexInt] = Field(default=None, alias="gasPrice")
max_priority_fee: Optional[HexInt] = Field(default=None, exclude=True) # type: ignore
type: HexInt = Field(default=TransactionType.STATIC.value, exclude=True)
max_fee: Optional[HexInt] = Field(default=None, exclude=True) # type: ignore
@model_validator(mode="after")
def calculate_read_only_max_fee(self):
# Work-around: we cannot use a computed field to override a non-computed field.
self.max_fee = (self.gas_limit or 0) * (self.gas_price or 0)
return self
|
StaticFeeTransaction
|
python
|
charliermarsh__ruff
|
crates/ruff_linter/resources/test/fixtures/ruff/RUF009.py
|
{
"start": 1851,
"end": 2399
}
|
class ____:
quantity_on_hand: IntConversionDescriptor = IntConversionDescriptor(default=100)
# Regression tests for:
# https://github.com/astral-sh/ruff/issues/6447
from typing import NewType
ListOfStrings = NewType("ListOfStrs", list[str])
StringsToInts = NewType("IntsToStrings", dict[str, int])
SpecialString = NewType(name="SpecialString", tp=str)
NegativeInteger = NewType("NegInt", tp=int)
Invalid1 = NewType(*Foo)
Invalid2 = NewType("Invalid2", name=Foo)
Invalid3 = NewType("Invalid3", name=Foo, lorem="ipsum")
@dataclass
|
InventoryItem
|
python
|
doocs__leetcode
|
solution/0300-0399/0379.Design Phone Directory/Solution.py
|
{
"start": 0,
"end": 572
}
|
class ____:
def __init__(self, maxNumbers: int):
self.available = set(range(maxNumbers))
def get(self) -> int:
if not self.available:
return -1
return self.available.pop()
def check(self, number: int) -> bool:
return number in self.available
def release(self, number: int) -> None:
self.available.add(number)
# Your PhoneDirectory object will be instantiated and called as such:
# obj = PhoneDirectory(maxNumbers)
# param_1 = obj.get()
# param_2 = obj.check(number)
# obj.release(number)
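# --- Editor's note: illustrative usage sketch, not part of the original
# solution; it assumes the masked class is ``PhoneDirectory`` as the usage
# comments above state.
directory = PhoneDirectory(2)
first = directory.get()                 # 0 or 1; set pop order is arbitrary
assert directory.check(first) is False  # taken numbers are unavailable
directory.release(first)
assert directory.check(first) is True
assert {directory.get(), directory.get()} == {0, 1}
assert directory.get() == -1            # pool exhausted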
|
PhoneDirectory
|
python
|
lepture__authlib
|
tests/django/test_oauth2/models.py
|
{
"start": 653,
"end": 2214
}
|
class ____(Model, ClientMixin):
user = ForeignKey(User, on_delete=CASCADE)
client_id = CharField(max_length=48, unique=True, db_index=True)
client_secret = CharField(max_length=48, blank=True)
redirect_uris = TextField(default="")
default_redirect_uri = TextField(blank=False, default="")
scope = TextField(default="")
response_type = TextField(default="")
grant_type = TextField(default="")
token_endpoint_auth_method = CharField(max_length=120, default="")
def get_client_id(self):
return self.client_id
def get_default_redirect_uri(self):
return self.default_redirect_uri
def get_allowed_scope(self, scope):
if not scope:
return ""
allowed = set(scope_to_list(self.scope))
return list_to_scope([s for s in scope.split() if s in allowed])
def check_redirect_uri(self, redirect_uri):
if redirect_uri == self.default_redirect_uri:
return True
return redirect_uri in self.redirect_uris
def check_client_secret(self, client_secret):
return self.client_secret == client_secret
def check_endpoint_auth_method(self, method, endpoint):
if endpoint == "token":
return self.token_endpoint_auth_method == method
return True
def check_response_type(self, response_type):
allowed = self.response_type.split()
return response_type in allowed
def check_grant_type(self, grant_type):
allowed = self.grant_type.split()
return grant_type in allowed
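# --- Editor's note: illustrative sketch, not part of the original test model. ---
# ``get_allowed_scope`` keeps only the requested scopes the client registered;
# the same filtering in isolation:
registered = set("profile email".split())
requested = "email admin"
granted = " ".join(s for s in requested.split() if s in registered)
assert granted == "email"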
|
Client
|
python
|
dagster-io__dagster
|
python_modules/libraries/dagster-airflow/dagster_airflow_tests/test_dagster_pipeline_factory/test_load_connections.py
|
{
"start": 3689,
"end": 5209
}
|
class ____(unittest.TestCase):
@mock.patch("dagster_airflow.hooks.dagster_hook.DagsterHook.launch_run", return_value="run_id")
@mock.patch("dagster_airflow.hooks.dagster_hook.DagsterHook.wait_for_run")
    # mock.patch decorators inject mocks bottom-up: the innermost patch's mock
    # arrives first, so the parameter order below mirrors the decorator stack.
    def test_ingest_airflow_dags_with_connections(self, wait_for_run, launch_run):
connections = [
Connection(
conn_id="dagster_connection_test",
conn_type="dagster",
host="prod",
password="test_token",
port="test-port", # pyright: ignore[reportArgumentType]
schema="test-port",
extra=json.dumps({"foo": "bar"}),
)
]
with tempfile.TemporaryDirectory() as tmpdir_path:
with open(os.path.join(tmpdir_path, "test_connection_dag.py"), "wb") as f:
f.write(bytes(LOAD_CONNECTION_DAG_AIRFLOW_1_FILE_CONTENTS.encode("utf-8")))
definitions = make_dagster_definitions_from_airflow_dags_path(
tmpdir_path, connections=connections
)
repo = definitions.get_repository_def()
assert repo.has_job("example_connections")
job = repo.get_job("example_connections")
result = job.execute_in_process()
assert result.success
for event in result.all_events:
assert event.event_type_value != "STEP_FAILURE"
launch_run.assert_called_once()
wait_for_run.assert_called_once()
|
TestConnectionsAirflow1
|
python
|
allegroai__clearml
|
clearml/backend_api/services/v2_20/workers.py
|
{
"start": 51783,
"end": 58089
}
|
class ____(Response):
"""
Response of workers.get_all endpoint.
:param workers:
:type workers: Sequence[Worker]
"""
_service = "workers"
_action = "get_all"
_version = "2.20"
_schema = {
"definitions": {
"current_task_entry": {
"properties": {
"id": {"description": "ID", "type": ["string", "null"]},
"last_iteration": {
"description": "Last task iteration",
"type": ["integer", "null"],
},
"name": {"description": "Name", "type": ["string", "null"]},
"running_time": {
"description": "Task running time",
"type": ["integer", "null"],
},
},
"type": "object",
},
"id_name_entry": {
"properties": {
"id": {"description": "ID", "type": ["string", "null"]},
"name": {"description": "Name", "type": ["string", "null"]},
},
"type": "object",
},
"queue_entry": {
"properties": {
"id": {"description": "ID", "type": ["string", "null"]},
"name": {"description": "Name", "type": ["string", "null"]},
"next_task": {
"description": "Next task in the queue",
"oneOf": [
{"$ref": "#/definitions/id_name_entry"},
{"type": "null"},
],
},
"num_tasks": {
"description": "Number of task entries in the queue",
"type": ["integer", "null"],
},
},
"type": "object",
},
"worker": {
"properties": {
"company": {
"description": "Associated company",
"oneOf": [
{"$ref": "#/definitions/id_name_entry"},
{"type": "null"},
],
},
"id": {"description": "Worker ID", "type": ["string", "null"]},
"ip": {
"description": "IP of the worker",
"type": ["string", "null"],
},
"key": {
"description": "Worker entry key",
"type": ["string", "null"],
},
"last_activity_time": {
"description": "Last activity time (even if an error occurred)",
"format": "date-time",
"type": ["string", "null"],
},
"last_report_time": {
"description": "Last successful report time",
"format": "date-time",
"type": ["string", "null"],
},
"project": {
"description": "Project in which currently executing task resides",
"oneOf": [
{"$ref": "#/definitions/id_name_entry"},
{"type": "null"},
],
},
"queue": {
"description": "Queue from which running task was taken",
"oneOf": [
{"$ref": "#/definitions/queue_entry"},
{"type": "null"},
],
},
"queues": {
"description": "List of queues on which the worker is listening",
"items": {"$ref": "#/definitions/queue_entry"},
"type": ["array", "null"],
},
"register_time": {
"description": "Registration time",
"format": "date-time",
"type": ["string", "null"],
},
"tags": {
"description": "User tags for the worker",
"items": {"type": "string"},
"type": ["array", "null"],
},
"task": {
"description": "Task currently being run by the worker",
"oneOf": [
{"$ref": "#/definitions/current_task_entry"},
{"type": "null"},
],
},
"user": {
"description": "Associated user (under whose credentials are used by the worker daemon)",
"oneOf": [
{"$ref": "#/definitions/id_name_entry"},
{"type": "null"},
],
},
},
"type": "object",
},
},
"properties": {
"workers": {
"items": {"$ref": "#/definitions/worker"},
"type": ["array", "null"],
}
},
"type": "object",
}
def __init__(self, workers: Optional[List[Any]] = None, **kwargs: Any) -> None:
super(GetAllResponse, self).__init__(**kwargs)
self.workers = workers
@schema_property("workers")
def workers(self) -> Optional[List[Any]]:
return self._property_workers
@workers.setter
def workers(self, value: Optional[List[Any]]) -> None:
if value is None:
self._property_workers = None
return
self.assert_isinstance(value, "workers", (list, tuple))
if any((isinstance(v, dict) for v in value)):
value = [Worker.from_dict(v) if isinstance(v, dict) else v for v in value]
else:
self.assert_isinstance(value, "workers", Worker, is_array=True)
self._property_workers = value
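# --- Editor's note: illustrative sketch, not part of the generated service code. ---
# The ``workers`` setter above coerces raw dicts into typed objects; the same
# pattern in isolation (``_Item`` stands in for ``Worker``):
class _Item:
    def __init__(self, name):
        self.name = name

    @classmethod
    def from_dict(cls, d):
        return cls(d.get("name"))

raw = [{"name": "w1"}, _Item("w2")]
coerced = [_Item.from_dict(v) if isinstance(v, dict) else v for v in raw]
assert [item.name for item in coerced] == ["w1", "w2"]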
|
GetAllResponse
|
python
|
anthropics__anthropic-sdk-python
|
src/anthropic/types/beta/beta_content_block_source_param.py
|
{
"start": 351,
"end": 530
}
|
class ____(TypedDict, total=False):
content: Required[Union[str, Iterable[BetaContentBlockSourceContentParam]]]
type: Required[Literal["content"]]
|
BetaContentBlockSourceParam
|
python
|
python__mypy
|
mypy/nodes.py
|
{
"start": 83196,
"end": 83633
}
|
class ____(Expression):
"""Tuple literal expression (..., ...)
Also lvalue sequences (..., ...) and [..., ...]"""
__slots__ = ("items",)
__match_args__ = ("items",)
items: list[Expression]
def __init__(self, items: list[Expression]) -> None:
super().__init__()
self.items = items
def accept(self, visitor: ExpressionVisitor[T]) -> T:
return visitor.visit_tuple_expr(self)
|
TupleExpr
|
python
|
huggingface__transformers
|
src/transformers/models/helium/modeling_helium.py
|
{
"start": 2708,
"end": 5708
}
|
class ____(nn.Module):
inv_freq: torch.Tensor # fix linting for `register_buffer`
def __init__(self, config: HeliumConfig, device=None):
super().__init__()
self.max_seq_len_cached = config.max_position_embeddings
self.original_max_seq_len = config.max_position_embeddings
self.config = config
self.rope_type = self.config.rope_parameters["rope_type"]
rope_init_fn: Callable = self.compute_default_rope_parameters
if self.rope_type != "default":
rope_init_fn = ROPE_INIT_FUNCTIONS[self.rope_type]
inv_freq, self.attention_scaling = rope_init_fn(self.config, device)
self.register_buffer("inv_freq", inv_freq, persistent=False)
self.original_inv_freq = inv_freq
@staticmethod
def compute_default_rope_parameters(
config: Optional[HeliumConfig] = None,
device: Optional["torch.device"] = None,
seq_len: Optional[int] = None,
) -> tuple["torch.Tensor", float]:
"""
Computes the inverse frequencies according to the original RoPE implementation
Args:
config ([`~transformers.PreTrainedConfig`]):
The model configuration.
device (`torch.device`):
The device to use for initialization of the inverse frequencies.
seq_len (`int`, *optional*):
The current sequence length. Unused for this type of RoPE.
Returns:
Tuple of (`torch.Tensor`, `float`), containing the inverse frequencies for the RoPE embeddings and the
post-processing scaling factor applied to the computed cos/sin (unused in this type of RoPE).
"""
base = config.rope_parameters["rope_theta"]
dim = getattr(config, "head_dim", None) or config.hidden_size // config.num_attention_heads
attention_factor = 1.0 # Unused in this type of RoPE
# Compute the inverse frequencies
inv_freq = 1.0 / (
base ** (torch.arange(0, dim, 2, dtype=torch.int64).to(device=device, dtype=torch.float) / dim)
)
return inv_freq, attention_factor
@torch.no_grad()
@dynamic_rope_update # power user: used with advanced RoPE types (e.g. dynamic rope)
def forward(self, x, position_ids):
inv_freq_expanded = self.inv_freq[None, :, None].float().expand(position_ids.shape[0], -1, 1).to(x.device)
position_ids_expanded = position_ids[:, None, :].float()
device_type = x.device.type if isinstance(x.device.type, str) and x.device.type != "mps" else "cpu"
with torch.autocast(device_type=device_type, enabled=False): # Force float32
freqs = (inv_freq_expanded.float() @ position_ids_expanded.float()).transpose(1, 2)
emb = torch.cat((freqs, freqs), dim=-1)
cos = emb.cos() * self.attention_scaling
sin = emb.sin() * self.attention_scaling
return cos.to(dtype=x.dtype), sin.to(dtype=x.dtype)
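# --- Editor's note: illustrative sketch, not part of the original model file. ---
# ``compute_default_rope_parameters`` builds inv_freq[i] = theta ** (-2i / d);
# a torch-free spot check with d=4, theta=10000:
base, dim = 10000.0, 4
inv_freq = [base ** (-(2 * i) / dim) for i in range(dim // 2)]
assert inv_freq[0] == 1.0
assert abs(inv_freq[1] - 0.01) < 1e-12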
|
HeliumRotaryEmbedding
|
python
|
jmcnamara__XlsxWriter
|
xlsxwriter/test/comparison/test_chart_layout02.py
|
{
"start": 315,
"end": 1670
}
|
class ____(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.set_filename("chart_layout02.xlsx")
def test_create_file(self):
"""Test the creation of an XlsxWriter file with user defined layout."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({"type": "column"})
chart.axis_ids = [68311296, 69198208]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column("A1", data[0])
worksheet.write_column("B1", data[1])
worksheet.write_column("C1", data[2])
chart.add_series({"values": "=Sheet1!$A$1:$A$5"})
chart.add_series({"values": "=Sheet1!$B$1:$B$5"})
chart.add_series({"values": "=Sheet1!$C$1:$C$5"})
chart.set_legend(
{
"layout": {
"x": 0.80197353455818021,
"y": 0.37442403032954213,
"width": 0.12858202099737534,
"height": 0.25115157480314959,
}
}
)
worksheet.insert_chart("E9", chart)
workbook.close()
self.assertExcelEqual()
|
TestCompareXLSXFiles
|
python
|
networkx__networkx
|
networkx/exception.py
|
{
"start": 1993,
"end": 2136
}
|
class ____(NetworkXException):
"""Raised if a graph has a cycle when an algorithm expects that it
will have no cycles.
"""
|
HasACycle
|
python
|
vyperlang__vyper
|
vyper/exceptions.py
|
{
"start": 12118,
"end": 12218
}
|
class ____(VyperInternalException):
"""Invalid code generated during codegen phase"""
|
CodegenPanic
|
python
|
microsoft__pyright
|
packages/pyright-internal/src/tests/samples/callbackProtocol1.py
|
{
"start": 1343,
"end": 1565
}
|
class ____:
def __call__(self, *vals: bytes, maxlen: int | None = None) -> list[bytes]:
return []
# This should generate an error because NotProto is not a protocol class.
not_proto: NotProto = good_cb
|
NotProto
|
python
|
tensorflow__tensorflow
|
tensorflow/python/ops/resource_variable_ops.py
|
{
"start": 105454,
"end": 105665
}
|
class ____(StructurePattern):
"""Represents a singleton leaf StructurePattern."""
def __new__(cls):
if not hasattr(cls, "instance"):
cls.instance = super().__new__(cls)
return cls.instance
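# --- Editor's note: illustrative sketch, not part of the original source. ---
# The ``__new__`` override above is the classic singleton idiom: every
# construction returns the one cached instance.
class _Singleton:
    def __new__(cls):
        if not hasattr(cls, "instance"):
            cls.instance = super().__new__(cls)
        return cls.instance

assert _Singleton() is _Singleton()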
|
PLeaf
|
python
|
davidhalter__parso
|
parso/python/tree.py
|
{
"start": 29817,
"end": 29874
}
|
class ____(KeywordStatement):
__slots__ = ()
|
ReturnStmt
|
python
|
pyqtgraph__pyqtgraph
|
pyqtgraph/parametertree/parameterTypes/actiongroup.py
|
{
"start": 1276,
"end": 1924
}
|
class ____(GroupParameter):
itemClass = ActionGroupParameterItem
sigActivated = QtCore.Signal(object)
def __init__(self, **opts):
opts.setdefault("button", {})
super().__init__(**opts)
@QtCore.Slot()
def activate(self):
self.sigActivated.emit(self)
self.emitStateChanged('activated', None)
def setButtonOpts(self, **opts):
"""
Update individual button options without replacing the entire
button definition.
"""
buttonOpts = self.opts.get("button", {}).copy()
buttonOpts.update(opts)
self.setOpts(button=buttonOpts)
|
ActionGroupParameter
|
python
|
kubernetes-client__python
|
kubernetes/base/stream/ws_client_test.py
|
{
"start": 2575,
"end": 7177
}
|
class ____(unittest.TestCase):
def test_websocket_client(self):
for url, ws_url in [
('http://localhost/api', 'ws://localhost/api'),
('https://localhost/api', 'wss://localhost/api'),
('https://domain.com/api', 'wss://domain.com/api'),
('https://api.domain.com/api', 'wss://api.domain.com/api'),
('http://api.domain.com', 'ws://api.domain.com'),
('https://api.domain.com', 'wss://api.domain.com'),
('http://api.domain.com/', 'ws://api.domain.com/'),
('https://api.domain.com/', 'wss://api.domain.com/'),
]:
self.assertEqual(get_websocket_url(url), ws_url)
def test_websocket_proxycare(self):
for proxy, idpass, no_proxy, expect_host, expect_port, expect_auth, expect_noproxy in [
( None, None, None, None, None, None, None ),
( 'http://proxy.example.com:8080/', None, None, 'proxy.example.com', 8080, None, None ),
( 'http://proxy.example.com:8080/', 'user:pass', None, 'proxy.example.com', 8080, ('user','pass'), None),
( 'http://proxy.example.com:8080/', 'user:pass', '', 'proxy.example.com', 8080, ('user','pass'), None),
( 'http://proxy.example.com:8080/', 'user:pass', '*', 'proxy.example.com', 8080, ('user','pass'), ['*']),
( 'http://proxy.example.com:8080/', 'user:pass', '.example.com', 'proxy.example.com', 8080, ('user','pass'), ['.example.com']),
( 'http://proxy.example.com:8080/', 'user:pass', 'localhost,.local,.example.com', 'proxy.example.com', 8080, ('user','pass'), ['localhost','.local','.example.com']),
]:
# input setup
cfg = Configuration()
if proxy:
cfg.proxy = proxy
if idpass:
cfg.proxy_headers = urllib3.util.make_headers(proxy_basic_auth=idpass)
if no_proxy is not None:
cfg.no_proxy = no_proxy
connect_opts = websocket_proxycare({}, cfg, None, None)
assert dictval(connect_opts, 'http_proxy_host') == expect_host
assert dictval(connect_opts, 'http_proxy_port') == expect_port
assert dictval(connect_opts, 'http_proxy_auth') == expect_auth
assert dictval(connect_opts, 'http_no_proxy') == expect_noproxy
@pytest.fixture(scope="module")
def dummy_proxy():
        # Dummy proxy.
proxy = DummyProxy(port=8888)
proxy.start()
yield proxy
@pytest.fixture(autouse=True)
def clear_proxy_env(monkeypatch):
for var in ("HTTP_PROXY", "http_proxy", "HTTPS_PROXY", "https_proxy", "NO_PROXY", "no_proxy"):
monkeypatch.delenv(var, raising=False)
def apply_proxy_to_conf():
        # Apply the HTTPS_PROXY env var and set the configuration as the global default.
cfg = client.Configuration.get_default_copy()
cfg.proxy = os.getenv("HTTPS_PROXY")
cfg.no_proxy = os.getenv("NO_PROXY", "")
client.Configuration.set_default(cfg)
def test_rest_call_ignores_env(dummy_proxy, monkeypatch):
# HTTPS_PROXY to dummy proxy
monkeypatch.setenv("HTTPS_PROXY", "http://127.0.0.1:8888")
# Avoid real HTTP request
monkeypatch.setattr(client.CoreV1Api, "list_namespace", lambda self, *_args, **_kwargs: None)
# Load config using kubeconfig
config.load_kube_config(config_file=os.environ["KUBECONFIG"])
apply_proxy_to_conf()
# HTTPS_PROXY to dummy proxy
monkeypatch.setenv("HTTPS_PROXY", "http://127.0.0.1:8888")
config.load_kube_config(config_file=os.environ["KUBECONFIG"])
apply_proxy_to_conf()
v1 = client.CoreV1Api()
v1.list_namespace(_preload_content=False)
assert not dummy_proxy.received_connect, "REST path should ignore HTTPS_PROXY"
def test_websocket_call_honors_env(dummy_proxy, monkeypatch):
# set HTTPS_PROXY again
monkeypatch.setenv("HTTPS_PROXY", "http://127.0.0.1:8888")
# Load kubeconfig
config.load_kube_config(config_file=os.environ["KUBECONFIG"])
apply_proxy_to_conf()
opts = websocket_proxycare({}, client.Configuration.get_default_copy(), None, None)
assert opts.get('http_proxy_host') == '127.0.0.1'
assert opts.get('http_proxy_port') == 8888
# Optionally verify no_proxy parsing
assert opts.get('http_no_proxy') is None
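# --- Editor's note: illustrative sketch, not part of the original test file. ---
# The http -> ws scheme mapping exercised in test_websocket_client is a plain
# prefix swap (``_to_ws`` is a made-up helper, not the library's implementation):
def _to_ws(url):
    if url.startswith("https://"):
        return "wss://" + url[len("https://"):]
    if url.startswith("http://"):
        return "ws://" + url[len("http://"):]
    return url

assert _to_ws("https://api.domain.com/") == "wss://api.domain.com/"
assert _to_ws("http://localhost/api") == "ws://localhost/api"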
if __name__ == '__main__':
unittest.main()
|
WSClientTest
|
python
|
huggingface__transformers
|
src/transformers/models/mistral/modular_mistral.py
|
{
"start": 4081,
"end": 4352
}
|
class ____(LlamaDecoderLayer):
def __init__(self, config: MistralConfig, layer_idx: int):
super().__init__(config, layer_idx)
self.self_attn = MistralAttention(config=config, layer_idx=layer_idx)
self.mlp = MistralMLP(config)
|
MistralDecoderLayer
|
python
|
astropy__astropy
|
astropy/coordinates/tests/test_representation.py
|
{
"start": 47988,
"end": 54694
}
|
class ____:
def test_name(self):
assert CylindricalRepresentation.name == "cylindrical"
assert CylindricalRepresentation.name in REPRESENTATION_CLASSES
def test_empty_init(self):
with pytest.raises(TypeError) as exc:
s = CylindricalRepresentation()
def test_init_quantity(self):
s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc)
assert s1.rho.unit is u.kpc
assert s1.phi.unit is u.deg
assert s1.z.unit is u.kpc
assert_allclose(s1.rho.value, 1)
assert_allclose(s1.phi.value, 2)
assert_allclose(s1.z.value, 3)
def test_init_array(self):
s1 = CylindricalRepresentation(
rho=[1, 2, 3] * u.pc, phi=[2, 3, 4] * u.deg, z=[3, 4, 5] * u.kpc
)
assert s1.rho.unit is u.pc
assert s1.phi.unit is u.deg
assert s1.z.unit is u.kpc
assert_allclose(s1.rho.value, [1, 2, 3])
assert_allclose(s1.phi.value, [2, 3, 4])
assert_allclose(s1.z.value, [3, 4, 5])
def test_init_array_nocopy(self):
rho = [8, 9, 10] * u.pc
phi = [5, 6, 7] * u.deg
z = [2, 3, 4] * u.kpc
s1 = CylindricalRepresentation(rho=rho, phi=phi, z=z, copy=False)
rho[:] = [9, 2, 3] * u.kpc
phi[:] = [1, 2, 3] * u.arcmin
z[:] = [-2, 3, 8] * u.kpc
assert_allclose_quantity(rho, s1.rho)
assert_allclose_quantity(phi, s1.phi)
assert_allclose_quantity(z, s1.z)
def test_reprobj(self):
s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc)
s2 = CylindricalRepresentation.from_representation(s1)
assert s2.rho == 1 * u.kpc
assert s2.phi == 2 * u.deg
assert s2.z == 3 * u.kpc
s3 = CylindricalRepresentation(s1)
assert representation_equal(s3, s1)
def test_broadcasting(self):
s1 = CylindricalRepresentation(
rho=[1, 2] * u.kpc, phi=[3, 4] * u.deg, z=5 * u.kpc
)
assert s1.rho.unit == u.kpc
assert s1.phi.unit == u.deg
assert s1.z.unit == u.kpc
assert_allclose(s1.rho.value, [1, 2])
assert_allclose(s1.phi.value, [3, 4])
assert_allclose(s1.z.value, [5, 5])
def test_broadcasting_mismatch(self):
with pytest.raises(
ValueError, match="Input parameters rho, phi, and z cannot be broadcast"
):
s1 = CylindricalRepresentation(
rho=[1, 2] * u.kpc, phi=[3, 4] * u.deg, z=[5, 6, 7] * u.kpc
)
def test_readonly(self):
s1 = CylindricalRepresentation(rho=1 * u.kpc, phi=20 * u.deg, z=3 * u.kpc)
with pytest.raises(AttributeError):
s1.rho = 1.0 * u.kpc
with pytest.raises(AttributeError):
s1.phi = 20 * u.deg
with pytest.raises(AttributeError):
s1.z = 1.0 * u.kpc
    def test_unit_mismatch(self):
q_len = u.Quantity([1], u.kpc)
q_nonlen = u.Quantity([1], u.kg)
with pytest.raises(u.UnitsError) as exc:
s1 = CylindricalRepresentation(rho=q_nonlen, phi=10 * u.deg, z=q_len)
assert exc.value.args[0] == "rho and z should have matching physical types"
with pytest.raises(u.UnitsError) as exc:
s1 = CylindricalRepresentation(rho=q_len, phi=10 * u.deg, z=q_nonlen)
assert exc.value.args[0] == "rho and z should have matching physical types"
def test_getitem(self):
s = CylindricalRepresentation(
rho=np.arange(10) * u.pc, phi=-np.arange(10) * u.deg, z=1 * u.kpc
)
s_slc = s[2:8:2]
assert_allclose_quantity(s_slc.rho, [2, 4, 6] * u.pc)
assert_allclose_quantity(s_slc.phi, [-2, -4, -6] * u.deg)
assert_allclose_quantity(s_slc.z, [1, 1, 1] * u.kpc)
def test_getitem_scalar(self):
s = CylindricalRepresentation(rho=1 * u.pc, phi=-2 * u.deg, z=3 * u.kpc)
with pytest.raises(TypeError):
s_slc = s[0]
def test_transform(self):
s1 = CylindricalRepresentation(
phi=[1, 2] * u.deg, z=[3, 4] * u.pc, rho=[5, 6] * u.kpc
)
s2 = s1.transform(matrices["rotation"])
assert_allclose_quantity(s2.phi, s1.phi + 10 * u.deg)
assert_allclose_quantity(s2.z, s1.z)
assert_allclose_quantity(s2.rho, s1.rho)
assert s2.phi.unit is u.rad
assert s2.z.unit is u.kpc
assert s2.rho.unit is u.kpc
        # now with a non-rotation matrix
s3 = s1.transform(matrices["general"])
expected = (s1.to_cartesian().transform(matrices["general"])).represent_as(
CylindricalRepresentation
)
assert_allclose_quantity(s3.phi, expected.phi)
assert_allclose_quantity(s3.z, expected.z)
assert_allclose_quantity(s3.rho, expected.rho)
def test_representation_shortcuts(self):
"""Test that shortcuts in ``represent_as`` don't fail."""
difs = CylindricalDifferential(
d_rho=4 * u.km / u.s, d_phi=5 * u.mas / u.yr, d_z=6 * u.km / u.s
)
cyl = CylindricalRepresentation(
rho=1 * u.kpc, phi=2 * u.deg, z=3 * u.kpc, differentials={"s": difs}
)
# PhysicsSpherical Representation
got = cyl.represent_as(
PhysicsSphericalRepresentation, PhysicsSphericalDifferential
)
expected = BaseRepresentation.represent_as(
cyl, PhysicsSphericalRepresentation, PhysicsSphericalDifferential
)
assert_allclose_quantity(got.r, expected.r)
assert_allclose_quantity(got.phi, expected.phi)
assert_allclose_quantity(got.theta, expected.theta)
assert representation_equal_up_to_angular_type(got, expected)
def test_to_physicsspherical_at_the_origin(self):
"""Test that the transformation to physicsspherical at the origin preserves phi."""
cyl = CylindricalRepresentation(
rho=0 * u.kpc,
phi=23.5 * u.deg,
z=3 * u.kpc,
)
sph = cyl.represent_as(PhysicsSphericalRepresentation)
assert_allclose(sph.r, 3 * u.kpc)
assert_allclose(sph.theta, 0 * u.deg)
assert cyl.phi == 23.5 * u.deg # phi is preserved exactly
def test_to_physicsspherical_small_theta(self):
"""Test that the transformation to physicsspherical is accurate for small theta."""
cyl = CylindricalRepresentation(rho=1 * u.m, phi=10 * u.deg, z=1e8 * u.m)
got = cyl.represent_as(PhysicsSphericalRepresentation)
assert_allclose(got.r, 1e8 * u.m)
assert_allclose(got.phi, 10 * u.deg)
assert_allclose(got.theta, 1e-8 * u.rad)
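# --- Editor's note: illustrative sketch, not part of the original test file. ---
# The cylindrical -> physics-spherical conversion checked above reduces to
# r = hypot(rho, z), theta = atan2(rho, z), with phi carried over unchanged:
import math

rho, z = 1.0, 1e8
assert math.isclose(math.hypot(rho, z), 1e8)   # r for the small-theta case
assert math.isclose(math.atan2(rho, z), 1e-8)  # theta ~ rho / z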
|
TestCylindricalRepresentation
|
python
|
streamlit__streamlit
|
lib/tests/streamlit/web/server/upload_file_request_handler_test.py
|
{
"start": 1203,
"end": 1357
}
|
class ____(NamedTuple):
name: str
data: bytes
def _get_filename(file):
"""Sort key for lists of UploadedFiles"""
return file.name
|
MockFile
|
python
|
modin-project__modin
|
modin/core/dataframe/algebra/default2pandas/default.py
|
{
"start": 1879,
"end": 10405
}
|
class ____(Operator):
"""
Builder for default-to-pandas methods.
Attributes
----------
OBJECT_TYPE : str
Object type name that will be shown in default-to-pandas warning message.
DEFAULT_OBJECT_TYPE : object
Default place to search for a function.
"""
OBJECT_TYPE = "DataFrame"
DEFAULT_OBJECT_TYPE = ObjTypeDeterminer
@classmethod
def register(cls, func, obj_type=None, inplace=None, fn_name=None):
"""
        Build a function that falls back to the default pandas implementation for the passed `func`.
Parameters
----------
func : callable or str,
            Function to apply to the frame cast to pandas, or to its property
            accessed by ``cls.frame_wrapper``.
obj_type : object, optional
If `func` is a string with a function name then `obj_type` provides an
object to search function in.
inplace : bool, optional
If True return an object to which `func` was applied, otherwise return
the result of `func`.
fn_name : str, optional
Function name which will be shown in default-to-pandas warning message.
            If not specified, the name will be deduced from `func`.
Returns
-------
callable
            Function that takes a query compiler, falls back to pandas, and applies
            `func` to the frame cast to pandas or to its property accessed by
            ``cls.frame_wrapper``.
"""
if isinstance(func, str):
if obj_type is None:
obj_type = cls.DEFAULT_OBJECT_TYPE
fn = getattr(obj_type, func)
else:
fn = func
if type(fn) is property:
if fn_name is None and hasattr(fn, "fget"):
# When `fn` is a property, `str(fn)` will be something like
# "<property object at 0x7f8671e09d10>". We instead check its `fget` method to get
# the name of the property.
# Note that this method is still imperfect because we cannot get the class name
# of the property. For example, we can only get "hour" from `Series.dt.hour`.
fn_name = f"<property fget:{getattr(fn.fget, '__name__', 'noname')}>"
fn = cls.build_property_wrapper(fn)
else:
fn_name = getattr(fn, "__name__", str(fn)) if fn_name is None else fn_name
def applyier(df, *args, **kwargs):
"""
            Apply the target function to the frame cast to pandas.
            This function is applied directly to the frame cast to pandas; it
            executes the target function on it and processes the result so that a
            valid query compiler can be created from it.
"""
# pandas default implementation doesn't know how to handle `dtypes` keyword argument
kwargs.pop("dtypes", None)
df = cls.frame_wrapper(df)
result = fn(df, *args, **kwargs)
if (
not isinstance(result, pandas.Series)
and not isinstance(result, pandas.DataFrame)
and func not in ("to_numpy", pandas.DataFrame.to_numpy)
and func not in ("align", pandas.DataFrame.align)
and func not in ("divmod", pandas.Series.divmod)
and func not in ("rdivmod", pandas.Series.rdivmod)
and func not in ("to_list", pandas.Series.to_list)
and func not in ("corr", pandas.Series.corr)
and func not in ("to_dict", pandas.Series.to_dict)
and func not in ("mean", pandas.DataFrame.mean)
and func not in ("median", pandas.DataFrame.median)
and func not in ("skew", pandas.DataFrame.skew)
and func not in ("kurt", pandas.DataFrame.kurt)
):
# When applying a DatetimeProperties or TimedeltaProperties function,
# if we don't specify the dtype for the DataFrame, the frame might
# get the wrong dtype, e.g. for to_pydatetime in
# https://github.com/modin-project/modin/issues/4436
astype_kwargs = {}
dtype = getattr(result, "dtype", None)
if dtype and isinstance(
df,
(
pandas.core.indexes.accessors.DatetimeProperties,
pandas.core.indexes.accessors.TimedeltaProperties,
),
):
astype_kwargs["dtype"] = dtype
result = (
pandas.DataFrame(result, **astype_kwargs)
if is_list_like(result)
else pandas.DataFrame([result], **astype_kwargs)
)
if isinstance(result, pandas.Series):
if result.name is None:
result.name = MODIN_UNNAMED_SERIES_LABEL
result = result.to_frame()
inplace_method = kwargs.get("inplace", False)
if inplace is not None:
inplace_method = inplace
return result if not inplace_method else df
return cls.build_wrapper(applyier, fn_name)
@classmethod
# FIXME: this method is almost a duplicate of `cls.build_default_to_pandas`.
# Those two methods should be merged into a single one.
def build_wrapper(cls, fn, fn_name):
"""
        Build a function that falls back to pandas for the passed `fn`.
        In comparison with ``cls.build_default_to_pandas``, this method also
        casts function arguments to pandas before falling back.
Parameters
----------
fn : callable
Function to apply to the defaulted frame.
fn_name : str
Function name which will be shown in default-to-pandas warning message.
Returns
-------
callable
            Method that falls back to pandas and applies `fn` to the pandas frame.
"""
wrapper = cls.build_default_to_pandas(fn, fn_name)
def args_cast(self, *args, **kwargs):
"""
Preprocess `default_to_pandas` function arguments and apply default function.
            Cast all Modin objects contained in the function arguments to their pandas representations.
"""
args = try_cast_to_pandas(args)
kwargs = try_cast_to_pandas(kwargs)
return wrapper(self, *args, **kwargs)
return args_cast
@classmethod
def build_property_wrapper(cls, prop):
"""
Build function that accesses specified property of the frame.
Parameters
----------
prop : str
Property name to access.
Returns
-------
callable
Function that takes DataFrame and returns its value of `prop` property.
"""
def property_wrapper(df):
"""Get specified property of the passed object."""
return prop.fget(df)
return property_wrapper
@classmethod
def build_default_to_pandas(cls, fn, fn_name):
"""
        Build a function that falls back to pandas for the passed `fn`.
Parameters
----------
fn : callable
Function to apply to the defaulted frame.
fn_name : str
Function name which will be shown in default-to-pandas warning message.
Returns
-------
callable
            Method that falls back to pandas and applies `fn` to the pandas frame.
"""
fn.__name__ = f"<function {cls.OBJECT_TYPE}.{fn_name}>"
def wrapper(self, *args, **kwargs):
"""Do fallback to pandas for the specified function."""
return self.default_to_pandas(fn, *args, **kwargs)
return wrapper
@classmethod
def frame_wrapper(cls, df):
"""
        Extract the frame property to apply the function on.
        This method is executed on the frame cast to pandas right before a
        function passed to `register` is applied; overriding it in a child class
        makes it possible to transform the frame or access its properties.
Parameters
----------
df : pandas.DataFrame
Returns
-------
pandas.DataFrame
Notes
-----
Being a base implementation, this particular method does nothing with passed frame.
"""
return df
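# --- Editor's note: illustrative sketch, not part of the original module. ---
# The register/build_wrapper machinery above boils down to: resolve a method by
# name, call it on the pandas object, and wrap non-frame results so a query
# compiler can be built (``_default_to_pandas`` is a made-up reduction):
import pandas

def _default_to_pandas(func_name):
    def applier(df, *args, **kwargs):
        result = getattr(df, func_name)(*args, **kwargs)
        if isinstance(result, (pandas.Series, pandas.DataFrame)):
            return result
        return pandas.DataFrame([result])
    return applier

summed = _default_to_pandas("sum")(pandas.DataFrame({"a": [1, 2]}))
assert summed.iloc[0] == 3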
|
DefaultMethod
|
python
|
pypa__pipenv
|
pipenv/patched/pip/_internal/models/link.py
|
{
"start": 6368,
"end": 18789
}
|
class ____:
"""Represents a parsed link from a Package Index's simple URL"""
__slots__ = [
"_parsed_url",
"_url",
"_path",
"_hashes",
"comes_from",
"requires_python",
"yanked_reason",
"metadata_file_data",
"cache_link_parsing",
"egg_fragment",
]
def __init__(
self,
url: str,
comes_from: Optional[Union[str, "IndexContent"]] = None,
requires_python: Optional[str] = None,
yanked_reason: Optional[str] = None,
metadata_file_data: Optional[MetadataFile] = None,
cache_link_parsing: bool = True,
hashes: Optional[Mapping[str, str]] = None,
) -> None:
"""
:param url: url of the resource pointed to (href of the link)
:param comes_from: instance of IndexContent where the link was found,
or string.
:param requires_python: String containing the `Requires-Python`
metadata field, specified in PEP 345. This may be specified by
a data-requires-python attribute in the HTML link tag, as
described in PEP 503.
:param yanked_reason: the reason the file has been yanked, if the
file has been yanked, or None if the file hasn't been yanked.
This is the value of the "data-yanked" attribute, if present, in
a simple repository HTML link. If the file has been yanked but
no reason was provided, this should be the empty string. See
PEP 592 for more information and the specification.
:param metadata_file_data: the metadata attached to the file, or None if
no such metadata is provided. This argument, if not None, indicates
that a separate metadata file exists, and also optionally supplies
hashes for that file.
:param cache_link_parsing: A flag that is used elsewhere to determine
whether resources retrieved from this link should be cached. PyPI
URLs should generally have this set to False, for example.
:param hashes: A mapping of hash names to digests to allow us to
determine the validity of a download.
"""
# The comes_from, requires_python, and metadata_file_data arguments are
# only used by classmethods of this class, and are not used in client
# code directly.
# url can be a UNC windows share
if url.startswith("\\\\"):
url = path_to_url(url)
self._parsed_url = urllib.parse.urlsplit(url)
# Store the url as a private attribute to prevent accidentally
# trying to set a new value.
self._url = url
# The .path property is hot, so calculate its value ahead of time.
self._path = urllib.parse.unquote(self._parsed_url.path)
link_hash = LinkHash.find_hash_url_fragment(url)
hashes_from_link = {} if link_hash is None else link_hash.as_dict()
if hashes is None:
self._hashes = hashes_from_link
else:
self._hashes = {**hashes, **hashes_from_link}
self.comes_from = comes_from
self.requires_python = requires_python if requires_python else None
self.yanked_reason = yanked_reason
self.metadata_file_data = metadata_file_data
self.cache_link_parsing = cache_link_parsing
self.egg_fragment = self._egg_fragment()
@classmethod
def from_json(
cls,
file_data: Dict[str, Any],
page_url: str,
) -> Optional["Link"]:
"""
        Convert a PyPI JSON document from a simple repository page into a Link.
"""
file_url = file_data.get("url")
if file_url is None:
return None
url = _ensure_quoted_url(_absolute_link_url(page_url, file_url))
pyrequire = file_data.get("requires-python")
yanked_reason = file_data.get("yanked")
hashes = file_data.get("hashes", {})
# PEP 714: Indexes must use the name core-metadata, but
# clients should support the old name as a fallback for compatibility.
metadata_info = file_data.get("core-metadata")
if metadata_info is None:
metadata_info = file_data.get("dist-info-metadata")
# The metadata info value may be a boolean, or a dict of hashes.
if isinstance(metadata_info, dict):
# The file exists, and hashes have been supplied
metadata_file_data = MetadataFile(supported_hashes(metadata_info))
elif metadata_info:
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
else:
# False or not present: the file does not exist
metadata_file_data = None
# The Link.yanked_reason expects an empty string instead of a boolean.
if yanked_reason and not isinstance(yanked_reason, str):
yanked_reason = ""
# The Link.yanked_reason expects None instead of False.
elif not yanked_reason:
yanked_reason = None
return cls(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
hashes=hashes,
metadata_file_data=metadata_file_data,
)
@classmethod
def from_element(
cls,
anchor_attribs: Dict[str, Optional[str]],
page_url: str,
base_url: str,
) -> Optional["Link"]:
"""
Convert an anchor element's attributes in a simple repository page to a Link.
"""
href = anchor_attribs.get("href")
if not href:
return None
url = _ensure_quoted_url(_absolute_link_url(base_url, href))
pyrequire = anchor_attribs.get("data-requires-python")
yanked_reason = anchor_attribs.get("data-yanked")
# PEP 714: Indexes must use the name data-core-metadata, but
# clients should support the old name as a fallback for compatibility.
metadata_info = anchor_attribs.get("data-core-metadata")
if metadata_info is None:
metadata_info = anchor_attribs.get("data-dist-info-metadata")
# The metadata info value may be the string "true", or a string of
# the form "hashname=hashval"
if metadata_info == "true":
# The file exists, but there are no hashes
metadata_file_data = MetadataFile(None)
elif metadata_info is None:
# The file does not exist
metadata_file_data = None
else:
# The file exists, and hashes have been supplied
hashname, sep, hashval = metadata_info.partition("=")
if sep == "=":
metadata_file_data = MetadataFile(supported_hashes({hashname: hashval}))
else:
# Error - data is wrong. Treat as no hashes supplied.
logger.debug(
"Index returned invalid data-dist-info-metadata value: %s",
metadata_info,
)
metadata_file_data = MetadataFile(None)
return cls(
url,
comes_from=page_url,
requires_python=pyrequire,
yanked_reason=yanked_reason,
metadata_file_data=metadata_file_data,
)
def __str__(self) -> str:
if self.requires_python:
rp = f" (requires-python:{self.requires_python})"
else:
rp = ""
if self.comes_from:
return f"{self.redacted_url} (from {self.comes_from}){rp}"
else:
return self.redacted_url
def __repr__(self) -> str:
return f"<Link {self}>"
def __hash__(self) -> int:
return hash(self.url)
def __eq__(self, other: Any) -> bool:
if not isinstance(other, Link):
return NotImplemented
return self.url == other.url
def __lt__(self, other: Any) -> bool:
if not isinstance(other, Link):
return NotImplemented
return self.url < other.url
@property
def url(self) -> str:
return self._url
@property
def redacted_url(self) -> str:
return redact_auth_from_url(self.url)
@property
def filename(self) -> str:
path = self.path.rstrip("/")
name = posixpath.basename(path)
if not name:
# Make sure we don't leak auth information if the netloc
# includes a username and password.
netloc, user_pass = split_auth_from_netloc(self.netloc)
return netloc
name = urllib.parse.unquote(name)
assert name, f"URL {self._url!r} produced no filename"
return name
@property
def file_path(self) -> str:
return url_to_path(self.url)
@property
def scheme(self) -> str:
return self._parsed_url.scheme
@property
def netloc(self) -> str:
"""
This can contain auth information.
"""
return self._parsed_url.netloc
@property
def path(self) -> str:
return self._path
def splitext(self) -> Tuple[str, str]:
return splitext(posixpath.basename(self.path.rstrip("/")))
@property
def ext(self) -> str:
return self.splitext()[1]
@property
def url_without_fragment(self) -> str:
scheme, netloc, path, query, fragment = self._parsed_url
return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
_egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
# Per PEP 508.
_project_name_re = re.compile(
r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
def _egg_fragment(self) -> Optional[str]:
match = self._egg_fragment_re.search(self._url)
if not match:
return None
# An egg fragment looks like a PEP 508 project name, along with
# an optional extras specifier. Anything else is invalid.
project_name = match.group(1)
if not self._project_name_re.match(project_name):
deprecated(
reason=f"{self} contains an egg fragment with a non-PEP 508 name.",
replacement="to use the req @ url syntax, and remove the egg fragment",
gone_in="25.2",
issue=13157,
)
return project_name
_subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
@property
def subdirectory_fragment(self) -> Optional[str]:
match = self._subdirectory_fragment_re.search(self._url)
if not match:
return None
return match.group(1)
def metadata_link(self) -> Optional["Link"]:
"""Return a link to the associated core metadata file (if any)."""
if self.metadata_file_data is None:
return None
metadata_url = f"{self.url_without_fragment}.metadata"
if self.metadata_file_data.hashes is None:
return Link(metadata_url)
return Link(metadata_url, hashes=self.metadata_file_data.hashes)
def as_hashes(self) -> Hashes:
return Hashes({k: [v] for k, v in self._hashes.items()})
@property
def hash(self) -> Optional[str]:
return next(iter(self._hashes.values()), None)
@property
def hash_name(self) -> Optional[str]:
return next(iter(self._hashes), None)
@property
def show_url(self) -> str:
return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
@property
def is_file(self) -> bool:
return self.scheme == "file"
def is_existing_dir(self) -> bool:
return self.is_file and os.path.isdir(self.file_path)
@property
def is_wheel(self) -> bool:
return self.ext == WHEEL_EXTENSION
@property
def is_vcs(self) -> bool:
from pipenv.patched.pip._internal.vcs import vcs
return self.scheme in vcs.all_schemes
@property
def is_yanked(self) -> bool:
return self.yanked_reason is not None
@property
def has_hash(self) -> bool:
return bool(self._hashes)
def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
"""
Return True if the link has a hash and it is allowed by `hashes`.
"""
if hashes is None:
return False
return any(hashes.is_hash_allowed(k, v) for k, v in self._hashes.items())
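# --- Editor's note: illustrative sketch, not part of the vendored pip source. ---
# ``_egg_fragment_re`` extracts the project name after "#egg=" (or "&egg=") in a
# requirement URL; the same regex in isolation:
import re

_egg_re = re.compile(r"[#&]egg=([^&]*)")
match = _egg_re.search("https://example.com/pkg.zip#egg=demo&subdirectory=src")
assert match is not None and match.group(1) == "demo"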
|
Link
|
python
|
plotly__plotly.py
|
plotly/graph_objs/layout/scene/zaxis/_tickfont.py
|
{
"start": 235,
"end": 9914
}
|
class ____(_BaseLayoutHierarchyType):
_parent_path_str = "layout.scene.zaxis"
_path_str = "layout.scene.zaxis.tickfont"
_valid_props = {
"color",
"family",
"lineposition",
"shadow",
"size",
"style",
"textcase",
"variant",
"weight",
}
@property
def color(self):
"""
The 'color' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color: see https://plotly.com/python/css-colors/ for a list
Returns
-------
str
"""
return self["color"]
@color.setter
def color(self, val):
self["color"] = val
@property
def family(self):
"""
HTML font family - the typeface that will be applied by the web
browser. The web browser can only apply a font if it is
available on the system where it runs. Provide multiple font
families, separated by commas, to indicate the order in which
to apply fonts if they aren't available.
The 'family' property is a string and must be specified as:
- A non-empty string
Returns
-------
str
"""
return self["family"]
@family.setter
def family(self, val):
self["family"] = val
@property
def lineposition(self):
"""
Sets the kind of decoration line(s) with text, such as an
"under", "over" or "through" as well as combinations e.g.
"under+over", etc.
The 'lineposition' property is a flaglist and may be specified
as a string containing:
- Any combination of ['under', 'over', 'through'] joined with '+' characters
(e.g. 'under+over')
OR exactly one of ['none'] (e.g. 'none')
Returns
-------
Any
"""
return self["lineposition"]
@lineposition.setter
def lineposition(self, val):
self["lineposition"] = val
@property
def shadow(self):
"""
Sets the shape and color of the shadow behind text. "auto"
places minimal shadow and applies contrast text font color. See
https://developer.mozilla.org/en-US/docs/Web/CSS/text-shadow
for additional options.
The 'shadow' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self["shadow"]
@shadow.setter
def shadow(self, val):
self["shadow"] = val
@property
def size(self):
"""
The 'size' property is a number and may be specified as:
- An int or float in the interval [1, inf]
Returns
-------
int|float
"""
return self["size"]
@size.setter
def size(self, val):
self["size"] = val
@property
def style(self):
"""
Sets whether a font should be styled with a normal or italic
face from its family.
The 'style' property is an enumeration that may be specified as:
- One of the following enumeration values:
['normal', 'italic']
Returns
-------
Any
"""
return self["style"]
@style.setter
def style(self, val):
self["style"] = val
@property
def textcase(self):
"""
Sets capitalization of text. It can be used to make text appear
in all-uppercase or all-lowercase, or with each word
capitalized.
The 'textcase' property is an enumeration that may be specified as:
- One of the following enumeration values:
['normal', 'word caps', 'upper', 'lower']
Returns
-------
Any
"""
return self["textcase"]
@textcase.setter
def textcase(self, val):
self["textcase"] = val
@property
def variant(self):
"""
Sets the variant of the font.
The 'variant' property is an enumeration that may be specified as:
- One of the following enumeration values:
['normal', 'small-caps', 'all-small-caps',
'all-petite-caps', 'petite-caps', 'unicase']
Returns
-------
Any
"""
return self["variant"]
@variant.setter
def variant(self, val):
self["variant"] = val
@property
def weight(self):
"""
Sets the weight (or boldness) of the font.
        The 'weight' property is an integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [1, 1000]
OR exactly one of ['normal', 'bold'] (e.g. 'bold')
Returns
-------
int
"""
return self["weight"]
@weight.setter
def weight(self, val):
self["weight"] = val
@property
def _prop_descriptions(self):
return """\
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser can only apply a font
if it is available on the system where it runs. Provide
multiple font families, separated by commas, to
indicate the order in which to apply fonts if they
aren't available.
lineposition
Sets the kind of decoration line(s) with text, such as
an "under", "over" or "through" as well as combinations
e.g. "under+over", etc.
shadow
Sets the shape and color of the shadow behind text.
"auto" places minimal shadow and applies contrast text
font color. See https://developer.mozilla.org/en-
US/docs/Web/CSS/text-shadow for additional options.
size
style
Sets whether a font should be styled with a normal or
italic face from its family.
textcase
Sets capitalization of text. It can be used to make
text appear in all-uppercase or all-lowercase, or with
each word capitalized.
variant
Sets the variant of the font.
weight
Sets the weight (or boldness) of the font.
"""
def __init__(
self,
arg=None,
color=None,
family=None,
lineposition=None,
shadow=None,
size=None,
style=None,
textcase=None,
variant=None,
weight=None,
**kwargs,
):
"""
Construct a new Tickfont object
Sets the tick font.
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of
:class:`plotly.graph_objs.layout.scene.zaxis.Tickfont`
color
family
HTML font family - the typeface that will be applied by
the web browser. The web browser can only apply a font
if it is available on the system where it runs. Provide
multiple font families, separated by commas, to
indicate the order in which to apply fonts if they
aren't available.
lineposition
Sets the kind of decoration line(s) with text, such as
an "under", "over" or "through" as well as combinations
e.g. "under+over", etc.
shadow
Sets the shape and color of the shadow behind text.
"auto" places minimal shadow and applies contrast text
font color. See https://developer.mozilla.org/en-
US/docs/Web/CSS/text-shadow for additional options.
size
style
Sets whether a font should be styled with a normal or
italic face from its family.
textcase
Sets capitalization of text. It can be used to make
text appear in all-uppercase or all-lowercase, or with
each word capitalized.
variant
Sets the variant of the font.
weight
Sets the weight (or boldness) of the font.
Returns
-------
Tickfont
"""
super().__init__("tickfont")
if "_parent" in kwargs:
self._parent = kwargs["_parent"]
return
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError("""\
The first argument to the plotly.graph_objs.layout.scene.zaxis.Tickfont
constructor must be a dict or
an instance of :class:`plotly.graph_objs.layout.scene.zaxis.Tickfont`""")
self._skip_invalid = kwargs.pop("skip_invalid", False)
self._validate = kwargs.pop("_validate", True)
self._set_property("color", arg, color)
self._set_property("family", arg, family)
self._set_property("lineposition", arg, lineposition)
self._set_property("shadow", arg, shadow)
self._set_property("size", arg, size)
self._set_property("style", arg, style)
self._set_property("textcase", arg, textcase)
self._set_property("variant", arg, variant)
self._set_property("weight", arg, weight)
self._process_kwargs(**dict(arg, **kwargs))
self._skip_invalid = False
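# --- Editor's note: illustrative sketch, not part of the generated class. ---
# Typical use goes through the figure API rather than direct construction
# (assumes plotly is installed):
import plotly.graph_objects as go

fig = go.Figure()
fig.update_layout(scene=dict(zaxis=dict(tickfont=dict(family="Courier", size=14))))
assert fig.layout.scene.zaxis.tickfont.size == 14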
|
Tickfont
|
python
|
spack__spack
|
lib/spack/spack/vendor/jinja2/exceptions.py
|
{
"start": 364,
"end": 1291
}
|
class ____(IOError, LookupError, TemplateError):
"""Raised if a template does not exist.
.. versionchanged:: 2.11
If the given name is :class:`Undefined` and no message was
provided, an :exc:`UndefinedError` is raised.
"""
# Silence the Python warning about message being deprecated since
# it's not valid here.
message: t.Optional[str] = None
def __init__(
self,
name: t.Optional[t.Union[str, "Undefined"]],
message: t.Optional[str] = None,
) -> None:
IOError.__init__(self, name)
if message is None:
from .runtime import Undefined
if isinstance(name, Undefined):
name._fail_with_undefined_error()
message = name
self.message = message
self.name = name
self.templates = [name]
def __str__(self) -> str:
return str(self.message)
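# --- Editor's note: illustrative sketch, not part of the vendored jinja2 source. ---
# The exception carries the missing template name (assumes jinja2 is installed):
from jinja2 import DictLoader, Environment, TemplateNotFound

env = Environment(loader=DictLoader({}))
try:
    env.get_template("missing.html")
except TemplateNotFound as exc:
    assert exc.name == "missing.html"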
|
TemplateNotFound
|
python
|
spyder-ide__spyder
|
external-deps/python-lsp-server/pylsp/plugins/_rope_task_handle.py
|
{
"start": 1838,
"end": 2883
}
|
class ____(BaseTaskHandle):
name: str
observers: list
job_sets: list[PylspJobSet]
stopped: bool
workspace: Workspace
_report: Callable[[str, str], None]
def __init__(self, workspace: Workspace) -> None:
self.workspace = workspace
self.job_sets = []
self.observers = []
def create_jobset(self, name="JobSet", count: int | None = None):
report_iter = self.workspace.report_progress(
name, None, None, skip_token_initialization=True
)
result = PylspJobSet(count, report_iter)
self.job_sets.append(result)
self._inform_observers()
return result
def stop(self) -> None:
pass
def current_jobset(self) -> BaseJobSet | None:
pass
def add_observer(self) -> None:
pass
def is_stopped(self) -> bool:
pass
def get_jobsets(self) -> Sequence[BaseJobSet]:
pass
def _inform_observers(self) -> None:
for observer in self.observers:
observer()
|
PylspTaskHandle
|
python
|
getsentry__sentry
|
tests/sentry/api/endpoints/test_sudo.py
|
{
"start": 176,
"end": 1620
}
|
class ____(APITestCase):
def test_sudo_required_del_org(self) -> None:
org = self.create_organization()
url = reverse(
"sentry-api-0-organization-details", kwargs={"organization_id_or_slug": org.slug}
)
user = self.create_user(email="foo@example.com")
self.create_member(organization=org, user=user, role="owner")
self.login_as(user)
middleware = list(settings.MIDDLEWARE)
index = middleware.index("sentry.testutils.middleware.SudoMiddleware")
middleware[index] = "sentry.middleware.sudo.SudoMiddleware"
with self.settings(MIDDLEWARE=tuple(middleware)):
response = self.client.delete(url, is_sudo=False)
assert response.status_code == 401
assert response.data["detail"]["code"] == "sudo-required"
assert response.data["detail"]["message"] == "Account verification required."
assert response.data["detail"]["extra"]["username"] == "foo@example.com"
sudo_url = reverse("sentry-api-0-auth", kwargs={})
# Now try to gain sudo access
response = self.client.post(
sudo_url, {"username": "foo@example.com", "password": "admin"}
)
assert response.status_code == 200
# This should now work
response = self.client.delete(url, is_sudo=False)
assert response.status_code == 202
|
SudoTest
|
python
|
pyodide__pyodide
|
src/py/_pyodide/_base.py
|
{
"start": 5485,
"end": 21133
}
|
class ____:
"""This class allows fine control over the execution of a code block.
It is primarily intended for REPLs and other sophisticated consumers that
may wish to add their own AST transformations, separately signal to the user
when parsing is complete, etc. The simpler :py:func:`eval_code` and
:py:func:`eval_code_async` apis should be preferred when their flexibility
suffices.
Parameters
----------
source :
The Python source code to run.
return_mode :
Specifies what should be returned. The options are:
:'last_expr': return the last expression
:'last_expr_or_assign': return the last expression or the last
assignment.
:'none': always return ``None``.
quiet_trailing_semicolon :
Specifies whether a trailing semicolon should suppress the result or
not. When this is ``True`` executing ``"1+1;"`` returns ``None``, when
        it is ``False``, executing ``"1+1;"`` returns ``2``. ``True`` by default.
filename :
The file name to use in error messages and stack traces. ``'<exec>'`` by
default.
mode :
The "mode" to compile in. One of ``"exec"``, ``"single"``, or
``"eval"``. Defaults to ``"exec"``. For most purposes it's unnecessary
to use this argument. See the documentation for the built-in
:external:py:func:`compile` function.
flags :
The flags to compile with. See the documentation for the built-in
:external:py:func:`compile` function.
dont_inherit :
Whether to inherit ``__future__`` imports from the outer code.
See the documentation for the built-in :external:py:func:`compile` function.
optimize :
Specifies the optimization level of the compiler. See the documentation
for the built-in :external:py:func:`compile` function.
Examples
--------
>>> source = "1 + 1"
>>> code_runner = CodeRunner(source)
>>> code_runner.compile() # doctest: +ELLIPSIS
<_pyodide._base.CodeRunner object at 0x...>
>>> code_runner.run()
2
>>> my_globals = {"x": 20}
>>> my_locals = {"y": 5}
>>> source = "x + y"
>>> code_runner = CodeRunner(source)
>>> code_runner.compile() # doctest: +ELLIPSIS
<_pyodide._base.CodeRunner object at 0x...>
>>> code_runner.run(globals=my_globals, locals=my_locals)
25
"""
ast: ast.Module
"""
The ast from parsing ``source``. If you wish to do an ast transform,
modify this variable before calling :py:meth:`CodeRunner.compile`.
"""
code: CodeType | None
"""
Once you call :py:meth:`CodeRunner.compile` the compiled code will
be available in the code field. You can modify this variable
before calling :py:meth:`CodeRunner.run` to do a code transform.
"""
def __init__(
self,
source: str,
*,
return_mode: ReturnMode = "last_expr",
mode: str = "exec",
quiet_trailing_semicolon: bool = True,
filename: str = "<exec>",
flags: int = 0x0,
dont_inherit: bool = False,
optimize: int = -1,
):
self._compiled = False
self._source = source
self._gen = _parse_and_compile_gen(
source,
return_mode=return_mode,
mode=mode,
quiet_trailing_semicolon=quiet_trailing_semicolon,
filename=filename,
flags=flags,
dont_inherit=dont_inherit,
optimize=optimize,
)
self.ast = next(self._gen)
def compile(self) -> "CodeRunner":
"""Compile the current value of ``self.ast`` and store the result in ``self.code``.
Can only be used once. Returns ``self`` (chainable).
"""
if self._compiled:
raise RuntimeError("Already compiled")
self._compiled = True
try:
# Triggers compilation
self._gen.send(self.ast)
except StopIteration as e:
# generator must return, which raises StopIteration
self.code = e.value
else:
raise AssertionError()
return self
def _set_linecache(self):
assert self.code
filename = self.code.co_filename
if filename.startswith("<") and filename.endswith(">"):
return
source = self._source
linecache.cache[filename] = [lambda: source] # type:ignore[assignment]
def run(
self,
globals: dict[str, Any] | None = None,
locals: dict[str, Any] | None = None,
) -> Any:
"""Executes ``self.code``.
Can only be used after calling compile. The code may not use top level
await, use :py:meth:`CodeRunner.run_async` for code that uses top level
await.
Parameters
----------
globals :
The global scope in which to execute code. This is used as the ``globals``
parameter for :py:func:`exec`. If ``globals`` is absent, a new empty dictionary is used.
locals :
The local scope in which to execute code. This is used as the ``locals``
parameter for :py:func:`exec`. If ``locals`` is absent, the value of ``globals`` is
used.
Returns
-------
If the last nonwhitespace character of ``source`` is a semicolon,
return ``None``. If the last statement is an expression, return the
result of the expression. Use the ``return_mode`` and
``quiet_trailing_semicolon`` parameters to modify this default
behavior.
"""
if globals is None:
globals = {}
if locals is None:
locals = globals
if not self._compiled:
raise RuntimeError("Not yet compiled")
if self.code is None:
return None
self._set_linecache()
try:
coroutine = eval(self.code, globals, locals)
if coroutine:
raise RuntimeError(
"Used eval_code with TOP_LEVEL_AWAIT. Use run_async for this instead."
)
except EvalCodeResultException as e:
# Final expression from code returns here
return e.value
return None
async def run_async(
self,
globals: dict[str, Any] | None = None,
locals: dict[str, Any] | None = None,
) -> Any:
"""Runs ``self.code`` which may use top level await.
Can only be used after calling :py:meth:`CodeRunner.compile`. If
``self.code`` uses top level await, automatically awaits the resulting
coroutine.
Parameters
----------
globals :
The global scope in which to execute code. This is used as the ``globals``
parameter for :py:func:`exec`. If ``globals`` is absent, a new empty dictionary is used.
locals :
The local scope in which to execute code. This is used as the
``locals`` parameter for :py:func:`exec`. If ``locals`` is absent, the
value of ``globals`` is used.
Returns
-------
If the last nonwhitespace character of ``source`` is a semicolon,
return ``None``. If the last statement is an expression, return the
result of the expression. Use the ``return_mode`` and
``quiet_trailing_semicolon`` parameters to modify this default
behavior.
"""
if globals is None:
globals = {}
if locals is None:
locals = globals
if not self._compiled:
raise RuntimeError("Not yet compiled")
if self.code is None:
return
self._set_linecache()
try:
coroutine = eval(self.code, globals, locals)
if coroutine:
await coroutine
except EvalCodeResultException as e:
return e.value
def eval_code(
source: str,
globals: dict[str, Any] | None = None,
locals: dict[str, Any] | None = None,
*,
return_mode: ReturnMode = "last_expr",
quiet_trailing_semicolon: bool = True,
filename: str = "<exec>",
flags: int = 0x0,
dont_inherit: bool = False,
optimize: int = -1,
) -> Any:
"""Runs a string as Python source code.
Parameters
----------
source :
The Python source code to run.
globals :
The global scope in which to execute code. This is used as the
``globals`` parameter for :py:func:`exec`. If ``globals`` is absent, a new
empty dictionary is used.
locals :
The local scope in which to execute code. This is used as the ``locals``
parameter for :py:func:`exec`. If ``locals`` is absent, the value of
``globals`` is used.
return_mode :
Specifies what should be returned. The options are:
:'last_expr': return the last expression
:'last_expr_or_assign': return the last expression or the last
assignment.
:'none': always return ``None``.
quiet_trailing_semicolon :
Specifies whether a trailing semicolon should suppress the result or
not. When this is ``True`` executing ``"1+1 ;"`` returns ``None``, when
        it is ``False``, executing ``"1+1 ;"`` returns ``2``. ``True`` by
default.
filename :
The file name to use in error messages and stack traces. ``'<exec>'`` by
default.
flags :
The flags to compile with. See the documentation for the built-in
:external:py:func:`compile` function.
Returns
-------
If the last nonwhitespace character of ``source`` is a semicolon, return
``None``. If the last statement is an expression, return the result of the
expression. Use the ``return_mode`` and ``quiet_trailing_semicolon``
parameters to modify this default behavior.
Examples
--------
>>> source = "1 + 1"
>>> eval_code(source)
2
>>> source = "1 + 1;"
>>> eval_code(source, quiet_trailing_semicolon=True)
>>> eval_code(source, quiet_trailing_semicolon=False)
2
>>> my_globals = { "y": "100" }
>>> my_locals = { "y": "200" }
>>> source = "print(locals()['y'], globals()['y'])"
>>> eval_code(source, globals=my_globals, locals=my_locals)
200 100
>>> source = "test = 1 + 1"
>>> eval_code(source, return_mode="last_expr_or_assign")
2
>>> eval_code(source, return_mode="last_expr")
>>> eval_code(source, return_mode="none")
>>> source = "print(pyodide)" # Pretend this is open('example_of_filename.py', 'r').read()
>>> eval_code(source, filename="example_of_filename.py")
Traceback (most recent call last):
...
File "example_of_filename.py", line 1, in <module>
print(pyodide)
^^^^^^^
NameError: name 'pyodide' is not defined
"""
return (
CodeRunner(
source,
return_mode=return_mode,
quiet_trailing_semicolon=quiet_trailing_semicolon,
filename=filename,
flags=flags,
dont_inherit=dont_inherit,
optimize=optimize,
)
.compile()
.run(globals, locals)
)
async def eval_code_async(
source: str,
globals: dict[str, Any] | None = None,
locals: dict[str, Any] | None = None,
*,
return_mode: ReturnMode = "last_expr",
quiet_trailing_semicolon: bool = True,
filename: str = "<exec>",
flags: int = 0x0,
dont_inherit: bool = False,
optimize: int = -1,
) -> Any:
"""Runs a code string asynchronously.
Uses :py:data:`ast.PyCF_ALLOW_TOP_LEVEL_AWAIT` to compile the code.
Parameters
----------
source :
The Python source code to run.
globals :
The global scope in which to execute code. This is used as the
``globals`` parameter for :py:func:`exec`. If ``globals`` is absent, a new
empty dictionary is used.
locals :
The local scope in which to execute code. This is used as the ``locals``
parameter for :py:func:`exec`. If ``locals`` is absent, the value of
``globals`` is used.
return_mode :
Specifies what should be returned. The options are:
:'last_expr': return the last expression
:'last_expr_or_assign': return the last expression or the last
assignment.
:'none': always return ``None``.
quiet_trailing_semicolon :
Specifies whether a trailing semicolon should suppress the result or
not. When this is ``True`` executing ``"1+1 ;"`` returns ``None``, when
        it is ``False``, executing ``"1+1 ;"`` returns ``2``. ``True`` by
default.
filename :
The file name to use in error messages and stack traces. ``'<exec>'`` by
default.
flags :
The flags to compile with. See the documentation for the built-in
:external:py:func:`compile` function.
Returns
-------
If the last nonwhitespace character of ``source`` is a semicolon, return
``None``. If the last statement is an expression, return the result of
the expression. Use the ``return_mode`` and ``quiet_trailing_semicolon``
parameters to modify this default behavior.
"""
flags = flags or ast.PyCF_ALLOW_TOP_LEVEL_AWAIT
return (
await CodeRunner(
source,
return_mode=return_mode,
quiet_trailing_semicolon=quiet_trailing_semicolon,
filename=filename,
flags=flags,
dont_inherit=dont_inherit,
optimize=optimize,
)
.compile()
.run_async(globals, locals)
)
def _add_prefixes(s: set[str], mod: str) -> None:
[current, *rest] = mod.split(".")
s.add(current)
for part in rest:
current += f".{part}"
s.add(current)
def find_imports(source: str) -> list[str]:
"""
    Finds the imports in a Python source code string.
Parameters
----------
source :
The Python source code to inspect for imports.
Returns
-------
A list of module names that are imported in ``source``. If ``source`` is
not syntactically correct Python code (after dedenting), returns an
empty list.
Given `import package.module`, `find_imports` will include both
`"package"` and `"package.module"` in the result.
Examples
--------
>>> source = "import numpy as np; import scipy.stats"
>>> find_imports(source)
['numpy', 'scipy', 'scipy.stats']
"""
# handle mis-indented input from multi-line strings
source = dedent(source)
try:
mod = ast.parse(source)
except SyntaxError:
return []
imports: set[str] = set()
for node in ast.walk(mod):
if isinstance(node, ast.Import):
for name in node.names:
node_name = name.name
_add_prefixes(imports, node_name)
elif isinstance(node, ast.ImportFrom):
module_name = node.module
if module_name is None:
continue
_add_prefixes(imports, module_name)
return sorted(imports)
def pyimport_impl(path: str) -> Any:
[stem, *fromlist] = path.rsplit(".", 1)
res = __import__(stem, fromlist=fromlist)
if fromlist:
try:
res = getattr(res, fromlist[0])
except AttributeError:
res = import_module(path)
return res
|
CodeRunner
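A minimal standalone sketch of the "return the last expression" behavior that eval_code documents above. This is an illustration with hypothetical names, not the Pyodide implementation (which additionally supports top-level await and the return_mode options): split a trailing expression off the module, exec() the rest, then eval() the expression and return its value.

import ast

def run_and_return_last_expr(source: str, globals_: dict | None = None):
    """Run ``source``; if its final statement is an expression, return its value."""
    globals_ = {} if globals_ is None else globals_
    module = ast.parse(source)
    if module.body and isinstance(module.body[-1], ast.Expr):
        # Split off the trailing expression so it can be eval()'d separately.
        last = module.body.pop()
        exec(compile(module, "<sketch>", "exec"), globals_)
        return eval(compile(ast.Expression(last.value), "<sketch>", "eval"), globals_)
    exec(compile(module, "<sketch>", "exec"), globals_)
    return None

assert run_and_return_last_expr("x = 2\nx + 3") == 5
assert run_and_return_last_expr("x = 2") is None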
|
python
|
sqlalchemy__sqlalchemy
|
test/orm/test_query.py
|
{
"start": 5554,
"end": 7704
}
|
class ____(QueryTest):
def test_single_entity_false(self):
User = self.classes.User
query = fixture_session().query(User).only_return_tuples(False)
is_true(query.is_single_entity)
row = query.first()
assert isinstance(row, User)
def test_single_entity_true(self):
User = self.classes.User
query = fixture_session().query(User).only_return_tuples(True)
is_false(query.is_single_entity)
row = query.first()
assert isinstance(row, collections_abc.Sequence)
assert isinstance(row._mapping, collections_abc.Mapping)
@expect_deprecated(".*is deprecated, Row now behaves like a tuple.*")
def test_single_entity_tuples(self):
User = self.classes.User
query = fixture_session().query(User).tuples()
is_false(query.is_single_entity)
row = query.first()
assert isinstance(row, collections_abc.Sequence)
assert isinstance(row._mapping, collections_abc.Mapping)
def test_multiple_entity_false(self):
User = self.classes.User
query = (
fixture_session().query(User.id, User).only_return_tuples(False)
)
is_false(query.is_single_entity)
row = query.first()
assert isinstance(row, collections_abc.Sequence)
assert isinstance(row._mapping, collections_abc.Mapping)
def test_multiple_entity_true(self):
User = self.classes.User
query = fixture_session().query(User.id, User).only_return_tuples(True)
is_false(query.is_single_entity)
row = query.first()
assert isinstance(row, collections_abc.Sequence)
assert isinstance(row._mapping, collections_abc.Mapping)
@expect_deprecated(".*is deprecated, Row now behaves like a tuple.*")
def test_multiple_entity_true_tuples(self):
User = self.classes.User
query = fixture_session().query(User.id, User).tuples()
is_false(query.is_single_entity)
row = query.first()
assert isinstance(row, collections_abc.Sequence)
assert isinstance(row._mapping, collections_abc.Mapping)
|
OnlyReturnTuplesTest
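A hedged usage sketch of the behavior these tests assert, against an in-memory SQLite database (SQLAlchemy 2.0 declarative style; the User model here is illustrative, not the fixture the tests use):

from sqlalchemy import create_engine
from sqlalchemy.orm import DeclarativeBase, Mapped, Session, mapped_column

class Base(DeclarativeBase):
    pass

class User(Base):
    __tablename__ = "users"
    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str]

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
with Session(engine) as session:
    session.add(User(name="ed"))
    session.commit()
    # Single entity: .first() yields the User object itself ...
    obj = session.query(User).only_return_tuples(False).first()
    assert isinstance(obj, User)
    # ... unless tuples are forced, in which case a Row comes back.
    row = session.query(User).only_return_tuples(True).first()
    assert not isinstance(row, User)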
|
python
|
dagster-io__dagster
|
python_modules/libraries/dagster-airbyte/dagster_airbyte/managed/generated/sources.py
|
{
"start": 143123,
"end": 144915
}
|
class ____(GeneratedAirbyteSource):
@public
def __init__(
self,
name: str,
account_id: str,
client_secret: str,
start_date: str,
lookback_window_days: Optional[int] = None,
slice_range: Optional[int] = None,
):
r"""Airbyte Source for Stripe.
Documentation can be found at https://docs.airbyte.com/integrations/sources/stripe
Args:
name (str): The name of the destination.
account_id (str): Your Stripe account ID (starts with 'acct\\_', find yours here).
client_secret (str): Stripe API key (usually starts with 'sk_live\\_'; find yours here).
start_date (str): UTC date and time in the format 2017-01-25T00:00:00Z. Only data generated after this date will be replicated.
lookback_window_days (Optional[int]): When set, the connector will always re-export data from the past N days, where N is the value set here. This is useful if your data is frequently updated after creation. More info here
            slice_range (Optional[int]): The time increment used by the connector when requesting data from the Stripe API. The bigger the value, the fewer requests are made and the faster the sync; on the other hand, the state is persisted less often.
"""
self.account_id = check.str_param(account_id, "account_id")
self.client_secret = check.str_param(client_secret, "client_secret")
self.start_date = check.str_param(start_date, "start_date")
self.lookback_window_days = check.opt_int_param(
lookback_window_days, "lookback_window_days"
)
self.slice_range = check.opt_int_param(slice_range, "slice_range")
super().__init__("Stripe", name)
|
StripeSource
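A hedged construction sketch (assumes dagster-airbyte is installed; the import path follows this record's file path, and all credential values below are placeholders):

from dagster_airbyte.managed.generated.sources import StripeSource

stripe_source = StripeSource(
    name="stripe",
    account_id="acct_PLACEHOLDER",
    client_secret="sk_live_PLACEHOLDER",
    start_date="2023-01-01T00:00:00Z",
    lookback_window_days=7,  # re-export the trailing week on each sync
)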
|
python
|
weaviate__weaviate-python-client
|
weaviate/collections/classes/internal.py
|
{
"start": 12650,
"end": 14300
}
|
class ____:
prop: str
number_of_groups: int
objects_per_group: int
def __init__(self, prop: str, number_of_groups: int, objects_per_group: int) -> None:
self.prop = prop
self.number_of_groups = number_of_groups
self.objects_per_group = objects_per_group
def to_grpc(self) -> search_get_pb2.GroupBy:
return search_get_pb2.GroupBy(
path=[self.prop],
number_of_groups=self.number_of_groups,
objects_per_group=self.objects_per_group,
)
@classmethod
def from_input(cls, group_by: Optional[GroupBy]) -> Optional["_GroupBy"]:
return (
cls(
prop=group_by.prop,
number_of_groups=group_by.number_of_groups,
objects_per_group=group_by.objects_per_group,
)
if group_by
else None
)
Nested = Annotated[P, "NESTED"]
def __is_nested(value: Any) -> bool:
return (
get_origin(value) is Annotated
and len(get_args(value)) == 2
and cast(str, get_args(value)[1]) == "NESTED"
)
def __create_nested_property_from_nested(name: str, value: Any) -> QueryNested:
inner_type = get_args(value)[0]
# If this nested property contains an object array, use the element type
if get_origin(inner_type) is list:
inner_type = get_args(inner_type)[0]
return QueryNested(
name=name,
properties=[
__create_nested_property_from_nested(key, val) if __is_nested(val) else key
for key, val in get_type_hints(inner_type, include_extras=True).items()
],
)
|
_GroupBy
|
python
|
google__jax
|
jax/_src/export/_export.py
|
{
"start": 13694,
"end": 13954
}
|
class ____(Protocol):
def __call__(self, aux_data: PyTreeAuxData) -> bytes:
"""Serializes the PyTree node AuxData.
The AuxData is returned by the ``flatten_func`` registered by
        :func:`jax.tree_util.register_pytree_node`.
"""
|
_SerializeAuxData
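A standalone sketch of the callback-Protocol pattern above: any callable with a structurally matching signature satisfies the protocol, no inheritance needed (SerializeAuxDataSketch and pickle_aux_data are hypothetical illustration names):

import pickle
from typing import Any, Protocol

PyTreeAuxData = Any

class SerializeAuxDataSketch(Protocol):
    def __call__(self, aux_data: PyTreeAuxData) -> bytes: ...

def pickle_aux_data(aux_data: PyTreeAuxData) -> bytes:
    return pickle.dumps(aux_data)

serializer: SerializeAuxDataSketch = pickle_aux_data  # structural match
assert isinstance(serializer(("leaf", 1)), bytes)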
|
python
|
rapidsai__cudf
|
python/cudf_polars/cudf_polars/dsl/ir.py
|
{
"start": 95230,
"end": 97820
}
|
class ____(IR):
"""Sort a dataframe."""
__slots__ = ("by", "null_order", "order", "stable", "zlice")
_non_child = ("schema", "by", "order", "null_order", "stable", "zlice")
by: tuple[expr.NamedExpr, ...]
"""Sort keys."""
order: tuple[plc.types.Order, ...]
"""Sort order for each sort key."""
null_order: tuple[plc.types.NullOrder, ...]
"""Null sorting location for each sort key."""
stable: bool
"""Should the sort be stable?"""
zlice: Zlice | None
"""Optional slice to apply to the result."""
def __init__(
self,
schema: Schema,
by: Sequence[expr.NamedExpr],
order: Sequence[plc.types.Order],
null_order: Sequence[plc.types.NullOrder],
stable: bool, # noqa: FBT001
zlice: Zlice | None,
df: IR,
):
self.schema = schema
self.by = tuple(by)
self.order = tuple(order)
self.null_order = tuple(null_order)
self.stable = stable
self.zlice = zlice
self._non_child_args = (
self.by,
self.order,
self.null_order,
self.stable,
self.zlice,
)
self.children = (df,)
@classmethod
@log_do_evaluate
@nvtx_annotate_cudf_polars(message="Sort")
def do_evaluate(
cls,
by: Sequence[expr.NamedExpr],
order: Sequence[plc.types.Order],
null_order: Sequence[plc.types.NullOrder],
stable: bool, # noqa: FBT001
zlice: Zlice | None,
df: DataFrame,
*,
context: IRExecutionContext,
) -> DataFrame:
"""Evaluate and return a dataframe."""
sort_keys = broadcast(
*(k.evaluate(df) for k in by), target_length=df.num_rows, stream=df.stream
)
do_sort = plc.sorting.stable_sort_by_key if stable else plc.sorting.sort_by_key
table = do_sort(
df.table,
plc.Table([k.obj for k in sort_keys]),
list(order),
list(null_order),
stream=df.stream,
)
result = DataFrame.from_table(
table, df.column_names, df.dtypes, stream=df.stream
)
first_key = sort_keys[0]
name = by[0].name
first_key_in_result = (
name in df.column_map and first_key.obj is df.column_map[name].obj
)
if first_key_in_result:
result.column_map[name].set_sorted(
is_sorted=plc.types.Sorted.YES, order=order[0], null_order=null_order[0]
)
return result.slice(zlice)
|
Sort
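A standalone sketch of the stable sort-by-key idea that do_evaluate relies on, in plain Python rather than pylibcudf: with a stable sort, applying the keys from least to most significant yields a correct multi-key ordering.

rows = [(2, "b"), (1, "a"), (2, "a"), (1, "b")]
# (column index, descending?) pairs, most significant key first.
keys = [(0, False), (1, True)]
for col, descending in reversed(keys):  # least significant key first
    rows.sort(key=lambda r: r[col], reverse=descending)  # list.sort is stable
print(rows)  # [(1, 'b'), (1, 'a'), (2, 'b'), (2, 'a')]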
|
python
|
pytorch__pytorch
|
torch/distributed/checkpoint/_experimental/config.py
|
{
"start": 433,
"end": 1528
}
|
class ____:
"""
Configuration class for checkpointer construction.
This class consolidates the core component configuration options needed to construct
a checkpointer, providing a clean separation of concerns where each component
manages its own configuration.
Attributes:
writer_config: Configuration options for the checkpoint writer component.
barrier_config: Configuration for barrier construction and arguments.
staging_config: Configuration options for the async staging component.
process_config: Configuration options for the async checkpoint process component.
"""
writer_config: CheckpointWriterConfig = field(
default_factory=CheckpointWriterConfig
)
barrier_config: BarrierConfig = field(default_factory=BarrierConfig)
# Below configs are used for async checkpointing
staging_config: CheckpointStagerConfig = field(
default_factory=CheckpointStagerConfig
)
process_config: CheckpointProcessConfig = field(
default_factory=CheckpointProcessConfig
)
|
CheckpointerConfig
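A standalone sketch of the default_factory pattern this config relies on: mutable sub-configs must be built per instance, since a shared class-level default would alias state across configs (WriterConfig and Config here are illustrative stand-ins):

from dataclasses import dataclass, field

@dataclass
class WriterConfig:
    threads: int = 4

@dataclass
class Config:
    writer: WriterConfig = field(default_factory=WriterConfig)

a, b = Config(), Config()
a.writer.threads = 8
assert b.writer.threads == 4  # each Config got its own WriterConfig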
|
python
|
pytorch__pytorch
|
test/distributed/checkpoint/test_state_dict_stager.py
|
{
"start": 32314,
"end": 33529
}
|
class ____(DTensorTestBase):
@with_comms
@requires_accelerator_dist_backend()
@skip_if_lt_x_gpu(2)
def test_dtensor(self):
"""
Test that StateDictStager works correctly with DTensors.
"""
# Create a DTensor
device_mesh = dist.DeviceMesh(
self.device_type, list(range(dist.get_world_size()))
)
tensor = torch.randn(3, 3, device=self.device_type)
dtensor = DTensor.from_local(tensor, device_mesh, [Shard(0)])
dtensor = dtensor + 1
dtensor = dtensor * 2
state_dict = {
"dtensor": dtensor,
}
stager = StateDictStager(pin_memory=True, share_memory=True)
cpu_state_dict = stager.stage(state_dict)
# Verify the original DTensor has the expected values
self.assertTrue(torch.allclose(dtensor.to_local(), (tensor + 1) * 2))
self.assertTrue(
torch.allclose(
cpu_state_dict["dtensor"].to_local(), dtensor.to_local().cpu()
)
)
self.assertEqual(cpu_state_dict["dtensor"]._spec, dtensor._spec)
self.assertEqual(cpu_state_dict["dtensor"].size(), dtensor.size())
|
TestDTensorStateDictStager
|
python
|
astral-sh__uv
|
scripts/benchmark/src/benchmark/resolver.py
|
{
"start": 1972,
"end": 2489
}
|
class ____(enum.Enum):
"""Enumeration of the benchmarks to run."""
RESOLVE_COLD = "resolve-cold"
RESOLVE_WARM = "resolve-warm"
RESOLVE_INCREMENTAL = "resolve-incremental"
RESOLVE_NOOP = "resolve-noop"
INSTALL_COLD = "install-cold"
INSTALL_WARM = "install-warm"
# The requirement to use when benchmarking an incremental resolution.
# Ideally, this requirement is compatible with all requirements files, but does not
# appear in any resolutions.
INCREMENTAL_REQUIREMENT = "django"
|
Benchmark
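A standalone sketch of the string-valued enum idiom above: members round-trip from their CLI-style strings via value lookup (the two members here are a subset, for illustration only):

import enum

class BenchmarkSketch(enum.Enum):
    RESOLVE_COLD = "resolve-cold"
    INSTALL_WARM = "install-warm"

assert BenchmarkSketch("resolve-cold") is BenchmarkSketch.RESOLVE_COLD
assert BenchmarkSketch.INSTALL_WARM.value == "install-warm"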
|
python
|
openai__openai-python
|
src/openai/resources/chat/completions/messages.py
|
{
"start": 3936,
"end": 7116
}
|
class ____(AsyncAPIResource):
@cached_property
def with_raw_response(self) -> AsyncMessagesWithRawResponse:
"""
This property can be used as a prefix for any HTTP method call to return
the raw response object instead of the parsed content.
For more information, see https://www.github.com/openai/openai-python#accessing-raw-response-data-eg-headers
"""
return AsyncMessagesWithRawResponse(self)
@cached_property
def with_streaming_response(self) -> AsyncMessagesWithStreamingResponse:
"""
An alternative to `.with_raw_response` that doesn't eagerly read the response body.
For more information, see https://www.github.com/openai/openai-python#with_streaming_response
"""
return AsyncMessagesWithStreamingResponse(self)
def list(
self,
completion_id: str,
*,
after: str | Omit = omit,
limit: int | Omit = omit,
order: Literal["asc", "desc"] | Omit = omit,
# Use the following arguments if you need to pass additional parameters to the API that aren't available via kwargs.
# The extra values given here take precedence over values defined on the client or passed to this method.
extra_headers: Headers | None = None,
extra_query: Query | None = None,
extra_body: Body | None = None,
timeout: float | httpx.Timeout | None | NotGiven = not_given,
) -> AsyncPaginator[ChatCompletionStoreMessage, AsyncCursorPage[ChatCompletionStoreMessage]]:
"""Get the messages in a stored chat completion.
Only Chat Completions that have
been created with the `store` parameter set to `true` will be returned.
Args:
after: Identifier for the last message from the previous pagination request.
limit: Number of messages to retrieve.
order: Sort order for messages by timestamp. Use `asc` for ascending order or `desc`
for descending order. Defaults to `asc`.
extra_headers: Send extra headers
extra_query: Add additional query parameters to the request
extra_body: Add additional JSON properties to the request
timeout: Override the client-level default timeout for this request, in seconds
"""
if not completion_id:
raise ValueError(f"Expected a non-empty value for `completion_id` but received {completion_id!r}")
return self._get_api_list(
f"/chat/completions/{completion_id}/messages",
page=AsyncCursorPage[ChatCompletionStoreMessage],
options=make_request_options(
extra_headers=extra_headers,
extra_query=extra_query,
extra_body=extra_body,
timeout=timeout,
query=maybe_transform(
{
"after": after,
"limit": limit,
"order": order,
},
message_list_params.MessageListParams,
),
),
model=ChatCompletionStoreMessage,
)
|
AsyncMessages
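A hedged usage sketch (requires the openai package and an API key; the completion id below is a placeholder): the returned AsyncPaginator can be iterated directly, fetching pages lazily.

import asyncio
from openai import AsyncOpenAI

async def main() -> None:
    client = AsyncOpenAI()
    async for message in client.chat.completions.messages.list(
        "chatcmpl-123",  # placeholder completion id
        limit=2,
    ):
        print(message.id)

asyncio.run(main())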
|
python
|
django__django
|
tests/csrf_tests/tests.py
|
{
"start": 6790,
"end": 7325
}
|
class ____(SessionStore):
"""
A version of SessionStore that stores what cookie values are passed to
set_cookie() when CSRF_USE_SESSIONS=True.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# This is a list of the cookie values passed to set_cookie() over
# the course of the request-response.
self._cookies_set = []
def __setitem__(self, key, value):
super().__setitem__(key, value)
self._cookies_set.append(value)
|
TestingSessionStore
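A standalone sketch of the test-double pattern above: subclass the real class and record every value that flows through the method under observation (RecordingDict is a hypothetical stand-in for the session store):

class RecordingDict(dict):
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self._values_set = []  # values seen by __setitem__, in order

    def __setitem__(self, key, value):
        super().__setitem__(key, value)
        self._values_set.append(value)

d = RecordingDict()
d["csrf"] = "token-1"
d["csrf"] = "token-2"
assert d._values_set == ["token-1", "token-2"]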
|
python
|
qdrant__qdrant-client
|
qdrant_client/http/models/models.py
|
{
"start": 81620,
"end": 81747
}
|
class ____(BaseModel, extra="forbid"):
overwrite_payload: "SetPayload" = Field(..., description="")
|
OverwritePayloadOperation
|
python
|
sympy__sympy
|
sympy/codegen/cfunctions.py
|
{
"start": 5074,
"end": 6450
}
|
class ____(Function):
"""
Represents the logarithm function with base two.
Explanation
===========
The benefit of using ``log2(x)`` over ``log(x)/log(2)``
is that the latter is not as efficient under finite precision
arithmetic.
Examples
========
>>> from sympy.abc import x
>>> from sympy.codegen.cfunctions import log2
>>> log2(4).evalf() == 2.0
True
>>> log2(x).diff(x)
1/(x*log(2))
See Also
========
exp2
log10
"""
nargs = 1
def fdiff(self, argindex=1):
"""
Returns the first derivative of this function.
"""
if argindex == 1:
return S.One/(log(_Two)*self.args[0])
else:
raise ArgumentIndexError(self, argindex)
@classmethod
def eval(cls, arg):
if arg.is_number:
result = log.eval(arg, base=_Two)
if result.is_Atom:
return result
elif arg.is_Pow and arg.base == _Two:
return arg.exp
def _eval_evalf(self, *args, **kwargs):
return self.rewrite(log).evalf(*args, **kwargs)
def _eval_expand_func(self, **hints):
return _log2(*self.args)
def _eval_rewrite_as_log(self, arg, **kwargs):
return _log2(arg)
_eval_rewrite_as_tractable = _eval_rewrite_as_log
def _fma(x, y, z):
return x*y + z
|
log2
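A hedged usage sketch (requires SymPy): exact powers of two evaluate symbolically via eval() above, and differentiation follows fdiff().

from sympy import Symbol
from sympy.codegen.cfunctions import log2

x = Symbol("x", positive=True)
print(log2(8))          # 3, since 8 == 2**3
print(log2(x).diff(x))  # 1/(x*log(2))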
|
python
|
scrapy__scrapy
|
tests/test_contracts.py
|
{
"start": 6426,
"end": 6500
}
|
class ____(DemoSpider):
name = "inherits_demo_spider"
|
InheritsDemoSpider
|
python
|
matplotlib__matplotlib
|
lib/matplotlib/dates.py
|
{
"start": 62303,
"end": 64095
}
|
class ____(units.ConversionInterface):
"""
Converter for `datetime.date` and `datetime.datetime` data, or for
date/time data represented as it would be converted by `date2num`.
The 'unit' tag for such data is None or a `~datetime.tzinfo` instance.
"""
def __init__(self, *, interval_multiples=True):
self._interval_multiples = interval_multiples
super().__init__()
def axisinfo(self, unit, axis):
"""
Return the `~matplotlib.units.AxisInfo` for *unit*.
*unit* is a `~datetime.tzinfo` instance or None.
The *axis* argument is required but not used.
"""
tz = unit
majloc = AutoDateLocator(tz=tz,
interval_multiples=self._interval_multiples)
majfmt = AutoDateFormatter(majloc, tz=tz)
datemin = datetime.date(1970, 1, 1)
datemax = datetime.date(1970, 1, 2)
return units.AxisInfo(majloc=majloc, majfmt=majfmt, label='',
default_limits=(datemin, datemax))
@staticmethod
def convert(value, unit, axis):
"""
If *value* is not already a number or sequence of numbers, convert it
with `date2num`.
The *unit* and *axis* arguments are not used.
"""
return date2num(value)
@staticmethod
def default_units(x, axis):
"""
Return the `~datetime.tzinfo` instance of *x* or of its first element,
        or None.
"""
if isinstance(x, np.ndarray):
x = x.ravel()
try:
x = cbook._safe_first_finite(x)
except (TypeError, StopIteration):
pass
try:
return x.tzinfo
except AttributeError:
pass
return None
|
DateConverter
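A hedged sketch (requires Matplotlib >= 3.3, whose default epoch is 1970-01-01): convert() ultimately defers to date2num, which maps datetimes to floating-point days since the epoch.

import datetime
from matplotlib.dates import date2num

print(date2num(datetime.datetime(1970, 1, 2)))       # 1.0
print(date2num(datetime.datetime(1970, 1, 1, 12)))   # 0.5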
|
python
|
sympy__sympy
|
sympy/core/numbers.py
|
{
"start": 94341,
"end": 95258
}
|
class ____(IntegerConstant, metaclass=Singleton):
"""The number one.
One is a singleton, and can be accessed by ``S.One``.
Examples
========
>>> from sympy import S, Integer
>>> Integer(1) is S.One
True
References
==========
.. [1] https://en.wikipedia.org/wiki/1_%28number%29
"""
is_number = True
is_positive = True
p = 1
q = 1
__slots__ = ()
def __getnewargs__(self):
return ()
@staticmethod
def __abs__():
return S.One
@staticmethod
def __neg__():
return S.NegativeOne
def _eval_power(self, expt):
return self
def _eval_order(self, *symbols):
return
@staticmethod
def factors(limit=None, use_trial=True, use_rho=False, use_pm1=False,
verbose=False, visual=False):
if visual:
return S.One
else:
return {}
|
One
|
python
|
jmcnamara__XlsxWriter
|
xlsxwriter/test/worksheet/test_cond_format01.py
|
{
"start": 345,
"end": 2778
}
|
class ____(unittest.TestCase):
"""
Test assembling a complete Worksheet file.
"""
def test_assemble_xml_file(self):
"""Test writing a worksheet with conditional formatting."""
self.maxDiff = None
fh = StringIO()
worksheet = Worksheet()
worksheet._set_filehandle(fh)
worksheet.select()
worksheet.write("A1", 10)
worksheet.write("A2", 20)
worksheet.write("A3", 30)
worksheet.write("A4", 40)
worksheet.conditional_format(
"A1",
{"type": "cell", "criteria": "greater than", "value": 5, "format": None},
)
worksheet._assemble_xml_file()
exp = _xml_to_list(
"""
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<worksheet xmlns="http://schemas.openxmlformats.org/spreadsheetml/2006/main" xmlns:r="http://schemas.openxmlformats.org/officeDocument/2006/relationships">
<dimension ref="A1:A4"/>
<sheetViews>
<sheetView tabSelected="1" workbookViewId="0"/>
</sheetViews>
<sheetFormatPr defaultRowHeight="15"/>
<sheetData>
<row r="1" spans="1:1">
<c r="A1">
<v>10</v>
</c>
</row>
<row r="2" spans="1:1">
<c r="A2">
<v>20</v>
</c>
</row>
<row r="3" spans="1:1">
<c r="A3">
<v>30</v>
</c>
</row>
<row r="4" spans="1:1">
<c r="A4">
<v>40</v>
</c>
</row>
</sheetData>
<conditionalFormatting sqref="A1">
<cfRule type="cellIs" priority="1" operator="greaterThan">
<formula>5</formula>
</cfRule>
</conditionalFormatting>
<pageMargins left="0.7" right="0.7" top="0.75" bottom="0.75" header="0.3" footer="0.3"/>
</worksheet>
"""
)
got = _xml_to_list(fh.getvalue())
self.assertEqual(exp, got)
|
TestAssembleWorksheet
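A hedged usage sketch (requires XlsxWriter): the same conditional_format call the test exercises, written to a real workbook file rather than an in-memory XML buffer.

import xlsxwriter

workbook = xlsxwriter.Workbook("cond_format_demo.xlsx")
worksheet = workbook.add_worksheet()
for row, value in enumerate([10, 20, 30, 40]):
    worksheet.write(row, 0, value)
worksheet.conditional_format(
    "A1:A4",
    {"type": "cell", "criteria": "greater than", "value": 5, "format": None},
)
workbook.close()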
|
python
|
HypothesisWorks__hypothesis
|
hypothesis-python/src/hypothesis/internal/observability.py
|
{
"start": 8045,
"end": 11839
}
|
class ____(BaseObservation):
__test__ = False # no! bad pytest!
type: Literal["test_case"]
status: TestCaseStatus
status_reason: str
representation: str
arguments: dict
how_generated: str
features: dict
coverage: dict[str, list[int]] | None
timing: dict[str, float]
metadata: ObservationMetadata
def add_observability_callback(f: CallbackT, /, *, all_threads: bool = False) -> None:
"""
Adds ``f`` as a callback for :ref:`observability <observability>`. ``f``
should accept one argument, which is an observation. Whenever Hypothesis
produces a new observation, it calls each callback with that observation.
If Hypothesis tests are being run from multiple threads, callbacks are tracked
per-thread. In other words, ``add_observability_callback(f)`` only adds ``f``
as an observability callback for observations produced on that thread.
If ``all_threads=True`` is passed, ``f`` will instead be registered as a
callback for all threads. This means it will be called for observations
generated by all threads, not just the thread which registered ``f`` as a
callback. In this case, ``f`` will be passed two arguments: the first is the
observation, and the second is the integer thread id from
:func:`python:threading.get_ident` where that observation was generated.
We recommend against registering ``f`` as a callback for both ``all_threads=True``
and the default ``all_threads=False``, due to unclear semantics with
|remove_observability_callback|.
"""
if all_threads:
_callbacks_all_threads.append(cast(CallbackAllThreadsT, f))
return
thread_id = threading.get_ident()
if thread_id not in _callbacks:
_callbacks[thread_id] = []
_callbacks[thread_id].append(cast(CallbackThreadT, f))
def remove_observability_callback(f: CallbackT, /) -> None:
"""
Removes ``f`` from the :ref:`observability <observability>` callbacks.
If ``f`` is not in the list of observability callbacks, silently do nothing.
If running under multiple threads, ``f`` will only be removed from the
callbacks for this thread.
"""
if f in _callbacks_all_threads:
_callbacks_all_threads.remove(cast(CallbackAllThreadsT, f))
thread_id = threading.get_ident()
if thread_id not in _callbacks:
return
callbacks = _callbacks[thread_id]
if f in callbacks:
callbacks.remove(cast(CallbackThreadT, f))
if not callbacks:
del _callbacks[thread_id]
def observability_enabled() -> bool:
"""
Returns whether or not Hypothesis considers :ref:`observability <observability>`
to be enabled. Observability is enabled if there is at least one observability
callback present.
Callers might use this method to determine whether they should compute an
expensive representation that is only used under observability, for instance
by |alternative backends|.
"""
return bool(_callbacks) or bool(_callbacks_all_threads)
@contextmanager
def with_observability_callback(
f: Callable[[Observation], None], /, *, all_threads: bool = False
) -> Generator[None, None, None]:
"""
A simple context manager which calls |add_observability_callback| on ``f``
when it enters and |remove_observability_callback| on ``f`` when it exits.
"""
add_observability_callback(f, all_threads=all_threads)
try:
yield
finally:
remove_observability_callback(f)
def deliver_observation(observation: Observation) -> None:
thread_id = threading.get_ident()
for callback in _callbacks.get(thread_id, []):
callback(observation)
for callback in _callbacks_all_threads:
callback(observation, thread_id)
|
TestCaseObservation
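A hedged usage sketch (requires Hypothesis; the import path follows the file above, which is internal and may change): collect the observations emitted while one test runs.

from hypothesis import given, strategies as st
from hypothesis.internal.observability import with_observability_callback

observations = []

@given(st.integers())
def test_ints(n):
    assert isinstance(n, int)

with with_observability_callback(observations.append):
    test_ints()

print(f"collected {len(observations)} observations")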
|
python
|
dask__dask
|
dask/dataframe/dask_expr/_reductions.py
|
{
"start": 20197,
"end": 22518
}
|
class ____(ApplyConcatApply):
_parameters = ["frame", "columns", "index", "values", "aggfunc"]
_defaults = {"columns": None, "index": None, "values": None, "aggfunc": "mean"}
@functools.cached_property
def _meta(self):
df = self.frame._meta
columns = self.operand("columns")
values = self.operand("values")
index = self.operand("index")
columns_contents = pd.CategoricalIndex(df[columns].cat.categories, name=columns)
if is_scalar(values):
new_columns = columns_contents
else:
new_columns = pd.MultiIndex.from_product(
(sorted(values), columns_contents), names=[None, columns]
)
if self.operand("aggfunc") in ["first", "last"]:
# Infer datatype as non-numeric values are allowed
if is_scalar(values):
meta = pd.DataFrame(
columns=new_columns,
dtype=df[values].dtype,
index=pd.Index(df[index]),
)
else:
meta = pd.DataFrame(
columns=new_columns,
index=pd.Index(df[index]),
)
for value_col in values:
meta[value_col] = meta[value_col].astype(
df[values].dtypes[value_col]
)
else:
# Use float64 as other aggregate functions require numerical data
meta = pd.DataFrame(
columns=new_columns, dtype=np.float64, index=pd.Index(df[index])
)
return meta
def _lower(self):
args = [
self.frame,
self.operand("columns"),
self.operand("index"),
self.operand("values"),
]
if self.aggfunc == "sum":
return PivotTableSum(*args)
elif self.aggfunc == "mean":
return PivotTableSum(*args) / PivotTableCount(*args)
elif self.aggfunc == "count":
return PivotTableCount(*args)
elif self.aggfunc == "first":
return PivotTableFirst(*args)
elif self.aggfunc == "last":
return PivotTableLast(*args)
else:
raise NotImplementedError(f"{self.aggfunc=} is not implemented")
|
PivotTable
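A standalone sketch (using pandas) of the lowering in _lower above: a "mean" pivot is the elementwise ratio of a "sum" pivot and a "count" pivot.

import pandas as pd

df = pd.DataFrame(
    {"k": ["a", "a", "b"], "c": ["x", "y", "x"], "v": [1.0, 2.0, 3.0]}
)
kwargs = dict(index="k", columns="c", values="v")
mean = df.pivot_table(aggfunc="mean", **kwargs)
ratio = df.pivot_table(aggfunc="sum", **kwargs) / df.pivot_table(
    aggfunc="count", **kwargs
)
pd.testing.assert_frame_equal(mean, ratio)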
|
python
|
doocs__leetcode
|
solution/0600-0699/0640.Solve the Equation/Solution.py
|
{
"start": 0,
"end": 838
}
|
class ____:
def solveEquation(self, equation: str) -> str:
def f(s):
x = y = 0
if s[0] != '-':
s = '+' + s
i, n = 0, len(s)
while i < n:
sign = 1 if s[i] == '+' else -1
i += 1
j = i
while j < n and s[j] not in '+-':
j += 1
v = s[i:j]
if v[-1] == 'x':
x += sign * (int(v[:-1]) if len(v) > 1 else 1)
else:
y += sign * int(v)
i = j
return x, y
a, b = equation.split('=')
x1, y1 = f(a)
x2, y2 = f(b)
if x1 == x2:
return 'Infinite solutions' if y1 == y2 else 'No solution'
return f'x={(y2 - y1) // (x1 - x2)}'
|
Solution
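A usage sketch for the parser above (assuming the class is instantiated under this record's target name, Solution): each side reduces to an (x-coefficient, constant) pair before solving.

s = Solution()
print(s.solveEquation("x+5-3+x=6+x-2"))  # x=2
print(s.solveEquation("x=x"))            # Infinite solutions
print(s.solveEquation("2x=x"))           # x=0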
|
python
|
sphinx-doc__sphinx
|
tests/test_ext_autodoc/test_ext_autodoc.py
|
{
"start": 40942,
"end": 99995
}
|
class ____:
def __init__(self, name: str, *, module: str = 'target.enums') -> None:
self.name = name
self.module = module
@property
def target(self) -> str:
"""The autodoc target class."""
return f'{self.module}.{self.name}'
def subtarget(self, name: str) -> str:
"""The autodoc sub-target (an attribute, method, etc)."""
return f'{self.target}.{name}'
def _node(
self,
role: str,
name: str,
doc: str,
*,
args: str,
indent: int,
**options: Any,
) -> list[str]:
prefix = indent * ' '
tab = ' ' * 3
def rst_option(name: str, value: Any) -> str:
value = '' if value == 1 else value # note True == 1.
return f'{prefix}{tab}:{name}: {value!s}'.rstrip()
lines = [
'',
f'{prefix}.. py:{role}:: {name}{args}',
f'{prefix}{tab}:module: {self.module}',
*itertools.starmap(rst_option, options.items()),
]
if doc:
lines.extend(['', f'{prefix}{tab}{doc}'])
lines.append('')
return lines
def entry(
self,
entry_name: str,
doc: str = '',
*,
role: str,
args: str = '',
indent: int = 3,
**rst_options: Any,
) -> list[str]:
"""Get the RST lines for a named attribute, method, etc."""
qualname = f'{self.name}.{entry_name}'
return self._node(role, qualname, doc, args=args, indent=indent, **rst_options)
def preamble_lookup(
self, doc: str, *, indent: int = 0, **options: Any
) -> list[str]:
assert doc, (
f'enumeration class {self.target!r} should have an explicit docstring'
)
args = self._preamble_args(functional_constructor=False)
return self._preamble(doc=doc, args=args, indent=indent, **options)
def preamble_constructor(
self, doc: str, *, indent: int = 0, **options: Any
) -> list[str]:
assert doc, (
f'enumeration class {self.target!r} should have an explicit docstring'
)
args = self._preamble_args(functional_constructor=True)
return self._preamble(doc=doc, args=args, indent=indent, **options)
def _preamble(
self, *, doc: str, args: str, indent: int = 0, **options: Any
) -> list[str]:
"""Generate the preamble of the class being documented."""
return self._node('class', self.name, doc, args=args, indent=indent, **options)
@staticmethod
def _preamble_args(functional_constructor: bool = False) -> str:
"""EnumType.__call__() is a dual-purpose method:
* Look an enum member (valid only if the enum has members)
* Create a new enum class (functional API)
"""
if sys.version_info[:2] >= (3, 14):
if functional_constructor:
return (
'(new_class_name, /, names, *, module=None, '
'qualname=None, type=None, start=1, boundary=None)'
)
else:
return '(*values)'
if sys.version_info[:2] >= (3, 13) or sys.version_info[:3] >= (3, 12, 3):
if functional_constructor:
return (
'(new_class_name, /, names, *, module=None, '
'qualname=None, type=None, start=1, boundary=None)'
)
else:
return '(*values)'
if sys.version_info[:2] >= (3, 12):
return (
'(value, names=None, *values, module=None, '
'qualname=None, type=None, start=1, boundary=None)'
)
return '(value)'
def method(
self,
name: str,
doc: str,
*flags: str,
args: str = '()',
indent: int = 3,
) -> list[str]:
rst_options = dict.fromkeys(flags, '')
return self.entry(
name, doc, role='method', args=args, indent=indent, **rst_options
)
def member(self, name: str, value: Any, doc: str, *, indent: int = 3) -> list[str]:
rst_options = {'value': repr(value)}
return self.entry(name, doc, role='attribute', indent=indent, **rst_options)
@pytest.fixture
def autodoc_enum_options() -> dict[str, object]:
"""Default autodoc options to use when testing enum's documentation."""
return {'members': None, 'undoc-members': None}
def test_enum_class(autodoc_enum_options):
fmt = _EnumFormatter('EnumCls')
options = autodoc_enum_options | {'private-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method(
'say_goodbye', 'a classmethod says good-bye to you.', 'classmethod'
),
*fmt.method('say_hello', 'a method says hello to you.'),
*fmt.member('val1', 12, 'doc for val1'),
*fmt.member('val2', 23, 'doc for val2'),
*fmt.member('val3', 34, 'doc for val3'),
*fmt.member('val4', 34, ''), # val4 is alias of val3
]
# Inherited members exclude the native Enum API (in particular
# the 'name' and 'value' properties), unless they were explicitly
# redefined by the user in one of the bases.
actual = do_autodoc(
'class', fmt.target, options=options | {'inherited-members': None}
)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method(
'say_goodbye', 'a classmethod says good-bye to you.', 'classmethod'
),
*fmt.method('say_hello', 'a method says hello to you.'),
*fmt.member('val1', 12, 'doc for val1'),
*fmt.member('val2', 23, 'doc for val2'),
*fmt.member('val3', 34, 'doc for val3'),
*fmt.member('val4', 34, ''), # val4 is alias of val3
]
# checks for an attribute of EnumCls
actual = do_autodoc('attribute', fmt.subtarget('val1'))
assert actual == fmt.member('val1', 12, 'doc for val1', indent=0)
def test_enum_class_with_data_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithDataType')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
*fmt.member('x', 'x', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'inherited'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
*fmt.member('x', 'x', ''),
]
def test_enum_class_with_mixin_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinType')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
*fmt.member('x', 'X', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
*fmt.entry('value', 'uppercased', role='property'),
*fmt.member('x', 'X', ''),
]
def test_enum_class_with_mixin_type_and_inheritence(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinTypeInherit')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.member('x', 'X', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'inherited', 'classmethod'),
*fmt.method('say_hello', 'inherited'),
*fmt.entry('value', 'uppercased', role='property'),
*fmt.member('x', 'X', ''),
]
def test_enum_class_with_mixin_enum_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinEnumType')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
# override() is overridden at the class level so it should be rendered
*fmt.method('override', 'overridden'),
# say_goodbye() and say_hello() are not rendered since they are inherited
*fmt.member('x', 'x', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('override', 'overridden'),
*fmt.method('say_goodbye', 'inherited', 'classmethod'),
*fmt.method('say_hello', 'inherited'),
*fmt.member('x', 'x', ''),
]
def test_enum_class_with_mixin_and_data_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinAndDataType')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('isupper', 'overridden'),
*fmt.method('say_goodbye', 'overridden', 'classmethod'),
*fmt.method('say_hello', 'overridden'),
*fmt.member('x', 'X', ''),
]
# add the special member __str__ (but not the inherited members)
options = autodoc_enum_options | {'special-members': '__str__'}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('__str__', 'overridden'),
*fmt.method('isupper', 'overridden'),
*fmt.method('say_goodbye', 'overridden', 'classmethod'),
*fmt.method('say_hello', 'overridden'),
*fmt.member('x', 'X', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'overridden'),
*fmt.method('say_goodbye', 'overridden', 'classmethod'),
*fmt.method('say_hello', 'overridden'),
*fmt.entry('value', 'uppercased', role='property'),
*fmt.member('x', 'X', ''),
]
def test_enum_with_parent_enum(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithParentEnum')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('isupper', 'overridden'),
*fmt.member('x', 'X', ''),
]
# add the special member __str__ (but not the inherited members)
options = autodoc_enum_options | {'special-members': '__str__'}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('__str__', 'overridden'),
*fmt.method('isupper', 'overridden'),
*fmt.member('x', 'X', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'overridden'),
*fmt.method('override', 'inherited'),
*fmt.method('say_goodbye', 'inherited', 'classmethod'),
*fmt.method('say_hello', 'inherited'),
*fmt.entry('value', 'uppercased', role='property'),
*fmt.member('x', 'X', ''),
]
def test_enum_sunder_method(autodoc_enum_options):
PRIVATE = {'private-members': None} # sunder methods are recognized as private
fmt = _EnumFormatter('EnumSunderMissingInNonEnumMixin')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumSunderMissingInEnumMixin')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumSunderMissingInDataType')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumSunderMissingInClass')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'docstring', 'classmethod', args='(value)'),
]
def test_enum_inherited_sunder_method(autodoc_enum_options):
options = autodoc_enum_options | {
'private-members': None,
'inherited-members': None,
}
fmt = _EnumFormatter('EnumSunderMissingInNonEnumMixin')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'inherited', 'classmethod', args='(value)'),
]
fmt = _EnumFormatter('EnumSunderMissingInEnumMixin')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'inherited', 'classmethod', args='(value)'),
]
fmt = _EnumFormatter('EnumSunderMissingInDataType')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'inherited', 'classmethod', args='(value)'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'inherited'),
]
fmt = _EnumFormatter('EnumSunderMissingInClass')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'docstring', 'classmethod', args='(value)'),
]
def test_enum_custom_name_property(autodoc_enum_options):
fmt = _EnumFormatter('EnumNamePropertyInNonEnumMixin')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumNamePropertyInEnumMixin')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumNamePropertyInDataType')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumNamePropertyInClass')
actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'docstring', role='property'),
]
def test_enum_inherited_custom_name_property(autodoc_enum_options):
options = autodoc_enum_options | {'inherited-members': None}
fmt = _EnumFormatter('EnumNamePropertyInNonEnumMixin')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'inherited', role='property'),
]
fmt = _EnumFormatter('EnumNamePropertyInEnumMixin')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'inherited', role='property'),
]
fmt = _EnumFormatter('EnumNamePropertyInDataType')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'inherited'),
*fmt.entry('name', 'inherited', role='property'),
]
fmt = _EnumFormatter('EnumNamePropertyInClass')
actual = do_autodoc('class', fmt.target, options=options)
assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'docstring', role='property'),
]
def test_descriptor_class() -> None:
options = {'members': 'CustomDataDescriptor,CustomDataDescriptor2'}
actual = do_autodoc('module', 'target.descriptor', options=options)
assert actual == [
'',
'.. py:module:: target.descriptor',
'',
'',
'.. py:class:: CustomDataDescriptor(doc)',
' :module: target.descriptor',
'',
' Descriptor class docstring.',
'',
'',
' .. py:method:: CustomDataDescriptor.meth()',
' :module: target.descriptor',
'',
' Function.',
'',
'',
'.. py:class:: CustomDataDescriptor2(doc)',
' :module: target.descriptor',
'',
' Descriptor class with custom metaclass docstring.',
'',
]
def test_automethod_for_builtin() -> None:
actual = do_autodoc('method', 'builtins.int.__add__')
assert actual == [
'',
'.. py:method:: int.__add__(value, /)',
' :module: builtins',
'',
' Return self+value.',
'',
]
def test_automethod_for_decorated() -> None:
actual = do_autodoc('method', 'target.decorator.Bar.meth')
assert actual == [
'',
'.. py:method:: Bar.meth(name=None, age=None)',
' :module: target.decorator',
'',
]
def test_abstractmethods() -> None:
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'target.abstractmethods', options=options)
assert actual == [
'',
'.. py:module:: target.abstractmethods',
'',
'',
'.. py:class:: Base()',
' :module: target.abstractmethods',
'',
'',
' .. py:method:: Base.abstractmeth()',
' :module: target.abstractmethods',
' :abstractmethod:',
'',
'',
' .. py:method:: Base.classmeth()',
' :module: target.abstractmethods',
' :abstractmethod:',
' :classmethod:',
'',
'',
' .. py:method:: Base.coroutinemeth()',
' :module: target.abstractmethods',
' :abstractmethod:',
' :async:',
'',
'',
' .. py:method:: Base.meth()',
' :module: target.abstractmethods',
'',
'',
' .. py:property:: Base.prop',
' :module: target.abstractmethods',
' :abstractmethod:',
'',
'',
' .. py:method:: Base.staticmeth()',
' :module: target.abstractmethods',
' :abstractmethod:',
' :staticmethod:',
'',
]
def test_partialfunction() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.partialfunction', options=options)
assert actual == [
'',
'.. py:module:: target.partialfunction',
'',
'',
'.. py:function:: func1(a, b, c)',
' :module: target.partialfunction',
'',
' docstring of func1',
'',
'',
'.. py:function:: func2(b, c)',
' :module: target.partialfunction',
'',
' docstring of func1',
'',
'',
'.. py:function:: func3(c)',
' :module: target.partialfunction',
'',
' docstring of func3',
'',
'',
'.. py:function:: func4()',
' :module: target.partialfunction',
'',
' docstring of func3',
'',
]
def test_imported_partialfunction_should_not_shown_without_imported_members() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.imported_members', options=options)
assert actual == [
'',
'.. py:module:: target.imported_members',
'',
]
def test_bound_method() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.bound_method', options=options)
assert actual == [
'',
'.. py:module:: target.bound_method',
'',
'',
'.. py:function:: bound_method()',
' :module: target.bound_method',
'',
' Method docstring',
'',
]
def test_partialmethod() -> None:
expected = [
'',
'.. py:class:: Cell()',
' :module: target.partialmethod',
'',
' An example for partialmethod.',
'',
' refs: https://docs.python.org/3/library/functools.html#functools.partialmethod',
'',
'',
' .. py:method:: Cell.set_alive()',
' :module: target.partialmethod',
'',
' Make a cell alive.',
'',
'',
' .. py:method:: Cell.set_state(state)',
' :module: target.partialmethod',
'',
' Update state of cell to *state*.',
'',
]
options = {'members': None}
actual = do_autodoc('class', 'target.partialmethod.Cell', options=options)
assert actual == expected
def test_partialmethod_undoc_members() -> None:
expected = [
'',
'.. py:class:: Cell()',
' :module: target.partialmethod',
'',
' An example for partialmethod.',
'',
' refs: https://docs.python.org/3/library/functools.html#functools.partialmethod',
'',
'',
' .. py:method:: Cell.set_alive()',
' :module: target.partialmethod',
'',
' Make a cell alive.',
'',
'',
' .. py:method:: Cell.set_dead()',
' :module: target.partialmethod',
'',
'',
' .. py:method:: Cell.set_state(state)',
' :module: target.partialmethod',
'',
' Update state of cell to *state*.',
'',
]
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('class', 'target.partialmethod.Cell', options=options)
assert actual == expected
def test_autodoc_typed_instance_variables() -> None:
options = {
'members': None,
'undoc-members': None,
}
# First compute autodoc of a `Derived` member to verify that it
# doesn't result in inherited members in
# `Derived.__annotations__`.
# https://github.com/sphinx-doc/sphinx/issues/13934
do_autodoc('attribute', 'target.typed_vars.Derived.attr2')
actual = do_autodoc('module', 'target.typed_vars', options=options)
assert actual == [
'',
'.. py:module:: target.typed_vars',
'',
'',
'.. py:attribute:: Alias',
' :module: target.typed_vars',
'',
' alias of :py:class:`~target.typed_vars.Derived`',
'',
'.. py:class:: Class()',
' :module: target.typed_vars',
'',
'',
' .. py:attribute:: Class.attr1',
' :module: target.typed_vars',
' :type: int',
' :value: 0',
'',
'',
' .. py:attribute:: Class.attr2',
' :module: target.typed_vars',
' :type: int',
'',
'',
' .. py:attribute:: Class.attr3',
' :module: target.typed_vars',
' :type: int',
' :value: 0',
'',
'',
' .. py:attribute:: Class.attr4',
' :module: target.typed_vars',
' :type: int',
'',
' attr4',
'',
'',
' .. py:attribute:: Class.attr5',
' :module: target.typed_vars',
' :type: int',
'',
' attr5',
'',
'',
' .. py:attribute:: Class.attr6',
' :module: target.typed_vars',
' :type: int',
'',
' attr6',
'',
'',
' .. py:attribute:: Class.descr4',
' :module: target.typed_vars',
' :type: int',
'',
' This is descr4',
'',
'',
'.. py:class:: Derived()',
' :module: target.typed_vars',
'',
'',
' .. py:attribute:: Derived.attr7',
' :module: target.typed_vars',
' :type: int',
'',
'',
'.. py:data:: attr1',
' :module: target.typed_vars',
' :type: str',
" :value: ''",
'',
' attr1',
'',
'',
'.. py:data:: attr2',
' :module: target.typed_vars',
' :type: str',
'',
' attr2',
'',
'',
'.. py:data:: attr3',
' :module: target.typed_vars',
' :type: str',
" :value: ''",
'',
' attr3',
'',
]
def test_autodoc_typed_inherited_instance_variables() -> None:
options = {
'members': None,
'undoc-members': None,
'inherited-members': None,
}
actual = do_autodoc('class', 'target.typed_vars.Derived', options=options)
assert actual == [
'',
'.. py:class:: Derived()',
' :module: target.typed_vars',
'',
'',
' .. py:attribute:: Derived.attr1',
' :module: target.typed_vars',
' :type: int',
' :value: 0',
'',
'',
' .. py:attribute:: Derived.attr2',
' :module: target.typed_vars',
' :type: int',
'',
'',
' .. py:attribute:: Derived.attr3',
' :module: target.typed_vars',
' :type: int',
' :value: 0',
'',
'',
' .. py:attribute:: Derived.attr4',
' :module: target.typed_vars',
' :type: int',
'',
' attr4',
'',
'',
' .. py:attribute:: Derived.attr5',
' :module: target.typed_vars',
' :type: int',
'',
' attr5',
'',
'',
' .. py:attribute:: Derived.attr6',
' :module: target.typed_vars',
' :type: int',
'',
' attr6',
'',
'',
' .. py:attribute:: Derived.attr7',
' :module: target.typed_vars',
' :type: int',
'',
'',
' .. py:attribute:: Derived.descr4',
' :module: target.typed_vars',
' :type: int',
'',
]
def test_autodoc_GenericAlias() -> None:
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'target.genericalias', options=options)
assert actual == [
'',
'.. py:module:: target.genericalias',
'',
'',
'.. py:class:: Class()',
' :module: target.genericalias',
'',
'',
' .. py:attribute:: Class.T',
' :module: target.genericalias',
'',
' A list of int',
'',
' alias of :py:class:`~typing.List`\\ [:py:class:`int`]',
'',
'',
'.. py:data:: L',
' :module: target.genericalias',
'',
' A list of Class',
'',
' alias of :py:class:`~typing.List`\\ '
'[:py:class:`~target.genericalias.Class`]',
'',
'',
'.. py:data:: T',
' :module: target.genericalias',
'',
' A list of int',
'',
' alias of :py:class:`~typing.List`\\ [:py:class:`int`]',
'',
]
@pytest.mark.skipif(
sys.version_info[:2] < (3, 12),
reason='type statement introduced in Python 3.12',
)
def test_autodoc_pep695_type_alias() -> None:
config = _AutodocConfig(
autodoc_type_aliases={
'buffer_like': 'buffer_like',
'pathlike': 'pathlike',
'Handler': 'Handler',
}
)
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'target.pep695', config=config, options=options)
assert actual == [
'',
'.. py:module:: target.pep695',
'',
'',
'.. py:class:: Bar',
' :module: target.pep695',
'',
' This is newtype of Pep695Alias.',
'',
' alias of :py:type:`~target.pep695.Pep695Alias`',
'',
'',
'.. py:class:: Foo()',
' :module: target.pep695',
'',
' This is class Foo.',
'',
'',
'.. py:data:: Handler',
' :module: target.pep695',
'',
' A generic type alias',
'',
' alias of :py:class:`type`\\ [:py:class:`Exception`]',
'',
'',
'.. py:type:: HandlerTypeAliasType',
' :module: target.pep695',
' :canonical: type[Exception]',
'',
' This is an explicitly constructed generic alias typing.TypeAlias.',
'',
'',
'.. py:type:: Pep695Alias',
' :module: target.pep695',
' :canonical: ~target.pep695.Foo',
'',
' This is PEP695 type alias.',
'',
'',
'.. py:type:: Pep695AliasC',
' :module: target.pep695',
' :canonical: dict[str, ~target.pep695.Foo]',
'',
' This is PEP695 complex type alias with doc comment.',
'',
'',
'.. py:type:: Pep695AliasOfAlias',
' :module: target.pep695',
' :canonical: ~target.pep695.Pep695AliasC',
'',
' This is PEP695 type alias of PEP695 alias.',
'',
'',
# Undocumented alias should not inherit any documentation
'.. py:type:: Pep695AliasUndocumented',
' :module: target.pep695',
' :canonical: ~target.pep695.Foo',
'',
'',
'.. py:type:: Pep695AliasUnion',
' :module: target.pep695',
' :canonical: str | int',
'',
' This is PEP695 type alias for union.',
'',
'',
'.. py:type:: TypeAliasTypeExplicit',
' :module: target.pep695',
' :canonical: ~target.pep695.Foo',
'',
' This is an explicitly constructed typing.TypeAlias.',
'',
'',
'.. py:type:: TypeAliasTypeExtension',
' :module: target.pep695',
' :canonical: ~target.pep695.Foo',
'',
' This is an explicitly constructed typing_extensions.TypeAlias.',
'',
'',
'.. py:function:: buffer_len(data: buffer_like) -> int',
' :module: target.pep695',
'',
' Return length of a buffer-like object.',
'',
' Tests Union type alias cross-reference resolution.',
'',
'',
'.. py:data:: buffer_like',
' :module: target.pep695',
' :value: bytes | bytearray | memoryview',
'',
' Some buffer-like object',
'',
'',
'.. py:data:: pathlike',
' :module: target.pep695',
f' :value: str | {pathlib.Path.__module__}.Path',
'',
' Any type of path',
'',
'',
'.. py:function:: process_error(handler: Handler, other: ~target.pep695.HandlerTypeAliasType) -> str',
' :module: target.pep695',
'',
' Process an error with a custom handler type.',
'',
' Tests generic type alias cross-reference resolution.',
'',
'',
'.. py:function:: read_file(path: pathlike) -> bytes',
' :module: target.pep695',
'',
' Read a file and return its contents.',
'',
' Tests Union type alias cross-reference resolution.',
'',
'',
'.. py:function:: ret_pep695(a: ~target.pep695.Pep695Alias) -> ~target.pep695.Pep695Alias',
' :module: target.pep695',
'',
' This fn accepts and returns PEP695 alias.',
'',
]
def test_autodoc_TypeVar() -> None:
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'target.typevar', options=options)
assert actual == [
'',
'.. py:module:: target.typevar',
'',
'',
'.. py:class:: Class()',
' :module: target.typevar',
'',
'',
' .. py:class:: Class.T1',
' :module: target.typevar',
'',
' T1',
'',
" alias of TypeVar('T1')",
'',
'',
' .. py:class:: Class.T6',
' :module: target.typevar',
'',
' T6',
'',
' alias of :py:class:`~datetime.date`',
'',
'',
'.. py:class:: T1',
' :module: target.typevar',
'',
' T1',
'',
" alias of TypeVar('T1')",
'',
'',
'.. py:class:: T3',
' :module: target.typevar',
'',
' T3',
'',
" alias of TypeVar('T3', int, str)",
'',
'',
'.. py:class:: T4',
' :module: target.typevar',
'',
' T4',
'',
" alias of TypeVar('T4', covariant=True)",
'',
'',
'.. py:class:: T5',
' :module: target.typevar',
'',
' T5',
'',
" alias of TypeVar('T5', contravariant=True)",
'',
'',
'.. py:class:: T6',
' :module: target.typevar',
'',
' T6',
'',
' alias of :py:class:`~datetime.date`',
'',
'',
'.. py:class:: T7',
' :module: target.typevar',
'',
' T7',
'',
" alias of TypeVar('T7', bound=\\ :py:class:`int`)",
'',
]
def test_autodoc_Annotated() -> None:
options = {
'members': None,
'member-order': 'bysource',
}
actual = do_autodoc('module', 'target.annotated', options=options)
assert actual == [
'',
'.. py:module:: target.annotated',
'',
'',
'.. py:class:: FuncValidator(func: function)',
' :module: target.annotated',
'',
'',
'.. py:class:: MaxLen(max_length: int, whitelisted_words: list[str])',
' :module: target.annotated',
'',
'',
'.. py:data:: ValidatedString',
' :module: target.annotated',
'',
' Type alias for a validated string.',
'',
' alias of :py:class:`~typing.Annotated`\\ [:py:class:`str`, '
':py:class:`~target.annotated.FuncValidator`\\ (func=\\ :py:class:`~target.annotated.validate`)]',
'',
'',
".. py:function:: hello(name: ~typing.Annotated[str, 'attribute']) -> None",
' :module: target.annotated',
'',
' docstring',
'',
'',
'.. py:class:: AnnotatedAttributes()',
' :module: target.annotated',
'',
' docstring',
'',
'',
' .. py:attribute:: AnnotatedAttributes.name',
' :module: target.annotated',
" :type: ~typing.Annotated[str, 'attribute']",
'',
' Docstring about the ``name`` attribute.',
'',
'',
' .. py:attribute:: AnnotatedAttributes.max_len',
' :module: target.annotated',
" :type: list[~typing.Annotated[str, ~target.annotated.MaxLen(max_length=10, whitelisted_words=['word_one', 'word_two'])]]",
'',
' Docstring about the ``max_len`` attribute.',
'',
'',
' .. py:attribute:: AnnotatedAttributes.validated',
' :module: target.annotated',
' :type: ~typing.Annotated[str, ~target.annotated.FuncValidator(func=~target.annotated.validate)]',
'',
' Docstring about the ``validated`` attribute.',
'',
]
def test_autodoc_TYPE_CHECKING() -> None:
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'target.TYPE_CHECKING', options=options)
assert actual == [
'',
'.. py:module:: target.TYPE_CHECKING',
'',
'',
'.. py:class:: Foo()',
' :module: target.TYPE_CHECKING',
'',
'',
' .. py:attribute:: Foo.attr1',
' :module: target.TYPE_CHECKING',
' :type: ~io.StringIO',
'',
'',
'.. py:function:: spam(ham: ~collections.abc.Iterable[str]) -> tuple[~gettext.NullTranslations, bool]',
' :module: target.TYPE_CHECKING',
'',
]
def test_autodoc_TYPE_CHECKING_circular_import() -> None:
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'circular_import', options=options)
assert actual == [
'',
'.. py:module:: circular_import',
'',
]
assert sys.modules['circular_import'].a is sys.modules['circular_import.a']
def test_singledispatch() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.singledispatch', options=options)
assert actual == [
'',
'.. py:module:: target.singledispatch',
'',
'',
'.. py:function:: func(arg, kwarg=None)',
' func(arg: float, kwarg=None)',
' func(arg: int, kwarg=None)',
' func(arg: str, kwarg=None)',
' func(arg: dict, kwarg=None)',
' :module: target.singledispatch',
'',
' A function for general use.',
'',
]
def test_singledispatchmethod() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.singledispatchmethod', options=options)
assert actual == [
'',
'.. py:module:: target.singledispatchmethod',
'',
'',
'.. py:class:: Foo()',
' :module: target.singledispatchmethod',
'',
' docstring',
'',
'',
' .. py:method:: Foo.meth(arg, kwarg=None)',
' Foo.meth(arg: float, kwarg=None)',
' Foo.meth(arg: int, kwarg=None)',
' Foo.meth(arg: str, kwarg=None)',
' Foo.meth(arg: dict, kwarg=None)',
' :module: target.singledispatchmethod',
'',
' A method for general use.',
'',
]
def test_singledispatchmethod_automethod() -> None:
actual = do_autodoc('method', 'target.singledispatchmethod.Foo.meth')
assert actual == [
'',
'.. py:method:: Foo.meth(arg, kwarg=None)',
' Foo.meth(arg: float, kwarg=None)',
' Foo.meth(arg: int, kwarg=None)',
' Foo.meth(arg: str, kwarg=None)',
' Foo.meth(arg: dict, kwarg=None)',
' :module: target.singledispatchmethod',
'',
' A method for general use.',
'',
]
def test_singledispatchmethod_classmethod() -> None:
options = {'members': None}
actual = do_autodoc(
'module', 'target.singledispatchmethod_classmethod', options=options
)
assert actual == [
'',
'.. py:module:: target.singledispatchmethod_classmethod',
'',
'',
'.. py:class:: Foo()',
' :module: target.singledispatchmethod_classmethod',
'',
' docstring',
'',
'',
' .. py:method:: Foo.class_meth(arg, kwarg=None)',
' Foo.class_meth(arg: float, kwarg=None)',
' Foo.class_meth(arg: int, kwarg=None)',
' Foo.class_meth(arg: str, kwarg=None)',
' Foo.class_meth(arg: dict, kwarg=None)',
' :module: target.singledispatchmethod_classmethod',
' :classmethod:',
'',
' A class method for general use.',
'',
]
def test_singledispatchmethod_classmethod_automethod() -> None:
actual = do_autodoc(
'method', 'target.singledispatchmethod_classmethod.Foo.class_meth'
)
assert actual == [
'',
'.. py:method:: Foo.class_meth(arg, kwarg=None)',
' Foo.class_meth(arg: float, kwarg=None)',
' Foo.class_meth(arg: int, kwarg=None)',
' Foo.class_meth(arg: str, kwarg=None)',
' Foo.class_meth(arg: dict, kwarg=None)',
' :module: target.singledispatchmethod_classmethod',
' :classmethod:',
'',
' A class method for general use.',
'',
]
@pytest.mark.skipif(
sys.version_info[:2] >= (3, 13),
reason='Cython does not support Python 3.13 yet.',
)
@pytest.mark.skipif(pyximport is None, reason='cython is not installed')
def test_cython() -> None:
options = {
'members': None,
'undoc-members': None,
}
actual = do_autodoc('module', 'target.cython', options=options)
assert actual == [
'',
'.. py:module:: target.cython',
'',
'',
'.. py:class:: Class()',
' :module: target.cython',
'',
' Docstring.',
'',
'',
' .. py:method:: Class.meth(name: str, age: int = 0) -> None',
' :module: target.cython',
'',
' Docstring.',
'',
'',
'.. py:function:: foo(x: int, *args, y: str, **kwargs)',
' :module: target.cython',
'',
' Docstring.',
'',
]
def test_final() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.final', options=options)
assert actual == [
'',
'.. py:module:: target.final',
'',
'',
'.. py:class:: Class()',
' :module: target.final',
' :final:',
'',
' docstring',
'',
'',
' .. py:method:: Class.meth1()',
' :module: target.final',
' :final:',
'',
' docstring',
'',
'',
' .. py:method:: Class.meth2()',
' :module: target.final',
'',
' docstring',
'',
'',
' .. py:method:: Class.meth3()',
' :module: target.final',
' :final:',
'',
' docstring',
'',
'',
' .. py:method:: Class.meth4()',
' :module: target.final',
' :final:',
'',
' docstring',
'',
]
def test_overload() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.overload', options=options)
assert actual == [
'',
'.. py:module:: target.overload',
'',
'',
'.. py:class:: Bar(x: int, y: int)',
' Bar(x: str, y: str)',
' :module: target.overload',
'',
' docstring',
'',
'',
'.. py:class:: Baz(x: int, y: int)',
' Baz(x: str, y: str)',
' :module: target.overload',
'',
' docstring',
'',
'',
'.. py:class:: Foo(x: int, y: int)',
' Foo(x: str, y: str)',
' :module: target.overload',
'',
' docstring',
'',
'',
'.. py:class:: Math()',
' :module: target.overload',
'',
' docstring',
'',
'',
' .. py:method:: Math.sum(x: int, y: int = 0) -> int',
' Math.sum(x: float, y: float = 0.0) -> float',
' Math.sum(x: str, y: str = None) -> str',
' :module: target.overload',
'',
' docstring',
'',
'',
'.. py:function:: sum(x: int, y: int = 0) -> int',
' sum(x: float, y: float = 0.0) -> float',
' sum(x: str, y: str = None) -> str',
' :module: target.overload',
'',
' docstring',
'',
]
def test_overload2() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.overload2', options=options)
assert actual == [
'',
'.. py:module:: target.overload2',
'',
'',
'.. py:class:: Baz(x: int, y: int)',
' Baz(x: str, y: str)',
' :module: target.overload2',
'',
]
def test_overload3() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.overload3', options=options)
assert actual == [
'',
'.. py:module:: target.overload3',
'',
'',
'.. py:function:: test(x: int) -> int',
' test(x: list[int]) -> list[int]',
' test(x: str) -> str',
' test(x: float) -> float',
' :module: target.overload3',
'',
' Documentation.',
'',
]
def test_pymodule_for_ModuleLevelDocumenter() -> None:
ref_context: dict[str, Any] = {'py:module': 'target.classes'}
actual = do_autodoc('class', 'Foo', ref_context=ref_context)
assert actual == [
'',
'.. py:class:: Foo()',
' :module: target.classes',
'',
]
def test_pymodule_for_ClassLevelDocumenter() -> None:
ref_context: dict[str, Any] = {'py:module': 'target.methods'}
actual = do_autodoc('method', 'Base.meth', ref_context=ref_context)
assert actual == [
'',
'.. py:method:: Base.meth()',
' :module: target.methods',
'',
]
def test_pyclass_for_ClassLevelDocumenter() -> None:
ref_context: dict[str, Any] = {'py:module': 'target.methods', 'py:class': 'Base'}
actual = do_autodoc('method', 'meth', ref_context=ref_context)
assert actual == [
'',
'.. py:method:: Base.meth()',
' :module: target.methods',
'',
]
def test_autodoc(caplog: pytest.LogCaptureFixture) -> None:
# work around sphinx.util.logging.setup()
logger = logging.getLogger('sphinx')
logger.handlers[:] = [caplog.handler]
caplog.set_level(logging.WARNING)
config = _AutodocConfig(autodoc_mock_imports=['dummy'])
options = {'members': None}
actual = do_autodoc(
'module', 'autodoc_dummy_module', config=config, options=options
)
assert actual == [
'',
'.. py:module:: autodoc_dummy_module',
'',
'',
'.. py:function:: test()',
' :module: autodoc_dummy_module',
'',
' Dummy function using dummy.*',
'',
]
# See: https://github.com/sphinx-doc/sphinx/issues/2437
do_autodoc('module', 'bug2437.autodoc_dummy_foo', options=options)
actual = do_autodoc('module', 'autodoc_dummy_bar', options=options)
assert actual == [
'',
'.. py:module:: autodoc_dummy_bar',
'',
'',
'.. py:class:: Bar()',
' :module: autodoc_dummy_bar',
'',
' Dummy class Bar with alias.',
'',
'',
' .. py:attribute:: Bar.my_name',
' :module: autodoc_dummy_bar',
'',
' alias of :py:class:`~bug2437.autodoc_dummy_foo.Foo`',
]
assert not caplog.records
def test_name_conflict() -> None:
actual = do_autodoc('class', 'target.name_conflict.foo')
assert actual == [
'',
'.. py:class:: foo()',
' :module: target.name_conflict',
'',
' docstring of target.name_conflict::foo.',
'',
]
actual = do_autodoc('class', 'target.name_conflict.foo.bar')
assert actual == [
'',
'.. py:class:: bar()',
' :module: target.name_conflict.foo',
'',
' docstring of target.name_conflict.foo::bar.',
'',
]
def test_name_mangling() -> None:
options = {
'members': None,
'undoc-members': None,
'private-members': None,
}
actual = do_autodoc('module', 'target.name_mangling', options=options)
assert actual == [
'',
'.. py:module:: target.name_mangling',
'',
'',
'.. py:class:: Bar()',
' :module: target.name_mangling',
'',
'',
' .. py:attribute:: Bar._Baz__email',
' :module: target.name_mangling',
' :value: None',
'',
' a member having mangled-like name',
'',
'',
' .. py:attribute:: Bar.__address',
' :module: target.name_mangling',
' :value: None',
'',
'',
'.. py:class:: Foo()',
' :module: target.name_mangling',
'',
'',
' .. py:attribute:: Foo.__age',
' :module: target.name_mangling',
' :value: None',
'',
'',
' .. py:attribute:: Foo.__name',
' :module: target.name_mangling',
' :value: None',
'',
' name of Foo',
'',
]
def test_type_union_operator() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.pep604', options=options)
assert actual == [
'',
'.. py:module:: target.pep604',
'',
'',
'.. py:class:: Foo()',
' :module: target.pep604',
'',
' docstring',
'',
'',
' .. py:attribute:: Foo.attr',
' :module: target.pep604',
' :type: int | str',
'',
' docstring',
'',
'',
' .. py:method:: Foo.meth(x: int | str, y: int | str) -> int | str',
' :module: target.pep604',
'',
' docstring',
'',
'',
'.. py:data:: attr',
' :module: target.pep604',
' :type: int | str',
'',
' docstring',
'',
'',
'.. py:function:: sum(x: int | str, y: int | str) -> int | str',
' :module: target.pep604',
'',
' docstring',
'',
]
def test_hide_value() -> None:
options = {'members': None}
actual = do_autodoc('module', 'target.hide_value', options=options)
assert actual == [
'',
'.. py:module:: target.hide_value',
'',
'',
'.. py:class:: Foo()',
' :module: target.hide_value',
'',
' docstring',
'',
'',
' .. py:attribute:: Foo.SENTINEL1',
' :module: target.hide_value',
'',
' docstring',
'',
' :meta hide-value:',
'',
'',
' .. py:attribute:: Foo.SENTINEL2',
' :module: target.hide_value',
'',
' :meta hide-value:',
'',
'',
'.. py:data:: SENTINEL1',
' :module: target.hide_value',
'',
' docstring',
'',
' :meta hide-value:',
'',
'',
'.. py:data:: SENTINEL2',
' :module: target.hide_value',
'',
' :meta hide-value:',
'',
]
def test_canonical() -> None:
options = {
'members': None,
'imported-members': None,
}
actual = do_autodoc('module', 'target.canonical', options=options)
assert actual == [
'',
'.. py:module:: target.canonical',
'',
'',
'.. py:class:: Bar()',
' :module: target.canonical',
'',
' docstring',
'',
'',
'.. py:class:: Foo()',
' :module: target.canonical',
' :canonical: target.canonical.original.Foo',
'',
' docstring',
'',
'',
' .. py:method:: Foo.meth()',
' :module: target.canonical',
'',
' docstring',
'',
]
def bounded_typevar_rst(name, bound):
return [
'',
f'.. py:class:: {name}',
' :module: target.literal',
'',
' docstring',
'',
f' alias of TypeVar({name!r}, bound={bound})',
'',
]
def function_rst(name, sig):
return [
'',
f'.. py:function:: {name}({sig})',
' :module: target.literal',
'',
' docstring',
'',
]
def test_literal_render() -> None:
config = _AutodocConfig(autodoc_typehints_format='short')
# autodoc_typehints_format can take 'short' or 'fully-qualified' values
# and this will be interpreted as 'smart' or 'fully-qualified-except-typing' by restify()
# and 'smart' or 'fully-qualified' by stringify_annotation().
options = {
'members': None,
'exclude-members': 'MyEnum',
}
actual = do_autodoc('module', 'target.literal', config=config, options=options)
assert actual == [
'',
'.. py:module:: target.literal',
'',
*bounded_typevar_rst('T', r"\ :py:obj:`~typing.Literal`\ [1234, 'abcd']"),
*bounded_typevar_rst(
'U',
r'\ :py:obj:`~typing.Literal`\ ['
r':py:attr:`~target.literal.MyEnum.a`, '
r':py:attr:`~target.literal.MyEnum.b`]',
),
*function_rst('bar', "x: ~typing.Literal[1234, 'abcd']"),
*function_rst('foo', 'x: ~typing.Literal[MyEnum.a, MyEnum.b]'),
]
# restify() assumes that 'fully-qualified' is 'fully-qualified-except-typing'
# because it is more likely that a user wants to suppress 'typing.*'
config = _AutodocConfig(autodoc_typehints_format='fully-qualified')
actual = do_autodoc('module', 'target.literal', config=config, options=options)
assert actual == [
'',
'.. py:module:: target.literal',
'',
*bounded_typevar_rst('T', r"\ :py:obj:`~typing.Literal`\ [1234, 'abcd']"),
*bounded_typevar_rst(
'U',
r'\ :py:obj:`~typing.Literal`\ ['
r':py:attr:`target.literal.MyEnum.a`, '
r':py:attr:`target.literal.MyEnum.b`]',
),
*function_rst('bar', "x: typing.Literal[1234, 'abcd']"),
*function_rst(
'foo',
'x: typing.Literal[target.literal.MyEnum.a, target.literal.MyEnum.b]',
),
]
def test_literal_render_pep604() -> None:
config = _AutodocConfig(
python_display_short_literal_types=True,
autodoc_typehints_format='short',
)
options = {
'members': None,
'exclude-members': 'MyEnum',
}
actual = do_autodoc('module', 'target.literal', config=config, options=options)
assert actual == [
'',
'.. py:module:: target.literal',
'',
*bounded_typevar_rst('T', r"\ :py:obj:`~typing.Literal`\ [1234, 'abcd']"),
*bounded_typevar_rst(
'U',
r'\ :py:obj:`~typing.Literal`\ ['
r':py:attr:`~target.literal.MyEnum.a`, '
r':py:attr:`~target.literal.MyEnum.b`]',
),
*function_rst('bar', "x: 1234 | 'abcd'"),
*function_rst('foo', 'x: MyEnum.a | MyEnum.b'),
]
# restify() assumes that 'fully-qualified' is 'fully-qualified-except-typing'
# because it is more likely that a user wants to suppress 'typing.*'
config = _AutodocConfig(
python_display_short_literal_types=True,
autodoc_typehints_format='fully-qualified',
)
actual = do_autodoc('module', 'target.literal', config=config, options=options)
assert actual == [
'',
'.. py:module:: target.literal',
'',
*bounded_typevar_rst('T', r"\ :py:obj:`~typing.Literal`\ [1234, 'abcd']"),
*bounded_typevar_rst(
'U',
r'\ :py:obj:`~typing.Literal`\ ['
r':py:attr:`target.literal.MyEnum.a`, '
r':py:attr:`target.literal.MyEnum.b`]',
),
*function_rst('bar', "x: 1234 | 'abcd'"),
*function_rst('foo', 'x: target.literal.MyEnum.a | target.literal.MyEnum.b'),
]
def test_no_index_entry() -> None:
# modules can use no-index-entry
options = {'no-index-entry': None}
actual = do_autodoc('module', 'target.module', options=options)
assert ' :no-index-entry:' in actual
# classes can use no-index-entry
actual = do_autodoc('class', 'target.classes.Foo', options=options)
assert ' :no-index-entry:' in actual
# functions can use no-index-entry
actual = do_autodoc('function', 'target.functions.func', options=options)
assert ' :no-index-entry:' in actual
# modules respect no-index-entry in autodoc_default_options
config = _AutodocConfig(autodoc_default_options={'no-index-entry': True})
actual = do_autodoc('module', 'target.module', config=config)
assert ' :no-index-entry:' in actual
# classes respect config-level no-index-entry
actual = do_autodoc('class', 'target.classes.Foo', config=config)
assert ' :no-index-entry:' in actual
# functions respect config-level no-index-entry
actual = do_autodoc('function', 'target.functions.func', config=config)
assert ' :no-index-entry:' in actual
|
_EnumFormatter
|
python
|
sphinx-doc__sphinx
|
sphinx/builders/text.py
|
{
"start": 527,
"end": 2959
}
|
class ____(Builder):
name = 'text'
format = 'text'
epilog = __('The text files are in %(outdir)s.')
out_suffix = '.txt'
allow_parallel = True
default_translator_class = TextTranslator
current_docname: str | None = None
def init(self) -> None:
# section numbers for headings in the currently visited document
self.secnumbers: dict[str, tuple[int, ...]] = {}
def get_outdated_docs(self) -> Iterator[str]:
for docname in self.env.found_docs:
if docname not in self.env.all_docs:
yield docname
continue
targetname = self.outdir / (docname + self.out_suffix)
try:
targetmtime = _last_modified_time(targetname)
except Exception:
targetmtime = 0
try:
srcmtime = _last_modified_time(self.env.doc2path(docname))
if srcmtime > targetmtime:
yield docname
except OSError:
# source doesn't exist anymore
pass
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
return ''
def write_doc(self, docname: str, doctree: nodes.document) -> None:
self.current_docname = docname
self.secnumbers = self.env.toc_secnumbers.get(docname, {})
visitor: TextTranslator = self.create_translator(doctree, self) # type: ignore[assignment]
doctree.walkabout(visitor)
output = visitor.body
out_file_name = self.outdir / (docname + self.out_suffix)
out_file_name.parent.mkdir(parents=True, exist_ok=True)
try:
out_file_name.write_text(output, encoding='utf-8')
except OSError as err:
logger.warning(__('error writing file %s: %s'), out_file_name, err)
def finish(self) -> None:
pass
def setup(app: Sphinx) -> ExtensionMetadata:
app.add_builder(TextBuilder)
app.add_config_value('text_sectionchars', '*=-~"+`', 'env', types=frozenset({str}))
app.add_config_value('text_newlines', 'unix', 'env', types=frozenset({str}))
app.add_config_value('text_add_secnumbers', True, 'env', types=frozenset({bool}))
app.add_config_value('text_secnumber_suffix', '. ', 'env', types=frozenset({str}))
return {
'version': 'builtin',
'parallel_read_safe': True,
'parallel_write_safe': True,
}
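# A minimal conf.py sketch (hypothetical project) using the options that
# setup() registers above; the values shown are the registered defaults.
#
#     text_sectionchars = '*=-~"+`'   # underline characters per section level
#     text_newlines = 'unix'          # newline style for the written files
#     text_add_secnumbers = True      # prefix section titles with numbers
#     text_secnumber_suffix = '. '    # separator between number and title
#
# Build the plain-text output with: sphinx-build -b text <srcdir> <outdir>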
|
TextBuilder
|
python
|
microsoft__pyright
|
packages/pyright-internal/src/tests/samples/with1.py
|
{
"start": 2077,
"end": 2203
}
|
class ____(Class5[int]): ...
async def do():
async with Class6() as f:
reveal_type(f, expected_text="Class6")
|
Class6
|
python
|
Textualize__rich
|
rich/prompt.py
|
{
"start": 10414,
"end": 12435
}
|
class ____(PromptBase[bool]):
"""A yes / no confirmation prompt.
Example:
>>> if Confirm.ask("Continue"):
        ...     run_job()
"""
response_type = bool
validate_error_message = "[prompt.invalid]Please enter Y or N"
choices: List[str] = ["y", "n"]
def render_default(self, default: DefaultType) -> Text:
"""Render the default as (y) or (n) rather than True/False."""
yes, no = self.choices
return Text(f"({yes})" if default else f"({no})", style="prompt.default")
def process_response(self, value: str) -> bool:
"""Convert choices to a bool."""
value = value.strip().lower()
if value not in self.choices:
raise InvalidResponse(self.validate_error_message)
return value == self.choices[0]
if __name__ == "__main__": # pragma: no cover
from rich import print
if Confirm.ask("Run [i]prompt[/i] tests?", default=True):
while True:
result = IntPrompt.ask(
":rocket: Enter a number between [b]1[/b] and [b]10[/b]", default=5
)
if result >= 1 and result <= 10:
break
print(":pile_of_poo: [prompt.invalid]Number must be between 1 and 10")
print(f"number={result}")
while True:
password = Prompt.ask(
"Please enter a password [cyan](must be at least 5 characters)",
password=True,
)
if len(password) >= 5:
break
print("[prompt.invalid]password too short")
print(f"password={password!r}")
fruit = Prompt.ask("Enter a fruit", choices=["apple", "orange", "pear"])
print(f"fruit={fruit!r}")
doggie = Prompt.ask(
"What's the best Dog? (Case INSENSITIVE)",
choices=["Border Terrier", "Collie", "Labradoodle"],
case_sensitive=False,
)
print(f"doggie={doggie!r}")
else:
print("[b]OK :loudly_crying_face:")
|
Confirm
|
python
|
apache__airflow
|
airflow-core/src/airflow/providers_manager.py
|
{
"start": 6106,
"end": 6303
}
|
class ____:
"""
Provider information.
:param version: version string
:param data: dictionary with information about the provider
"""
version: str
data: dict
|
ProviderInfo
|
python
|
pytorch__pytorch
|
torch/_export/db/examples/class_method.py
|
{
"start": 41,
"end": 499
}
|
class ____(torch.nn.Module):
"""
Class methods are inlined during tracing.
"""
@classmethod
def method(cls, x):
return x + 1
def __init__(self) -> None:
super().__init__()
self.linear = torch.nn.Linear(4, 2)
def forward(self, x):
x = self.linear(x)
return self.method(x) * self.__class__.method(x) * type(self).method(x)
example_args = (torch.randn(3, 4),)
model = ClassMethod()
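# A minimal export sketch (assumes a torch version that ships torch.export);
# it demonstrates the docstring's claim that classmethod calls are inlined:
# the exported graph contains the "x + 1" additions directly, with no
# remaining method-call nodes.
#
#     exported = torch.export.export(model, example_args)
#     print(exported.graph_module.code)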
|
ClassMethod
|
python
|
pyqtgraph__pyqtgraph
|
pyqtgraph/graphicsItems/ROI.py
|
{
"start": 71165,
"end": 72495
}
|
class ____(ROI):
r"""
Rectangular ROI subclass with scale-rotate handles on either side. This
allows the ROI to be positioned as if moving the ends of a line segment.
A third handle controls the width of the ROI orthogonal to its "line" axis.
============== =============================================================
**Arguments**
pos1 (length-2 sequence) The position of the center of the ROI's
left edge.
pos2 (length-2 sequence) The position of the center of the ROI's
right edge.
width (float) The width of the ROI.
\**args All extra keyword arguments are passed to ROI()
============== =============================================================
"""
def __init__(self, pos1, pos2, width, **args):
pos1 = Point(pos1)
pos2 = Point(pos2)
d = pos2-pos1
l = d.length()
ra = d.angle(Point(1, 0), units="radians")
c = Point(width/2. * sin(ra), -width/2. * cos(ra))
pos1 = pos1 + c
ROI.__init__(self, pos1, size=Point(l, width), angle=degrees(ra), **args)
self.addScaleRotateHandle([0, 0.5], [1, 0.5])
self.addScaleRotateHandle([1, 0.5], [0, 0.5])
self.addScaleHandle([0.5, 1], [0.5, 0.5])
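# A minimal usage sketch (hypothetical 'view' object): place the ROI as a
# line segment from (0, 0) to (10, 5) that is 2 scene units wide.
#
#     roi = LineROI(pos1=(0, 0), pos2=(10, 5), width=2, pen='r')
#     view.addItem(roi)  # 'view' is an assumed pyqtgraph ViewBox or PlotItem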
|
LineROI
|
python
|
lxml__lxml
|
src/lxml/tests/test_xmlschema.py
|
{
"start": 14086,
"end": 18885
}
|
class ____(HelperTestCase):
resolver_schema_int = BytesIO(b"""\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:etype="http://codespeak.net/lxml/test/external"
targetNamespace="http://codespeak.net/lxml/test/internal">
<xsd:import namespace="http://codespeak.net/lxml/test/external" schemaLocation="XXX.xsd" />
<xsd:element name="a" type="etype:AType"/>
</xsd:schema>""")
resolver_schema_int2 = BytesIO(b"""\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
xmlns:etype="http://codespeak.net/lxml/test/external"
targetNamespace="http://codespeak.net/lxml/test/internal">
<xsd:import namespace="http://codespeak.net/lxml/test/external" schemaLocation="YYY.xsd" />
<xsd:element name="a" type="etype:AType"/>
</xsd:schema>""")
resolver_schema_ext = """\
<xsd:schema xmlns:xsd="http://www.w3.org/2001/XMLSchema"
targetNamespace="http://codespeak.net/lxml/test/external">
<xsd:complexType name="AType">
<xsd:sequence><xsd:element name="b" type="xsd:string" minOccurs="0" maxOccurs="unbounded" /></xsd:sequence>
</xsd:complexType>
</xsd:schema>"""
class simple_resolver(etree.Resolver):
def __init__(self, schema):
self.schema = schema
def resolve(self, url, id, context):
assert url == 'XXX.xsd'
return self.resolve_string(self.schema, context)
# tests:
def test_xmlschema_resolvers(self):
# test that resolvers work with schema.
parser = etree.XMLParser()
parser.resolvers.add(self.simple_resolver(self.resolver_schema_ext))
schema_doc = etree.parse(self.resolver_schema_int, parser = parser)
schema = etree.XMLSchema(schema_doc)
def test_xmlschema_resolvers_root(self):
# test that the default resolver will get called if there's no
# specific parser resolver.
root_resolver = self.simple_resolver(self.resolver_schema_ext)
default_resolvers = etree.get_default_parser().resolvers
default_resolvers.add(root_resolver)
try:
schema_doc = etree.parse(self.resolver_schema_int)
schema = etree.XMLSchema(schema_doc)
finally:
default_resolvers.remove(root_resolver)
def test_xmlschema_resolvers_noroot(self):
# test that the default resolver will not get called when a
# more specific resolver is registered.
class res_root(etree.Resolver):
def resolve(self, url, id, context):
assert False
return None
root_resolver = res_root()
default_resolvers = etree.get_default_parser().resolvers
default_resolvers.add(root_resolver)
try:
parser = etree.XMLParser()
parser.resolvers.add(self.simple_resolver(self.resolver_schema_ext))
schema_doc = etree.parse(self.resolver_schema_int, parser = parser)
schema = etree.XMLSchema(schema_doc)
finally:
default_resolvers.remove(root_resolver)
def test_xmlschema_nested_resolvers(self):
# test that resolvers work in a nested fashion.
resolver_schema = self.resolver_schema_ext
class res_nested(etree.Resolver):
def __init__(self, ext_schema):
self.ext_schema = ext_schema
def resolve(self, url, id, context):
assert url == 'YYY.xsd'
return self.resolve_string(self.ext_schema, context)
class res(etree.Resolver):
def __init__(self, ext_schema_1, ext_schema_2):
self.ext_schema_1 = ext_schema_1
self.ext_schema_2 = ext_schema_2
def resolve(self, url, id, context):
assert url == 'XXX.xsd'
new_parser = etree.XMLParser()
new_parser.resolvers.add(res_nested(self.ext_schema_2))
new_schema_doc = etree.parse(self.ext_schema_1, parser = new_parser)
new_schema = etree.XMLSchema(new_schema_doc)
return self.resolve_string(resolver_schema, context)
parser = etree.XMLParser()
parser.resolvers.add(res(self.resolver_schema_int2, self.resolver_schema_ext))
schema_doc = etree.parse(self.resolver_schema_int, parser = parser)
schema = etree.XMLSchema(schema_doc)
def test_suite():
suite = unittest.TestSuite()
suite.addTests([unittest.defaultTestLoader.loadTestsFromTestCase(ETreeXMLSchemaTestCase)])
suite.addTests([unittest.defaultTestLoader.loadTestsFromTestCase(ETreeXMLSchemaResolversTestCase)])
suite.addTests(
[make_doctest('validation.txt')])
return suite
if __name__ == '__main__':
print('to test use test.py %s' % __file__)
|
ETreeXMLSchemaResolversTestCase
|
python
|
huggingface__transformers
|
src/transformers/models/timesformer/modeling_timesformer.py
|
{
"start": 2593,
"end": 7097
}
|
class ____(nn.Module):
"""
Construct the patch and position embeddings.
"""
def __init__(self, config):
super().__init__()
embed_dim = config.hidden_size
num_frames = config.num_frames
drop_rate = config.hidden_dropout_prob
attention_type = config.attention_type
self.attention_type = attention_type
self.patch_embeddings = TimesformerPatchEmbeddings(config)
self.num_patches = self.patch_embeddings.num_patches
# Positional Embeddings
self.cls_token = nn.Parameter(torch.zeros(1, 1, embed_dim))
self.position_embeddings = nn.Parameter(torch.zeros(1, self.num_patches + 1, embed_dim))
self.pos_drop = nn.Dropout(p=drop_rate)
if attention_type != "space_only":
self.time_embeddings = nn.Parameter(torch.zeros(1, num_frames, embed_dim))
self.time_drop = nn.Dropout(p=drop_rate)
def forward(self, pixel_values):
batch_size = pixel_values.shape[0]
# create patch embeddings
embeddings, num_frames, patch_width = self.patch_embeddings(pixel_values)
cls_tokens = self.cls_token.expand(embeddings.size(0), -1, -1)
embeddings = torch.cat((cls_tokens, embeddings), dim=1)
# resizing the positional embeddings in case they don't match the input at inference
if embeddings.size(1) != self.position_embeddings.size(1):
position_embeddings = self.position_embeddings
cls_pos_embed = position_embeddings[0, 0, :].unsqueeze(0).unsqueeze(1)
other_pos_embed = position_embeddings[0, 1:, :].unsqueeze(0).transpose(1, 2)
patch_num = int(other_pos_embed.size(2) ** 0.5)
patch_height = embeddings.size(1) // patch_width
other_pos_embed = other_pos_embed.reshape(1, embeddings.size(2), patch_num, patch_num)
new_pos_embed = nn.functional.interpolate(
other_pos_embed, size=(patch_height, patch_width), mode="nearest"
)
new_pos_embed = new_pos_embed.flatten(2)
new_pos_embed = new_pos_embed.transpose(1, 2)
new_pos_embed = torch.cat((cls_pos_embed, new_pos_embed), 1)
embeddings = embeddings + new_pos_embed
else:
embeddings = embeddings + self.position_embeddings
embeddings = self.pos_drop(embeddings)
# Time Embeddings
if self.attention_type != "space_only":
cls_tokens = embeddings[:batch_size, 0, :].unsqueeze(1)
embeddings = embeddings[:, 1:]
_, patch_height, patch_width = embeddings.shape
embeddings = (
embeddings.reshape(batch_size, num_frames, patch_height, patch_width)
.permute(0, 2, 1, 3)
.reshape(batch_size * patch_height, num_frames, patch_width)
)
# Resizing time embeddings in case they don't match
if num_frames != self.time_embeddings.size(1):
time_embeddings = self.time_embeddings.transpose(1, 2)
new_time_embeddings = nn.functional.interpolate(time_embeddings, size=(num_frames), mode="nearest")
new_time_embeddings = new_time_embeddings.transpose(1, 2)
embeddings = embeddings + new_time_embeddings
else:
embeddings = embeddings + self.time_embeddings
embeddings = self.time_drop(embeddings)
embeddings = embeddings.view(batch_size, patch_height, num_frames, patch_width).reshape(
batch_size, patch_height * num_frames, patch_width
)
embeddings = torch.cat((cls_tokens, embeddings), dim=1)
return embeddings
# Copied from transformers.models.beit.modeling_beit.drop_path
def drop_path(input: torch.Tensor, drop_prob: float = 0.0, training: bool = False) -> torch.Tensor:
"""
Drop paths (Stochastic Depth) per sample (when applied in main path of residual blocks).
"""
if drop_prob == 0.0 or not training:
return input
keep_prob = 1 - drop_prob
shape = (input.shape[0],) + (1,) * (input.ndim - 1) # work with diff dim tensors, not just 2D ConvNets
random_tensor = keep_prob + torch.rand(shape, dtype=input.dtype, device=input.device)
random_tensor.floor_() # binarize
output = input.div(keep_prob) * random_tensor
return output
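# A small worked example (values are illustrative): with drop_prob=0.2 and
# training=True, each sample in the batch is zeroed with probability 0.2 and
# the survivors are scaled by 1 / keep_prob = 1.25, so the expected value of
# the output matches the input.
#
#     x = torch.ones(4, 3, 8)
#     y = drop_path(x, drop_prob=0.2, training=True)
#     # each sample of y is now either all zeros or all 1.25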
# Copied from transformers.models.beit.modeling_beit.BeitDropPath with Beit->TimeSformer
|
TimesformerEmbeddings
|
python
|
getsentry__sentry
|
src/sentry/models/projectkey.py
|
{
"start": 1132,
"end": 1916
}
|
class ____(BaseManager["ProjectKey"]):
def post_save(self, *, instance: ProjectKey, created: bool, **kwargs: object) -> None:
schedule_invalidate_project_config(
public_key=instance.public_key, trigger="projectkey.post_save"
)
def post_delete(self, instance, **kwargs):
schedule_invalidate_project_config(
public_key=instance.public_key, trigger="projectkey.post_delete"
)
def for_request(self, request):
"""Return objects that the given request user is allowed to access"""
from sentry.auth.superuser import is_active_superuser
qs = self.get_queryset()
if not is_active_superuser(request):
qs = qs.filter(use_case=UseCase.USER.value)
return qs
|
ProjectKeyManager
|
python
|
ray-project__ray
|
doc/source/serve/doc_code/intel_gaudi_inference_serve_deepspeed.py
|
{
"start": 351,
"end": 4909
}
|
class ____:
def __init__(self, model_id_or_path: str, world_size: int, local_rank: int):
"""An actor that runs a DeepSpeed inference engine.
Arguments:
model_id_or_path: Either a Hugging Face model ID
or a path to a cached model.
world_size: Total number of worker processes.
local_rank: Rank of this worker process.
The rank 0 worker is the head worker.
"""
from transformers import AutoTokenizer, AutoConfig
from optimum.habana.transformers.modeling_utils import (
adapt_transformers_to_gaudi,
)
# Tweak transformers for better performance on Gaudi.
adapt_transformers_to_gaudi()
self.model_id_or_path = model_id_or_path
self._world_size = world_size
self._local_rank = local_rank
self.device = torch.device("hpu")
self.model_config = AutoConfig.from_pretrained(
model_id_or_path,
torch_dtype=torch.bfloat16,
token="",
trust_remote_code=False,
)
# Load and configure the tokenizer.
self.tokenizer = AutoTokenizer.from_pretrained(
model_id_or_path, use_fast=False, token=""
)
self.tokenizer.padding_side = "left"
if self.tokenizer.pad_token is None:
self.tokenizer.pad_token = self.tokenizer.eos_token
import habana_frameworks.torch.distributed.hccl as hccl
# Initialize the distributed backend.
hccl.initialize_distributed_hpu(
world_size=world_size, rank=local_rank, local_rank=local_rank
)
torch.distributed.init_process_group(backend="hccl")
def load_model(self):
"""Load the model to HPU and initialize the DeepSpeed inference engine."""
import deepspeed
from transformers import AutoModelForCausalLM
from optimum.habana.checkpoint_utils import (
get_ds_injection_policy,
write_checkpoints_json,
)
# Construct the model with fake meta Tensors.
# Loads the model weights from the checkpoint later.
with deepspeed.OnDevice(dtype=torch.bfloat16, device="meta"):
model = AutoModelForCausalLM.from_config(
self.model_config, torch_dtype=torch.bfloat16
)
model = model.eval()
# Create a file to indicate where the checkpoint is.
checkpoints_json = tempfile.NamedTemporaryFile(suffix=".json", mode="w+")
write_checkpoints_json(
self.model_id_or_path, self._local_rank, checkpoints_json, token=""
)
# Prepare the DeepSpeed inference configuration.
kwargs = {"dtype": torch.bfloat16}
kwargs["checkpoint"] = checkpoints_json.name
kwargs["tensor_parallel"] = {"tp_size": self._world_size}
# Enable the HPU graph, similar to the cuda graph.
kwargs["enable_cuda_graph"] = True
# Specify the injection policy, required by DeepSpeed Tensor parallelism.
kwargs["injection_policy"] = get_ds_injection_policy(self.model_config)
# Initialize the inference engine.
self.model = deepspeed.init_inference(model, **kwargs).module
def tokenize(self, prompt: str):
"""Tokenize the input and move it to HPU."""
input_tokens = self.tokenizer(prompt, return_tensors="pt", padding=True)
return input_tokens.input_ids.to(device=self.device)
def generate(self, prompt: str, **config: Dict[str, Any]):
"""Take in a prompt and generate a response."""
input_ids = self.tokenize(prompt)
gen_tokens = self.model.generate(input_ids, **config)
return self.tokenizer.batch_decode(gen_tokens, skip_special_tokens=True)[0]
def streaming_generate(self, prompt: str, streamer, **config: Dict[str, Any]):
"""Generate a streamed response given an input."""
input_ids = self.tokenize(prompt)
self.model.generate(input_ids, streamer=streamer, **config)
def get_streamer(self):
"""Return a streamer.
We only need the rank 0 worker's result.
Other workers return a fake streamer.
"""
if self._local_rank == 0:
return RayTextIteratorStreamer(self.tokenizer, skip_special_tokens=True)
else:
class FakeStreamer:
def put(self, value):
pass
def end(self):
pass
return FakeStreamer()
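# A minimal single-process sketch (hypothetical model id; the original doc
# wraps this class in Ray actors and a Serve deployment):
#
#     worker = DeepSpeedInferenceWorker("meta-llama/Llama-2-7b-hf", world_size=1, local_rank=0)
#     worker.load_model()
#     print(worker.generate("What is DeepSpeed?", max_new_tokens=64))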
|
DeepSpeedInferenceWorker
|