Dataset Viewer

code (string, 0-390k) | repo_name (1 class) | path (string, 12-69) | language (1 class) | license (1 class) | size (int64, 0-390k)
---|---|---|---|---|---
# ruff: noqa
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = "Algorand Python"
copyright = "2024, Algorand Foundation" # noqa: A001
author = "Algorand Foundation"
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
"sphinx.ext.githubpages",
"sphinx.ext.intersphinx",
"sphinx_copybutton",
"myst_parser",
"autodoc2",
]
templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "architecture-decisions/*.md"]
intersphinx_mapping = {
"python": ("https://docs.python.org/3", None),
}
# warning exclusions
suppress_warnings = [
"myst.xref_missing",
"autodoc2.dup_item",
]
nitpick_ignore = [
("py:class", "algopy.arc4.AllowedOnCompletes"),
]
nitpick_ignore_regex = [
("py:class", r"algopy.*\._.*"),
]
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = "furo"
html_static_path = ["_static"]
html_css_files = [
"custom.css",
]
python_maximum_signature_line_length = 80
# -- Options for myst ---
myst_enable_extensions = [
"colon_fence",
"fieldlist",
]
# -- Options for autodoc2 ---
autodoc2_packages = [
{
"path": "./algopy-stubs",
"module": "algopy",
"auto_mode": False,
},
]
autodoc2_docstring_parser_regexes = [
# this will render all docstrings as Markdown
(r".*", "myst"),
]
autodoc2_hidden_objects = [
"undoc",
]
autodoc2_hidden_regexes = [
r".*\.__subclasshook__", # inherited from Protocol
]
autodoc2_class_inheritance = False
autodoc2_module_all_regexes = [r"algopy.*"]
autodoc2_render_plugin = "myst"
autodoc2_sort_names = True
autodoc2_index_template = None
autodoc2_docstrings = "all"
| algorandfoundation/puya | docs/conf.py | Python | NOASSERTION | 2,216 |
| algorandfoundation/puya | examples/__init__.py | Python | NOASSERTION | 0 |
# WARNING: This code is provided for example only. Do NOT deploy to mainnet.
from algopy import (
Account,
ARC4Contract,
Asset,
Global,
Txn,
UInt64,
arc4,
gtxn,
itxn,
op,
subroutine,
)
# Total supply of the pool tokens
TOTAL_SUPPLY = 10_000_000_000
# scale helps with precision when doing computation for
# the number of tokens to transfer
SCALE = 1000
# Fee for swaps, 5 represents 0.5% ((fee / scale)*100)
FEE = 5
FACTOR = SCALE - FEE
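# Worked example (illustrative, not from the original source): with SCALE = 1000
# and FEE = 5, FACTOR = 995, so only in_amount * 995 // 1000 of a swap's input
# counts towards the output, i.e. the pool retains a 0.5% fee ((5 / 1000) * 100).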
class ConstantProductAMM(ARC4Contract):
def __init__(self) -> None:
# init runs whenever the txn's app ID is zero, and runs first
# so if we have multiple create methods, this can contain common code.
# The asset id of asset A
self.asset_a = Asset()
# The asset id of asset B
self.asset_b = Asset()
# The current governor of this contract, allowed to do admin type actions
self.governor = Txn.sender
# The asset id of the Pool Token, used to track share of pool the holder may recover
self.pool_token = Asset()
# The ratio between assets (A*Scale/B)
self.ratio = UInt64(0)
@arc4.abimethod()
def set_governor(self, new_governor: Account) -> None:
"""sets the governor of the contract, may only be called by the current governor"""
self._check_is_governor()
self.governor = new_governor
@arc4.abimethod()
def bootstrap(self, seed: gtxn.PaymentTransaction, a_asset: Asset, b_asset: Asset) -> UInt64:
"""bootstraps the contract by opting into the assets and creating the pool token.
Note this method will fail if it is attempted more than once on the same contract
since the assets and pool token application state values are marked as static and
cannot be overridden.
Args:
seed: Initial Payment transaction to the app account so it can opt in to assets
and create pool token.
a_asset: One of the two assets this pool should allow swapping between.
b_asset: The other of the two assets this pool should allow swapping between.
Returns:
The asset id of the pool token created.
"""
assert not self.pool_token, "application has already been bootstrapped"
self._check_is_governor()
assert Global.group_size == 2, "group size not 2"
assert seed.receiver == Global.current_application_address, "receiver not app address"
assert seed.amount >= 300_000, "amount minimum not met" # 0.3 Algos
assert a_asset.id < b_asset.id, "asset a must be less than asset b"
self.asset_a = a_asset
self.asset_b = b_asset
self.pool_token = self._create_pool_token()
self._do_opt_in(self.asset_a)
self._do_opt_in(self.asset_b)
return self.pool_token.id
@arc4.abimethod(
default_args={
"pool_asset": "pool_token",
"a_asset": "asset_a",
"b_asset": "asset_b",
},
)
def mint(
self,
a_xfer: gtxn.AssetTransferTransaction,
b_xfer: gtxn.AssetTransferTransaction,
pool_asset: Asset,
a_asset: Asset,
b_asset: Asset,
) -> None:
"""mint pool tokens given some amount of asset A and asset B.
Given some amount of Asset A and Asset B in the transfers, mint some number of pool
        tokens commensurate with the pool's current balance and circulating supply of
pool tokens.
Args:
a_xfer: Asset Transfer Transaction of asset A as a deposit to the pool in
exchange for pool tokens.
b_xfer: Asset Transfer Transaction of asset B as a deposit to the pool in
exchange for pool tokens.
pool_asset: The asset ID of the pool token so that we may distribute it.
            a_asset: The asset ID of Asset A so that we may inspect our balance.
            b_asset: The asset ID of Asset B so that we may inspect our balance.
"""
self._check_bootstrapped()
# well-formed mint
assert pool_asset == self.pool_token, "asset pool incorrect"
assert a_asset == self.asset_a, "asset a incorrect"
assert b_asset == self.asset_b, "asset b incorrect"
assert a_xfer.sender == Txn.sender, "sender invalid"
assert b_xfer.sender == Txn.sender, "sender invalid"
# valid asset a xfer
assert (
a_xfer.asset_receiver == Global.current_application_address
), "receiver not app address"
assert a_xfer.xfer_asset == self.asset_a, "asset a incorrect"
assert a_xfer.asset_amount > 0, "amount minimum not met"
# valid asset b xfer
assert (
b_xfer.asset_receiver == Global.current_application_address
), "receiver not app address"
assert b_xfer.xfer_asset == self.asset_b, "asset b incorrect"
assert b_xfer.asset_amount > 0, "amount minimum not met"
to_mint = tokens_to_mint(
pool_balance=self._current_pool_balance(),
a_balance=self._current_a_balance(),
b_balance=self._current_b_balance(),
a_amount=a_xfer.asset_amount,
b_amount=b_xfer.asset_amount,
)
assert to_mint > 0, "send amount too low"
# mint tokens
do_asset_transfer(receiver=Txn.sender, asset=self.pool_token, amount=to_mint)
self._update_ratio()
@arc4.abimethod(
default_args={
"pool_asset": "pool_token",
"a_asset": "asset_a",
"b_asset": "asset_b",
},
)
def burn(
self,
pool_xfer: gtxn.AssetTransferTransaction,
pool_asset: Asset,
a_asset: Asset,
b_asset: Asset,
) -> None:
"""burn pool tokens to get back some amount of asset A and asset B
Args:
pool_xfer: Asset Transfer Transaction of the pool token for the amount the
sender wishes to redeem
pool_asset: Asset ID of the pool token so we may inspect balance.
a_asset: Asset ID of Asset A so we may inspect balance and distribute it
b_asset: Asset ID of Asset B so we may inspect balance and distribute it
"""
self._check_bootstrapped()
assert pool_asset == self.pool_token, "asset pool incorrect"
assert a_asset == self.asset_a, "asset a incorrect"
assert b_asset == self.asset_b, "asset b incorrect"
assert (
pool_xfer.asset_receiver == Global.current_application_address
), "receiver not app address"
assert pool_xfer.asset_amount > 0, "amount minimum not met"
assert pool_xfer.xfer_asset == self.pool_token, "asset pool incorrect"
assert pool_xfer.sender == Txn.sender, "sender invalid"
# Get the total number of tokens issued
# !important: this happens prior to receiving the current axfer of pool tokens
pool_balance = self._current_pool_balance()
a_amt = tokens_to_burn(
pool_balance=pool_balance,
supply=self._current_a_balance(),
amount=pool_xfer.asset_amount,
)
b_amt = tokens_to_burn(
pool_balance=pool_balance,
supply=self._current_b_balance(),
amount=pool_xfer.asset_amount,
)
# Send back commensurate amt of a
do_asset_transfer(receiver=Txn.sender, asset=self.asset_a, amount=a_amt)
# Send back commensurate amt of b
do_asset_transfer(receiver=Txn.sender, asset=self.asset_b, amount=b_amt)
self._update_ratio()
@arc4.abimethod(
default_args={
"a_asset": "asset_a",
"b_asset": "asset_b",
},
)
def swap(
self,
swap_xfer: gtxn.AssetTransferTransaction,
a_asset: Asset,
b_asset: Asset,
) -> None:
"""Swap some amount of either asset A or asset B for the other
Args:
swap_xfer: Asset Transfer Transaction of either Asset A or Asset B
a_asset: Asset ID of asset A so we may inspect balance and possibly transfer it
b_asset: Asset ID of asset B so we may inspect balance and possibly transfer it
"""
self._check_bootstrapped()
assert a_asset == self.asset_a, "asset a incorrect"
assert b_asset == self.asset_b, "asset b incorrect"
assert swap_xfer.asset_amount > 0, "amount minimum not met"
assert swap_xfer.sender == Txn.sender, "sender invalid"
match swap_xfer.xfer_asset:
case self.asset_a:
in_supply = self._current_b_balance()
out_supply = self._current_a_balance()
out_asset = self.asset_a
case self.asset_b:
in_supply = self._current_a_balance()
out_supply = self._current_b_balance()
out_asset = self.asset_b
case _:
assert False, "asset id incorrect"
to_swap = tokens_to_swap(
in_amount=swap_xfer.asset_amount, in_supply=in_supply, out_supply=out_supply
)
assert to_swap > 0, "send amount too low"
do_asset_transfer(receiver=Txn.sender, asset=out_asset, amount=to_swap)
self._update_ratio()
@subroutine
def _check_bootstrapped(self) -> None:
assert self.pool_token, "bootstrap method needs to be called first"
@subroutine
def _update_ratio(self) -> None:
a_balance = self._current_a_balance()
b_balance = self._current_b_balance()
self.ratio = a_balance * SCALE // b_balance
@subroutine
def _check_is_governor(self) -> None:
assert (
Txn.sender == self.governor
), "Only the account set in global_state.governor may call this method"
@subroutine
def _create_pool_token(self) -> Asset:
return (
itxn.AssetConfig(
asset_name=b"DPT-" + self.asset_a.unit_name + b"-" + self.asset_b.unit_name,
unit_name=b"dbt",
total=TOTAL_SUPPLY,
decimals=3,
manager=Global.current_application_address,
reserve=Global.current_application_address,
)
.submit()
.created_asset
)
@subroutine
def _do_opt_in(self, asset: Asset) -> None:
do_asset_transfer(
receiver=Global.current_application_address,
asset=asset,
amount=UInt64(0),
)
@subroutine
def _current_pool_balance(self) -> UInt64:
return self.pool_token.balance(Global.current_application_address)
@subroutine
def _current_a_balance(self) -> UInt64:
return self.asset_a.balance(Global.current_application_address)
@subroutine
def _current_b_balance(self) -> UInt64:
return self.asset_b.balance(Global.current_application_address)
##############
# Mathy methods
##############
# Notes:
# 1) During arithmetic operations, depending on the inputs, these methods may overflow
# the max uint64 value. This will cause the program to immediately terminate.
#
# Care should be taken to fully understand the limitations of these functions and if
# required should be swapped out for the appropriate byte math operations.
#
# 2) When doing division, any remainder is truncated from the result.
#
# Care should be taken to ensure that _when_ the truncation happens,
# it does so in favor of the contract. This is a subtle security issue that,
# if mishandled, could cause the balance of the contract to be drained.
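# Illustrative bounds (assumed, not part of the original notes): any intermediate
# value of 2**64 or more terminates the program. So in tokens_to_mint, the initial
# mint's a_amount * b_amount must stay below 2**64 (roughly 4.3e9 base units of
# each asset if equal), and a_ratio * issued must stay below 2**64, which with
# issued near TOTAL_SUPPLY (1e10) caps a_ratio at about 1.8e9, i.e. a deposit of
# at most ~1.8e6 times the existing balance.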
@subroutine
def tokens_to_mint(
*,
pool_balance: UInt64,
a_balance: UInt64,
b_balance: UInt64,
a_amount: UInt64,
b_amount: UInt64,
) -> UInt64:
is_initial_mint = a_balance == a_amount and b_balance == b_amount
if is_initial_mint:
return op.sqrt(a_amount * b_amount) - SCALE
issued = TOTAL_SUPPLY - pool_balance
a_ratio = SCALE * a_amount // (a_balance - a_amount)
b_ratio = SCALE * b_amount // (b_balance - b_amount)
if a_ratio < b_ratio:
return a_ratio * issued // SCALE
else:
return b_ratio * issued // SCALE
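# Worked example (illustrative): an initial deposit of 1_000_000 of each asset
# mints sqrt(1_000_000 * 1_000_000) - SCALE = 999_000 pool tokens; subtracting
# SCALE permanently locks a sliver of liquidity in the pool (comparable to
# Uniswap V2's minimum-liquidity lock).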
@subroutine
def tokens_to_burn(*, pool_balance: UInt64, supply: UInt64, amount: UInt64) -> UInt64:
issued = TOTAL_SUPPLY - pool_balance - amount
return supply * amount // issued
@subroutine
def tokens_to_swap(*, in_amount: UInt64, in_supply: UInt64, out_supply: UInt64) -> UInt64:
in_total = SCALE * (in_supply - in_amount) + (in_amount * FACTOR)
out_total = in_amount * FACTOR * out_supply
return out_total // in_total
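# Worked example (illustrative): with SCALE = 1000 and FACTOR = 995, a pre-swap
# reserve of 1000 (so in_supply = 1100 after the pool receives in_amount = 100)
# and out_supply = 1000 gives:
#   in_total  = 1000 * 1000 + 100 * 995 = 1_099_500
#   out_total = 100 * 995 * 1000 = 99_500_000
#   to_swap   = 99_500_000 // 1_099_500 = 90  (truncation favours the pool)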
@subroutine
def do_asset_transfer(*, receiver: Account, asset: Asset, amount: UInt64) -> None:
itxn.AssetTransfer(
xfer_asset=asset,
asset_amount=amount,
asset_receiver=receiver,
).submit()
| algorandfoundation/puya | examples/amm/contract.py | Python | NOASSERTION | 12,955 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class ConstantProductAMM(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod
def set_governor(
self,
new_governor: algopy.Account,
) -> None:
"""
sets the governor of the contract, may only be called by the current governor
"""
@algopy.arc4.abimethod
def bootstrap(
self,
seed: algopy.gtxn.PaymentTransaction,
a_asset: algopy.Asset,
b_asset: algopy.Asset,
) -> algopy.arc4.UIntN[typing.Literal[64]]:
"""
bootstraps the contract by opting into the assets and creating the pool token.
Note this method will fail if it is attempted more than once on the same contract since the assets and pool token application state values are marked as static and cannot be overridden.
"""
@algopy.arc4.abimethod
def mint(
self,
a_xfer: algopy.gtxn.AssetTransferTransaction,
b_xfer: algopy.gtxn.AssetTransferTransaction,
pool_asset: algopy.Asset,
a_asset: algopy.Asset,
b_asset: algopy.Asset,
) -> None:
"""
mint pool tokens given some amount of asset A and asset B.
        Given some amount of Asset A and Asset B in the transfers, mint some number of pool tokens commensurate with the pool's current balance and circulating supply of pool tokens.
"""
@algopy.arc4.abimethod
def burn(
self,
pool_xfer: algopy.gtxn.AssetTransferTransaction,
pool_asset: algopy.Asset,
a_asset: algopy.Asset,
b_asset: algopy.Asset,
) -> None:
"""
burn pool tokens to get back some amount of asset A and asset B
"""
@algopy.arc4.abimethod
def swap(
self,
swap_xfer: algopy.gtxn.AssetTransferTransaction,
a_asset: algopy.Asset,
b_asset: algopy.Asset,
) -> None:
"""
Swap some amount of either asset A or asset B for the other
"""
| algorandfoundation/puya | examples/amm/out/client_ConstantProductAMM.py | Python | NOASSERTION | 2,067 |
import typing
from algopy import ARC4Contract, arc4
class Swapped(arc4.Struct):
a: arc4.UInt64
b: arc4.UInt64
class EventEmitter(ARC4Contract):
@arc4.abimethod
def emit_swapped(self, a: arc4.UInt64, b: arc4.UInt64) -> None:
arc4.emit(Swapped(b, a))
arc4.emit("Swapped(uint64,uint64)", b, a)
arc4.emit("Swapped", b, a)
@arc4.abimethod()
def emit_ufixed(
self,
a: arc4.BigUFixedNxM[typing.Literal[256], typing.Literal[16]],
b: arc4.UFixedNxM[typing.Literal[64], typing.Literal[2]],
) -> None:
arc4.emit("AnEvent(ufixed256x16,ufixed64x2)", a, b)
| algorandfoundation/puya | examples/arc_28/contract.py | Python | NOASSERTION | 633 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class EventEmitter(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod
def emit_swapped(
self,
a: algopy.arc4.UIntN[typing.Literal[64]],
b: algopy.arc4.UIntN[typing.Literal[64]],
) -> None: ...
@algopy.arc4.abimethod
def emit_ufixed(
self,
a: algopy.arc4.BigUFixedNxM[typing.Literal[256], typing.Literal[16]],
b: algopy.arc4.UFixedNxM[typing.Literal[64], typing.Literal[2]],
) -> None: ...
| algorandfoundation/puya | examples/arc_28/out/client_EventEmitter.py | Python | NOASSERTION | 578 |
from algopy import (
Account,
ARC4Contract,
Asset,
Global,
LocalState,
Txn,
UInt64,
arc4,
gtxn,
itxn,
subroutine,
)
class Auction(ARC4Contract):
def __init__(self) -> None:
self.auction_end = UInt64(0)
self.previous_bid = UInt64(0)
self.asa_amount = UInt64(0)
self.asa = Asset()
self.previous_bidder = Account()
self.claimable_amount = LocalState(UInt64, key="claim", description="The claimable amount")
@arc4.abimethod
def opt_into_asset(self, asset: Asset) -> None:
        # Only allow the app creator to opt the app account into an ASA
assert Txn.sender == Global.creator_address, "Only creator can opt in to ASA"
        # Verify an ASA hasn't already been opted into
assert self.asa.id == 0, "ASA already opted in"
# Save ASA ID in global state
self.asa = asset
# Submit opt-in transaction: 0 asset transfer to self
itxn.AssetTransfer(
asset_receiver=Global.current_application_address,
xfer_asset=asset,
).submit()
@arc4.abimethod
def start_auction(
self,
starting_price: UInt64,
length: UInt64,
axfer: gtxn.AssetTransferTransaction,
) -> None:
assert Txn.sender == Global.creator_address, "auction must be started by creator"
# Ensure the auction hasn't already been started
assert self.auction_end == 0, "auction already started"
# Verify axfer
assert (
axfer.asset_receiver == Global.current_application_address
), "axfer must transfer to this app"
# Set global state
self.asa_amount = axfer.asset_amount
self.auction_end = Global.latest_timestamp + length
self.previous_bid = starting_price
@arc4.abimethod
def opt_in(self) -> None:
pass
@arc4.abimethod
def bid(self, pay: gtxn.PaymentTransaction) -> None:
# Ensure auction hasn't ended
assert Global.latest_timestamp < self.auction_end, "auction has ended"
# Verify payment transaction
assert pay.sender == Txn.sender, "payment sender must match transaction sender"
assert pay.amount > self.previous_bid, "Bid must be higher than previous bid"
# set global state
self.previous_bid = pay.amount
self.previous_bidder = pay.sender
# Update claimable amount
self.claimable_amount[Txn.sender] = pay.amount
@arc4.abimethod
def claim_bids(self) -> None:
amount = original_amount = self.claimable_amount[Txn.sender]
# subtract previous bid if sender is previous bidder
if Txn.sender == self.previous_bidder:
amount -= self.previous_bid
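            # the winning bid itself stays locked in the app, so the leading
            # bidder can only reclaim any surplus above their active bid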
itxn.Payment(
amount=amount,
receiver=Txn.sender,
).submit()
self.claimable_amount[Txn.sender] = original_amount - amount
@arc4.abimethod
def claim_asset(self, asset: Asset) -> None:
assert Global.latest_timestamp > self.auction_end, "auction has not ended"
# Send ASA to previous bidder
itxn.AssetTransfer(
xfer_asset=asset,
asset_close_to=self.previous_bidder,
asset_receiver=self.previous_bidder,
asset_amount=self.asa_amount,
).submit()
@subroutine
def delete_application(self) -> None:
itxn.Payment(
receiver=Global.creator_address,
close_remainder_to=Global.creator_address,
).submit()
def clear_state_program(self) -> bool:
return True
| algorandfoundation/puya | examples/auction/contract.py | Python | NOASSERTION | 3,611 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class Auction(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod
def opt_into_asset(
self,
asset: algopy.Asset,
) -> None: ...
@algopy.arc4.abimethod
def start_auction(
self,
starting_price: algopy.arc4.UIntN[typing.Literal[64]],
length: algopy.arc4.UIntN[typing.Literal[64]],
axfer: algopy.gtxn.AssetTransferTransaction,
) -> None: ...
@algopy.arc4.abimethod
def opt_in(
self,
) -> None: ...
@algopy.arc4.abimethod
def bid(
self,
pay: algopy.gtxn.PaymentTransaction,
) -> None: ...
@algopy.arc4.abimethod
def claim_bids(
self,
) -> None: ...
@algopy.arc4.abimethod
def claim_asset(
self,
asset: algopy.Asset,
) -> None: ...
| algorandfoundation/puya | examples/auction/out/client_Auction.py | Python | NOASSERTION | 914 |
import typing
from algopy import Box, BoxMap, BoxRef, Bytes, Global, String, Txn, UInt64, arc4, subroutine
StaticInts: typing.TypeAlias = arc4.StaticArray[arc4.UInt8, typing.Literal[4]]
class BoxContract(arc4.ARC4Contract):
def __init__(self) -> None:
self.box_a = Box(UInt64)
self.box_b = Box[arc4.DynamicBytes](arc4.DynamicBytes, key="b")
self.box_c = Box(arc4.String, key=b"BOX_C")
self.box_d = Box(Bytes)
self.box_map = BoxMap(UInt64, String, key_prefix="")
self.box_ref = BoxRef()
@arc4.abimethod
def set_boxes(self, a: UInt64, b: arc4.DynamicBytes, c: arc4.String) -> None:
self.box_a.value = a
self.box_b.value = b.copy()
self.box_c.value = c
self.box_d.value = b.native
b_value = self.box_b.value.copy()
assert self.box_b.value.length == b_value.length, "direct reference should match copy"
self.box_a.value += 3
# test .length
assert self.box_a.length == 8
assert self.box_b.length == b.bytes.length
assert self.box_c.length == c.bytes.length
assert self.box_d.length == b.native.length
# test .value.bytes
assert self.box_c.value.bytes[0] == c.bytes[0]
assert self.box_c.value.bytes[-1] == c.bytes[-1]
assert self.box_c.value.bytes[:-1] == c.bytes[:-1]
assert self.box_c.value.bytes[:2] == c.bytes[:2]
# test .value with Bytes type
assert self.box_d.value[0] == b.native[0]
assert self.box_d.value[-1] == b.native[-1]
assert self.box_d.value[:-1] == b.native[:-1]
assert self.box_d.value[:5] == b.native[:5]
assert self.box_d.value[: UInt64(2)] == b.native[: UInt64(2)]
@arc4.abimethod
def check_keys(self) -> None:
assert self.box_a.key == b"box_a", "box a key ok"
assert self.box_b.key == b"b", "box b key ok"
assert self.box_c.key == b"BOX_C", "box c key ok"
@arc4.abimethod
def delete_boxes(self) -> None:
del self.box_a.value
del self.box_b.value
del self.box_c.value
assert self.box_a.get(default=UInt64(42)) == 42
assert self.box_b.get(default=arc4.DynamicBytes(b"42")).native == b"42"
assert self.box_c.get(default=arc4.String("42")) == "42"
a, a_exists = self.box_a.maybe()
assert not a_exists
assert a == 0
@arc4.abimethod
def read_boxes(self) -> tuple[UInt64, Bytes, arc4.String]:
return get_box_value_plus_1(self.box_a) - 1, self.box_b.value.native, self.box_c.value
@arc4.abimethod
def boxes_exist(self) -> tuple[bool, bool, bool]:
return bool(self.box_a), bool(self.box_b), bool(self.box_c)
@arc4.abimethod
def slice_box(self) -> None:
box_0 = Box(Bytes, key=String("0"))
box_0.value = Bytes(b"Testing testing 123")
assert box_0.value[0:7] == b"Testing"
self.box_c.value = arc4.String("Hello")
assert self.box_c.value.bytes[2:10] == b"Hello"
@arc4.abimethod
def arc4_box(self) -> None:
box_d = Box(StaticInts, key=Bytes(b"d"))
box_d.value = StaticInts(arc4.UInt8(0), arc4.UInt8(1), arc4.UInt8(2), arc4.UInt8(3))
assert box_d.value[0] == 0
assert box_d.value[1] == 1
assert box_d.value[2] == 2
assert box_d.value[3] == 3
@arc4.abimethod
def test_box_ref(self) -> None:
# init ref, with valid key types
box_ref = BoxRef(key="blob")
assert not box_ref, "no data"
box_ref = BoxRef(key=b"blob")
assert not box_ref, "no data"
box_ref = BoxRef(key=Bytes(b"blob"))
assert not box_ref, "no data"
box_ref = BoxRef(key=String("blob"))
assert not box_ref, "no data"
# create
assert box_ref.create(size=32)
assert box_ref, "has data"
# manipulate data
sender_bytes = Txn.sender.bytes
app_address = Global.current_application_address.bytes
value_3 = Bytes(b"hello")
box_ref.replace(0, sender_bytes)
box_ref.resize(8000)
box_ref.splice(0, 0, app_address)
box_ref.replace(64, value_3)
prefix = box_ref.extract(0, 32 * 2 + value_3.length)
assert prefix == app_address + sender_bytes + value_3
# delete
assert box_ref.delete()
assert box_ref.key == b"blob"
# query
value, exists = box_ref.maybe()
assert not exists
assert value == b""
assert box_ref.get(default=sender_bytes) == sender_bytes
# update
box_ref.put(sender_bytes + app_address)
assert box_ref, "Blob exists"
assert box_ref.length == 64
assert get_box_ref_length(box_ref) == 64
# instance box ref
self.box_ref.create(size=UInt64(32))
assert self.box_ref, "has data"
self.box_ref.delete()
@arc4.abimethod
def box_map_test(self) -> None:
key_0 = UInt64(0)
key_1 = UInt64(1)
value = String("Hmmmmm")
self.box_map[key_0] = value
assert self.box_map[key_0].bytes.length == value.bytes.length
assert self.box_map.length(key_0) == value.bytes.length
assert self.box_map.get(key_1, default=String("default")) == String("default")
value, exists = self.box_map.maybe(key_1)
assert not exists
assert key_0 in self.box_map
assert self.box_map.key_prefix == b""
# test box map not assigned to the class and passed to subroutine
tmp_box_map = BoxMap(UInt64, String, key_prefix=Bytes())
tmp_box_map[key_1] = String("hello")
assert get_box_map_value_from_key_plus_1(tmp_box_map, UInt64(0)) == "hello"
del tmp_box_map[key_1]
@arc4.abimethod
def box_map_set(self, key: UInt64, value: String) -> None:
self.box_map[key] = value
@arc4.abimethod
def box_map_get(self, key: UInt64) -> String:
return self.box_map[key]
@arc4.abimethod
def box_map_del(self, key: UInt64) -> None:
del self.box_map[key]
@arc4.abimethod
def box_map_exists(self, key: UInt64) -> bool:
return key in self.box_map
@subroutine
def get_box_value_plus_1(box: Box[UInt64]) -> UInt64:
return box.value + 1
@subroutine
def get_box_ref_length(ref: BoxRef) -> UInt64:
return ref.length
@subroutine
def get_box_map_value_from_key_plus_1(box_map: BoxMap[UInt64, String], key: UInt64) -> String:
return box_map[key + 1]
| algorandfoundation/puya | examples/box_storage/contract.py | Python | NOASSERTION | 6,493 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class BoxContract(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod
def set_boxes(
self,
a: algopy.arc4.UIntN[typing.Literal[64]],
b: algopy.arc4.DynamicBytes,
c: algopy.arc4.String,
) -> None: ...
@algopy.arc4.abimethod
def check_keys(
self,
) -> None: ...
@algopy.arc4.abimethod
def delete_boxes(
self,
) -> None: ...
@algopy.arc4.abimethod
def read_boxes(
self,
) -> algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.DynamicBytes, algopy.arc4.String]: ...
@algopy.arc4.abimethod
def boxes_exist(
self,
) -> algopy.arc4.Tuple[algopy.arc4.Bool, algopy.arc4.Bool, algopy.arc4.Bool]: ...
@algopy.arc4.abimethod
def slice_box(
self,
) -> None: ...
@algopy.arc4.abimethod
def arc4_box(
self,
) -> None: ...
@algopy.arc4.abimethod
def test_box_ref(
self,
) -> None: ...
@algopy.arc4.abimethod
def box_map_test(
self,
) -> None: ...
@algopy.arc4.abimethod
def box_map_set(
self,
key: algopy.arc4.UIntN[typing.Literal[64]],
value: algopy.arc4.String,
) -> None: ...
@algopy.arc4.abimethod
def box_map_get(
self,
key: algopy.arc4.UIntN[typing.Literal[64]],
) -> algopy.arc4.String: ...
@algopy.arc4.abimethod
def box_map_del(
self,
key: algopy.arc4.UIntN[typing.Literal[64]],
) -> None: ...
@algopy.arc4.abimethod
def box_map_exists(
self,
key: algopy.arc4.UIntN[typing.Literal[64]],
) -> algopy.arc4.Bool: ...
| algorandfoundation/puya | examples/box_storage/out/client_BoxContract.py | Python | NOASSERTION | 1,776 |
from algopy import Bytes, Contract, Txn, UInt64, log, op, subroutine
ADD = 1
SUB = 2
MUL = 3
DIV = 4
@subroutine
def itoa(i: UInt64) -> Bytes:
digits = Bytes(b"0123456789")
radix = digits.length
if i < radix:
return digits[i]
return itoa(i // radix) + digits[i % radix]
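# e.g. itoa(UInt64(1034)) recurses on the quotient and appends the digit for
# the remainder, returning Bytes(b"1034")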
class MyContract(Contract):
def approval_program(self) -> UInt64:
num_args = Txn.num_app_args
if num_args == 0:
a = UInt64(0)
b = UInt64(0)
action = UInt64(0)
log(a)
log(b)
else:
assert num_args == 3, "Expected 3 args"
action_b = Txn.application_args(0)
action = op.btoi(action_b)
a_bytes = Txn.application_args(1)
b_bytes = Txn.application_args(2)
log(a_bytes)
log(b_bytes)
a = op.btoi(a_bytes)
b = op.btoi(b_bytes)
result = self.do_calc(action, a, b)
result_b = itoa(a) + self.op(action) + itoa(b) + b" = " + itoa(result)
log(result_b)
return UInt64(1)
@subroutine
def op(self, action: UInt64) -> Bytes:
if action == ADD:
return Bytes(b" + ")
elif action == SUB:
return Bytes(b" - ")
elif action == MUL:
return Bytes(b" * ")
elif action == DIV:
return Bytes(b" // ")
else:
return Bytes(b" - ")
@subroutine
def do_calc(self, maybe_action: UInt64, a: UInt64, b: UInt64) -> UInt64:
if maybe_action == ADD:
return self.add(a, b)
elif maybe_action == SUB:
return self.sub(a, b)
elif maybe_action == MUL:
return self.mul(a, b)
elif maybe_action == DIV:
return self.div(a, b)
else:
assert False, "unknown operation"
@subroutine
def add(self, a: UInt64, b: UInt64) -> UInt64:
return a + b
@subroutine
def sub(self, a: UInt64, b: UInt64) -> UInt64:
return a - b
@subroutine
def mul(self, a: UInt64, b: UInt64) -> UInt64:
return a * b
@subroutine
def div(self, a: UInt64, b: UInt64) -> UInt64:
return a // b
def clear_state_program(self) -> bool:
return True
| algorandfoundation/puya | examples/calculator/contract.py | Python | NOASSERTION | 2,278 |
from algopy import (
Account,
Application,
Asset,
Bytes,
Contract,
GlobalState,
String,
UInt64,
subroutine,
)
class AppStateContract(Contract):
def __init__(self) -> None:
self.global_int_full = GlobalState(UInt64(55))
self.global_int_simplified = UInt64(33)
self.global_int_no_default = GlobalState(UInt64)
self.global_bytes_full = GlobalState(Bytes(b"Hello"))
self.global_bytes_simplified = Bytes(b"Hello")
self.global_bytes_no_default = GlobalState(Bytes)
self.global_bool_full = GlobalState(False)
self.global_bool_simplified = True
self.global_bool_no_default = GlobalState(bool)
self.global_asset = GlobalState(Asset)
self.global_application = GlobalState(Application)
self.global_account = GlobalState(Account)
def approval_program(self) -> bool:
assert self.global_int_simplified == 33
assert self.global_int_full
assert self.global_int_full.value == 55
assert not self.global_int_no_default
self.global_int_no_default.value = UInt64(44)
i_value, i_exists = self.global_int_no_default.maybe()
assert i_exists
assert i_value == 44
assert read_global_uint64(Bytes(b"global_int_no_default")) == 44
assert self.global_bytes_simplified == b"Hello"
assert self.global_bytes_full
assert self.global_bytes_full.value == b"Hello"
assert self.global_bytes_full.get(Bytes(b"default")) == b"Hello"
assert not self.global_bytes_no_default
self.global_bytes_no_default.value = Bytes(b"World")
b_value, b_exists = self.global_bytes_no_default.maybe()
assert b_exists
assert b_value == b"World"
assert read_global_bytes(String("global_bytes_no_default")) == b"World"
del self.global_bytes_no_default.value
b_value, b_exists = self.global_bytes_no_default.maybe()
assert not b_exists
assert self.global_bytes_no_default.get(Bytes(b"default")) == b"default"
# Assert 'is set'
assert self.global_bool_full
assert not self.global_bool_no_default
self.global_bool_no_default.value = True
# Assert 'value'
assert not self.global_bool_full.value
assert self.global_bool_simplified
assert self.global_bool_no_default.value
# test the proxy can be passed as an argument
assert get_global_state_plus_1(self.global_int_no_default) == 45
return True
def clear_state_program(self) -> bool:
return True
@subroutine
def get_global_state_plus_1(state: GlobalState[UInt64]) -> UInt64:
return state.value + 1
@subroutine
def read_global_uint64(key: Bytes) -> UInt64:
return GlobalState(UInt64, key=key).value
@subroutine
def read_global_bytes(key: String) -> Bytes:
return GlobalState(Bytes, key=key).value
| algorandfoundation/puya | examples/global_state/contract.py | Python | NOASSERTION | 2,937 |
from algopy import Contract, Txn, log
class HelloWorldContract(Contract):
def approval_program(self) -> bool:
name = Txn.application_args(0)
log(b"Hello, " + name)
return True
def clear_state_program(self) -> bool:
return True
| algorandfoundation/puya | examples/hello_world/contract.py | Python | NOASSERTION | 270 |
from algopy import ARC4Contract, String, arc4
# Note: this contract is also used in the Puya AlgoKit template, so any breaking
# changes that require fixing this contract should also be applied there
# https://github.com/algorandfoundation/algokit-puya-template/blob/main/template_content/pyproject.toml.jinja
# https://github.com/algorandfoundation/algokit-puya-template/blob/main/template_content/.algokit/generators/create_contract/smart_contracts/%7B%25%20raw%20%25%7D%7B%7B%20contract_name%20%7D%7D%7B%25%20endraw%20%25%7D/contract.py.j2
class HelloWorldContract(ARC4Contract):
@arc4.abimethod
def hello(self, name: String) -> String:
return "Hello, " + name
| algorandfoundation/puya | examples/hello_world_arc4/contract.py | Python | NOASSERTION | 680 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class HelloWorldContract(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod
def hello(
self,
name: algopy.arc4.String,
) -> algopy.arc4.String: ...
| algorandfoundation/puya | examples/hello_world_arc4/out/client_HelloWorldContract.py | Python | NOASSERTION | 292 |
from algopy import (
Account,
Bytes,
Contract,
LocalState,
OnCompleteAction,
String,
Txn,
UInt64,
log,
subroutine,
)
class LocalStateContract(Contract):
def __init__(self) -> None:
self.local = LocalState(Bytes)
self.local_bool = LocalState(bool)
def approval_program(self) -> bool:
if Txn.application_id == 0:
return True
if Txn.on_completion not in (OnCompleteAction.NoOp, OnCompleteAction.OptIn):
return False
if Txn.num_app_args == 0:
return False
method = Txn.application_args(0)
if Txn.num_app_args == 1:
if method == b"get_guaranteed_data":
log(self.get_guaranteed_data(Txn.sender))
elif method == b"get_data_or_assert":
log(self.get_data_or_assert(Txn.sender))
elif method == b"delete_data":
self.delete_data(Txn.sender)
log("Deleted")
else:
return False
return True
elif Txn.num_app_args == 2:
if method == b"set_data":
self.set_data(Txn.sender, Txn.application_args(1))
elif method == b"get_data_with_default":
log(self.get_data_with_default(Txn.sender, Txn.application_args(1)))
else:
return False
return True
else:
return False
def clear_state_program(self) -> bool:
return True
@subroutine
def get_guaranteed_data(self, for_account: Account) -> Bytes:
result = self.local[for_account]
# this just tests local state proxy can be passed around
assert result.length == get_local_state_length(self.local, for_account)
# tests for dynamic key
assert local_bytes_exists(for_account, Bytes(b"local"))
assert read_local_bytes(for_account, String("local")) == result
return result
@subroutine
def get_data_with_default(self, for_account: Account, default: Bytes) -> Bytes:
        # the offsets variant of this contract passes these arguments positionally (without kwargs)
return self.local.get(account=for_account, default=default)
@subroutine
def get_data_or_assert(self, for_account: Account) -> Bytes:
result, exists = self.local.maybe(for_account)
assert exists, "no data for account"
return result
@subroutine
def set_data(self, for_account: Account, value: Bytes) -> None:
self.local[for_account] = value
@subroutine
def delete_data(self, for_account: Account) -> None:
del self.local[for_account]
@subroutine
def get_local_state_length(state: LocalState[Bytes], account: Account) -> UInt64:
return state[account].length
@subroutine
def local_bytes_exists(account: Account, key: Bytes) -> bool:
return account in LocalState(Bytes, key=key)
@subroutine
def read_local_bytes(account: Account, key: String) -> Bytes:
return LocalState(Bytes, key=key)[account]
| algorandfoundation/puya | examples/local_state/local_state_contract.py | Python | NOASSERTION | 3,010 |
from algopy import (
Bytes,
Contract,
LocalState,
OnCompleteAction,
Txn,
UInt64,
log,
op,
subroutine,
)
class LocalStateContract(Contract, name="LocalStateWithOffsets"):
def __init__(self) -> None:
self.local = LocalState(Bytes)
def approval_program(self) -> bool:
if Txn.application_id == 0:
return True
if Txn.on_completion not in (OnCompleteAction.NoOp, OnCompleteAction.OptIn):
return False
if Txn.num_app_args < 1:
return False
offset = op.btoi(Txn.application_args(0))
method = Txn.application_args(1)
if Txn.num_app_args == 2:
if method == b"get_guaranteed_data":
log(self.get_guaranteed_data(offset))
elif method == b"get_data_or_assert":
log(self.get_data_or_assert(offset))
elif method == b"delete_data":
self.delete_data(offset)
log("Deleted")
else:
return False
return True
elif Txn.num_app_args == 3:
if method == b"set_data":
self.set_data(offset, Txn.application_args(2))
elif method == b"get_data_with_default":
log(self.get_data_with_default(offset, Txn.application_args(2)))
else:
return False
return True
else:
return False
def clear_state_program(self) -> bool:
return True
@subroutine
def get_guaranteed_data(self, for_account: UInt64) -> Bytes:
return self.local[for_account]
@subroutine
def get_data_with_default(self, for_account: UInt64, default: Bytes) -> Bytes:
return self.local.get(for_account, default)
@subroutine
def get_data_or_assert(self, for_account: UInt64) -> Bytes:
result, exists = self.local.maybe(for_account)
assert exists, "no data for account"
return result
@subroutine
def set_data(self, for_account: UInt64, value: Bytes) -> None:
self.local[for_account] = value
@subroutine
def delete_data(self, for_account: UInt64) -> None:
del self.local[for_account]
| algorandfoundation/puya | examples/local_state/local_state_with_offsets.py | Python | NOASSERTION | 2,220 |
import typing
from algopy import BigUInt, Bytes, arc4, op, subroutine, urange
Bytes32: typing.TypeAlias = arc4.StaticArray[arc4.Byte, typing.Literal[32]]
Proof: typing.TypeAlias = arc4.DynamicArray[Bytes32]
class MerkleTree(arc4.ARC4Contract):
@arc4.abimethod(create="require")
def create(self, root: Bytes32) -> None:
self.root = root.bytes
@arc4.abimethod
def verify(self, proof: Proof, leaf: Bytes32) -> bool:
return self.root == compute_root_hash(proof, leaf.bytes)
@subroutine
def compute_root_hash(proof: Proof, leaf: Bytes) -> Bytes:
computed = leaf
for idx in urange(proof.length):
computed = hash_pair(computed, proof[idx].bytes)
return computed
@subroutine
def hash_pair(a: Bytes, b: Bytes) -> Bytes:
return op.sha256(a + b if BigUInt.from_bytes(a) < BigUInt.from_bytes(b) else b + a)
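# Note: sorting the pair numerically before hashing makes hash_pair commutative
# (hash_pair(a, b) == hash_pair(b, a)), so proofs only need to supply sibling
# hashes, without left/right position flags.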
| algorandfoundation/puya | examples/merkle/contract.py | Python | NOASSERTION | 860 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class MerkleTree(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod(create='require')
def create(
self,
root: algopy.arc4.StaticArray[algopy.arc4.Byte, typing.Literal[32]],
) -> None: ...
@algopy.arc4.abimethod
def verify(
self,
proof: algopy.arc4.DynamicArray[algopy.arc4.StaticArray[algopy.arc4.Byte, typing.Literal[32]]],
leaf: algopy.arc4.StaticArray[algopy.arc4.Byte, typing.Literal[32]],
) -> algopy.arc4.Bool: ...
| algorandfoundation/puya | examples/merkle/out/client_MerkleTree.py | Python | NOASSERTION | 602 |
from algopy import ARC4Contract, Asset, arc4, op, subroutine
class UserStruct(arc4.Struct):
name: arc4.String
id: arc4.UInt64
asset: arc4.UInt64
class ExampleContract(ARC4Contract):
@subroutine
def read_from_box(self, user_id: arc4.UInt64) -> UserStruct:
box_data, exists = op.Box.get(user_id.bytes)
assert exists, "User with that id does not exist"
return UserStruct.from_bytes(box_data)
@subroutine
def write_to_box(self, user: UserStruct) -> None:
box_key = user.id.bytes
# Delete existing data, so we don't have to worry about resizing the box
op.Box.delete(box_key)
op.Box.put(box_key, user.bytes)
@subroutine
def box_exists(self, user_id: arc4.UInt64) -> bool:
_data, exists = op.Box.get(user_id.bytes)
return exists
@arc4.abimethod()
def add_user(self, user: UserStruct) -> None:
assert not self.box_exists(user.id), "User with id must not exist"
self.write_to_box(user)
@arc4.abimethod()
def attach_asset_to_user(self, user_id: arc4.UInt64, asset: Asset) -> None:
user = self.read_from_box(user_id)
user.asset = arc4.UInt64(asset.id)
self.write_to_box(user)
@arc4.abimethod()
def get_user(self, user_id: arc4.UInt64) -> UserStruct:
return self.read_from_box(user_id)
| algorandfoundation/puya | examples/struct_in_box/contract.py | Python | NOASSERTION | 1,365 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class UserStruct(algopy.arc4.Struct):
name: algopy.arc4.String
id: algopy.arc4.UIntN[typing.Literal[64]]
asset: algopy.arc4.UIntN[typing.Literal[64]]
class ExampleContract(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod
def add_user(
self,
user: UserStruct,
) -> None: ...
@algopy.arc4.abimethod
def attach_asset_to_user(
self,
user_id: algopy.arc4.UIntN[typing.Literal[64]],
asset: algopy.Asset,
) -> None: ...
@algopy.arc4.abimethod
def get_user(
self,
user_id: algopy.arc4.UIntN[typing.Literal[64]],
) -> UserStruct: ...
| algorandfoundation/puya | examples/struct_in_box/out/client_ExampleContract.py | Python | NOASSERTION | 749 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class TicTacToeContract(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod(create='allow')
def new_game(
self,
move: algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.UIntN[typing.Literal[64]]],
) -> None: ...
@algopy.arc4.abimethod
def join_game(
self,
move: algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.UIntN[typing.Literal[64]]],
) -> None: ...
@algopy.arc4.abimethod
def whose_turn(
self,
) -> algopy.arc4.UIntN[typing.Literal[8]]: ...
@algopy.arc4.abimethod
def play(
self,
move: algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.UIntN[typing.Literal[64]]],
) -> None: ...
| algorandfoundation/puya | examples/tictactoe/out/client_TicTacToeContract.py | Python | NOASSERTION | 863 |
# ruff: noqa: PT018
import typing
from algopy import Account, GlobalState, Txn, UInt64, arc4, op, subroutine
Row: typing.TypeAlias = arc4.StaticArray[arc4.UInt8, typing.Literal[3]]
Game: typing.TypeAlias = arc4.StaticArray[Row, typing.Literal[3]]
Move: typing.TypeAlias = tuple[UInt64, UInt64]
EMPTY = 0
HOST = 1
CHALLENGER = 2
DRAW = 3
class TicTacToeContract(arc4.ARC4Contract):
def __init__(self) -> None:
self.challenger = GlobalState(Account)
self.winner = GlobalState(arc4.UInt8)
@arc4.abimethod(create="allow")
def new_game(self, move: Move) -> None:
if Txn.application_id:
# if a challenger has joined, don't allow starting a new game
# until this one is complete
if self.challenger:
assert self.winner, "Game isn't over"
# reset challenger and winner
del self.challenger.value
del self.winner.value
self.host = Txn.sender
self.game = Game.from_bytes(op.bzero(9))
column, row = move
assert column < 3 and row < 3, "Move must be in range"
self.game[row][column] = arc4.UInt8(HOST)
self.turns = UInt64(0)
@arc4.abimethod
def join_game(self, move: Move) -> None:
assert not self.challenger, "Host already has a challenger"
self.challenger.value = Txn.sender
self.make_move(arc4.UInt8(CHALLENGER), move)
@arc4.abimethod
def whose_turn(self) -> arc4.UInt8:
return arc4.UInt8(HOST) if self.turns % 2 else arc4.UInt8(CHALLENGER)
@arc4.abimethod
def play(self, move: Move) -> None:
assert not self.winner, "Game is already finished"
if self.turns % 2:
assert Txn.sender == self.host, "It is the host's turn"
player = arc4.UInt8(HOST)
else:
assert Txn.sender == self.challenger.get(
default=Account()
), "It is the challenger's turn"
player = arc4.UInt8(CHALLENGER)
self.make_move(player, move)
@subroutine
def make_move(self, player: arc4.UInt8, move: Move) -> None:
column, row = move
assert column < 3 and row < 3, "Move must be in range"
assert self.game[row][column] == EMPTY, "Square is already taken"
self.game[row][column] = player
self.turns += 1
if self.did_win(player, column=column, row=row):
self.winner.value = player
elif self.turns == 9:
self.winner.value = arc4.UInt8(DRAW)
@subroutine
def did_win(self, player: arc4.UInt8, column: UInt64, row: UInt64) -> bool:
g = self.game.copy()
if g[row][0] == g[row][1] == g[row][2]:
return True
if g[0][column] == g[1][column] == g[2][column]:
return True
# if player owns center, check diagonals
if player == g[1][1]:
if g[0][0] == player == g[2][2]:
return True
if g[0][2] == player == g[2][0]:
return True
return False
| algorandfoundation/puya | examples/tictactoe/tictactoe.py | Python | NOASSERTION | 3,051 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class VotingPreconditions(algopy.arc4.Struct):
is_voting_open: algopy.arc4.UIntN[typing.Literal[64]]
is_allowed_to_vote: algopy.arc4.UIntN[typing.Literal[64]]
has_already_voted: algopy.arc4.UIntN[typing.Literal[64]]
current_time: algopy.arc4.UIntN[typing.Literal[64]]
class VotingRoundApp(algopy.arc4.ARC4Client, typing.Protocol):
@algopy.arc4.abimethod(create='require')
def create(
self,
vote_id: algopy.arc4.String,
snapshot_public_key: algopy.arc4.DynamicBytes,
metadata_ipfs_cid: algopy.arc4.String,
start_time: algopy.arc4.UIntN[typing.Literal[64]],
end_time: algopy.arc4.UIntN[typing.Literal[64]],
option_counts: algopy.arc4.DynamicArray[algopy.arc4.UIntN[typing.Literal[8]]],
quorum: algopy.arc4.UIntN[typing.Literal[64]],
nft_image_url: algopy.arc4.String,
) -> None: ...
@algopy.arc4.abimethod
def bootstrap(
self,
fund_min_bal_req: algopy.gtxn.PaymentTransaction,
) -> None: ...
@algopy.arc4.abimethod
def close(
self,
) -> None: ...
@algopy.arc4.abimethod(readonly=True)
def get_preconditions(
self,
signature: algopy.arc4.DynamicBytes,
) -> VotingPreconditions: ...
@algopy.arc4.abimethod
def vote(
self,
fund_min_bal_req: algopy.gtxn.PaymentTransaction,
signature: algopy.arc4.DynamicBytes,
answer_ids: algopy.arc4.DynamicArray[algopy.arc4.UIntN[typing.Literal[8]]],
) -> None: ...
| algorandfoundation/puya | examples/voting/out/client_VotingRoundApp.py | Python | NOASSERTION | 1,622 |
# Converted from https://github.com/algorandfoundation/nft_voting_tool/blob/c0f8be47ab80c8694d2cf40ca0df54cec07ff14a/src/algorand/smart_contracts/voting.py
import typing
from algopy import (
Account,
ARC4Contract,
BoxMap,
BoxRef,
Bytes,
Global,
GlobalState,
OpUpFeeSource,
String,
Txn,
UInt64,
arc4,
ensure_budget,
gtxn,
itxn,
log,
op,
subroutine,
uenumerate,
urange,
)
VoteIndexArray: typing.TypeAlias = arc4.DynamicArray[arc4.UInt8]
VOTE_INDEX_BYTES = 1
VOTE_COUNT_BYTES = 8
#: The min balance increase per box created
BOX_FLAT_MIN_BALANCE = 2500
#: The min balance increase per byte of boxes (key included)
BOX_BYTE_MIN_BALANCE = 400
#: The min balance increase for each asset opted into
ASSET_MIN_BALANCE = 100000
class VotingPreconditions(arc4.Struct):
is_voting_open: arc4.UInt64
is_allowed_to_vote: arc4.UInt64
has_already_voted: arc4.UInt64
current_time: arc4.UInt64
class VotingRoundApp(ARC4Contract):
def __init__(self) -> None:
self.is_bootstrapped = False
        # The number of voters who have voted
self.voter_count = UInt64(0)
self.close_time = GlobalState(UInt64)
self.tally_box = BoxRef(key="V")
self.votes_by_account = BoxMap(Account, VoteIndexArray, key_prefix="")
@arc4.abimethod(create="require")
def create(
self,
vote_id: String,
snapshot_public_key: Bytes,
metadata_ipfs_cid: String,
start_time: UInt64,
end_time: UInt64,
option_counts: VoteIndexArray,
quorum: UInt64,
nft_image_url: String,
) -> None:
assert start_time < end_time, "End time should be after start time"
assert end_time >= Global.latest_timestamp, "End time should be in the future"
self.vote_id = vote_id
self.snapshot_public_key = snapshot_public_key
self.metadata_ipfs_cid = metadata_ipfs_cid
self.start_time = start_time
self.end_time = end_time
self.quorum = quorum
self.nft_image_url = nft_image_url
self.store_option_counts(option_counts.copy())
@arc4.abimethod
def bootstrap(self, fund_min_bal_req: gtxn.PaymentTransaction) -> None:
assert not self.is_bootstrapped, "Must not be already bootstrapped"
self.is_bootstrapped = True
assert (
fund_min_bal_req.receiver == Global.current_application_address
), "Payment must be to app address"
tally_box_size = self.total_options * VOTE_COUNT_BYTES
min_balance_req = (
# minimum balance req for: ALGOs + Vote result NFT asset
ASSET_MIN_BALANCE * 2
# create NFT fee
+ 1000
# tally box
+ BOX_FLAT_MIN_BALANCE
# tally box key "V"
+ BOX_BYTE_MIN_BALANCE
# tally box value
+ (tally_box_size * BOX_BYTE_MIN_BALANCE)
)
log(min_balance_req)
assert (
fund_min_bal_req.amount == min_balance_req
), "Payment must be for the exact min balance requirement"
assert self.tally_box.create(size=tally_box_size)
@arc4.abimethod
def close(self) -> None:
ensure_budget(20000, fee_source=OpUpFeeSource.GroupCredit)
assert not self.close_time, "Already closed"
self.close_time.value = Global.latest_timestamp
note = (
'{"standard":"arc69",'
'"description":"This is a voting result NFT for voting round with ID '
+ self.vote_id
+ '.","properties":{"metadata":"ipfs://'
+ self.metadata_ipfs_cid
+ '","id":"'
+ self.vote_id
+ '","quorum":'
+ itoa(self.quorum)
+ ',"voterCount":'
+ itoa(self.voter_count)
+ ',"tallies":['
)
current_index = UInt64(0)
for question_index, question_options in uenumerate(self.option_counts):
if question_index > 0:
note += ","
if question_options > 0:
note += "["
for option_index in urange(question_options.native):
if option_index > 0:
note += ","
votes_for_option = self.get_vote_from_box(current_index)
note += itoa(votes_for_option)
current_index += 1
note += "]"
note += "]}}"
self.nft_asset_id = (
itxn.AssetConfig(
total=1,
decimals=0,
default_frozen=False,
asset_name="[VOTE RESULT] " + self.vote_id,
unit_name="VOTERSLT",
url=self.nft_image_url,
note=note,
fee=Global.min_txn_fee,
)
.submit()
.created_asset.id
)
@arc4.abimethod(readonly=True)
def get_preconditions(self, signature: Bytes) -> VotingPreconditions:
return VotingPreconditions(
is_voting_open=arc4.UInt64(self.voting_open()),
is_allowed_to_vote=arc4.UInt64(self.allowed_to_vote(signature)),
has_already_voted=arc4.UInt64(self.already_voted()),
current_time=arc4.UInt64(Global.latest_timestamp),
)
@arc4.abimethod
def vote(
self,
fund_min_bal_req: gtxn.PaymentTransaction,
signature: Bytes,
answer_ids: VoteIndexArray,
) -> None:
ensure_budget(7700, fee_source=OpUpFeeSource.GroupCredit)
# Check voting preconditions
assert self.allowed_to_vote(signature), "Not allowed to vote"
assert self.voting_open(), "Voting not open"
assert not self.already_voted(), "Already voted"
questions_count = self.option_counts.length
assert answer_ids.length == questions_count, "Number of answers incorrect"
# Check voter box is funded
min_bal_req = BOX_FLAT_MIN_BALANCE + (
(32 + 2 + VOTE_INDEX_BYTES * answer_ids.length) * BOX_BYTE_MIN_BALANCE
)
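        # (box cost: 32-byte account address key + 2-byte ARC-4 length prefix
        # + VOTE_INDEX_BYTES per answer in the dynamic array value)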
assert (
fund_min_bal_req.receiver == Global.current_application_address
), "Payment must be to app address"
log(min_bal_req)
assert fund_min_bal_req.amount == min_bal_req, "Payment must be the exact min balance"
# Record the vote for each question
cumulative_offset = UInt64(0)
for question_index in urange(questions_count):
# Load the user's vote for this question
answer_option_index = answer_ids[question_index].native
options_count = self.option_counts[question_index].native
assert answer_option_index < options_count, "Answer option index invalid"
self.increment_vote_in_box(cumulative_offset + answer_option_index)
cumulative_offset += options_count
self.votes_by_account[Txn.sender] = answer_ids.copy()
self.voter_count += 1
@subroutine
def voting_open(self) -> bool:
return (
self.is_bootstrapped
and not self.close_time
and self.start_time <= Global.latest_timestamp <= self.end_time
)
@subroutine
def already_voted(self) -> bool:
return Txn.sender in self.votes_by_account
@subroutine
def store_option_counts(self, option_counts: VoteIndexArray) -> None:
assert option_counts.length, "option_counts should be non-empty"
assert option_counts.length <= 112, "Can't have more than 112 questions"
total_options = UInt64(0)
for item in option_counts:
total_options += item.native
assert total_options <= 128, "Can't have more than 128 vote options"
self.option_counts = option_counts.copy()
self.total_options = total_options
@subroutine
def allowed_to_vote(self, signature: Bytes) -> bool:
ensure_budget(2000)
return op.ed25519verify_bare(
Txn.sender.bytes,
signature,
self.snapshot_public_key,
)
@subroutine
def get_vote_from_box(self, index: UInt64) -> UInt64:
return op.btoi(self.tally_box.extract(index, VOTE_COUNT_BYTES))
@subroutine
def increment_vote_in_box(self, index: UInt64) -> None:
current_vote = self.get_vote_from_box(index)
self.tally_box.replace(index, op.itob(current_vote + 1))
@subroutine
def itoa(i: UInt64) -> String:
digits = Bytes(b"0123456789")
radix = digits.length
if i < radix:
return String.from_bytes(digits[i])
return itoa(i // radix) + String.from_bytes(digits[i % radix])
| algorandfoundation/puya | examples/voting/voting.py | Python | NOASSERTION | 8,705 |
| algorandfoundation/puya | scripts/__init__.py | Python | NOASSERTION | 0 |
import csv
import subprocess
from pathlib import Path
from scripts.compile_all_examples import ProgramSizes
_SCRIPTS_DIR = Path(__file__).parent
_ROOT_DIR = _SCRIPTS_DIR.parent
def main() -> None:
sizes_path = _ROOT_DIR / "examples" / "sizes.txt"
curr_text = sizes_path.read_text("utf8")
prev_text = subprocess.run(
["git", "show", "HEAD:examples/sizes.txt"],
capture_output=True,
text=True,
check=True,
cwd=_ROOT_DIR,
).stdout
if prev_text == curr_text:
return
curr_sizes = ProgramSizes.load(curr_text).sizes
prev_sizes = ProgramSizes.load(prev_text).sizes
delta = ProgramSizes()
assert curr_sizes.keys() == prev_sizes.keys(), "can't analyse with different programs"
for program_name in curr_sizes:
prev_prog_size = prev_sizes[program_name]
curr_prog_size = curr_sizes[program_name]
if prev_prog_size != curr_prog_size:
for level in range(3):
delta.sizes[program_name][level] = curr_prog_size[level] - prev_prog_size[level]
_sizes_to_csv(delta)
def _sizes_to_csv(ps: ProgramSizes) -> None:
tmp_dir = _ROOT_DIR / "_tmp"
tmp_dir.mkdir(exist_ok=True)
with (tmp_dir / "sizes_diff.csv").open("w", encoding="utf8") as output:
writer = csv.writer(output)
writer.writerow(["Name", "O0", "O1", "O2", "O0#Ops", "O1#Ops", "O2#Ops"])
# copy sizes and sort by name
for name, prog_sizes in sorted(ps.sizes.items()):
o0, o1, o2 = (prog_sizes[i] for i in range(3))
writer.writerow(
map(str, (name, o0.bytecode, o1.bytecode, o2.bytecode, o0.ops, o1.ops, o2.ops))
)
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/analyse_sizes_diff.py | Python | NOASSERTION | 1,735 |
import typing
from collections import Counter
from collections.abc import Iterator
from pathlib import Path
VCS_ROOT = Path(__file__).parent.parent
OUTPUT_BASE_DIRS = ["examples", "test_cases"]
CODE_INDENT = " "
INTERESTING_OPS = frozenset(
[
# pure stack manipulation
"intc",
*[f"intc_{i}" for i in range(4)],
"bytec",
*[f"bytec_{i}" for i in range(4)],
"pushbytes",
"pushbytess",
"pushint",
"pushints",
"frame_dig",
"frame_bury",
"bury",
"cover",
"dig",
"dup",
"dup2",
"dupn",
"pop",
"popn",
"swap",
"uncover",
# constants
"addr",
"byte",
"int",
"method",
"txn",
"txna",
"gtxn",
"gtxna",
"itxn",
"itxna",
"global",
"pushint",
"pushbytes",
"gload",
"gaid",
# other loads
"load",
]
)
def main() -> None:
teal_blocks = read_all_blocks()
single_op_blocks = (block[0] for block in teal_blocks if len(block) == 1)
print("Single op block counts:")
for count, op in sorted(
((count, op) for op, count in Counter(single_op_blocks).items()), reverse=True
):
print(f" {count}x {op}")
window_size = 2
while True:
num_printed = 0
print(f"\nInteresting op sequence of length {window_size} counts:")
seqs = [
tuple(seq)
for block in teal_blocks
for seq in sliding_window(block, window_size)
if INTERESTING_OPS.issuperset(seq)
]
for count, ops in sorted(
((count, ops) for ops, count in Counter(seqs).items()), reverse=True
)[:20]:
if count == 1:
break
print(f" {count}x {'; '.join(ops)}")
num_printed += 1
if num_printed == 0:
break
window_size += 1
def read_all_blocks(*, include_clear_state: bool = True) -> list[list[str]]:
teal_files = list[Path]()
for output_base_dir in OUTPUT_BASE_DIRS:
output_dir = VCS_ROOT / output_base_dir
assert output_dir.is_dir()
teal_files.extend(output_dir.rglob("*/out/*.approval.teal"))
if include_clear_state:
teal_files.extend(output_dir.rglob("*/out/*.clear.teal"))
teal_blocks = list[list[str]]()
for teal_file in teal_files:
current_block = list[str]()
teal = teal_file.read_text("utf8")
file_lines = teal.splitlines()
assert file_lines[0].startswith("#pragma")
for line in file_lines[1:]:
if not line.startswith(CODE_INDENT):
# new block / function
if current_block:
teal_blocks.append(current_block)
current_block = []
else:
op, *_ = line.split()
if op:
current_block.append(op)
if current_block:
teal_blocks.append(current_block)
return teal_blocks
T = typing.TypeVar("T")
def sliding_window(seq: list[T], window_size: int) -> Iterator[list[T]]:
for i in range(len(seq) - window_size + 1):
yield seq[i : i + window_size]
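# e.g. sliding_window([1, 2, 3, 4], 2) yields [1, 2], [2, 3], [3, 4]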
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/analyse_teal_op_frequencies.py | Python | NOASSERTION | 3,342 |
import argparse
import base64
from pathlib import Path
from algosdk.v2client.algod import AlgodClient
def main(path: list[Path]) -> None:
algod_client = AlgodClient(algod_token="a" * 64, algod_address="http://localhost:4001")
for p in path:
response = algod_client.compile(p.read_text("utf8"))
compiled: str = response["result"]
compiled_bytes = base64.b64decode(compiled)
p.with_suffix(".teal.bin").write_bytes(compiled_bytes)
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="assemble")
parser.add_argument("files", type=Path, nargs="+", metavar="FILE")
args = parser.parse_args()
main(args.files)
| algorandfoundation/puya | scripts/assemble.py | Python | NOASSERTION | 677 |
#!/usr/bin/env python3
import argparse
import json
import operator
import os
import re
import shutil
import subprocess
import sys
from collections import defaultdict
from collections.abc import Iterable
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path
import algokit_utils.deploy
import attrs
import prettytable
SCRIPT_DIR = Path(__file__).parent
GIT_ROOT = SCRIPT_DIR.parent
CONTRACT_ROOT_DIRS = [
GIT_ROOT / "examples",
GIT_ROOT / "test_cases",
]
SIZE_TALLY_PATH = GIT_ROOT / "examples" / "sizes.txt"
ENV_WITH_NO_COLOR = dict(os.environ) | {
"NO_COLOR": "1", # disable colour output
"PYTHONUTF8": "1", # force utf8 on windows
}
# iterate optimization levels first, with O1 before the others, and then cases; this is a
# workaround to prevent race conditions that occur when the mypy parsing stage of the O0 or
# O2 pass tries to read the client_<contract>.py output of the O1 level before it has
# finished being written to disk
DEFAULT_OPTIMIZATION = (1, 0, 2)
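# For example (illustrative only), filtering user-selected levels through
# DEFAULT_OPTIMIZATION preserves the O1-first ordering:
#   [o for o in DEFAULT_OPTIMIZATION if o in (0, 2)]  # -> [0, 2]
#   [o for o in DEFAULT_OPTIMIZATION if o in (1, 2)]  # -> [1, 2]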
def get_root_and_relative_path(path: Path) -> tuple[Path, Path]:
for root in CONTRACT_ROOT_DIRS:
if path.is_relative_to(root):
return root, path.relative_to(root)
raise RuntimeError(f"{path} is not relative to a known example")
def get_unique_name(path: Path) -> str:
_, rel_path = get_root_and_relative_path(path)
# strip suffixes
while rel_path.suffixes:
rel_path = rel_path.with_suffix("")
use_parts = []
for part in rel_path.parts:
if "MyContract" in part:
use_parts.append("".join(part.split("MyContract")))
elif "Contract" in part:
use_parts.append("".join(part.split("Contract")))
elif part.endswith((f"out{SUFFIX_O0}", f"out{SUFFIX_O1}", f"out{SUFFIX_O2}")):
pass
else:
use_parts.append(part)
return "/".join(filter(None, use_parts))
@attrs.frozen
class Size:
bytecode: int | None = None
ops: int | None = None
def __add__(self, other: object) -> "Size":
if not isinstance(other, Size):
return NotImplemented
return Size(
bytecode=(self.bytecode or 0) + (other.bytecode or 0),
ops=(self.ops or 0) + (other.ops or 0),
)
def __sub__(self, other: object) -> "Size":
if not isinstance(other, Size):
return NotImplemented
return Size(
bytecode=(self.bytecode or 0) - (other.bytecode or 0),
ops=(self.ops or 0) - (other.ops or 0),
)
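# Size arithmetic is element-wise, with missing values treated as zero, e.g. (illustrative):
#   Size(bytecode=10, ops=3) + Size(bytecode=5, ops=2)  # -> Size(bytecode=15, ops=5)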
def _program_to_sizes() -> defaultdict[str, defaultdict[int, Size]]:
def _opt_to_sizes() -> defaultdict[int, Size]:
return defaultdict[int, Size](Size)
return defaultdict[str, defaultdict[int, Size]](_opt_to_sizes)
@attrs.define(str=False)
class ProgramSizes:
sizes: defaultdict[str, defaultdict[int, Size]] = attrs.field(factory=_program_to_sizes)
def add_at_level(self, level: int, teal_file: Path, bin_file: Path) -> None:
name = get_unique_name(bin_file)
# this combines both approval and clear program sizes
self.sizes[name][level] += Size(
bytecode=bin_file.stat().st_size,
ops=_get_num_teal_ops(teal_file),
)
@classmethod
def load(cls, text: str) -> "ProgramSizes":
lines = list(filter(None, text.splitlines()))
program_sizes = ProgramSizes()
sizes = program_sizes.sizes
for line in lines[1:-1]:
name, o0, o1, o2, _, o0_ops, o1_ops, o2_ops = line.rsplit(maxsplit=7)
name = name.strip()
for opt, (bin_str, ops_str) in enumerate(((o0, o0_ops), (o1, o1_ops), (o2, o2_ops))):
if bin_str == "None":
continue
if bin_str == "-":
previous = sizes[name][opt - 1]
bytecode = previous.bytecode
ops = previous.ops
else:
bytecode = int(bin_str)
ops = int(ops_str)
sizes[name][opt] = Size(bytecode=bytecode, ops=ops)
return program_sizes
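    # Illustrative round-trip for the parser above, given a hypothetical row:
    #   my_example/Contract  1234  -  1100  |  400  -  350
    # the "-" columns mean "unchanged from the previous level", so the O1 entry
    # is copied from the O0 entry Size(bytecode=1234, ops=400).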
def __str__(self) -> str:
writer = prettytable.PrettyTable(
field_names=["Name", "O0", "O1", "O2", "|", "O0#Ops", "O1#Ops", "O2#Ops"],
header=True,
border=False,
min_width=6,
left_padding_width=0,
right_padding_width=0,
align="r",
)
writer.align["Name"] = "l"
writer.align["|"] = "c"
# copy sizes and sort by name
sizes = defaultdict(
self.sizes.default_factory, {p: self.sizes[p].copy() for p in sorted(self.sizes)}
)
totals = {i: Size() for i in range(3)}
for prog_sizes in sizes.values():
for i in range(3):
totals[i] += prog_sizes[i]
# Add totals at end
sizes["Total"].update(totals)
for name, prog_sizes in sizes.items():
o0, o1, o2 = (prog_sizes[i] for i in range(3))
row = list(
map(
str, (name, o0.bytecode, o1.bytecode, o2.bytecode, "|", o0.ops, o1.ops, o2.ops)
)
)
if o0 == o1:
for i in (2, 6):
row[i] = "-"
if o1 == o2:
for i in (3, 7):
row[i] = "-"
writer.add_row(row)
return writer.get_string()
def _get_num_teal_ops(path: Path) -> int:
ops = 0
teal = path.read_text("utf8")
for line in algokit_utils.deploy.strip_comments(teal).splitlines():
line = line.strip()
if not line or line.endswith(":") or line.startswith("#"):
# ignore comment only lines, labels and pragmas
pass
else:
ops += 1
return ops
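# For example (illustrative TEAL, not from the repo), the following fragment counts
# as 2 ops -- the pragma, the label and the trailing comment are all excluded:
#   #pragma version 10
#   main:
#       int 1 // push one
#       return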
@attrs.define
class CompilationResult:
rel_path: str
ok: bool
bin_files: list[Path]
stdout: str
def _stabilise_logs(stdout: str) -> list[str]:
return [
line.replace("\\", "/").replace(str(GIT_ROOT).replace("\\", "/"), "<git root>")
for line in stdout.splitlines()
if not line.startswith(
(
"debug: Skipping algopy stub ",
"debug: Skipping typeshed stub ",
"warning: Skipping stub: ",
"debug: Skipping stdlib stub ",
"debug: Building AWST for ",
"debug: Discovered user module ",
# ignore platform specific paths
"debug: Using python executable: ",
"debug: Using python site-packages: ",
"debug: Found algopy: ",
)
)
]
def checked_compile(p: Path, flags: list[str], *, out_suffix: str) -> CompilationResult:
assert p.is_dir()
out_dir = (p / f"out{out_suffix}").resolve()
template_vars_path = p / "template.vars"
root, rel_path_ = get_root_and_relative_path(p)
rel_path = str(rel_path_)
if out_dir.exists():
for prev_out_file in out_dir.iterdir():
if prev_out_file.is_dir():
shutil.rmtree(prev_out_file)
elif prev_out_file.suffix != ".log":
prev_out_file.unlink()
cmd = [
"poetry",
"run",
"puyapy",
*flags,
f"--out-dir={out_dir}",
"--output-destructured-ir",
"--output-bytecode",
"--log-level=debug",
*_load_template_vars(template_vars_path),
rel_path,
]
result = subprocess.run(
cmd,
cwd=root,
check=False,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
text=True,
env=ENV_WITH_NO_COLOR,
encoding="utf-8",
)
bin_files_written = re.findall(r"info: Writing (.+\.bin)", result.stdout)
# normalize ARC-56 output
arc56_files_written = re.findall(r"info: Writing (.+\.arc56\.json)", result.stdout)
for arc56_file in arc56_files_written:
_normalize_arc56(root / arc56_file)
log_path = p / f"puya{out_suffix}.log"
log_txt = "\n".join(_stabilise_logs(result.stdout))
log_path.write_text(log_txt, encoding="utf8")
ok = result.returncode == 0
return CompilationResult(
rel_path=rel_path,
ok=ok,
bin_files=[root / p for p in bin_files_written],
        stdout=result.stdout if not ok else "",  # only retain stdout when compilation failed
)
def _normalize_arc56(path: Path) -> None:
arc56 = json.loads(path.read_text())
compiler_version = arc56.get("compilerInfo", {}).get("compilerVersion", {})
compiler_version["major"] = 99
compiler_version["minor"] = 99
compiler_version["patch"] = 99
path.write_text(json.dumps(arc56, indent=4), encoding="utf8")
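# Illustrative effect of the normalization above on an ARC-56 fragment:
#   {"compilerInfo": {"compilerVersion": {"major": 4, "minor": 1, "patch": 0}}}
# becomes
#   {"compilerInfo": {"compilerVersion": {"major": 99, "minor": 99, "patch": 99}}}
# keeping the checked-in outputs stable across compiler releases.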
def _load_template_vars(path: Path) -> Iterable[str]:
if path.exists():
for line in path.read_text("utf8").splitlines():
if line.startswith("prefix="):
prefix = line.removeprefix("prefix=")
yield f"--template-vars-prefix={prefix}"
else:
yield f"-T={line}"
SUFFIX_O0 = "_unoptimized"
SUFFIX_O1 = ""
SUFFIX_O2 = "_O2"
def _compile_for_level(arg: tuple[Path, int]) -> tuple[CompilationResult, int]:
p, optimization_level = arg
if optimization_level == 0:
flags = [
"-O0",
"--no-output-arc32",
]
out_suffix = SUFFIX_O0
elif optimization_level == 2:
flags = [
"-O2",
"--no-output-arc32",
"-g0",
]
out_suffix = SUFFIX_O2
else:
assert optimization_level == 1
flags = [
"-O1",
"--output-awst",
"--output-ssa-ir",
"--output-optimization-ir",
"--output-memory-ir",
"--output-client",
"--output-source-map",
"--output-arc56",
]
out_suffix = SUFFIX_O1
result = checked_compile(p, flags=flags, out_suffix=out_suffix)
return result, optimization_level
@attrs.define(kw_only=True)
class CompileAllOptions:
limit_to: list[Path] = attrs.field(factory=list)
optimization_level: list[int] = attrs.field(factory=list)
def main(options: CompileAllOptions) -> None:
limit_to = options.limit_to
if limit_to:
to_compile = [Path(x).resolve() for x in limit_to]
else:
to_compile = [
item
for root in CONTRACT_ROOT_DIRS
for item in root.iterdir()
if item.is_dir() and any(item.glob("*.py"))
]
failures = list[tuple[str, str]]()
program_sizes = ProgramSizes()
# use selected opt levels, but retain original order
opt_levels = [
o
for o in DEFAULT_OPTIMIZATION
if o in (options.optimization_level or DEFAULT_OPTIMIZATION)
]
with ProcessPoolExecutor() as executor:
args = [(case, level) for level in opt_levels for case in to_compile]
for compilation_result, level in executor.map(_compile_for_level, args):
rel_path = compilation_result.rel_path
case_name = f"{rel_path} -O{level}"
for bin_file in compilation_result.bin_files:
program_sizes.add_at_level(level, bin_file.with_suffix(".teal"), bin_file)
if compilation_result.ok:
print(f"✅ {case_name}")
else:
print(f"💥 {case_name}", file=sys.stderr)
failures.append((case_name, compilation_result.stdout))
if failures:
print("Compilation failures:")
for name, stdout in sorted(failures, key=operator.itemgetter(0)):
print(f" ~~~ {name} ~~~ ")
print(
"\n".join(
ln
for ln in stdout.splitlines()
if (ln.startswith("debug: Traceback ") or not ln.startswith("debug: "))
)
)
print("Updating sizes.txt")
if limit_to or options.optimization_level:
print("Loading existing sizes.txt")
# load existing sizes for non-default options
merged = ProgramSizes.load(SIZE_TALLY_PATH.read_text("utf8"))
for program, sizes in program_sizes.sizes.items():
for o, size in sizes.items():
merged.sizes[program][o] = size
program_sizes = merged
SIZE_TALLY_PATH.write_text(str(program_sizes))
sys.exit(len(failures))
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("limit_to", type=Path, nargs="*", metavar="LIMIT_TO")
parser.add_argument(
"-O",
"--optimization-level",
action="extend",
type=int,
choices=DEFAULT_OPTIMIZATION,
nargs="+",
help="Set optimization level of output TEAL / AVM bytecode",
)
options = CompileAllOptions()
parser.parse_args(namespace=options)
main(options)
| algorandfoundation/puya | scripts/compile_all_examples.py | Python | NOASSERTION | 12,912 |
import argparse
import contextlib
import json
import typing
from collections.abc import Iterator
from dataclasses import dataclass
from functools import cached_property
from pathlib import Path
from algosdk.atomic_transaction_composer import (
AccountTransactionSigner,
AtomicTransactionComposer,
TransactionWithSigner,
)
from algosdk.kmd import KMDClient
from algosdk.transaction import ApplicationCallTxn, OnComplete, create_dryrun
from algosdk.v2client.algod import AlgodClient
DEFAULT_ALGOD_ADDRESS = "http://localhost:4001"
DEFAULT_KMD_ADDRESS = "http://localhost:4002"
DEFAULT_TOKEN = "a" * 64
DEFAULT_KMD_WALLET_NAME = "unencrypted-default-wallet"
DEFAULT_KMD_WALLET_PASSWORD = ""
def main(approval_path: Path, clear_path: Path) -> None:
response = dryrun_create(approval_path.read_bytes(), clear_path.read_bytes())
print(json.dumps(response, indent=4))
def dryrun_create(
approval_binary: bytes,
clear_binary: bytes,
) -> dict[str, typing.Any]:
algod = AlgodClient(algod_token=DEFAULT_TOKEN, algod_address=DEFAULT_ALGOD_ADDRESS)
account, *_ = get_accounts()
atc = AtomicTransactionComposer()
atc.add_transaction(
TransactionWithSigner(
txn=ApplicationCallTxn(
sender=account.address,
sp=algod.suggested_params(),
index=0,
on_complete=OnComplete.NoOpOC,
approval_program=approval_binary,
clear_program=clear_binary,
),
signer=account.signer,
)
)
atc.execute(algod, 4)
signed = atc.gather_signatures()
dryrun_request = create_dryrun(algod, signed)
return algod.dryrun(dryrun_request.dictify())
@dataclass(kw_only=True)
class LocalAccount:
"""LocalAccount is a simple dataclass to hold a localnet account details"""
#: The address of a localnet account
address: str
#: The base64 encoded private key of the account
private_key: str
#: An AccountTransactionSigner that can be used as a TransactionSigner
@cached_property
def signer(self) -> AccountTransactionSigner:
return AccountTransactionSigner(self.private_key)
def get_accounts(
kmd_address: str = DEFAULT_KMD_ADDRESS,
kmd_token: str = DEFAULT_TOKEN,
wallet_name: str = DEFAULT_KMD_WALLET_NAME,
wallet_password: str = DEFAULT_KMD_WALLET_PASSWORD,
) -> list[LocalAccount]:
"""gets all the accounts in the localnet kmd, defaults
to the `unencrypted-default-wallet` created on private networks automatically"""
kmd = KMDClient(kmd_token, kmd_address)
with wallet_handle_by_name(kmd, wallet_name, wallet_password) as wallet_handle:
return [
LocalAccount(
address=address,
private_key=kmd.export_key(
wallet_handle,
wallet_password,
address,
),
)
for address in kmd.list_keys(wallet_handle)
]
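# Example usage (a sketch, assuming an algokit localnet is running with the
# default tokens configured above):
#   funded_account, *_ = get_accounts()
#   print(funded_account.address)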
@contextlib.contextmanager
def wallet_handle_by_name(kmd: KMDClient, wallet_name: str, wallet_password: str) -> Iterator[str]:
wallets = kmd.list_wallets()
try:
wallet_id = next(iter(w["id"] for w in wallets if w["name"] == wallet_name))
except StopIteration:
raise Exception(f"Wallet not found: {wallet_name}") from None
wallet_handle = kmd.init_wallet_handle(
wallet_id,
wallet_password,
)
try:
yield wallet_handle
finally:
kmd.release_wallet_handle(wallet_handle)
if __name__ == "__main__":
parser = argparse.ArgumentParser(prog="dry_run_create")
parser.add_argument("approval_file", type=Path, metavar="FILE")
parser.add_argument("clear_file", type=Path, metavar="FILE")
args = parser.parse_args()
main(args.approval_file, args.clear_file)
| algorandfoundation/puya | scripts/dry_run_create.py | Python | NOASSERTION | 3,852 |
#!/usr/bin/env python3
import json
import subprocess
from pathlib import Path
from puya import log
from puya.ussemble.op_spec_models import ImmediateEnum, ImmediateKind, OpSpec
from scripts.transform_lang_spec import (
LanguageSpec,
)
logger = log.get_logger(__name__)
VCS_ROOT = Path(__file__).parent.parent
def main() -> None:
spec_path = VCS_ROOT / "langspec.puya.json"
lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
lang_spec = LanguageSpec.from_json(lang_spec_json)
ops = build_op_spec(lang_spec)
output_ops(ops)
def build_op_spec(lang_spec: LanguageSpec) -> dict[str, OpSpec]:
ops = {}
for op in sorted(lang_spec.ops.values(), key=lambda x: x.code):
immediates = list[ImmediateKind | ImmediateEnum]()
for imm in op.immediate_args:
if imm.arg_enum is None:
immediates.append(ImmediateKind[imm.immediate_type.name])
else:
immediates.append(
ImmediateEnum(
codes={e.name: e.value for e in lang_spec.arg_enums[imm.arg_enum]}
)
)
op_spec = OpSpec(name=op.name, code=op.code, immediates=immediates)
ops[op_spec.name] = op_spec
return ops
def output_ops(
ops: dict[str, OpSpec],
) -> None:
file: list[str] = [
"from puya.ussemble.op_spec_models import ImmediateEnum, ImmediateKind, OpSpec",
f"OP_SPECS = {ops!r}",
]
output_path = VCS_ROOT / "src" / "puya" / "ussemble" / "op_spec.py"
output_path.write_text("\n".join(file), encoding="utf-8")
subprocess.run(["ruff", "format", str(output_path)], check=True, cwd=VCS_ROOT)
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/generate_assemble_op_spec.py | Python | NOASSERTION | 1,735 |
import builtins
import json
import keyword
import subprocess
import textwrap
from collections.abc import Iterable, Iterator
from pathlib import Path
from puya import log
from puya.ir.avm_ops_models import (
AVMOpData,
DynamicVariants,
ImmediateKind,
OpSignature,
RunMode,
StackType,
Variant,
)
from puya.utils import normalise_path_to_str
from scripts import transform_lang_spec as langspec
logger = log.get_logger(__name__)
VCS_ROOT = Path(__file__).parent.parent
SUPPORTED_IMMEDIATE_KINDS = (langspec.ImmediateKind.uint8, langspec.ImmediateKind.arg_enum)
operator_names = {
# bool
"&&": "and",
"||": "or",
"!": "not",
# compare
"==": "eq",
"!=": "neq",
"<": "lt",
"<=": "lte",
">": "gt",
">=": "gte",
# bitwise
"&": "bitwise_and",
"^": "bitwise_xor",
"|": "bitwise_or",
"~": "bitwise_not",
# math
"+": "add",
"-": "sub",
"*": "mul",
"/": "div_floor",
"%": "mod",
}
EXCLUDED_OPCODES = {
# flow control
"bnz",
"bz",
"b",
"callsub",
"retsub",
"proto",
"switch",
"match",
# pure stack manipulation
"intc",
*[f"intc_{i}" for i in range(4)],
"bytec",
*[f"bytec_{i}" for i in range(4)],
"pushbytes",
"pushbytess",
"pushint",
"pushints",
"frame_dig",
"frame_bury",
"bury",
"cover",
"dig",
"dup",
"dup2",
"dupn",
"pop",
"popn",
"swap",
"uncover",
# modifies what other op codes with immediates point to
"intcblock",
"bytecblock",
# halting
"err",
"return",
}
def as_list_str(values: Iterable[str]) -> str | None:
inner = ", ".join(values)
if not inner:
return None
else:
return f"[{inner}]"
BUILTIN_NAMES = frozenset(dir(builtins))
def get_op_name(op: langspec.Op) -> str:
op_code = op.name
if op_code.isidentifier():
op_name = op_code
elif op_code[0] == "b":
op_name = operator_names[op_code[1:]] + "_bytes"
else:
op_name = operator_names[op_code]
if keyword.iskeyword(op_name) or keyword.issoftkeyword(op_name) or op_name in BUILTIN_NAMES:
op_name += "_"
return op_name
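# Illustrative mappings produced by get_op_name:
#   "+"      -> "add"
#   "b*"     -> "mul_bytes"
#   "assert" -> "assert_"  (keyword, so a trailing underscore is appended)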
def generate_op_node(
enums: dict[str, list[langspec.ArgEnum]], op_name: str, op: langspec.Op
) -> Iterator[str]:
assert not op.halts, "op halts"
dynamic_im_index: int | None = None
for idx, im in enumerate(op.immediate_args):
if im.modifies_stack_input is not None:
assert im.modifies_stack_output is None, "💀"
assert dynamic_im_index is None, "🪦"
dynamic_im_index = idx
elif im.modifies_stack_output is not None:
assert dynamic_im_index is None, "🪦"
dynamic_im_index = idx
immediate_types = tuple(get_immediate_type(im) for im in op.immediate_args)
op_code = op.name
cost = op.cost.value
variant: DynamicVariants | Variant
stack_args = [get_stack_type(arg.stack_type) for arg in op.stack_inputs]
stack_returns = [get_stack_type(out.stack_type) for out in op.stack_outputs]
if dynamic_im_index is None:
variant = Variant(
enum=None,
signature=OpSignature(
args=stack_args,
returns=stack_returns,
),
supported_modes=_map_run_mode(op.mode),
min_avm_version=op.min_avm_version,
)
else:
im = op.immediate_args[dynamic_im_index]
assert im.arg_enum is not None, "💥"
variant = DynamicVariants(
immediate_index=dynamic_im_index,
variant_map={},
)
if im.modifies_stack_input is not None:
list_index = im.modifies_stack_input
to_mod = stack_args
else:
assert im.modifies_stack_output is not None
list_index = im.modifies_stack_output
to_mod = stack_returns
for arg_enum in enums[im.arg_enum]:
assert arg_enum.stack_type is not None, "🤕"
to_mod[list_index] = get_stack_type(arg_enum.stack_type)
variant.variant_map[arg_enum.name] = Variant(
enum=arg_enum.name,
signature=OpSignature(
args=list(stack_args),
returns=list(stack_returns),
),
supported_modes=_map_run_mode(arg_enum.mode),
min_avm_version=arg_enum.min_avm_version,
)
data = AVMOpData(
op_code=op_code,
immediate_types=immediate_types,
variants=variant,
cost=cost,
min_avm_version=op.min_avm_version,
supported_modes=_map_run_mode(op.mode),
)
yield f"{op_name} = {data!r}"
if op.doc:
yield '"""'
for idx, doc_ln in enumerate(op.doc):
if idx > 0:
yield ""
yield from textwrap.wrap(doc_ln, width=99 - 4)
yield '"""'
yield ""
def _map_run_mode(mode: langspec.RunMode) -> RunMode:
match mode:
case langspec.RunMode.app:
return RunMode.app
case langspec.RunMode.sig:
return RunMode.lsig
case langspec.RunMode.any:
return RunMode.any
case _:
raise ValueError(f"Unsupported mode {mode}")
def get_stack_type(stack_type: langspec.StackType) -> StackType:
if stack_type.name.startswith("bytes_"):
return StackType.bytes
else:
return StackType[stack_type.name]
def get_immediate_type(immediate: langspec.Immediate) -> ImmediateKind:
assert immediate.immediate_type in SUPPORTED_IMMEDIATE_KINDS, (
"bad immediate kind",
immediate.immediate_type,
)
return ImmediateKind[immediate.immediate_type.name]
def generate_file(lang_spec: langspec.LanguageSpec) -> Iterator[str]:
script_path = normalise_path_to_str(Path(__file__).relative_to(VCS_ROOT))
preamble = f"""
# AUTO GENERATED BY {script_path}, DO NOT EDIT
import enum
from collections.abc import Sequence
from puya.errors import InternalError
from puya.ir.avm_ops_models import (
AVMOpData,
DynamicVariants,
ImmediateKind,
OpSignature,
RunMode,
StackType,
Variant
)
class AVMOp(enum.StrEnum):
code: str
immediate_types: Sequence[ImmediateKind]
_variants: Variant | DynamicVariants
cost: int | None
min_avm_version: int
def __new__(cls, data: AVMOpData | str) -> "AVMOp":
        # the weird union type on data, and then the assert,
        # are to shut mypy up when it wrongly infers the arg type of
        # e.g. AVMOp("+") to be invalid
assert isinstance(data, AVMOpData)
op_code = data.op_code
obj = str.__new__(cls, op_code)
obj._value_ = op_code
obj.code = op_code
obj.immediate_types = tuple(data.immediate_types)
obj._variants = data.variants # noqa: SLF001
obj.cost = data.cost
obj.min_avm_version = data.min_avm_version
return obj
def get_variant(self, immediates: Sequence[str | int]) -> Variant:
if isinstance(self._variants, Variant):
return self._variants
im = immediates[self._variants.immediate_index]
assert isinstance(im, str)
try:
return self._variants.variant_map[im]
except KeyError as ex:
raise InternalError(f"Unknown immediate for {{self.code}}: {{im}}") from ex
"""
yield from preamble.strip().splitlines()
yield ""
ops_by_name = {}
for op in lang_spec.ops.values():
if op.name in EXCLUDED_OPCODES:
logger.info(f"Skipping {op.name} due to specific exclusion")
else:
ops_by_name[get_op_name(op)] = op
for op_name, op in sorted(ops_by_name.items()):
yield textwrap.indent(
"\n".join(generate_op_node(lang_spec.arg_enums, op_name, op)), " " * 4
)
def main() -> None:
spec_path = VCS_ROOT / "langspec.puya.json"
lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
lang_spec = langspec.LanguageSpec.from_json(lang_spec_json)
output = "\n".join(generate_file(lang_spec))
ast_gen_path = VCS_ROOT / "src" / "puya" / "ir" / "avm_ops.py"
ast_gen_path.write_text(output, encoding="utf-8")
subprocess.run(["ruff", "format", str(ast_gen_path)], check=True, cwd=VCS_ROOT)
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/generate_avm_ops.py | Python | NOASSERTION | 8,430 |
#!/usr/bin/env python3
import subprocess
import sys
import typing
from collections.abc import Callable
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent / "src" / "puyapy" / "_vendor"))
import attrs
import mypy.build
import mypy.nodes
from mypy.visitor import NodeVisitor
from puyapy.compile import get_mypy_options
from puyapy.parse import parse_and_typecheck
SCRIPTS_DIR = Path(__file__).parent
VCS_ROOT = SCRIPTS_DIR.parent
SRC_DIR = VCS_ROOT / "src"
DOCS_DIR = VCS_ROOT / "docs"
STUBS_DIR = VCS_ROOT / "stubs" / "algopy-stubs"
STUBS_DOC_DIR = DOCS_DIR / "algopy-stubs"
@attrs.define
class ModuleImports:
from_imports: dict[str, str | None] = attrs.field(factory=dict)
import_all: bool = False
import_module: bool = False
def main() -> None:
manager, _ = parse_and_typecheck([STUBS_DIR], get_mypy_options())
output_doc_stubs(manager)
run_sphinx()
def output_doc_stubs(manager: mypy.build.BuildManager) -> None:
# parse and output reformatted __init__.pyi
stub = DocStub.process_module(manager, "algopy")
algopy_direct_imports = stub.collected_imports["algopy"]
# remove any algopy imports that are now defined in __init__.py itself
output_combined_stub(stub, STUBS_DOC_DIR / "__init__.pyi")
# remaining imports from algopy are other public modules
# parse and output them too
for other_stub_name in algopy_direct_imports.from_imports:
stub = DocStub.process_module(manager, f"algopy.{other_stub_name}")
output_combined_stub(stub, STUBS_DOC_DIR / f"{other_stub_name}.pyi")
def output_combined_stub(stubs: "DocStub", output: Path) -> None:
# remove algopy imports that have been inlined
lines = ["# ruff: noqa: A001, E501, F403, PYI021, PYI034, W291"]
rexported = list[str]()
for module, imports in stubs.collected_imports.items():
if imports.import_module:
lines.append(f"import {module}")
if imports.from_imports:
rexported.extend(filter(None, imports.from_imports.values()))
from_imports = ", ".join(_name_as(k, v) for k, v in imports.from_imports.items())
lines.append(f"from {module} import {from_imports}")
lines.extend(["", ""])
# assemble __all__
lines.append("__all__ = [")
for symbol in (*rexported, *stubs.collected_symbols):
if symbol.startswith("_"):
continue
lines.append(f' "{symbol}",')
lines.append("]")
# assemble symbols
lines.extend(stubs.collected_symbols.values())
# output and linting
output.parent.mkdir(parents=True, exist_ok=True)
output.write_text("\n".join(lines))
subprocess.run(["ruff", "format", str(output)], check=True, cwd=VCS_ROOT)
subprocess.run(["ruff", "check", "--fix", str(output)], check=True, cwd=VCS_ROOT)
def run_sphinx() -> None:
subprocess.run(
["sphinx-build", ".", "_build", "-W", "--keep-going", "-n", "-E"], check=True, cwd=DOCS_DIR
)
@attrs.define(kw_only=True)
class ClassBases:
klass: mypy.nodes.ClassDef
bases: list[mypy.nodes.Expression]
protocol_bases: list[tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]]
@attrs.define
class SymbolCollector(NodeVisitor[None]):
file: mypy.nodes.MypyFile
read_source: Callable[[str], list[str] | None]
all_classes: dict[str, tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]]
inlined_protocols: dict[str, set[str]]
symbols: dict[str, str] = attrs.field(factory=dict)
last_stmt: mypy.nodes.Statement | None = None
def get_src(
self, node: mypy.nodes.Context, *, path: str | None = None, entire_lines: bool = True
) -> str:
columns: tuple[int, int] | None = None
if node.end_column and not entire_lines:
columns = (node.column, node.end_column)
return self.get_src_from_lines(node.line, node.end_line or node.line, path, columns)
def get_src_from_lines(
self,
line: int,
end_line: int,
path: str | None = None,
columns: tuple[int, int] | None = None,
) -> str:
src = self.read_source(path or self.file.path)
if not src:
raise Exception("Could not get src")
lines = src[line - 1 : end_line]
if columns:
lines[-1] = lines[-1][: columns[1]]
lines[0] = lines[0][columns[0] :]
return "\n".join(lines)
def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> None:
for stmt in o.defs:
stmt.accept(self)
self.last_stmt = stmt
def _get_bases(self, klass: mypy.nodes.ClassDef) -> ClassBases:
bases = list[mypy.nodes.Expression]()
inline = list[tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]]()
for base in klass.base_type_exprs:
if (
isinstance(base, mypy.nodes.NameExpr)
and _should_inline_module(base.fullname)
and self._is_protocol(base.fullname)
):
inline.append(self.all_classes[base.fullname])
else:
bases.append(base)
return ClassBases(klass=klass, bases=bases, protocol_bases=inline)
def _get_inlined_class(self, klass: ClassBases) -> str:
# TODO: what about class keywords
klass_str = f"class {klass.klass.name}"
if klass.bases:
klass_str += f"({', '.join(self.get_src(b, entire_lines=False) for b in klass.bases)})"
src = [f"{klass_str}:"]
src.extend(self.get_src(member) for member in klass.klass.defs.body)
for base_class_file, base_class in klass.protocol_bases:
self.inlined_protocols.setdefault(base_class_file.fullname, set()).add(base_class.name)
src.extend(
self.get_src(member, path=base_class_file.path) for member in base_class.defs.body
)
return "\n".join(src)
def visit_class_def(self, o: mypy.nodes.ClassDef) -> None:
self.all_classes[o.fullname] = self.file, o
class_bases = self._get_bases(o)
if class_bases.protocol_bases:
self.symbols[o.name] = self._get_inlined_class(class_bases)
else:
self.symbols[o.name] = self.get_src(o)
def visit_func_def(self, o: mypy.nodes.FuncDef) -> None:
self.symbols[o.name] = self.get_src(o)
def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> None:
line = o.line
end_line = o.end_line or o.line
for item in o.items:
end_line = max(end_line, item.end_line or item.line)
overloaded_src = self.get_src_from_lines(line, end_line)
best_sig = _get_documented_overload(o)
if not best_sig:
src = overloaded_src
else:
best_sig_src = self.get_src(best_sig)
src = f"{overloaded_src}\n{best_sig_src}"
self.symbols[o.name] = src
def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> None:
try:
(lvalue,) = o.lvalues
except ValueError as ex:
raise ValueError(f"Multi assignments are not supported: {o}") from ex
if not isinstance(lvalue, mypy.nodes.NameExpr):
raise TypeError(f"Multi assignments are not supported: {lvalue}")
# find actual rvalue src location by taking the entire statement and subtracting the lvalue
loc = mypy.nodes.Context()
loc.set_line(o)
if lvalue.end_column:
loc.column = lvalue.end_column
self.symbols[lvalue.name] = self.get_src(loc)
def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> None:
if isinstance(o.expr, mypy.nodes.StrExpr) and isinstance(
self.last_stmt, mypy.nodes.AssignmentStmt
):
(lvalue,) = self.last_stmt.lvalues
if isinstance(lvalue, mypy.nodes.NameExpr):
self.symbols[lvalue.name] += "\n" + self.get_src(o.expr)
def _is_protocol(self, fullname: str) -> bool:
try:
klass = self.all_classes[fullname]
except KeyError:
return False
info: mypy.nodes.TypeInfo = klass[1].info
return info.is_protocol
def _get_documented_overload(o: mypy.nodes.OverloadedFuncDef) -> mypy.nodes.FuncDef | None:
best_overload: mypy.nodes.FuncDef | None = None
for overload in o.items:
match overload:
case mypy.nodes.Decorator(func=func_def):
pass
case mypy.nodes.FuncDef() as func_def:
pass
case _:
raise Exception("Only function overloads supported")
docstring = func_def.docstring
# this is good enough until a more complex case arises
if docstring and (
not best_overload or len(func_def.arguments) > len(best_overload.arguments)
):
best_overload = func_def
return best_overload
@attrs.define
class ImportCollector(NodeVisitor[None]):
collected_imports: dict[str, ModuleImports]
def get_imports(self, module_id: str) -> ModuleImports:
try:
imports = self.collected_imports[module_id]
except KeyError:
imports = self.collected_imports[module_id] = ModuleImports()
return imports
def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> None:
for stmt in o.defs:
stmt.accept(self)
def visit_import_from(self, o: mypy.nodes.ImportFrom) -> None:
imports = self.get_imports(o.id)
for name, name_as in o.names:
imports.from_imports[name] = name_as
def visit_import(self, o: mypy.nodes.Import) -> None:
for name, name_as in o.ids:
if name != (name_as or name):
raise Exception("Aliasing symbols in stubs is not supported")
imports = self.get_imports(name)
imports.import_module = True
@attrs.define
class DocStub(NodeVisitor[None]):
read_source: Callable[[str], list[str] | None]
file: mypy.nodes.MypyFile
modules: dict[str, mypy.nodes.MypyFile]
parsed_modules: dict[str, SymbolCollector] = attrs.field(factory=dict)
all_classes: dict[str, tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]] = attrs.field(
factory=dict
)
collected_imports: dict[str, ModuleImports] = attrs.field(factory=dict)
inlined_protocols: dict[str, set[str]] = attrs.field(factory=dict)
collected_symbols: dict[str, str] = attrs.field(factory=dict)
@classmethod
def process_module(cls, manager: mypy.build.BuildManager, module_id: str) -> typing.Self:
read_source = manager.errors.read_source
assert read_source
modules = manager.modules
module: mypy.nodes.MypyFile = modules[module_id]
stub = cls(read_source=read_source, file=module, modules=modules)
module.accept(stub)
stub._remove_inlined_symbols() # noqa: SLF001
return stub
def _get_module(self, module_id: str) -> SymbolCollector:
try:
return self.parsed_modules[module_id]
except KeyError:
file = self.modules[module_id]
self.parsed_modules[module_id] = collector = SymbolCollector(
file=file,
read_source=self.read_source,
all_classes=self.all_classes,
inlined_protocols=self.inlined_protocols,
)
file.accept(collector)
self._collect_imports(file)
return collector
def _collect_imports(self, o: mypy.nodes.Node) -> None:
o.accept(ImportCollector(self.collected_imports))
self._remove_inlined_symbols()
def _remove_inlined_symbols(self) -> None:
for module, imports in self.collected_imports.items():
inlined_protocols = self.inlined_protocols.get(module, ())
if imports.import_module and module in self.collected_symbols:
raise Exception(f"Symbol/import collision: {module}")
for name, name_as in list(imports.from_imports.items()):
if name in inlined_protocols:
print(f"Removed inlined protocol: {name}")
del imports.from_imports[name]
del self.collected_symbols[name]
elif name in self.collected_symbols:
if name_as is None:
del imports.from_imports[name]
else:
print(f"Symbol/import collision: from {module} import {name} as {name_as}")
def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> None:
for stmt in o.defs:
stmt.accept(self)
self._add_all_symbols(o.fullname)
def visit_import_from(self, o: mypy.nodes.ImportFrom) -> None:
if not _should_inline_module(o.id):
self._collect_imports(o)
return
module = self._get_module(o.id)
name_mapping = dict(o.names)
for name in module.symbols:
try:
name_as = name_mapping[name]
except KeyError:
continue
if name != (name_as or name):
raise Exception("Aliasing symbols in stubs is not supported")
self.add_symbol(module, name)
def visit_import_all(self, o: mypy.nodes.ImportAll) -> None:
if _should_inline_module(o.id):
self._add_all_symbols(o.id)
else:
self._collect_imports(o)
def _add_all_symbols(self, module_id: str) -> None:
module = self._get_module(module_id)
for sym in module.symbols:
self.add_symbol(module, sym)
def visit_import(self, o: mypy.nodes.Import) -> None:
self._collect_imports(o)
def add_symbol(self, module: SymbolCollector, name: str) -> None:
lines = module.symbols[name]
existing = self.collected_symbols.get(name)
if existing is not None and existing != lines:
raise Exception(f"Duplicate definitions are not supported: {name}\n{lines}")
self.collected_symbols[name] = lines
def _name_as(name: str, name_as: str | None) -> str:
if name_as is None:
return name
return f"{name} as {name_as}"
def _should_inline_module(module_id: str) -> bool:
return module_id.startswith("algopy._")
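# For example, a private module id such as "algopy._primitives" (hypothetical name)
# would be inlined into the combined stub, while a public module like "algopy.arc4"
# is kept as a regular import.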
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/generate_docs.py | Python | NOASSERTION | 14,331 |
#!/usr/bin/env python3
import subprocess
from pathlib import Path
SCRIPTS_DIR = Path(__file__).parent
VCS_ROOT = SCRIPTS_DIR.parent
LIB_NAME = "_puya_lib"
def main() -> None:
# compile puya lib
# normalize source_location.path
# save
subprocess.run(["puyapy", "--output-awst-json", f"src/{LIB_NAME}"], check=True, cwd=VCS_ROOT)
awst_path = VCS_ROOT / "module.awst.json"
puya_lib_path = VCS_ROOT / "src" / LIB_NAME
output_path = VCS_ROOT / "src" / "puya" / "ir" / "_puya_lib.awst.json"
replace_awst = awst_path.read_text()
for lib_path in puya_lib_path.glob("*.py"):
path_as_str = str(lib_path).replace("\\", "\\\\")
find_str = f'"file": "{path_as_str}",'
replace_str = '"file": null,'
replace_awst = replace_awst.replace(find_str, replace_str)
output_path.write_text(replace_awst)
awst_path.unlink(missing_ok=True)
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/generate_puya_lib.py | Python | NOASSERTION | 935 |
#!/usr/bin/env python3
import builtins
import copy
import json
import keyword
import subprocess
import textwrap
import typing
from collections.abc import Iterable, Iterator, Sequence
from pathlib import Path
import attrs
from puya import log
from puya.algo_constants import SUPPORTED_AVM_VERSIONS
from puyapy.awst_build import pytypes
from puyapy.awst_build.intrinsic_models import FunctionOpMapping, OpMappingWithOverloads
from puyapy.awst_build.utils import snake_case
from scripts.transform_lang_spec import (
ArgEnum,
Immediate,
ImmediateKind,
LanguageSpec,
Op,
StackType,
StackValue,
)
logger = log.get_logger(__name__)
INDENT = " " * 4
VCS_ROOT = Path(__file__).parent.parent
MIN_SUPPORTED_VERSION = min(SUPPORTED_AVM_VERSIONS)
PYTHON_ENUM_CLASS = {
"Mimc Configurations": "MiMCConfigurations",
}
PYTYPE_TO_LITERAL: dict[pytypes.PyType, pytypes.LiteralOnlyType | None] = {
pytypes.BytesType: pytypes.BytesLiteralType,
pytypes.UInt64Type: pytypes.IntLiteralType,
pytypes.AccountType: None, # pytypes.StrLiteralType, # TODO: should we enable this?
pytypes.BigUIntType: pytypes.IntLiteralType,
pytypes.BoolType: None, # already a Python type
pytypes.ApplicationType: pytypes.IntLiteralType,
pytypes.AssetType: pytypes.IntLiteralType,
pytypes.TransactionTypeType: None,
pytypes.OnCompleteActionType: None,
}
PYTYPE_REPR = {
value: f"pytypes.{key}"
for key, value in pytypes.__dict__.items()
if isinstance(value, pytypes.PyType)
}
STACK_TYPE_MAPPING: dict[StackType, Sequence[pytypes.PyType]] = {
StackType.address_or_index: [pytypes.AccountType, pytypes.UInt64Type],
StackType.application: [pytypes.ApplicationType, pytypes.UInt64Type],
StackType.asset: [pytypes.AssetType, pytypes.UInt64Type],
StackType.bytes: [pytypes.BytesType],
StackType.bytes_8: [pytypes.BytesType],
StackType.bytes_32: [pytypes.BytesType],
StackType.bytes_33: [pytypes.BytesType],
StackType.bytes_64: [pytypes.BytesType],
StackType.bytes_80: [pytypes.BytesType],
StackType.bytes_1232: [pytypes.BytesType],
StackType.bytes_1793: [pytypes.BytesType],
StackType.bool: [pytypes.BoolType, pytypes.UInt64Type],
StackType.uint64: [pytypes.UInt64Type],
StackType.any: [pytypes.BytesType, pytypes.UInt64Type],
StackType.box_name: [pytypes.BytesType], # TODO: should this be another type..?
StackType.address: [pytypes.AccountType],
StackType.bigint: [pytypes.BigUIntType],
StackType.state_key: [pytypes.BytesType], # TODO: should this be another type..?
}
BYTES_LITERAL = "bytes"
UINT64_LITERAL = "int"
STUB_NAMESPACE = "op"
ALGORAND_OP_URL = "https://developer.algorand.org/docs/get-details/dapps/avm/teal/opcodes/v10/"
class OpCodeGroup(typing.Protocol):
def handled_ops(self) -> Iterator[str]: ...
@attrs.define(kw_only=True)
class RenamedOpCode(OpCodeGroup):
name: str
stack_aliases: dict[str, list[str]] = attrs.field(factory=dict)
"""ops that are aliases for other ops that take stack values instead of immediates"""
op: str
def handled_ops(self) -> Iterator[str]:
yield self.op
yield from self.stack_aliases.keys()
@attrs.define(kw_only=True)
class MergedOpCodes(OpCodeGroup):
name: str
doc: str
ops: dict[str, dict[str, list[str]]]
def handled_ops(self) -> Iterator[str]:
for op, aliases in self.ops.items():
yield op
yield from aliases.keys()
@attrs.define(kw_only=True)
class GroupedOpCodes(OpCodeGroup):
name: str
"""ops that are aliases for other ops that take stack values instead of immediates"""
doc: str
ops: dict[str, str] = attrs.field(factory=dict)
"""ops to include in group, mapped to their new name"""
def handled_ops(self) -> Iterator[str]:
yield from self.ops.keys()
GROUPED_OP_CODES = [
GroupedOpCodes(
name="AppGlobal",
doc="Get or modify Global app state",
ops={
"app_global_get": "get",
"app_global_get_ex": "get_ex",
"app_global_del": "delete",
"app_global_put": "put",
},
),
GroupedOpCodes(
name="Scratch",
doc="Load or store scratch values",
ops={"loads": "load", "stores": "store"},
),
GroupedOpCodes(
name="AppLocal",
doc="Get or modify Local app state",
ops={
"app_local_get": "get",
"app_local_get_ex": "get_ex",
"app_local_del": "delete",
"app_local_put": "put",
},
),
GroupedOpCodes(
name="Box",
doc="Get or modify box state",
ops={
"box_create": "create",
"box_del": "delete",
"box_extract": "extract",
"box_get": "get",
"box_len": "length",
"box_put": "put",
"box_replace": "replace",
"box_resize": "resize",
"box_splice": "splice",
},
),
GroupedOpCodes(
name="EllipticCurve",
doc="Elliptic Curve functions",
ops={
"ec_add": "add",
"ec_map_to": "map_to",
"ec_multi_scalar_mul": "scalar_mul_multi",
"ec_pairing_check": "pairing_check",
"ec_scalar_mul": "scalar_mul",
"ec_subgroup_check": "subgroup_check",
},
),
GroupedOpCodes(
name="ITxnCreate",
doc="Create inner transactions",
ops={
"itxn_begin": "begin",
"itxn_next": "next",
"itxn_submit": "submit",
"itxn_field": "set",
},
),
]
MERGED_OP_CODES = [
MergedOpCodes(
name="Txn",
doc="Get values for the current executing transaction",
ops={
"txn": {},
"txnas": {
"txna": ["F", "I"],
},
},
),
MergedOpCodes(
name="GTxn",
doc="Get values for transactions in the current group",
ops={
"gtxns": {
"gtxn": ["F", "T"],
},
# field is immediate, first stack arg is txn index, second stack arg is array index
"gtxnsas": {
"gtxnsa": ["F", "A", "I"], # group index on stack
"gtxna": ["F", "T", "I"], # no stack args
"gtxnas": ["F", "T", "A"], # array index on stack
},
},
),
MergedOpCodes(
name="ITxn",
doc="Get values for the last inner transaction",
ops={
"itxn": {},
"itxnas": {
"itxna": ["F", "I"],
},
},
),
MergedOpCodes(
name="GITxn",
doc="Get values for inner transaction in the last group submitted",
ops={
"gitxn": {},
"gitxnas": {
"gitxna": ["T", "F", "I"],
},
},
),
MergedOpCodes(
name="Global",
doc="Get Global values",
ops={"global": {}},
),
]
RENAMED_OP_CODES = [
RenamedOpCode(
name="arg",
op="args",
stack_aliases={"arg": ["N"]},
),
RenamedOpCode(
name="extract",
op="extract3",
stack_aliases={
"extract": ["A", "S", "L"],
},
),
RenamedOpCode(
name="replace",
op="replace3",
stack_aliases={
"replace2": ["A", "S", "B"],
},
),
RenamedOpCode(
name="substring",
op="substring3",
stack_aliases={
"substring": ["A", "S", "E"],
},
),
RenamedOpCode(
name="gload",
op="gloadss",
stack_aliases={
"gload": ["T", "I"],
"gloads": ["A", "I"],
},
),
RenamedOpCode(
name="gaid",
op="gaids",
stack_aliases={"gaid": ["T"]},
),
RenamedOpCode(
name="exit",
op="return",
),
]
EXCLUDED_OPCODES = {
# low level flow control
"bnz",
"bz",
"b",
"callsub",
"retsub",
"proto",
"switch",
"match",
# low level stack manipulation
"intc",
*[f"intc_{i}" for i in range(4)],
"bytec",
*[f"bytec_{i}" for i in range(4)],
"pushbytes",
"pushbytess",
"pushint",
"pushints",
"frame_dig",
"frame_bury",
"bury",
"cover",
"dig",
"dup",
"dup2",
"dupn",
"pop",
"popn",
"swap",
"uncover",
# program scratch slot read/modification (for current program)
"load",
"loads",
"store",
"stores",
    # manipulates what other low level ops point to
"intcblock",
"bytecblock",
# implicit immediates, covered by optimiser and/or assembler
"arg_0",
"arg_1",
"arg_2",
"arg_3",
# have a higher level abstraction that supersedes it
"log",
}
# which ops to treat as properties in the generated stubs
PROPERTY_OPS = {
"global": {"exclude": ["opcode_budget"]},
"txn": {"exclude": list[str]()},
}
@attrs.define
class TypedName:
name: str
type: StackType | ImmediateKind | str
doc: str | None
@attrs.define(kw_only=True)
class FunctionDef:
name: str
doc: list[str]
is_property: bool
args: list[TypedName] = attrs.field(factory=list)
return_docs: list[str] = attrs.field(factory=list)
op_mapping: OpMappingWithOverloads
min_avm_version: int
@attrs.define
class ClassDef:
name: str
doc: str
methods: list[FunctionDef] = attrs.field()
ops: list[str]
def main() -> None:
spec_path = VCS_ROOT / "langspec.puya.json"
lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
lang_spec = LanguageSpec.from_json(lang_spec_json)
non_simple_ops = {
*EXCLUDED_OPCODES,
*dir(builtins),
*keyword.kwlist, # TODO: maybe consider softkwlist too?
}
function_defs = list[FunctionDef]()
class_defs = list[ClassDef]()
enums_to_build = dict[str, bool]()
for merged in MERGED_OP_CODES:
non_simple_ops.update(merged.handled_ops())
class_defs.append(build_merged_ops(lang_spec, merged))
for grouped in GROUPED_OP_CODES:
non_simple_ops.update(grouped.handled_ops())
class_defs.append(build_grouped_ops(lang_spec, grouped, enums_to_build))
for aliased in RENAMED_OP_CODES:
function_defs.extend(build_aliased_ops(lang_spec, aliased))
non_simple_ops.update(aliased.handled_ops())
for op in lang_spec.ops.values():
if op.name in non_simple_ops or not op.name.isidentifier():
logger.info(f"Ignoring: {op.name}")
continue
overriding_immediate = get_overriding_immediate(op)
if overriding_immediate:
class_defs.append(
build_class_from_overriding_immediate(
lang_spec,
op,
class_name=get_python_enum_class(op.name),
class_doc=" ".join(op.doc),
immediate=overriding_immediate,
aliases=[],
)
)
else:
for immediate in op.immediate_args:
if immediate.immediate_type == ImmediateKind.arg_enum and (
immediate.modifies_stack_input is None
and immediate.modifies_stack_output is None
):
assert immediate.arg_enum is not None
enums_to_build[immediate.arg_enum] = True
function_defs.extend(build_operation_methods(op, op.name, []))
function_defs.sort(key=lambda x: x.name)
class_defs.sort(key=lambda x: x.name)
enum_names = list(enums_to_build.keys())
output_stub(lang_spec, enum_names, function_defs, class_defs)
output_awst_data(lang_spec, enum_names, function_defs, class_defs)
def sub_types(type_name: StackType, *, covariant: bool) -> Sequence[pytypes.PyType]:
try:
typs = STACK_TYPE_MAPPING[type_name]
except KeyError as ex:
raise NotImplementedError(
f"Could not map stack type {type_name} to an algopy type"
) from ex
else:
last_index = None if covariant else 1
return typs[:last_index]
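# Illustrative results (assuming the mapping above):
#   sub_types(StackType.asset, covariant=True)   # -> [AssetType, UInt64Type]
#   sub_types(StackType.asset, covariant=False)  # -> [AssetType]
# i.e. covariant (input) positions accept the wider set of types, while outputs
# are restricted to the primary type.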
def immediate_kind_to_type(kind: ImmediateKind) -> type[int | str]:
match kind:
case ImmediateKind.uint8 | ImmediateKind.int8 | ImmediateKind.varuint:
return int
case ImmediateKind.arg_enum:
return str
case _:
raise ValueError(f"Unexpected ImmediateKind: {kind}")
def get_python_type(
typ: StackType | ImmediateKind | str, *, covariant: bool, any_as: str | None
) -> str:
match typ:
case StackType() as stack_type:
if any_as and stack_type == StackType.any:
return any_as
ptypes_ = sub_types(stack_type, covariant=covariant)
names = [str(wt).removeprefix("algopy.") for wt in ptypes_]
if covariant:
for pt in ptypes_:
lit_t = PYTYPE_TO_LITERAL[pt]
if lit_t is not None:
lit_name = str(lit_t)
if lit_name not in names:
names.append(lit_name)
return " | ".join(names)
case ImmediateKind() as immediate_kind:
return immediate_kind_to_type(immediate_kind).__name__
case _:
return typ
def build_method_stub(function: FunctionDef, prefix: str = "") -> Iterable[str]:
signature = list[str]()
doc = function.doc[:]
signature.append(f"def {function.name}(")
args = list[str]()
for arg in function.args:
python_type = get_python_type(arg.type, covariant=True, any_as=None)
args.append(f"{arg.name}: {python_type}")
if arg.doc:
doc.append(f":param {python_type} {arg.name}: {arg.doc}")
if function.args:
args.append("/") # TODO: remove once we support kwargs
signature.append(", ".join(args))
return_docs = function.return_docs
returns = pytype_stub_repr(function.op_mapping.result)
if return_docs:
if doc:
doc.append(f":returns {returns}: {return_docs[0]}")
doc.extend(return_docs[1:])
else:
doc = return_docs
signature.append(f") -> {returns}:")
teal_ops = sorted({op.op_code for op in function.op_mapping.overloads})
teal_op_desc = ", ".join(_get_algorand_doc(teal_op) for teal_op in teal_ops)
doc.append("")
doc.append(f"Native TEAL opcode: {teal_op_desc}")
body = list[str]()
if doc:
body.append('"""')
body.extend(doc)
body.append('"""')
else:
body.append("...")
yield prefix + "".join(signature)
yield from [textwrap.indent(line, prefix=prefix + INDENT) for line in body]
def build_stub_class(klass: ClassDef) -> Iterable[str]:
ops = [f"{_get_algorand_doc(op)}" for op in klass.ops]
docstring = "\n".join(
[
INDENT + '"""',
INDENT + klass.doc,
INDENT + f"Native TEAL op{'s' if len(ops) > 1 else ''}: {', '.join(ops)}",
INDENT + '"""',
]
)
method_preamble = f"{INDENT}@staticmethod"
yield f"class {klass.name}:"
yield docstring
for method in klass.methods:
if method.is_property:
yield from build_class_var_stub(method, INDENT)
else:
yield method_preamble
yield from build_method_stub(method, prefix=INDENT)
yield ""
def build_class_var_stub(function: FunctionDef, indent: str) -> Iterable[str]:
returns = pytype_stub_repr(function.op_mapping.result)
return_docs = function.return_docs
doc = return_docs if return_docs else function.doc[:]
_maybe_add_min_version_doc(doc, function.min_avm_version)
yield f"{indent}{function.name}: typing.Final[{returns}] = ..."
yield f'{indent}"""'
for doc_line in doc:
yield f"{indent}{doc_line}"
yield f'{indent}"""'
def _get_modified_stack_value(alias: Op) -> StackValue:
immediate = get_overriding_immediate(alias)
assert immediate
if immediate.modifies_stack_input is not None:
return alias.stack_inputs[immediate.modifies_stack_input]
else:
assert immediate.modifies_stack_output is not None
return alias.stack_outputs[immediate.modifies_stack_output]
AliasT: typing.TypeAlias = tuple[Op, list[str]]
def build_class_from_overriding_immediate(
spec: LanguageSpec,
op: Op,
class_name: str,
class_doc: str,
immediate: Immediate,
aliases: list[AliasT],
) -> ClassDef:
assert immediate.arg_enum
logger.info(f"Using overriding immediate for {op.name}")
arg_enum_values = spec.arg_enums[immediate.arg_enum]
# copy inputs so they can be mutated safely
op = copy.deepcopy(op)
aliases = copy.deepcopy(aliases)
# obtain a list of stack values that will be modified for each enum
stacks_to_modify = [_get_modified_stack_value(o) for o, _ in [(op, None), *aliases]]
# build a method for each arg enum value
methods = list[FunctionDef]()
class_ops = {op.name}
for value in arg_enum_values:
stack_type = value.stack_type
assert stack_type
for stack_to_modify in stacks_to_modify:
stack_to_modify.stack_type = stack_type
stack_to_modify.doc = value.doc
method = build_operation_method(
op, snake_case(value.name), aliases, const_immediate_value=(immediate, value)
)
        # some enums are reused across ops, so take the maximum of the op's and the
        # enum value's minimum AVM versions
method.min_avm_version = max(op.min_avm_version, value.min_avm_version)
_maybe_add_min_version_doc(method.doc, method.min_avm_version)
for op_mapping in method.op_mapping.overloads:
class_ops.add(op_mapping.op_code)
methods.append(method)
return ClassDef(name=class_name, doc=class_doc, methods=methods, ops=sorted(class_ops))
def get_op_doc(op: Op) -> list[str]:
doc = [d.replace("\\", "\\\\") for d in op.doc]
_maybe_add_min_version_doc(doc, op.min_avm_version)
return doc
def get_python_enum_class(arg_enum: str) -> str:
try:
return PYTHON_ENUM_CLASS[arg_enum]
except KeyError:
pass
# don't change acronyms
if arg_enum.isupper():
return arg_enum
return snake_case(arg_enum).replace("_", " ").title().replace(" ", "")
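# Illustrative conversions:
#   "Mimc Configurations" -> "MiMCConfigurations"  (explicit override)
#   "ECDSA"               -> "ECDSA"               (acronyms kept as-is)
#   "json_ref"            -> "JsonRef"             (snake_cased, titled, then joined)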
def get_overriding_immediate(op: Op) -> Immediate | None:
return next(
(
immediate
for immediate in op.immediate_args
if immediate.modifies_stack_input is not None
or immediate.modifies_stack_output is not None
),
None,
)
def build_enum(spec: LanguageSpec, arg_enum: str) -> Iterable[str]:
values = spec.arg_enums[arg_enum]
enum_name = get_python_enum_class(arg_enum)
yield f"class {enum_name}(str):"
yield f'{INDENT}"""Available values for the `{arg_enum}` enum"""'
for value in values:
yield f"{INDENT}{value.name}: {enum_name} = ..."
enum_doc = []
if value.doc:
enum_doc.append(value.doc)
_maybe_add_min_version_doc(enum_doc, value.min_avm_version)
if enum_doc:
yield f'{INDENT}"""'
for doc_line in enum_doc:
yield f"{INDENT}{doc_line}"
yield f'{INDENT}"""'
yield ""
def _maybe_add_min_version_doc(doc: list[str], version: int) -> None:
# only output min AVM version if it is greater than our min supported version
if version > MIN_SUPPORTED_VERSION:
doc.append(f"Min AVM version: {version}")
def build_operation_method(
op: Op,
op_function_name: str,
aliases: list[AliasT],
const_immediate_value: tuple[Immediate, ArgEnum] | None = None,
) -> FunctionDef:
args = []
    # python stub args can be different from mapping args, due to immediate args
# that are inferred based on the method/property used
function_args = []
doc = get_op_doc(op)
for immediate in op.immediate_args:
arg_type: ImmediateKind | str
if immediate.immediate_type == ImmediateKind.arg_enum:
assert immediate.arg_enum, "Arg enum expected"
arg_type = get_python_enum_class(immediate.arg_enum)
else:
arg_type = immediate.immediate_type
im_arg = TypedName(name=immediate.name.lower(), type=arg_type, doc=immediate.doc)
args.append(im_arg)
if const_immediate_value and const_immediate_value[0] == immediate:
# omit immediate arg from signature
doc = []
else:
function_args.append(im_arg)
for si in op.stack_inputs:
stack_arg = TypedName(name=si.name.lower(), type=si.stack_type, doc=si.doc)
args.append(stack_arg)
function_args.append(stack_arg)
if op.halts:
return_docs = ["Halts program"]
else:
return_docs = [so.doc for so in op.stack_outputs if so.doc]
try:
property_op = PROPERTY_OPS[op.name]
except KeyError:
is_property = False
else:
is_property = op_function_name not in property_op["exclude"]
if op.halts:
result_typ = pytypes.NeverType
else:
# replace immediate reference to arg enum with a constant enum value
result_ptypes = [sub_types(o.stack_type, covariant=False)[0] for o in op.stack_outputs]
if not result_ptypes:
result_typ = pytypes.NoneType
elif len(op.stack_outputs) == 1:
(result_typ,) = result_ptypes
else:
result_typ = pytypes.GenericTupleType.parameterise(result_ptypes, source_location=None)
if result_typ == pytypes.UInt64Type:
if op_function_name == "on_completion":
result_typ = pytypes.OnCompleteActionType
elif op_function_name == "type_enum":
result_typ = pytypes.TransactionTypeType
op_mappings = []
ops_with_aliases = [(op, list[str]()), *aliases]
for map_op, alias_args in ops_with_aliases:
assert map_op.stack_outputs == op.stack_outputs
if alias_args:
# map the stack or immediate input name to the function signature position
name_to_sig_idx = {n: idx2 for idx2, n in enumerate(alias_args)}
else:
name_to_sig_idx = {tn.name.upper(): idx2 for idx2, tn in enumerate(args)}
map_immediates = list[str | int | type[str | int]]()
map_args_map = dict[int, Sequence[pytypes.PyType] | int]()
for idx, i_arg in enumerate(map_op.immediate_args):
if const_immediate_value and const_immediate_value[0] == i_arg:
map_immediates.append(const_immediate_value[1].name)
else:
im_typ = immediate_kind_to_type(i_arg.immediate_type)
map_immediates.append(im_typ)
sig_idx = name_to_sig_idx[i_arg.name]
map_args_map[sig_idx] = idx
for s_arg in map_op.stack_inputs:
allowed_types = tuple(sub_types(s_arg.stack_type, covariant=True))
sig_idx = name_to_sig_idx[s_arg.name]
map_args_map[sig_idx] = allowed_types
op_mappings.append(
FunctionOpMapping(
op_code=map_op.name,
immediates=map_immediates,
args=[map_args_map[k] for k in sorted(map_args_map)],
)
)
proto_function = FunctionDef(
name=op_function_name,
doc=doc,
is_property=is_property,
args=function_args,
return_docs=return_docs,
op_mapping=OpMappingWithOverloads(
arity=len(function_args),
result=result_typ,
overloads=op_mappings,
),
min_avm_version=op.min_avm_version,
)
return proto_function
def build_operation_methods(
op: Op, op_function_name: str, aliases: list[AliasT]
) -> Iterable[FunctionDef]:
logger.info(f"Mapping {op.name} to {op_function_name}")
if StackType.any in (s.stack_type for s in op.stack_outputs):
logger.info(f"Found any output for {op.name}")
for replace_any_with in (StackType.bytes, StackType.uint64):
new_op = op_any_replaced(op, replace_any_with)
new_name = f"{op_function_name}_{replace_any_with.name}"
new_aliases = [
(op_any_replaced(alias_op, replace_any_with), names) for alias_op, names in aliases
]
yield build_operation_method(new_op, new_name, new_aliases)
else:
yield build_operation_method(op, op_function_name, aliases)
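# For example, an op with a single `any` stack output (such as the value returned by
# app_global_get_ex) is emitted twice by the generator above, once as a
# <name>_bytes stub and once as a <name>_uint64 stub.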
def op_any_replaced(op: Op, replace_any_with: StackType) -> Op:
stack_inputs = []
input_replaced = 0
for si in op.stack_inputs:
if si.stack_type != StackType.any:
stack_inputs.append(si)
else:
input_replaced += 1
stack_inputs.append(attrs.evolve(si, stack_type=replace_any_with))
stack_outputs = []
outputs_replaced = 0
for so in op.stack_outputs:
if so.stack_type != StackType.any:
stack_outputs.append(so)
else:
outputs_replaced += 1
stack_outputs.append(attrs.evolve(so, stack_type=replace_any_with))
assert outputs_replaced == 1
return attrs.evolve(op, stack_inputs=stack_inputs, stack_outputs=stack_outputs)
def build_aliased_ops(spec: LanguageSpec, group: RenamedOpCode) -> Iterable[FunctionDef]:
op = spec.ops[group.op]
aliases = [
(spec.ops[stack_alias], arg_map) for stack_alias, arg_map in group.stack_aliases.items()
]
methods = build_operation_methods(op, group.name, aliases)
return methods
def build_merged_ops(spec: LanguageSpec, group: MergedOpCodes) -> ClassDef:
merge_methods = dict[str, FunctionDef]()
for other_op_name, alias_dict in group.ops.items():
aliases = [(spec.ops[alias_op], arg_map) for alias_op, arg_map in alias_dict.items()]
other_op = spec.ops[other_op_name]
overriding_immediate = get_overriding_immediate(other_op)
assert overriding_immediate
other_class = build_class_from_overriding_immediate(
spec,
other_op,
class_name=group.name,
class_doc=group.doc,
immediate=overriding_immediate,
aliases=aliases,
)
for method in other_class.methods:
merge_methods[method.name] = method
methods = list(merge_methods.values())
return ClassDef(name=group.name, doc=group.doc, methods=methods, ops=sorted(group.ops))
def build_grouped_ops(
spec: LanguageSpec, group: GroupedOpCodes, enums_to_build: dict[str, bool]
) -> ClassDef:
methods = list[FunctionDef]()
for rename_op_name, python_name in group.ops.items():
rename_op = spec.ops[rename_op_name]
rename_immediate = get_overriding_immediate(rename_op)
if rename_immediate:
rename_class = build_class_from_overriding_immediate(
spec,
rename_op,
class_name=group.name,
class_doc=group.doc,
immediate=rename_immediate,
aliases=[],
)
# when grouping an op with immediate overrides, treat python_name as a prefix
for method in rename_class.methods:
method.name = f"{python_name}_{method.name}"
methods.extend(rename_class.methods)
else:
methods.extend(build_operation_methods(rename_op, python_name, aliases=[]))
for arg in rename_op.immediate_args:
if arg.immediate_type == ImmediateKind.arg_enum and (
arg.modifies_stack_input is None and arg.modifies_stack_output is None
):
assert arg.arg_enum is not None
enums_to_build[arg.arg_enum] = True
class_def = ClassDef(
name=group.name,
doc=group.doc,
methods=methods,
ops=sorted(group.ops),
)
return class_def
def pytype_repr(typ: pytypes.PyType) -> str:
try:
return PYTYPE_REPR[typ]
except KeyError:
pass
match typ:
case pytypes.TupleType(items=tuple_items) if len(tuple_items) > 1:
item_strs = [pytype_repr(item) for item in tuple_items]
return (
f"pytypes.GenericTupleType.parameterise("
f"({', '.join(item_strs)}), source_location=None)"
)
raise ValueError(f"Unexpected pytype: {typ}")
def build_op_specification_body(function: FunctionDef) -> Iterable[str]:
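    # yields fragments of python source; the caller joins them and runs ruff format
    # over the result, so whitespace here only needs to be syntactically valid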
if function.is_property:
(op_mapping,) = function.op_mapping.overloads
(immediate,) = op_mapping.immediates
yield (
f"{function.name}=PropertyOpMapping("
f"{op_mapping.op_code!r}, {immediate!r}, {pytype_repr(function.op_mapping.result)},"
f"),"
)
else:
yield f"{function.name}=OpMappingWithOverloads("
if function.op_mapping.result is not pytypes.NoneType:
yield f" result={pytype_repr(function.op_mapping.result)},"
yield f" arity={function.op_mapping.arity}, "
yield " overloads=["
for op_mapping in function.op_mapping.overloads:
yield f"FunctionOpMapping({op_mapping.op_code!r},"
if op_mapping.immediates:
yield " immediates=["
for idx, item in enumerate(op_mapping.immediates):
if idx:
yield ", "
if not isinstance(item, type):
yield repr(item)
else:
yield item.__name__
yield "],"
if op_mapping.args:
yield " args=["
for idx, allowed_types_or_idx in enumerate(op_mapping.args):
if idx:
yield ", "
if isinstance(allowed_types_or_idx, int):
yield repr(allowed_types_or_idx)
                    elif len(allowed_types_or_idx) == 1:
                        yield f"({pytype_repr(*allowed_types_or_idx)},)"
                    else:
                        yield "("
                        for idx2, allowed_type in enumerate(allowed_types_or_idx):
                            if idx2:
                                yield ","
                            yield pytype_repr(allowed_type)
                        yield ")"
yield "],"
yield "),"
yield "]"
yield "),"
def build_awst_data(
lang_spec: LanguageSpec,
enums: list[str],
function_ops: list[FunctionDef],
class_ops: list[ClassDef],
) -> Iterable[str]:
yield "import typing"
yield "from collections.abc import Mapping, Sequence"
yield "from puyapy.awst_build import pytypes"
yield (
"from puyapy.awst_build.intrinsic_models"
" import FunctionOpMapping, OpMappingWithOverloads, PropertyOpMapping"
)
yield "ENUM_CLASSES: typing.Final[Mapping[str, Mapping[str, str]]] = dict("
for enum_name in enums:
yield f"{get_python_enum_class(enum_name)}=dict("
for enum_value in lang_spec.arg_enums[enum_name]:
# enum names currently match enum immediate values
yield f'{enum_value.name}="{enum_value.name}",'
yield "),"
yield ")"
yield ""
yield "FUNC_TO_AST_MAPPER: typing.Final[Mapping[str, OpMappingWithOverloads]] = dict("
for function_op in function_ops:
yield "".join(build_op_specification_body(function_op))
yield ")"
yield (
"NAMESPACE_CLASSES: "
"typing.Final[Mapping[str, Mapping[str, PropertyOpMapping | OpMappingWithOverloads]]]"
" = dict("
)
for class_op in class_ops:
yield f"{class_op.name}=dict("
for method in class_op.methods:
yield "".join(build_op_specification_body(method))
yield "),"
yield ")"
def output_stub(
lang_spec: LanguageSpec,
enums: list[str],
function_ops: list[FunctionDef],
class_ops: list[ClassDef],
) -> None:
references = ", ".join(
sorted(
str(pt).removeprefix("algopy.")
for pt, lit_t in PYTYPE_TO_LITERAL.items()
if str(pt).startswith("algopy.")
)
)
stub: list[str] = [
"import typing",
"",
f"from algopy import {references}",
]
for arg_enum in enums:
stub.extend(build_enum(lang_spec, arg_enum))
for function in function_ops:
stub.extend(build_method_stub(function))
for class_op in class_ops:
stub.extend(build_stub_class(class_op))
stub_out_path = VCS_ROOT / "stubs" / "algopy-stubs" / f"{STUB_NAMESPACE}.pyi"
stub_out_path.write_text("\n".join(stub), encoding="utf-8")
subprocess.run(["ruff", "format", str(stub_out_path)], check=True, cwd=VCS_ROOT)
def pytype_stub_repr(pytype: pytypes.PyType) -> str:
return str(pytype).replace("algopy.", "")
def output_awst_data(
lang_spec: LanguageSpec,
enums: list[str],
function_ops: list[FunctionDef],
class_ops: list[ClassDef],
) -> None:
awst_data = build_awst_data(lang_spec, enums, function_ops, class_ops)
awst_data_path = VCS_ROOT / "src" / "puyapy" / "awst_build" / "intrinsic_data.py"
awst_data_path.write_text("\n".join(awst_data), encoding="utf-8")
subprocess.run(["ruff", "format", str(awst_data_path)], check=True, cwd=VCS_ROOT)
subprocess.run(["ruff", "check", "--fix", str(awst_data_path)], check=False, cwd=VCS_ROOT)
def _get_algorand_doc(op: str) -> str:
return f"[`{op}`]({ALGORAND_OP_URL}#{op})"
if __name__ == "__main__":
main()
| algorandfoundation/puya | scripts/generate_stubs.py | Python | NOASSERTION | 33,877 |
#!/usr/bin/env python3
import contextlib
import enum
import json
import logging
import typing
from pathlib import Path
import attrs
import cattrs
logger = logging.getLogger(__name__)
STACK_INPUT_NAMES = "ABCDE"
STACK_OUTPUT_NAMES_FEW = "XYZ"  # used when there are 3 or fewer outputs
STACK_OUTPUT_NAMES_MANY = "WXYZ"  # used when there are 4 outputs
VARIABLE_SIZE_OPCODES = {
"intcblock",
"bytecblock",
"pushbytes",
"pushbytess",
"pushint",
"pushints",
"switch",
"match",
}
class NamedType(typing.TypedDict):
"""
{
"Name": "uint64",
"Abbreviation": "i",
"Bound": [
0,
18446744073709551615
],
"AVMType": "uint64"
},
"""
Name: str
Abbreviation: str
AVMType: str
class ImmediateNote(typing.TypedDict, total=False):
"""
{
"Comment": "transaction field index",
"Encoding": "uint8",
"Name": "F",
"Reference": "txn"
}
"""
Comment: str
Encoding: str
Name: str
Reference: str
class Operation(typing.TypedDict, total=False):
"""
{
"Opcode": 0,
"Name": "err",
"Size": 1,
"Doc": "Fail immediately.",
"IntroducedVersion": 1,
"Groups": [
"Flow Control"
]
},
{
"Opcode": 1,
"Name": "sha256",
"Args": [
"[]byte"
],
"Returns": [
"[32]byte"
],
"Size": 1,
"Doc": "SHA256 hash of value A, yields [32]byte",
"IntroducedVersion": 1,
"Groups": [
"Arithmetic"
]
}
"""
Doc: str
Opcode: int
Size: int
Name: str
IntroducedVersion: int
Groups: list[str]
Args: list[str]
Returns: list[str]
DocExtra: str
ArgEnum: list[str]
ArgEnumTypes: list[str]
ArgEnumBytes: list[int]
ArgModes: list[int]
ArgEnumVersion: list[int]
ImmediateNote: list[ImmediateNote]
# the following values are not in the original langspec.json
# these values are manually patched in during transform
ArgEnumIsInput: bool
Halts: bool
# these values are output by a modified opdoc.go from go-algorand repo
Cost: str
ArgEnumDoc: list[str]
Modes: int
class AlgorandLanguageSpec(typing.TypedDict):
NamedTypes: list[NamedType]
Ops: list[Operation]
class StackType(enum.StrEnum):
uint64 = enum.auto()
bytes = "[]byte"
bytes_8 = "[8]byte"
bytes_32 = "[32]byte"
bytes_33 = "[33]byte"
bytes_64 = "[64]byte"
bytes_80 = "[80]byte"
bytes_1232 = "[1232]byte"
bytes_1793 = "[1793]byte"
bool = enum.auto()
address = enum.auto()
address_or_index = enum.auto()
any = enum.auto()
bigint = enum.auto()
box_name = "boxName"
asset = enum.auto()
application = enum.auto()
state_key = "stateKey"
class RunMode(enum.StrEnum):
app = enum.auto()
sig = enum.auto()
any = enum.auto()
@attrs.define
class StackValue:
name: str
"""Name used to refer to this value in the Op.doc"""
stack_type: StackType
doc: str | None = None
@attrs.define
class ArgEnum:
name: str
doc: str | None
stack_type: StackType | None
mode: RunMode
value: int
min_avm_version: int
class ImmediateKind(enum.StrEnum):
uint8 = enum.auto()
int8 = enum.auto()
label = enum.auto()
varuint = enum.auto()
bytes = enum.auto()
# array types
label_array = enum.auto()
varuint_array = enum.auto()
bytes_array = enum.auto()
# not in original lang spec
arg_enum = enum.auto()
@attrs.frozen(kw_only=True)
class Immediate:
name: str
"""Name used to refer to this value in the Op.doc"""
immediate_type: ImmediateKind
    arg_enum: str | None = None
    """arg_enum reference, if immediate_type is arg_enum"""
    modifies_stack_input: int | None = None
    """Index of stack input type that this immediate modifies"""
    modifies_stack_output: int | None = None
    """Index of stack output type that this immediate modifies"""
    doc: str | None = None
@attrs.define
class Cost:
value: int | None
"""Static cost of op, or None if cost is not static"""
doc: str
"""Documentation describing how cost is calculated"""
@attrs.define
class Op:
name: str
"""Name of op in TEAL"""
code: int
"""Bytecode value"""
size: int
"""Size in bytes of compiled op, 0 indicate size is variable"""
doc: list[str]
cost: Cost
min_avm_version: int
"""AVM version op was introduced"""
    halts: bool
    """True if this op halts the program"""
    mode: RunMode
    """Run mode(s) the op is valid in"""
groups: list[str] = attrs.field(factory=list)
"""Groups op belongs to"""
stack_inputs: list[StackValue] = attrs.field(factory=list)
"""Inputs that come from the stack"""
immediate_args: list[Immediate] = attrs.field(factory=list)
"""Arguments that are passed as immediate values in TEAL"""
stack_outputs: list[StackValue] = attrs.field(factory=list)
"""Outputs left on the stack"""
@attrs.define
class LanguageSpec:
ops: dict[str, Op] = attrs.field(factory=dict)
arg_enums: dict[str, list[ArgEnum]] = attrs.field(factory=dict)
@staticmethod
def from_json(json: dict[str, typing.Any]) -> "LanguageSpec":
return cattrs.structure(json, LanguageSpec)
def to_json(self) -> dict[str, typing.Any]:
return attrs.asdict(self)
def _patch_lang_spec(lang_spec: dict[str, typing.Any]) -> None:
ops = {op["Name"]: op for op in lang_spec["Ops"]}
# patch ops that use a stack type of any
# for arguments that should be an Address or Address index
for op_name in (
"acct_params_get",
"app_local_get",
"app_local_put",
"app_local_del",
"app_local_get_ex",
"app_opted_in",
"asset_holding_get",
"balance",
"min_balance",
"voter_params_get",
):
_patch_arg_type(ops, op_name, 0, "any", "address_or_index")
# patch ops that use a stack type of uint64
# for arguments that should be an Application
for op_name, arg_index in {
"app_opted_in": 1,
"app_global_get_ex": 0,
"app_local_get_ex": 1,
"app_params_get": 0,
}.items():
_patch_arg_type(ops, op_name, arg_index, "uint64", "application")
# patch ops that use a stack type of uint64
# for return types that should be a bool
for op_name in [
"!",
]:
_patch_return_type(ops, op_name, 0, "uint64", "bool")
# patch ops that use a stack type of uint64
# for arguments that should be an Asset
for op_name, arg_index in {
"asset_holding_get": 1,
"asset_params_get": 0,
}.items():
_patch_arg_type(ops, op_name, arg_index, "uint64", "asset")
for op_name, arg_index in {
"select": 2,
}.items():
_patch_arg_type(ops, op_name, arg_index, "uint64", "bool")
# patch return bytes -> bigint
for op_name in [
"b+",
"b*",
]:
_patch_return_type(ops, op_name, 0, "[]byte", "bigint")
# patch txn enum fields with asset and application types
txn = ops["txn"]
itxn_field = ops["itxn_field"]
for op in (txn, itxn_field):
for immediate in [
"XferAsset",
"ConfigAsset",
"FreezeAsset",
]:
_patch_arg_enum_type(op, immediate, "uint64", "asset")
_patch_arg_enum_type(op, "ApplicationID", "uint64", "application")
_patch_arg_enum_type(txn, "CreatedApplicationID", "uint64", "application")
_patch_arg_enum_type(txn, "CreatedAssetID", "uint64", "asset")
# patch txna enums
txna = ops["txna"]
_patch_arg_enum_type(txna, "Assets", "uint64", "asset")
_patch_arg_enum_type(txna, "Applications", "uint64", "application")
# patch global enums
_patch_arg_enum_type(ops["global"], "CurrentApplicationID", "uint64", "application")
# base64_decode has an ArgEnumTypes array when it probably shouldn't
# as all stack outputs are bytes
del ops["base64_decode"]["ArgEnumTypes"]
# itxn_field reuses the same field group as txn, however it only uses a subset of fields
# additionally ArgEnumTypes refers to the stack input types not the output types
itxn_field["ImmediateNote"][0]["Reference"] = "itxn_field"
itxn_field["ArgEnumIsInput"] = True
# ops that never return encode this with a single return type of none
# however currently this information is stripped when generating langspec.json
ops["err"]["Halts"] = True
ops["return"]["Halts"] = True
def _patch_arg_enum_type(
op: dict[str, typing.Any], immediate: str, current_type: str, new_type: str
) -> None:
arg_enum = op["ArgEnum"]
assert immediate in arg_enum, f"Expected {immediate} arg enum for {op['Name']}"
immediate_index = arg_enum.index(immediate)
arg_enum_types = op["ArgEnumTypes"]
assert (
arg_enum_types[immediate_index] == current_type
), f"Expected {immediate} to be {current_type}"
arg_enum_types[immediate_index] = new_type
def _patch_arg_type(
ops: dict[str, typing.Any], op_name: str, arg_index: int, current_type: str, new_type: str
) -> None:
op_args = ops[op_name]["Args"]
assert (
op_args[arg_index] == current_type
), f"Expected {op_name} arg {arg_index} to be {current_type}"
op_args[arg_index] = new_type
def _patch_return_type(
ops: dict[str, typing.Any], op_name: str, return_index: int, current_type: str, new_type: str
) -> None:
returns = ops[op_name]["Returns"]
assert (
returns[return_index] == current_type
), f"Expected {op_name} return {return_index} to be {current_type}"
returns[return_index] = new_type
def create_indexed_enum(op: Operation) -> list[ArgEnum]:
enum_names = op["ArgEnum"]
enum_types: list[str] | list[None] = op.get("ArgEnumTypes", [])
enum_docs = op["ArgEnumDoc"]
enum_bytes = op["ArgEnumBytes"]
enum_modes = op["ArgModes"]
enum_versions = op["ArgEnumVersion"]
if not enum_types:
enum_types = [None] * len(enum_names)
result = list[ArgEnum]()
for enum_name, enum_type, enum_doc, enum_mode, enum_byte, enum_version in zip(
enum_names, enum_types, enum_docs, enum_modes, enum_bytes, enum_versions, strict=True
):
stack_type = None if enum_type is None else StackType(enum_type)
enum_value = ArgEnum(
name=enum_name,
doc=enum_doc if enum_doc else None,
stack_type=stack_type,
mode=_map_enum_mode(op["Modes"], enum_mode),
value=enum_byte,
min_avm_version=enum_version,
)
result.append(enum_value)
return result
def _map_enum_mode(op_mode: int, arg_mode: int = 0) -> RunMode:
mode = arg_mode or op_mode
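    # mode values appear to be a bit set: 1 = signature (logicsig), 2 = application, 3 = both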
match mode:
case 1:
return RunMode.sig
case 2:
return RunMode.app
case 3:
return RunMode.any
case _:
raise ValueError("Unexpected run mode")
def transform_encoding(value: str) -> ImmediateKind:
match value:
case "uint8":
result = ImmediateKind.uint8
case "int8":
result = ImmediateKind.int8
case "int16 (big-endian)":
result = ImmediateKind.label
case "varuint":
result = ImmediateKind.varuint
case "varuint length, bytes":
result = ImmediateKind.bytes
case "varuint count, [varuint ...]":
result = ImmediateKind.varuint_array
case "varuint count, [varuint length, bytes ...]":
result = ImmediateKind.bytes_array
case "varuint count, [int16 (big-endian) ...]":
result = ImmediateKind.label_array
case _:
raise ValueError(f"Unknown Encoding: {value}")
return result
def transform_stack_args(op: Operation) -> list[StackValue]:
result = list[StackValue]()
args = op.get("Args", [])
assert len(args) <= len(STACK_INPUT_NAMES), f"More args than expected for {op['Name']}"
    for index, arg_type in enumerate(args):
name = STACK_INPUT_NAMES[index]
stack_type = StackType(arg_type)
result.append(StackValue(name=name, stack_type=stack_type))
return result
def transform_immediates(
arg_enums: dict[str, list[ArgEnum]],
algorand_ops: dict[str, Operation],
op: Operation,
) -> list[Immediate]:
op_name = op["Name"]
result = list[Immediate]()
for immediate in op.get("ImmediateNote", []):
arg_enum_reference = immediate.get("Reference")
if arg_enum_reference is not None:
arg_enum = op.get("ArgEnum")
if arg_enum_reference not in arg_enums:
try:
enum_op = algorand_ops[arg_enum_reference]
except KeyError:
enum_op = op
assert arg_enum, f"Expected enum for {op_name}"
arg_enums[arg_enum_reference] = create_indexed_enum(enum_op)
if arg_enum is not None:
assert len(arg_enum) == len(
arg_enums[arg_enum_reference]
), f"Arg Enum lengths don't match for {op_name}"
modifies_stack_input: int | None = None
modifies_stack_output: int | None = None
if arg_enum_reference and any(a.stack_type for a in arg_enums[arg_enum_reference]):
assert all(a.stack_type for a in arg_enums[arg_enum_reference])
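            # by convention the enum constrains the type of the first stack input
            # (e.g. itxn_field) or the first stack output (e.g. txn)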
if op.get("ArgEnumIsInput"):
modifies_stack_input = 0
else:
modifies_stack_output = 0
result.append(
Immediate(
name=immediate["Name"],
immediate_type=(
transform_encoding(immediate["Encoding"])
if arg_enum_reference is None
else ImmediateKind.arg_enum
),
modifies_stack_input=modifies_stack_input,
modifies_stack_output=modifies_stack_output,
arg_enum=arg_enum_reference,
doc=immediate["Comment"],
)
)
return result
def transform_returns(op: Operation) -> list[StackValue]:
try:
returns = op["Returns"]
except KeyError:
return []
num_returns = len(returns)
if num_returns <= len(STACK_OUTPUT_NAMES_FEW):
return_argument_names = STACK_OUTPUT_NAMES_FEW
elif num_returns <= len(STACK_OUTPUT_NAMES_MANY):
return_argument_names = STACK_OUTPUT_NAMES_MANY
else:
raise AssertionError(f"More returns than expected for {op['Name']}")
return [
StackValue(
name=name,
stack_type=StackType(return_type),
)
for name, return_type in zip(return_argument_names, returns, strict=False)
]
def transform_doc(op: Operation) -> list[str]:
doc = op["Doc"].splitlines()
doc_extra = op.get("DocExtra")
if doc_extra:
doc.extend(doc_extra.splitlines())
return doc
def get_immediate_encoded_size(immediate: Immediate) -> int:
match immediate.immediate_type:
case ImmediateKind.uint8 | ImmediateKind.int8 | ImmediateKind.arg_enum:
return 1
case ImmediateKind.label:
return 2
case ImmediateKind():
return 0
case _:
raise ValueError(f"Cannot determine size of {immediate.immediate_type}")
def transform_cost(op: Operation) -> Cost:
algorand_cost = op["Cost"]
cost = Cost(value=None, doc=algorand_cost)
with contextlib.suppress(ValueError):
cost.value = int(algorand_cost)
return cost
def transform_spec(lang_spec: AlgorandLanguageSpec) -> LanguageSpec:
result = LanguageSpec()
arg_enums = result.arg_enums
algorand_ops = {o["Name"]: o for o in sorted(lang_spec["Ops"], key=lambda x: x["Name"])}
for op_name, algorand_op in algorand_ops.items():
op = Op(
name=op_name,
code=algorand_op["Opcode"],
size=algorand_op["Size"],
doc=transform_doc(algorand_op),
cost=transform_cost(algorand_op),
min_avm_version=algorand_op["IntroducedVersion"],
groups=algorand_op["Groups"],
immediate_args=transform_immediates(arg_enums, algorand_ops, algorand_op),
stack_inputs=transform_stack_args(algorand_op),
stack_outputs=transform_returns(algorand_op),
halts=algorand_op.get("Halts", False),
mode=_map_enum_mode(algorand_op["Modes"]),
)
validate_op(result, op)
result.ops[op.name] = op
return result
def validate_op(lang_spec: LanguageSpec, op: Op) -> None:
# validate op size
instruction_size = 0 if op.name in VARIABLE_SIZE_OPCODES else 1
expected_size = (
sum([get_immediate_encoded_size(a) for a in op.immediate_args]) + instruction_size
)
assert op.size == expected_size, f"Unexpected size for specified immediate args for {op.name}"
# validate immediate modifiers
for immediate in op.immediate_args:
if immediate.immediate_type == ImmediateKind.arg_enum:
assert immediate.arg_enum in lang_spec.arg_enums
if immediate.modifies_stack_input is not None:
assert immediate.modifies_stack_input < len(op.stack_inputs), (
f"Immediate for {op.name} references stack input "
f"that does not exist {immediate.modifies_stack_input}"
)
if immediate.modifies_stack_output is not None:
assert immediate.modifies_stack_output < len(op.stack_outputs), (
f"Immediate for {op.name} references stack output "
f"that does not exist {immediate.modifies_stack_output}"
)
else:
assert not immediate.arg_enum
assert not immediate.modifies_stack_input
assert not immediate.modifies_stack_output
def main() -> None:
vcs_root = Path(__file__).parent.parent
spec_path = vcs_root / "langspec.json"
output_path = vcs_root / "langspec.puya.json"
logger.info(f"Transforming {spec_path} to {output_path}")
lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
_patch_lang_spec(lang_spec_json)
lang_spec = typing.cast(AlgorandLanguageSpec, lang_spec_json)
puya_spec = transform_spec(lang_spec)
puya_json = json.dumps(puya_spec.to_json(), indent=4)
output_path.write_text(puya_json, encoding="utf-8")
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO, format="%(message)s")
main()
| algorandfoundation/puya | scripts/transform_lang_spec.py | Python | NOASSERTION | 18,689 |
#!/usr/bin/env python3
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
MYPY_REPO = "https://github.com/python/mypy.git"
VCS_ROOT = Path(__file__).parent.parent
TYPESHED_README = """
This is PuyaPy's custom typeshed, which is a curated subset of the official MyPy typeshed.
It only includes the stubs required by PuyaPy, as this significantly speeds up MyPy's parsing.
However, this means certain Python modules such as `enum` or `dataclasses` cannot be used in
PuyaPy stubs unless this typeshed is updated.
The contents of the typeshed are populated by the `scripts/vendor_mypy.py` script, which is used
to vendor new versions of MyPy or to update the stubs included in this typeshed. So to add new
stubs, update that script and rerun.
""".strip()
def clone_branch(version: str) -> str:
git_clone = f"git clone --depth=1 --branch={version} --single-branch {MYPY_REPO} ."
print(f"Executing: {git_clone}")
subprocess.run(git_clone.split(), check=True)
git_hash = subprocess.run("git rev-parse HEAD".split(), capture_output=True, check=True).stdout
assert git_hash is not None
subprocess.run("rm -rf .git".split(), check=True)
return git_hash.decode("utf8").strip()
def vendor_mypy(version: str) -> None:
puya_src_dir = VCS_ROOT / "src" / "puyapy"
vendor_dir = puya_src_dir / "_vendor"
mypy_vendor = vendor_dir / "mypy"
print(f"Vendoring mypy into: {mypy_vendor}")
print("Removing existing mypy files...")
shutil.rmtree(mypy_vendor, ignore_errors=True)
print(f"Cloning mypy {version}...")
with tempfile.TemporaryDirectory() as tmp_dir:
os.chdir(tmp_dir)
git_hash = clone_branch(version)
print(f"Checked out mypy {version} @ {git_hash}")
print(f"Copying mypy into {mypy_vendor}...")
shutil.copytree(Path(tmp_dir) / "mypy", mypy_vendor)
(mypy_vendor / ".version").write_text(f"{version}: {git_hash}")
print("Updating custom typeshed")
update_puya_typeshed(mypy_vendor / "typeshed", puya_src_dir / "_typeshed")
def update_puya_typeshed(mypy_typeshed: Path, puya_typeshed: Path) -> None:
shutil.rmtree(puya_typeshed, ignore_errors=True)
stubs = Path("stubs")
stdlib = Path("stdlib")
relative_to_copy = [
        # hard coded in mypy/modulefinder.py, minimum requirements for mypy
stubs / "mypy-extensions" / "mypy_extensions.pyi",
stdlib / "VERSIONS",
        # hard coded in mypy/build.py, minimum requirements for mypy
stdlib / "builtins.pyi",
stdlib / "typing.pyi",
stdlib / "types.pyi",
stdlib / "typing_extensions.pyi",
stdlib / "_typeshed" / "__init__.pyi",
stdlib / "_collections_abc.pyi",
stdlib / "collections" / "abc.pyi",
stdlib / "sys" / "__init__.pyi",
stdlib / "abc.pyi",
# needed for puyapy
# stdlib / "enum.pyi"
]
(puya_typeshed / stdlib).mkdir(exist_ok=True, parents=True)
(puya_typeshed / stubs).mkdir(exist_ok=True, parents=True)
for relative in relative_to_copy:
copy_src = mypy_typeshed / relative
copy_dst = puya_typeshed / relative
if copy_src.is_dir():
shutil.copytree(copy_src, copy_dst)
else:
copy_dst.parent.mkdir(exist_ok=True, parents=True)
shutil.copy(copy_src, copy_dst)
(puya_typeshed / stdlib / "collections" / "__init__.pyi").touch()
(puya_typeshed / "README.md").write_text(TYPESHED_README)
if __name__ == "__main__":
if len(sys.argv) > 1:
vendor_mypy(version=sys.argv[1])
else:
print("Usage: python vendor_mypy.py <version>")
print("e.g. python vendor_mypy.py v1.5.0")
| algorandfoundation/puya | scripts/vendor_mypy.py | Python | NOASSERTION | 3,730 |
algorandfoundation/puya | src/_puya_lib/__init__.py | Python | NOASSERTION | 0 |
|
from algopy import (
Bytes,
UInt64,
subroutine,
urange,
)
from algopy.op import (
btoi,
bzero,
extract,
extract_uint16,
getbit,
itob,
replace,
select_uint64,
setbit_bytes,
substring,
)
UINT16_SIZE = 2
UINT64_SIZE = 8
UINT16_OFFSET = UINT64_SIZE - UINT16_SIZE
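# ARC-4 dynamic arrays are encoded as a big-endian uint16 length prefix followed by the
# element data; for dynamically sized elements, a "head" of uint16 offsets (one per element,
# relative to the start of the head) precedes the concatenated element data (the "tail")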
@subroutine
def dynamic_array_pop_bit(array: Bytes) -> tuple[Bytes, Bytes]:
"""
Pop the last item from an arc4 dynamic array of arc4 encoded boolean items
array: The bytes for the source array
returns: tuple of (The popped item, The updated bytes for the source array)
"""
array_length = extract_uint16(array, 0)
length_minus_1 = array_length - 1
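    # the last two bytes of the big-endian uint64 encoding are its uint16 encoding,
    # assuming the value fits in a uint16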
result = replace(array, 0, extract(itob(length_minus_1), UINT16_OFFSET, 0))
popped_location = length_minus_1 + UINT16_SIZE * 8
popped = setbit_bytes(b"\x00", 0, getbit(result, popped_location))
result = setbit_bytes(result, popped_location, 0)
result = substring(result, 0, UINT16_SIZE + ((length_minus_1 + 7) // 8))
return popped, result
@subroutine
def dynamic_array_pop_fixed_size(array: Bytes, fixed_byte_size: UInt64) -> tuple[Bytes, Bytes]:
"""
Pop the last item from an arc4 dynamic array of fixed sized items
array: The bytes for the source array
returns: tuple of (The popped item, The updated bytes for the source array)
"""
array_length = extract_uint16(array, 0)
length_minus_1 = array_length - 1
result = replace(array, 0, extract(itob(length_minus_1), UINT16_OFFSET, 0))
item_location = result.length - fixed_byte_size
popped = extract(result, item_location, fixed_byte_size)
result = substring(result, 0, item_location)
return popped, result
@subroutine
def dynamic_array_pop_byte_length_head(array: Bytes) -> tuple[Bytes, Bytes]:
"""
Pop the last item from an arc4 dynamic array of items that are prefixed with their length in
bytes, e.g. arc4.String, arc4.DynamicBytes
    array: The bytes for the source array
returns: tuple of (The popped item, The updated bytes for the source array)
"""
array_length = extract_uint16(array, 0)
length_minus_1 = array_length - 1
popped_header_offset = length_minus_1 * UINT16_SIZE
head_and_tail = extract(array, UINT16_SIZE, 0)
popped_offset = extract_uint16(head_and_tail, popped_header_offset)
popped = substring(head_and_tail, popped_offset, head_and_tail.length)
head_and_tail = substring(head_and_tail, 0, popped_header_offset) + substring(
head_and_tail, popped_header_offset + 2, popped_offset
)
updated = extract(
itob(length_minus_1), UINT16_OFFSET, UINT16_SIZE
) + recalculate_head_for_elements_with_byte_length_head(
array_head_and_tail=head_and_tail, length=length_minus_1, start_at_index=UInt64(0)
)
return popped, updated
@subroutine
def dynamic_array_pop_dynamic_element(array: Bytes) -> tuple[Bytes, Bytes]:
"""
Pop the last item from an arc4 dynamic array of dynamically sized items
array: The bytes for the source array
returns: tuple of (The popped item, The updated bytes for the source array)
"""
array_length = extract_uint16(array, 0)
length_minus_1 = array_length - 1
popped_header_offset = length_minus_1 * UINT16_SIZE
head_and_tail = extract(array, UINT16_SIZE, 0)
popped_offset = extract_uint16(head_and_tail, popped_header_offset)
popped = substring(head_and_tail, popped_offset, head_and_tail.length)
new_head = Bytes()
for head_offset in urange(0, length_minus_1 * UINT16_SIZE, UINT16_SIZE):
item_offset = extract_uint16(head_and_tail, head_offset)
item_offset -= UINT16_SIZE
new_head += extract(itob(item_offset), UINT16_OFFSET, UINT16_SIZE)
updated = (
extract(itob(length_minus_1), UINT16_OFFSET, UINT16_SIZE)
+ new_head
+ substring(head_and_tail, popped_header_offset + UINT16_SIZE, popped_offset)
)
return popped, updated
@subroutine
def dynamic_array_concat_bits(
*, array: Bytes, new_items_bytes: Bytes, new_items_count: UInt64, is_packed: bool
) -> Bytes:
"""
Concat data to an arc4 dynamic array of arc4 encoded boolean values
array: The bytes for the source array
new_items_bytes: Either the data portion of an arc4 packed array of booleans
or
a sparse array of concatenated arc4 booleans
new_items_count: The count of new items being added
is_packed: True if new_items_bytes represents a packed array, else False
returns: The updated bytes for the source array
"""
array_length = extract_uint16(array, 0)
new_array_length = array_length + new_items_count
new_array_length_b = extract(itob(new_array_length), UINT16_OFFSET, 0)
result = replace(array, 0, new_array_length_b)
current_bytes = (array_length + 7) // 8
required_bytes = (new_array_length + 7) // 8
if current_bytes < required_bytes:
result += bzero(required_bytes - current_bytes)
write_offset = array_length + 8 * UINT16_SIZE
for i in urange(0, new_items_count, UInt64(1) if is_packed else UInt64(8)):
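        # packed items are contiguous bits; unpacked items are whole bytes with the
        # value in the high bit, hence the 8-bit read stride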
result = setbit_bytes(result, write_offset, getbit(new_items_bytes, i))
write_offset += 1
return result
@subroutine
def dynamic_array_concat_byte_length_head(
array: Bytes, new_items_bytes: Bytes, new_items_count: UInt64
) -> Bytes:
"""
    Concat new items to an arc4 dynamic array of items that are prefixed with
    their byte length
    array: The bytes of the source array
    new_items_bytes: The bytes for all new items, concatenated
    new_items_count: The count of new items being added
returns: The updated bytes for the source array
"""
array_length = extract_uint16(array, 0)
new_length = array_length + new_items_count
header_end = array_length * UINT16_SIZE + 2
return extract(
itob(new_length), UINT16_OFFSET, UINT16_SIZE
) + recalculate_head_for_elements_with_byte_length_head(
array_head_and_tail=(
substring(array, 2, header_end)
+ bzero(new_items_count * UINT16_SIZE)
+ substring(array, header_end, array.length)
+ new_items_bytes
),
length=new_length,
start_at_index=UInt64(0),
)
@subroutine
def dynamic_array_concat_dynamic_element(
*,
array_items_count: UInt64,
array_head_and_tail: Bytes,
new_items_count: UInt64,
new_head_and_tail: Bytes,
) -> Bytes:
new_head = Bytes()
item_offset_adjustment = new_items_count * UINT16_SIZE
for head_offset in urange(0, array_items_count * UINT16_SIZE, UINT16_SIZE):
item_offset = extract_uint16(array_head_and_tail, head_offset)
new_head += extract(itob(item_offset_adjustment + item_offset), UINT16_OFFSET, UINT16_SIZE)
item_offset_adjustment = array_head_and_tail.length
for head_offset in urange(0, new_items_count * UINT16_SIZE, UINT16_SIZE):
item_offset = extract_uint16(new_head_and_tail, head_offset)
new_head += extract(itob(item_offset_adjustment + item_offset), UINT16_OFFSET, UINT16_SIZE)
return (
extract(itob(array_items_count + new_items_count), UINT16_OFFSET, UINT16_SIZE)
+ new_head
+ substring(
array_head_and_tail, array_items_count * UINT16_SIZE, array_head_and_tail.length
)
+ substring(new_head_and_tail, new_items_count * UINT16_SIZE, new_head_and_tail.length)
)
@subroutine
def dynamic_array_replace_byte_length_head(array: Bytes, new_item: Bytes, index: UInt64) -> Bytes:
"""
Replace a single item in an arc4 dynamic array of items that are prefixed with
their byte length
array: The bytes of the source array
    new_item: The bytes of the new item
    index: The index of the item to replace
returns: The updated bytes for the source array
"""
size_b = substring(array, 0, UINT16_SIZE)
array_length = btoi(size_b)
return size_b + static_array_replace_byte_length_head(
array_head_and_tail=extract(array, UINT16_SIZE, 0),
new_item=new_item,
index=index,
array_length=array_length,
)
@subroutine
def dynamic_array_replace_dynamic_element(source: Bytes, new_item: Bytes, index: UInt64) -> Bytes:
size_b = substring(source, 0, UINT16_SIZE)
array_length = btoi(size_b)
return size_b + static_array_replace_dynamic_element(
array_head_and_tail=extract(source, UINT16_SIZE, 0),
new_item=new_item,
index=index,
array_length=array_length,
)
@subroutine
def static_array_replace_dynamic_element(
*, array_head_and_tail: Bytes, new_item: Bytes, index: UInt64, array_length: UInt64
) -> Bytes:
original_offset = extract_uint16(array_head_and_tail, index * 2)
next_item_offset = extract_uint16(array_head_and_tail, (index + 1) * 2)
end_of_tail = array_head_and_tail.length
is_before_end = array_length - index - 1
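    # when replacing the last item there is no next head entry, so the item extends
    # to the end of the tail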
end_offset = select_uint64(end_of_tail, next_item_offset, is_before_end)
original_item_length = end_offset - original_offset
new_item_length = new_item.length
new_head_and_tail = (
substring(array_head_and_tail, 0, original_offset)
+ new_item
+ substring(array_head_and_tail, end_offset, end_of_tail)
)
for head_offset in urange((index + 1) * 2, array_length * 2, 2):
tail_offset = extract_uint16(new_head_and_tail, head_offset)
tail_offset += new_item_length
tail_offset -= original_item_length
tail_offset_bytes = extract(itob(tail_offset), UINT16_OFFSET, UINT16_SIZE)
new_head_and_tail = replace(new_head_and_tail, head_offset, tail_offset_bytes)
return new_head_and_tail
@subroutine
def static_array_replace_byte_length_head(
array_head_and_tail: Bytes, new_item: Bytes, index: UInt64, array_length: UInt64
) -> Bytes:
"""
    Replace a single item in an arc4 static array of items that are prefixed with
    their byte length
    array_head_and_tail: The head and tail bytes of the source array
    new_item: The bytes of the new item
    index: The index of the item to replace
array_length: The length of the array
returns: The updated bytes for the source array
"""
assert index < array_length, "Index out of bounds"
offset_for_index = extract_uint16(array_head_and_tail, index * UINT16_SIZE)
old_item_length = extract_uint16(array_head_and_tail, offset_for_index)
old_item_end = offset_for_index + old_item_length + UINT16_SIZE
return recalculate_head_for_elements_with_byte_length_head(
array_head_and_tail=substring(array_head_and_tail, 0, offset_for_index)
+ new_item
+ substring(array_head_and_tail, old_item_end, array_head_and_tail.length),
length=array_length,
start_at_index=index,
)
@subroutine
def recalculate_head_for_elements_with_byte_length_head(
array_head_and_tail: Bytes, length: UInt64, start_at_index: UInt64
) -> Bytes:
"""
    Recalculates the head (uint16 offsets) of an arc4 array whose elements are each
    prefixed with their length in bytes as a uint16
    array_head_and_tail: The array's head and tail data, excluding any length prefix
    length: The number of elements in the array
start_at_index: Optionally start at a non-zero index for performance optimisation. The offset
at this index is assumed to be correct if start_at_index is not 0
returns: The updated bytes for the source array
"""
tail_offset = select_uint64(
length * UINT16_SIZE,
extract_uint16(array_head_and_tail, start_at_index * UINT16_SIZE),
start_at_index, # use length * UINT16_SIZE if 0 otherwise inspect head
)
for head_offset in urange(start_at_index * UINT16_SIZE, length * UINT16_SIZE, UINT16_SIZE):
tail_offset_bytes = extract(itob(tail_offset), UINT16_OFFSET, UINT16_SIZE)
array_head_and_tail = replace(array_head_and_tail, head_offset, tail_offset_bytes)
tail_offset += extract_uint16(array_head_and_tail, tail_offset) + UINT16_SIZE
head_offset += UINT16_SIZE
return array_head_and_tail
| algorandfoundation/puya | src/_puya_lib/arc4.py | Python | NOASSERTION | 12,234 |
from algopy import Bytes, UInt64, op, subroutine
@subroutine
def is_substring(item: Bytes, sequence: Bytes) -> bool:
"""
Search for a shorter string in a larger one.
"""
start = UInt64(0)
while start + item.length <= sequence.length:
if item == op.substring(sequence, start, start + item.length):
return True
start += 1
return False
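# usage sketch (values are illustrative): is_substring(Bytes(b"lo wo"), Bytes(b"hello world"))
# would return True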
| algorandfoundation/puya | src/_puya_lib/bytes_.py | Python | NOASSERTION | 388 |
from algopy import (
Bytes,
OnCompleteAction,
OpUpFeeSource,
TransactionType,
UInt64,
op,
subroutine,
)
@subroutine
def ensure_budget(required_budget: UInt64, fee_source: OpUpFeeSource) -> None:
# A budget buffer is necessary to deal with an edge case of ensure_budget():
# if the current budget is equal to or only slightly higher than the
# required budget then it's possible for ensure_budget() to return with a
# current budget less than the required budget. The buffer prevents this
# from being the case.
required_budget_with_buffer = required_budget + 10
while required_budget_with_buffer > op.Global.opcode_budget():
op.ITxnCreate.begin()
op.ITxnCreate.set_type_enum(TransactionType.ApplicationCall)
op.ITxnCreate.set_on_completion(OnCompleteAction.DeleteApplication)
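        # 0x068101 is a minimal AVM v6 program ("pushint 1"), i.e. one that always approves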
op.ITxnCreate.set_approval_program(Bytes.from_hex("068101"))
op.ITxnCreate.set_clear_state_program(Bytes.from_hex("068101"))
match fee_source:
case OpUpFeeSource.GroupCredit:
op.ITxnCreate.set_fee(0)
case OpUpFeeSource.AppAccount:
op.ITxnCreate.set_fee(op.Global.min_txn_fee)
# case OpUpFeeSource.Any:
# any means no fee set
op.ITxnCreate.submit()
| algorandfoundation/puya | src/_puya_lib/util.py | Python | NOASSERTION | 1,326 |
algorandfoundation/puya | src/puya/__init__.py | Python | NOASSERTION | 0 |
|
import argparse
from importlib.metadata import version
from pathlib import Path
import attrs
from puya.log import LogFormat, LogLevel, configure_logging
from puya.main import main
@attrs.define(kw_only=True)
class _PuyaCLIArgs:
options: Path | None = None
awst: Path | None = None
source_annotations: Path | None = None
log_level: LogLevel = LogLevel.info
log_format: LogFormat = LogFormat.default
def cli() -> None:
parser = argparse.ArgumentParser(
prog="puya", formatter_class=argparse.ArgumentDefaultsHelpFormatter
)
# TODO: use version of puya instead once package is split
parser.add_argument("--version", action="version", version=f"%(prog)s {version('puyapy')}")
parser.add_argument(
"--log-level", type=LogLevel.from_string, choices=list(LogLevel), default=LogLevel.info
)
parser.add_argument(
"--log-format",
type=LogFormat.from_string,
choices=list(LogFormat),
default=LogFormat.default,
)
parser.add_argument("--options", type=Path, required=True)
parser.add_argument("--awst", type=Path, required=True)
parser.add_argument("--source-annotations", type=Path)
parsed_args = _PuyaCLIArgs()
parser.parse_args(namespace=parsed_args)
configure_logging(min_log_level=parsed_args.log_level, log_format=parsed_args.log_format)
assert parsed_args.options
options_json = parsed_args.options.read_text("utf8")
assert parsed_args.awst
awst_json = parsed_args.awst.read_text("utf8")
source_annotations_json = None
if parsed_args.source_annotations:
source_annotations_json = parsed_args.source_annotations.read_text("utf8")
main(
options_json=options_json,
awst_json=awst_json,
source_annotations_json=source_annotations_json,
)
if __name__ == "__main__":
cli()
| algorandfoundation/puya | src/puya/__main__.py | Python | NOASSERTION | 1,862 |
ZERO_ADDRESS = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAY5HFKQ"
ENCODED_ADDRESS_LENGTH = 58
PUBLIC_KEY_HASH_LENGTH = 32
ADDRESS_CHECKSUM_LENGTH = 4
MAX_BIGUINT_BITS = 512
MAX_UINT64 = 2**64 - 1
MAX_BIGUINT_BYTES = MAX_BIGUINT_BITS // 8
MAX_BYTES_LENGTH = 4096
MAX_SCRATCH_SLOT_NUMBER = 255
MAX_GLOBAL_STATE_KEYS = 64
MAX_LOCAL_STATE_KEYS = 16
MAX_STATE_KEY_LENGTH = 64
MIN_BOX_KEY_LENGTH = 1
MAX_BOX_KEY_LENGTH = 64
MAX_TRANSACTION_GROUP_SIZE = 16
MAX_APP_PAGE_SIZE = 2048
HASH_PREFIX_PROGRAM = b"Program"
"""Represents the prefix added to a program before hashing e.g. for a LogicSigs address"""
# Which AVM versions this version of puya supports targeting.
# This will typically just be the current mainnet version, and potentially vNext if it
# doesn't contain breaking changes
SUPPORTED_AVM_VERSIONS = [10, 11, 12]
# Which AVM version is currently deployed to mainnet
MAINNET_AVM_VERSION = 10
| algorandfoundation/puya | src/puya/algo_constants.py | Python | NOASSERTION | 925 |