code
stringlengths 0
390k
| repo_name
stringclasses 1
value | path
stringlengths 12
69
| language
stringclasses 1
value | license
stringclasses 1
value | size
int64 0
390k
|
---|---|---|---|---|---|
# ruff: noqa
# Configuration file for the Sphinx documentation builder.
#
# For the full list of built-in configuration values, see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Project information -----------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information
project = "Algorand Python"
copyright = "2024, Algorand Foundation" # noqa: A001
author = "Algorand Foundation"
# -- General configuration ---------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration
extensions = [
    "sphinx.ext.githubpages",
    "sphinx.ext.intersphinx",
    "sphinx_copybutton",
    "myst_parser",
    "autodoc2",
]
templates_path = ["_templates"]
exclude_patterns = ["_build", "Thumbs.db", ".DS_Store", "architecture-decisions/*.md"]
# cross-reference targets resolved against the external Python docs inventory
intersphinx_mapping = {
    "python": ("https://docs.python.org/3", None),
}
# warning exclusions
suppress_warnings = [
    "myst.xref_missing",
    "autodoc2.dup_item",
]
nitpick_ignore = [
    ("py:class", "algopy.arc4.AllowedOnCompletes"),
]
# ignore unresolvable references to private algopy symbols
nitpick_ignore_regex = [
    ("py:class", r"algopy.*\._.*"),
]
# -- Options for HTML output -------------------------------------------------
# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output
html_theme = "furo"
html_static_path = ["_static"]
html_css_files = [
    "custom.css",
]
# wrap rendered signatures longer than this many characters
python_maximum_signature_line_length = 80
# -- Options for myst ---
myst_enable_extensions = [
    "colon_fence",
    "fieldlist",
]
# -- Options for autodoc2 ---
# API docs are generated from the algopy type stubs rather than an installed package
autodoc2_packages = [
    {
        "path": "./algopy-stubs",
        "module": "algopy",
        "auto_mode": False,
    },
]
autodoc2_docstring_parser_regexes = [
    # this will render all docstrings as Markdown
    (r".*", "myst"),
]
autodoc2_hidden_objects = [
    "undoc",
]
autodoc2_hidden_regexes = [
    r".*\.__subclasshook__", # inherited from Protocol
]
autodoc2_class_inheritance = False
autodoc2_module_all_regexes = [r"algopy.*"]
autodoc2_render_plugin = "myst"
autodoc2_sort_names = True
autodoc2_index_template = None
autodoc2_docstrings = "all"
|
algorandfoundation/puya
|
docs/conf.py
|
Python
|
NOASSERTION
| 2,216 |
algorandfoundation/puya
|
examples/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
# WARNING: This code is provided for example only. Do NOT deploy to mainnet.
from algopy import (
Account,
ARC4Contract,
Asset,
Global,
Txn,
UInt64,
arc4,
gtxn,
itxn,
op,
subroutine,
)
# Total supply of the pool tokens
TOTAL_SUPPLY = 10_000_000_000
# scale helps with precision when doing computation for
# the number of tokens to transfer
SCALE = 1000
# Fee for swaps, 5 represents 0.5% ((fee / scale)*100)
FEE = 5
# Fee-adjusted multiplier applied to swap input amounts (scale minus fee)
FACTOR = SCALE - FEE
class ConstantProductAMM(ARC4Contract):
def __init__(self) -> None:
# init runs whenever the txn's app ID is zero, and runs first
# so if we have multiple create methods, this can contain common code.
# The asset id of asset A
self.asset_a = Asset()
# The asset id of asset B
self.asset_b = Asset()
# The current governor of this contract, allowed to do admin type actions
self.governor = Txn.sender
# The asset id of the Pool Token, used to track share of pool the holder may recover
self.pool_token = Asset()
# The ratio between assets (A*Scale/B)
self.ratio = UInt64(0)
@arc4.abimethod()
def set_governor(self, new_governor: Account) -> None:
"""sets the governor of the contract, may only be called by the current governor"""
self._check_is_governor()
self.governor = new_governor
@arc4.abimethod()
def bootstrap(self, seed: gtxn.PaymentTransaction, a_asset: Asset, b_asset: Asset) -> UInt64:
"""bootstraps the contract by opting into the assets and creating the pool token.
Note this method will fail if it is attempted more than once on the same contract
since the assets and pool token application state values are marked as static and
cannot be overridden.
Args:
seed: Initial Payment transaction to the app account so it can opt in to assets
and create pool token.
a_asset: One of the two assets this pool should allow swapping between.
b_asset: The other of the two assets this pool should allow swapping between.
Returns:
The asset id of the pool token created.
"""
assert not self.pool_token, "application has already been bootstrapped"
self._check_is_governor()
assert Global.group_size == 2, "group size not 2"
assert seed.receiver == Global.current_application_address, "receiver not app address"
assert seed.amount >= 300_000, "amount minimum not met" # 0.3 Algos
assert a_asset.id < b_asset.id, "asset a must be less than asset b"
self.asset_a = a_asset
self.asset_b = b_asset
self.pool_token = self._create_pool_token()
self._do_opt_in(self.asset_a)
self._do_opt_in(self.asset_b)
return self.pool_token.id
@arc4.abimethod(
default_args={
"pool_asset": "pool_token",
"a_asset": "asset_a",
"b_asset": "asset_b",
},
)
def mint(
self,
a_xfer: gtxn.AssetTransferTransaction,
b_xfer: gtxn.AssetTransferTransaction,
pool_asset: Asset,
a_asset: Asset,
b_asset: Asset,
) -> None:
"""mint pool tokens given some amount of asset A and asset B.
Given some amount of Asset A and Asset B in the transfers, mint some number of pool
tokens commensurate with the pools current balance and circulating supply of
pool tokens.
Args:
a_xfer: Asset Transfer Transaction of asset A as a deposit to the pool in
exchange for pool tokens.
b_xfer: Asset Transfer Transaction of asset B as a deposit to the pool in
exchange for pool tokens.
pool_asset: The asset ID of the pool token so that we may distribute it.
a_asset: The asset ID of the Asset A so that we may inspect our balance.
b_asset: The asset ID of the Asset B so that we may inspect our balance.
"""
self._check_bootstrapped()
# well-formed mint
assert pool_asset == self.pool_token, "asset pool incorrect"
assert a_asset == self.asset_a, "asset a incorrect"
assert b_asset == self.asset_b, "asset b incorrect"
assert a_xfer.sender == Txn.sender, "sender invalid"
assert b_xfer.sender == Txn.sender, "sender invalid"
# valid asset a xfer
assert (
a_xfer.asset_receiver == Global.current_application_address
), "receiver not app address"
assert a_xfer.xfer_asset == self.asset_a, "asset a incorrect"
assert a_xfer.asset_amount > 0, "amount minimum not met"
# valid asset b xfer
assert (
b_xfer.asset_receiver == Global.current_application_address
), "receiver not app address"
assert b_xfer.xfer_asset == self.asset_b, "asset b incorrect"
assert b_xfer.asset_amount > 0, "amount minimum not met"
to_mint = tokens_to_mint(
pool_balance=self._current_pool_balance(),
a_balance=self._current_a_balance(),
b_balance=self._current_b_balance(),
a_amount=a_xfer.asset_amount,
b_amount=b_xfer.asset_amount,
)
assert to_mint > 0, "send amount too low"
# mint tokens
do_asset_transfer(receiver=Txn.sender, asset=self.pool_token, amount=to_mint)
self._update_ratio()
@arc4.abimethod(
default_args={
"pool_asset": "pool_token",
"a_asset": "asset_a",
"b_asset": "asset_b",
},
)
def burn(
self,
pool_xfer: gtxn.AssetTransferTransaction,
pool_asset: Asset,
a_asset: Asset,
b_asset: Asset,
) -> None:
"""burn pool tokens to get back some amount of asset A and asset B
Args:
pool_xfer: Asset Transfer Transaction of the pool token for the amount the
sender wishes to redeem
pool_asset: Asset ID of the pool token so we may inspect balance.
a_asset: Asset ID of Asset A so we may inspect balance and distribute it
b_asset: Asset ID of Asset B so we may inspect balance and distribute it
"""
self._check_bootstrapped()
assert pool_asset == self.pool_token, "asset pool incorrect"
assert a_asset == self.asset_a, "asset a incorrect"
assert b_asset == self.asset_b, "asset b incorrect"
assert (
pool_xfer.asset_receiver == Global.current_application_address
), "receiver not app address"
assert pool_xfer.asset_amount > 0, "amount minimum not met"
assert pool_xfer.xfer_asset == self.pool_token, "asset pool incorrect"
assert pool_xfer.sender == Txn.sender, "sender invalid"
# Get the total number of tokens issued
# !important: this happens prior to receiving the current axfer of pool tokens
pool_balance = self._current_pool_balance()
a_amt = tokens_to_burn(
pool_balance=pool_balance,
supply=self._current_a_balance(),
amount=pool_xfer.asset_amount,
)
b_amt = tokens_to_burn(
pool_balance=pool_balance,
supply=self._current_b_balance(),
amount=pool_xfer.asset_amount,
)
# Send back commensurate amt of a
do_asset_transfer(receiver=Txn.sender, asset=self.asset_a, amount=a_amt)
# Send back commensurate amt of b
do_asset_transfer(receiver=Txn.sender, asset=self.asset_b, amount=b_amt)
self._update_ratio()
@arc4.abimethod(
default_args={
"a_asset": "asset_a",
"b_asset": "asset_b",
},
)
def swap(
self,
swap_xfer: gtxn.AssetTransferTransaction,
a_asset: Asset,
b_asset: Asset,
) -> None:
"""Swap some amount of either asset A or asset B for the other
Args:
swap_xfer: Asset Transfer Transaction of either Asset A or Asset B
a_asset: Asset ID of asset A so we may inspect balance and possibly transfer it
b_asset: Asset ID of asset B so we may inspect balance and possibly transfer it
"""
self._check_bootstrapped()
assert a_asset == self.asset_a, "asset a incorrect"
assert b_asset == self.asset_b, "asset b incorrect"
assert swap_xfer.asset_amount > 0, "amount minimum not met"
assert swap_xfer.sender == Txn.sender, "sender invalid"
match swap_xfer.xfer_asset:
case self.asset_a:
in_supply = self._current_b_balance()
out_supply = self._current_a_balance()
out_asset = self.asset_a
case self.asset_b:
in_supply = self._current_a_balance()
out_supply = self._current_b_balance()
out_asset = self.asset_b
case _:
assert False, "asset id incorrect"
to_swap = tokens_to_swap(
in_amount=swap_xfer.asset_amount, in_supply=in_supply, out_supply=out_supply
)
assert to_swap > 0, "send amount too low"
do_asset_transfer(receiver=Txn.sender, asset=out_asset, amount=to_swap)
self._update_ratio()
@subroutine
def _check_bootstrapped(self) -> None:
assert self.pool_token, "bootstrap method needs to be called first"
@subroutine
def _update_ratio(self) -> None:
a_balance = self._current_a_balance()
b_balance = self._current_b_balance()
self.ratio = a_balance * SCALE // b_balance
@subroutine
def _check_is_governor(self) -> None:
assert (
Txn.sender == self.governor
), "Only the account set in global_state.governor may call this method"
@subroutine
def _create_pool_token(self) -> Asset:
return (
itxn.AssetConfig(
asset_name=b"DPT-" + self.asset_a.unit_name + b"-" + self.asset_b.unit_name,
unit_name=b"dbt",
total=TOTAL_SUPPLY,
decimals=3,
manager=Global.current_application_address,
reserve=Global.current_application_address,
)
.submit()
.created_asset
)
@subroutine
def _do_opt_in(self, asset: Asset) -> None:
do_asset_transfer(
receiver=Global.current_application_address,
asset=asset,
amount=UInt64(0),
)
@subroutine
def _current_pool_balance(self) -> UInt64:
return self.pool_token.balance(Global.current_application_address)
@subroutine
def _current_a_balance(self) -> UInt64:
return self.asset_a.balance(Global.current_application_address)
@subroutine
def _current_b_balance(self) -> UInt64:
return self.asset_b.balance(Global.current_application_address)
##############
# Mathy methods
##############
# Notes:
# 1) During arithmetic operations, depending on the inputs, these methods may overflow
# the max uint64 value. This will cause the program to immediately terminate.
#
# Care should be taken to fully understand the limitations of these functions and if
# required should be swapped out for the appropriate byte math operations.
#
# 2) When doing division, any remainder is truncated from the result.
#
# Care should be taken to ensure that _when_ the truncation happens,
# it does so in favor of the contract. This is a subtle security issue that,
# if mishandled, could cause the balance of the contract to be drained.
@subroutine
def tokens_to_mint(
    *,
    pool_balance: UInt64,
    a_balance: UInt64,
    b_balance: UInt64,
    a_amount: UInt64,
    b_amount: UInt64,
) -> UInt64:
    """Calculate the pool tokens owed for a deposit of asset A and asset B.

    The first ever deposit mints sqrt(a*b) - SCALE; subsequent deposits mint
    in proportion to the smaller of the two deposit ratios, so the pool can
    never be over-issued by an unbalanced deposit.
    """
    # the balances already include the incoming transfers, so equality with the
    # deposit amounts means the pool was empty before this group
    if a_balance == a_amount and b_balance == b_amount:
        return op.sqrt(a_amount * b_amount) - SCALE
    minted_so_far = TOTAL_SUPPLY - pool_balance
    # deposit ratios relative to the pre-deposit reserves, scaled for precision
    ratio_a = SCALE * a_amount // (a_balance - a_amount)
    ratio_b = SCALE * b_amount // (b_balance - b_amount)
    lesser_ratio = ratio_a if ratio_a < ratio_b else ratio_b
    return lesser_ratio * minted_so_far // SCALE
@subroutine
def tokens_to_burn(*, pool_balance: UInt64, supply: UInt64, amount: UInt64) -> UInt64:
    """Return the pro-rata share of `supply` redeemable for `amount` pool tokens.

    `pool_balance` is read before the redeemed tokens arrive, so subtracting
    both it and `amount` from TOTAL_SUPPLY yields the circulating supply
    excluding the tokens being burned.
    """
    circulating = TOTAL_SUPPLY - pool_balance - amount
    return supply * amount // circulating
@subroutine
def tokens_to_swap(*, in_amount: UInt64, in_supply: UInt64, out_supply: UInt64) -> UInt64:
    """Constant-product swap output for `in_amount`, with the fee taken from the input."""
    # `in_supply` includes the just-received transfer; subtracting `in_amount`
    # recovers the pre-swap reserve, scaled to match the fee-adjusted input
    fee_adjusted_in = in_amount * FACTOR
    denominator = SCALE * (in_supply - in_amount) + fee_adjusted_in
    return fee_adjusted_in * out_supply // denominator
@subroutine
def do_asset_transfer(*, receiver: Account, asset: Asset, amount: UInt64) -> None:
    """Issue an inner asset-transfer transaction from the application account.

    A zero `amount` sent to the application's own address is how the app opts
    in to an asset (see ConstantProductAMM._do_opt_in).
    """
    itxn.AssetTransfer(
        xfer_asset=asset,
        asset_amount=amount,
        asset_receiver=receiver,
    ).submit()
|
algorandfoundation/puya
|
examples/amm/contract.py
|
Python
|
NOASSERTION
| 12,955 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class ConstantProductAMM(algopy.arc4.ARC4Client, typing.Protocol):
    # NOTE(review): generated client stub — per the header above, change the source
    # contract and regenerate rather than editing this file by hand
    @algopy.arc4.abimethod
    def set_governor(
        self,
        new_governor: algopy.Account,
    ) -> None:
        """
        sets the governor of the contract, may only be called by the current governor
        """
    @algopy.arc4.abimethod
    def bootstrap(
        self,
        seed: algopy.gtxn.PaymentTransaction,
        a_asset: algopy.Asset,
        b_asset: algopy.Asset,
    ) -> algopy.arc4.UIntN[typing.Literal[64]]:
        """
        bootstraps the contract by opting into the assets and creating the pool token.
        Note this method will fail if it is attempted more than once on the same contract since the assets and pool token application state values are marked as static and cannot be overridden.
        """
    @algopy.arc4.abimethod
    def mint(
        self,
        a_xfer: algopy.gtxn.AssetTransferTransaction,
        b_xfer: algopy.gtxn.AssetTransferTransaction,
        pool_asset: algopy.Asset,
        a_asset: algopy.Asset,
        b_asset: algopy.Asset,
    ) -> None:
        """
        mint pool tokens given some amount of asset A and asset B.
        Given some amount of Asset A and Asset B in the transfers, mint some number of pool tokens commensurate with the pools current balance and circulating supply of pool tokens.
        """
    @algopy.arc4.abimethod
    def burn(
        self,
        pool_xfer: algopy.gtxn.AssetTransferTransaction,
        pool_asset: algopy.Asset,
        a_asset: algopy.Asset,
        b_asset: algopy.Asset,
    ) -> None:
        """
        burn pool tokens to get back some amount of asset A and asset B
        """
    @algopy.arc4.abimethod
    def swap(
        self,
        swap_xfer: algopy.gtxn.AssetTransferTransaction,
        a_asset: algopy.Asset,
        b_asset: algopy.Asset,
    ) -> None:
        """
        Swap some amount of either asset A or asset B for the other
        """
|
algorandfoundation/puya
|
examples/amm/out/client_ConstantProductAMM.py
|
Python
|
NOASSERTION
| 2,067 |
import typing
from algopy import ARC4Contract, arc4
class Swapped(arc4.Struct):
    # ARC-4 struct matching the `Swapped(uint64,uint64)` event signature
    a: arc4.UInt64
    b: arc4.UInt64
class EventEmitter(ARC4Contract):
    """Demonstrates the equivalent ways of emitting ARC-28 events via arc4.emit."""

    @arc4.abimethod
    def emit_swapped(self, a: arc4.UInt64, b: arc4.UInt64) -> None:
        # all three forms emit the same `Swapped(uint64,uint64)` event: a struct
        # instance, a full signature string, and a bare event name.
        # Note the arguments are passed in (b, a) order in each case.
        arc4.emit(Swapped(b, a))
        arc4.emit("Swapped(uint64,uint64)", b, a)
        arc4.emit("Swapped", b, a)

    @arc4.abimethod()
    def emit_ufixed(
        self,
        a: arc4.BigUFixedNxM[typing.Literal[256], typing.Literal[16]],
        b: arc4.UFixedNxM[typing.Literal[64], typing.Literal[2]],
    ) -> None:
        # event with unsigned fixed-point arguments, identified by full signature
        arc4.emit("AnEvent(ufixed256x16,ufixed64x2)", a, b)
|
algorandfoundation/puya
|
examples/arc_28/contract.py
|
Python
|
NOASSERTION
| 633 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class EventEmitter(algopy.arc4.ARC4Client, typing.Protocol):
    # NOTE(review): generated client stub — per the header above, change the source
    # contract and regenerate rather than editing this file by hand
    @algopy.arc4.abimethod
    def emit_swapped(
        self,
        a: algopy.arc4.UIntN[typing.Literal[64]],
        b: algopy.arc4.UIntN[typing.Literal[64]],
    ) -> None: ...
    @algopy.arc4.abimethod
    def emit_ufixed(
        self,
        a: algopy.arc4.BigUFixedNxM[typing.Literal[256], typing.Literal[16]],
        b: algopy.arc4.UFixedNxM[typing.Literal[64], typing.Literal[2]],
    ) -> None: ...
|
algorandfoundation/puya
|
examples/arc_28/out/client_EventEmitter.py
|
Python
|
NOASSERTION
| 578 |
from algopy import (
Account,
ARC4Contract,
Asset,
Global,
LocalState,
Txn,
UInt64,
arc4,
gtxn,
itxn,
subroutine,
)
class Auction(ARC4Contract):
    """A simple ASA auction: the creator starts it, accounts outbid each other,
    and non-winning bidders reclaim their payments after the fact."""

    def __init__(self) -> None:
        # end timestamp; zero doubles as the "not started" flag
        self.auction_end = UInt64(0)
        # the current highest bid, in microalgos
        self.previous_bid = UInt64(0)
        # quantity of the auctioned ASA held by the app
        self.asa_amount = UInt64(0)
        # the ASA being auctioned
        self.asa = Asset()
        # account holding the current highest bid
        self.previous_bidder = Account()
        # per-account refundable balance
        self.claimable_amount = LocalState(UInt64, key="claim", description="The claimable amount")

    @arc4.abimethod
    def opt_into_asset(self, asset: Asset) -> None:
        # Only allow app creator to opt the app account into a ASA
        assert Txn.sender == Global.creator_address, "Only creator can opt in to ASA"
        # Verify a ASA hasn't already been opted into
        assert self.asa.id == 0, "ASA already opted in"
        # Save ASA ID in global state
        self.asa = asset
        # Submit opt-in transaction: 0 asset transfer to self
        itxn.AssetTransfer(
            asset_receiver=Global.current_application_address,
            xfer_asset=asset,
        ).submit()

    @arc4.abimethod
    def start_auction(
        self,
        starting_price: UInt64,
        length: UInt64,
        axfer: gtxn.AssetTransferTransaction,
    ) -> None:
        assert Txn.sender == Global.creator_address, "auction must be started by creator"
        # Ensure the auction hasn't already been started
        assert self.auction_end == 0, "auction already started"
        # Verify axfer
        assert (
            axfer.asset_receiver == Global.current_application_address
        ), "axfer must transfer to this app"
        # Set global state
        self.asa_amount = axfer.asset_amount
        self.auction_end = Global.latest_timestamp + length
        self.previous_bid = starting_price

    @arc4.abimethod
    def opt_in(self) -> None:
        # no-op: exists so accounts can opt in and receive local state
        pass

    @arc4.abimethod
    def bid(self, pay: gtxn.PaymentTransaction) -> None:
        # Ensure auction hasn't ended
        assert Global.latest_timestamp < self.auction_end, "auction has ended"
        # Verify payment transaction
        assert pay.sender == Txn.sender, "payment sender must match transaction sender"
        assert pay.amount > self.previous_bid, "Bid must be higher than previous bid"
        # set global state
        self.previous_bid = pay.amount
        self.previous_bidder = pay.sender
        # Update claimable amount
        # NOTE(review): this overwrites rather than accumulates any prior claimable
        # amount for the sender — confirm repeated bids are intended to behave this way
        self.claimable_amount[Txn.sender] = pay.amount

    @arc4.abimethod
    def claim_bids(self) -> None:
        amount = original_amount = self.claimable_amount[Txn.sender]
        # subtract previous bid if sender is previous bidder
        if Txn.sender == self.previous_bidder:
            amount -= self.previous_bid
        itxn.Payment(
            amount=amount,
            receiver=Txn.sender,
        ).submit()
        # retain the still-locked winning bid (if any) in local state
        self.claimable_amount[Txn.sender] = original_amount - amount

    @arc4.abimethod
    def claim_asset(self, asset: Asset) -> None:
        assert Global.latest_timestamp > self.auction_end, "auction has not ended"
        # NOTE(review): `asset` is not checked against `self.asa` here — verify this
        # cannot be abused with a different asset id before deploying anywhere real
        # Send ASA to previous bidder
        itxn.AssetTransfer(
            xfer_asset=asset,
            asset_close_to=self.previous_bidder,
            asset_receiver=self.previous_bidder,
            asset_amount=self.asa_amount,
        ).submit()

    @subroutine
    def delete_application(self) -> None:
        # close out the app account's remaining ALGO balance to the creator
        itxn.Payment(
            receiver=Global.creator_address,
            close_remainder_to=Global.creator_address,
        ).submit()

    def clear_state_program(self) -> bool:
        return True
|
algorandfoundation/puya
|
examples/auction/contract.py
|
Python
|
NOASSERTION
| 3,611 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class Auction(algopy.arc4.ARC4Client, typing.Protocol):
    # NOTE(review): generated client stub — per the header above, change the source
    # contract and regenerate rather than editing this file by hand
    @algopy.arc4.abimethod
    def opt_into_asset(
        self,
        asset: algopy.Asset,
    ) -> None: ...
    @algopy.arc4.abimethod
    def start_auction(
        self,
        starting_price: algopy.arc4.UIntN[typing.Literal[64]],
        length: algopy.arc4.UIntN[typing.Literal[64]],
        axfer: algopy.gtxn.AssetTransferTransaction,
    ) -> None: ...
    @algopy.arc4.abimethod
    def opt_in(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def bid(
        self,
        pay: algopy.gtxn.PaymentTransaction,
    ) -> None: ...
    @algopy.arc4.abimethod
    def claim_bids(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def claim_asset(
        self,
        asset: algopy.Asset,
    ) -> None: ...
|
algorandfoundation/puya
|
examples/auction/out/client_Auction.py
|
Python
|
NOASSERTION
| 914 |
import typing
from algopy import Box, BoxMap, BoxRef, Bytes, Global, String, Txn, UInt64, arc4, subroutine
# Fixed-size ARC-4 array of four uint8 values (used by BoxContract.arc4_box)
StaticInts: typing.TypeAlias = arc4.StaticArray[arc4.UInt8, typing.Literal[4]]
class BoxContract(arc4.ARC4Contract):
    """Exercises the Box, BoxMap and BoxRef storage proxies from application code.

    The assert-heavy method bodies act as on-chain self-tests of the proxies'
    read/write/delete/length behaviour; keep statement order intact, as several
    assertions depend on values written earlier in the same method.
    """

    def __init__(self) -> None:
        # box key defaults to the member name when not given explicitly
        self.box_a = Box(UInt64)
        self.box_b = Box[arc4.DynamicBytes](arc4.DynamicBytes, key="b")
        self.box_c = Box(arc4.String, key=b"BOX_C")
        self.box_d = Box(Bytes)
        # empty key_prefix: map keys are used as box names directly
        self.box_map = BoxMap(UInt64, String, key_prefix="")
        self.box_ref = BoxRef()

    @arc4.abimethod
    def set_boxes(self, a: UInt64, b: arc4.DynamicBytes, c: arc4.String) -> None:
        self.box_a.value = a
        self.box_b.value = b.copy()
        self.box_c.value = c
        self.box_d.value = b.native
        b_value = self.box_b.value.copy()
        assert self.box_b.value.length == b_value.length, "direct reference should match copy"
        self.box_a.value += 3
        # test .length
        assert self.box_a.length == 8
        assert self.box_b.length == b.bytes.length
        assert self.box_c.length == c.bytes.length
        assert self.box_d.length == b.native.length
        # test .value.bytes
        assert self.box_c.value.bytes[0] == c.bytes[0]
        assert self.box_c.value.bytes[-1] == c.bytes[-1]
        assert self.box_c.value.bytes[:-1] == c.bytes[:-1]
        assert self.box_c.value.bytes[:2] == c.bytes[:2]
        # test .value with Bytes type
        assert self.box_d.value[0] == b.native[0]
        assert self.box_d.value[-1] == b.native[-1]
        assert self.box_d.value[:-1] == b.native[:-1]
        assert self.box_d.value[:5] == b.native[:5]
        assert self.box_d.value[: UInt64(2)] == b.native[: UInt64(2)]

    @arc4.abimethod
    def check_keys(self) -> None:
        assert self.box_a.key == b"box_a", "box a key ok"
        assert self.box_b.key == b"b", "box b key ok"
        assert self.box_c.key == b"BOX_C", "box c key ok"

    @arc4.abimethod
    def delete_boxes(self) -> None:
        del self.box_a.value
        del self.box_b.value
        del self.box_c.value
        # after deletion, get() falls back to the supplied default
        assert self.box_a.get(default=UInt64(42)) == 42
        assert self.box_b.get(default=arc4.DynamicBytes(b"42")).native == b"42"
        assert self.box_c.get(default=arc4.String("42")) == "42"
        a, a_exists = self.box_a.maybe()
        assert not a_exists
        assert a == 0

    @arc4.abimethod
    def read_boxes(self) -> tuple[UInt64, Bytes, arc4.String]:
        # the +1/-1 round-trip exercises passing a Box proxy to a subroutine
        return get_box_value_plus_1(self.box_a) - 1, self.box_b.value.native, self.box_c.value

    @arc4.abimethod
    def boxes_exist(self) -> tuple[bool, bool, bool]:
        return bool(self.box_a), bool(self.box_b), bool(self.box_c)

    @arc4.abimethod
    def slice_box(self) -> None:
        box_0 = Box(Bytes, key=String("0"))
        box_0.value = Bytes(b"Testing testing 123")
        assert box_0.value[0:7] == b"Testing"
        # arc4.String is length-prefixed, hence the 2-byte offset when slicing raw bytes
        self.box_c.value = arc4.String("Hello")
        assert self.box_c.value.bytes[2:10] == b"Hello"

    @arc4.abimethod
    def arc4_box(self) -> None:
        box_d = Box(StaticInts, key=Bytes(b"d"))
        box_d.value = StaticInts(arc4.UInt8(0), arc4.UInt8(1), arc4.UInt8(2), arc4.UInt8(3))
        assert box_d.value[0] == 0
        assert box_d.value[1] == 1
        assert box_d.value[2] == 2
        assert box_d.value[3] == 3

    @arc4.abimethod
    def test_box_ref(self) -> None:
        # init ref, with valid key types
        box_ref = BoxRef(key="blob")
        assert not box_ref, "no data"
        box_ref = BoxRef(key=b"blob")
        assert not box_ref, "no data"
        box_ref = BoxRef(key=Bytes(b"blob"))
        assert not box_ref, "no data"
        box_ref = BoxRef(key=String("blob"))
        assert not box_ref, "no data"
        # create
        assert box_ref.create(size=32)
        assert box_ref, "has data"
        # manipulate data
        sender_bytes = Txn.sender.bytes
        app_address = Global.current_application_address.bytes
        value_3 = Bytes(b"hello")
        box_ref.replace(0, sender_bytes)
        box_ref.resize(8000)
        box_ref.splice(0, 0, app_address)
        box_ref.replace(64, value_3)
        prefix = box_ref.extract(0, 32 * 2 + value_3.length)
        assert prefix == app_address + sender_bytes + value_3
        # delete
        assert box_ref.delete()
        assert box_ref.key == b"blob"
        # query
        value, exists = box_ref.maybe()
        assert not exists
        assert value == b""
        assert box_ref.get(default=sender_bytes) == sender_bytes
        # update
        box_ref.put(sender_bytes + app_address)
        assert box_ref, "Blob exists"
        assert box_ref.length == 64
        assert get_box_ref_length(box_ref) == 64
        # instance box ref
        self.box_ref.create(size=UInt64(32))
        assert self.box_ref, "has data"
        self.box_ref.delete()

    @arc4.abimethod
    def box_map_test(self) -> None:
        key_0 = UInt64(0)
        key_1 = UInt64(1)
        value = String("Hmmmmm")
        self.box_map[key_0] = value
        assert self.box_map[key_0].bytes.length == value.bytes.length
        assert self.box_map.length(key_0) == value.bytes.length
        assert self.box_map.get(key_1, default=String("default")) == String("default")
        value, exists = self.box_map.maybe(key_1)
        assert not exists
        assert key_0 in self.box_map
        assert self.box_map.key_prefix == b""
        # test box map not assigned to the class and passed to subroutine
        tmp_box_map = BoxMap(UInt64, String, key_prefix=Bytes())
        tmp_box_map[key_1] = String("hello")
        # the subroutine reads key + 1, so passing 0 fetches the entry at key 1
        assert get_box_map_value_from_key_plus_1(tmp_box_map, UInt64(0)) == "hello"
        del tmp_box_map[key_1]

    @arc4.abimethod
    def box_map_set(self, key: UInt64, value: String) -> None:
        self.box_map[key] = value

    @arc4.abimethod
    def box_map_get(self, key: UInt64) -> String:
        return self.box_map[key]

    @arc4.abimethod
    def box_map_del(self, key: UInt64) -> None:
        del self.box_map[key]

    @arc4.abimethod
    def box_map_exists(self, key: UInt64) -> bool:
        return key in self.box_map
@subroutine
def get_box_value_plus_1(box: Box[UInt64]) -> UInt64:
    """Demonstrates passing a Box proxy to a subroutine."""
    return box.value + 1
@subroutine
def get_box_ref_length(ref: BoxRef) -> UInt64:
    """Demonstrates passing a BoxRef proxy to a subroutine."""
    return ref.length
@subroutine
def get_box_map_value_from_key_plus_1(box_map: BoxMap[UInt64, String], key: UInt64) -> String:
    """Reads the entry at key + 1, not key — callers pass UInt64(0) to fetch key 1."""
    return box_map[key + 1]
|
algorandfoundation/puya
|
examples/box_storage/contract.py
|
Python
|
NOASSERTION
| 6,493 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class BoxContract(algopy.arc4.ARC4Client, typing.Protocol):
    # NOTE(review): generated client stub — per the header above, change the source
    # contract and regenerate rather than editing this file by hand
    @algopy.arc4.abimethod
    def set_boxes(
        self,
        a: algopy.arc4.UIntN[typing.Literal[64]],
        b: algopy.arc4.DynamicBytes,
        c: algopy.arc4.String,
    ) -> None: ...
    @algopy.arc4.abimethod
    def check_keys(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def delete_boxes(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def read_boxes(
        self,
    ) -> algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.DynamicBytes, algopy.arc4.String]: ...
    @algopy.arc4.abimethod
    def boxes_exist(
        self,
    ) -> algopy.arc4.Tuple[algopy.arc4.Bool, algopy.arc4.Bool, algopy.arc4.Bool]: ...
    @algopy.arc4.abimethod
    def slice_box(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def arc4_box(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def test_box_ref(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def box_map_test(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod
    def box_map_set(
        self,
        key: algopy.arc4.UIntN[typing.Literal[64]],
        value: algopy.arc4.String,
    ) -> None: ...
    @algopy.arc4.abimethod
    def box_map_get(
        self,
        key: algopy.arc4.UIntN[typing.Literal[64]],
    ) -> algopy.arc4.String: ...
    @algopy.arc4.abimethod
    def box_map_del(
        self,
        key: algopy.arc4.UIntN[typing.Literal[64]],
    ) -> None: ...
    @algopy.arc4.abimethod
    def box_map_exists(
        self,
        key: algopy.arc4.UIntN[typing.Literal[64]],
    ) -> algopy.arc4.Bool: ...
|
algorandfoundation/puya
|
examples/box_storage/out/client_BoxContract.py
|
Python
|
NOASSERTION
| 1,776 |
from algopy import Bytes, Contract, Txn, UInt64, log, op, subroutine
# Operation selectors expected in the first application argument
ADD = 1
SUB = 2
MUL = 3
DIV = 4
@subroutine
def itoa(i: UInt64) -> Bytes:
    """Render an unsigned integer as its decimal ASCII representation."""
    alphabet = Bytes(b"0123456789")
    base = alphabet.length
    # single digit: index straight into the digit alphabet
    if i < base:
        return alphabet[i]
    # otherwise render the leading digits recursively, then append the last one
    return itoa(i // base) + alphabet[i % base]
class MyContract(Contract):
    """A four-function calculator driven by application args: [action, a, b]."""

    def approval_program(self) -> UInt64:
        num_args = Txn.num_app_args
        if num_args == 0:
            a = UInt64(0)
            b = UInt64(0)
            action = UInt64(0)
            log(a)
            log(b)
        else:
            assert num_args == 3, "Expected 3 args"
            action_b = Txn.application_args(0)
            action = op.btoi(action_b)
            a_bytes = Txn.application_args(1)
            b_bytes = Txn.application_args(2)
            log(a_bytes)
            log(b_bytes)
            a = op.btoi(a_bytes)
            b = op.btoi(b_bytes)
        # NOTE(review): with zero app args, action is 0 and do_calc hits its
        # "unknown operation" assert — confirm the no-arg path is meant to reject
        result = self.do_calc(action, a, b)
        # log a human-readable summary, e.g. b"2 + 3 = 5"
        result_b = itoa(a) + self.op(action) + itoa(b) + b" = " + itoa(result)
        log(result_b)
        return UInt64(1)

    @subroutine
    def op(self, action: UInt64) -> Bytes:
        # operator symbol for the log line; unknown actions fall back to " - "
        if action == ADD:
            return Bytes(b" + ")
        elif action == SUB:
            return Bytes(b" - ")
        elif action == MUL:
            return Bytes(b" * ")
        elif action == DIV:
            return Bytes(b" // ")
        else:
            return Bytes(b" - ")

    @subroutine
    def do_calc(self, maybe_action: UInt64, a: UInt64, b: UInt64) -> UInt64:
        # dispatch on the action id; anything else rejects the transaction
        if maybe_action == ADD:
            return self.add(a, b)
        elif maybe_action == SUB:
            return self.sub(a, b)
        elif maybe_action == MUL:
            return self.mul(a, b)
        elif maybe_action == DIV:
            return self.div(a, b)
        else:
            assert False, "unknown operation"

    @subroutine
    def add(self, a: UInt64, b: UInt64) -> UInt64:
        return a + b

    @subroutine
    def sub(self, a: UInt64, b: UInt64) -> UInt64:
        # UInt64 cannot go negative, so this fails the txn when b > a
        return a - b

    @subroutine
    def mul(self, a: UInt64, b: UInt64) -> UInt64:
        return a * b

    @subroutine
    def div(self, a: UInt64, b: UInt64) -> UInt64:
        # truncating division; fails the txn when b is 0
        return a // b

    def clear_state_program(self) -> bool:
        return True
|
algorandfoundation/puya
|
examples/calculator/contract.py
|
Python
|
NOASSERTION
| 2,278 |
from algopy import (
Account,
Application,
Asset,
Bytes,
Contract,
GlobalState,
String,
UInt64,
subroutine,
)
class AppStateContract(Contract):
    """Exercises GlobalState declarations (explicit proxy vs. plain assignment)
    and their value/maybe/get/delete access patterns."""

    def __init__(self) -> None:
        # explicit proxy with an initial value
        self.global_int_full = GlobalState(UInt64(55))
        # plain assignment is shorthand for the same thing
        self.global_int_simplified = UInt64(33)
        # typed proxy with no initial value — unset until first write
        self.global_int_no_default = GlobalState(UInt64)
        self.global_bytes_full = GlobalState(Bytes(b"Hello"))
        self.global_bytes_simplified = Bytes(b"Hello")
        self.global_bytes_no_default = GlobalState(Bytes)
        self.global_bool_full = GlobalState(False)
        self.global_bool_simplified = True
        self.global_bool_no_default = GlobalState(bool)
        # reference types can also be stored in global state
        self.global_asset = GlobalState(Asset)
        self.global_application = GlobalState(Application)
        self.global_account = GlobalState(Account)

    def approval_program(self) -> bool:
        assert self.global_int_simplified == 33
        # truthiness of a proxy tests whether the key is set
        assert self.global_int_full
        assert self.global_int_full.value == 55
        assert not self.global_int_no_default
        self.global_int_no_default.value = UInt64(44)
        i_value, i_exists = self.global_int_no_default.maybe()
        assert i_exists
        assert i_value == 44
        # the default key is the member name, so a dynamic lookup by that name matches
        assert read_global_uint64(Bytes(b"global_int_no_default")) == 44
        assert self.global_bytes_simplified == b"Hello"
        assert self.global_bytes_full
        assert self.global_bytes_full.value == b"Hello"
        assert self.global_bytes_full.get(Bytes(b"default")) == b"Hello"
        assert not self.global_bytes_no_default
        self.global_bytes_no_default.value = Bytes(b"World")
        b_value, b_exists = self.global_bytes_no_default.maybe()
        assert b_exists
        assert b_value == b"World"
        assert read_global_bytes(String("global_bytes_no_default")) == b"World"
        del self.global_bytes_no_default.value
        b_value, b_exists = self.global_bytes_no_default.maybe()
        assert not b_exists
        assert self.global_bytes_no_default.get(Bytes(b"default")) == b"default"
        # Assert 'is set'
        assert self.global_bool_full
        assert not self.global_bool_no_default
        self.global_bool_no_default.value = True
        # Assert 'value'
        assert not self.global_bool_full.value
        assert self.global_bool_simplified
        assert self.global_bool_no_default.value
        # test the proxy can be passed as an argument
        assert get_global_state_plus_1(self.global_int_no_default) == 45
        return True

    def clear_state_program(self) -> bool:
        return True
@subroutine
def get_global_state_plus_1(state: GlobalState[UInt64]) -> UInt64:
    """Demonstrates passing a GlobalState proxy to a subroutine."""
    return state.value + 1
@subroutine
def read_global_uint64(key: Bytes) -> UInt64:
    """Read a UInt64 global state value via a dynamically supplied key."""
    return GlobalState(UInt64, key=key).value
@subroutine
def read_global_bytes(key: String) -> Bytes:
    """Read a Bytes global state value via a dynamically supplied key."""
    return GlobalState(Bytes, key=key).value
|
algorandfoundation/puya
|
examples/global_state/contract.py
|
Python
|
NOASSERTION
| 2,937 |
from algopy import Contract, Txn, log
class HelloWorldContract(Contract):
    """Logs a greeting built from the first application argument."""

    def approval_program(self) -> bool:
        caller_name = Txn.application_args(0)
        greeting = b"Hello, " + caller_name
        log(greeting)
        return True

    def clear_state_program(self) -> bool:
        return True
|
algorandfoundation/puya
|
examples/hello_world/contract.py
|
Python
|
NOASSERTION
| 270 |
from algopy import ARC4Contract, String, arc4
# Note: this contract is also used in the Puya AlgoKit template. So any breaking changes
# that require fixing this contract should also be made there
# https://github.com/algorandfoundation/algokit-puya-template/blob/main/template_content/pyproject.toml.jinja
# https://github.com/algorandfoundation/algokit-puya-template/blob/main/template_content/.algokit/generators/create_contract/smart_contracts/%7B%25%20raw%20%25%7D%7B%7B%20contract_name%20%7D%7D%7B%25%20endraw%20%25%7D/contract.py.j2
class HelloWorldContract(ARC4Contract):
    """ARC-4 version of the hello-world example; also used by the AlgoKit template (see note above)."""

    @arc4.abimethod
    def hello(self, name: String) -> String:
        return "Hello, " + name
|
algorandfoundation/puya
|
examples/hello_world_arc4/contract.py
|
Python
|
NOASSERTION
| 680 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class HelloWorldContract(algopy.arc4.ARC4Client, typing.Protocol):
    # Generated typed client Protocol mirroring the contract's ABI methods.
    @algopy.arc4.abimethod
    def hello(
        self,
        name: algopy.arc4.String,
    ) -> algopy.arc4.String: ...
|
algorandfoundation/puya
|
examples/hello_world_arc4/out/client_HelloWorldContract.py
|
Python
|
NOASSERTION
| 292 |
from algopy import (
Account,
Bytes,
Contract,
LocalState,
OnCompleteAction,
String,
Txn,
UInt64,
log,
subroutine,
)
class LocalStateContract(Contract):
    """Demonstrates local (per-account) state access patterns, dispatched on raw app args."""

    def __init__(self) -> None:
        self.local = LocalState(Bytes)
        self.local_bool = LocalState(bool)

    def approval_program(self) -> bool:
        # Approve bare app creation.
        if Txn.application_id == 0:
            return True
        # Only NoOp and OptIn calls are serviced.
        if Txn.on_completion not in (OnCompleteAction.NoOp, OnCompleteAction.OptIn):
            return False
        if Txn.num_app_args == 0:
            return False
        # First app arg selects the method; arity determines the branch.
        method = Txn.application_args(0)
        if Txn.num_app_args == 1:
            if method == b"get_guaranteed_data":
                log(self.get_guaranteed_data(Txn.sender))
            elif method == b"get_data_or_assert":
                log(self.get_data_or_assert(Txn.sender))
            elif method == b"delete_data":
                self.delete_data(Txn.sender)
                log("Deleted")
            else:
                return False
            return True
        elif Txn.num_app_args == 2:
            if method == b"set_data":
                self.set_data(Txn.sender, Txn.application_args(1))
            elif method == b"get_data_with_default":
                log(self.get_data_with_default(Txn.sender, Txn.application_args(1)))
            else:
                return False
            return True
        else:
            return False

    def clear_state_program(self) -> bool:
        return True

    @subroutine
    def get_guaranteed_data(self, for_account: Account) -> Bytes:
        result = self.local[for_account]
        # this just tests local state proxy can be passed around
        assert result.length == get_local_state_length(self.local, for_account)
        # tests for dynamic key
        assert local_bytes_exists(for_account, Bytes(b"local"))
        assert read_local_bytes(for_account, String("local")) == result
        return result

    @subroutine
    def get_data_with_default(self, for_account: Account, default: Bytes) -> Bytes:
        # offset contract use arguments without kwarg
        return self.local.get(account=for_account, default=default)

    @subroutine
    def get_data_or_assert(self, for_account: Account) -> Bytes:
        # Fails the program if the account has no stored value.
        result, exists = self.local.maybe(for_account)
        assert exists, "no data for account"
        return result

    @subroutine
    def set_data(self, for_account: Account, value: Bytes) -> None:
        self.local[for_account] = value

    @subroutine
    def delete_data(self, for_account: Account) -> None:
        del self.local[for_account]
@subroutine
def get_local_state_length(state: LocalState[Bytes], account: Account) -> UInt64:
    """Return the stored byte length; exists to show a LocalState proxy can be passed as an argument."""
    return state[account].length
@subroutine
def local_bytes_exists(account: Account, key: Bytes) -> bool:
    """True if *account* has a value under a dynamically-keyed (Bytes) local state entry."""
    return account in LocalState(Bytes, key=key)
@subroutine
def read_local_bytes(account: Account, key: String) -> Bytes:
    """Read *account*'s local state value under a dynamically-keyed (String) entry."""
    return LocalState(Bytes, key=key)[account]
|
algorandfoundation/puya
|
examples/local_state/local_state_contract.py
|
Python
|
NOASSERTION
| 3,010 |
from algopy import (
Bytes,
Contract,
LocalState,
OnCompleteAction,
Txn,
UInt64,
log,
op,
subroutine,
)
class LocalStateContract(Contract, name="LocalStateWithOffsets"):
    """Local state demo where the account is addressed by a uint64 offset instead of an Account."""

    def __init__(self) -> None:
        self.local = LocalState(Bytes)

    def approval_program(self) -> bool:
        # Approve bare app creation.
        if Txn.application_id == 0:
            return True
        if Txn.on_completion not in (OnCompleteAction.NoOp, OnCompleteAction.OptIn):
            return False
        if Txn.num_app_args < 1:
            return False
        # First arg is the account offset, second the method selector.
        offset = op.btoi(Txn.application_args(0))
        method = Txn.application_args(1)
        if Txn.num_app_args == 2:
            if method == b"get_guaranteed_data":
                log(self.get_guaranteed_data(offset))
            elif method == b"get_data_or_assert":
                log(self.get_data_or_assert(offset))
            elif method == b"delete_data":
                self.delete_data(offset)
                log("Deleted")
            else:
                return False
            return True
        elif Txn.num_app_args == 3:
            if method == b"set_data":
                self.set_data(offset, Txn.application_args(2))
            elif method == b"get_data_with_default":
                log(self.get_data_with_default(offset, Txn.application_args(2)))
            else:
                return False
            return True
        else:
            return False

    def clear_state_program(self) -> bool:
        return True

    @subroutine
    def get_guaranteed_data(self, for_account: UInt64) -> Bytes:
        return self.local[for_account]

    @subroutine
    def get_data_with_default(self, for_account: UInt64, default: Bytes) -> Bytes:
        return self.local.get(for_account, default)

    @subroutine
    def get_data_or_assert(self, for_account: UInt64) -> Bytes:
        # Fails the program if the account has no stored value.
        result, exists = self.local.maybe(for_account)
        assert exists, "no data for account"
        return result

    @subroutine
    def set_data(self, for_account: UInt64, value: Bytes) -> None:
        self.local[for_account] = value

    @subroutine
    def delete_data(self, for_account: UInt64) -> None:
        del self.local[for_account]
|
algorandfoundation/puya
|
examples/local_state/local_state_with_offsets.py
|
Python
|
NOASSERTION
| 2,220 |
import typing
from algopy import BigUInt, Bytes, arc4, op, subroutine, urange
Bytes32: typing.TypeAlias = arc4.StaticArray[arc4.Byte, typing.Literal[32]]
Proof: typing.TypeAlias = arc4.DynamicArray[Bytes32]
class MerkleTree(arc4.ARC4Contract):
    """Stores a Merkle root at creation and verifies membership proofs against it."""

    @arc4.abimethod(create="require")
    def create(self, root: Bytes32) -> None:
        self.root = root.bytes

    @arc4.abimethod
    def verify(self, proof: Proof, leaf: Bytes32) -> bool:
        # Proof is valid iff folding the leaf up the proof path reproduces the root.
        return self.root == compute_root_hash(proof, leaf.bytes)
@subroutine
def compute_root_hash(proof: Proof, leaf: Bytes) -> Bytes:
    """Fold *leaf* with each sibling hash in *proof* (in order) and return the resulting root."""
    computed = leaf
    for idx in urange(proof.length):
        computed = hash_pair(computed, proof[idx].bytes)
    return computed
@subroutine
def hash_pair(a: Bytes, b: Bytes) -> Bytes:
    """sha256 of the two inputs concatenated smaller-first (numeric compare), so order doesn't matter."""
    return op.sha256(a + b if BigUInt.from_bytes(a) < BigUInt.from_bytes(b) else b + a)
|
algorandfoundation/puya
|
examples/merkle/contract.py
|
Python
|
NOASSERTION
| 860 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class MerkleTree(algopy.arc4.ARC4Client, typing.Protocol):
    # Generated typed client Protocol mirroring the contract's ABI methods.
    @algopy.arc4.abimethod(create='require')
    def create(
        self,
        root: algopy.arc4.StaticArray[algopy.arc4.Byte, typing.Literal[32]],
    ) -> None: ...
    @algopy.arc4.abimethod
    def verify(
        self,
        proof: algopy.arc4.DynamicArray[algopy.arc4.StaticArray[algopy.arc4.Byte, typing.Literal[32]]],
        leaf: algopy.arc4.StaticArray[algopy.arc4.Byte, typing.Literal[32]],
    ) -> algopy.arc4.Bool: ...
|
algorandfoundation/puya
|
examples/merkle/out/client_MerkleTree.py
|
Python
|
NOASSERTION
| 602 |
from algopy import ARC4Contract, Asset, arc4, op, subroutine
class UserStruct(arc4.Struct):
    """ABI struct persisted per-user in a box, keyed by the `id` bytes."""

    name: arc4.String
    id: arc4.UInt64
    asset: arc4.UInt64  # asset id attached via attach_asset_to_user
class ExampleContract(ARC4Contract):
    """Stores UserStruct records in boxes keyed by the user id bytes."""

    @subroutine
    def read_from_box(self, user_id: arc4.UInt64) -> UserStruct:
        # Fails the program if no box exists for this id.
        box_data, exists = op.Box.get(user_id.bytes)
        assert exists, "User with that id does not exist"
        return UserStruct.from_bytes(box_data)

    @subroutine
    def write_to_box(self, user: UserStruct) -> None:
        box_key = user.id.bytes
        # Delete existing data, so we don't have to worry about resizing the box
        op.Box.delete(box_key)
        op.Box.put(box_key, user.bytes)

    @subroutine
    def box_exists(self, user_id: arc4.UInt64) -> bool:
        _data, exists = op.Box.get(user_id.bytes)
        return exists

    @arc4.abimethod()
    def add_user(self, user: UserStruct) -> None:
        assert not self.box_exists(user.id), "User with id must not exist"
        self.write_to_box(user)

    @arc4.abimethod()
    def attach_asset_to_user(self, user_id: arc4.UInt64, asset: Asset) -> None:
        # Read-modify-write of the whole struct.
        user = self.read_from_box(user_id)
        user.asset = arc4.UInt64(asset.id)
        self.write_to_box(user)

    @arc4.abimethod()
    def get_user(self, user_id: arc4.UInt64) -> UserStruct:
        return self.read_from_box(user_id)
|
algorandfoundation/puya
|
examples/struct_in_box/contract.py
|
Python
|
NOASSERTION
| 1,365 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class UserStruct(algopy.arc4.Struct):
    # Generated struct mirroring the contract-side UserStruct definition.
    name: algopy.arc4.String
    id: algopy.arc4.UIntN[typing.Literal[64]]
    asset: algopy.arc4.UIntN[typing.Literal[64]]
class ExampleContract(algopy.arc4.ARC4Client, typing.Protocol):
    # Generated typed client Protocol mirroring the contract's ABI methods.
    @algopy.arc4.abimethod
    def add_user(
        self,
        user: UserStruct,
    ) -> None: ...
    @algopy.arc4.abimethod
    def attach_asset_to_user(
        self,
        user_id: algopy.arc4.UIntN[typing.Literal[64]],
        asset: algopy.Asset,
    ) -> None: ...
    @algopy.arc4.abimethod
    def get_user(
        self,
        user_id: algopy.arc4.UIntN[typing.Literal[64]],
    ) -> UserStruct: ...
|
algorandfoundation/puya
|
examples/struct_in_box/out/client_ExampleContract.py
|
Python
|
NOASSERTION
| 749 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class TicTacToeContract(algopy.arc4.ARC4Client, typing.Protocol):
    # Generated typed client Protocol mirroring the contract's ABI methods.
    @algopy.arc4.abimethod(create='allow')
    def new_game(
        self,
        move: algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.UIntN[typing.Literal[64]]],
    ) -> None: ...
    @algopy.arc4.abimethod
    def join_game(
        self,
        move: algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.UIntN[typing.Literal[64]]],
    ) -> None: ...
    @algopy.arc4.abimethod
    def whose_turn(
        self,
    ) -> algopy.arc4.UIntN[typing.Literal[8]]: ...
    @algopy.arc4.abimethod
    def play(
        self,
        move: algopy.arc4.Tuple[algopy.arc4.UIntN[typing.Literal[64]], algopy.arc4.UIntN[typing.Literal[64]]],
    ) -> None: ...
|
algorandfoundation/puya
|
examples/tictactoe/out/client_TicTacToeContract.py
|
Python
|
NOASSERTION
| 863 |
# ruff: noqa: PT018
import typing
from algopy import Account, GlobalState, Txn, UInt64, arc4, op, subroutine
# 3x3 board: each cell is a UInt8 marker, rows are fixed-size arrays.
Row: typing.TypeAlias = arc4.StaticArray[arc4.UInt8, typing.Literal[3]]
Game: typing.TypeAlias = arc4.StaticArray[Row, typing.Literal[3]]
# A move is a (column, row) pair (see the `column, row = move` unpacking below).
Move: typing.TypeAlias = tuple[UInt64, UInt64]
# Cell / winner marker values.
EMPTY = 0
HOST = 1
CHALLENGER = 2
DRAW = 3
class TicTacToeContract(arc4.ARC4Contract):
    """Two-player tic-tac-toe: host creates a game, a challenger joins, players alternate moves."""

    def __init__(self) -> None:
        self.challenger = GlobalState(Account)
        self.winner = GlobalState(arc4.UInt8)

    @arc4.abimethod(create="allow")
    def new_game(self, move: Move) -> None:
        if Txn.application_id:
            # if a challenger has joined, don't allow starting a new game
            # until this one is complete
            if self.challenger:
                assert self.winner, "Game isn't over"
            # reset challenger and winner
            del self.challenger.value
            del self.winner.value
        self.host = Txn.sender
        # Fresh 3x3 board of zero bytes (all EMPTY).
        self.game = Game.from_bytes(op.bzero(9))
        column, row = move
        assert column < 3 and row < 3, "Move must be in range"
        self.game[row][column] = arc4.UInt8(HOST)
        self.turns = UInt64(0)

    @arc4.abimethod
    def join_game(self, move: Move) -> None:
        assert not self.challenger, "Host already has a challenger"
        self.challenger.value = Txn.sender
        self.make_move(arc4.UInt8(CHALLENGER), move)

    @arc4.abimethod
    def whose_turn(self) -> arc4.UInt8:
        # Odd turn count -> host's move next, even -> challenger's.
        return arc4.UInt8(HOST) if self.turns % 2 else arc4.UInt8(CHALLENGER)

    @arc4.abimethod
    def play(self, move: Move) -> None:
        assert not self.winner, "Game is already finished"
        if self.turns % 2:
            assert Txn.sender == self.host, "It is the host's turn"
            player = arc4.UInt8(HOST)
        else:
            assert Txn.sender == self.challenger.get(
                default=Account()
            ), "It is the challenger's turn"
            player = arc4.UInt8(CHALLENGER)
        self.make_move(player, move)

    @subroutine
    def make_move(self, player: arc4.UInt8, move: Move) -> None:
        column, row = move
        assert column < 3 and row < 3, "Move must be in range"
        assert self.game[row][column] == EMPTY, "Square is already taken"
        self.game[row][column] = player
        self.turns += 1
        if self.did_win(player, column=column, row=row):
            self.winner.value = player
        elif self.turns == 9:
            # Board full with no winner.
            self.winner.value = arc4.UInt8(DRAW)

    @subroutine
    def did_win(self, player: arc4.UInt8, column: UInt64, row: UInt64) -> bool:
        # Only the row/column/diagonals through the last move can have changed.
        g = self.game.copy()
        if g[row][0] == g[row][1] == g[row][2]:
            return True
        if g[0][column] == g[1][column] == g[2][column]:
            return True
        # if player owns center, check diagonals
        if player == g[1][1]:
            if g[0][0] == player == g[2][2]:
                return True
            if g[0][2] == player == g[2][0]:
                return True
        return False
|
algorandfoundation/puya
|
examples/tictactoe/tictactoe.py
|
Python
|
NOASSERTION
| 3,051 |
# This file is auto-generated, do not modify
# flake8: noqa
# fmt: off
import typing
import algopy
class VotingPreconditions(algopy.arc4.Struct):
    # Generated struct mirroring the contract-side VotingPreconditions definition.
    is_voting_open: algopy.arc4.UIntN[typing.Literal[64]]
    is_allowed_to_vote: algopy.arc4.UIntN[typing.Literal[64]]
    has_already_voted: algopy.arc4.UIntN[typing.Literal[64]]
    current_time: algopy.arc4.UIntN[typing.Literal[64]]
class VotingRoundApp(algopy.arc4.ARC4Client, typing.Protocol):
    # Generated typed client Protocol mirroring the contract's ABI methods.
    @algopy.arc4.abimethod(create='require')
    def create(
        self,
        vote_id: algopy.arc4.String,
        snapshot_public_key: algopy.arc4.DynamicBytes,
        metadata_ipfs_cid: algopy.arc4.String,
        start_time: algopy.arc4.UIntN[typing.Literal[64]],
        end_time: algopy.arc4.UIntN[typing.Literal[64]],
        option_counts: algopy.arc4.DynamicArray[algopy.arc4.UIntN[typing.Literal[8]]],
        quorum: algopy.arc4.UIntN[typing.Literal[64]],
        nft_image_url: algopy.arc4.String,
    ) -> None: ...
    @algopy.arc4.abimethod
    def bootstrap(
        self,
        fund_min_bal_req: algopy.gtxn.PaymentTransaction,
    ) -> None: ...
    @algopy.arc4.abimethod
    def close(
        self,
    ) -> None: ...
    @algopy.arc4.abimethod(readonly=True)
    def get_preconditions(
        self,
        signature: algopy.arc4.DynamicBytes,
    ) -> VotingPreconditions: ...
    @algopy.arc4.abimethod
    def vote(
        self,
        fund_min_bal_req: algopy.gtxn.PaymentTransaction,
        signature: algopy.arc4.DynamicBytes,
        answer_ids: algopy.arc4.DynamicArray[algopy.arc4.UIntN[typing.Literal[8]]],
    ) -> None: ...
|
algorandfoundation/puya
|
examples/voting/out/client_VotingRoundApp.py
|
Python
|
NOASSERTION
| 1,622 |
# Converted from https://github.com/algorandfoundation/nft_voting_tool/blob/c0f8be47ab80c8694d2cf40ca0df54cec07ff14a/src/algorand/smart_contracts/voting.py
import typing
from algopy import (
Account,
ARC4Contract,
BoxMap,
BoxRef,
Bytes,
Global,
GlobalState,
OpUpFeeSource,
String,
Txn,
UInt64,
arc4,
ensure_budget,
gtxn,
itxn,
log,
op,
subroutine,
uenumerate,
urange,
)
# One uint8 answer index per question.
VoteIndexArray: typing.TypeAlias = arc4.DynamicArray[arc4.UInt8]
# Bytes per stored vote index / per tally counter.
VOTE_INDEX_BYTES = 1
VOTE_COUNT_BYTES = 8
#: The min balance increase per box created
BOX_FLAT_MIN_BALANCE = 2500
#: The min balance increase per byte of boxes (key included)
BOX_BYTE_MIN_BALANCE = 400
#: The min balance increase for each asset opted into
ASSET_MIN_BALANCE = 100000
class VotingPreconditions(arc4.Struct):
    """Flags (1/0) plus current timestamp returned by get_preconditions."""

    is_voting_open: arc4.UInt64
    is_allowed_to_vote: arc4.UInt64
    has_already_voted: arc4.UInt64
    current_time: arc4.UInt64
class VotingRoundApp(ARC4Contract):
    """Voting round app: bootstrap funding, signature-gated voting, box-backed tallies,
    and an ARC-69 result NFT minted when the round is closed."""

    def __init__(self) -> None:
        self.is_bootstrapped = False
        # The minimum number of voters who have voted
        self.voter_count = UInt64(0)
        self.close_time = GlobalState(UInt64)
        # Single box ("V") holding all tallies as contiguous 8-byte counters.
        self.tally_box = BoxRef(key="V")
        # One box per voter account recording their answer indices.
        self.votes_by_account = BoxMap(Account, VoteIndexArray, key_prefix="")

    @arc4.abimethod(create="require")
    def create(
        self,
        vote_id: String,
        snapshot_public_key: Bytes,
        metadata_ipfs_cid: String,
        start_time: UInt64,
        end_time: UInt64,
        option_counts: VoteIndexArray,
        quorum: UInt64,
        nft_image_url: String,
    ) -> None:
        """Create the round and store its configuration in global state."""
        assert start_time < end_time, "End time should be after start time"
        assert end_time >= Global.latest_timestamp, "End time should be in the future"
        self.vote_id = vote_id
        self.snapshot_public_key = snapshot_public_key
        self.metadata_ipfs_cid = metadata_ipfs_cid
        self.start_time = start_time
        self.end_time = end_time
        self.quorum = quorum
        self.nft_image_url = nft_image_url
        self.store_option_counts(option_counts.copy())

    @arc4.abimethod
    def bootstrap(self, fund_min_bal_req: gtxn.PaymentTransaction) -> None:
        """Accept the exact min-balance funding payment and create the tally box."""
        assert not self.is_bootstrapped, "Must not be already bootstrapped"
        self.is_bootstrapped = True
        assert (
            fund_min_bal_req.receiver == Global.current_application_address
        ), "Payment must be to app address"
        tally_box_size = self.total_options * VOTE_COUNT_BYTES
        min_balance_req = (
            # minimum balance req for: ALGOs + Vote result NFT asset
            ASSET_MIN_BALANCE * 2
            # create NFT fee
            + 1000
            # tally box
            + BOX_FLAT_MIN_BALANCE
            # tally box key "V"
            + BOX_BYTE_MIN_BALANCE
            # tally box value
            + (tally_box_size * BOX_BYTE_MIN_BALANCE)
        )
        log(min_balance_req)
        assert (
            fund_min_bal_req.amount == min_balance_req
        ), "Payment must be for the exact min balance requirement"
        assert self.tally_box.create(size=tally_box_size)

    @arc4.abimethod
    def close(self) -> None:
        """Close voting, build the ARC-69 JSON note of tallies, and mint the result NFT."""
        ensure_budget(20000, fee_source=OpUpFeeSource.GroupCredit)
        assert not self.close_time, "Already closed"
        self.close_time.value = Global.latest_timestamp
        # Hand-built JSON; tallies are appended question by question below.
        note = (
            '{"standard":"arc69",'
            '"description":"This is a voting result NFT for voting round with ID '
            + self.vote_id
            + '.","properties":{"metadata":"ipfs://'
            + self.metadata_ipfs_cid
            + '","id":"'
            + self.vote_id
            + '","quorum":'
            + itoa(self.quorum)
            + ',"voterCount":'
            + itoa(self.voter_count)
            + ',"tallies":['
        )
        current_index = UInt64(0)
        for question_index, question_options in uenumerate(self.option_counts):
            if question_index > 0:
                note += ","
            if question_options > 0:
                note += "["
                for option_index in urange(question_options.native):
                    if option_index > 0:
                        note += ","
                    votes_for_option = self.get_vote_from_box(current_index)
                    note += itoa(votes_for_option)
                    current_index += 1
                note += "]"
        note += "]}}"
        self.nft_asset_id = (
            itxn.AssetConfig(
                total=1,
                decimals=0,
                default_frozen=False,
                asset_name="[VOTE RESULT] " + self.vote_id,
                unit_name="VOTERSLT",
                url=self.nft_image_url,
                note=note,
                fee=Global.min_txn_fee,
            )
            .submit()
            .created_asset.id
        )

    @arc4.abimethod(readonly=True)
    def get_preconditions(self, signature: Bytes) -> VotingPreconditions:
        """Report open/allowed/voted flags plus the current timestamp for the caller."""
        return VotingPreconditions(
            is_voting_open=arc4.UInt64(self.voting_open()),
            is_allowed_to_vote=arc4.UInt64(self.allowed_to_vote(signature)),
            has_already_voted=arc4.UInt64(self.already_voted()),
            current_time=arc4.UInt64(Global.latest_timestamp),
        )

    @arc4.abimethod
    def vote(
        self,
        fund_min_bal_req: gtxn.PaymentTransaction,
        signature: Bytes,
        answer_ids: VoteIndexArray,
    ) -> None:
        """Record the sender's answers; the payment funds the voter's box min balance."""
        ensure_budget(7700, fee_source=OpUpFeeSource.GroupCredit)
        # Check voting preconditions
        assert self.allowed_to_vote(signature), "Not allowed to vote"
        assert self.voting_open(), "Voting not open"
        assert not self.already_voted(), "Already voted"
        questions_count = self.option_counts.length
        assert answer_ids.length == questions_count, "Number of answers incorrect"
        # Check voter box is funded
        min_bal_req = BOX_FLAT_MIN_BALANCE + (
            (32 + 2 + VOTE_INDEX_BYTES * answer_ids.length) * BOX_BYTE_MIN_BALANCE
        )
        assert (
            fund_min_bal_req.receiver == Global.current_application_address
        ), "Payment must be to app address"
        log(min_bal_req)
        assert fund_min_bal_req.amount == min_bal_req, "Payment must be the exact min balance"
        # Record the vote for each question
        cumulative_offset = UInt64(0)
        for question_index in urange(questions_count):
            # Load the user's vote for this question
            answer_option_index = answer_ids[question_index].native
            options_count = self.option_counts[question_index].native
            assert answer_option_index < options_count, "Answer option index invalid"
            self.increment_vote_in_box(cumulative_offset + answer_option_index)
            cumulative_offset += options_count
        self.votes_by_account[Txn.sender] = answer_ids.copy()
        self.voter_count += 1

    @subroutine
    def voting_open(self) -> bool:
        # Open iff bootstrapped, not yet closed, and within the configured window.
        return (
            self.is_bootstrapped
            and not self.close_time
            and self.start_time <= Global.latest_timestamp <= self.end_time
        )

    @subroutine
    def already_voted(self) -> bool:
        return Txn.sender in self.votes_by_account

    @subroutine
    def store_option_counts(self, option_counts: VoteIndexArray) -> None:
        """Validate and persist per-question option counts and the overall option total."""
        assert option_counts.length, "option_counts should be non-empty"
        assert option_counts.length <= 112, "Can't have more than 112 questions"
        total_options = UInt64(0)
        for item in option_counts:
            total_options += item.native
        assert total_options <= 128, "Can't have more than 128 vote options"
        self.option_counts = option_counts.copy()
        self.total_options = total_options

    @subroutine
    def allowed_to_vote(self, signature: Bytes) -> bool:
        # Sender may vote iff their address was signed with the snapshot key.
        ensure_budget(2000)
        return op.ed25519verify_bare(
            Txn.sender.bytes,
            signature,
            self.snapshot_public_key,
        )

    @subroutine
    def get_vote_from_box(self, index: UInt64) -> UInt64:
        return op.btoi(self.tally_box.extract(index, VOTE_COUNT_BYTES))

    @subroutine
    def increment_vote_in_box(self, index: UInt64) -> None:
        current_vote = self.get_vote_from_box(index)
        self.tally_box.replace(index, op.itob(current_vote + 1))
@subroutine
def itoa(i: UInt64) -> String:
    """Convert a uint64 to its decimal string representation (recursive)."""
    digits = Bytes(b"0123456789")
    radix = digits.length
    if i < radix:
        return String.from_bytes(digits[i])
    return itoa(i // radix) + String.from_bytes(digits[i % radix])
|
algorandfoundation/puya
|
examples/voting/voting.py
|
Python
|
NOASSERTION
| 8,705 |
algorandfoundation/puya
|
scripts/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
import csv
import subprocess
from pathlib import Path
from scripts.compile_all_examples import ProgramSizes
_SCRIPTS_DIR = Path(__file__).parent
_ROOT_DIR = _SCRIPTS_DIR.parent
def main() -> None:
    """Diff examples/sizes.txt against the committed HEAD copy and write a CSV of deltas.

    No-op when the working-tree file matches HEAD. Requires git and assumes both
    versions cover the same set of programs.
    """
    sizes_path = _ROOT_DIR / "examples" / "sizes.txt"
    curr_text = sizes_path.read_text("utf8")
    prev_text = subprocess.run(
        ["git", "show", "HEAD:examples/sizes.txt"],
        capture_output=True,
        text=True,
        check=True,
        cwd=_ROOT_DIR,
    ).stdout
    if prev_text == curr_text:
        return
    curr_sizes = ProgramSizes.load(curr_text).sizes
    prev_sizes = ProgramSizes.load(prev_text).sizes
    delta = ProgramSizes()
    assert curr_sizes.keys() == prev_sizes.keys(), "can't analyse with different programs"
    # Only record programs whose sizes changed; delta holds curr - prev per level.
    for program_name in curr_sizes:
        prev_prog_size = prev_sizes[program_name]
        curr_prog_size = curr_sizes[program_name]
        if prev_prog_size != curr_prog_size:
            for level in range(3):
                delta.sizes[program_name][level] = curr_prog_size[level] - prev_prog_size[level]
    _sizes_to_csv(delta)
def _sizes_to_csv(ps: ProgramSizes) -> None:
    """Write the given size table to _tmp/sizes_diff.csv, sorted by program name."""
    tmp_dir = _ROOT_DIR / "_tmp"
    tmp_dir.mkdir(exist_ok=True)
    with (tmp_dir / "sizes_diff.csv").open("w", encoding="utf8") as output:
        writer = csv.writer(output)
        writer.writerow(["Name", "O0", "O1", "O2", "O0#Ops", "O1#Ops", "O2#Ops"])
        # copy sizes and sort by name
        for name, prog_sizes in sorted(ps.sizes.items()):
            o0, o1, o2 = (prog_sizes[i] for i in range(3))
            writer.writerow(
                map(str, (name, o0.bytecode, o1.bytecode, o2.bytecode, o0.ops, o1.ops, o2.ops))
            )
if __name__ == "__main__":
main()
|
algorandfoundation/puya
|
scripts/analyse_sizes_diff.py
|
Python
|
NOASSERTION
| 1,735 |
import typing
from collections import Counter
from collections.abc import Iterator
from pathlib import Path
VCS_ROOT = Path(__file__).parent.parent
OUTPUT_BASE_DIRS = ["examples", "test_cases"]
CODE_INDENT = " "
INTERESTING_OPS = frozenset(
[
# pure stack manipulation
"intc",
*[f"intc_{i}" for i in range(4)],
"bytec",
*[f"bytec_{i}" for i in range(4)],
"pushbytes",
"pushbytess",
"pushint",
"pushints",
"frame_dig",
"frame_bury",
"bury",
"cover",
"dig",
"dup",
"dup2",
"dupn",
"pop",
"popn",
"swap",
"uncover",
# constants
"addr",
"byte",
"int",
"method",
"txn",
"txna",
"gtxn",
"gtxna",
"itxn",
"itxna",
"global",
"pushint",
"pushbytes",
"gload",
"gaid",
# other loads
"load",
]
)
def main() -> None:
    """Print frequency stats over compiled TEAL: single-op blocks, then ever-longer
    "interesting" op sequences until no sequence occurs more than once."""
    teal_blocks = read_all_blocks()
    single_op_blocks = (block[0] for block in teal_blocks if len(block) == 1)
    print("Single op block counts:")
    for count, op in sorted(
        ((count, op) for op, count in Counter(single_op_blocks).items()), reverse=True
    ):
        print(f" {count}x {op}")
    window_size = 2
    while True:
        num_printed = 0
        print(f"\nInteresting op sequence of length {window_size} counts:")
        # All windows of the current size made up entirely of interesting ops.
        seqs = [
            tuple(seq)
            for block in teal_blocks
            for seq in sliding_window(block, window_size)
            if INTERESTING_OPS.issuperset(seq)
        ]
        # Top 20 sequences; stop listing once counts drop to 1.
        for count, ops in sorted(
            ((count, ops) for ops, count in Counter(seqs).items()), reverse=True
        )[:20]:
            if count == 1:
                break
            print(f" {count}x {'; '.join(ops)}")
            num_printed += 1
        if num_printed == 0:
            break
        window_size += 1
def read_all_blocks(*, include_clear_state: bool = True) -> list[list[str]]:
    """Return op-name lists, one per TEAL block/function, from all compiled outputs.

    Scans */out/*.approval.teal (plus *.clear.teal unless disabled) under the
    known output dirs; a non-indented line starts a new block.
    """
    teal_files = list[Path]()
    for output_base_dir in OUTPUT_BASE_DIRS:
        output_dir = VCS_ROOT / output_base_dir
        assert output_dir.is_dir()
        teal_files.extend(output_dir.rglob("*/out/*.approval.teal"))
        if include_clear_state:
            teal_files.extend(output_dir.rglob("*/out/*.clear.teal"))
    teal_blocks = list[list[str]]()
    for teal_file in teal_files:
        current_block = list[str]()
        teal = teal_file.read_text("utf8")
        file_lines = teal.splitlines()
        assert file_lines[0].startswith("#pragma")
        for line in file_lines[1:]:
            if not line.startswith(CODE_INDENT):
                # new block / function
                if current_block:
                    teal_blocks.append(current_block)
                    current_block = []
            else:
                # keep just the op name, drop its arguments
                op, *_ = line.split()
                if op:
                    current_block.append(op)
        if current_block:
            teal_blocks.append(current_block)
    return teal_blocks
T = typing.TypeVar("T")


def sliding_window(seq: list[T], window_size: int) -> Iterator[list[T]]:
    """Yield each contiguous run of *window_size* items from *seq*, left to right."""
    last_start = len(seq) - window_size
    start = 0
    while start <= last_start:
        yield seq[start : start + window_size]
        start += 1
if __name__ == "__main__":
main()
|
algorandfoundation/puya
|
scripts/analyse_teal_op_frequencies.py
|
Python
|
NOASSERTION
| 3,342 |
import argparse
import base64
from pathlib import Path
from algosdk.v2client.algod import AlgodClient
def main(path: list[Path]) -> None:
    """Assemble each TEAL file via algod and write the bytecode next to it as .teal.bin.

    Assumes a localnet algod on http://localhost:4001 using the default
    all-"a" API token.
    """
    algod_client = AlgodClient(algod_token="a" * 64, algod_address="http://localhost:4001")
    for p in path:
        response = algod_client.compile(p.read_text("utf8"))
        # algod returns the assembled program base64-encoded
        compiled: str = response["result"]
        compiled_bytes = base64.b64decode(compiled)
        p.with_suffix(".teal.bin").write_bytes(compiled_bytes)
if __name__ == "__main__":
    # CLI entry point: assemble one or more TEAL files given as arguments.
    parser = argparse.ArgumentParser(prog="assemble")
    parser.add_argument("files", type=Path, nargs="+", metavar="FILE")
    args = parser.parse_args()
    main(args.files)
|
algorandfoundation/puya
|
scripts/assemble.py
|
Python
|
NOASSERTION
| 677 |
#!/usr/bin/env python3
import argparse
import json
import operator
import os
import re
import shutil
import subprocess
import sys
from collections import defaultdict
from collections.abc import Iterable
from concurrent.futures import ProcessPoolExecutor
from pathlib import Path
import algokit_utils.deploy
import attrs
import prettytable
SCRIPT_DIR = Path(__file__).parent
GIT_ROOT = SCRIPT_DIR.parent
# Directories that contain compilable contract cases.
CONTRACT_ROOT_DIRS = [
    GIT_ROOT / "examples",
    GIT_ROOT / "test_cases",
]
SIZE_TALLY_PATH = GIT_ROOT / "examples" / "sizes.txt"
# Environment for subprocesses: deterministic, colourless, UTF-8 output.
ENV_WITH_NO_COLOR = dict(os.environ) | {
    "NO_COLOR": "1",  # disable colour output
    "PYTHONUTF8": "1",  # force utf8 on windows
}
# iterate optimization levels first and with O1 first and then cases, this is a workaround
# to prevent race conditions that occur when the mypy parsing stage of O0, O2 tries to
# read the client_<contract>.py output from the 01 level before it is finished writing to
# disk
DEFAULT_OPTIMIZATION = (1, 0, 2)
def get_root_and_relative_path(path: Path) -> tuple[Path, Path]:
    """Return (containing root dir, path relative to it); raise if *path* is outside all roots."""
    for root in CONTRACT_ROOT_DIRS:
        if path.is_relative_to(root):
            return root, path.relative_to(root)
    raise RuntimeError(f"{path} is not relative to a known example")
def get_unique_name(path: Path) -> str:
    """Derive a stable human-readable identifier for an output file from its path.

    Strips all file suffixes, drops "MyContract"/"Contract" from path parts and
    removes out-directory components, then joins the rest with "/".
    """
    _, rel_path = get_root_and_relative_path(path)
    # strip suffixes
    while rel_path.suffixes:
        rel_path = rel_path.with_suffix("")
    use_parts = []
    for part in rel_path.parts:
        if "MyContract" in part:
            use_parts.append("".join(part.split("MyContract")))
        elif "Contract" in part:
            use_parts.append("".join(part.split("Contract")))
        elif part.endswith((f"out{SUFFIX_O0}", f"out{SUFFIX_O1}", f"out{SUFFIX_O2}")):
            # out/, out_unoptimized/, out_O2/ directory components carry no information
            pass
        else:
            use_parts.append(part)
    return "/".join(filter(None, use_parts))
@attrs.frozen
class Size:
    """Bytecode size (bytes) and TEAL op count for one program; None treated as 0 in arithmetic."""

    bytecode: int | None = None
    ops: int | None = None

    def __add__(self, other: object) -> "Size":
        # element-wise addition
        if not isinstance(other, Size):
            return NotImplemented
        return Size(
            bytecode=(self.bytecode or 0) + (other.bytecode or 0),
            ops=(self.ops or 0) + (other.ops or 0),
        )

    def __sub__(self, other: object) -> "Size":
        # element-wise subtraction
        if not isinstance(other, Size):
            return NotImplemented
        return Size(
            bytecode=(self.bytecode or 0) - (other.bytecode or 0),
            ops=(self.ops or 0) - (other.ops or 0),
        )
def _program_to_sizes() -> defaultdict[str, defaultdict[int, Size]]:
    """Factory for the nested mapping: program name -> optimization level -> Size."""

    def _opt_to_sizes() -> defaultdict[int, Size]:
        return defaultdict[int, Size](Size)

    return defaultdict[str, defaultdict[int, Size]](_opt_to_sizes)
@attrs.define(str=False)
class ProgramSizes:
    """Table of Size entries keyed by program name then optimization level (0-2).

    str(self) renders the sizes.txt table; load() parses it back.
    """

    sizes: defaultdict[str, defaultdict[int, Size]] = attrs.field(factory=_program_to_sizes)

    def add_at_level(self, level: int, teal_file: Path, bin_file: Path) -> None:
        name = get_unique_name(bin_file)
        # this combines both approval and clear program sizes
        self.sizes[name][level] += Size(
            bytecode=bin_file.stat().st_size,
            ops=_get_num_teal_ops(teal_file),
        )

    @classmethod
    def load(cls, text: str) -> "ProgramSizes":
        """Parse the table produced by __str__ (skipping header and totals rows)."""
        lines = list(filter(None, text.splitlines()))
        program_sizes = ProgramSizes()
        sizes = program_sizes.sizes
        for line in lines[1:-1]:
            name, o0, o1, o2, _, o0_ops, o1_ops, o2_ops = line.rsplit(maxsplit=7)
            name = name.strip()
            for opt, (bin_str, ops_str) in enumerate(((o0, o0_ops), (o1, o1_ops), (o2, o2_ops))):
                if bin_str == "None":
                    continue
                if bin_str == "-":
                    # "-" means identical to the previous optimization level
                    previous = sizes[name][opt - 1]
                    bytecode = previous.bytecode
                    ops = previous.ops
                else:
                    bytecode = int(bin_str)
                    ops = int(ops_str)
                sizes[name][opt] = Size(bytecode=bytecode, ops=ops)
        return program_sizes

    def __str__(self) -> str:
        writer = prettytable.PrettyTable(
            field_names=["Name", "O0", "O1", "O2", "|", "O0#Ops", "O1#Ops", "O2#Ops"],
            header=True,
            border=False,
            min_width=6,
            left_padding_width=0,
            right_padding_width=0,
            align="r",
        )
        writer.align["Name"] = "l"
        writer.align["|"] = "c"
        # copy sizes and sort by name
        sizes = defaultdict(
            self.sizes.default_factory, {p: self.sizes[p].copy() for p in sorted(self.sizes)}
        )
        totals = {i: Size() for i in range(3)}
        for prog_sizes in sizes.values():
            for i in range(3):
                totals[i] += prog_sizes[i]
        # Add totals at end
        sizes["Total"].update(totals)
        for name, prog_sizes in sizes.items():
            o0, o1, o2 = (prog_sizes[i] for i in range(3))
            row = list(
                map(
                    str, (name, o0.bytecode, o1.bytecode, o2.bytecode, "|", o0.ops, o1.ops, o2.ops)
                )
            )
            # collapse unchanged columns to "-" (O1 vs O0, O2 vs O1)
            if o0 == o1:
                for i in (2, 6):
                    row[i] = "-"
            if o1 == o2:
                for i in (3, 7):
                    row[i] = "-"
            writer.add_row(row)
        return writer.get_string()
def _get_num_teal_ops(path: Path) -> int:
    """Count executable op lines in a TEAL file, ignoring comments, labels and pragmas."""
    ops = 0
    teal = path.read_text("utf8")
    for line in algokit_utils.deploy.strip_comments(teal).splitlines():
        line = line.strip()
        if not line or line.endswith(":") or line.startswith("#"):
            # ignore comment only lines, labels and pragmas
            pass
        else:
            ops += 1
    return ops
@attrs.define
class CompilationResult:
    """Outcome of a single puyapy invocation."""

    rel_path: str  # case path relative to its contract root
    ok: bool  # True when puyapy exited with code 0
    bin_files: list[Path]  # absolute paths of .bin files reported as written
    stdout: str  # full output, kept only on failure
def _stabilise_logs(stdout: str) -> list[str]:
    """Normalise compiler output so committed logs are reproducible across machines.

    Normalises path separators, replaces the absolute repo root with
    "<git root>", and drops log lines known to contain environment-specific
    paths.
    """
    return [
        line.replace("\\", "/").replace(str(GIT_ROOT).replace("\\", "/"), "<git root>")
        for line in stdout.splitlines()
        if not line.startswith(
            (
                "debug: Skipping algopy stub ",
                "debug: Skipping typeshed stub ",
                "warning: Skipping stub: ",
                "debug: Skipping stdlib stub ",
                "debug: Building AWST for ",
                "debug: Discovered user module ",
                # ignore platform specific paths
                "debug: Using python executable: ",
                "debug: Using python site-packages: ",
                "debug: Found algopy: ",
            )
        )
    ]
def checked_compile(p: Path, flags: list[str], *, out_suffix: str) -> CompilationResult:
    """Compile one case directory with puyapy and capture a stable log next to it.

    Clears previous outputs (keeping .log files), runs puyapy via poetry with
    the given flags, normalises any ARC-56 outputs, and writes the stabilised
    log to puya<out_suffix>.log.
    """
    assert p.is_dir()
    out_dir = (p / f"out{out_suffix}").resolve()
    template_vars_path = p / "template.vars"
    root, rel_path_ = get_root_and_relative_path(p)
    rel_path = str(rel_path_)
    if out_dir.exists():
        # remove stale outputs, but keep existing log files
        for prev_out_file in out_dir.iterdir():
            if prev_out_file.is_dir():
                shutil.rmtree(prev_out_file)
            elif prev_out_file.suffix != ".log":
                prev_out_file.unlink()
    cmd = [
        "poetry",
        "run",
        "puyapy",
        *flags,
        f"--out-dir={out_dir}",
        "--output-destructured-ir",
        "--output-bytecode",
        "--log-level=debug",
        *_load_template_vars(template_vars_path),
        rel_path,
    ]
    result = subprocess.run(
        cmd,
        cwd=root,
        check=False,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
        env=ENV_WITH_NO_COLOR,
        encoding="utf-8",
    )
    # .bin outputs are discovered from the compiler's own log lines
    bin_files_written = re.findall(r"info: Writing (.+\.bin)", result.stdout)
    # normalize ARC-56 output
    arc56_files_written = re.findall(r"info: Writing (.+\.arc56\.json)", result.stdout)
    for arc56_file in arc56_files_written:
        _normalize_arc56(root / arc56_file)
    log_path = p / f"puya{out_suffix}.log"
    log_txt = "\n".join(_stabilise_logs(result.stdout))
    log_path.write_text(log_txt, encoding="utf8")
    ok = result.returncode == 0
    return CompilationResult(
        rel_path=rel_path,
        ok=ok,
        bin_files=[root / p for p in bin_files_written],
        stdout=result.stdout if not ok else "",  # don't thunk stdout if no errors
    )
def _normalize_arc56(path: Path) -> None:
arc56 = json.loads(path.read_text())
compiler_version = arc56.get("compilerInfo", {}).get("compilerVersion", {})
compiler_version["major"] = 99
compiler_version["minor"] = 99
compiler_version["patch"] = 99
path.write_text(json.dumps(arc56, indent=4), encoding="utf8")
def _load_template_vars(path: Path) -> Iterable[str]:
if path.exists():
for line in path.read_text("utf8").splitlines():
if line.startswith("prefix="):
prefix = line.removeprefix("prefix=")
yield f"--template-vars-prefix={prefix}"
else:
yield f"-T={line}"
# out-dir suffix per optimization level; -O1 is the default and gets no suffix
SUFFIX_O0 = "_unoptimized"
SUFFIX_O1 = ""
SUFFIX_O2 = "_O2"
def _compile_for_level(arg: tuple[Path, int]) -> tuple[CompilationResult, int]:
    """Worker for the process pool: compile one example at one -O level."""
    p, optimization_level = arg
    if optimization_level == 1:
        # default level: also emit all intermediate artifacts for snapshot tests
        flags = [
            "-O1",
            "--output-awst",
            "--output-ssa-ir",
            "--output-optimization-ir",
            "--output-memory-ir",
            "--output-client",
            "--output-source-map",
            "--output-arc56",
        ]
        out_suffix = SUFFIX_O1
    elif optimization_level == 0:
        flags = [
            "-O0",
            "--no-output-arc32",
        ]
        out_suffix = SUFFIX_O0
    else:
        assert optimization_level == 2
        # -g0 strips debug info at the highest optimization level
        flags = [
            "-O2",
            "--no-output-arc32",
            "-g0",
        ]
        out_suffix = SUFFIX_O2
    result = checked_compile(p, flags=flags, out_suffix=out_suffix)
    return result, optimization_level
@attrs.define(kw_only=True)
class CompileAllOptions:
    # CLI options namespace, populated by argparse in the __main__ block.
    limit_to: list[Path] = attrs.field(factory=list)  # restrict to these example dirs
    optimization_level: list[int] = attrs.field(factory=list)  # restrict to these -O levels
def main(options: CompileAllOptions) -> None:
    """Compile all (or selected) examples at each optimization level in parallel.

    Prints per-case pass/fail, dumps failure output, updates sizes.txt, and
    exits with the number of failures as the process return code.
    """
    limit_to = options.limit_to
    if limit_to:
        to_compile = [Path(x).resolve() for x in limit_to]
    else:
        # every sub-directory of a contract root that contains python files
        to_compile = [
            item
            for root in CONTRACT_ROOT_DIRS
            for item in root.iterdir()
            if item.is_dir() and any(item.glob("*.py"))
        ]
    failures = list[tuple[str, str]]()
    program_sizes = ProgramSizes()
    # use selected opt levels, but retain original order
    opt_levels = [
        o
        for o in DEFAULT_OPTIMIZATION
        if o in (options.optimization_level or DEFAULT_OPTIMIZATION)
    ]
    with ProcessPoolExecutor() as executor:
        args = [(case, level) for level in opt_levels for case in to_compile]
        for compilation_result, level in executor.map(_compile_for_level, args):
            rel_path = compilation_result.rel_path
            case_name = f"{rel_path} -O{level}"
            for bin_file in compilation_result.bin_files:
                program_sizes.add_at_level(level, bin_file.with_suffix(".teal"), bin_file)
            if compilation_result.ok:
                print(f"✅ {case_name}")
            else:
                print(f"💥 {case_name}", file=sys.stderr)
                failures.append((case_name, compilation_result.stdout))
    if failures:
        print("Compilation failures:")
        for name, stdout in sorted(failures, key=operator.itemgetter(0)):
            print(f" ~~~ {name} ~~~ ")
            # keep tracebacks but suppress other debug noise
            print(
                "\n".join(
                    ln
                    for ln in stdout.splitlines()
                    if (ln.startswith("debug: Traceback ") or not ln.startswith("debug: "))
                )
            )
    print("Updating sizes.txt")
    if limit_to or options.optimization_level:
        print("Loading existing sizes.txt")
        # load existing sizes for non-default options
        merged = ProgramSizes.load(SIZE_TALLY_PATH.read_text("utf8"))
        for program, sizes in program_sizes.sizes.items():
            for o, size in sizes.items():
                merged.sizes[program][o] = size
        program_sizes = merged
    SIZE_TALLY_PATH.write_text(str(program_sizes))
    sys.exit(len(failures))
if __name__ == "__main__":
    # parse CLI arguments directly into a CompileAllOptions instance
    parser = argparse.ArgumentParser()
    parser.add_argument("limit_to", type=Path, nargs="*", metavar="LIMIT_TO")
    parser.add_argument(
        "-O",
        "--optimization-level",
        action="extend",
        type=int,
        choices=DEFAULT_OPTIMIZATION,
        nargs="+",
        help="Set optimization level of output TEAL / AVM bytecode",
    )
    options = CompileAllOptions()
    parser.parse_args(namespace=options)
    main(options)
|
algorandfoundation/puya
|
scripts/compile_all_examples.py
|
Python
|
NOASSERTION
| 12,912 |
import argparse
import contextlib
import json
import typing
from collections.abc import Iterator
from dataclasses import dataclass
from functools import cached_property
from pathlib import Path
from algosdk.atomic_transaction_composer import (
AccountTransactionSigner,
AtomicTransactionComposer,
TransactionWithSigner,
)
from algosdk.kmd import KMDClient
from algosdk.transaction import ApplicationCallTxn, OnComplete, create_dryrun
from algosdk.v2client.algod import AlgodClient
# localnet (sandbox/algokit-style) default endpoints and credentials
DEFAULT_ALGOD_ADDRESS = "http://localhost:4001"
DEFAULT_KMD_ADDRESS = "http://localhost:4002"
DEFAULT_TOKEN = "a" * 64
DEFAULT_KMD_WALLET_NAME = "unencrypted-default-wallet"
DEFAULT_KMD_WALLET_PASSWORD = ""
def main(approval_path: Path, clear_path: Path) -> None:
    """Dry-run an app-create call with the given compiled programs and print the response."""
    response = dryrun_create(approval_path.read_bytes(), clear_path.read_bytes())
    print(json.dumps(response, indent=4))
def dryrun_create(
    approval_binary: bytes,
    clear_binary: bytes,
) -> dict[str, typing.Any]:
    """Submit an app-create transaction and return algod's dryrun response for it.

    NOTE(review): the group is executed for real (atc.execute) before the dryrun
    request is built from the gathered signatures — confirm this double
    submission is intentional.
    """
    algod = AlgodClient(algod_token=DEFAULT_TOKEN, algod_address=DEFAULT_ALGOD_ADDRESS)
    # use the first localnet account as sender/signer
    account, *_ = get_accounts()
    atc = AtomicTransactionComposer()
    atc.add_transaction(
        TransactionWithSigner(
            txn=ApplicationCallTxn(
                sender=account.address,
                sp=algod.suggested_params(),
                index=0,  # app id 0 == application creation
                on_complete=OnComplete.NoOpOC,
                approval_program=approval_binary,
                clear_program=clear_binary,
            ),
            signer=account.signer,
        )
    )
    atc.execute(algod, 4)
    signed = atc.gather_signatures()
    dryrun_request = create_dryrun(algod, signed)
    return algod.dryrun(dryrun_request.dictify())
@dataclass(kw_only=True)
class LocalAccount:
    """LocalAccount is a simple dataclass to hold a localnet account details"""

    #: The address of a localnet account
    address: str
    #: The base64 encoded private key of the account
    private_key: str

    #: An AccountTransactionSigner that can be used as a TransactionSigner
    @cached_property
    def signer(self) -> AccountTransactionSigner:
        # built lazily and cached so the signer is only constructed when needed
        return AccountTransactionSigner(self.private_key)
def get_accounts(
    kmd_address: str = DEFAULT_KMD_ADDRESS,
    kmd_token: str = DEFAULT_TOKEN,
    wallet_name: str = DEFAULT_KMD_WALLET_NAME,
    wallet_password: str = DEFAULT_KMD_WALLET_PASSWORD,
) -> list[LocalAccount]:
    """gets all the accounts in the localnet kmd, defaults
    to the `unencrypted-default-wallet` created on private networks automatically"""
    kmd = KMDClient(kmd_token, kmd_address)
    # keys must be exported while the wallet handle is still held
    with wallet_handle_by_name(kmd, wallet_name, wallet_password) as wallet_handle:
        return [
            LocalAccount(
                address=address,
                private_key=kmd.export_key(
                    wallet_handle,
                    wallet_password,
                    address,
                ),
            )
            for address in kmd.list_keys(wallet_handle)
        ]
@contextlib.contextmanager
def wallet_handle_by_name(kmd: KMDClient, wallet_name: str, wallet_password: str) -> Iterator[str]:
    """Context manager yielding an initialised KMD wallet handle, released on exit."""
    matching_ids = (w["id"] for w in kmd.list_wallets() if w["name"] == wallet_name)
    try:
        wallet_id = next(matching_ids)
    except StopIteration:
        raise Exception(f"Wallet not found: {wallet_name}") from None
    wallet_handle = kmd.init_wallet_handle(wallet_id, wallet_password)
    try:
        yield wallet_handle
    finally:
        # always release the handle, even if the body raises
        kmd.release_wallet_handle(wallet_handle)
if __name__ == "__main__":
    # CLI: dry_run_create <approval.bin> <clear.bin>
    parser = argparse.ArgumentParser(prog="dry_run_create")
    parser.add_argument("approval_file", type=Path, metavar="FILE")
    parser.add_argument("clear_file", type=Path, metavar="FILE")
    args = parser.parse_args()
    main(args.approval_file, args.clear_file)
|
algorandfoundation/puya
|
scripts/dry_run_create.py
|
Python
|
NOASSERTION
| 3,852 |
#!/usr/bin/env python3
import json
import subprocess
from pathlib import Path
from puya import log
from puya.ussemble.op_spec_models import ImmediateEnum, ImmediateKind, OpSpec
from scripts.transform_lang_spec import (
LanguageSpec,
)
logger = log.get_logger(__name__)
# repository root (this script lives in <root>/scripts)
VCS_ROOT = Path(__file__).parent.parent
def main() -> None:
    """Regenerate the assembler op-spec module from the transformed lang spec."""
    spec_path = VCS_ROOT / "langspec.puya.json"
    lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
    lang_spec = LanguageSpec.from_json(lang_spec_json)
    ops = build_op_spec(lang_spec)
    output_ops(ops)
def build_op_spec(lang_spec: LanguageSpec) -> dict[str, OpSpec]:
    """Build a name -> OpSpec mapping for every op, ordered by opcode value."""
    result: dict[str, OpSpec] = {}
    for op in sorted(lang_spec.ops.values(), key=lambda x: x.code):
        immediates: list[ImmediateKind | ImmediateEnum] = []
        for imm in op.immediate_args:
            if imm.arg_enum is not None:
                # enum immediates carry the full name -> value mapping
                enum_codes = {e.name: e.value for e in lang_spec.arg_enums[imm.arg_enum]}
                immediates.append(ImmediateEnum(codes=enum_codes))
            else:
                immediates.append(ImmediateKind[imm.immediate_type.name])
        result[op.name] = OpSpec(name=op.name, code=op.code, immediates=immediates)
    return result
def output_ops(
    ops: dict[str, OpSpec],
) -> None:
    """Write the generated OP_SPECS module and format it with ruff."""
    # module body is the repr of the mapping; the import line supplies the model
    # names that those reprs reference
    file: list[str] = [
        "from puya.ussemble.op_spec_models import ImmediateEnum, ImmediateKind, OpSpec",
        f"OP_SPECS = {ops!r}",
    ]
    output_path = VCS_ROOT / "src" / "puya" / "ussemble" / "op_spec.py"
    output_path.write_text("\n".join(file), encoding="utf-8")
    subprocess.run(["ruff", "format", str(output_path)], check=True, cwd=VCS_ROOT)
if __name__ == "__main__":
    # script entry point
    main()
|
algorandfoundation/puya
|
scripts/generate_assemble_op_spec.py
|
Python
|
NOASSERTION
| 1,735 |
import builtins
import json
import keyword
import subprocess
import textwrap
from collections.abc import Iterable, Iterator
from pathlib import Path
from puya import log
from puya.ir.avm_ops_models import (
AVMOpData,
DynamicVariants,
ImmediateKind,
OpSignature,
RunMode,
StackType,
Variant,
)
from puya.utils import normalise_path_to_str
from scripts import transform_lang_spec as langspec
logger = log.get_logger(__name__)
# repository root (this script lives in <root>/scripts)
VCS_ROOT = Path(__file__).parent.parent
# only these immediate kinds can be represented in the generated AVMOp enum
SUPPORTED_IMMEDIATE_KINDS = (langspec.ImmediateKind.uint8, langspec.ImmediateKind.arg_enum)
# symbolic TEAL operator -> identifier fragment used for generated enum members
operator_names = {
    # bool
    "&&": "and",
    "||": "or",
    "!": "not",
    # compare
    "==": "eq",
    "!=": "neq",
    "<": "lt",
    "<=": "lte",
    ">": "gt",
    ">=": "gte",
    # bitwise
    "&": "bitwise_and",
    "^": "bitwise_xor",
    "|": "bitwise_or",
    "~": "bitwise_not",
    # math
    "+": "add",
    "-": "sub",
    "*": "mul",
    "/": "div_floor",
    "%": "mod",
}
# ops the compiler emits/handles itself (control flow, stack scheduling,
# constant blocks, halting) and so must not be exposed as IR intrinsics
EXCLUDED_OPCODES = {
    # flow control
    "bnz",
    "bz",
    "b",
    "callsub",
    "retsub",
    "proto",
    "switch",
    "match",
    # pure stack manipulation
    "intc",
    *[f"intc_{i}" for i in range(4)],
    "bytec",
    *[f"bytec_{i}" for i in range(4)],
    "pushbytes",
    "pushbytess",
    "pushint",
    "pushints",
    "frame_dig",
    "frame_bury",
    "bury",
    "cover",
    "dig",
    "dup",
    "dup2",
    "dupn",
    "pop",
    "popn",
    "swap",
    "uncover",
    # modifies what other op codes with immediates point to
    "intcblock",
    "bytecblock",
    # halting
    "err",
    "return",
}
def as_list_str(values: Iterable[str]) -> str | None:
inner = ", ".join(values)
if not inner:
return None
else:
return f"[{inner}]"
# generated member names must not shadow python builtins
BUILTIN_NAMES = frozenset(dir(builtins))
def get_op_name(op: langspec.Op) -> str:
    """Derive a valid python identifier for an op, mangling symbolic operators."""
    code = op.name
    if code.isidentifier():
        name = code
    elif code.startswith("b"):
        # a leading "b" on a symbolic op marks the bytes variant, e.g. "b+" -> add_bytes
        name = operator_names[code[1:]] + "_bytes"
    else:
        name = operator_names[code]
    # avoid keywords and builtin shadowing with a trailing underscore
    if keyword.iskeyword(name) or keyword.issoftkeyword(name) or name in BUILTIN_NAMES:
        name += "_"
    return name
def generate_op_node(
    enums: dict[str, list[langspec.ArgEnum]], op_name: str, op: langspec.Op
) -> Iterator[str]:
    """Yield the enum-member source lines (assignment + docstring) for one op."""
    assert not op.halts, "op halts"
    # locate the single immediate (if any) whose value changes the stack signature
    dynamic_im_index: int | None = None
    for idx, im in enumerate(op.immediate_args):
        if im.modifies_stack_input is not None:
            assert im.modifies_stack_output is None, "💀"
            assert dynamic_im_index is None, "🪦"
            dynamic_im_index = idx
        elif im.modifies_stack_output is not None:
            assert dynamic_im_index is None, "🪦"
            dynamic_im_index = idx
    immediate_types = tuple(get_immediate_type(im) for im in op.immediate_args)
    op_code = op.name
    cost = op.cost.value
    variant: DynamicVariants | Variant
    stack_args = [get_stack_type(arg.stack_type) for arg in op.stack_inputs]
    stack_returns = [get_stack_type(out.stack_type) for out in op.stack_outputs]
    if dynamic_im_index is None:
        # fixed signature: a single variant covers the op
        variant = Variant(
            enum=None,
            signature=OpSignature(
                args=stack_args,
                returns=stack_returns,
            ),
            supported_modes=_map_run_mode(op.mode),
            min_avm_version=op.min_avm_version,
        )
    else:
        # signature depends on an arg-enum immediate: emit one variant per enum value
        im = op.immediate_args[dynamic_im_index]
        assert im.arg_enum is not None, "💥"
        variant = DynamicVariants(
            immediate_index=dynamic_im_index,
            variant_map={},
        )
        if im.modifies_stack_input is not None:
            list_index = im.modifies_stack_input
            to_mod = stack_args
        else:
            assert im.modifies_stack_output is not None
            list_index = im.modifies_stack_output
            to_mod = stack_returns
        for arg_enum in enums[im.arg_enum]:
            assert arg_enum.stack_type is not None, "🤕"
            # mutate the shared list in place, then snapshot it via list(...)
            to_mod[list_index] = get_stack_type(arg_enum.stack_type)
            variant.variant_map[arg_enum.name] = Variant(
                enum=arg_enum.name,
                signature=OpSignature(
                    args=list(stack_args),
                    returns=list(stack_returns),
                ),
                supported_modes=_map_run_mode(arg_enum.mode),
                min_avm_version=arg_enum.min_avm_version,
            )
    data = AVMOpData(
        op_code=op_code,
        immediate_types=immediate_types,
        variants=variant,
        cost=cost,
        min_avm_version=op.min_avm_version,
        supported_modes=_map_run_mode(op.mode),
    )
    # rely on repr round-tripping of the data model
    yield f"{op_name} = {data!r}"
    if op.doc:
        yield '"""'
        for idx, doc_ln in enumerate(op.doc):
            if idx > 0:
                yield ""
            # wrap so the text fits in 99 cols once indented by 4 in the class body
            yield from textwrap.wrap(doc_ln, width=99 - 4)
        yield '"""'
        yield ""
def _map_run_mode(mode: langspec.RunMode) -> RunMode:
    """Translate a lang-spec run mode into the compiler's RunMode enum."""
    if mode is langspec.RunMode.app:
        return RunMode.app
    if mode is langspec.RunMode.sig:
        # the compiler names signature mode "lsig"
        return RunMode.lsig
    if mode is langspec.RunMode.any:
        return RunMode.any
    raise ValueError(f"Unsupported mode {mode}")
def get_stack_type(stack_type: langspec.StackType) -> StackType:
    """Collapse sized bytes variants (bytes_8, bytes_32, ...) into plain bytes."""
    name = stack_type.name
    return StackType.bytes if name.startswith("bytes_") else StackType[name]
def get_immediate_type(immediate: langspec.Immediate) -> ImmediateKind:
    # only uint8 and arg_enum immediates are representable in the generated models
    assert immediate.immediate_type in SUPPORTED_IMMEDIATE_KINDS, (
        "bad immediate kind",
        immediate.immediate_type,
    )
    return ImmediateKind[immediate.immediate_type.name]
def generate_file(lang_spec: langspec.LanguageSpec) -> Iterator[str]:
    """Yield the source lines of the generated avm_ops module (AVMOp enum)."""
    script_path = normalise_path_to_str(Path(__file__).relative_to(VCS_ROOT))
    # NOTE: the preamble is emitted verbatim (after strip), including the
    # AVMOp.__new__ machinery that unpacks AVMOpData into enum attributes
    preamble = f"""
# AUTO GENERATED BY {script_path}, DO NOT EDIT
import enum
from collections.abc import Sequence
from puya.errors import InternalError
from puya.ir.avm_ops_models import (
    AVMOpData,
    DynamicVariants,
    ImmediateKind,
    OpSignature,
    RunMode,
    StackType,
    Variant
)
class AVMOp(enum.StrEnum):
    code: str
    immediate_types: Sequence[ImmediateKind]
    _variants: Variant | DynamicVariants
    cost: int | None
    min_avm_version: int
    def __new__(cls, data: AVMOpData | str) -> "AVMOp":
        # the weird union type on data && then assert,
        # is to shut mypy up when it wrongly infers the arg type of
        # e.g. AVMOp("+") to be invalid
        assert isinstance(data, AVMOpData)
        op_code = data.op_code
        obj = str.__new__(cls, op_code)
        obj._value_ = op_code
        obj.code = op_code
        obj.immediate_types = tuple(data.immediate_types)
        obj._variants = data.variants  # noqa: SLF001
        obj.cost = data.cost
        obj.min_avm_version = data.min_avm_version
        return obj
    def get_variant(self, immediates: Sequence[str | int]) -> Variant:
        if isinstance(self._variants, Variant):
            return self._variants
        im = immediates[self._variants.immediate_index]
        assert isinstance(im, str)
        try:
            return self._variants.variant_map[im]
        except KeyError as ex:
            raise InternalError(f"Unknown immediate for {{self.code}}: {{im}}") from ex
"""
    yield from preamble.strip().splitlines()
    yield ""
    ops_by_name = {}
    for op in lang_spec.ops.values():
        if op.name in EXCLUDED_OPCODES:
            logger.info(f"Skipping {op.name} due to specific exclusion")
        else:
            ops_by_name[get_op_name(op)] = op
    # emit members in alphabetical order, indented to sit inside the class body
    for op_name, op in sorted(ops_by_name.items()):
        yield textwrap.indent(
            "\n".join(generate_op_node(lang_spec.arg_enums, op_name, op)), " " * 4
        )
def main() -> None:
    """Regenerate src/puya/ir/avm_ops.py from the transformed lang spec."""
    spec_path = VCS_ROOT / "langspec.puya.json"
    lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
    lang_spec = langspec.LanguageSpec.from_json(lang_spec_json)
    output = "\n".join(generate_file(lang_spec))
    ast_gen_path = VCS_ROOT / "src" / "puya" / "ir" / "avm_ops.py"
    ast_gen_path.write_text(output, encoding="utf-8")
    subprocess.run(["ruff", "format", str(ast_gen_path)], check=True, cwd=VCS_ROOT)
if __name__ == "__main__":
    # script entry point
    main()
|
algorandfoundation/puya
|
scripts/generate_avm_ops.py
|
Python
|
NOASSERTION
| 8,430 |
#!/usr/bin/env python3
import subprocess
import sys
import typing
from collections.abc import Callable
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent / "src" / "puyapy" / "_vendor"))
import attrs
import mypy.build
import mypy.nodes
from mypy.visitor import NodeVisitor
from puyapy.compile import get_mypy_options
from puyapy.parse import parse_and_typecheck
# key repository locations, resolved relative to this script
SCRIPTS_DIR = Path(__file__).parent
VCS_ROOT = SCRIPTS_DIR.parent
SRC_DIR = VCS_ROOT / "src"
DOCS_DIR = VCS_ROOT / "docs"
STUBS_DIR = VCS_ROOT / "stubs" / "algopy-stubs"
STUBS_DOC_DIR = DOCS_DIR / "algopy-stubs"
@attrs.define
class ModuleImports:
    # Records how a single module is imported by the stubs being processed.
    from_imports: dict[str, str | None] = attrs.field(factory=dict)  # name -> alias (or None)
    import_all: bool = False  # a `from module import *` was seen
    import_module: bool = False  # a plain `import module` was seen
def main() -> None:
    """Build combined doc stubs from the algopy stubs, then run sphinx."""
    manager, _ = parse_and_typecheck([STUBS_DIR], get_mypy_options())
    output_doc_stubs(manager)
    run_sphinx()
def output_doc_stubs(manager: mypy.build.BuildManager) -> None:
    """Write combined .pyi doc stubs for algopy and each public submodule."""
    # parse and output reformatted __init__.pyi
    stub = DocStub.process_module(manager, "algopy")
    algopy_direct_imports = stub.collected_imports["algopy"]
    # remove any algopy imports that are now defined in __init__.py itself
    output_combined_stub(stub, STUBS_DOC_DIR / "__init__.pyi")
    # remaining imports from algopy are other public modules
    # parse and output them too
    for other_stub_name in algopy_direct_imports.from_imports:
        stub = DocStub.process_module(manager, f"algopy.{other_stub_name}")
        output_combined_stub(stub, STUBS_DOC_DIR / f"{other_stub_name}.pyi")
def output_combined_stub(stubs: "DocStub", output: Path) -> None:
    """Assemble imports, __all__, and collected symbol sources into one stub file."""
    # remove algopy imports that have been inlined
    lines = ["# ruff: noqa: A001, E501, F403, PYI021, PYI034, W291"]
    rexported = list[str]()
    for module, imports in stubs.collected_imports.items():
        if imports.import_module:
            lines.append(f"import {module}")
        if imports.from_imports:
            # aliased from-imports are re-exported through __all__
            rexported.extend(filter(None, imports.from_imports.values()))
            from_imports = ", ".join(_name_as(k, v) for k, v in imports.from_imports.items())
            lines.append(f"from {module} import {from_imports}")
    lines.extend(["", ""])
    # assemble __all__
    lines.append("__all__ = [")
    for symbol in (*rexported, *stubs.collected_symbols):
        if symbol.startswith("_"):
            continue
        lines.append(f' "{symbol}",')
    lines.append("]")
    # assemble symbols
    lines.extend(stubs.collected_symbols.values())
    # output and linting
    output.parent.mkdir(parents=True, exist_ok=True)
    output.write_text("\n".join(lines))
    subprocess.run(["ruff", "format", str(output)], check=True, cwd=VCS_ROOT)
    subprocess.run(["ruff", "check", "--fix", str(output)], check=True, cwd=VCS_ROOT)
def run_sphinx() -> None:
    """Build the docs; -W escalates warnings to errors, -E forces a full re-read."""
    subprocess.run(
        ["sphinx-build", ".", "_build", "-W", "--keep-going", "-n", "-E"], check=True, cwd=DOCS_DIR
    )
@attrs.define(kw_only=True)
class ClassBases:
    # A class definition split into bases kept as-is vs protocol bases to inline.
    klass: mypy.nodes.ClassDef
    bases: list[mypy.nodes.Expression]  # bases preserved in the output stub
    protocol_bases: list[tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]]  # members get copied in
@attrs.define
class SymbolCollector(NodeVisitor[None]):
    """Collects the source text of every top-level symbol in a stub module."""

    file: mypy.nodes.MypyFile
    read_source: Callable[[str], list[str] | None]
    all_classes: dict[str, tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]]
    inlined_protocols: dict[str, set[str]]
    symbols: dict[str, str] = attrs.field(factory=dict)
    last_stmt: mypy.nodes.Statement | None = None

    def get_src(
        self, node: mypy.nodes.Context, *, path: str | None = None, entire_lines: bool = True
    ) -> str:
        """Return source text for *node*; trim to exact columns unless entire_lines."""
        columns: tuple[int, int] | None = None
        if node.end_column and not entire_lines:
            columns = (node.column, node.end_column)
        return self.get_src_from_lines(node.line, node.end_line or node.line, path, columns)

    def get_src_from_lines(
        self,
        line: int,
        end_line: int,
        path: str | None = None,
        columns: tuple[int, int] | None = None,
    ) -> str:
        """Slice 1-based inclusive source lines from *path* (default: this file)."""
        src = self.read_source(path or self.file.path)
        if not src:
            raise Exception("Could not get src")
        lines = src[line - 1 : end_line]
        if columns:
            # trim the end first so a single-line span keeps valid offsets
            lines[-1] = lines[-1][: columns[1]]
            lines[0] = lines[0][columns[0] :]
        return "\n".join(lines)

    def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> None:
        for stmt in o.defs:
            stmt.accept(self)
            # remembered so a following docstring-expression can be attached
            self.last_stmt = stmt

    def _get_bases(self, klass: mypy.nodes.ClassDef) -> ClassBases:
        """Split base classes into kept bases and inlinable private protocols."""
        bases = list[mypy.nodes.Expression]()
        inline = list[tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]]()
        for base in klass.base_type_exprs:
            if (
                isinstance(base, mypy.nodes.NameExpr)
                and _should_inline_module(base.fullname)
                and self._is_protocol(base.fullname)
            ):
                inline.append(self.all_classes[base.fullname])
            else:
                bases.append(base)
        return ClassBases(klass=klass, bases=bases, protocol_bases=inline)

    def _get_inlined_class(self, klass: ClassBases) -> str:
        """Render a class with the members of its private protocol bases copied in."""
        # TODO: what about class keywords
        klass_str = f"class {klass.klass.name}"
        if klass.bases:
            klass_str += f"({', '.join(self.get_src(b, entire_lines=False) for b in klass.bases)})"
        src = [f"{klass_str}:"]
        src.extend(self.get_src(member) for member in klass.klass.defs.body)
        for base_class_file, base_class in klass.protocol_bases:
            self.inlined_protocols.setdefault(base_class_file.fullname, set()).add(base_class.name)
            src.extend(
                self.get_src(member, path=base_class_file.path) for member in base_class.defs.body
            )
        return "\n".join(src)

    def visit_class_def(self, o: mypy.nodes.ClassDef) -> None:
        self.all_classes[o.fullname] = self.file, o
        class_bases = self._get_bases(o)
        if class_bases.protocol_bases:
            self.symbols[o.name] = self._get_inlined_class(class_bases)
        else:
            self.symbols[o.name] = self.get_src(o)

    def visit_func_def(self, o: mypy.nodes.FuncDef) -> None:
        self.symbols[o.name] = self.get_src(o)

    def visit_overloaded_func_def(self, o: mypy.nodes.OverloadedFuncDef) -> None:
        """Capture all overloads, appending the best-documented signature last."""
        line = o.line
        end_line = o.end_line or o.line
        for item in o.items:
            end_line = max(end_line, item.end_line or item.line)
        overloaded_src = self.get_src_from_lines(line, end_line)
        best_sig = _get_documented_overload(o)
        if not best_sig:
            src = overloaded_src
        else:
            best_sig_src = self.get_src(best_sig)
            src = f"{overloaded_src}\n{best_sig_src}"
        self.symbols[o.name] = src

    def visit_assignment_stmt(self, o: mypy.nodes.AssignmentStmt) -> None:
        try:
            (lvalue,) = o.lvalues
        except ValueError as ex:
            raise ValueError(f"Multi assignments are not supported: {o}") from ex
        if not isinstance(lvalue, mypy.nodes.NameExpr):
            raise TypeError(f"Multi assignments are not supported: {lvalue}")
        # find actual rvalue src location by taking the entire statement and subtracting the lvalue
        loc = mypy.nodes.Context()
        loc.set_line(o)
        if lvalue.end_column:
            loc.column = lvalue.end_column
        self.symbols[lvalue.name] = self.get_src(loc)

    def visit_expression_stmt(self, o: mypy.nodes.ExpressionStmt) -> None:
        # a string expression directly after an assignment is its docstring
        if isinstance(o.expr, mypy.nodes.StrExpr) and isinstance(
            self.last_stmt, mypy.nodes.AssignmentStmt
        ):
            (lvalue,) = self.last_stmt.lvalues
            if isinstance(lvalue, mypy.nodes.NameExpr):
                self.symbols[lvalue.name] += "\n" + self.get_src(o.expr)

    def _is_protocol(self, fullname: str) -> bool:
        """True if *fullname* names an already-seen class that is a Protocol."""
        try:
            klass = self.all_classes[fullname]
        except KeyError:
            return False
        info: mypy.nodes.TypeInfo = klass[1].info
        return info.is_protocol
def _get_documented_overload(o: mypy.nodes.OverloadedFuncDef) -> mypy.nodes.FuncDef | None:
    """Pick the documented overload with the most arguments, or None."""
    best_overload: mypy.nodes.FuncDef | None = None
    for overload in o.items:
        match overload:
            case mypy.nodes.Decorator(func=func_def):
                pass
            case mypy.nodes.FuncDef() as func_def:
                pass
            case _:
                raise Exception("Only function overloads supported")
        docstring = func_def.docstring
        # this is good enough until a more complex case arises
        if docstring and (
            not best_overload or len(func_def.arguments) > len(best_overload.arguments)
        ):
            best_overload = func_def
    return best_overload
@attrs.define
class ImportCollector(NodeVisitor[None]):
    """Records import statements into a shared module -> ModuleImports mapping."""

    collected_imports: dict[str, ModuleImports]

    def get_imports(self, module_id: str) -> ModuleImports:
        # get-or-create the imports record for a module
        try:
            imports = self.collected_imports[module_id]
        except KeyError:
            imports = self.collected_imports[module_id] = ModuleImports()
        return imports

    def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> None:
        for stmt in o.defs:
            stmt.accept(self)

    def visit_import_from(self, o: mypy.nodes.ImportFrom) -> None:
        imports = self.get_imports(o.id)
        for name, name_as in o.names:
            imports.from_imports[name] = name_as

    def visit_import(self, o: mypy.nodes.Import) -> None:
        for name, name_as in o.ids:
            if name != (name_as or name):
                raise Exception("Aliasing symbols in stubs is not supported")
            imports = self.get_imports(name)
            imports.import_module = True
@attrs.define
class DocStub(NodeVisitor[None]):
    """Builds a combined doc stub for a module, inlining private algopy modules."""

    read_source: Callable[[str], list[str] | None]
    file: mypy.nodes.MypyFile
    modules: dict[str, mypy.nodes.MypyFile]
    parsed_modules: dict[str, SymbolCollector] = attrs.field(factory=dict)
    all_classes: dict[str, tuple[mypy.nodes.MypyFile, mypy.nodes.ClassDef]] = attrs.field(
        factory=dict
    )
    collected_imports: dict[str, ModuleImports] = attrs.field(factory=dict)
    inlined_protocols: dict[str, set[str]] = attrs.field(factory=dict)
    collected_symbols: dict[str, str] = attrs.field(factory=dict)

    @classmethod
    def process_module(cls, manager: mypy.build.BuildManager, module_id: str) -> typing.Self:
        """Parse *module_id* from the mypy build manager and collect its doc stub."""
        read_source = manager.errors.read_source
        assert read_source
        modules = manager.modules
        module: mypy.nodes.MypyFile = modules[module_id]
        stub = cls(read_source=read_source, file=module, modules=modules)
        module.accept(stub)
        stub._remove_inlined_symbols()  # noqa: SLF001
        return stub

    def _get_module(self, module_id: str) -> SymbolCollector:
        # parse each referenced module at most once, caching the collector
        try:
            return self.parsed_modules[module_id]
        except KeyError:
            file = self.modules[module_id]
            self.parsed_modules[module_id] = collector = SymbolCollector(
                file=file,
                read_source=self.read_source,
                all_classes=self.all_classes,
                inlined_protocols=self.inlined_protocols,
            )
            file.accept(collector)
            self._collect_imports(file)
            return collector

    def _collect_imports(self, o: mypy.nodes.Node) -> None:
        o.accept(ImportCollector(self.collected_imports))
        self._remove_inlined_symbols()

    def _remove_inlined_symbols(self) -> None:
        """Drop imports whose names are now defined (inlined) in the stub itself."""
        for module, imports in self.collected_imports.items():
            inlined_protocols = self.inlined_protocols.get(module, ())
            if imports.import_module and module in self.collected_symbols:
                raise Exception(f"Symbol/import collision: {module}")
            for name, name_as in list(imports.from_imports.items()):
                if name in inlined_protocols:
                    print(f"Removed inlined protocol: {name}")
                    del imports.from_imports[name]
                    del self.collected_symbols[name]
                elif name in self.collected_symbols:
                    if name_as is None:
                        del imports.from_imports[name]
                    else:
                        print(f"Symbol/import collision: from {module} import {name} as {name_as}")

    def visit_mypy_file(self, o: mypy.nodes.MypyFile) -> None:
        for stmt in o.defs:
            stmt.accept(self)
        self._add_all_symbols(o.fullname)

    def visit_import_from(self, o: mypy.nodes.ImportFrom) -> None:
        if not _should_inline_module(o.id):
            self._collect_imports(o)
            return
        # inline only the names actually imported from the private module
        module = self._get_module(o.id)
        name_mapping = dict(o.names)
        for name in module.symbols:
            try:
                name_as = name_mapping[name]
            except KeyError:
                continue
            if name != (name_as or name):
                raise Exception("Aliasing symbols in stubs is not supported")
            self.add_symbol(module, name)

    def visit_import_all(self, o: mypy.nodes.ImportAll) -> None:
        if _should_inline_module(o.id):
            self._add_all_symbols(o.id)
        else:
            self._collect_imports(o)

    def _add_all_symbols(self, module_id: str) -> None:
        module = self._get_module(module_id)
        for sym in module.symbols:
            self.add_symbol(module, sym)

    def visit_import(self, o: mypy.nodes.Import) -> None:
        self._collect_imports(o)

    def add_symbol(self, module: SymbolCollector, name: str) -> None:
        """Copy a symbol's source in, erroring on conflicting duplicate definitions."""
        lines = module.symbols[name]
        existing = self.collected_symbols.get(name)
        if existing is not None and existing != lines:
            raise Exception(f"Duplicate definitions are not supported: {name}\n{lines}")
        self.collected_symbols[name] = lines
def _name_as(name: str, name_as: str | None) -> str:
if name_as is None:
return name
return f"{name} as {name_as}"
def _should_inline_module(module_id: str) -> bool:
return module_id.startswith("algopy._")
if __name__ == "__main__":
    # script entry point
    main()
|
algorandfoundation/puya
|
scripts/generate_docs.py
|
Python
|
NOASSERTION
| 14,331 |
#!/usr/bin/env python3
import subprocess
from pathlib import Path
SCRIPTS_DIR = Path(__file__).parent
VCS_ROOT = SCRIPTS_DIR.parent
# name of the embedded library package under src/
LIB_NAME = "_puya_lib"
def main() -> None:
    """Compile the embedded puya lib to AWST JSON with source paths normalised."""
    # compile puya lib
    # normalize source_location.path
    # save
    subprocess.run(["puyapy", "--output-awst-json", f"src/{LIB_NAME}"], check=True, cwd=VCS_ROOT)
    awst_path = VCS_ROOT / "module.awst.json"
    puya_lib_path = VCS_ROOT / "src" / LIB_NAME
    output_path = VCS_ROOT / "src" / "puya" / "ir" / "_puya_lib.awst.json"
    replace_awst = awst_path.read_text()
    for lib_path in puya_lib_path.glob("*.py"):
        # textual replace of machine-specific absolute paths with null;
        # backslashes are doubled to match JSON string escaping on Windows
        path_as_str = str(lib_path).replace("\\", "\\\\")
        find_str = f'"file": "{path_as_str}",'
        replace_str = '"file": null,'
        replace_awst = replace_awst.replace(find_str, replace_str)
    output_path.write_text(replace_awst)
    # remove the intermediate compiler output
    awst_path.unlink(missing_ok=True)
if __name__ == "__main__":
    # script entry point
    main()
|
algorandfoundation/puya
|
scripts/generate_puya_lib.py
|
Python
|
NOASSERTION
| 935 |
#!/usr/bin/env python3
import builtins
import copy
import json
import keyword
import subprocess
import textwrap
import typing
from collections.abc import Iterable, Iterator, Sequence
from pathlib import Path
import attrs
from puya import log
from puya.algo_constants import SUPPORTED_AVM_VERSIONS
from puyapy.awst_build import pytypes
from puyapy.awst_build.intrinsic_models import FunctionOpMapping, OpMappingWithOverloads
from puyapy.awst_build.utils import snake_case
from scripts.transform_lang_spec import (
ArgEnum,
Immediate,
ImmediateKind,
LanguageSpec,
Op,
StackType,
StackValue,
)
logger = log.get_logger(__name__)
INDENT = " " * 4
# repository root (this script lives in <root>/scripts)
VCS_ROOT = Path(__file__).parent.parent
# stubs target the lowest AVM version the compiler supports
MIN_SUPPORTED_VERSION = min(SUPPORTED_AVM_VERSIONS)
# lang-spec group headings whose generated enum class name needs manual casing
PYTHON_ENUM_CLASS = {
    "Mimc Configurations": "MiMCConfigurations",
}
# stub pytype -> literal-only type also accepted for that parameter (None = no literal form)
PYTYPE_TO_LITERAL: dict[pytypes.PyType, pytypes.LiteralOnlyType | None] = {
    pytypes.BytesType: pytypes.BytesLiteralType,
    pytypes.UInt64Type: pytypes.IntLiteralType,
    pytypes.AccountType: None,  # pytypes.StrLiteralType, # TODO: should we enable this?
    pytypes.BigUIntType: pytypes.IntLiteralType,
    pytypes.BoolType: None,  # already a Python type
    pytypes.ApplicationType: pytypes.IntLiteralType,
    pytypes.AssetType: pytypes.IntLiteralType,
    pytypes.TransactionTypeType: None,
    pytypes.OnCompleteActionType: None,
}
# reverse lookup: PyType instance -> its "pytypes.<Name>" source expression
PYTYPE_REPR = {
    value: f"pytypes.{key}"
    for key, value in pytypes.__dict__.items()
    if isinstance(value, pytypes.PyType)
}
# AVM stack type -> python types exposed in the stubs (first entry is primary)
STACK_TYPE_MAPPING: dict[StackType, Sequence[pytypes.PyType]] = {
    StackType.address_or_index: [pytypes.AccountType, pytypes.UInt64Type],
    StackType.application: [pytypes.ApplicationType, pytypes.UInt64Type],
    StackType.asset: [pytypes.AssetType, pytypes.UInt64Type],
    StackType.bytes: [pytypes.BytesType],
    StackType.bytes_8: [pytypes.BytesType],
    StackType.bytes_32: [pytypes.BytesType],
    StackType.bytes_33: [pytypes.BytesType],
    StackType.bytes_64: [pytypes.BytesType],
    StackType.bytes_80: [pytypes.BytesType],
    StackType.bytes_1232: [pytypes.BytesType],
    StackType.bytes_1793: [pytypes.BytesType],
    StackType.bool: [pytypes.BoolType, pytypes.UInt64Type],
    StackType.uint64: [pytypes.UInt64Type],
    StackType.any: [pytypes.BytesType, pytypes.UInt64Type],
    StackType.box_name: [pytypes.BytesType],  # TODO: should this be another type..?
    StackType.address: [pytypes.AccountType],
    StackType.bigint: [pytypes.BigUIntType],
    StackType.state_key: [pytypes.BytesType],  # TODO: should this be another type..?
}
BYTES_LITERAL = "bytes"
UINT64_LITERAL = "int"
STUB_NAMESPACE = "op"
ALGORAND_OP_URL = "https://developer.algorand.org/docs/get-details/dapps/avm/teal/opcodes/v10/"
class OpCodeGroup(typing.Protocol):
    """Structural protocol for a grouping/renaming strategy over related ops."""

    def handled_ops(self) -> Iterator[str]: ...
@attrs.define(kw_only=True)
class RenamedOpCode(OpCodeGroup):
    # A single op exposed under a new name, plus stack-based aliases it subsumes.
    name: str
    stack_aliases: dict[str, list[str]] = attrs.field(factory=dict)
    """ops that are aliases for other ops that take stack values instead of immediates"""
    op: str

    def handled_ops(self) -> Iterator[str]:
        # the op itself plus every alias it subsumes
        yield self.op
        yield from self.stack_aliases.keys()
@attrs.define(kw_only=True)
class MergedOpCodes(OpCodeGroup):
    # Several ops merged into one namespace; each maps to its alias specs.
    name: str
    doc: str
    ops: dict[str, dict[str, list[str]]]

    def handled_ops(self) -> Iterator[str]:
        for op, aliases in self.ops.items():
            yield op
            yield from aliases.keys()
@attrs.define(kw_only=True)
class GroupedOpCodes(OpCodeGroup):
    # Related ops collected under one namespace-style class.
    name: str
    """ops that are aliases for other ops that take stack values instead of immediates"""
    doc: str
    ops: dict[str, str] = attrs.field(factory=dict)
    """ops to include in group, mapped to their new name"""

    def handled_ops(self) -> Iterator[str]:
        yield from self.ops.keys()
# Ops bundled into namespace-style classes in the generated stubs; each raw op
# name maps to the (shorter) method name it gets inside the class.
GROUPED_OP_CODES = [
    GroupedOpCodes(
        name="AppGlobal",
        doc="Get or modify Global app state",
        ops={
            "app_global_get": "get",
            "app_global_get_ex": "get_ex",
            "app_global_del": "delete",
            "app_global_put": "put",
        },
    ),
    GroupedOpCodes(
        name="Scratch",
        doc="Load or store scratch values",
        ops={"loads": "load", "stores": "store"},
    ),
    GroupedOpCodes(
        name="AppLocal",
        doc="Get or modify Local app state",
        ops={
            "app_local_get": "get",
            "app_local_get_ex": "get_ex",
            "app_local_del": "delete",
            "app_local_put": "put",
        },
    ),
    GroupedOpCodes(
        name="Box",
        doc="Get or modify box state",
        ops={
            "box_create": "create",
            "box_del": "delete",
            "box_extract": "extract",
            "box_get": "get",
            "box_len": "length",
            "box_put": "put",
            "box_replace": "replace",
            "box_resize": "resize",
            "box_splice": "splice",
        },
    ),
    GroupedOpCodes(
        name="EllipticCurve",
        doc="Elliptic Curve functions",
        ops={
            "ec_add": "add",
            "ec_map_to": "map_to",
            "ec_multi_scalar_mul": "scalar_mul_multi",
            "ec_pairing_check": "pairing_check",
            "ec_scalar_mul": "scalar_mul",
            "ec_subgroup_check": "subgroup_check",
        },
    ),
    GroupedOpCodes(
        name="ITxnCreate",
        doc="Create inner transactions",
        ops={
            "itxn_begin": "begin",
            "itxn_next": "next",
            "itxn_submit": "submit",
            "itxn_field": "set",
        },
    ),
]
# Transaction/global accessor ops merged into single classes. For each primary
# op, aliases map to ordered immediate/stack argument names (F=field, T=txn
# index, A/I=array index) used to align them with the primary op's signature.
MERGED_OP_CODES = [
    MergedOpCodes(
        name="Txn",
        doc="Get values for the current executing transaction",
        ops={
            "txn": {},
            "txnas": {
                "txna": ["F", "I"],
            },
        },
    ),
    MergedOpCodes(
        name="GTxn",
        doc="Get values for transactions in the current group",
        ops={
            "gtxns": {
                "gtxn": ["F", "T"],
            },
            # field is immediate, first stack arg is txn index, second stack arg is array index
            "gtxnsas": {
                "gtxnsa": ["F", "A", "I"],  # group index on stack
                "gtxna": ["F", "T", "I"],  # no stack args
                "gtxnas": ["F", "T", "A"],  # array index on stack
            },
        },
    ),
    MergedOpCodes(
        name="ITxn",
        doc="Get values for the last inner transaction",
        ops={
            "itxn": {},
            "itxnas": {
                "itxna": ["F", "I"],
            },
        },
    ),
    MergedOpCodes(
        name="GITxn",
        doc="Get values for inner transaction in the last group submitted",
        ops={
            "gitxn": {},
            "gitxnas": {
                "gitxna": ["T", "F", "I"],
            },
        },
    ),
    MergedOpCodes(
        name="Global",
        doc="Get Global values",
        ops={"global": {}},
    ),
]
# Ops exposed under a different stub name; stack_aliases fold immediate-arg
# variants into the stack-based op (alias arg names map to signature order).
RENAMED_OP_CODES = [
    RenamedOpCode(
        name="arg",
        op="args",
        stack_aliases={"arg": ["N"]},
    ),
    RenamedOpCode(
        name="extract",
        op="extract3",
        stack_aliases={
            "extract": ["A", "S", "L"],
        },
    ),
    RenamedOpCode(
        name="replace",
        op="replace3",
        stack_aliases={
            "replace2": ["A", "S", "B"],
        },
    ),
    RenamedOpCode(
        name="substring",
        op="substring3",
        stack_aliases={
            "substring": ["A", "S", "E"],
        },
    ),
    RenamedOpCode(
        name="gload",
        op="gloadss",
        stack_aliases={
            "gload": ["T", "I"],
            "gloads": ["A", "I"],
        },
    ),
    RenamedOpCode(
        name="gaid",
        op="gaids",
        stack_aliases={"gaid": ["T"]},
    ),
    # "return" is a Python keyword, so expose it as "exit"
    RenamedOpCode(
        name="exit",
        op="return",
    ),
]
# Ops that get no stub at all — either too low-level to expose, or superseded
# by a higher-level language feature.
EXCLUDED_OPCODES = {
    # low level flow control
    "bnz",
    "bz",
    "b",
    "callsub",
    "retsub",
    "proto",
    "switch",
    "match",
    # low level stack manipulation
    "intc",
    *[f"intc_{i}" for i in range(4)],
    "bytec",
    *[f"bytec_{i}" for i in range(4)],
    "pushbytes",
    "pushbytess",
    "pushint",
    "pushints",
    "frame_dig",
    "frame_bury",
    "bury",
    "cover",
    "dig",
    "dup",
    "dup2",
    "dupn",
    "pop",
    "popn",
    "swap",
    "uncover",
    # program scratch slot read/modification (for current program)
    "load",
    "loads",
    "store",
    "stores",
    # manipulates what other low level ops point to
    "intcblock",
    "bytecblock",
    # implicit immediates, covered by optimiser and/or assembler
    "arg_0",
    "arg_1",
    "arg_2",
    "arg_3",
    # have a higher level abstraction that supersedes it
    "log",
}
# which ops to treat as properties in the generated stubs;
# the "exclude" list names enum members that stay methods instead
PROPERTY_OPS = {
    "global": {"exclude": ["opcode_budget"]},
    "txn": {"exclude": list[str]()},
}
@attrs.define
class TypedName:
    """A named, typed argument (stack value, immediate, or enum) for a stub signature."""

    name: str  # parameter name as emitted in the stub
    type: StackType | ImmediateKind | str  # raw type, or pre-rendered type string
    doc: str | None  # per-parameter docstring fragment, if any
@attrs.define(kw_only=True)
class FunctionDef:
    """A single function/method to emit, plus its AWST op mapping."""

    name: str  # python-level function name
    doc: list[str]  # docstring body lines
    is_property: bool  # render as a class attribute instead of a method
    args: list[TypedName] = attrs.field(factory=list)
    return_docs: list[str] = attrs.field(factory=list)
    op_mapping: OpMappingWithOverloads  # mapping back to the TEAL op(s)
    min_avm_version: int
@attrs.define
class ClassDef:
    """A namespace class to emit, wrapping one or more related ops as methods."""

    name: str  # python-level class name
    doc: str  # class docstring
    methods: list[FunctionDef] = attrs.field()
    ops: list[str]  # raw TEAL op names covered, for doc links
def main() -> None:
    """Generate the algopy.op stub file and the intrinsic AWST mapping module.

    Reads langspec.puya.json from the repo root, partitions ops into merged /
    grouped / renamed / simple categories, then writes both outputs.
    """
    spec_path = VCS_ROOT / "langspec.puya.json"
    lang_spec_json = json.loads(spec_path.read_text(encoding="utf-8"))
    lang_spec = LanguageSpec.from_json(lang_spec_json)
    # op names that must NOT become plain top-level functions: explicitly
    # excluded ops, python builtins, and python keywords
    non_simple_ops = {
        *EXCLUDED_OPCODES,
        *dir(builtins),
        *keyword.kwlist,  # TODO: maybe consider softkwlist too?
    }
    function_defs = list[FunctionDef]()
    class_defs = list[ClassDef]()
    # arg enums to emit as str-subclasses; dict used as an ordered set
    enums_to_build = dict[str, bool]()
    for merged in MERGED_OP_CODES:
        non_simple_ops.update(merged.handled_ops())
        class_defs.append(build_merged_ops(lang_spec, merged))
    for grouped in GROUPED_OP_CODES:
        non_simple_ops.update(grouped.handled_ops())
        class_defs.append(build_grouped_ops(lang_spec, grouped, enums_to_build))
    for aliased in RENAMED_OP_CODES:
        function_defs.extend(build_aliased_ops(lang_spec, aliased))
        non_simple_ops.update(aliased.handled_ops())
    # everything not claimed above becomes either an enum-keyed class or a
    # plain function
    for op in lang_spec.ops.values():
        if op.name in non_simple_ops or not op.name.isidentifier():
            logger.info(f"Ignoring: {op.name}")
            continue
        overriding_immediate = get_overriding_immediate(op)
        if overriding_immediate:
            class_defs.append(
                build_class_from_overriding_immediate(
                    lang_spec,
                    op,
                    class_name=get_python_enum_class(op.name),
                    class_doc=" ".join(op.doc),
                    immediate=overriding_immediate,
                    aliases=[],
                )
            )
        else:
            # collect plain (non-overriding) arg enums referenced by this op
            for immediate in op.immediate_args:
                if immediate.immediate_type == ImmediateKind.arg_enum and (
                    immediate.modifies_stack_input is None
                    and immediate.modifies_stack_output is None
                ):
                    assert immediate.arg_enum is not None
                    enums_to_build[immediate.arg_enum] = True
            function_defs.extend(build_operation_methods(op, op.name, []))
    function_defs.sort(key=lambda x: x.name)
    class_defs.sort(key=lambda x: x.name)
    enum_names = list(enums_to_build.keys())
    output_stub(lang_spec, enum_names, function_defs, class_defs)
    output_awst_data(lang_spec, enum_names, function_defs, class_defs)
def sub_types(type_name: StackType, *, covariant: bool) -> Sequence[pytypes.PyType]:
    """Resolve a langspec stack type to its allowed algopy types.

    Covariant positions (op inputs) accept every mapped type; otherwise only
    the first, canonical type is returned. Raises NotImplementedError when the
    stack type has no entry in STACK_TYPE_MAPPING.
    """
    try:
        mapped = STACK_TYPE_MAPPING[type_name]
    except KeyError as key_err:
        raise NotImplementedError(
            f"Could not map stack type {type_name} to an algopy type"
        ) from key_err
    if covariant:
        return mapped[:]
    return mapped[:1]
def immediate_kind_to_type(kind: ImmediateKind) -> type[int | str]:
    """Return the Python type used to represent this immediate kind in stubs."""
    if kind in (ImmediateKind.uint8, ImmediateKind.int8, ImmediateKind.varuint):
        return int
    if kind == ImmediateKind.arg_enum:
        return str
    raise ValueError(f"Unexpected ImmediateKind: {kind}")
def get_python_type(
    typ: StackType | ImmediateKind | str, *, covariant: bool, any_as: str | None
) -> str:
    """Render a stack/immediate type as a stub type-annotation string.

    :param typ: raw spec type, or an already-rendered string (passed through)
    :param covariant: include wider accepted types (and python literals) for inputs
    :param any_as: if set, use this string for StackType.any instead of the union
    """
    match typ:
        case StackType() as stack_type:
            if any_as and stack_type == StackType.any:
                return any_as
            ptypes_ = sub_types(stack_type, covariant=covariant)
            names = [str(wt).removeprefix("algopy.") for wt in ptypes_]
            if covariant:
                # inputs also accept the matching python literal types (e.g. int, bytes)
                for pt in ptypes_:
                    lit_t = PYTYPE_TO_LITERAL[pt]
                    if lit_t is not None:
                        lit_name = str(lit_t)
                        if lit_name not in names:
                            names.append(lit_name)
            return " | ".join(names)
        case ImmediateKind() as immediate_kind:
            return immediate_kind_to_type(immediate_kind).__name__
        case _:
            # already a rendered type string (e.g. an enum class name)
            return typ
def build_method_stub(function: FunctionDef, prefix: str = "") -> Iterable[str]:
    """Yield the stub lines (signature + docstring) for one function.

    :param prefix: indentation prepended to every emitted line (for methods)
    """
    signature = list[str]()
    doc = function.doc[:]
    signature.append(f"def {function.name}(")
    args = list[str]()
    for arg in function.args:
        python_type = get_python_type(arg.type, covariant=True, any_as=None)
        args.append(f"{arg.name}: {python_type}")
        if arg.doc:
            doc.append(f":param {python_type} {arg.name}: {arg.doc}")
    if function.args:
        args.append("/")  # TODO: remove once we support kwargs
    signature.append(", ".join(args))
    return_docs = function.return_docs
    returns = pytype_stub_repr(function.op_mapping.result)
    if return_docs:
        if doc:
            doc.append(f":returns {returns}: {return_docs[0]}")
            doc.extend(return_docs[1:])
        else:
            doc = return_docs
    signature.append(f") -> {returns}:")
    # link back to the native op(s) this function maps to
    teal_ops = sorted({op.op_code for op in function.op_mapping.overloads})
    teal_op_desc = ", ".join(_get_algorand_doc(teal_op) for teal_op in teal_ops)
    doc.append("")
    doc.append(f"Native TEAL opcode: {teal_op_desc}")
    body = list[str]()
    if doc:
        body.append('"""')
        body.extend(doc)
        body.append('"""')
    else:
        body.append("...")
    yield prefix + "".join(signature)
    yield from [textwrap.indent(line, prefix=prefix + INDENT) for line in body]
def build_stub_class(klass: ClassDef) -> Iterable[str]:
    """Yield the stub lines for a namespace class and all of its members."""
    ops = [f"{_get_algorand_doc(op)}" for op in klass.ops]
    docstring = "\n".join(
        [
            INDENT + '"""',
            INDENT + klass.doc,
            INDENT + f"Native TEAL op{'s' if len(ops) > 1 else ''}: {', '.join(ops)}",
            INDENT + '"""',
        ]
    )
    method_preamble = f"{INDENT}@staticmethod"
    yield f"class {klass.name}:"
    yield docstring
    for method in klass.methods:
        if method.is_property:
            # properties render as typed Final class attributes
            yield from build_class_var_stub(method, INDENT)
        else:
            yield method_preamble
            yield from build_method_stub(method, prefix=INDENT)
        yield ""
def build_class_var_stub(function: FunctionDef, indent: str) -> Iterable[str]:
    """Yield the stub lines for a property-style op, as a Final class attribute."""
    returns = pytype_stub_repr(function.op_mapping.result)
    return_docs = function.return_docs
    # prefer the return docs; fall back to the op docs
    doc = return_docs if return_docs else function.doc[:]
    _maybe_add_min_version_doc(doc, function.min_avm_version)
    yield f"{indent}{function.name}: typing.Final[{returns}] = ..."
    yield f'{indent}"""'
    for doc_line in doc:
        yield f"{indent}{doc_line}"
    yield f'{indent}"""'
def _get_modified_stack_value(alias: Op) -> StackValue:
    """Return the stack value whose type is controlled by the op's overriding immediate."""
    immediate = get_overriding_immediate(alias)
    assert immediate
    input_index = immediate.modifies_stack_input
    if input_index is not None:
        return alias.stack_inputs[input_index]
    assert immediate.modifies_stack_output is not None
    return alias.stack_outputs[immediate.modifies_stack_output]
# an op together with the ordered argument names used to align it with the
# primary op's signature
AliasT: typing.TypeAlias = tuple[Op, list[str]]


def build_class_from_overriding_immediate(
    spec: LanguageSpec,
    op: Op,
    class_name: str,
    class_doc: str,
    immediate: Immediate,
    aliases: list[AliasT],
) -> ClassDef:
    """Build a class with one method per arg-enum value of an overriding immediate.

    The immediate determines the type of one stack input/output, so each enum
    value becomes a separate, precisely-typed method.
    """
    assert immediate.arg_enum
    logger.info(f"Using overriding immediate for {op.name}")
    arg_enum_values = spec.arg_enums[immediate.arg_enum]
    # copy inputs so they can be mutated safely
    op = copy.deepcopy(op)
    aliases = copy.deepcopy(aliases)
    # obtain a list of stack values that will be modified for each enum
    stacks_to_modify = [_get_modified_stack_value(o) for o, _ in [(op, None), *aliases]]
    # build a method for each arg enum value
    methods = list[FunctionDef]()
    class_ops = {op.name}
    for value in arg_enum_values:
        stack_type = value.stack_type
        assert stack_type
        # specialize the modified stack value's type/doc for this enum value
        for stack_to_modify in stacks_to_modify:
            stack_to_modify.stack_type = stack_type
            stack_to_modify.doc = value.doc
        method = build_operation_method(
            op, snake_case(value.name), aliases, const_immediate_value=(immediate, value)
        )
        # some enums are reused across ops, so need to take the max minimum of op and enum version
        method.min_avm_version = max(op.min_avm_version, value.min_avm_version)
        _maybe_add_min_version_doc(method.doc, method.min_avm_version)
        for op_mapping in method.op_mapping.overloads:
            class_ops.add(op_mapping.op_code)
        methods.append(method)
    return ClassDef(name=class_name, doc=class_doc, methods=methods, ops=sorted(class_ops))
def get_op_doc(op: Op) -> list[str]:
    """Return the op's doc lines with backslashes escaped, plus a min-version note when needed."""
    escaped = [line.replace("\\", "\\\\") for line in op.doc]
    _maybe_add_min_version_doc(escaped, op.min_avm_version)
    return escaped
def get_python_enum_class(arg_enum: str) -> str:
    """Map a spec arg-enum name to a PascalCase python class name.

    Explicit overrides in PYTHON_ENUM_CLASS win; all-uppercase names
    (acronyms) are kept as-is; everything else is converted to PascalCase.
    """
    try:
        return PYTHON_ENUM_CLASS[arg_enum]
    except KeyError:
        pass
    # don't change acronyms
    if arg_enum.isupper():
        return arg_enum
    return snake_case(arg_enum).replace("_", " ").title().replace(" ", "")
def get_overriding_immediate(op: Op) -> Immediate | None:
    """Return the first immediate that overrides a stack input/output type, or None."""
    for candidate in op.immediate_args:
        if (
            candidate.modifies_stack_input is not None
            or candidate.modifies_stack_output is not None
        ):
            return candidate
    return None
def build_enum(spec: LanguageSpec, arg_enum: str) -> Iterable[str]:
    """Yield the stub lines for an arg-enum, rendered as a str subclass."""
    values = spec.arg_enums[arg_enum]
    enum_name = get_python_enum_class(arg_enum)
    yield f"class {enum_name}(str):"
    yield f'{INDENT}"""Available values for the `{arg_enum}` enum"""'
    for value in values:
        yield f"{INDENT}{value.name}: {enum_name} = ..."
        enum_doc = []
        if value.doc:
            enum_doc.append(value.doc)
        _maybe_add_min_version_doc(enum_doc, value.min_avm_version)
        if enum_doc:
            yield f'{INDENT}"""'
            for doc_line in enum_doc:
                yield f"{INDENT}{doc_line}"
            yield f'{INDENT}"""'
    yield ""
def _maybe_add_min_version_doc(doc: list[str], version: int) -> None:
    """Append a "Min AVM version" doc line when above the minimum supported version."""
    # only output min AVM version if it is greater than our min supported version
    if version <= MIN_SUPPORTED_VERSION:
        return
    doc.append(f"Min AVM version: {version}")
def build_operation_method(
    op: Op,
    op_function_name: str,
    aliases: list[AliasT],
    const_immediate_value: tuple[Immediate, ArgEnum] | None = None,
) -> FunctionDef:
    """Build a FunctionDef (signature + op mappings) for one op and its aliases.

    :param op_function_name: python name for the generated function/method
    :param aliases: alias ops with their signature-ordered argument names
    :param const_immediate_value: immediate that is fixed to a specific enum
        value (and therefore omitted from the python signature)
    """
    args = []
    # python stub args can be different to mapping args, due to immediate args
    # that are inferred based on the method/property used
    function_args = []
    doc = get_op_doc(op)
    for immediate in op.immediate_args:
        arg_type: ImmediateKind | str
        if immediate.immediate_type == ImmediateKind.arg_enum:
            assert immediate.arg_enum, "Arg enum expected"
            arg_type = get_python_enum_class(immediate.arg_enum)
        else:
            arg_type = immediate.immediate_type
        im_arg = TypedName(name=immediate.name.lower(), type=arg_type, doc=immediate.doc)
        args.append(im_arg)
        if const_immediate_value and const_immediate_value[0] == immediate:
            # omit immediate arg from signature
            doc = []
        else:
            function_args.append(im_arg)
    for si in op.stack_inputs:
        stack_arg = TypedName(name=si.name.lower(), type=si.stack_type, doc=si.doc)
        args.append(stack_arg)
        function_args.append(stack_arg)
    if op.halts:
        return_docs = ["Halts program"]
    else:
        return_docs = [so.doc for so in op.stack_outputs if so.doc]
    # property-style rendering applies only to configured ops, minus exclusions
    try:
        property_op = PROPERTY_OPS[op.name]
    except KeyError:
        is_property = False
    else:
        is_property = op_function_name not in property_op["exclude"]
    if op.halts:
        result_typ = pytypes.NeverType
    else:
        # replace immediate reference to arg enum with a constant enum value
        result_ptypes = [sub_types(o.stack_type, covariant=False)[0] for o in op.stack_outputs]
        if not result_ptypes:
            result_typ = pytypes.NoneType
        elif len(op.stack_outputs) == 1:
            (result_typ,) = result_ptypes
        else:
            result_typ = pytypes.GenericTupleType.parameterise(result_ptypes, source_location=None)
        # special-case richer return types for these two txn fields
        if result_typ == pytypes.UInt64Type:
            if op_function_name == "on_completion":
                result_typ = pytypes.OnCompleteActionType
            elif op_function_name == "type_enum":
                result_typ = pytypes.TransactionTypeType
    op_mappings = []
    ops_with_aliases = [(op, list[str]()), *aliases]
    for map_op, alias_args in ops_with_aliases:
        assert map_op.stack_outputs == op.stack_outputs
        if alias_args:
            # map the stack or immediate input name to the function signature position
            name_to_sig_idx = {n: idx2 for idx2, n in enumerate(alias_args)}
        else:
            name_to_sig_idx = {tn.name.upper(): idx2 for idx2, tn in enumerate(args)}
        map_immediates = list[str | int | type[str | int]]()
        map_args_map = dict[int, Sequence[pytypes.PyType] | int]()
        for idx, i_arg in enumerate(map_op.immediate_args):
            if const_immediate_value and const_immediate_value[0] == i_arg:
                # fixed immediate: bake the enum value directly into the mapping
                map_immediates.append(const_immediate_value[1].name)
            else:
                im_typ = immediate_kind_to_type(i_arg.immediate_type)
                map_immediates.append(im_typ)
                sig_idx = name_to_sig_idx[i_arg.name]
                map_args_map[sig_idx] = idx
        for s_arg in map_op.stack_inputs:
            allowed_types = tuple(sub_types(s_arg.stack_type, covariant=True))
            sig_idx = name_to_sig_idx[s_arg.name]
            map_args_map[sig_idx] = allowed_types
        op_mappings.append(
            FunctionOpMapping(
                op_code=map_op.name,
                immediates=map_immediates,
                args=[map_args_map[k] for k in sorted(map_args_map)],
            )
        )
    proto_function = FunctionDef(
        name=op_function_name,
        doc=doc,
        is_property=is_property,
        args=function_args,
        return_docs=return_docs,
        op_mapping=OpMappingWithOverloads(
            arity=len(function_args),
            result=result_typ,
            overloads=op_mappings,
        ),
        min_avm_version=op.min_avm_version,
    )
    return proto_function
def build_operation_methods(
    op: Op, op_function_name: str, aliases: list[AliasT]
) -> Iterable[FunctionDef]:
    """Yield FunctionDefs for an op, splitting `any`-output ops into two variants.

    An op whose output type is `any` becomes <name>_bytes and <name>_uint64
    functions, each with the `any` types replaced accordingly.
    """
    logger.info(f"Mapping {op.name} to {op_function_name}")
    if StackType.any in (s.stack_type for s in op.stack_outputs):
        logger.info(f"Found any output for {op.name}")
        for replace_any_with in (StackType.bytes, StackType.uint64):
            new_op = op_any_replaced(op, replace_any_with)
            new_name = f"{op_function_name}_{replace_any_with.name}"
            new_aliases = [
                (op_any_replaced(alias_op, replace_any_with), names) for alias_op, names in aliases
            ]
            yield build_operation_method(new_op, new_name, new_aliases)
    else:
        yield build_operation_method(op, op_function_name, aliases)
def op_any_replaced(op: Op, replace_any_with: StackType) -> Op:
    """Return a copy of *op* with `any` stack types swapped for *replace_any_with*.

    Exactly one stack output must be of type `any`; inputs may contain any
    number of `any` entries (including none).
    """

    def _replace(values: list[StackValue]) -> tuple[list[StackValue], int]:
        # swap `any`-typed entries, counting how many were changed
        replaced = []
        count = 0
        for value in values:
            if value.stack_type == StackType.any:
                count += 1
                replaced.append(attrs.evolve(value, stack_type=replace_any_with))
            else:
                replaced.append(value)
        return replaced, count

    new_inputs, _input_count = _replace(op.stack_inputs)
    new_outputs, output_count = _replace(op.stack_outputs)
    assert output_count == 1
    return attrs.evolve(op, stack_inputs=new_inputs, stack_outputs=new_outputs)
def build_aliased_ops(spec: LanguageSpec, group: RenamedOpCode) -> Iterable[FunctionDef]:
    """Build FunctionDefs for a renamed op, folding in its stack-value aliases."""
    primary = spec.ops[group.op]
    alias_pairs = [
        (spec.ops[alias_name], arg_map) for alias_name, arg_map in group.stack_aliases.items()
    ]
    return build_operation_methods(primary, group.name, alias_pairs)
def build_merged_ops(spec: LanguageSpec, group: MergedOpCodes) -> ClassDef:
    """Build one ClassDef merging several enum-keyed ops (e.g. txn + txnas).

    Later ops overwrite same-named methods from earlier ones, keyed by method
    name.
    """
    merge_methods = dict[str, FunctionDef]()
    for other_op_name, alias_dict in group.ops.items():
        aliases = [(spec.ops[alias_op], arg_map) for alias_op, arg_map in alias_dict.items()]
        other_op = spec.ops[other_op_name]
        overriding_immediate = get_overriding_immediate(other_op)
        assert overriding_immediate
        other_class = build_class_from_overriding_immediate(
            spec,
            other_op,
            class_name=group.name,
            class_doc=group.doc,
            immediate=overriding_immediate,
            aliases=aliases,
        )
        for method in other_class.methods:
            merge_methods[method.name] = method
    methods = list(merge_methods.values())
    return ClassDef(name=group.name, doc=group.doc, methods=methods, ops=sorted(group.ops))
def build_grouped_ops(
    spec: LanguageSpec, group: GroupedOpCodes, enums_to_build: dict[str, bool]
) -> ClassDef:
    """Build one ClassDef for a GroupedOpCodes entry.

    :param enums_to_build: mutated in place — plain arg enums encountered here
        are recorded so output_stub() emits their enum classes
    """
    methods = list[FunctionDef]()
    for rename_op_name, python_name in group.ops.items():
        rename_op = spec.ops[rename_op_name]
        rename_immediate = get_overriding_immediate(rename_op)
        if rename_immediate:
            rename_class = build_class_from_overriding_immediate(
                spec,
                rename_op,
                class_name=group.name,
                class_doc=group.doc,
                immediate=rename_immediate,
                aliases=[],
            )
            # when grouping an op with immediate overrides, treat python_name as a prefix
            for method in rename_class.methods:
                method.name = f"{python_name}_{method.name}"
            methods.extend(rename_class.methods)
        else:
            methods.extend(build_operation_methods(rename_op, python_name, aliases=[]))
        # record any plain (non-overriding) arg enums used by this op
        for arg in rename_op.immediate_args:
            if arg.immediate_type == ImmediateKind.arg_enum and (
                arg.modifies_stack_input is None and arg.modifies_stack_output is None
            ):
                assert arg.arg_enum is not None
                enums_to_build[arg.arg_enum] = True
    class_def = ClassDef(
        name=group.name,
        doc=group.doc,
        methods=methods,
        ops=sorted(group.ops),
    )
    return class_def
def pytype_repr(typ: pytypes.PyType) -> str:
    """Render a PyType as python source for the generated intrinsic_data module.

    Known types come from the PYTYPE_REPR table; tuples are rendered as a
    GenericTupleType.parameterise(...) expression. Raises ValueError otherwise.
    """
    try:
        return PYTYPE_REPR[typ]
    except KeyError:
        pass
    match typ:
        case pytypes.TupleType(items=tuple_items) if len(tuple_items) > 1:
            item_strs = [pytype_repr(item) for item in tuple_items]
            return (
                f"pytypes.GenericTupleType.parameterise("
                f"({', '.join(item_strs)}), source_location=None)"
            )
    raise ValueError(f"Unexpected pytype: {typ}")
def build_op_specification_body(function: FunctionDef) -> Iterable[str]:
    """Yield python source fragments for one entry of the generated mapping data.

    Fragments are joined without separators by the caller; ruff format fixes
    the layout afterwards. Properties become PropertyOpMapping entries, other
    functions become OpMappingWithOverloads entries.
    """
    if function.is_property:
        # properties always have exactly one overload and one immediate
        (op_mapping,) = function.op_mapping.overloads
        (immediate,) = op_mapping.immediates
        yield (
            f"{function.name}=PropertyOpMapping("
            f"{op_mapping.op_code!r}, {immediate!r}, {pytype_repr(function.op_mapping.result)},"
            f"),"
        )
    else:
        yield f"{function.name}=OpMappingWithOverloads("
        if function.op_mapping.result is not pytypes.NoneType:
            yield f" result={pytype_repr(function.op_mapping.result)},"
        yield f" arity={function.op_mapping.arity}, "
        yield " overloads=["
        for op_mapping in function.op_mapping.overloads:
            yield f"FunctionOpMapping({op_mapping.op_code!r},"
            if op_mapping.immediates:
                yield " immediates=["
                for idx, item in enumerate(op_mapping.immediates):
                    if idx:
                        yield ", "
                    # a type entry (int/str) is emitted by name, values by repr
                    if not isinstance(item, type):
                        yield repr(item)
                    else:
                        yield item.__name__
                yield "],"
            if op_mapping.args:
                yield " args=["
                for idx, allowed_types_or_idx in enumerate(op_mapping.args):
                    if idx:
                        yield ", "
                    # int entries reference an immediate index; otherwise a
                    # tuple of allowed pytypes is emitted
                    if isinstance(allowed_types_or_idx, int):
                        yield repr(allowed_types_or_idx)
                    else:  # noqa: PLR5501
                        if len(allowed_types_or_idx) == 1:
                            yield f"({pytype_repr(*allowed_types_or_idx)},)"
                        else:
                            yield "("
                            for idx2, allowed_type in enumerate(allowed_types_or_idx):
                                if idx2:
                                    yield ","
                                yield pytype_repr(allowed_type)
                            yield ")"
                yield "],"
            yield "),"
        yield "]"
        yield "),"
def build_awst_data(
    lang_spec: LanguageSpec,
    enums: list[str],
    function_ops: list[FunctionDef],
    class_ops: list[ClassDef],
) -> Iterable[str]:
    """Yield the full python source for the generated intrinsic_data module.

    Emits three mappings: ENUM_CLASSES, FUNC_TO_AST_MAPPER and
    NAMESPACE_CLASSES.
    """
    yield "import typing"
    yield "from collections.abc import Mapping, Sequence"
    yield "from puyapy.awst_build import pytypes"
    yield (
        "from puyapy.awst_build.intrinsic_models"
        " import FunctionOpMapping, OpMappingWithOverloads, PropertyOpMapping"
    )
    yield "ENUM_CLASSES: typing.Final[Mapping[str, Mapping[str, str]]] = dict("
    for enum_name in enums:
        yield f"{get_python_enum_class(enum_name)}=dict("
        for enum_value in lang_spec.arg_enums[enum_name]:
            # enum names currently match enum immediate values
            yield f'{enum_value.name}="{enum_value.name}",'
        yield "),"
    yield ")"
    yield ""
    yield "FUNC_TO_AST_MAPPER: typing.Final[Mapping[str, OpMappingWithOverloads]] = dict("
    for function_op in function_ops:
        yield "".join(build_op_specification_body(function_op))
    yield ")"
    yield (
        "NAMESPACE_CLASSES: "
        "typing.Final[Mapping[str, Mapping[str, PropertyOpMapping | OpMappingWithOverloads]]]"
        " = dict("
    )
    for class_op in class_ops:
        yield f"{class_op.name}=dict("
        for method in class_op.methods:
            yield "".join(build_op_specification_body(method))
        yield "),"
    yield ")"
def output_stub(
    lang_spec: LanguageSpec,
    enums: list[str],
    function_ops: list[FunctionDef],
    class_ops: list[ClassDef],
) -> None:
    """Write the generated op.pyi stub file and run ruff format over it."""
    # import every algopy type referenced by PYTYPE_TO_LITERAL
    references = ", ".join(
        sorted(
            str(pt).removeprefix("algopy.")
            for pt, lit_t in PYTYPE_TO_LITERAL.items()
            if str(pt).startswith("algopy.")
        )
    )
    stub: list[str] = [
        "import typing",
        "",
        f"from algopy import {references}",
    ]
    for arg_enum in enums:
        stub.extend(build_enum(lang_spec, arg_enum))
    for function in function_ops:
        stub.extend(build_method_stub(function))
    for class_op in class_ops:
        stub.extend(build_stub_class(class_op))
    stub_out_path = VCS_ROOT / "stubs" / "algopy-stubs" / f"{STUB_NAMESPACE}.pyi"
    stub_out_path.write_text("\n".join(stub), encoding="utf-8")
    subprocess.run(["ruff", "format", str(stub_out_path)], check=True, cwd=VCS_ROOT)
def pytype_stub_repr(pytype: pytypes.PyType) -> str:
    """Render a PyType for stub output, stripping the "algopy." namespace prefix."""
    rendered = str(pytype)
    return rendered.replace("algopy.", "")
def output_awst_data(
    lang_spec: LanguageSpec,
    enums: list[str],
    function_ops: list[FunctionDef],
    class_ops: list[ClassDef],
) -> None:
    """Write the generated intrinsic_data.py module, then format and lint-fix it."""
    awst_data = build_awst_data(lang_spec, enums, function_ops, class_ops)
    awst_data_path = VCS_ROOT / "src" / "puyapy" / "awst_build" / "intrinsic_data.py"
    awst_data_path.write_text("\n".join(awst_data), encoding="utf-8")
    subprocess.run(["ruff", "format", str(awst_data_path)], check=True, cwd=VCS_ROOT)
    # check=False: lint fixes are best-effort, remaining issues don't fail generation
    subprocess.run(["ruff", "check", "--fix", str(awst_data_path)], check=False, cwd=VCS_ROOT)
def _get_algorand_doc(op: str) -> str:
    """Return a markdown link from an op name to its entry in the AVM opcode docs."""
    anchor = f"{ALGORAND_OP_URL}#{op}"
    return f"[`{op}`]({anchor})"
# script entry point
if __name__ == "__main__":
    main()
|
algorandfoundation/puya
|
scripts/generate_stubs.py
|
Python
|
NOASSERTION
| 33,877 |
#!/usr/bin/env python3
import contextlib
import enum
import json
import logging
import typing
from pathlib import Path
import attrs
import cattrs
logger = logging.getLogger(__name__)
# single-letter names assigned to stack inputs/outputs, in positional order
STACK_INPUT_NAMES = "ABCDE"
STACK_OUTPUT_NAMES_FEW = "XYZ"  # 3 or less var
STACK_OUTPUT_NAMES_MANY = "WXYZ"  # 4 var
# opcodes treated as having a variable encoded size (constant blocks,
# push variants, and multi-target branches)
VARIABLE_SIZE_OPCODES = {
    "intcblock",
    "bytecblock",
    "pushbytes",
    "pushbytess",
    "pushint",
    "pushints",
    "switch",
    "match",
}
class NamedType(typing.TypedDict):
    """Shape of a langspec "NamedTypes" entry, e.g.:

    {
        "Name": "uint64",
        "Abbreviation": "i",
        "Bound": [
            0,
            18446744073709551615
        ],
        "AVMType": "uint64"
    },
    """

    Name: str
    Abbreviation: str
    AVMType: str
# total=False: not every key is present on every entry
class ImmediateNote(typing.TypedDict, total=False):
    """Shape of a langspec "ImmediateNote" entry, e.g.:

    {
        "Comment": "transaction field index",
        "Encoding": "uint8",
        "Name": "F",
        "Reference": "txn"
    }
    """

    Comment: str
    Encoding: str
    Name: str
    Reference: str
# total=False: optional keys are only present on some ops
class Operation(typing.TypedDict, total=False):
    """Shape of a langspec "Ops" entry, e.g.:

    {
        "Opcode": 0,
        "Name": "err",
        "Size": 1,
        "Doc": "Fail immediately.",
        "IntroducedVersion": 1,
        "Groups": [
            "Flow Control"
        ]
    },
    {
        "Opcode": 1,
        "Name": "sha256",
        "Args": [
            "[]byte"
        ],
        "Returns": [
            "[32]byte"
        ],
        "Size": 1,
        "Doc": "SHA256 hash of value A, yields [32]byte",
        "IntroducedVersion": 1,
        "Groups": [
            "Arithmetic"
        ]
    }
    """

    Doc: str
    Opcode: int
    Size: int
    Name: str
    IntroducedVersion: int
    Groups: list[str]
    Args: list[str]
    Returns: list[str]
    DocExtra: str
    ArgEnum: list[str]
    ArgEnumTypes: list[str]
    ArgEnumBytes: list[int]
    ArgModes: list[int]
    ArgEnumVersion: list[int]
    ImmediateNote: list[ImmediateNote]
    # the following values are not in the original langspec.json
    # these values are manually patched in during transform
    ArgEnumIsInput: bool
    Halts: bool
    # these values are output by a modified opdoc.go from go-algorand repo
    Cost: str
    ArgEnumDoc: list[str]
    Modes: int
class AlgorandLanguageSpec(typing.TypedDict):
    """Top-level shape of the langspec JSON document."""

    NamedTypes: list[NamedType]
    Ops: list[Operation]
class StackType(enum.StrEnum):
    """AVM stack value types; each member's value is the spec's type string."""

    uint64 = enum.auto()
    bytes = "[]byte"
    # fixed-length byte variants as they appear in the spec
    bytes_8 = "[8]byte"
    bytes_32 = "[32]byte"
    bytes_33 = "[33]byte"
    bytes_64 = "[64]byte"
    bytes_80 = "[80]byte"
    bytes_1232 = "[1232]byte"
    bytes_1793 = "[1793]byte"
    bool = enum.auto()
    address = enum.auto()
    address_or_index = enum.auto()
    any = enum.auto()
    bigint = enum.auto()
    box_name = "boxName"
    asset = enum.auto()
    application = enum.auto()
    state_key = "stateKey"
class RunMode(enum.StrEnum):
    """Execution contexts an op (or arg enum value) is valid in."""

    app = enum.auto()  # application (smart contract) execution
    sig = enum.auto()  # logic signature execution
    any = enum.auto()  # valid in either mode
@attrs.define
class StackValue:
    """A single value on the AVM stack (an op input or output)."""

    name: str
    """Name used to refer to this value in the Op.doc"""
    stack_type: StackType
    doc: str | None = None
@attrs.define
class ArgEnum:
    """A single value of an op's arg enum immediate (e.g. a txn field)."""

    name: str  # enum value name as it appears in TEAL
    doc: str | None
    stack_type: StackType | None  # type this value puts on / takes from the stack
    mode: RunMode
    value: int  # byte value encoded for this enum member
    min_avm_version: int
class ImmediateKind(enum.StrEnum):
    """Encodings for an op's immediate arguments."""

    uint8 = enum.auto()
    int8 = enum.auto()
    label = enum.auto()
    varuint = enum.auto()
    bytes = enum.auto()
    # array types
    label_array = enum.auto()
    varuint_array = enum.auto()
    bytes_array = enum.auto()
    # not in original lang spec
    arg_enum = enum.auto()
@attrs.frozen(kw_only=True)
class Immediate:
    """An immediate (non-stack) argument of an op."""

    name: str
    """Name used to refer to this value in the Op.doc"""
    immediate_type: ImmediateKind
    arg_enum: str | None = None
    """arg_enums reference, when immediate_type is arg_enum"""
    modifies_stack_input: int | None = None
    """Index of stack input type that this immediate modifies"""
    modifies_stack_output: int | None = None
    """Index of stack output type that this immediate modifies"""
    doc: str | None = None
@attrs.define
class Cost:
    """Opcode execution cost, static or described."""

    value: int | None
    """Static cost of op, or None if cost is not static"""
    doc: str
    """Documentation describing how cost is calculated"""
@attrs.define
class Op:
    """A fully-structured AVM op, as produced from the patched langspec."""

    name: str
    """Name of op in TEAL"""
    code: int
    """Bytecode value"""
    size: int
    """Size in bytes of compiled op, 0 indicate size is variable"""
    doc: list[str]
    cost: Cost
    min_avm_version: int
    """AVM version op was introduced"""
    halts: bool
    """True if this op halts the program"""
    mode: RunMode
    # run mode(s) the op is valid in
    groups: list[str] = attrs.field(factory=list)
    """Groups op belongs to"""
    stack_inputs: list[StackValue] = attrs.field(factory=list)
    """Inputs that come from the stack"""
    immediate_args: list[Immediate] = attrs.field(factory=list)
    """Arguments that are passed as immediate values in TEAL"""
    stack_outputs: list[StackValue] = attrs.field(factory=list)
    """Outputs left on the stack"""
@attrs.define
class LanguageSpec:
    """The structured language spec: all ops plus all arg enum groups."""

    ops: dict[str, Op] = attrs.field(factory=dict)
    arg_enums: dict[str, list[ArgEnum]] = attrs.field(factory=dict)

    @staticmethod
    def from_json(json: dict[str, typing.Any]) -> "LanguageSpec":
        """Structure a parsed JSON document into a LanguageSpec via cattrs."""
        # NOTE: the parameter shadows the json module within this scope
        return cattrs.structure(json, LanguageSpec)

    def to_json(self) -> dict[str, typing.Any]:
        """Unstructure this spec back into plain dicts/lists."""
        return attrs.asdict(self)
def _patch_lang_spec(lang_spec: dict[str, typing.Any]) -> None:
    """Apply in-place corrections to the raw langspec JSON.

    Narrows overly-generic stack/return/enum types (any, uint64, []byte) to
    richer domain types, and adds metadata (Halts, ArgEnumIsInput) that the
    original spec does not carry.
    """
    ops = {op["Name"]: op for op in lang_spec["Ops"]}
    # patch ops that use a stack type of any
    # for arguments that should be an Address or Address index
    for op_name in (
        "acct_params_get",
        "app_local_get",
        "app_local_put",
        "app_local_del",
        "app_local_get_ex",
        "app_opted_in",
        "asset_holding_get",
        "balance",
        "min_balance",
        "voter_params_get",
    ):
        _patch_arg_type(ops, op_name, 0, "any", "address_or_index")
    # patch ops that use a stack type of uint64
    # for arguments that should be an Application
    for op_name, arg_index in {
        "app_opted_in": 1,
        "app_global_get_ex": 0,
        "app_local_get_ex": 1,
        "app_params_get": 0,
    }.items():
        _patch_arg_type(ops, op_name, arg_index, "uint64", "application")
    # patch ops that use a stack type of uint64
    # for return types that should be a bool
    for op_name in [
        "!",
    ]:
        _patch_return_type(ops, op_name, 0, "uint64", "bool")
    # patch ops that use a stack type of uint64
    # for arguments that should be an Asset
    for op_name, arg_index in {
        "asset_holding_get": 1,
        "asset_params_get": 0,
    }.items():
        _patch_arg_type(ops, op_name, arg_index, "uint64", "asset")
    for op_name, arg_index in {
        "select": 2,
    }.items():
        _patch_arg_type(ops, op_name, arg_index, "uint64", "bool")
    # patch return bytes -> bigint
    for op_name in [
        "b+",
        "b*",
    ]:
        _patch_return_type(ops, op_name, 0, "[]byte", "bigint")
    # patch txn enum fields with asset and application types
    txn = ops["txn"]
    itxn_field = ops["itxn_field"]
    for op in (txn, itxn_field):
        for immediate in [
            "XferAsset",
            "ConfigAsset",
            "FreezeAsset",
        ]:
            _patch_arg_enum_type(op, immediate, "uint64", "asset")
        _patch_arg_enum_type(op, "ApplicationID", "uint64", "application")
    _patch_arg_enum_type(txn, "CreatedApplicationID", "uint64", "application")
    _patch_arg_enum_type(txn, "CreatedAssetID", "uint64", "asset")
    # patch txna enums
    txna = ops["txna"]
    _patch_arg_enum_type(txna, "Assets", "uint64", "asset")
    _patch_arg_enum_type(txna, "Applications", "uint64", "application")
    # patch global enums
    _patch_arg_enum_type(ops["global"], "CurrentApplicationID", "uint64", "application")
    # base64_decode has an ArgEnumTypes array when it probably shouldn't
    # as all stack outputs are bytes
    del ops["base64_decode"]["ArgEnumTypes"]
    # itxn_field reuses the same field group as txn, however it only uses a subset of fields
    # additionally ArgEnumTypes refers to the stack input types not the output types
    itxn_field = ops["itxn_field"]
    itxn_field["ImmediateNote"][0]["Reference"] = "itxn_field"
    itxn_field["ArgEnumIsInput"] = True
    # ops that never return encode this with a single return type of none
    # however currently this information is stripped when generating langspec.json
    ops["err"]["Halts"] = True
    ops["return"]["Halts"] = True
def _patch_arg_enum_type(
op: dict[str, typing.Any], immediate: str, current_type: str, new_type: str
) -> None:
arg_enum = op["ArgEnum"]
assert immediate in arg_enum, f"Expected {immediate} arg enum for {op['Name']}"
immediate_index = arg_enum.index(immediate)
arg_enum_types = op["ArgEnumTypes"]
assert (
arg_enum_types[immediate_index] == current_type
), f"Expected {immediate} to be {current_type}"
arg_enum_types[immediate_index] = new_type
def _patch_arg_type(
ops: dict[str, typing.Any], op_name: str, arg_index: int, current_type: str, new_type: str
) -> None:
op_args = ops[op_name]["Args"]
assert (
op_args[arg_index] == current_type
), f"Expected {op_name} arg {arg_index} to be {current_type}"
op_args[arg_index] = new_type
def _patch_return_type(
ops: dict[str, typing.Any], op_name: str, return_index: int, current_type: str, new_type: str
) -> None:
returns = ops[op_name]["Returns"]
assert (
returns[return_index] == current_type
), f"Expected {op_name} return {return_index} to be {current_type}"
returns[return_index] = new_type
def create_indexed_enum(op: Operation) -> list[ArgEnum]:
    """Build ArgEnum entries for `op` by zipping its parallel ArgEnum* arrays."""
    enum_names = op["ArgEnum"]
    # ArgEnumTypes is optional in the langspec; when absent, entries carry no stack type
    enum_types: list[str] | list[None] = op.get("ArgEnumTypes", [])
    enum_docs = op["ArgEnumDoc"]
    enum_bytes = op["ArgEnumBytes"]
    enum_modes = op["ArgModes"]
    enum_versions = op["ArgEnumVersion"]
    if not enum_types:
        enum_types = [None] * len(enum_names)
    result = list[ArgEnum]()
    # strict=True: all parallel arrays must have the same length as the names array
    for enum_name, enum_type, enum_doc, enum_mode, enum_byte, enum_version in zip(
        enum_names, enum_types, enum_docs, enum_modes, enum_bytes, enum_versions, strict=True
    ):
        stack_type = None if enum_type is None else StackType(enum_type)
        enum_value = ArgEnum(
            name=enum_name,
            doc=enum_doc if enum_doc else None,  # normalise empty doc strings to None
            stack_type=stack_type,
            # a per-entry mode of 0 falls back to the op-level mode
            mode=_map_enum_mode(op["Modes"], enum_mode),
            value=enum_byte,
            min_avm_version=enum_version,
        )
        result.append(enum_value)
    return result
def _map_enum_mode(op_mode: int, arg_mode: int = 0) -> RunMode:
    """Map a numeric langspec mode flag to a RunMode (1=sig, 2=app, 3=any).

    A zero `arg_mode` (the "unspecified" value) falls back to `op_mode`.
    """
    effective_mode = arg_mode or op_mode
    if effective_mode == 1:
        return RunMode.sig
    if effective_mode == 2:
        return RunMode.app
    if effective_mode == 3:
        return RunMode.any
    raise ValueError("Unexpected run mode")
def transform_encoding(value: str) -> ImmediateKind:
    """Translate a langspec immediate "Encoding" string into an ImmediateKind."""
    encoding_to_kind = {
        "uint8": ImmediateKind.uint8,
        "int8": ImmediateKind.int8,
        "int16 (big-endian)": ImmediateKind.label,
        "varuint": ImmediateKind.varuint,
        "varuint length, bytes": ImmediateKind.bytes,
        "varuint count, [varuint ...]": ImmediateKind.varuint_array,
        "varuint count, [varuint length, bytes ...]": ImmediateKind.bytes_array,
        "varuint count, [int16 (big-endian) ...]": ImmediateKind.label_array,
    }
    if value not in encoding_to_kind:
        raise ValueError(f"Unknown Encoding: {value}")
    return encoding_to_kind[value]
def transform_stack_args(op: Operation) -> list[StackValue]:
    """Convert an op's "Args" stack-input types into named StackValue entries.

    Names are assigned positionally from STACK_INPUT_NAMES.
    """
    args = op.get("Args", [])
    assert len(args) <= len(STACK_INPUT_NAMES), f"More args than expected for {op['Name']}"
    # zip stops at the shorter sequence (args), pairing each arg with its positional name
    return [
        StackValue(name=name, stack_type=StackType(arg_type))
        for name, arg_type in zip(STACK_INPUT_NAMES, args)
    ]
def transform_immediates(
    arg_enums: dict[str, list[ArgEnum]],
    algorand_ops: dict[str, Operation],
    op: Operation,
) -> list[Immediate]:
    """Convert `op`'s ImmediateNote entries into Immediate objects.

    `arg_enums` is a shared cache keyed by the enum group's Reference name: the
    first op that references a group populates it (from the op named by the
    reference when it exists in `algorand_ops`, otherwise from `op` itself),
    and subsequent ops reuse the cached entries.
    """
    op_name = op["Name"]
    result = list[Immediate]()
    for immediate in op.get("ImmediateNote", []):
        # a "Reference" marks this immediate as selecting from a named arg-enum group
        arg_enum_reference = immediate.get("Reference")
        if arg_enum_reference is not None:
            arg_enum = op.get("ArgEnum")
            if arg_enum_reference not in arg_enums:
                # prefer the op that owns the reference (e.g. itxn_field references txn's fields)
                try:
                    enum_op = algorand_ops[arg_enum_reference]
                except KeyError:
                    enum_op = op
                assert arg_enum, f"Expected enum for {op_name}"
                arg_enums[arg_enum_reference] = create_indexed_enum(enum_op)
            if arg_enum is not None:
                # sanity check: this op's enum list matches the cached group's size
                assert len(arg_enum) == len(
                    arg_enums[arg_enum_reference]
                ), f"Arg Enum lengths don't match for {op_name}"
        modifies_stack_input: int | None = None
        modifies_stack_output: int | None = None
        # when enum entries carry stack types, the selected entry determines the type of
        # stack input 0 or stack output 0 — ArgEnumIsInput (set by patching) picks which
        if arg_enum_reference and any(a.stack_type for a in arg_enums[arg_enum_reference]):
            assert all(a.stack_type for a in arg_enums[arg_enum_reference])
            if op.get("ArgEnumIsInput"):
                modifies_stack_input = 0
            else:
                modifies_stack_output = 0
        result.append(
            Immediate(
                name=immediate["Name"],
                immediate_type=(
                    transform_encoding(immediate["Encoding"])
                    if arg_enum_reference is None
                    else ImmediateKind.arg_enum
                ),
                modifies_stack_input=modifies_stack_input,
                modifies_stack_output=modifies_stack_output,
                arg_enum=arg_enum_reference,
                doc=immediate["Comment"],
            )
        )
    return result
def transform_returns(op: Operation) -> list[StackValue]:
    """Convert an op's "Returns" stack-output types into named StackValue entries.

    Uses the short name set when it suffices, otherwise the extended set.
    """
    returns = op.get("Returns")
    if returns is None:
        return []
    if len(returns) <= len(STACK_OUTPUT_NAMES_FEW):
        return_argument_names = STACK_OUTPUT_NAMES_FEW
    elif len(returns) <= len(STACK_OUTPUT_NAMES_MANY):
        return_argument_names = STACK_OUTPUT_NAMES_MANY
    else:
        raise AssertionError(f"More returns than expected for {op['Name']}")
    # zip stops at the shorter sequence (returns)
    return [
        StackValue(name=name, stack_type=StackType(return_type))
        for name, return_type in zip(return_argument_names, returns)
    ]
def transform_doc(op: Operation) -> list[str]:
    """Collect an op's documentation as a list of lines ("Doc" plus optional "DocExtra")."""
    doc_lines = op["Doc"].splitlines()
    extra = op.get("DocExtra")
    if extra:
        doc_lines += extra.splitlines()
    return doc_lines
def get_immediate_encoded_size(immediate: Immediate) -> int:
    """Return the fixed number of bytes this immediate occupies in an encoded instruction.

    Variable-length kinds contribute 0 (their size is not statically known).
    """
    kind = immediate.immediate_type
    if kind in (ImmediateKind.uint8, ImmediateKind.int8, ImmediateKind.arg_enum):
        return 1
    if kind is ImmediateKind.label:
        return 2
    if isinstance(kind, ImmediateKind):
        # all remaining kinds are variable-length
        return 0
    raise ValueError(f"Cannot determine size of {immediate.immediate_type}")
def transform_cost(op: Operation) -> Cost:
    """Parse an op's "Cost" field; value is the fixed integer cost, or None when variable.

    The raw string is always preserved as documentation.
    """
    raw_cost = op["Cost"]
    fixed_cost: int | None
    try:
        fixed_cost = int(raw_cost)
    except ValueError:
        # non-numeric cost description (e.g. depends on inputs)
        fixed_cost = None
    return Cost(value=fixed_cost, doc=raw_cost)
def transform_spec(lang_spec: AlgorandLanguageSpec) -> LanguageSpec:
    """Transform a raw (already patched) Algorand langspec into puya's LanguageSpec model."""
    result = LanguageSpec()
    arg_enums = result.arg_enums
    # sort ops by name so output (and the order enum groups get discovered) is deterministic
    algorand_ops = {o["Name"]: o for o in sorted(lang_spec["Ops"], key=lambda x: x["Name"])}
    for op_name, algorand_op in algorand_ops.items():
        op = Op(
            name=op_name,
            code=algorand_op["Opcode"],
            size=algorand_op["Size"],
            doc=transform_doc(algorand_op),
            cost=transform_cost(algorand_op),
            min_avm_version=algorand_op["IntroducedVersion"],
            groups=algorand_op["Groups"],
            # note: this call also populates result.arg_enums as a side effect
            immediate_args=transform_immediates(arg_enums, algorand_ops, algorand_op),
            stack_inputs=transform_stack_args(algorand_op),
            stack_outputs=transform_returns(algorand_op),
            # "Halts" is injected by the patching step; absent for ordinary ops
            halts=algorand_op.get("Halts", False),
            mode=_map_enum_mode(algorand_op["Modes"]),
        )
        validate_op(result, op)
        result.ops[op.name] = op
    return result
def validate_op(lang_spec: LanguageSpec, op: Op) -> None:
    """Sanity-check a transformed op against the spec built so far; raises AssertionError."""
    # validate op size: declared size must equal opcode byte plus fixed immediate sizes
    # (variable-size opcodes have no meaningful fixed instruction byte to count)
    instruction_size = 0 if op.name in VARIABLE_SIZE_OPCODES else 1
    expected_size = (
        sum([get_immediate_encoded_size(a) for a in op.immediate_args]) + instruction_size
    )
    assert op.size == expected_size, f"Unexpected size for specified immediate args for {op.name}"
    # validate immediate modifiers
    for immediate in op.immediate_args:
        if immediate.immediate_type == ImmediateKind.arg_enum:
            # the referenced enum group must have been registered
            assert immediate.arg_enum in lang_spec.arg_enums
            if immediate.modifies_stack_input is not None:
                assert immediate.modifies_stack_input < len(op.stack_inputs), (
                    f"Immediate for {op.name} references stack input "
                    f"that does not exist {immediate.modifies_stack_input}"
                )
            if immediate.modifies_stack_output is not None:
                assert immediate.modifies_stack_output < len(op.stack_outputs), (
                    f"Immediate for {op.name} references stack output "
                    f"that does not exist {immediate.modifies_stack_output}"
                )
        else:
            # non-enum immediates must not carry enum-only attributes
            assert not immediate.arg_enum
            assert not immediate.modifies_stack_input
            assert not immediate.modifies_stack_output
def main() -> None:
    """Load langspec.json from the repo root, patch and transform it, and write langspec.puya.json."""
    repo_root = Path(__file__).parent.parent
    spec_path = repo_root / "langspec.json"
    output_path = repo_root / "langspec.puya.json"
    logger.info(f"Transforming {spec_path} to {output_path}")
    raw_spec = json.loads(spec_path.read_text(encoding="utf-8"))
    # apply hand-maintained corrections before the structural transform
    _patch_lang_spec(raw_spec)
    puya_spec = transform_spec(typing.cast(AlgorandLanguageSpec, raw_spec))
    output_path.write_text(json.dumps(puya_spec.to_json(), indent=4), encoding="utf-8")
# Script entry point: plain INFO-level messages to stdout, then run the transform.
if __name__ == "__main__":
    logging.basicConfig(level=logging.INFO, format="%(message)s")
    main()
|
algorandfoundation/puya
|
scripts/transform_lang_spec.py
|
Python
|
NOASSERTION
| 18,689 |
#!/usr/bin/env python3
import os
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
MYPY_REPO = "https://github.com/python/mypy.git"
VCS_ROOT = Path(__file__).parent.parent
TYPESHED_README = """
This is PuyaPy's custom typeshed, which is a curated subset of the official MyPy typeshed.
It only includes the required stubs used by PuyaPy as this speeds up MyPy's parsing speed
significantly.
However this means certain python modules such as `enum` or `dataclasses` cannot be used in
PuyaPy stubs unless this typeshed is updated.
The contents of the typeshed are populated by the `scripts/vendor_mypy.py` script, which is used
to vendor new versions of MyPy or to update the stubs included in this typeshed. So to add new
stubs, update that script and rerun.
""".strip()
def clone_branch(version: str) -> str:
    """Shallow-clone the given mypy tag/branch into the current working directory.

    The .git directory is removed afterwards so only the working tree remains.
    Returns the commit hash that was checked out.
    """
    git_clone = f"git clone --depth=1 --branch={version} --single-branch {MYPY_REPO} ."
    print(f"Executing: {git_clone}")
    subprocess.run(git_clone.split(), check=True)
    # with capture_output=True, stdout is always bytes — no need to assert on it
    git_hash = subprocess.run(
        "git rev-parse HEAD".split(), capture_output=True, check=True
    ).stdout
    # use shutil.rmtree instead of shelling out to `rm -rf`: portable to platforms
    # without rm, and raises loudly on failure instead of depending on rm semantics
    shutil.rmtree(".git")
    return git_hash.decode("utf8").strip()
def vendor_mypy(version: str) -> None:
    """Vendor the given mypy version into src/puyapy/_vendor and refresh the custom typeshed.

    Clones the requested version into a temporary directory, copies the `mypy`
    package into the vendor directory (recording version + commit hash in a
    `.version` marker file), then regenerates the minimal typeshed.
    """
    puya_src_dir = VCS_ROOT / "src" / "puyapy"
    vendor_dir = puya_src_dir / "_vendor"
    mypy_vendor = vendor_dir / "mypy"
    print(f"Vendoring mypy into: {mypy_vendor}")

    print("Removing existing mypy files...")
    shutil.rmtree(mypy_vendor, ignore_errors=True)

    print(f"Cloning mypy {version}...")
    original_cwd = os.getcwd()
    with tempfile.TemporaryDirectory() as tmp_dir:
        try:
            # clone_branch clones into the current directory, so chdir first
            os.chdir(tmp_dir)
            git_hash = clone_branch(version)
            print(f"Checked out mypy {version} @ {git_hash}")
        finally:
            # fix: restore the working directory before TemporaryDirectory cleanup
            # runs — deleting the directory the process is still inside fails on
            # some platforms and previously left the process in a deleted cwd
            os.chdir(original_cwd)

        print(f"Copying mypy into {mypy_vendor}...")
        shutil.copytree(Path(tmp_dir) / "mypy", mypy_vendor)
        (mypy_vendor / ".version").write_text(f"{version}: {git_hash}")

    print("Updating custom typeshed")
    update_puya_typeshed(mypy_vendor / "typeshed", puya_src_dir / "_typeshed")
def update_puya_typeshed(mypy_typeshed: Path, puya_typeshed: Path) -> None:
    """Regenerate puya's minimal typeshed from a full mypy typeshed.

    Only the stub files mypy itself requires (plus any explicitly added for
    PuyaPy) are copied — keeping this set small speeds up mypy parsing.
    """
    shutil.rmtree(puya_typeshed, ignore_errors=True)

    stubs = Path("stubs")
    stdlib = Path("stdlib")

    # paths are relative to the typeshed roots, so the copy loop can mirror them 1:1
    relative_to_copy = [
        # hard coded in mpyy/modulefinder.py, minimum requirements for mypy
        stubs / "mypy-extensions" / "mypy_extensions.pyi",
        stdlib / "VERSIONS",
        # hard coded in mpyy/build.py, minimum requirements for mypy
        stdlib / "builtins.pyi",
        stdlib / "typing.pyi",
        stdlib / "types.pyi",
        stdlib / "typing_extensions.pyi",
        stdlib / "_typeshed" / "__init__.pyi",
        stdlib / "_collections_abc.pyi",
        stdlib / "collections" / "abc.pyi",
        stdlib / "sys" / "__init__.pyi",
        stdlib / "abc.pyi",
        # needed for puyapy
        # stdlib / "enum.pyi"
    ]
    (puya_typeshed / stdlib).mkdir(exist_ok=True, parents=True)
    (puya_typeshed / stubs).mkdir(exist_ok=True, parents=True)
    for relative in relative_to_copy:
        copy_src = mypy_typeshed / relative
        copy_dst = puya_typeshed / relative
        if copy_src.is_dir():
            shutil.copytree(copy_src, copy_dst)
        else:
            copy_dst.parent.mkdir(exist_ok=True, parents=True)
            shutil.copy(copy_src, copy_dst)
    # empty marker so `collections` resolves as a package even though only abc.pyi is vendored
    (puya_typeshed / stdlib / "collections" / "__init__.pyi").touch()
    (puya_typeshed / "README.md").write_text(TYPESHED_README)
# CLI entry point: the single positional argument is the mypy version tag to vendor.
if __name__ == "__main__":
    if len(sys.argv) > 1:
        vendor_mypy(version=sys.argv[1])
    else:
        print("Usage: python vendor_mypy.py <version>")
        print("e.g. python vendor_mypy.py v1.5.0")
|
algorandfoundation/puya
|
scripts/vendor_mypy.py
|
Python
|
NOASSERTION
| 3,730 |
algorandfoundation/puya
|
src/_puya_lib/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
from algopy import (
Bytes,
UInt64,
subroutine,
urange,
)
from algopy.op import (
btoi,
bzero,
extract,
extract_uint16,
getbit,
itob,
replace,
select_uint64,
setbit_bytes,
substring,
)
# ARC-4 length/offset prefixes are big-endian uint16 values
UINT16_SIZE = 2
UINT64_SIZE = 8
# offset of the low 2 bytes within an 8-byte itob() result
UINT16_OFFSET = UINT64_SIZE - UINT16_SIZE
@subroutine
def dynamic_array_pop_bit(array: Bytes) -> tuple[Bytes, Bytes]:
    """
    Pop the last item from an arc4 dynamic array of arc4 encoded boolean items

    array: The bytes for the source array

    returns: tuple of (The popped item, The updated bytes for the source array)
    """
    # leading uint16 is the item count
    array_length = extract_uint16(array, 0)
    length_minus_1 = array_length - 1
    # write back the decremented count: itob() yields 8 bytes, extracting from
    # UINT16_OFFSET with length 0 ("to the end") gives the low 2 bytes
    result = replace(array, 0, extract(itob(length_minus_1), UINT16_OFFSET, 0))
    # bit index of the popped item, skipping the 16-bit length prefix
    popped_location = length_minus_1 + UINT16_SIZE * 8
    # build a 1-byte arc4 bool with the popped bit in the most significant position
    popped = setbit_bytes(b"\x00", 0, getbit(result, popped_location))
    # clear the popped bit, then truncate to prefix + ceil(remaining bits / 8) bytes
    result = setbit_bytes(result, popped_location, 0)
    result = substring(result, 0, UINT16_SIZE + ((length_minus_1 + 7) // 8))
    return popped, result
@subroutine
def dynamic_array_pop_fixed_size(array: Bytes, fixed_byte_size: UInt64) -> tuple[Bytes, Bytes]:
    """
    Pop the last item from an arc4 dynamic array of fixed sized items

    array: The bytes for the source array
    fixed_byte_size: The encoded size of each item in bytes

    returns: tuple of (The popped item, The updated bytes for the source array)
    """
    # leading uint16 is the item count
    array_length = extract_uint16(array, 0)
    length_minus_1 = array_length - 1
    # write back the decremented count (itob()'s low 2 bytes)
    result = replace(array, 0, extract(itob(length_minus_1), UINT16_OFFSET, 0))
    # the last item occupies the final fixed_byte_size bytes
    item_location = result.length - fixed_byte_size
    popped = extract(result, item_location, fixed_byte_size)
    result = substring(result, 0, item_location)
    return popped, result
@subroutine
def dynamic_array_pop_byte_length_head(array: Bytes) -> tuple[Bytes, Bytes]:
    """
    Pop the last item from an arc4 dynamic array of items that are prefixed with their length in
    bytes, e.g. arc4.String, arc4.DynamicBytes

    array: The bytes for the source array

    returns: tuple of (The popped item, The updated bytes for the source array)
    """
    array_length = extract_uint16(array, 0)
    length_minus_1 = array_length - 1
    # offset of the popped item's uint16 head entry, relative to head_and_tail
    popped_header_offset = length_minus_1 * UINT16_SIZE
    # strip the leading count (extract length 0 means "to the end")
    head_and_tail = extract(array, UINT16_SIZE, 0)
    popped_offset = extract_uint16(head_and_tail, popped_header_offset)
    # popped item (including its own length prefix) runs to the end of the tail
    popped = substring(head_and_tail, popped_offset, head_and_tail.length)
    # drop the popped item's head entry and its tail bytes
    head_and_tail = substring(head_and_tail, 0, popped_header_offset) + substring(
        head_and_tail, popped_header_offset + 2, popped_offset
    )
    # new count prefix + head recalculated from index 0 (removing a head entry
    # shifts every remaining tail offset)
    updated = extract(
        itob(length_minus_1), UINT16_OFFSET, UINT16_SIZE
    ) + recalculate_head_for_elements_with_byte_length_head(
        array_head_and_tail=head_and_tail, length=length_minus_1, start_at_index=UInt64(0)
    )
    return popped, updated
@subroutine
def dynamic_array_pop_dynamic_element(array: Bytes) -> tuple[Bytes, Bytes]:
    """
    Pop the last item from an arc4 dynamic array of dynamically sized items

    array: The bytes for the source array

    returns: tuple of (The popped item, The updated bytes for the source array)
    """
    array_length = extract_uint16(array, 0)
    length_minus_1 = array_length - 1
    # offset of the popped item's uint16 head entry, relative to head_and_tail
    popped_header_offset = length_minus_1 * UINT16_SIZE
    # strip the leading count (extract length 0 means "to the end")
    head_and_tail = extract(array, UINT16_SIZE, 0)
    popped_offset = extract_uint16(head_and_tail, popped_header_offset)
    popped = substring(head_and_tail, popped_offset, head_and_tail.length)

    # rebuild the head: removing one 2-byte head entry shifts every tail offset down by 2
    new_head = Bytes()
    for head_offset in urange(0, length_minus_1 * UINT16_SIZE, UINT16_SIZE):
        item_offset = extract_uint16(head_and_tail, head_offset)
        item_offset -= UINT16_SIZE
        new_head += extract(itob(item_offset), UINT16_OFFSET, UINT16_SIZE)

    # decremented count + rebuilt head + tail bytes up to (but excluding) the popped item
    updated = (
        extract(itob(length_minus_1), UINT16_OFFSET, UINT16_SIZE)
        + new_head
        + substring(head_and_tail, popped_header_offset + UINT16_SIZE, popped_offset)
    )
    return popped, updated
@subroutine
def dynamic_array_concat_bits(
    *, array: Bytes, new_items_bytes: Bytes, new_items_count: UInt64, is_packed: bool
) -> Bytes:
    """
    Concat data to an arc4 dynamic array of arc4 encoded boolean values

    array: The bytes for the source array
    new_items_bytes: Either the data portion of an arc4 packed array of booleans
                     or
                     a sparse array of concatenated arc4 booleans
    new_items_count: The count of new items being added
    is_packed: True if new_items_bytes represents a packed array, else False

    returns: The updated bytes for the source array
    """
    array_length = extract_uint16(array, 0)
    new_array_length = array_length + new_items_count
    # update the uint16 count prefix (itob()'s low 2 bytes, via length-0 extract)
    new_array_length_b = extract(itob(new_array_length), UINT16_OFFSET, 0)
    result = replace(array, 0, new_array_length_b)
    # grow the data section if the extra bits require more whole bytes
    current_bytes = (array_length + 7) // 8
    required_bytes = (new_array_length + 7) // 8
    if current_bytes < required_bytes:
        result += bzero(required_bytes - current_bytes)

    # first writable bit index: existing bits plus the 16-bit length prefix
    write_offset = array_length + 8 * UINT16_SIZE
    # packed source: consecutive bits (step 1); sparse source: one bool per byte with the
    # value in the MSB, hence step 8.
    # NOTE(review): for the sparse case the stop value is new_items_count while the step
    # is 8 — this only covers all items if callers pass a bit count here; confirm against
    # the compiler-generated call sites.
    for i in urange(0, new_items_count, UInt64(1) if is_packed else UInt64(8)):
        result = setbit_bytes(result, write_offset, getbit(new_items_bytes, i))
        write_offset += 1

    return result
@subroutine
def dynamic_array_concat_byte_length_head(
    array: Bytes, new_items_bytes: Bytes, new_items_count: UInt64
) -> Bytes:
    """
    Concat new items to an arc4 dynamic array of items that are prefixed with
    their byte length

    array: The bytes of the source array
    new_items_bytes: The bytes for all new items, concatenated
    new_items_count: The count of new items being added

    returns: The updated bytes for the source array
    """
    array_length = extract_uint16(array, 0)
    new_length = array_length + new_items_count
    # end of the existing head section: head entries plus the 2-byte count prefix
    header_end = array_length * UINT16_SIZE + 2
    # assemble: new count prefix + (existing head ++ zeroed head entries for the new
    # items ++ existing tail ++ new item bytes), then recompute every head offset
    return extract(
        itob(new_length), UINT16_OFFSET, UINT16_SIZE
    ) + recalculate_head_for_elements_with_byte_length_head(
        array_head_and_tail=(
            substring(array, 2, header_end)
            + bzero(new_items_count * UINT16_SIZE)
            + substring(array, header_end, array.length)
            + new_items_bytes
        ),
        length=new_length,
        start_at_index=UInt64(0),
    )
@subroutine
def dynamic_array_concat_dynamic_element(
    *,
    array_items_count: UInt64,
    array_head_and_tail: Bytes,
    new_items_count: UInt64,
    new_head_and_tail: Bytes,
) -> Bytes:
    """
    Concat new items to an arc4 dynamic array of dynamically sized items

    array_items_count: The count of items in the existing array
    array_head_and_tail: The existing array's head and tail bytes (without count prefix)
    new_items_count: The count of items being appended
    new_head_and_tail: The new items' head and tail bytes (without count prefix)

    returns: The updated bytes for the source array (including count prefix)
    """
    new_head = Bytes()
    # existing items' offsets shift right by the size of the appended head entries
    item_offset_adjustment = new_items_count * UINT16_SIZE
    for head_offset in urange(0, array_items_count * UINT16_SIZE, UINT16_SIZE):
        item_offset = extract_uint16(array_head_and_tail, head_offset)
        new_head += extract(itob(item_offset_adjustment + item_offset), UINT16_OFFSET, UINT16_SIZE)

    # new items' offsets are relative to new_head_and_tail; in the combined layout
    # their tails land exactly array_head_and_tail.length bytes further along
    item_offset_adjustment = array_head_and_tail.length
    for head_offset in urange(0, new_items_count * UINT16_SIZE, UINT16_SIZE):
        item_offset = extract_uint16(new_head_and_tail, head_offset)
        new_head += extract(itob(item_offset_adjustment + item_offset), UINT16_OFFSET, UINT16_SIZE)

    # combined count + merged head + existing tail + new tail
    return (
        extract(itob(array_items_count + new_items_count), UINT16_OFFSET, UINT16_SIZE)
        + new_head
        + substring(
            array_head_and_tail, array_items_count * UINT16_SIZE, array_head_and_tail.length
        )
        + substring(new_head_and_tail, new_items_count * UINT16_SIZE, new_head_and_tail.length)
    )
@subroutine
def dynamic_array_replace_byte_length_head(array: Bytes, new_item: Bytes, index: UInt64) -> Bytes:
    """
    Replace a single item in an arc4 dynamic array of items that are prefixed with
    their byte length

    array: The bytes of the source array
    new_item: The bytes of the new item to be inserted
    index: The index of the item to replace

    returns: The updated bytes for the source array
    """
    # the uint16 count prefix is preserved verbatim; the head/tail portion is rebuilt
    size_b = substring(array, 0, UINT16_SIZE)
    array_length = btoi(size_b)
    return size_b + static_array_replace_byte_length_head(
        array_head_and_tail=extract(array, UINT16_SIZE, 0),
        new_item=new_item,
        index=index,
        array_length=array_length,
    )
@subroutine
def dynamic_array_replace_dynamic_element(source: Bytes, new_item: Bytes, index: UInt64) -> Bytes:
    """
    Replace a single item in an arc4 dynamic array of dynamically sized items

    source: The bytes of the source array
    new_item: The bytes of the new item to be inserted
    index: The index of the item to replace

    returns: The updated bytes for the source array
    """
    # preserve the uint16 count prefix, delegate head/tail rewriting
    size_b = substring(source, 0, UINT16_SIZE)
    array_length = btoi(size_b)
    return size_b + static_array_replace_dynamic_element(
        array_head_and_tail=extract(source, UINT16_SIZE, 0),
        new_item=new_item,
        index=index,
        array_length=array_length,
    )
@subroutine
def static_array_replace_dynamic_element(
    *, array_head_and_tail: Bytes, new_item: Bytes, index: UInt64, array_length: UInt64
) -> Bytes:
    """
    Replace a single item in an arc4 static array of dynamically sized items

    array_head_and_tail: The head and tail bytes of the source array
    new_item: The bytes of the new item to be inserted
    index: The index of the item to replace
    array_length: The count of items in the array

    returns: The updated head and tail bytes
    """
    original_offset = extract_uint16(array_head_and_tail, index * 2)
    next_item_offset = extract_uint16(array_head_and_tail, (index + 1) * 2)
    end_of_tail = array_head_and_tail.length
    # non-zero when an item follows `index`; for the last item the replaced
    # region extends to the end of the tail instead of the next item's offset
    is_before_end = array_length - index - 1
    end_offset = select_uint64(end_of_tail, next_item_offset, is_before_end)
    original_item_length = end_offset - original_offset
    new_item_length = new_item.length
    # splice the new item between the preserved prefix and suffix
    new_head_and_tail = (
        substring(array_head_and_tail, 0, original_offset)
        + new_item
        + substring(array_head_and_tail, end_offset, end_of_tail)
    )
    # shift every subsequent item's head offset by the size delta
    for head_offset in urange((index + 1) * 2, array_length * 2, 2):
        tail_offset = extract_uint16(new_head_and_tail, head_offset)
        tail_offset += new_item_length
        tail_offset -= original_item_length
        tail_offset_bytes = extract(itob(tail_offset), UINT16_OFFSET, UINT16_SIZE)
        new_head_and_tail = replace(new_head_and_tail, head_offset, tail_offset_bytes)
    return new_head_and_tail
@subroutine
def static_array_replace_byte_length_head(
    array_head_and_tail: Bytes, new_item: Bytes, index: UInt64, array_length: UInt64
) -> Bytes:
    """
    Replace a single item in an arc4 static array of items that are prefixed with
    their byte length

    array_head_and_tail: The head and tail bytes of the source array
    new_item: The bytes of the new item to be inserted
    index: The index of the item to replace
    array_length: The count of items in the array

    returns: The updated head and tail bytes
    """
    assert index < array_length, "Index out of bounds"
    offset_for_index = extract_uint16(array_head_and_tail, index * UINT16_SIZE)
    old_item_length = extract_uint16(array_head_and_tail, offset_for_index)
    # the old item spans its uint16 length prefix plus its data bytes
    old_item_end = offset_for_index + old_item_length + UINT16_SIZE
    return recalculate_head_for_elements_with_byte_length_head(
        array_head_and_tail=substring(array_head_and_tail, 0, offset_for_index)
        + new_item
        + substring(array_head_and_tail, old_item_end, array_head_and_tail.length),
        length=array_length,
        # offsets before `index` are unaffected, so recalculation can start there
        start_at_index=index,
    )
@subroutine
def recalculate_head_for_elements_with_byte_length_head(
    array_head_and_tail: Bytes, length: UInt64, start_at_index: UInt64
) -> Bytes:
    """
    Recalculates the offset values of an arc4 static array, where each item's head consists of
    its length in bytes as uint16

    array_head_and_tail: The head and tail bytes of the array
    length: The count of items in the array
    start_at_index: Optionally start at a non-zero index for performance optimisation. The offset
                    at this index is assumed to be correct if start_at_index is not 0

    returns: The updated bytes for the source array
    """
    # when starting at index 0 the first tail offset is simply the head size,
    # otherwise trust the existing offset recorded at start_at_index
    tail_offset = select_uint64(
        length * UINT16_SIZE,
        extract_uint16(array_head_and_tail, start_at_index * UINT16_SIZE),
        start_at_index,  # use length * UINT16_SIZE if 0 otherwise inspect head
    )
    for head_offset in urange(start_at_index * UINT16_SIZE, length * UINT16_SIZE, UINT16_SIZE):
        # write the current item's tail offset into its head slot
        tail_offset_bytes = extract(itob(tail_offset), UINT16_OFFSET, UINT16_SIZE)
        array_head_and_tail = replace(array_head_and_tail, head_offset, tail_offset_bytes)
        # advance past the item's uint16 length prefix plus its data
        tail_offset += extract_uint16(array_head_and_tail, tail_offset) + UINT16_SIZE
        # NOTE(review): urange controls the loop variable, so this extra increment
        # appears to have no effect — confirm against algopy urange semantics
        head_offset += UINT16_SIZE
    return array_head_and_tail
|
algorandfoundation/puya
|
src/_puya_lib/arc4.py
|
Python
|
NOASSERTION
| 12,234 |
from algopy import Bytes, UInt64, op, subroutine
@subroutine
def is_substring(item: Bytes, sequence: Bytes) -> bool:
    """
    Search for a shorter string in a larger one.

    item: The bytes to search for
    sequence: The bytes to search within

    returns: True if `item` occurs anywhere in `sequence`
    """
    # naive scan: compare `item` against every window of item.length bytes;
    # an empty `item` matches immediately on the first comparison
    start = UInt64(0)
    while start + item.length <= sequence.length:
        if item == op.substring(sequence, start, start + item.length):
            return True
        start += 1
    return False
|
algorandfoundation/puya
|
src/_puya_lib/bytes_.py
|
Python
|
NOASSERTION
| 388 |
from algopy import (
Bytes,
OnCompleteAction,
OpUpFeeSource,
TransactionType,
UInt64,
op,
subroutine,
)
@subroutine
def ensure_budget(required_budget: UInt64, fee_source: OpUpFeeSource) -> None:
    """Raise the available opcode budget to at least `required_budget` (plus a buffer)
    by submitting minimal inner app calls; `fee_source` controls who pays their fees.
    """
    # A budget buffer is necessary to deal with an edge case of ensure_budget():
    # if the current budget is equal to or only slightly higher than the
    # required budget then it's possible for ensure_budget() to return with a
    # current budget less than the required budget. The buffer prevents this
    # from being the case.
    required_budget_with_buffer = required_budget + 10
    while required_budget_with_buffer > op.Global.opcode_budget():
        # each iteration submits an app call that deletes itself on completion,
        # existing only to add its opcode budget to the group's pool
        op.ITxnCreate.begin()
        op.ITxnCreate.set_type_enum(TransactionType.ApplicationCall)
        op.ITxnCreate.set_on_completion(OnCompleteAction.DeleteApplication)
        # minimal constant program used for both approval and clear-state
        # (presumably version byte + "approve" body — verify against AVM encoding)
        op.ITxnCreate.set_approval_program(Bytes.from_hex("068101"))
        op.ITxnCreate.set_clear_state_program(Bytes.from_hex("068101"))
        match fee_source:
            case OpUpFeeSource.GroupCredit:
                op.ITxnCreate.set_fee(0)
            case OpUpFeeSource.AppAccount:
                op.ITxnCreate.set_fee(op.Global.min_txn_fee)
            # case OpUpFeeSource.Any:
            #     any means no fee set
        op.ITxnCreate.submit()
|
algorandfoundation/puya
|
src/_puya_lib/util.py
|
Python
|
NOASSERTION
| 1,326 |
algorandfoundation/puya
|
src/puya/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
import argparse
from importlib.metadata import version
from pathlib import Path
import attrs
from puya.log import LogFormat, LogLevel, configure_logging
from puya.main import main
@attrs.define(kw_only=True)
class _PuyaCLIArgs:
    """Typed argparse namespace for the puya CLI (filled via parse_args(namespace=...))."""

    # path to the compile options JSON (required by cli())
    options: Path | None = None
    # path to the serialized AWST JSON (required by cli())
    awst: Path | None = None
    # optional path to source annotations JSON
    source_annotations: Path | None = None
    log_level: LogLevel = LogLevel.info
    log_format: LogFormat = LogFormat.default
def cli() -> None:
    """Parse command-line arguments, configure logging, then invoke the compiler."""
    arg_parser = argparse.ArgumentParser(
        prog="puya", formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    # TODO: use version of puya instead once package is split
    arg_parser.add_argument("--version", action="version", version=f"%(prog)s {version('puyapy')}")
    arg_parser.add_argument(
        "--log-level", type=LogLevel.from_string, choices=list(LogLevel), default=LogLevel.info
    )
    arg_parser.add_argument(
        "--log-format",
        type=LogFormat.from_string,
        choices=list(LogFormat),
        default=LogFormat.default,
    )
    arg_parser.add_argument("--options", type=Path, required=True)
    arg_parser.add_argument("--awst", type=Path, required=True)
    arg_parser.add_argument("--source-annotations", type=Path)
    # parse into a typed namespace rather than a plain argparse.Namespace
    cli_args = _PuyaCLIArgs()
    arg_parser.parse_args(namespace=cli_args)
    configure_logging(min_log_level=cli_args.log_level, log_format=cli_args.log_format)
    # argparse enforces required=True; the asserts narrow the Optional types
    assert cli_args.options
    options_json = cli_args.options.read_text("utf8")
    assert cli_args.awst
    awst_json = cli_args.awst.read_text("utf8")
    source_annotations_json = None
    if cli_args.source_annotations:
        source_annotations_json = cli_args.source_annotations.read_text("utf8")
    main(
        options_json=options_json,
        awst_json=awst_json,
        source_annotations_json=source_annotations_json,
    )
# module entry point (python -m puya)
if __name__ == "__main__":
    cli()
|
algorandfoundation/puya
|
src/puya/__main__.py
|
Python
|
NOASSERTION
| 1,862 |
# the all-zeros account address, in encoded form
ZERO_ADDRESS = "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAY5HFKQ"
# encoded address = base32(32-byte public key + 4-byte checksum)
ENCODED_ADDRESS_LENGTH = 58
PUBLIC_KEY_HASH_LENGTH = 32
ADDRESS_CHECKSUM_LENGTH = 4

# AVM value-size limits
MAX_BIGUINT_BITS = 512
MAX_UINT64 = 2**64 - 1
MAX_BIGUINT_BYTES = MAX_BIGUINT_BITS // 8
MAX_BYTES_LENGTH = 4096
MAX_SCRATCH_SLOT_NUMBER = 255

# application state limits
MAX_GLOBAL_STATE_KEYS = 64
MAX_LOCAL_STATE_KEYS = 16
MAX_STATE_KEY_LENGTH = 64
MIN_BOX_KEY_LENGTH = 1
MAX_BOX_KEY_LENGTH = 64

# transaction / program limits
MAX_TRANSACTION_GROUP_SIZE = 16
MAX_APP_PAGE_SIZE = 2048

HASH_PREFIX_PROGRAM = b"Program"
"""Represents the prefix added to a program before hashing e.g. for a LogicSigs address"""

# Which language versions does this version of puya support targeting
# This will typically just be the current mainnet version and potentially the vNext if it doesn't
# contain breaking changes
SUPPORTED_AVM_VERSIONS = [10, 11, 12]
# Which language version is currently deployed to mainnet
MAINNET_AVM_VERSION = 10
|
algorandfoundation/puya
|
src/puya/algo_constants.py
|
Python
|
NOASSERTION
| 925 |
import base64
import json
import typing
from collections.abc import Collection, Mapping, Sequence
from puya import (
artifact_metadata as md,
log,
)
from puya.avm import OnCompletionAction
from puya.awst.nodes import (
AppStorageKind,
ARC4CreateOption,
)
from puya.parse import SourceLocation
# maps OnCompletionAction members to the snake_case keys used in ARC-32 call configs
OCA_ARC32_MAPPING = {
    OnCompletionAction.NoOp: "no_op",
    OnCompletionAction.OptIn: "opt_in",
    OnCompletionAction.CloseOut: "close_out",
    OnCompletionAction.ClearState: "clear_state",
    OnCompletionAction.UpdateApplication: "update_application",
    OnCompletionAction.DeleteApplication: "delete_application",
}

# JSON-compatible value/object aliases used when assembling the app spec document
JSONValue: typing.TypeAlias = "str | int | float | bool | None | Sequence[JSONValue] | JSONDict"
JSONDict: typing.TypeAlias = Mapping[str, "JSONValue"]

logger = log.get_logger(__name__)
def _encode_source(teal_text: str) -> str:
return base64.b64encode(teal_text.encode()).decode("utf-8")
def _encode_schema_declaration(state: md.ContractState) -> JSONDict:
    """Encode one declared state field for the ARC-32 "schema" section."""
    return {
        "type": state.storage_type.name,
        "key": state.key_or_prefix.decode("utf-8"),  # TODO: support not utf8 keys?
        "descr": state.description,
    }
def _encode_state_declaration(state: md.StateTotals) -> JSONDict:
    """Encode global/local state allocation totals for the ARC-32 "state" section."""
    return {
        "global": {
            "num_byte_slices": state.global_bytes,
            "num_uints": state.global_uints,
        },
        "local": {
            "num_byte_slices": state.local_bytes,
            "num_uints": state.local_uints,
        },
    }
def _encode_schema(state: Collection[md.ContractState]) -> JSONDict:
    """Encode declared state fields, sorted by name for deterministic output."""
    return {
        "declared": {
            s.name: _encode_schema_declaration(s) for s in sorted(state, key=lambda s: s.name)
        },
        "reserved": {},  # TODO?
    }
def _encode_call_config(method: md.ARC4Method) -> JSONDict:
    """Map each of the method's allowed on-completion actions to its create requirement
    (CREATE = only on create, CALL = only on call, ALL = both)."""
    match method.create:
        case ARC4CreateOption.require:
            call_config = "CREATE"
        case ARC4CreateOption.allow:
            call_config = "ALL"
        case ARC4CreateOption.disallow:
            call_config = "CALL"
        case never:
            typing.assert_never(never)
    return {OCA_ARC32_MAPPING[oca]: call_config for oca in method.allowed_completion_types}
def _encode_bare_method_configs(methods: Sequence[md.ARC4BareMethod]) -> JSONDict:
    """Merge the call-config entries of all bare methods into a single mapping.

    Later methods win when two methods share an on-completion action.
    """
    return {
        oca_name: call_config
        for bare_method in methods
        for oca_name, call_config in _encode_call_config(bare_method).items()
    }
def _get_signature(method: md.ARC4ABIMethod) -> str:
    """Return the canonical ARC-4 method signature, e.g. ``name(uint64,byte[])bool``."""
    arg_types = ",".join(m.type_ for m in method.args)
    return f"{method.name}({arg_types}){method.returns.type_}"
def _encode_default_arg(arg: md.ARC4MethodArg, loc: SourceLocation | None) -> JSONDict | None:
match arg.client_default:
case None:
return None
case md.MethodArgDefaultConstant(data=constant_data, type_=constant_arc4_type):
if constant_arc4_type == "string":
string = constant_data[2:].decode("utf8")
return {"source": "constant", "data": string}
elif constant_arc4_type.startswith("uint"):
number = int.from_bytes(constant_data, signed=False)
return {"source": "constant", "data": number}
else:
logger.warning(
f"parameter {arg.name!r} has unsupported default constant type for ARC-32",
location=loc,
)
return None
case md.MethodArgDefaultFromMethod(
name=method_name, return_type=return_type, readonly=readonly
):
return {
"source": "abi-method",
"data": {
"name": method_name,
"args": [],
"readonly": readonly, # ARC-22
"returns": {"type": return_type},
},
}
case md.MethodArgDefaultFromState(kind=kind, key=key):
match kind:
case AppStorageKind.app_global:
source_name = "global-state"
case AppStorageKind.account_local:
source_name = "local-state"
case AppStorageKind.box:
logger.error(
"default argument from box storage are not supported by ARC-32",
location=loc,
)
case unexpected:
typing.assert_never(unexpected)
return {
"source": source_name,
# TODO: handle non utf-8 bytes
"data": key.decode("utf-8"),
}
def _encode_arc32_method_hint(metadata: md.ContractMetaData, method: md.ARC4ABIMethod) -> JSONDict:
    """Build the ARC-32 "hints" entry for one ABI method (structs, defaults, call config)."""
    # struct hints are keyed by arg name, with "output" reserved for the return struct
    structs = {a.name: metadata.structs[a.struct] for a in method.args if a.struct}
    if method.returns.struct:
        structs["output"] = metadata.structs[method.returns.struct]
    default_arguments = {
        arg.name: default
        for arg in method.args
        if (default := _encode_default_arg(arg, method.config_location)) is not None
    }
    return {
        # deprecated by ARC-22
        "read_only": True if method.readonly else None,
        "default_arguments": default_arguments or None,
        "call_config": _encode_call_config(method),
        "structs": _encode_arc32_method_structs(structs),
    }
def _encode_arc32_method_structs(structs: Mapping[str, md.ARC4Struct]) -> JSONDict | None:
    """Encode struct definitions keyed by purpose (arg name or "output"); None when empty."""
    if not structs:
        return None
    return {
        struct_purpose: {
            "name": struct_def.name,
            "elements": [[field.name, field.type] for field in struct_def.fields],
        }
        for struct_purpose, struct_def in structs.items()
    }
def _encode_arc32_hints(
    metadata: md.ContractMetaData, methods: list[md.ARC4ABIMethod]
) -> JSONDict:
    """Build the ARC-32 "hints" section, keyed by each method's ARC-4 signature."""
    return {
        _get_signature(method): _encode_arc32_method_hint(metadata, method) for method in methods
    }
def _encode_abi_method(method: md.ARC4ABIMethod) -> JSONDict:
    """Encode one ABI method for the ARC-4 "contract" section."""
    return {
        "name": method.name,
        "args": [
            {
                "type": arg.type_,
                "name": arg.name,
                "desc": arg.desc,
            }
            for arg in method.args
        ],
        "readonly": method.readonly,  # ARC-22
        "returns": {
            "type": method.returns.type_,
            "desc": method.returns.desc,
        },
        "desc": method.desc,
    }
def _encode_arc4_contract(
    name: str, desc: str | None, methods: Sequence[md.ARC4ABIMethod]
) -> JSONDict:
    """Build the ARC-4 contract description embedded in the app spec."""
    return {
        "name": name,
        "desc": desc,
        "methods": [_encode_abi_method(m) for m in methods],
        "networks": {},
    }
def _filter_none(value: JSONDict) -> JSONValue:
if isinstance(value, dict):
return {k: _filter_none(v) for k, v in value.items() if v is not None}
if isinstance(value, list):
return list(map(_filter_none, value))
return value
def create_arc32_json(
    approval_program: str, clear_program: str, metadata: md.ContractMetaData
) -> str:
    """Render the ARC-32 application specification JSON for a contract.

    The program arguments are presumably TEAL source text (they are passed to
    _encode_source) — TODO confirm at call site. None-valued entries are
    stripped from the whole document before serialization.
    """
    bare_methods = [m for m in metadata.arc4_methods if isinstance(m, md.ARC4BareMethod)]
    abi_methods = [m for m in metadata.arc4_methods if isinstance(m, md.ARC4ABIMethod)]
    app_spec = {
        "hints": _encode_arc32_hints(metadata, abi_methods),
        "source": {
            "approval": _encode_source(approval_program),
            "clear": _encode_source(clear_program),
        },
        "state": _encode_state_declaration(metadata.state_totals),
        "schema": {
            "global": _encode_schema(metadata.global_state.values()),
            "local": _encode_schema(metadata.local_state.values()),
        },
        "contract": _encode_arc4_contract(metadata.name, metadata.description, abi_methods),
        "bare_call_config": _encode_bare_method_configs(bare_methods),
    }
    return json.dumps(_filter_none(app_spec), indent=4)
|
algorandfoundation/puya
|
src/puya/arc32.py
|
Python
|
NOASSERTION
| 7,940 |
import base64
import itertools
import typing
from collections import defaultdict
from collections.abc import Iterable, Mapping, Sequence
from importlib.metadata import version as metadata_version
from cattrs.preconf.json import make_converter
from packaging import version
from puya import (
arc56_models as models,
log,
)
from puya.artifact_metadata import (
ARC4ABIMethod,
ARC4BareMethod,
ARC4Struct,
ContractMetaData,
ContractState,
MethodArgDefault,
MethodArgDefaultConstant,
MethodArgDefaultFromMethod,
MethodArgDefaultFromState,
)
from puya.awst.nodes import (
AppStorageKind,
ARC4CreateOption,
)
from puya.compilation_artifacts import CompiledProgram, DebugInfo
from puya.utils import unique
# TODO: use puya once the backend is shipped as separate package
# version of the installed compiler distribution; embedded in the ARC-56 compilerInfo field
_ALGOPY_VERSION = version.parse(metadata_version("puyapy"))
logger = log.get_logger(__name__)
def create_arc56_json(
    *,
    metadata: ContractMetaData,
    approval_program: CompiledProgram,
    clear_program: CompiledProgram,
    template_prefix: str,
) -> str:
    """Serialize contract metadata and compiled programs into ARC-56 app spec JSON.

    Both programs must carry debug info. byteCode and compilerInfo are only
    emitted when bytecode is present for *both* programs.
    """
    assert approval_program.debug_info is not None
    assert clear_program.debug_info is not None
    # omit_if_default keeps defaulted/optional fields out of the emitted JSON
    converter = make_converter(omit_if_default=True)
    bare_methods = [m for m in metadata.arc4_methods if isinstance(m, ARC4BareMethod)]
    abi_methods = [m for m in metadata.arc4_methods if isinstance(m, ARC4ABIMethod)]
    # use shorter name for structs unless there is a collision
    aliases = _StructAliases(metadata.structs.values())
    schema = metadata.state_totals
    app_spec = models.Contract(
        arcs=(22, 28),
        name=metadata.name,
        desc=metadata.description,
        networks={},
        structs={
            aliases.resolve(n): [
                models.StructField(
                    name=e.name,
                    type=aliases.resolve(e.struct) or e.type,
                )
                for e in s.fields
            ]
            for n, s in metadata.structs.items()
        },
        methods=[
            models.Method(
                name=m.name,
                desc=m.desc,
                args=[
                    models.MethodArg(
                        type=a.type_,
                        name=a.name,
                        desc=a.desc,
                        struct=aliases.resolve(a.struct),
                        defaultValue=_encode_default_arg(a.client_default),
                    )
                    for a in m.args
                ],
                returns=models.MethodReturns(
                    type=m.returns.type_,
                    desc=m.returns.desc,
                    struct=aliases.resolve(m.returns.struct),
                ),
                actions=_method_actions(m),
                readonly=m.readonly,
                events=[_struct_to_event(aliases, struct) for struct in m.events],
                # left for users to fill in for now
                recommendations=models.MethodRecommendations(
                    innerTransactionCount=None,
                    boxes=None,
                    accounts=None,
                    apps=None,
                    assets=None,
                ),
            )
            for m in abi_methods
        ],
        state=models.ContractState(
            schema={
                "global": {"ints": schema.global_uints, "bytes": schema.global_bytes},
                "local": {"ints": schema.local_uints, "bytes": schema.local_bytes},
            },
            keys={
                "global": _storage_keys(aliases, metadata.global_state),
                "local": _storage_keys(aliases, metadata.local_state),
                "box": _storage_keys(aliases, metadata.boxes),
            },
            maps={
                # note: at present there is no way of defining global/local maps
                "global": _storage_maps(aliases, metadata.global_state),
                "local": _storage_maps(aliases, metadata.local_state),
                "box": _storage_maps(aliases, metadata.boxes),
            },
        ),
        bareActions=_combine_actions(list(map(_method_actions, bare_methods))),
        sourceInfo={
            "approval": models.ProgramSourceInfo(
                sourceInfo=_get_source_info(approval_program.debug_info),
                pcOffsetMethod="cblocks" if approval_program.debug_info.op_pc_offset else "none",
            ),
            "clear": models.ProgramSourceInfo(
                sourceInfo=_get_source_info(clear_program.debug_info),
                pcOffsetMethod="cblocks" if clear_program.debug_info.op_pc_offset else "none",
            ),
        },
        source={
            "approval": _encode_str(approval_program.teal_src),
            "clear": _encode_str(clear_program.teal_src),
        },
        byteCode=(
            {
                "approval": _encode_bytes(approval_program.bytecode),
                "clear": _encode_bytes(clear_program.bytecode),
            }
            if approval_program.bytecode and clear_program.bytecode
            else None
        ),
        compilerInfo=(
            _compiler_info() if approval_program.bytecode and clear_program.bytecode else None
        ),
        events=[
            _struct_to_event(aliases, struct)
            for struct in unique(
                e for m in metadata.arc4_methods if isinstance(m, ARC4ABIMethod) for e in m.events
            )
        ],
        templateVariables={
            # int template values are encoded as 8 big-endian bytes, like an AVM uint64
            name.removeprefix(template_prefix): models.TemplateVariable(
                type=aliases.resolve(metadata.template_variable_types[name]),
                value=(
                    _encode_bytes(value.to_bytes(length=8) if isinstance(value, int) else value)
                    if value is not None
                    else None
                ),
            )
            for program in (approval_program, clear_program)
            for name, value in program.template_variables.items()
        },
        # TODO: provide a way for contracts to declare "public" scratch vars
        scratchVariables=None,
    )
    return converter.dumps(app_spec, indent=4)
def _get_source_info(debug_info: DebugInfo) -> Sequence[models.SourceInfo]:
    """Group program counters by error message, sorted by message for stable output."""
    pcs_by_error: dict[str, list[int]] = {}
    for pc, event in debug_info.pc_events.items():
        error = event.get("error")
        if error:
            pcs_by_error.setdefault(error, []).append(pc)
    result = []
    for message in sorted(pcs_by_error):
        result.append(models.SourceInfo(pc=pcs_by_error[message], errorMessage=message))
    return result
class _StructAliases:
    """Maps struct fullnames to short aliases.

    A struct keeps its short name unless that name is already claimed by an
    earlier struct or clashes with a native AVM type name, in which case the
    fullname is used instead.
    """

    def __init__(self, structs: Iterable[ARC4Struct]) -> None:
        claimed = dict[str, str]()  # alias -> fullname
        for struct in structs:
            short_is_taken = struct.name in claimed or struct.name in models.AVMType
            alias = struct.fullname if short_is_taken else struct.name
            claimed[alias] = struct.fullname
        # invert to fullname -> alias for resolution
        self.aliases = {fullname: alias for alias, fullname in claimed.items()}

    @typing.overload
    def resolve(self, struct: str) -> str: ...
    @typing.overload
    def resolve(self, struct: None) -> None: ...
    def resolve(self, struct: str | None) -> str | None:
        # unknown names resolve to themselves
        return None if struct is None else self.aliases.get(struct, struct)
def _struct_to_event(structs: _StructAliases, struct: ARC4Struct) -> models.Event:
    """Convert an ARC-4 struct definition into an ARC-28 event description."""
    event_args = []
    for field in struct.fields:
        event_args.append(
            models.EventArg(
                name=field.name,
                type=field.type,
                struct=structs.resolve(field.struct),
            )
        )
    return models.Event(
        name=structs.resolve(struct.name),
        desc=struct.desc,
        args=event_args,
    )
def _storage_keys(
    structs: _StructAliases, state: Mapping[str, ContractState]
) -> models.StorageKeys:
    """Encode the non-map storage entries as ARC-56 storage keys."""
    keys = {}
    for name, meta in state.items():
        if meta.is_map:
            continue  # maps are handled by _storage_maps
        keys[name] = models.StorageKey(
            desc=meta.description,
            keyType=structs.resolve(meta.arc56_key_type),
            valueType=structs.resolve(meta.arc56_value_type),
            key=_encode_bytes(meta.key_or_prefix),
        )
    return keys
def _storage_maps(
    structs: _StructAliases, state: Mapping[str, ContractState]
) -> models.StorageMaps:
    """Encode the map-style storage entries as ARC-56 storage maps."""
    maps = {}
    for name, meta in state.items():
        if not meta.is_map:
            continue  # plain keys are handled by _storage_keys
        maps[name] = models.StorageMap(
            desc=meta.description,
            keyType=structs.resolve(meta.arc56_key_type),
            valueType=structs.resolve(meta.arc56_value_type),
            prefix=_encode_bytes(meta.key_or_prefix),
        )
    return maps
def _method_actions(method: ARC4BareMethod | ARC4ABIMethod) -> models.MethodActions:
    """Derive the ARC-56 create/call actions for a method.

    An OCA is listed under "create" when the method may be called at app
    creation, and under "call" when it may be called on an existing app,
    filtered to the OCA names each context permits.
    """
    # these conditions don't depend on the OCA being inspected - hoist them
    # out of the comprehensions instead of re-evaluating per item
    can_create = method.create != ARC4CreateOption.disallow
    can_call = method.create != ARC4CreateOption.require
    return models.MethodActions(
        create=[
            oca.name
            for oca in method.allowed_completion_types
            if can_create and allowed_create_oca(oca.name)
        ],
        call=[
            oca.name
            for oca in method.allowed_completion_types
            if can_call and allowed_call_oca(oca.name)
        ],
    )
def _encode_default_arg(default: MethodArgDefault | None) -> models.MethodArgDefaultValue | None:
    """Translate a method argument's default-value strategy into its ARC-56 encoding."""
    if default is None:
        return None
    if isinstance(default, MethodArgDefaultConstant):
        # literal constant, passed through directly by clients
        return models.MethodArgDefaultValue(
            data=_encode_bytes(default.data),
            type=default.type_,
            source=models.DefaultValueSource.literal,
        )
    if isinstance(default, MethodArgDefaultFromState):
        # value is read from app storage; map the storage kind to its source
        kind = default.kind
        if kind == AppStorageKind.account_local:
            source = models.DefaultValueSource.local_
        elif kind == AppStorageKind.app_global:
            source = models.DefaultValueSource.global_
        elif kind == AppStorageKind.box:
            source = models.DefaultValueSource.box
        else:
            typing.assert_never(kind)
        return models.MethodArgDefaultValue(
            data=_encode_bytes(default.key), type=default.key_type, source=source
        )
    if isinstance(default, MethodArgDefaultFromMethod):
        # value is obtained by (simulated) invocation of another method
        return models.MethodArgDefaultValue(
            data=default.signature,
            source=models.DefaultValueSource.method,
        )
    typing.assert_never(default)
def _combine_actions(actions: Sequence[models.MethodActions]) -> models.MethodActions:
    """Union the allowed actions across methods, sorted for deterministic output."""
    create_ocas: set = set()
    call_ocas: set = set()
    for action in actions:
        create_ocas.update(action.create)
        call_ocas.update(action.call)
    return models.MethodActions(
        create=sorted(create_ocas),
        call=sorted(call_ocas),
    )
def allowed_create_oca(
oca: str,
) -> typing.TypeGuard[typing.Literal["NoOp", "OptIn", "DeleteApplication"]]:
return oca in ("NoOp", "OptIn", "DeleteApplication")
def allowed_call_oca(
oca: str,
) -> typing.TypeGuard[
typing.Literal["NoOp", "OptIn", "CloseOut", "UpdateApplication", "DeleteApplication"]
]:
return oca in ("NoOp", "OptIn", "CloseOut", "UpdateApplication", "DeleteApplication")
def _encode_str(value: str) -> str:
return _encode_bytes(value.encode("utf8"))
def _encode_bytes(value: bytes) -> str:
return base64.b64encode(value).decode("utf-8")
def _compiler_info() -> models.CompilerInfo:
    """Describe the compiler and its version for the ARC-56 compilerInfo field."""
    compiler_version = models.CompilerVersion(
        major=_ALGOPY_VERSION.major,
        minor=_ALGOPY_VERSION.minor,
        patch=_ALGOPY_VERSION.micro,
        commitHash=None,
    )
    return models.CompilerInfo(compiler="puya", compilerVersion=compiler_version)
|
algorandfoundation/puya
|
src/puya/arc56.py
|
Python
|
NOASSERTION
| 11,621 |
# ruff: noqa: N815
import enum
import typing
from collections.abc import Mapping, Sequence
import attrs
ABIType = str
"""An ABI-encoded type"""
StructName = str
"""The name of a defined struct"""
# identifies which of an application's two programs a value relates to
ProgramType = typing.Literal["approval", "clear"]
class AVMType(enum.StrEnum):
    """A native AVM type"""
    # values are the identifiers used in the ARC-56 JSON, hence the "AVM" prefix
    bytes = "AVMBytes"
    """Raw byteslice without the length prefixed that is specified in ARC-4"""
    string = "AVMString"
    """A utf-8 string without the length prefix that is specified in ARC-4"""
    uint64 = "AVMUint64"
    """A 64-bit unsigned integer"""
@attrs.frozen
class SourceInfo:
    """Associates an error message with the program counter(s) that can raise it."""
    pc: Sequence[int]
    """The program counter value(s). Could be offset if pcOffsetMethod is not 'none'"""
    errorMessage: str
    """A human-readable string that describes the error when the program fails at the given PC"""
@attrs.frozen
class ProgramSourceInfo:
    """Source (error) information for a single program."""
    sourceInfo: Sequence[SourceInfo]
    """The source information for the program"""
    pcOffsetMethod: typing.Literal["none", "cblocks"]
    """
    How the program counter offset is calculated
    none: The pc values in sourceInfo are not offset
    cblocks: The pc values in sourceInfo are offset by the PC of the first op after the
    last cblock at the top of the program
    """
@attrs.frozen(kw_only=True)
class StorageKey:
    """Describes a single key in app storage"""
    desc: str | None = None
    """Description of what this storage key holds"""
    # key/value types may be an ABI type string, a native AVM type name, or a struct name
    keyType: ABIType | AVMType | StructName
    """The type of the key"""
    valueType: ABIType | AVMType | StructName
    """The type of the value"""
    key: str
    """The base64-encoded key"""
@attrs.frozen(kw_only=True)
class StorageMap:
    """Describes a mapping of key-value pairs in storage"""
    desc: str | None = None
    """Description of what the key-value pairs in this mapping hold"""
    keyType: ABIType | AVMType | StructName
    """The type of the keys in the map"""
    valueType: ABIType | AVMType | StructName
    """The type of the values in the map"""
    # unlike StorageKey.key, this is only the shared prefix of all keys in the map
    prefix: str | None = None
    """The base64-encoded prefix of the map keys"""
@attrs.frozen
class StructField:
    """Information about a single field in a struct"""
    name: str
    """The name of the struct field"""
    # a nested sequence of StructField allows anonymous/inline struct types
    type: ABIType | StructName | Sequence["StructField"]
    """The type of the struct field's value"""
@attrs.frozen
class EventArg:
    """A single argument of an ARC-28 event."""
    type: ABIType
    """
    The type of the argument.
    The `struct` field should also be checked to determine if this arg is a struct.
    """
    name: str | None = None
    """Optional, user-friendly name for the argument"""
    desc: str | None = None
    """Optional, user-friendly description for the argument"""
    struct: StructName | None = None
    """
    If the type is a struct, the name of the struct
    Note: this is a separate field to maintain backwards compatibility with ARC-23
    """
@attrs.frozen(kw_only=True)
class Event:
    """An ARC-28 event definition."""
    name: str
    """The name of the event"""
    desc: str | None = None
    """Optional, user-friendly description for the event"""
    args: Sequence[EventArg]
    """The arguments of the event, in order"""
class DefaultValueSource(enum.Enum):
    """Where a method argument's default value comes from."""
    box = "box"
    """The data key signifies the box key to read the value from"""
    global_ = "global"  # trailing underscore: "global" is a Python keyword
    """The data key signifies the global state key to read the value from"""
    local_ = "local"  # trailing underscore for symmetry with global_
    """The data key signifies the local state key to read the value from (for the sender)"""
    literal = "literal"
    """the value is a literal and should be passed directly as the argument"""
    method = "method"
    """
    The utf8 signature of the method in this contract to call to get the default value.
    If the method has arguments, they all must have default values.
    The method **MUST** be readonly so simulate can be used to get the default value.
    """
@attrs.frozen(kw_only=True)
class MethodArgDefaultValue:
    """A default value for a method argument, and where/how clients obtain it."""
    source: DefaultValueSource
    """Where the default value is coming from"""
    type: ABIType | AVMType | None = None
    """
    How the data is encoded.
    This is the encoding for the data provided here, not the arg type.
    Not relevant if source is method
    """
    data: str
    """Base64 encoded bytes, base64 ARC4 encoded uint64, or UTF-8 method selector"""
@attrs.frozen(kw_only=True)
class MethodArg:
    """A single argument of an ABI method."""
    type: ABIType
    """
    The type of the argument.
    The `struct` field should also be checked to determine if this arg is a struct.
    """
    struct: StructName | None = None
    """
    If the type is a struct, the name of the struct.
    Note: this is a separate field to maintain backwards compatibility with ARC-4
    """
    name: str | None = None
    """Optional, user-friendly name for the argument"""
    desc: str | None = None
    """Optional, user-friendly description for the argument"""
    defaultValue: MethodArgDefaultValue | None = None
@attrs.frozen(kw_only=True)
class MethodReturns:
    """Describes an ABI method's return value."""
    type: ABIType
    """
    The type of the return value, or "void" to indicate no return value.
    The `struct` field should also be checked to determine if this return value is a struct.
    """
    struct: StructName | None = None
    """
    If the type is a struct, the name of the struct
    """
    desc: str | None = None
    """Optional, user-friendly description for the return value"""
@attrs.frozen
class MethodActions:
    """An action is a combination of call/create and an OnComplete"""
    # entries are OnComplete names, e.g. "NoOp"
    create: Sequence[typing.Literal["NoOp", "OptIn", "DeleteApplication"]]
    """OnCompletes this method allows when appID === 0"""
    call: Sequence[
        typing.Literal["NoOp", "OptIn", "CloseOut", "UpdateApplication", "DeleteApplication"]
    ]
    """OnCompletes this method allows when appID !== 0"""
@attrs.frozen(kw_only=True)
class MethodBoxRecommendation:
    """A box reference clients are recommended to include when calling a method."""
    app: int | None = None
    """The app ID for the box"""
    key: str
    """The base64 encoded box key"""
    readBytes: int
    """The number of bytes being read from the box"""
    writeBytes: int
    """The number of bytes being written to the box"""
@attrs.frozen(kw_only=True)
class MethodRecommendations:
    """Resources/fees clients are recommended to supply when calling a method."""
    innerTransactionCount: int | None = None
    """The number of inner transactions the caller should cover the fees for"""
    boxes: MethodBoxRecommendation | None = None
    """Recommended box references to include"""
    accounts: Sequence[str] | None = None
    """Recommended foreign accounts"""
    apps: Sequence[int] | None = None
    """Recommended foreign apps"""
    assets: Sequence[int] | None = None
    """Recommended foreign assets"""
@attrs.frozen(kw_only=True)
class Method:
    """
    Describes a method in the contract.
    This interface is an extension of the interface described in ARC-4
    (see also the ARC-22 `readonly` and ARC-28 `events` fields below)
    """
    name: str
    """The name of the method"""
    desc: str | None = None
    """Optional, user-friendly description for the method"""
    args: Sequence[MethodArg]
    """The arguments of the method, in order"""
    returns: MethodReturns
    """Information about the method's return value"""
    actions: MethodActions
    """Allowed actions for this method"""
    readonly: bool
    """If this method does not write anything to the ledger (ARC-22)"""
    events: Sequence[Event] = ()
    """ARC-28 events that MAY be emitted by this method"""
    recommendations: MethodRecommendations | None = None
    """Information that clients can use when calling the method"""
@attrs.frozen
class Network:
    """Deployment information for the contract on a particular network."""
    appID: int
    """The app ID of the deployed contract in this network"""
class SchemaSizes(typing.TypedDict):
    # number of uint64 / byteslice slots reserved in a schema
    ints: int
    bytes: int
# functional TypedDict syntax is required below because "global" is a Python
# keyword and so cannot be declared as a class-body attribute
ContractSchema = typing.TypedDict("ContractSchema", {"global": SchemaSizes, "local": SchemaSizes})
StorageMaps = Mapping[str, StorageMap]
StorageKeys = Mapping[str, StorageKey]
ContractStorage = typing.TypedDict(
    "ContractStorage", {"global": StorageMaps, "local": StorageMaps, "box": StorageMaps}
)
ContractKeys = typing.TypedDict(
    "ContractKeys", {"global": StorageKeys, "local": StorageKeys, "box": StorageKeys}
)
@attrs.frozen
class ContractState:
    """Schema sizes plus declared storage keys and maps for the contract."""
    schema: ContractSchema
    """
    Defines the values that should be used for GlobalNumUint, GlobalNumByteSlice, LocalNumUint,
    and LocalNumByteSlice when creating the application
    """
    keys: ContractKeys
    """Mapping of human-readable names to StorageKey objects"""
    maps: ContractStorage
    """Mapping of human-readable names to StorageMap objects"""
@attrs.frozen(kw_only=True)
class CompilerVersion:
    """Semantic version (and optional commit hash) of the compiler used."""
    major: int
    minor: int
    patch: int
    commitHash: str | None = None
@attrs.frozen
class CompilerInfo:
    """Identifies the compiler that produced the bytecode/PC values."""
    compiler: str
    """The name of the compiler"""
    compilerVersion: CompilerVersion
@attrs.frozen(kw_only=True)
class TemplateVariable:
    """A template variable in the TEAL source, with its type and optional value."""
    type: ABIType | AVMType | StructName
    """The type of the template variable"""
    value: str | None = None
    """If given, the base64 encoded value used for the given app/program"""
@attrs.frozen
class ScratchVariable:
    """A scratch slot used at runtime, and the type of the value stored there."""
    slot: int
    type: ABIType | AVMType | StructName
@attrs.frozen(kw_only=True)
class Contract:
    """
    Describes the entire contract.
    This interface is an extension of the interface described in ARC-4
    """
    arcs: Sequence[int] = ()
    """
    The ARCs used and/or supported by this contract.
    All contracts implicitly support ARC-4 and ARC-56
    """
    name: str
    """A user-friendly name for the contract"""
    desc: str | None = None
    """Optional, user-friendly description for the interface"""
    networks: Mapping[str, Network] | None = None
    """
    Optional object listing the contract instances across different networks.
    The key is the base64 genesis hash of the network, and the value contains
    information about the deployed contract in the network indicated by the
    key. A key containing the human-readable name of the network MAY be
    included, but the corresponding genesis hash key MUST also be defined
    """
    structs: Mapping[str, Sequence[StructField]]
    """
    Named structs used by the application.
    Each struct field appears in the same order as ABI encoding
    """
    methods: Sequence[Method]
    """All of the methods that the contract implements"""
    state: ContractState | None = None
    bareActions: MethodActions | None = None
    """Supported bare actions for the contract"""
    sourceInfo: Mapping[ProgramType, ProgramSourceInfo] | None = None
    """Information about the TEAL programs"""
    source: Mapping[ProgramType, str] | None = None
    """
    The pre-compiled TEAL that may contain template variables.
    MUST be omitted if included as part of ARC23
    """
    byteCode: Mapping[ProgramType, str] | None = None
    """
    The compiled bytecode for the application.
    MUST be omitted if included as part of ARC23
    """
    compilerInfo: CompilerInfo | None = None
    """
    Information used to get the given byteCode and/or PC values in sourceInfo.
    MUST be given if byteCode or PC values are present
    """
    events: Sequence[Event] | None = None
    """ARC-28 events that MAY be emitted by this contract"""
    templateVariables: Mapping[str, TemplateVariable] | None = None
    """
    A mapping of template variable names as they appear in the teal (not including TMPL_ prefix)
    to their respective types and values (if applicable)
    """
    scratchVariables: Mapping[str, ScratchVariable] | None = None
    """The scratch variables used during runtime"""
|
algorandfoundation/puya
|
src/puya/arc56_models.py
|
Python
|
NOASSERTION
| 11,510 |
import re
import typing
from collections.abc import Sequence
import attrs
from immutabledict import immutabledict
from puya import avm
from puya.avm import AVMType
from puya.awst import nodes as awst_nodes
from puya.parse import SourceLocation
from puya.program_refs import ContractReference, LogicSigReference
@attrs.frozen
class ARC4StructField:
    """A single field within an ARC-4 struct definition."""
    name: str
    type: str  # ARC-4 type string of the field
    struct: str | None  # if the type is a struct, its name -- otherwise None
@attrs.frozen(kw_only=True)
class ARC4Struct:
    """An ARC-4 struct definition, identified by its fully-qualified name."""
    fullname: str
    desc: str | None = None
    fields: Sequence[ARC4StructField] = attrs.field(
        default=(), converter=tuple[ARC4StructField, ...]
    )
    @property
    def name(self) -> str:
        # last identifier segment of fullname, e.g. "pkg.Contract.Point" -> "Point"
        return re.split(r"\W", self.fullname)[-1]
@attrs.frozen(kw_only=True)
class MethodArgDefaultConstant:
    """A literal default value for a method argument."""
    data: bytes  # the encoded constant value
    type_: str  # the encoding of data (ARC-56 type string)
@attrs.frozen(kw_only=True)
class MethodArgDefaultFromState:
    """A method argument default that is read from app storage."""
    kind: awst_nodes.AppStorageKind  # global / local / box storage
    key: bytes  # the storage key to read from
    key_type: str  # the encoding of key (ARC-56 type string)
@attrs.frozen(kw_only=True)
class MethodArgDefaultFromMethod:
    """A method argument default obtained by calling another (readonly) method."""
    name: str
    return_type: str
    readonly: bool
    @property
    def signature(self) -> str:
        # ARC-4 signature of a zero-argument method
        return f"{self.name}(){self.return_type}"
# union of all supported default-value strategies for an ABI method argument
MethodArgDefault = (
    MethodArgDefaultConstant | MethodArgDefaultFromState | MethodArgDefaultFromMethod
)
@attrs.frozen
class ARC4MethodArg:
    """Metadata for a single argument of an ARC-4 ABI method."""
    name: str
    type_: str  # ARC-4 type string
    struct: str | None  # struct name if the type is a struct
    desc: str | None = attrs.field(hash=False)  # description excluded from hashing
    client_default: MethodArgDefault | None
@attrs.frozen
class ARC4Returns:
    """Metadata for the return value of an ARC-4 ABI method."""
    type_: str  # ARC-4 type string
    struct: str | None  # struct name if the type is a struct
    desc: str | None = attrs.field(hash=False)  # description excluded from hashing
@attrs.frozen(kw_only=True)
class ARC4ABIMethod:
    """Metadata for an ARC-4 ABI method; config-derived values are exposed as properties."""
    id: str
    desc: str | None = attrs.field(hash=False)  # description excluded from hashing
    args: Sequence[ARC4MethodArg] = attrs.field(converter=tuple[ARC4MethodArg, ...])
    returns: ARC4Returns
    events: Sequence[ARC4Struct] = attrs.field(converter=tuple[ARC4Struct, ...])
    _config: awst_nodes.ARC4ABIMethodConfig
    @property
    def name(self) -> str:
        return self._config.name
    @property
    def allowed_completion_types(self) -> Sequence[avm.OnCompletionAction]:
        return self._config.allowed_completion_types
    @property
    def create(self) -> awst_nodes.ARC4CreateOption:
        return self._config.create
    @property
    def readonly(self) -> bool:
        return self._config.readonly
    @property
    def config_location(self) -> SourceLocation:
        return self._config.source_location
    @property
    def signature(self) -> str:
        # ARC-4 method signature, e.g. "transfer(address,uint64)bool"
        return f"{self.name}({','.join(a.type_ for a in self.args)}){self.returns.type_}"
@attrs.frozen(kw_only=True)
class ARC4BareMethod:
    """Metadata for an ARC-4 bare (no-selector) method; config values exposed as properties."""
    id: str
    desc: str | None = attrs.field(hash=False)  # description excluded from hashing
    _config: awst_nodes.ARC4BareMethodConfig
    @property
    def allowed_completion_types(self) -> Sequence[avm.OnCompletionAction]:
        return self._config.allowed_completion_types
    @property
    def create(self) -> awst_nodes.ARC4CreateOption:
        return self._config.create
    @property
    def config_location(self) -> SourceLocation:
        return self._config.source_location
# a contract method is either a bare method or a regular ABI method
ARC4Method = ARC4BareMethod | ARC4ABIMethod
@attrs.define(eq=False)
class ContractState:
    """Metadata for a single app-storage declaration (used for global/local/box state)."""
    name: str
    source_location: SourceLocation
    key_or_prefix: bytes
    """Key value as bytes, or prefix if it is a map"""
    arc56_key_type: str
    arc56_value_type: str
    storage_type: typing.Literal[AVMType.uint64, AVMType.bytes]
    description: str | None
    is_map: bool
    """State describes a map"""
@attrs.frozen(kw_only=True)
class LogicSignatureMetaData:
    """Name and description metadata for a compiled logic signature."""
    ref: LogicSigReference
    name: str
    description: str | None
@attrs.frozen
class StateTotals:
    """Total number of global/local uint and byteslice schema slots reserved."""
    global_uints: int
    local_uints: int
    global_bytes: int
    local_bytes: int
@attrs.frozen(kw_only=True)
class ContractMetaData:
    """Aggregated metadata for a compiled contract (state, methods, structs)."""
    ref: ContractReference
    name: str
    description: str | None
    global_state: immutabledict[str, ContractState]
    local_state: immutabledict[str, ContractState]
    boxes: immutabledict[str, ContractState]
    state_totals: StateTotals
    arc4_methods: Sequence[ARC4Method]
    structs: immutabledict[str, ARC4Struct]
    template_variable_types: immutabledict[str, str]
    """Mapping of template variable names to their ARC-56 type"""
    @property
    def is_arc4(self) -> bool:
        return bool(self.arc4_methods)  # TODO: should this be an explicit flag instead?
|
algorandfoundation/puya
|
src/puya/artifact_metadata.py
|
Python
|
NOASSERTION
| 4,366 |
import graphlib
import typing
from collections.abc import Iterable, Mapping, Sequence
import attrs
from puya import log
from puya.errors import CodeError
from puya.ir.models import CompiledContractReference, CompiledLogicSigReference, ModuleArtifact
from puya.ir.visitor import IRTraverser
from puya.parse import SourceLocation
from puya.program_refs import ContractReference, LogicSigReference
logger = log.get_logger(__name__)
@attrs.frozen(eq=False)
class Artifact:
    """An IR artifact plus the artifacts it depends on (for dependency-ordered compilation).

    eq=False: instances compare and hash by identity, which is relied upon when
    they are used as graph nodes in ArtifactCompilationSorter.
    """
    ir: ModuleArtifact
    # maps each referenced artifact id to the source location of the reference
    # (None if the location is unknown)
    depends_on: dict[ContractReference | LogicSigReference, SourceLocation | None] = attrs.field(
        factory=dict
    )
    @typing.final
    @property
    def id(self) -> ContractReference | LogicSigReference:
        return self.ir.metadata.ref
    @typing.final
    @property
    def source_location(self) -> SourceLocation:
        return self.ir.source_location
@attrs.define
class ArtifactCompilationSorter(IRTraverser):
    """
    Sorts IR artifacts so that programs that depend on the byte code of other programs
    are processed after their dependencies
    """
    artifacts: Mapping[ContractReference | LogicSigReference, Artifact]
    artifact: Artifact
    @classmethod
    def sort(
        cls,
        all_ir: Sequence[ModuleArtifact],
    ) -> Iterable[Artifact]:
        """Return artifacts in dependency order.

        Raises:
            CodeError: if the program references form a cycle.
        """
        all_artifacts = {artifact.metadata.ref: Artifact(ir=artifact) for artifact in all_ir}
        artifacts = list(all_artifacts.values())
        # collect program references by traversing every subroutine of every program
        for artifact in artifacts:
            reference_collector = cls(
                artifacts=all_artifacts,
                artifact=artifact,
            )
            for program in artifact.ir.all_programs():
                for subroutine in program.subroutines:
                    reference_collector.visit_all_blocks(subroutine.body)
        sorter = graphlib.TopologicalSorter(
            {artifact: [all_artifacts[n] for n in artifact.depends_on] for artifact in artifacts}
        )
        try:
            result = list(sorter.static_order())
        except graphlib.CycleError as ex:
            # second arg of CycleError is the cycle itself, first == last node
            artifact_cycle: Sequence[Artifact] = ex.args[1]
            *_, before, last = artifact_cycle
            cycle_loc = last.depends_on[before.id]
            # NOTE(review): assumes artifact ids are str subclasses - confirm
            programs = " -> ".join(a.id for a in reversed(artifact_cycle))
            raise CodeError(f"cyclical program reference: {programs}", cycle_loc) from None
        return result
    def visit_compiled_contract_reference(self, const: CompiledContractReference) -> None:
        self._add_dependency(const.artifact, const.source_location)
    def visit_compiled_logicsig_reference(self, const: CompiledLogicSigReference) -> None:
        self._add_dependency(const.artifact, const.source_location)
    def _add_dependency(
        self,
        ref: ContractReference | LogicSigReference,
        source_location: SourceLocation | None,
    ) -> None:
        # common implementation of the two visit_* methods above, which were identical
        if ref not in self.artifacts:
            logger.critical(f"missing reference: {ref}")
        # add unique references with source_location
        # will re-add a reference if current location is None
        if self.artifact.depends_on.get(ref) is None:
            self.artifact.depends_on[ref] = source_location
|
algorandfoundation/puya
|
src/puya/artifact_sorter.py
|
Python
|
NOASSERTION
| 3,355 |
import enum
@enum.unique
class AVMType(enum.Flag):
    """The type of a value on the AVM; `any` is the union of both flags."""
    bytes = enum.auto()
    uint64 = enum.auto()
    # a value that may be either bytes or uint64
    any = bytes | uint64
# values and names are matched to AVM definitions
class OnCompletionAction(enum.IntEnum):
    """Application OnComplete actions; names and values match the AVM definitions."""
    NoOp = 0
    OptIn = 1
    CloseOut = 2
    ClearState = 3
    UpdateApplication = 4
    DeleteApplication = 5
class TransactionType(enum.IntEnum):
    """Transaction types; names and values match the AVM definitions."""
    pay = 1
    keyreg = 2
    acfg = 3
    axfer = 4
    afrz = 5
    appl = 6
|
algorandfoundation/puya
|
src/puya/avm.py
|
Python
|
NOASSERTION
| 453 |
algorandfoundation/puya
|
src/puya/awst/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
from immutabledict import immutabledict
from puya.awst import wtypes
from puya.errors import CodeError
from puya.parse import SourceLocation
def wtype_to_arc4(wtype: wtypes.WType, loc: SourceLocation | None = None) -> str:
    """Return the ARC-4 type name for a wtype, converting native types where possible.

    Raises:
        CodeError: if the wtype is neither an ARC-4 type nor convertible to one.
    """
    match wtype:
        case wtypes.ARC4Type(arc4_name=arc4_name):
            return arc4_name
        case (
            wtypes.void_wtype
            | wtypes.asset_wtype
            | wtypes.account_wtype
            | wtypes.application_wtype
        ):
            # these reference/void types use their own names directly
            return wtype.name
        case wtypes.WGroupTransaction(transaction_type=transaction_type):
            # a group transaction without a specific type is just "txn"
            return transaction_type.name if transaction_type else "txn"
    # not directly representable: try the native -> ARC-4 equivalent, then recurse
    converted = maybe_avm_to_arc4_equivalent_type(wtype)
    if converted is None:
        raise CodeError(f"not an ARC4 type or native equivalent: {wtype}", loc)
    return wtype_to_arc4(converted, loc)
def maybe_avm_to_arc4_equivalent_type(wtype: wtypes.WType) -> wtypes.ARC4Type | None:
    """Return the ARC-4 equivalent of a native wtype, or None if there isn't one.

    Tuples are converted element-wise: named tuples become ARC-4 structs and
    unnamed ones become ARC-4 tuples; if any element has no equivalent, the
    whole conversion fails (returns None).
    """
    match wtype:
        case wtypes.bool_wtype:
            return wtypes.arc4_bool_wtype
        case wtypes.uint64_wtype:
            return wtypes.ARC4UIntN(n=64, source_location=None)
        case wtypes.biguint_wtype:
            # biguint maps to the maximum-width ARC-4 uint (512 bits)
            return wtypes.ARC4UIntN(n=512, source_location=None)
        case wtypes.bytes_wtype:
            return wtypes.ARC4DynamicArray(
                element_type=wtypes.arc4_byte_alias, native_type=wtype, source_location=None
            )
        case wtypes.string_wtype:
            return wtypes.arc4_string_alias
        case wtypes.WTuple(types=tuple_item_types) as wtuple:
            arc4_item_types = []
            for t in tuple_item_types:
                if isinstance(t, wtypes.ARC4Type):
                    arc4_item_types.append(t)
                else:
                    converted = maybe_avm_to_arc4_equivalent_type(t)
                    if converted is None:
                        return None
                    arc4_item_types.append(converted)
            if wtuple.fields:
                # named fields -> ARC-4 struct
                return wtypes.ARC4Struct(
                    name=wtuple.name,
                    desc=wtuple.desc,
                    frozen=True,
                    fields=immutabledict(zip(wtuple.fields, arc4_item_types, strict=True)),
                )
            else:
                return wtypes.ARC4Tuple(types=arc4_item_types, source_location=None)
        case _:
            return None
|
algorandfoundation/puya
|
src/puya/awst/arc4_types.py
|
Python
|
NOASSERTION
| 2,393 |
import typing
from puya.awst import nodes as awst_nodes
from puya.awst.function_traverser import FunctionTraverser
from puya.awst.visitors import ContractMemberVisitor, RootNodeVisitor
class AWSTTraverser(FunctionTraverser, RootNodeVisitor[None], ContractMemberVisitor[None]):
    """Visitor that walks into the bodies of AWST root nodes and contract members.

    Extends FunctionTraverser with handling for module-level nodes (subroutines,
    contracts, logic signatures) by descending into their statements/expressions.
    """
    @typing.override
    def visit_subroutine(self, statement: awst_nodes.Subroutine) -> None:
        statement.body.accept(self)
    @typing.override
    def visit_contract(self, statement: awst_nodes.Contract) -> None:
        # visit storage declarations before method bodies
        for storage in statement.app_state:
            storage.accept(self)
        for method in statement.all_methods:
            method.accept(self)
    @typing.override
    def visit_logic_signature(self, statement: awst_nodes.LogicSignature) -> None:
        statement.program.accept(self)
    @typing.override
    def visit_app_storage_definition(self, defn: awst_nodes.AppStorageDefinition) -> None:
        defn.key.accept(self)
    @typing.override
    def visit_contract_method(self, statement: awst_nodes.ContractMethod) -> None:
        statement.body.accept(self)
|
algorandfoundation/puya
|
src/puya/awst/awst_traverser.py
|
Python
|
NOASSERTION
| 1,085 |
import typing
import puya.awst.visitors
from puya.awst import nodes as awst_nodes
class FunctionTraverser(
    puya.awst.visitors.ExpressionVisitor[None],
    puya.awst.visitors.StatementVisitor[None],
):
    """Depth-first traversal over all statements and expressions within a function.

    Each visit method recurses into child nodes via ``accept``; leaf nodes
    (constants, variable references, etc.) are no-ops. Subclasses override
    individual ``visit_*`` methods to observe specific node types, calling
    ``super()`` to keep traversing.
    """

    @typing.override
    def visit_assignment_statement(self, statement: awst_nodes.AssignmentStatement) -> None:
        statement.target.accept(self)
        statement.value.accept(self)
    @typing.override
    def visit_copy(self, expr: awst_nodes.Copy) -> None:
        expr.value.accept(self)
    @typing.override
    def visit_goto(self, statement: awst_nodes.Goto) -> None:
        pass
    @typing.override
    def visit_assignment_expression(self, expr: awst_nodes.AssignmentExpression) -> None:
        expr.target.accept(self)
        expr.value.accept(self)
    @typing.override
    def visit_uint64_binary_operation(self, expr: awst_nodes.UInt64BinaryOperation) -> None:
        expr.left.accept(self)
        expr.right.accept(self)
    @typing.override
    def visit_biguint_binary_operation(self, expr: awst_nodes.BigUIntBinaryOperation) -> None:
        expr.left.accept(self)
        expr.right.accept(self)
    @typing.override
    def visit_reversed(self, expr: awst_nodes.Reversed) -> None:
        # the reversed target may be a non-Expression sequence (e.g. a range); only recurse
        # into actual expressions
        if isinstance(expr.expr, awst_nodes.Expression):
            expr.expr.accept(self)
    @typing.override
    def visit_integer_constant(self, expr: awst_nodes.IntegerConstant) -> None:
        pass
    @typing.override
    def visit_decimal_constant(self, expr: awst_nodes.DecimalConstant) -> None:
        pass
    @typing.override
    def visit_bool_constant(self, expr: awst_nodes.BoolConstant) -> None:
        pass
    @typing.override
    def visit_bytes_constant(self, expr: awst_nodes.BytesConstant) -> None:
        pass
    @typing.override
    def visit_string_constant(self, expr: awst_nodes.StringConstant) -> None:
        pass
    @typing.override
    def visit_void_constant(self, expr: awst_nodes.VoidConstant) -> None:
        pass
    @typing.override
    def visit_compiled_contract(self, expr: awst_nodes.CompiledContract) -> None:
        for value in expr.template_variables.values():
            value.accept(self)
    @typing.override
    def visit_compiled_logicsig(self, expr: awst_nodes.CompiledLogicSig) -> None:
        for value in expr.template_variables.values():
            value.accept(self)
    @typing.override
    def visit_arc4_decode(self, expr: awst_nodes.ARC4Decode) -> None:
        expr.value.accept(self)
    @typing.override
    def visit_arc4_encode(self, expr: awst_nodes.ARC4Encode) -> None:
        expr.value.accept(self)
    @typing.override
    def visit_array_concat(self, expr: awst_nodes.ArrayConcat) -> None:
        expr.left.accept(self)
        expr.right.accept(self)
    @typing.override
    def visit_array_pop(self, expr: awst_nodes.ArrayPop) -> None:
        expr.base.accept(self)
    @typing.override
    def visit_array_extend(self, expr: awst_nodes.ArrayExtend) -> None:
        expr.base.accept(self)
        expr.other.accept(self)
    @typing.override
    def visit_method_constant(self, expr: awst_nodes.MethodConstant) -> None:
        pass
    @typing.override
    def visit_address_constant(self, expr: awst_nodes.AddressConstant) -> None:
        pass
    @typing.override
    def visit_numeric_comparison_expression(
        self, expr: awst_nodes.NumericComparisonExpression
    ) -> None:
        expr.lhs.accept(self)
        expr.rhs.accept(self)
    @typing.override
    def visit_var_expression(self, expr: awst_nodes.VarExpression) -> None:
        pass
    @typing.override
    def visit_assert_expression(self, expr: awst_nodes.AssertExpression) -> None:
        if expr.condition is not None:
            expr.condition.accept(self)
    @typing.override
    def visit_checked_maybe(self, expr: awst_nodes.CheckedMaybe) -> None:
        expr.expr.accept(self)
    @typing.override
    def visit_intrinsic_call(self, call: awst_nodes.IntrinsicCall) -> None:
        for arg in call.stack_args:
            arg.accept(self)
    @typing.override
    def visit_puya_lib_call(self, call: awst_nodes.PuyaLibCall) -> None:
        for arg in call.args:
            arg.value.accept(self)
    @typing.override
    def visit_group_transaction_reference(self, ref: awst_nodes.GroupTransactionReference) -> None:
        ref.index.accept(self)
    @typing.override
    def visit_create_inner_transaction(self, call: awst_nodes.CreateInnerTransaction) -> None:
        for expr in call.fields.values():
            expr.accept(self)
    @typing.override
    def visit_update_inner_transaction(self, call: awst_nodes.UpdateInnerTransaction) -> None:
        call.itxn.accept(self)
        for value in call.fields.values():
            value.accept(self)
    @typing.override
    def visit_submit_inner_transaction(self, call: awst_nodes.SubmitInnerTransaction) -> None:
        for expr in call.itxns:
            expr.accept(self)
    @typing.override
    def visit_inner_transaction_field(self, itxn_field: awst_nodes.InnerTransactionField) -> None:
        itxn_field.itxn.accept(self)
        # use an explicit None check for consistency with visit_assert_expression et al
        if itxn_field.array_index is not None:
            itxn_field.array_index.accept(self)
    @typing.override
    def visit_tuple_expression(self, expr: awst_nodes.TupleExpression) -> None:
        for item in expr.items:
            item.accept(self)
    @typing.override
    def visit_tuple_item_expression(self, expr: awst_nodes.TupleItemExpression) -> None:
        expr.base.accept(self)
    @typing.override
    def visit_field_expression(self, expr: awst_nodes.FieldExpression) -> None:
        expr.base.accept(self)
    @typing.override
    def visit_slice_expression(self, expr: awst_nodes.SliceExpression) -> None:
        expr.base.accept(self)
        # begin/end indices are optional and may be non-Expression values
        if isinstance(expr.begin_index, awst_nodes.Expression):
            expr.begin_index.accept(self)
        if isinstance(expr.end_index, awst_nodes.Expression):
            expr.end_index.accept(self)
    @typing.override
    def visit_intersection_slice_expression(
        self, expr: awst_nodes.IntersectionSliceExpression
    ) -> None:
        expr.base.accept(self)
        # begin/end indices may be plain ints (or None), which have no children
        if isinstance(expr.begin_index, awst_nodes.Expression):
            expr.begin_index.accept(self)
        if isinstance(expr.end_index, awst_nodes.Expression):
            expr.end_index.accept(self)
    @typing.override
    def visit_index_expression(self, expr: awst_nodes.IndexExpression) -> None:
        expr.base.accept(self)
        expr.index.accept(self)
    @typing.override
    def visit_conditional_expression(self, expr: awst_nodes.ConditionalExpression) -> None:
        expr.condition.accept(self)
        expr.true_expr.accept(self)
        expr.false_expr.accept(self)
    @typing.override
    def visit_single_evaluation(self, expr: awst_nodes.SingleEvaluation) -> None:
        expr.source.accept(self)
    @typing.override
    def visit_app_state_expression(self, expr: awst_nodes.AppStateExpression) -> None:
        expr.key.accept(self)
    @typing.override
    def visit_app_account_state_expression(
        self, expr: awst_nodes.AppAccountStateExpression
    ) -> None:
        expr.key.accept(self)
        expr.account.accept(self)
    @typing.override
    def visit_new_array(self, expr: awst_nodes.NewArray) -> None:
        for element in expr.values:
            element.accept(self)
    @typing.override
    def visit_new_struct(self, expr: awst_nodes.NewStruct) -> None:
        for element in expr.values.values():
            element.accept(self)
    @typing.override
    def visit_bytes_comparison_expression(
        self, expr: awst_nodes.BytesComparisonExpression
    ) -> None:
        expr.lhs.accept(self)
        expr.rhs.accept(self)
    @typing.override
    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        for arg in expr.args:
            arg.value.accept(self)
    @typing.override
    def visit_bytes_binary_operation(self, expr: awst_nodes.BytesBinaryOperation) -> None:
        expr.left.accept(self)
        expr.right.accept(self)
    @typing.override
    def visit_boolean_binary_operation(self, expr: awst_nodes.BooleanBinaryOperation) -> None:
        expr.left.accept(self)
        expr.right.accept(self)
    @typing.override
    def visit_uint64_unary_operation(self, expr: awst_nodes.UInt64UnaryOperation) -> None:
        expr.expr.accept(self)
    @typing.override
    def visit_bytes_unary_operation(self, expr: awst_nodes.BytesUnaryOperation) -> None:
        expr.expr.accept(self)
    @typing.override
    def visit_not_expression(self, expr: awst_nodes.Not) -> None:
        expr.expr.accept(self)
    @typing.override
    def visit_block(self, statement: awst_nodes.Block) -> None:
        for stmt in statement.body:
            stmt.accept(self)
    @typing.override
    def visit_if_else(self, statement: awst_nodes.IfElse) -> None:
        statement.condition.accept(self)
        statement.if_branch.accept(self)
        # explicit None check for consistency (was truthiness; Block instances are always truthy)
        if statement.else_branch is not None:
            statement.else_branch.accept(self)
    @typing.override
    def visit_switch(self, statement: awst_nodes.Switch) -> None:
        statement.value.accept(self)
        for case, block in statement.cases.items():
            case.accept(self)
            block.accept(self)
        # explicit None check for consistency (was truthiness; Block instances are always truthy)
        if statement.default_case is not None:
            statement.default_case.accept(self)
    @typing.override
    def visit_while_loop(self, statement: awst_nodes.WhileLoop) -> None:
        statement.condition.accept(self)
        statement.loop_body.accept(self)
    @typing.override
    def visit_loop_exit(self, statement: awst_nodes.LoopExit) -> None:
        pass
    @typing.override
    def visit_return_statement(self, statement: awst_nodes.ReturnStatement) -> None:
        if statement.value is not None:
            statement.value.accept(self)
    @typing.override
    def visit_loop_continue(self, statement: awst_nodes.LoopContinue) -> None:
        pass
    @typing.override
    def visit_expression_statement(self, statement: awst_nodes.ExpressionStatement) -> None:
        statement.expr.accept(self)
    @typing.override
    def visit_template_var(self, statement: awst_nodes.TemplateVar) -> None:
        pass
    @typing.override
    def visit_uint64_augmented_assignment(
        self, statement: awst_nodes.UInt64AugmentedAssignment
    ) -> None:
        statement.target.accept(self)
        statement.value.accept(self)
    @typing.override
    def visit_biguint_augmented_assignment(
        self, statement: awst_nodes.BigUIntAugmentedAssignment
    ) -> None:
        statement.target.accept(self)
        statement.value.accept(self)
    @typing.override
    def visit_bytes_augmented_assignment(
        self, statement: awst_nodes.BytesAugmentedAssignment
    ) -> None:
        statement.target.accept(self)
        statement.value.accept(self)
    @typing.override
    def visit_for_in_loop(self, statement: awst_nodes.ForInLoop) -> None:
        statement.sequence.accept(self)
        statement.items.accept(self)
        statement.loop_body.accept(self)
    @typing.override
    def visit_reinterpret_cast(self, expr: awst_nodes.ReinterpretCast) -> None:
        expr.expr.accept(self)
    @typing.override
    def visit_enumeration(self, expr: awst_nodes.Enumeration) -> None:
        expr.expr.accept(self)
    @typing.override
    def visit_state_get_ex(self, expr: awst_nodes.StateGetEx) -> None:
        expr.field.accept(self)
    @typing.override
    def visit_state_delete(self, statement: awst_nodes.StateDelete) -> None:
        statement.field.accept(self)
    @typing.override
    def visit_state_get(self, expr: awst_nodes.StateGet) -> None:
        expr.field.accept(self)
        expr.default.accept(self)
    @typing.override
    def visit_state_exists(self, expr: awst_nodes.StateExists) -> None:
        expr.field.accept(self)
    @typing.override
    def visit_box_value_expression(self, expr: awst_nodes.BoxValueExpression) -> None:
        expr.key.accept(self)
    @typing.override
    def visit_biguint_postfix_unary_operation(
        self, expr: awst_nodes.BigUIntPostfixUnaryOperation
    ) -> None:
        expr.target.accept(self)
    @typing.override
    def visit_uint64_postfix_unary_operation(
        self, expr: awst_nodes.UInt64PostfixUnaryOperation
    ) -> None:
        expr.target.accept(self)
    @typing.override
    def visit_arc4_router(self, expr: awst_nodes.ARC4Router) -> None:
        pass
    @typing.override
    def visit_range(self, node: awst_nodes.Range) -> None:
        node.start.accept(self)
        node.stop.accept(self)
        node.step.accept(self)
    @typing.override
    def visit_emit(self, expr: awst_nodes.Emit) -> None:
        expr.value.accept(self)
|
algorandfoundation/puya
|
src/puya/awst/function_traverser.py
|
Python
|
NOASSERTION
| 12,822 |
import abc
import decimal
import enum
import typing
from abc import ABC, abstractmethod
from collections.abc import Iterator, Mapping, Sequence, Set
from functools import cached_property
import attrs
from immutabledict import immutabledict
from puya.algo_constants import SUPPORTED_AVM_VERSIONS
from puya.avm import AVMType, OnCompletionAction
from puya.awst import wtypes
from puya.awst.txn_fields import TxnField
from puya.awst.visitors import (
ContractMemberVisitor,
ExpressionVisitor,
RootNodeVisitor,
StatementVisitor,
)
from puya.awst.wtypes import WType
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation
from puya.program_refs import ContractReference, LogicSigReference
from puya.utils import unique
T = typing.TypeVar("T")
@attrs.frozen
class Node:
    """Base of all AWST nodes; every node records where it came from in source."""
    source_location: SourceLocation
@attrs.frozen
class Statement(Node, ABC):
    """Base AWST statement; subclasses dispatch to a StatementVisitor via accept()."""
    @abstractmethod
    def accept(self, visitor: StatementVisitor[T]) -> T: ...
@attrs.frozen
class Expression(Node, ABC):
    """Base AWST expression; carries its WType and dispatches to an ExpressionVisitor."""
    wtype: WType
    @abstractmethod
    def accept(self, visitor: ExpressionVisitor[T]) -> T: ...
@attrs.frozen
class ExpressionStatement(Statement):
    """An expression evaluated purely for its effects, in statement position."""
    expr: Expression
    source_location: SourceLocation = attrs.field(init=False)
    @source_location.default
    def _source_location(self) -> SourceLocation:
        # location is derived from the wrapped expression rather than passed in
        return self.expr.source_location
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_expression_statement(self)
@attrs.frozen(repr=False)
class _ExpressionHasWType:
    """attrs validator: an Expression attribute's wtype must equal one of `instances`,
    or be an instance of one of the WType subclasses in `types`."""
    instances: tuple[WType, ...]
    types: tuple[type[WType], ...]
    def __call__(
        self,
        inst: Node,
        attr: attrs.Attribute,  # type: ignore[type-arg]
        value: Expression,
    ) -> None:
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        wtype = value.wtype
        if wtype in self.instances:
            return
        for allowed_t in self.types:
            if isinstance(wtype, allowed_t):
                return
        raise InternalError(
            f"{type(inst).__name__}.{attr.name}: expression of WType {wtype} received,"
            f" expected {' or '.join(self._names)}"
        )
    def __repr__(self) -> str:
        return f"<expression_has_wtype validator for type {' | '.join(self._names)}>"
    @property
    def _names(self) -> Iterator[str]:
        # human-readable names for error messages: instance names, then class names
        for inst in self.instances:
            yield inst.name
        for typ in self.types:
            yield typ.__name__
def expression_has_wtype(*one_of_these: WType | type[WType]) -> _ExpressionHasWType:
    """Build a validator accepting expressions whose wtype equals one of the given
    WType instances, or is an instance of one of the given WType subclasses."""
    # partition arguments: classes become isinstance checks, instances become equality checks
    types = tuple(item for item in one_of_these if isinstance(item, type))
    instances = tuple(item for item in one_of_these if not isinstance(item, type))
    return _ExpressionHasWType(instances=instances, types=types)
@attrs.frozen(repr=False)
class _WTypeIsOneOf:
    """attrs validator: a WType attribute must equal one of `instances`, or be an
    instance of one of the WType subclasses in `types`.

    Counterpart of _ExpressionHasWType, but validates a WType directly rather than
    an Expression's wtype.
    """
    instances: tuple[WType, ...]
    types: tuple[type[WType], ...]
    def __call__(
        self,
        inst: Node,
        attr: attrs.Attribute,  # type: ignore[type-arg]
        value: WType,
    ) -> None:
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        wtype = value
        if wtype in self.instances:
            return
        for allowed_t in self.types:
            if isinstance(wtype, allowed_t):
                return
        raise InternalError(
            f"{type(inst).__name__}.{attr.name}: set to {wtype},"
            f" expected {' or '.join(self._names)}"
        )
    def __repr__(self) -> str:
        # fixed copy-paste: previously claimed to be an "expression_has_wtype" validator
        return f"<wtype_is_one_of validator for type {' | '.join(self._names)}>"
    @property
    def _names(self) -> Iterator[str]:
        # human-readable names for error messages: instance names, then class names
        for inst in self.instances:
            yield inst.name
        for typ in self.types:
            yield typ.__name__
def wtype_is_one_of(*one_of_these: WType | type[WType]) -> _WTypeIsOneOf:
    """Build a validator accepting a WType equal to one of the given instances,
    or an instance of one of the given WType subclasses."""
    # partition arguments: classes become isinstance checks, instances become equality checks
    types = tuple(item for item in one_of_these if isinstance(item, type))
    instances = tuple(item for item in one_of_these if not isinstance(item, type))
    return _WTypeIsOneOf(instances=instances, types=types)
# Pre-built validators for the common scalar expression types.
wtype_is_uint64 = expression_has_wtype(wtypes.uint64_wtype)
wtype_is_biguint = expression_has_wtype(wtypes.biguint_wtype)
wtype_is_bool = expression_has_wtype(wtypes.bool_wtype)
wtype_is_bytes = expression_has_wtype(wtypes.bytes_wtype)
# Block label (goto target); a distinct type so labels don't mix with arbitrary strings.
Label = typing.NewType("Label", str)
@attrs.frozen(kw_only=True)
class Block(Statement):
    """
    A (non-basic) block used to group statements. Can contain nested blocks, loops, and branching
    structures. No lexical scoping is offered or implied by this block.

    body: A sequence of statements which represent this block
    comment: An optional comment of what this block represents. Only influences
        non-functional output
    label: An optional label for this block allowing goto statements to jump to this block.
        Must be unique per subroutine.
    """
    body: Sequence[Statement] = attrs.field(converter=tuple[Statement, ...])
    label: Label | None = None
    comment: str | None = None
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_block(self)
@attrs.frozen(kw_only=True)
class Goto(Statement):
    """
    Branch unconditionally to the block with the specified label.

    target: The label of a block within the same subroutine
    """
    target: Label
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_goto(self)
@attrs.frozen
class IfElse(Statement):
    """Two-way conditional; condition must be bool-typed, else_branch is optional."""
    condition: Expression = attrs.field(validator=[wtype_is_bool])
    if_branch: Block
    else_branch: Block | None
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_if_else(self)
@attrs.frozen
class Switch(Statement):
    """Multi-way branch on `value`; `cases` maps match expressions to blocks, with an
    optional default block when no case matches."""
    value: Expression
    cases: Mapping[Expression, Block] = attrs.field(converter=immutabledict)
    default_case: Block | None
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_switch(self)
@attrs.frozen
class WhileLoop(Statement):
    """Pre-tested loop; condition must be bool-typed."""
    condition: Expression = attrs.field(validator=[wtype_is_bool])
    loop_body: Block
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_while_loop(self)
@attrs.frozen
class LoopExit(Statement):
    """Break out of the current innermost loop (equivalent of ``break``)."""
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_loop_exit(self)
@attrs.frozen
class LoopContinue(Statement):
    """Continue with the next iteration of the current innermost loop (``continue``)."""
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_loop_continue(self)
@attrs.frozen
class ReturnStatement(Statement):
    """Return from the current subroutine; `value` is None for void returns."""
    value: Expression | None
    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_return_statement(self)
@attrs.frozen
class AssertExpression(Expression):
    """Runtime assertion; void-typed.

    NOTE(review): `condition` may be None — presumably an unconditional failure;
    confirm against the lowering code.
    """
    condition: Expression | None
    error_message: str | None
    wtype: WType = attrs.field(default=wtypes.void_wtype, init=False)
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_assert_expression(self)
@attrs.frozen(kw_only=True)
class IntegerConstant(Expression):
    """Integer literal; wtype is restricted to uint64, biguint, or an ARC4 unsigned int."""
    wtype: WType = attrs.field(
        validator=[
            wtype_is_one_of(
                wtypes.uint64_wtype,
                wtypes.biguint_wtype,
                wtypes.ARC4UIntN,
            )
        ]
    )
    value: int = attrs.field()
    # teal_alias: optional symbolic name for the value — presumably emitted in TEAL
    # output in place of the raw literal; TODO confirm
    teal_alias: str | None = None
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_integer_constant(self)
@attrs.frozen
class DecimalConstant(Expression):
    """Decimal literal for ARC4 fixed-point (UFixedNxM) types."""
    wtype: wtypes.ARC4UFixedNxM
    value: decimal.Decimal = attrs.field()
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_decimal_constant(self)
def UInt64Constant(  # noqa: N802
    *, source_location: SourceLocation, value: int, teal_alias: str | None = None
) -> IntegerConstant:
    """Convenience factory: an IntegerConstant pinned to the uint64 wtype."""
    return IntegerConstant(
        wtype=wtypes.uint64_wtype,
        value=value,
        teal_alias=teal_alias,
        source_location=source_location,
    )
def BigUIntConstant(  # noqa: N802
    *, source_location: SourceLocation, value: int
) -> IntegerConstant:
    """Convenience factory: an IntegerConstant pinned to the biguint wtype."""
    return IntegerConstant(
        wtype=wtypes.biguint_wtype,
        value=value,
        source_location=source_location,
    )
@attrs.frozen
class BoolConstant(Expression):
    """Boolean literal; native bool by default, or the ARC4 bool encoding."""
    value: bool
    wtype: WType = attrs.field(
        default=wtypes.bool_wtype,
        validator=wtype_is_one_of(wtypes.bool_wtype, wtypes.arc4_bool_wtype),
    )
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_bool_constant(self)
@enum.unique
class BytesEncoding(enum.StrEnum):
    """Source encoding a bytes value was expressed in."""
    unknown = enum.auto()
    base16 = enum.auto()
    base32 = enum.auto()
    base64 = enum.auto()
    utf8 = enum.auto()
@attrs.frozen(repr=False)
class _WTypeIsBackedBy:
    """attrs validator: a WType attribute's scalar (AVM stack) type must match `backed_by`."""
    backed_by: typing.Literal[AVMType.uint64, AVMType.bytes]
    def __call__(
        self,
        inst: Node,
        attr: attrs.Attribute,  # type: ignore[type-arg]
        value: WType,
    ) -> None:
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not isinstance(inst, Node):
            raise InternalError(f"{self!r} used on type {type(inst).__name__}, expected Node")
        if value.scalar_type != self.backed_by:
            # message fix: the value IS backed by its own scalar_type, just not the required one
            # (previous wording inverted this: "which is not backed by {scalar_type}")
            raise InternalError(
                f"{type(inst).__name__}.{attr.name}: set to {value},"
                f" which is backed by {value.scalar_type}, not {self.backed_by.name}"
            )
    def __repr__(self) -> str:
        return f"<wtype_is_{self.backed_by.name}_backed validator>"
# Pre-built validators requiring a specific AVM scalar backing type.
wtype_is_bytes_backed: typing.Final = _WTypeIsBackedBy(backed_by=AVMType.bytes)
wtype_is_uint64_backed: typing.Final = _WTypeIsBackedBy(backed_by=AVMType.uint64)
@attrs.frozen(kw_only=True)
class BytesConstant(Expression):
    """Bytes literal; wtype must be bytes-backed. `encoding` records how the literal
    was written in source."""
    wtype: WType = attrs.field(default=wtypes.bytes_wtype, validator=wtype_is_bytes_backed)
    value: bytes = attrs.field()
    encoding: BytesEncoding = attrs.field()
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_bytes_constant(self)
@attrs.frozen
class StringConstant(Expression):
    """String literal; native string by default, or the ARC4 string alias."""
    value: str = attrs.field()
    wtype: WType = attrs.field(
        default=wtypes.string_wtype,
        validator=[
            wtype_is_one_of(
                wtypes.string_wtype,
                wtypes.arc4_string_alias,
            )
        ],
    )
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_string_constant(self)
@attrs.frozen
class VoidConstant(Expression):
    """A void-typed constant with no value."""
    # useful as a "no-op"
    wtype: WType = attrs.field(default=wtypes.void_wtype, init=False)
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_void_constant(self)
@attrs.frozen
class TemplateVar(Expression):
    """Reference to a deploy-time template variable, by name."""
    wtype: WType
    name: str
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_template_var(self)
@attrs.frozen
class MethodConstant(Expression):
    """Bytes-typed constant holding a method signature string.

    NOTE(review): presumably lowered to the ARC4 method selector — confirm in lowering.
    """
    wtype: WType = attrs.field(default=wtypes.bytes_wtype, init=False)
    value: str
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_method_constant(self)
@attrs.frozen(kw_only=True)
class AddressConstant(Expression):
    """Address literal; native account type by default, or the ARC4 address alias."""
    wtype: WType = attrs.field(
        default=wtypes.account_wtype,
        validator=wtype_is_one_of(wtypes.account_wtype, wtypes.arc4_address_alias),
    )
    value: str
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_address_constant(self)
@attrs.frozen
class ARC4Encode(Expression):
    """Encode a native value into the given ARC4 type."""
    value: Expression
    wtype: wtypes.ARC4Type = attrs.field()
    @wtype.validator
    def _wtype_validator(self, _attribute: object, wtype: wtypes.ARC4Type) -> None:
        # the target ARC4 type must be able to encode the source value's type
        if not wtype.can_encode_type(self.value.wtype):
            raise InternalError(
                f"cannot ARC4 encode {self.value.wtype} to {wtype}", self.source_location
            )
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_arc4_encode(self)
@attrs.frozen
class Copy(Expression):
    """
    Create a new copy of 'value'
    """
    value: Expression
    wtype: WType = attrs.field(init=False)
    @wtype.default
    def _wtype(self) -> WType:
        # the copy has exactly the same type as its source
        return self.value.wtype
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_copy(self)
@attrs.frozen
class ArrayConcat(Expression):
    """
    Given 'left' or 'right' that is logically an array - concat it with the other value which is
    an iterable type with the same element type
    """
    left: Expression
    right: Expression
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_array_concat(self)
@attrs.frozen
class ArrayPop(Expression):
    """Pop an element from 'base' (logically an array).

    NOTE(review): presumably removes and returns the last element — confirm in lowering.
    """
    base: Expression
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_array_pop(self)
@attrs.frozen
class ArrayExtend(Expression):
    """
    Given 'base' that is logically an array - extend it with 'other' which is an iterable type with
    the same element type
    """
    base: Expression
    other: Expression
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_array_extend(self)
@attrs.frozen
class ARC4Decode(Expression):
    """Decode an ARC4-encoded value into its native (non-ARC4) equivalent; the target
    wtype is validated against what the source's ARC4 type can encode."""
    value: Expression = attrs.field(
        validator=expression_has_wtype(
            wtypes.arc4_bool_wtype,
            wtypes.ARC4UIntN,
            wtypes.ARC4Tuple,
            wtypes.ARC4Struct,
            wtypes.ARC4DynamicArray,  # only if element type is bytes for now
        )
    )
    @value.validator
    def _value_wtype_validator(self, _attribute: object, value: Expression) -> None:
        assert isinstance(value.wtype, wtypes.ARC4Type)  # validated by `value`
        if not value.wtype.can_encode_type(self.wtype):
            raise InternalError(
                f"ARC4Decode from {value.wtype} should have non ARC4 target type {self.wtype}",
                self.source_location,
            )
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_arc4_decode(self)
# Expressions whose value is fully determined at compile time.
CompileTimeConstantExpression: typing.TypeAlias = (
    IntegerConstant
    | DecimalConstant
    | BoolConstant
    | BytesConstant
    | AddressConstant
    | MethodConstant
)
@attrs.define
class IntrinsicCall(Expression):
    """Direct AVM op invocation: op code name plus immediate and stack arguments."""
    op_code: str
    immediates: Sequence[str | int] = attrs.field(default=(), converter=tuple[str | int, ...])
    stack_args: Sequence[Expression] = attrs.field(default=(), converter=tuple[Expression, ...])
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_intrinsic_call(self)
@attrs.define
class CreateInnerTransaction(Expression):
    """Construct an inner-transaction fields value from per-field expressions."""
    wtype: wtypes.WInnerTransactionFields
    fields: Mapping[TxnField, Expression] = attrs.field(converter=immutabledict)
    @fields.validator
    def _validate_fields(self, _attribute: object, fields: Mapping[TxnField, Expression]) -> None:
        # each field value's wtype must be an accepted argument type for that txn field
        for field, value in fields.items():
            if not field.valid_argument_type(value.wtype):
                raise CodeError("invalid type for field", value.source_location)
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_create_inner_transaction(self)
@attrs.define
class UpdateInnerTransaction(Expression):
    """Update fields of an existing inner-transaction fields value; evaluates to void."""
    itxn: Expression = attrs.field(validator=expression_has_wtype(wtypes.WInnerTransactionFields))
    fields: Mapping[TxnField, Expression] = attrs.field(converter=immutabledict)
    wtype: WType = attrs.field(default=wtypes.void_wtype, init=False)
    @fields.validator
    def _validate_fields(self, _attribute: object, fields: Mapping[TxnField, Expression]) -> None:
        # each field value's wtype must be an accepted argument type for that txn field
        for field, value in fields.items():
            if not field.valid_argument_type(value.wtype):
                raise CodeError("invalid type for field", value.source_location)
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_update_inner_transaction(self)
@attrs.frozen
class GroupTransactionReference(Expression):
    """Reference a transaction within the group by its (uint64) index."""
    index: Expression = attrs.field(validator=wtype_is_uint64)
    wtype: wtypes.WGroupTransaction
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_group_transaction_reference(self)
@attrs.define
class CheckedMaybe(Expression):
    """Allows evaluating a maybe type i.e. tuple[_T, bool] as _T, but with the assertion that
    the 2nd bool element is true"""
    expr: Expression
    # comment: rendered with the emitted assertion — NOTE(review): confirm against lowering
    comment: str
    wtype: wtypes.WType = attrs.field(init=False)
    source_location: SourceLocation = attrs.field(init=False)
    @source_location.default
    def _source_location(self) -> SourceLocation:
        return self.expr.source_location
    @wtype.default
    def _wtype(self) -> wtypes.WType:
        # result type is the first element of the wrapped tuple[_T, bool]
        match self.expr.wtype:
            case wtypes.WTuple(types=(wtype, wtypes.bool_wtype)):
                return wtype
            case _:
                raise InternalError(
                    f"{type(self).__name__}.expr: expression of WType {self.expr.wtype} received,"
                    f" expected tuple[_T, bool]"
                )
    @classmethod
    def from_tuple_items(
        cls,
        expr: Expression,
        check: Expression,
        source_location: SourceLocation,
        comment: str,
    ) -> typing.Self:
        """Alternate constructor: build the tuple[_T, bool] from separate value and check."""
        if check.wtype != wtypes.bool_wtype:
            raise InternalError(
                "Check condition for CheckedMaybe should be a boolean", source_location
            )
        tuple_expr = TupleExpression.from_items((expr, check), source_location)
        return cls(expr=tuple_expr, comment=comment)
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_checked_maybe(self)
@attrs.frozen
class TupleExpression(Expression):
    """Tuple literal; wtype must agree element-wise with the items' wtypes."""
    items: Sequence[Expression] = attrs.field(converter=tuple[Expression, ...])
    wtype: wtypes.WTuple = attrs.field()
    @classmethod
    def from_items(cls, items: Sequence[Expression], location: SourceLocation) -> typing.Self:
        """Alternate constructor that derives the tuple wtype from the items."""
        return cls(
            items=items,
            wtype=wtypes.WTuple((i.wtype for i in items), location),
            source_location=location,
        )
    @wtype.validator
    def _wtype_validator(self, _attribute: object, wtype: wtypes.WTuple) -> None:
        if tuple(it.wtype for it in self.items) != wtype.types:
            raise CodeError("Tuple type mismatch", self.source_location)
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_tuple_expression(self)
@attrs.frozen
class TupleItemExpression(Expression):
    """Represents tuple element access.

    Note: this is its own item (vs IndexExpression) for two reasons:
    1. It's not a valid lvalue (tuples are immutable)
    2. The index must always be a literal, and can be negative
    """
    base: Expression
    index: int
    wtype: wtypes.WType = attrs.field(init=False)
    @wtype.default
    def _wtype(self) -> wtypes.WType:
        # result type comes from the (native or ARC4) tuple's element types;
        # a Python IndexError here becomes a CodeError at the node's location
        base_wtype = self.base.wtype
        if not isinstance(base_wtype, wtypes.WTuple | wtypes.ARC4Tuple):
            raise InternalError(
                f"Tuple item expression should be for a tuple type, got {base_wtype}",
                self.source_location,
            )
        try:
            wtype = base_wtype.types[self.index]
        except IndexError as ex:
            raise CodeError("invalid index into tuple expression", self.source_location) from ex
        return wtype
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_tuple_item_expression(self)
@attrs.frozen
class VarExpression(Expression):
    """Reference to a local variable by name."""
    name: str
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_var_expression(self)
@attrs.frozen(kw_only=True)
class InnerTransactionField(Expression):
    """Read a field of an inner-transaction result; array fields additionally
    require an array_index."""
    itxn: Expression = attrs.field(validator=expression_has_wtype(wtypes.WInnerTransaction))
    field: TxnField
    array_index: Expression | None = None
    def __attrs_post_init__(self) -> None:
        # array_index must be supplied exactly when the field is an array field
        has_array = self.array_index is not None
        if has_array != self.field.is_array:
            # message fix: removed stray space ("no  array" / "and  array" previously)
            raise InternalError(
                f"Inconsistent field and array_index combination: "
                f"{self.field} and {'' if has_array else 'no '}array provided",
                self.source_location,
            )
        # the declared result type must be backed by the field's AVM type
        if self.wtype.scalar_type != self.field.avm_type:
            raise InternalError(
                f"wtype of field {self.field.immediate} is {self.field.wtype}"
                f" which is not compatible with specified result type {self.wtype}",
                self.source_location,
            )
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_inner_transaction_field(self)
@attrs.define
class SubmitInnerTransaction(Expression):
    """Submit one or more inner transactions; result is a single inner-transaction
    type, or a tuple of them when multiple are submitted."""
    itxns: Sequence[Expression] = attrs.field(converter=tuple[Expression, ...])
    wtype: WType = attrs.field(init=False)
    @wtype.default
    def _wtype(self) -> wtypes.WType:
        txn_types = []
        for expr in self.itxns:
            if not isinstance(expr.wtype, wtypes.WInnerTransactionFields):
                raise CodeError("invalid expression type for submit", expr.source_location)
            txn_types.append(wtypes.WInnerTransaction.from_type(expr.wtype.transaction_type))
        # single submission yields the transaction type directly, not a 1-tuple
        try:
            (single_txn,) = txn_types
        except ValueError:
            return wtypes.WTuple(txn_types, self.source_location)
        else:
            return single_txn
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_submit_inner_transaction(self)
@attrs.frozen
class FieldExpression(Expression):
    """Access a named field of a struct, ARC4 struct, or named tuple; the result
    type is derived from the field's declared type."""
    base: Expression = attrs.field(
        validator=expression_has_wtype(wtypes.WStructType, wtypes.ARC4Struct, wtypes.WTuple)
    )
    name: str
    wtype: wtypes.WType = attrs.field(init=False)
    @wtype.default
    def _wtype_factory(self) -> wtypes.WType:
        dataclass_type = self.base.wtype
        assert isinstance(dataclass_type, wtypes.WStructType | wtypes.ARC4Struct | wtypes.WTuple)
        try:
            return dataclass_type.fields[self.name]
        except KeyError:
            raise CodeError(f"invalid field for {dataclass_type}", self.source_location) from None
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_field_expression(self)
@attrs.frozen
class IndexExpression(Expression):
    """Index into bytes or an ARC4 array with a uint64 index."""
    base: Expression = attrs.field(
        validator=expression_has_wtype(
            wtypes.bytes_wtype,
            wtypes.ARC4StaticArray,
            wtypes.ARC4DynamicArray,
            # NOTE: tuples (native or arc4) use TupleItemExpression instead
        )
    )
    index: Expression = attrs.field(validator=expression_has_wtype(wtypes.uint64_wtype))
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_index_expression(self)
@attrs.frozen
class SliceExpression(Expression):
    """Slice of bytes or a tuple, between optional begin/end index expressions."""
    base: Expression = attrs.field(
        validator=expression_has_wtype(
            wtypes.bytes_wtype,
            wtypes.WTuple,
        )
    )
    begin_index: Expression | None
    end_index: Expression | None
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_slice_expression(self)
@attrs.frozen
class IntersectionSliceExpression(Expression):
    """
    Returns the intersection of the slice indexes and the base
    """
    base: Expression = attrs.field(
        validator=expression_has_wtype(
            wtypes.bytes_wtype,
            wtypes.WTuple,
        )
    )
    # indices may be compile-time ints (which can be negative) or runtime expressions
    begin_index: Expression | int | None
    end_index: Expression | int | None
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_intersection_slice_expression(self)
@attrs.frozen
class AppStateExpression(Expression):
    """Application (global) state value, addressed by a state key."""
    key: Expression = attrs.field(validator=expression_has_wtype(wtypes.state_key))
    exists_assertion_message: str | None
    """TEAL comment that will be emitted in a checked-read scenario"""
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_app_state_expression(self)
@attrs.frozen
class AppAccountStateExpression(Expression):
    """Per-account (local) application state, addressed by state key plus an
    account (Account value or uint64 group index)."""
    key: Expression = attrs.field(validator=expression_has_wtype(wtypes.state_key))
    exists_assertion_message: str | None
    """TEAL comment that will be emitted in a checked-read scenario"""
    account: Expression = attrs.field(
        validator=expression_has_wtype(wtypes.account_wtype, wtypes.uint64_wtype)
    )
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_app_account_state_expression(self)
@attrs.frozen
class BoxValueExpression(Expression):
    """Box storage value, addressed by a box key."""
    key: Expression = attrs.field(validator=expression_has_wtype(wtypes.box_key))
    exists_assertion_message: str | None
    """TEAL comment that will be emitted in a checked-read scenario"""
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_box_value_expression(self)
@attrs.frozen
class SingleEvaluation(Expression):
    """
    This node wraps an underlying expression and effectively caches the result of that lowering,
    such that regardless of how many times the SingleEvaluation object
    (or any object comparing equal to it) appears in the AWST,
    the underlying source expression will only be evaluated once.
    """
    source: Expression
    # identity of this node participates in equality, so distinct wrappers of equal
    # sources are still evaluated separately
    _id: int = attrs.field()
    wtype: WType = attrs.field(init=False, eq=False)
    source_location: SourceLocation = attrs.field(eq=False)
    @_id.default
    def _default_id(self) -> int:
        return id(self)
    @wtype.default
    def _wtype(self) -> WType:
        return self.source.wtype
    @source_location.default
    def _default_source_location(self) -> SourceLocation:
        return self.source.source_location
    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_single_evaluation(self)
@attrs.frozen
class ReinterpretCast(Expression):
    """Convert an expression to an AVM equivalent type.

    Note: the validation of this isn't done until IR construction"""

    expr: Expression

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_reinterpret_cast(self)
# Expression types that read from one of the three storage kinds (global/local/box)
StorageExpression = AppStateExpression | AppAccountStateExpression | BoxValueExpression
# Expression types that are valid on the left hand side of assignment *statements*
# Note that some of these can be recursive/nested, eg:
# obj.field[index].another_field = 123
Lvalue = VarExpression | FieldExpression | IndexExpression | TupleExpression | StorageExpression
@attrs.frozen
class NewArray(Expression):
    """Construct a (native or ARC-4) array from the given element expressions."""

    wtype: wtypes.WArray | wtypes.ARC4Array
    values: Sequence[Expression] = attrs.field(default=(), converter=tuple[Expression, ...])

    @values.validator
    def _check_element_types(self, _attribute: object, value: tuple[Expression, ...]) -> None:
        # every element must match the array's declared element type exactly
        if any(expr.wtype != self.wtype.element_type for expr in value):
            raise ValueError(
                f"All array elements should have array type: {self.wtype.element_type}"
            )

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_new_array(self)
@attrs.frozen
class ConditionalExpression(Expression):
    """A "ternary" operator with conditional evaluation - the true and false expressions must only
    be evaluated if they will be the result of expression.
    """

    condition: Expression = attrs.field(validator=[wtype_is_bool])
    true_expr: Expression
    false_expr: Expression
    wtype: WType = attrs.field()

    @wtype.default
    def _wtype(self) -> WType:
        # both branches must agree on type; the default derives it from the true branch
        if self.true_expr.wtype != self.false_expr.wtype:
            raise CodeError(
                f"true and false expressions of conditional have differing types:"
                f" {self.true_expr.wtype} and {self.false_expr.wtype}",
                self.source_location,
            )
        return self.true_expr.wtype

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_conditional_expression(self)
@attrs.frozen
class AssignmentStatement(Statement):
    """
    A single assignment statement e.g. `a = 1`.

    Multi-assignment statements like `a = b = 1` should be split in the AST pass.

    Will validate that target and value are of the same type, and that said type is usable
    as an l-value.
    """

    target: Lvalue
    value: Expression

    def __attrs_post_init__(self) -> None:
        # runs after all fields are set, so both wtypes are available to compare
        if self.value.wtype != self.target.wtype:
            raise CodeError(
                "assignment target type differs from expression value type",
                self.source_location,
            )
        if self.value.wtype == wtypes.void_wtype:
            raise CodeError("void type cannot be assigned", self.source_location)

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_assignment_statement(self)
@attrs.frozen
class AssignmentExpression(Expression):
    """
    This both assigns value to target and returns the value as the result of the expression.

    Will validate that target and value are of the same type, and that said type is usable
    as an l-value.
    """

    target: Lvalue = attrs.field()
    value: Expression = attrs.field()
    # result type is always the target's type
    wtype: wtypes.WType = attrs.field(init=False)

    @wtype.default
    def _wtype(self) -> wtypes.WType:
        return self.target.wtype

    @value.validator
    def _value_validator(self, _attribute: object, value: Expression) -> None:
        if value.wtype != self.target.wtype:
            raise CodeError(
                "assignment target type differs from expression value type",
                self.source_location,
            )
        if value.wtype == wtypes.void_wtype:
            raise CodeError("void type cannot be assigned", self.source_location)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_assignment_expression(self)
class EqualityComparison(enum.StrEnum):
    """Operators valid for equality-only comparisons (e.g. bytes-like values)."""

    eq = "=="
    ne = "!="


class NumericComparison(enum.StrEnum):
    """Operators valid for ordered (numeric) comparisons."""

    eq = "=="  # 😩 why can't Python have enum inheritance
    ne = "!="
    lt = "<"
    lte = "<="
    gt = ">"
    gte = ">="


# validator restricting operands to wtypes that support numeric comparison on the AVM
numeric_comparable = expression_has_wtype(
    wtypes.uint64_wtype,
    wtypes.biguint_wtype,
    wtypes.bool_wtype,
    wtypes.asset_wtype,
    wtypes.application_wtype,
)
@attrs.frozen
class NumericComparisonExpression(Expression):
    """Compare two numeric types.

    Any type promotion etc should be done at the source language level,
    this operation expects both arguments to already be in the same type.
    This is to insulate against language-specific differences in type promotion rules,
    or equality comparisons with bool, and so on.
    """

    wtype: WType = attrs.field(default=wtypes.bool_wtype, init=False)

    # TODO: make these names consistent with other expressions
    lhs: Expression = attrs.field(validator=[numeric_comparable])
    operator: NumericComparison
    rhs: Expression = attrs.field(validator=[numeric_comparable])

    def __attrs_post_init__(self) -> None:
        # mismatched operand types indicate a front-end bug, hence InternalError
        if self.lhs.wtype != self.rhs.wtype:
            raise InternalError(
                "numeric comparison between different wtypes:"
                f" {self.lhs.wtype} and {self.rhs.wtype}",
                self.source_location,
            )

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_numeric_comparison_expression(self)
# validator restricting operands to wtypes that compare as raw bytes
bytes_comparable = expression_has_wtype(
    wtypes.bytes_wtype,
    wtypes.account_wtype,
    wtypes.string_wtype,
    wtypes.ARC4Type,
)


@attrs.frozen
class BytesComparisonExpression(Expression):
    """Equality comparison of two bytes-backed values of the same wtype."""

    wtype: WType = attrs.field(default=wtypes.bool_wtype, init=False)

    lhs: Expression = attrs.field(validator=[bytes_comparable])
    operator: EqualityComparison
    rhs: Expression = attrs.field(validator=[bytes_comparable])

    def __attrs_post_init__(self) -> None:
        # mismatched operand types indicate a front-end bug, hence InternalError
        if self.lhs.wtype != self.rhs.wtype:
            raise InternalError(
                "bytes comparison between different wtypes:"
                f" {self.lhs.wtype} and {self.rhs.wtype}",
                self.source_location,
            )

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_bytes_comparison_expression(self)
@attrs.frozen
class SubroutineID:
    """Direct reference to a free-standing subroutine by its fully-qualified id."""

    target: str


@attrs.frozen(kw_only=True)
class InstanceMethodTarget:
    """Call to a method on the current instance, resolved via the contract's MRO."""

    member_name: str


@attrs.frozen(kw_only=True)
class InstanceSuperMethodTarget:
    """Call to a method on the current instance, starting resolution after the current class."""

    member_name: str


@attrs.frozen(kw_only=True)
class ContractMethodTarget:
    """Call to a method on a specific contract class."""

    cref: ContractReference
    member_name: str


SubroutineTarget = (
    SubroutineID | InstanceMethodTarget | InstanceSuperMethodTarget | ContractMethodTarget
)
@attrs.frozen
class CallArg:
    """A single argument in a subroutine call."""

    name: str | None  # if None, then passed positionally
    value: Expression


@attrs.frozen
class SubroutineCallExpression(Expression):
    """Invocation of a subroutine or contract method with the given arguments."""

    target: SubroutineTarget
    args: Sequence[CallArg] = attrs.field(converter=tuple[CallArg, ...])

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_subroutine_call_expression(self)
@attrs.frozen
class PuyaLibData:
    """Signature metadata for an embedded puya library function."""

    id: str  # fully-qualified id of the embedded implementation
    params: Mapping[str, wtypes.WType]  # parameter name -> wtype, in call order
    wtype: wtypes.WType  # return type


class PuyaLibFunction(enum.Enum):
    """The set of embedded puya library functions that may be called from AWST."""

    ensure_budget = PuyaLibData(
        id="_puya_lib.util.ensure_budget",
        params={"required_budget": wtypes.uint64_wtype, "fee_source": wtypes.uint64_wtype},
        wtype=wtypes.void_wtype,
    )
    is_substring = PuyaLibData(
        id="_puya_lib.bytes_.is_substring",
        params={"item": wtypes.bytes_wtype, "sequence": wtypes.bytes_wtype},
        wtype=wtypes.bool_wtype,
    )
# NOTE(review): uses @attrs.define (mutable) while sibling Expression nodes use
# @attrs.frozen - confirm whether mutability is intentional here
@attrs.define
class PuyaLibCall(Expression):
    """Call to one of the embedded puya library functions."""

    func: PuyaLibFunction
    args: Sequence[CallArg] = attrs.field(default=(), converter=tuple[CallArg, ...])
    # result type comes from the library function's declared return wtype
    wtype: wtypes.WType = attrs.field(init=False)

    @wtype.default
    def _wtype(self) -> wtypes.WType:
        return self.func.value.wtype

    @args.validator
    def _args_validator(self, _: object, args: Sequence[CallArg]) -> None:
        # arity check only; argument types are trusted to match the declared params
        if len(self.func.value.params) != len(args):
            raise CodeError(
                f"provided args does not match arity for {self.func.name}", self.source_location
            )

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_puya_lib_call(self)
@enum.unique
class UInt64BinaryOperator(enum.StrEnum):
    """Binary operators supported on uint64 values."""

    add = "+"
    sub = "-"
    mult = "*"
    floor_div = "//"
    mod = "%"
    pow = "**"
    lshift = "<<"
    rshift = ">>"
    bit_or = "|"
    bit_xor = "^"
    bit_and = "&"
    # unsupported:
    # / aka ast.Div
    # @ aka ast.MatMult


@enum.unique
class BigUIntBinaryOperator(enum.StrEnum):
    """Binary operators supported on big-uint (512-bit) values."""

    add = "+"
    sub = "-"
    mult = "*"
    floor_div = "//"
    mod = "%"
    bit_or = "|"
    bit_xor = "^"
    bit_and = "&"
    # unsupported:
    # ** aka ast.Pow
    # / aka ast.Div
    # @ aka ast.MatMult
    # << aka ast.LShift
    # >> aka ast.RShift


@enum.unique
class BytesBinaryOperator(enum.StrEnum):
    """Binary operators supported on bytes values."""

    add = "+"
    bit_or = "|"
    bit_xor = "^"
    bit_and = "&"


@enum.unique
class BytesUnaryOperator(enum.StrEnum):
    """Unary operators supported on bytes values."""

    bit_invert = "~"


@enum.unique
class UInt64UnaryOperator(enum.StrEnum):
    """Unary operators supported on uint64 values."""

    bit_invert = "~"


@enum.unique
class UInt64PostfixUnaryOperator(enum.StrEnum):
    """Postfix unary operators supported on uint64 l-values."""

    increment = "++"
    decrement = "--"
@attrs.frozen
class UInt64UnaryOperation(Expression):
    """Apply a unary operator to a uint64 expression."""

    op: UInt64UnaryOperator
    expr: Expression = attrs.field(validator=[wtype_is_uint64])
    wtype: WType = attrs.field(default=wtypes.uint64_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_uint64_unary_operation(self)


@attrs.frozen
class UInt64PostfixUnaryOperation(Expression):
    """Apply a postfix ++/-- to a uint64 l-value, yielding the pre-update value."""

    op: UInt64PostfixUnaryOperator
    target: Lvalue = attrs.field(validator=[wtype_is_uint64])
    wtype: WType = attrs.field(default=wtypes.uint64_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_uint64_postfix_unary_operation(self)
@enum.unique
class BigUIntPostfixUnaryOperator(enum.StrEnum):
    """Postfix unary operators supported on big-uint targets."""

    increment = "++"
    decrement = "--"


@attrs.frozen
class BigUIntPostfixUnaryOperation(Expression):
    """Apply a postfix ++/-- to a big-uint target."""

    op: BigUIntPostfixUnaryOperator
    # NOTE(review): annotated Expression, but the UInt64 counterpart uses Lvalue;
    # a postfix op writes back to its target, so Lvalue looks intended - confirm
    # before changing, as the annotation participates in (de)serialization
    target: Expression = attrs.field(validator=[wtype_is_biguint])
    wtype: WType = attrs.field(default=wtypes.biguint_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_biguint_postfix_unary_operation(self)
@attrs.frozen
class BytesUnaryOperation(Expression):
    """Apply a unary operator to a bytes expression."""

    op: BytesUnaryOperator
    expr: Expression = attrs.field(validator=[wtype_is_bytes])
    wtype: WType = attrs.field(default=wtypes.bytes_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_bytes_unary_operation(self)


@attrs.frozen
class UInt64BinaryOperation(Expression):
    """Apply a binary operator to two uint64 expressions."""

    left: Expression = attrs.field(validator=[wtype_is_uint64])
    op: UInt64BinaryOperator
    right: Expression = attrs.field(validator=[wtype_is_uint64])
    wtype: WType = attrs.field(default=wtypes.uint64_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_uint64_binary_operation(self)


@attrs.frozen
class BigUIntBinaryOperation(Expression):
    """Apply a binary operator to two big-uint expressions."""

    left: Expression = attrs.field(validator=[wtype_is_biguint])
    op: BigUIntBinaryOperator
    right: Expression = attrs.field(validator=[wtype_is_biguint])
    wtype: WType = attrs.field(default=wtypes.biguint_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_biguint_binary_operation(self)
@attrs.frozen
class BytesBinaryOperation(Expression):
    """Apply a binary operator to two bytes-or-string expressions of the same wtype."""

    left: Expression = attrs.field(
        validator=[expression_has_wtype(wtypes.bytes_wtype, wtypes.string_wtype)]
    )
    op: BytesBinaryOperator
    right: Expression = attrs.field(
        validator=[expression_has_wtype(wtypes.bytes_wtype, wtypes.string_wtype)]
    )
    # result type mirrors the operand type (bytes or string)
    wtype: WType = attrs.field(init=False)

    @right.validator
    def _check_right(self, _attribute: object, right: Expression) -> None:
        # left is declared before right, so self.left is already set here
        if right.wtype != self.left.wtype:
            raise CodeError(
                f"Bytes operation on differing types,"
                f" lhs is {self.left.wtype}, rhs is {self.right.wtype}",
                self.source_location,
            )

    @wtype.default
    def _wtype_factory(self) -> wtypes.WType:
        return self.left.wtype

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_bytes_binary_operation(self)
@enum.unique
class BinaryBooleanOperator(enum.StrEnum):
    """Logical connectives for boolean expressions."""

    and_ = "and"
    or_ = "or"


@attrs.frozen
class BooleanBinaryOperation(Expression):
    """Logical and/or of two boolean expressions."""

    left: Expression = attrs.field(validator=[wtype_is_bool])
    op: BinaryBooleanOperator
    right: Expression = attrs.field(validator=[wtype_is_bool])
    wtype: WType = attrs.field(default=wtypes.bool_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_boolean_binary_operation(self)


@attrs.frozen
class Not(Expression):
    """Logical negation of a boolean expression."""

    expr: Expression = attrs.field(validator=[wtype_is_bool])
    wtype: WType = attrs.field(default=wtypes.bool_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_not_expression(self)
@attrs.frozen
class UInt64AugmentedAssignment(Statement):
    """Augmented assignment (e.g. `x += y`) on a uint64 l-value."""

    target: Lvalue = attrs.field(validator=[wtype_is_uint64])
    op: UInt64BinaryOperator
    value: Expression = attrs.field(validator=[wtype_is_uint64])

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_uint64_augmented_assignment(self)


@attrs.frozen
class BigUIntAugmentedAssignment(Statement):
    """Augmented assignment (e.g. `x += y`) on a big-uint l-value."""

    target: Lvalue = attrs.field(validator=[wtype_is_biguint])
    op: BigUIntBinaryOperator
    value: Expression = attrs.field(validator=[wtype_is_biguint])

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_biguint_augmented_assignment(self)
@attrs.frozen
class BytesAugmentedAssignment(Statement):
    """Augmented assignment (e.g. `x += y`) on a bytes/string/arc4-string l-value."""

    target: Lvalue = attrs.field(
        validator=[
            expression_has_wtype(wtypes.bytes_wtype, wtypes.string_wtype, wtypes.arc4_string_alias)
        ]
    )
    op: BytesBinaryOperator
    value: Expression = attrs.field(
        validator=[
            expression_has_wtype(wtypes.bytes_wtype, wtypes.string_wtype, wtypes.arc4_string_alias)
        ]
    )

    @value.validator
    def _check_value(self, _attribute: object, value: Expression) -> None:
        # target is declared before value, so self.target is already set here
        if value.wtype != self.target.wtype:
            raise CodeError(
                f"Augmented assignment of differing types,"
                f" expected {self.target.wtype}, got {value.wtype}",
                value.source_location,
            )

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_bytes_augmented_assignment(self)
@attrs.frozen
class Emit(Expression):
    """Emit an ARC-28 event log with the given signature and struct payload."""

    signature: str
    value: Expression = attrs.field(validator=expression_has_wtype(wtypes.ARC4Struct))
    wtype: WType = attrs.field(default=wtypes.void_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_emit(self)


@attrs.frozen
class Range(Expression):
    """A uint64 range (start, stop, step), typically the sequence of a for-loop."""

    wtype: WType = attrs.field(default=wtypes.uint64_range_wtype, init=False)
    start: Expression = attrs.field(validator=[wtype_is_uint64])
    stop: Expression = attrs.field(validator=[wtype_is_uint64])
    step: Expression = attrs.field(validator=[wtype_is_uint64])

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_range(self)
@attrs.frozen
class Enumeration(Expression):
    """Wrap an iterable expression so iteration also yields a uint64 index."""

    expr: Expression
    # wtype wraps the underlying sequence's wtype
    wtype: wtypes.WEnumeration = attrs.field(init=False)

    @wtype.default
    def _wtype(self) -> wtypes.WEnumeration:
        return wtypes.WEnumeration(self.expr.wtype)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_enumeration(self)


@attrs.frozen
class Reversed(Expression):
    """Wrap an iterable expression so it is iterated in reverse order."""

    expr: Expression
    # iterating in reverse does not change the sequence type
    wtype: WType = attrs.field(init=False)

    @wtype.default
    def _wtype(self) -> WType:
        return self.expr.wtype

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_reversed(self)
@attrs.frozen
class ForInLoop(Statement):
    """A for-each loop over a sequence expression."""

    sequence: Expression
    items: Lvalue  # item variable(s)
    loop_body: Block

    def accept(self, visitor: StatementVisitor[T]) -> T:
        return visitor.visit_for_in_loop(self)
@attrs.frozen
class StateGet(Expression):
    """
    Get value or default if unset - note that for get without a default,
    can just use the underlying StateExpression
    """

    field: StorageExpression
    default: Expression = attrs.field()
    wtype: WType = attrs.field(init=False)

    @default.validator
    def _check_default(self, _attribute: object, default: Expression) -> None:
        # default must be the same type as the stored value
        if self.field.wtype != default.wtype:
            raise CodeError(
                "Default state value should match storage type", default.source_location
            )

    @wtype.default
    def _wtype_factory(self) -> WType:
        return self.field.wtype

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_state_get(self)
@attrs.frozen
class StateGetEx(Expression):
    """Read a storage value along with a bool indicating whether it exists."""

    field: StorageExpression
    # result is a (value, exists) tuple
    wtype: wtypes.WTuple = attrs.field(init=False)

    @wtype.default
    def _wtype_factory(self) -> wtypes.WTuple:
        return wtypes.WTuple(
            (self.field.wtype, wtypes.bool_wtype),
            self.source_location,
        )

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_state_get_ex(self)


@attrs.frozen
class StateExists(Expression):
    """Test whether a storage value exists."""

    field: StorageExpression
    wtype: WType = attrs.field(default=wtypes.bool_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_state_exists(self)


@attrs.frozen
class StateDelete(Expression):
    """Delete a storage value."""

    field: StorageExpression
    wtype: WType = attrs.field(default=wtypes.void_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_state_delete(self)
@attrs.frozen
class NewStruct(Expression):
    """Construct a (native or ARC-4) struct from named field values."""

    wtype: wtypes.WStructType | wtypes.ARC4Struct
    values: Mapping[str, Expression] = attrs.field(converter=immutabledict)

    @values.validator
    def _validate_values(self, _instance: object, values: Mapping[str, Expression]) -> None:
        # exact field-name match required - no missing or extra fields
        if values.keys() != self.wtype.fields.keys():
            raise CodeError("Invalid argument(s)", self.source_location)
        for field_name, field_value in self.values.items():
            expected_wtype = self.wtype.fields[field_name]
            if field_value.wtype != expected_wtype:
                raise CodeError("Invalid argument type(s)", self.source_location)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_new_struct(self)
@attrs.frozen
class RootNode(Node, ABC):
    """Base class for top-level AWST nodes (subroutines, contracts, logic sigs)."""

    @property
    @abstractmethod
    def id(self) -> str: ...

    @abstractmethod
    def accept(self, visitor: RootNodeVisitor[T]) -> T: ...
@attrs.frozen(kw_only=True)
class SubroutineArgument:
    """A single parameter declaration of a subroutine."""

    name: str
    source_location: SourceLocation
    wtype: WType = attrs.field()

    @wtype.validator
    def _wtype_validator(self, _attribute: object, wtype: WType) -> None:
        if wtype == wtypes.void_wtype:
            raise CodeError("void type arguments are not supported", self.source_location)


@attrs.frozen
class MethodDocumentation:
    """User-supplied docstring parts of a method, used for ARC-32/ARC-4 output."""

    description: str | None = None
    args: immutabledict[str, str] = attrs.field(default={}, converter=immutabledict)
    returns: str | None = None
@attrs.frozen
class Function(Node, ABC):
    """Common base for callable AWST nodes (free subroutines and contract methods)."""

    args: Sequence[SubroutineArgument] = attrs.field(converter=tuple[SubroutineArgument, ...])
    return_type: WType
    body: Block
    documentation: MethodDocumentation
    # None means "let the optimizer decide" whether to inline
    inline: bool | None = None

    @property
    @abstractmethod
    def short_name(self) -> str: ...

    @property
    @abstractmethod
    def full_name(self) -> str: ...
@attrs.frozen(kw_only=True)
class Subroutine(Function, RootNode):
    """A free-standing (non-contract) subroutine."""

    id: str  # fully-qualified, unique within a compilation
    name: str

    @property
    def short_name(self) -> str:
        return self.name

    @property
    def full_name(self) -> str:
        return self.id

    def accept(self, visitor: RootNodeVisitor[T]) -> T:
        return visitor.visit_subroutine(self)


# a whole AWST "module" is simply the sequence of its root nodes
AWST: typing.TypeAlias = Sequence[RootNode]
@attrs.frozen
class ContractMemberNode(Node, ABC):
    """Base class for nodes that are members of a contract (methods, storage)."""

    @property
    @abc.abstractmethod
    def member_name(self) -> str: ...

    @abc.abstractmethod
    def accept(self, visitor: ContractMemberVisitor[T]) -> T: ...
@attrs.frozen(kw_only=True)
class ContractMethod(Function, ContractMemberNode):
    """A method defined on a contract class."""

    cref: ContractReference  # the class the method is defined on
    member_name: str
    # None means the method is not routable via ARC-4
    arc4_method_config: "ARC4MethodConfig | None"

    @property
    def short_name(self) -> str:
        return self.member_name

    @property
    def full_name(self) -> str:
        return f"{self.cref}.{self.member_name}"

    def accept(self, visitor: ContractMemberVisitor[T]) -> T:
        return visitor.visit_contract_method(self)
@enum.unique
class AppStorageKind(enum.Enum):
    """The three kinds of application storage on the AVM."""

    app_global = enum.auto()
    account_local = enum.auto()
    box = enum.auto()


@attrs.frozen
class AppStorageDefinition(ContractMemberNode):
    """Declaration of a single app storage member (global, local, or box)."""

    member_name: str
    kind: AppStorageKind
    storage_wtype: WType
    key_wtype: WType | None
    """if not None, then this is a map rather than singular"""
    key: BytesConstant
    """for maps, this is the prefix"""
    description: str | None

    def accept(self, visitor: ContractMemberVisitor[T]) -> T:
        return visitor.visit_app_storage_definition(self)
def _validate_avm_version(node: Node, _: object, avm_version: int | None) -> None:
    """attrs field validator: reject an explicitly-set AVM version that is unsupported."""
    if avm_version is None:
        # None means "use the default target version", which is always acceptable
        return
    if avm_version in SUPPORTED_AVM_VERSIONS:
        return
    raise CodeError(
        "unsupported AVM version",
        node.source_location,
    )
@attrs.frozen(kw_only=True)
class LogicSignature(RootNode):
    """A logic signature (smart signature) program."""

    id: LogicSigReference
    short_name: str
    program: Subroutine = attrs.field()
    docstring: str | None
    # None means "use options.target_avm_version"
    avm_version: int | None = attrs.field(validator=_validate_avm_version)

    @program.validator
    def _validate_program(self, _instance: object, program: Subroutine) -> None:
        # logicsig entry points take no args and must return a truthiness value
        if program.args:
            raise CodeError(
                "logicsig should not take any args",
                program.args[0].source_location,
            )
        if program.return_type not in (wtypes.uint64_wtype, wtypes.bool_wtype):
            raise CodeError(
                "Invalid return type for logicsig method, should be either bool or UInt64.",
                program.source_location,
            )

    def accept(self, visitor: RootNodeVisitor[T]) -> T:
        return visitor.visit_logic_signature(self)
@attrs.frozen
class CompiledContract(Expression):
    """Reference to the compiled artifact of another contract (for inner deploys etc.)."""

    contract: ContractReference
    # optional overrides for the program allocation fields only
    allocation_overrides: Mapping[TxnField, Expression] = attrs.field(
        factory=immutabledict, converter=immutabledict
    )

    prefix: str | None = None
    """
    Prefix will either be the value specified here or PuyaOptions.template_vars_prefix
    if prefix is None

    The prefix is then prefixed with the template_variables keys on this node to determine the
    final template variable name
    """

    template_variables: Mapping[str, Expression] = attrs.field(
        factory=immutabledict, converter=immutabledict
    )
    """
    template variables combined with their prefix defined on this node take precedence over
    template variables of the same key defined on PuyaOptions
    """

    @allocation_overrides.validator
    def _allocation_overrides(
        self, _attribute: object, value: Mapping[TxnField, Expression]
    ) -> None:
        # any key outside the allocation field set is a front-end bug
        if value.keys() - {
            TxnField.ExtraProgramPages,
            TxnField.GlobalNumUint,
            TxnField.GlobalNumByteSlice,
            TxnField.LocalNumUint,
            TxnField.LocalNumByteSlice,
        }:
            raise InternalError("only allocation fields can be overridden", self.source_location)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_compiled_contract(self)
@attrs.frozen
class CompiledLogicSig(Expression):
    """Reference to the compiled artifact of a logic signature."""

    logic_sig: LogicSigReference
    # see CompiledContract for prefix/template-variable semantics
    prefix: str | None = None
    template_variables: Mapping[str, Expression] = attrs.field(
        converter=immutabledict, factory=immutabledict
    )

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_compiled_logicsig(self)
@attrs.frozen(kw_only=True)
class StateTotals:
    """Explicit state-allocation totals; None fields fall back to inferred values."""

    global_uints: int | None = None
    local_uints: int | None = None
    global_bytes: int | None = None
    local_bytes: int | None = None


@attrs.frozen
class ARC4Router(Expression):
    """Placeholder expression for the generated ARC-4 routing logic."""

    wtype: WType = attrs.field(default=wtypes.bool_wtype, init=False)

    def accept(self, visitor: ExpressionVisitor[T]) -> T:
        return visitor.visit_arc4_router(self)
@attrs.frozen(kw_only=True)
class Contract(RootNode):
    """A fully-linearized contract, ready for compilation."""

    id: ContractReference
    """The fully qualified ID, must be unique within a compilation run."""
    name: str
    """The short / friendly name for the Contract, used in output file naming and in ARC-32"""
    description: str | None
    """The (user supplied) contract description, currently only used in ARC-32 output"""
    method_resolution_order: Sequence[ContractReference]
    """
    The lookup order to use when resolving calls to contract methods.
    Shouldn't include the current contract, but otherwise should be exhaustive,
    ie no recursive lookup is implemented for bases of bases.
    Front-ends for languages with multiple-inheritance must linearize their hierarchy in the
    correct order, which is why bases of bases are not resolved, since they may end up in a
    different order depending on the contract that is ultimately being compiled.
    """
    approval_program: ContractMethod = attrs.field()
    """
    The entry point to the approval program, can also appear in methods, but this is not required.
    """
    clear_program: ContractMethod = attrs.field()
    """
    The entry point to the clear-state program, can appear in methods, but not required.
    """
    methods: Sequence[ContractMethod] = attrs.field(converter=tuple[ContractMethod, ...])
    """
    All the methods in this contract and in its entire hierarchy.
    A Sequence for serialization purposes, order doesn't matter.
    """
    app_state: Sequence[AppStorageDefinition] = attrs.field(
        converter=tuple[AppStorageDefinition, ...]
    )
    """
    All the app storage on this contract.
    A Sequence for serialization purposes, order doesn't matter.
    """
    state_totals: StateTotals | None
    """State totals which can override in part or in full those implied by `app_state`."""
    reserved_scratch_space: Set[int]
    """Scratch slots that the contract is explicitly setting aside for direct/explicit usage."""
    avm_version: int | None = attrs.field(validator=_validate_avm_version)
    """AVM version to target, defaults to options.target_avm_version"""

    @approval_program.validator
    def check_approval(self, _attribute: object, approval: ContractMethod) -> None:
        # entry points take no args, return truthiness, and must not be ABI-routed
        if approval.args:
            raise CodeError(
                "approval method should not take any args (other than self)",
                approval.source_location,
            )
        if approval.return_type not in (wtypes.uint64_wtype, wtypes.bool_wtype):
            raise CodeError(
                "Invalid return type for approval method, should be either bool or UInt64.",
                approval.source_location,
            )
        if approval.arc4_method_config:
            raise CodeError(
                "approval method should not be marked as an ABI method",
                approval.source_location,
            )

    @clear_program.validator
    def check_clear(self, _attribute: object, clear: ContractMethod) -> None:
        # same constraints as the approval program
        if clear.args:
            raise CodeError(
                "clear-state method should not take any args (other than self)",
                clear.source_location,
            )
        if clear.return_type not in (wtypes.uint64_wtype, wtypes.bool_wtype):
            raise CodeError(
                "Invalid return type for clear-state method, should be either bool or UInt64.",
                clear.source_location,
            )
        if clear.arc4_method_config:
            raise CodeError(
                "clear-state method should not be marked as an ABI method",
                clear.source_location,
            )

    @cached_property
    def all_methods(self) -> Sequence[ContractMethod]:
        # entry points may or may not already appear in `methods`; de-duplicate
        return unique((self.approval_program, self.clear_program, *self.methods))

    def accept(self, visitor: RootNodeVisitor[T]) -> T:
        return visitor.visit_contract(self)

    @typing.overload
    def resolve_contract_method(self, name: str) -> ContractMethod | None: ...

    @typing.overload
    def resolve_contract_method(
        self,
        name: str,
        source_location: SourceLocation,
        *,
        start: ContractReference,
        skip: bool = False,
    ) -> ContractMethod | None: ...

    def resolve_contract_method(
        self,
        name: str,
        source_location: SourceLocation | None = None,
        *,
        start: ContractReference | None = None,
        skip: bool = False,
    ) -> ContractMethod | None:
        """Resolve `name` through the MRO, optionally starting at (or just after) `start`.

        Returns None if no matching method is found.
        """
        # the contract itself is searched first, then its declared resolution order
        mro = [self.id, *self.method_resolution_order]
        if start:
            try:
                curr_idx = mro.index(start)
            except ValueError:
                raise CodeError(
                    "call to base method outside current hierarchy", source_location
                ) from None
            mro = mro[curr_idx:]
        if skip:
            # super()-style resolution: begin at the class *after* `start`
            mro = mro[1:]
        for cref in mro:
            for method in self.methods:
                if method.member_name == name and method.cref == cref:
                    return method
        return None
class ARC4CreateOption(enum.Enum):
    """Whether an ARC-4 method may/must/must-not be called on application creation."""

    allow = enum.auto()
    require = enum.auto()
    disallow = enum.auto()


@attrs.frozen(kw_only=True)
class ARC4BareMethodConfig:
    """Routing configuration for an ARC-4 bare (no-selector) method."""

    source_location: SourceLocation
    allowed_completion_types: Sequence[OnCompletionAction] = attrs.field(
        default=(OnCompletionAction.NoOp,),
        converter=tuple[OnCompletionAction, ...],
        validator=attrs.validators.min_len(1),
    )
    create: ARC4CreateOption = ARC4CreateOption.disallow


@attrs.frozen(kw_only=True)
class ABIMethodArgConstantDefault:
    """Default an ABI method argument to a constant expression."""

    value: Expression


@attrs.frozen(kw_only=True)
class ABIMethodArgMemberDefault:
    """Default an ABI method argument from another contract member (by name)."""

    name: str


ABIMethodArgDefault = ABIMethodArgMemberDefault | ABIMethodArgConstantDefault
@attrs.frozen(kw_only=True)
class ARC4ABIMethodConfig:
    """Routing configuration for an ARC-4 ABI (selector-routed) method."""

    source_location: SourceLocation
    allowed_completion_types: Sequence[OnCompletionAction] = attrs.field(
        default=(OnCompletionAction.NoOp,),
        # fix: use the variadic form tuple[X, ...] - tuple[X] annotates a 1-tuple,
        # and this now matches the converter on ARC4BareMethodConfig (runtime
        # behavior is unchanged, both call tuple())
        converter=tuple[OnCompletionAction, ...],
        validator=attrs.validators.min_len(1),
    )
    create: ARC4CreateOption = ARC4CreateOption.disallow
    name: str  # the ARC-4 method name used in the selector
    readonly: bool = False
    default_args: immutabledict[str, ABIMethodArgDefault] = immutabledict()
    """Mapping is from parameter -> source"""


ARC4MethodConfig = ARC4BareMethodConfig | ARC4ABIMethodConfig
|
algorandfoundation/puya
|
src/puya/awst/nodes.py
|
Python
|
NOASSERTION
| 57,974 |
import decimal
import enum
import functools
import typing
from collections.abc import Mapping
import cattrs
from cattrs import ClassValidationError, IterableValidationError, transform_error
from cattrs.preconf.json import make_converter
from cattrs.strategies import configure_tagged_union, include_subclasses
from immutabledict import immutabledict
from puya import log
from puya.awst import nodes, txn_fields, wtypes
from puya.errors import PuyaError
logger = log.get_logger(__name__)
def _unstructure_optional_enum_literal(value: object) -> object:
    """cattrs unstructure hook for Literal types that contain enum members.

    None passes through unchanged; enum members are replaced by their value.
    Anything else is a programming error.
    """
    if value is None:
        return None
    if isinstance(value, enum.Enum):
        return value.value
    raise TypeError("expected enum value")
@functools.cache
def _get_converter() -> cattrs.preconf.json.JsonConverter:
    """Build (once) the cattrs JSON converter used for AWST (de)serialization."""
    converter = make_converter()

    # literals with optional enum
    converter.register_unstructure_hook_factory(
        cattrs.converters.is_literal_containing_enums, lambda _: _unstructure_optional_enum_literal
    )

    # TxnField and PuyaLibFunction as name
    # (the lambdas use only their own parameters, so no loop-variable late binding)
    for enum_type in (txn_fields.TxnField, nodes.PuyaLibFunction):
        converter.register_unstructure_hook(enum_type, lambda v: v.name)
        converter.register_structure_hook(enum_type, lambda v, t: t[v])

    # decimals as str
    converter.register_unstructure_hook(decimal.Decimal, str)
    converter.register_structure_hook(decimal.Decimal, lambda v, _: decimal.Decimal(v))

    # nodes.Switch has a mapping of Expression -> Block
    # which can't be serialized with that structure as a JSON object
    # need to convert into a list of pairs instead
    def is_switch_cases(typ: object) -> bool:
        if typing.get_origin(typ) is Mapping:
            args = typing.get_args(typ)
            return args == (nodes.Expression, nodes.Block)
        return False

    def unstructure_switch_cases(value: Mapping[nodes.Expression, nodes.Block]) -> object:
        return converter.unstructure(value.items(), list[tuple[nodes.Expression, nodes.Block]])

    def structure_switch_cases(value: object, _: type) -> object:
        items = converter.structure(value, list[tuple[nodes.Expression, nodes.Block]])
        return immutabledict(items)

    converter.register_unstructure_hook_func(is_switch_cases, unstructure_switch_cases)
    converter.register_structure_hook_func(is_switch_cases, structure_switch_cases)

    # register AWST types and unions, order is important to ensure correct configuration
    union_strategy = configure_tagged_union
    include_subclasses(wtypes.WType, converter, union_strategy=union_strategy)
    union_strategy(wtypes.WStructType | wtypes.ARC4Struct, converter)
    union_strategy(nodes.SubroutineTarget, converter)
    include_subclasses(nodes.Expression, converter, union_strategy=union_strategy)
    union_strategy(nodes.Lvalue, converter)
    union_strategy(nodes.StorageExpression, converter)
    union_strategy(nodes.CompileTimeConstantExpression, converter)
    include_subclasses(nodes.Statement, converter, union_strategy=union_strategy)
    include_subclasses(nodes.RootNode, converter, union_strategy=union_strategy)
    return converter
def awst_to_json(awst: nodes.AWST) -> str:
    """Serialize an AWST module to a pretty-printed JSON string."""
    converter = _get_converter()
    return converter.dumps(awst, indent=4)
def _find_and_log_puya_errors(err: ClassValidationError | IterableValidationError) -> None:
    """Recursively walk a cattrs validation error tree, logging any PuyaError leaves."""
    for ex in err.exceptions:
        if isinstance(ex, PuyaError):
            logger.error(ex.msg, location=ex.location)
        elif isinstance(ex, (ClassValidationError, IterableValidationError)):
            # nested group of errors - descend
            _find_and_log_puya_errors(ex)
def awst_from_json(json: str) -> nodes.AWST:
    """Deserialize an AWST module from JSON.

    Raises ValueError on failure, after logging any embedded PuyaErrors and the
    full cattrs error detail at debug level.
    """
    try:
        return _get_converter().loads(json, nodes.AWST)  # type: ignore[type-abstract]
    except (ClassValidationError, IterableValidationError) as err:
        _find_and_log_puya_errors(err)
        logger.debug("Deserialization error: \n" + "\n".join(transform_error(err)))
        raise ValueError(
            "Error during deserialization of AWST json. See debug log for details"
        ) from err
|
algorandfoundation/puya
|
src/puya/awst/serialize.py
|
Python
|
NOASSERTION
| 4,052 |
import base64
import typing
from collections.abc import Iterable, Iterator, Mapping
from puya.awst import nodes, wtypes
from puya.awst.visitors import (
ContractMemberVisitor,
ExpressionVisitor,
RootNodeVisitor,
StatementVisitor,
)
from puya.errors import InternalError
class ToCodeVisitor(
RootNodeVisitor[list[str]],
StatementVisitor[list[str]],
ExpressionVisitor[str],
ContractMemberVisitor[list[str]],
):
    def __init__(self) -> None:
        # tracks SingleEvaluation nodes already rendered, mapping each to a stable index
        self._seen_single_evals = dict[nodes.SingleEvaluation, int]()
    def _single_eval_index(self, tmp: nodes.SingleEvaluation) -> int:
        # assign the next sequential index on first sight, reuse it thereafter
        return self._seen_single_evals.setdefault(tmp, len(self._seen_single_evals))
@typing.override
def visit_array_concat(self, expr: nodes.ArrayConcat) -> str:
left = expr.left.accept(self)
right = expr.right.accept(self)
return f"{left} + {right}"
@typing.override
def visit_array_extend(self, expr: nodes.ArrayExtend) -> str:
base = expr.base.accept(self)
value = expr.other.accept(self)
return f"{base}.extend({value})"
@typing.override
def visit_array_pop(self, expr: nodes.ArrayPop) -> str:
base = expr.base.accept(self)
return f"{base}.pop()"
@typing.override
def visit_copy(self, expr: nodes.Copy) -> str:
value = expr.value.accept(self)
return f"{value}.copy()"
@typing.override
def visit_reversed(self, expr: nodes.Reversed) -> str:
sequence = expr.expr.accept(self)
return f"reversed({sequence})"
def visit_module(self, module: nodes.AWST) -> str:
result = list[str]()
for stmt in module:
lines = stmt.accept(self)
result.extend(lines)
return "\n".join(result).strip()
@typing.override
def visit_arc4_decode(self, expr: nodes.ARC4Decode) -> str:
return f"arc4_decode({expr.value.accept(self)}, {expr.wtype})"
@typing.override
def visit_arc4_encode(self, expr: nodes.ARC4Encode) -> str:
return f"arc4_encode({expr.value.accept(self)}, {expr.wtype})"
@typing.override
def visit_reinterpret_cast(self, expr: nodes.ReinterpretCast) -> str:
return f"reinterpret_cast<{expr.wtype}>({expr.expr.accept(self)})"
@typing.override
def visit_single_evaluation(self, expr: nodes.SingleEvaluation) -> str:
# only render source the first time it is encountered
source = "" if expr in self._seen_single_evals else f", source={expr.source.accept(self)}"
eval_id = self._single_eval_index(expr)
return "".join(
(
f"SINGLE_EVAL(id={eval_id}",
source,
")",
)
)
@typing.override
def visit_app_state_expression(self, expr: nodes.AppStateExpression) -> str:
return f"GlobalState[{expr.key.accept(self)}]"
@typing.override
def visit_app_account_state_expression(self, expr: nodes.AppAccountStateExpression) -> str:
return f"LocalState[{expr.key.accept(self)}, {expr.account.accept(self)}]"
@typing.override
def visit_box_value_expression(self, expr: nodes.BoxValueExpression) -> str:
return f"Box[{expr.key.accept(self)}]"
@typing.override
def visit_new_array(self, expr: nodes.NewArray) -> str:
args = ", ".join(a.accept(self) for a in expr.values)
return f"new {expr.wtype}({args})"
@typing.override
def visit_new_struct(self, expr: nodes.NewStruct) -> str:
args = ", ".join([f"{name}=" + value.accept(self) for name, value in expr.values.items()])
return f"new {expr.wtype}({args})"
@typing.override
def visit_enumeration(self, expr: nodes.Enumeration) -> str:
sequence = expr.expr.accept(self)
return f"enumerate({sequence})"
@typing.override
def visit_bytes_comparison_expression(self, expr: nodes.BytesComparisonExpression) -> str:
return f"{expr.lhs.accept(self)} {expr.operator} {expr.rhs.accept(self)}"
@typing.override
def visit_subroutine_call_expression(self, expr: nodes.SubroutineCallExpression) -> str:
match expr.target:
case nodes.InstanceMethodTarget(member_name=member_name):
target = f"this::{member_name}"
case nodes.InstanceSuperMethodTarget(member_name=member_name):
target = f"super::{member_name}"
case nodes.ContractMethodTarget(cref=cref, member_name=member_name):
target = "::".join((cref, member_name))
case nodes.SubroutineID(target):
pass
case unhandled:
typing.assert_never(unhandled)
args = ", ".join(
[(f"{a.name}=" if a.name else "") + a.value.accept(self) for a in expr.args]
)
return f"{target}({args})"
@typing.override
def visit_bytes_binary_operation(self, expr: nodes.BytesBinaryOperation) -> str:
return f"{expr.left.accept(self)} {expr.op.value} {expr.right.accept(self)}"
@typing.override
def visit_boolean_binary_operation(self, expr: nodes.BooleanBinaryOperation) -> str:
return f"{expr.left.accept(self)} {expr.op.value} {expr.right.accept(self)}"
@typing.override
def visit_not_expression(self, expr: nodes.Not) -> str:
return f"!({expr.expr.accept(self)})"
@typing.override
def visit_bytes_augmented_assignment(
self, statement: nodes.BytesAugmentedAssignment
) -> list[str]:
return [
f"{statement.target.accept(self)} {statement.op.value}= {statement.value.accept(self)}"
]
@typing.override
def visit_range(self, range_node: nodes.Range) -> str:
range_args = ", ".join(
[r.accept(self) for r in [range_node.start, range_node.stop, range_node.step]]
)
return f"range({range_args})"
@typing.override
def visit_for_in_loop(self, statement: nodes.ForInLoop) -> list[str]:
sequence = statement.sequence.accept(self)
loop_body = statement.loop_body.accept(self)
item_vars = statement.items.accept(self)
return [
f"for {item_vars} in {sequence} {{",
*_indent(loop_body),
"}",
]
@typing.override
def visit_subroutine(self, statement: nodes.Subroutine) -> list[str]:
args = ", ".join([f"{a.name}: {a.wtype}" for a in statement.args])
body = statement.body.accept(self)
return [
"",
f"subroutine {statement.name}({args}): {statement.return_type}",
"{",
*_indent(body),
"}",
]
@typing.override
def visit_app_storage_definition(self, defn: nodes.AppStorageDefinition) -> list[str]:
raise InternalError("app storage is converted as part of class")
@typing.override
def visit_contract(self, c: nodes.Contract) -> list[str]:
body = [
"method_resolution_order: (",
*_indent(f"{cref}," for cref in c.method_resolution_order),
")",
]
if c.app_state:
state_by_kind = dict[nodes.AppStorageKind, list[nodes.AppStorageDefinition]]()
for state in c.app_state:
state_by_kind.setdefault(state.kind, []).append(state)
for kind_name, kind in (
("globals", nodes.AppStorageKind.app_global),
("locals", nodes.AppStorageKind.account_local),
("boxes", nodes.AppStorageKind.box),
):
state_of_kind = state_by_kind.pop(kind, [])
if state_of_kind:
body.extend(
[
f"{kind_name} {{",
*_indent(
(
f"[{s.key.accept(self)}]: {s.key_wtype} => {s.storage_wtype}"
if s.key_wtype is not None
else f"[{s.key.accept(self)}]: {s.storage_wtype}"
)
for s in state_of_kind
),
"}",
]
)
if state_by_kind:
raise InternalError(
f"Unhandled app state kinds: {', '.join(map(str, state_by_kind.keys()))}",
c.source_location,
)
if c.reserved_scratch_space:
body.extend(
[
"reserved_scratch_space {",
*_indent([", ".join(_collapse_sequential_ranges(c.reserved_scratch_space))]),
"}",
]
)
for sub in c.all_methods:
lines = sub.accept(self)
body.extend(lines)
if body and not body[0].strip():
body = body[1:]
header = ["contract", c.name]
return [
"",
" ".join(header),
"{",
*_indent(body),
"}",
]
@typing.override
def visit_logic_signature(self, statement: nodes.LogicSignature) -> list[str]:
body = statement.program.body.accept(self)
return [
"",
f"logicsig {statement.id}",
"{",
*_indent(body),
"}",
]
@typing.override
def visit_contract_method(self, statement: nodes.ContractMethod) -> list[str]:
body = statement.body.accept(self)
args = ", ".join([f"{a.name}: {a.wtype}" for a in statement.args])
match statement.arc4_method_config:
case None:
deco = "subroutine"
case nodes.ARC4BareMethodConfig():
deco = "baremethod"
case nodes.ARC4ABIMethodConfig(name=config_name):
if statement.member_name != config_name:
deco = f"abimethod[name_override={config_name}]"
else:
deco = "abimethod"
case other:
typing.assert_never(other)
return [
"",
f"{deco} {statement.full_name}({args}): {statement.return_type}",
"{",
*_indent(body),
"}",
]
@typing.override
def visit_assignment_expression(self, expr: nodes.AssignmentExpression) -> str:
return f"{expr.target.accept(self)}: {expr.target.wtype} := {expr.value.accept(self)}"
@typing.override
def visit_assignment_statement(self, stmt: nodes.AssignmentStatement) -> list[str]:
return [f"{stmt.target.accept(self)}: {stmt.target.wtype} = {stmt.value.accept(self)}"]
@typing.override
def visit_uint64_binary_operation(self, expr: nodes.UInt64BinaryOperation) -> str:
return f"{expr.left.accept(self)} {expr.op.value} {expr.right.accept(self)}"
@typing.override
def visit_biguint_binary_operation(self, expr: nodes.BigUIntBinaryOperation) -> str:
return f"{expr.left.accept(self)} b{expr.op.value} {expr.right.accept(self)}"
@typing.override
def visit_uint64_unary_operation(self, expr: nodes.UInt64UnaryOperation) -> str:
return f"{expr.op.value}({expr.expr.accept(self)})"
@typing.override
def visit_bytes_unary_operation(self, expr: nodes.BytesUnaryOperation) -> str:
return f"b{expr.op.value}({expr.expr.accept(self)})"
@typing.override
def visit_integer_constant(self, expr: nodes.IntegerConstant) -> str:
if expr.teal_alias:
return expr.teal_alias
match expr.wtype:
case wtypes.uint64_wtype:
suffix = "u"
case wtypes.biguint_wtype:
suffix = "n"
case wtypes.ARC4UIntN(n=n):
suffix = f"_arc4u{n}"
case _:
raise InternalError(
f"Numeric type not implemented: {expr.wtype}", expr.source_location
)
return f"{expr.value}{suffix}"
@typing.override
def visit_decimal_constant(self, expr: nodes.DecimalConstant) -> str:
d = str(expr.value)
if expr.wtype.n <= 64:
suffix = f"arc4u{expr.wtype.n}x{expr.wtype.m}"
else:
suffix = f"arc4n{expr.wtype.n}x{expr.wtype.m}"
return f"{d}{suffix}"
@typing.override
def visit_bool_constant(self, expr: nodes.BoolConstant) -> str:
return "true" if expr.value else "false"
@typing.override
def visit_bytes_constant(self, expr: nodes.BytesConstant) -> str:
match expr.encoding:
case nodes.BytesEncoding.utf8:
return _bytes_str(expr.value)
case nodes.BytesEncoding.base32:
return f'b32<"{base64.b32encode(expr.value).decode("ascii")}">'
case nodes.BytesEncoding.base64:
return f'b64<"{base64.b64encode(expr.value).decode("ascii")}">'
case nodes.BytesEncoding.base16 | nodes.BytesEncoding.unknown:
return f'hex<"{expr.value.hex().upper()}">'
@typing.override
def visit_string_constant(self, expr: nodes.StringConstant) -> str:
return repr(expr.value)
@typing.override
def visit_void_constant(self, expr: nodes.VoidConstant) -> str:
return "void"
@typing.override
def visit_method_constant(self, expr: nodes.MethodConstant) -> str:
return f'Method("{expr.value}")'
@typing.override
def visit_address_constant(self, expr: nodes.AddressConstant) -> str:
return f'Address("{expr.value}")'
@typing.override
def visit_compiled_contract(self, expr: nodes.CompiledContract) -> str:
template_vars_fragment = self._template_vars_fragment(expr.prefix, expr.template_variables)
overrides = ", ".join(
f"{k.name}={v.accept(self)}" for k, v in expr.allocation_overrides.items()
)
return f"compiled_contract({expr.contract},{overrides},{template_vars_fragment})"
@typing.override
def visit_compiled_logicsig(self, expr: nodes.CompiledLogicSig) -> str:
template_vars_fragment = self._template_vars_fragment(expr.prefix, expr.template_variables)
return f"compiled_logicsig({expr.logic_sig!r}{template_vars_fragment})"
def _template_vars_fragment(
self, prefix: str | None, variables: Mapping[str, nodes.Expression]
) -> str:
variables_str = ", ".join(f"{k!r}: {v.accept(self)}" for k, v in variables.items())
return f", {prefix=!r}, variables={{{variables_str}}}"
@typing.override
def visit_conditional_expression(self, expr: nodes.ConditionalExpression) -> str:
condition = expr.condition.accept(self)
true = expr.true_expr.accept(self)
false = expr.false_expr.accept(self)
return f"({condition}) ? ({true}) : ({false})"
@typing.override
def visit_numeric_comparison_expression(self, expr: nodes.NumericComparisonExpression) -> str:
return f"{expr.lhs.accept(self)} {expr.operator.value} {expr.rhs.accept(self)}"
@typing.override
def visit_var_expression(self, expr: nodes.VarExpression) -> str:
return expr.name
@typing.override
def visit_checked_maybe(self, expr: nodes.CheckedMaybe) -> str:
return f"checked_maybe({expr.expr.accept(self)})"
@typing.override
def visit_intrinsic_call(self, expr: nodes.IntrinsicCall) -> str:
result = expr.op_code
if expr.immediates:
result += "<" + ", ".join([str(immediate) for immediate in expr.immediates]) + ">"
result += "("
if expr.stack_args:
result += ", ".join([stack_arg.accept(self) for stack_arg in expr.stack_args])
result += ")"
return result
@typing.override
def visit_puya_lib_call(self, expr: nodes.PuyaLibCall) -> str:
result = expr.func.value.id
result += "("
if expr.args:
result += ", ".join(
[(f"{a.name}=" if a.name else "") + a.value.accept(self) for a in expr.args]
)
result += ")"
return result
@typing.override
def visit_group_transaction_reference(self, ref: nodes.GroupTransactionReference) -> str:
if ref.wtype.transaction_type is None:
type_ = "any"
else:
type_ = ref.wtype.transaction_type.name
return f"group_transaction(index={ref.index.accept(self)}, type={type_})"
@typing.override
def visit_create_inner_transaction(self, expr: nodes.CreateInnerTransaction) -> str:
fields = []
for field, value in expr.fields.items():
fields.append(f"{field.immediate}={value.accept(self)}")
return f"create_inner_transaction({', '.join(fields)})"
@typing.override
def visit_update_inner_transaction(self, expr: nodes.UpdateInnerTransaction) -> str:
fields = []
for field, value in expr.fields.items():
fields.append(f"{field.immediate}={value.accept(self)}")
return f"update_inner_transaction({expr.itxn.accept(self)},{', '.join(fields)})"
@typing.override
def visit_submit_inner_transaction(self, call: nodes.SubmitInnerTransaction) -> str:
itxns = f'{", ".join(itxn.accept(self) for itxn in call.itxns)}'
return f"submit_txn({itxns})"
@typing.override
def visit_inner_transaction_field(self, itxn_field: nodes.InnerTransactionField) -> str:
txn = itxn_field.itxn.accept(self)
result = f"{txn}.{itxn_field.field.immediate}"
if itxn_field.array_index is not None:
index = itxn_field.array_index.accept(self)
result = f"{result}[{index}]"
return result
@typing.override
def visit_tuple_expression(self, expr: nodes.TupleExpression) -> str:
items = ", ".join([item.accept(self) for item in expr.items])
return f"({items})"
@typing.override
def visit_tuple_item_expression(self, expr: nodes.TupleItemExpression) -> str:
base = expr.base.accept(self)
return f"{base}[{expr.index}]"
@typing.override
def visit_field_expression(self, expr: nodes.FieldExpression) -> str:
base = expr.base.accept(self)
return f"{base}.{expr.name}"
@typing.override
def visit_index_expression(self, expr: nodes.IndexExpression) -> str:
return f"{expr.base.accept(self)}[{expr.index.accept(self)}]"
@typing.override
def visit_slice_expression(self, expr: nodes.SliceExpression) -> str:
start = expr.begin_index.accept(self) if expr.begin_index else ""
stop = expr.end_index.accept(self) if expr.end_index else ""
return f"{expr.base.accept(self)}[{start}:{stop}]"
@typing.override
def visit_intersection_slice_expression(self, expr: nodes.IntersectionSliceExpression) -> str:
start = (
expr.begin_index.accept(self)
if isinstance(expr.begin_index, nodes.Expression)
else (expr.begin_index if expr.begin_index is not None else "")
)
stop = (
expr.end_index.accept(self)
if isinstance(expr.end_index, nodes.Expression)
else (expr.end_index if expr.end_index is not None else "")
)
return f"{expr.base.accept(self)}[{start}:{stop}]"
@typing.override
def visit_block(self, statement: nodes.Block) -> list[str]:
statements = [line for statement in statement.body for line in statement.accept(self)]
if statement.label:
return [f"{statement.label}:", *statements]
return statements
@typing.override
def visit_goto(self, statement: nodes.Goto) -> list[str]:
return [f"goto {statement.target}"]
@typing.override
def visit_if_else(self, statement: nodes.IfElse) -> list[str]:
if_branch = statement.if_branch.accept(self)
if_block = [
f"if ({statement.condition.accept(self)}) {{",
*_indent(if_branch),
]
if statement.else_branch is not None:
else_branch = statement.else_branch.accept(self)
else_block = ["} else {", *_indent(else_branch)]
else:
else_block = []
return [*if_block, *else_block, "}"]
@typing.override
def visit_switch(self, statement: nodes.Switch) -> list[str]:
match_block = [f"switch ({statement.value.accept(self)}) {{"]
for case_value, case_block in statement.cases.items():
value = case_value.accept(self)
block = case_block.accept(self)
match_block.extend(
_indent(
[
f"case {value}: {{",
*_indent(block),
"}",
]
)
)
if statement.default_case:
default_block = statement.default_case.accept(self)
match_block.extend(
_indent(
[
"case _: {",
*_indent(default_block),
"}",
]
)
)
match_block.append("}")
return match_block
@typing.override
def visit_while_loop(self, statement: nodes.WhileLoop) -> list[str]:
loop_body = statement.loop_body.accept(self)
return [
f"while ({statement.condition.accept(self)}) {{",
*_indent(loop_body),
"}",
]
@typing.override
def visit_loop_exit(self, _statement: nodes.LoopExit) -> list[str]:
return ["break"]
@typing.override
def visit_return_statement(self, statement: nodes.ReturnStatement) -> list[str]:
if not statement.value:
return ["return"]
return [f"return {statement.value.accept(self)}"]
@typing.override
def visit_assert_expression(self, statement: nodes.AssertExpression) -> str:
error_message = "" if statement.error_message is None else f'"{statement.error_message}"'
if not statement.condition:
result = "err("
if error_message:
result += error_message
result += ")"
else:
result = f"assert({statement.condition.accept(self)}"
if error_message:
result += f", comment={error_message}"
result += ")"
return result
@typing.override
def visit_loop_continue(self, _statement: nodes.LoopContinue) -> list[str]:
return ["continue"]
@typing.override
def visit_expression_statement(self, statement: nodes.ExpressionStatement) -> list[str]:
return [
statement.expr.accept(self),
]
@typing.override
def visit_uint64_augmented_assignment(
self, statement: nodes.UInt64AugmentedAssignment
) -> list[str]:
return [
f"{statement.target.accept(self)} {statement.op.value}= {statement.value.accept(self)}"
]
@typing.override
def visit_biguint_augmented_assignment(
self, statement: nodes.BigUIntAugmentedAssignment
) -> list[str]:
return [
f"{statement.target.accept(self)} {statement.op.value}= {statement.value.accept(self)}"
]
@typing.override
def visit_state_get_ex(self, expr: nodes.StateGetEx) -> str:
return f"STATE_GET_EX({expr.field.accept(self)})"
@typing.override
def visit_state_delete(self, statement: nodes.StateDelete) -> str:
return f"STATE_DELETE({statement.field.accept(self)})"
@typing.override
def visit_state_get(self, expr: nodes.StateGet) -> str:
return f"STATE_GET({expr.field.accept(self)}, default={expr.default.accept(self)})"
@typing.override
def visit_state_exists(self, expr: nodes.StateExists) -> str:
return f"STATE_EXISTS({expr.field.accept(self)})"
@typing.override
def visit_template_var(self, expr: nodes.TemplateVar) -> str:
return f"TemplateVar[{expr.wtype}]({expr.name})"
@typing.override
def visit_biguint_postfix_unary_operation(
self, expr: nodes.BigUIntPostfixUnaryOperation
) -> str:
return f"{expr.target.accept(self)}{expr.op}"
@typing.override
def visit_uint64_postfix_unary_operation(self, expr: nodes.UInt64PostfixUnaryOperation) -> str:
return f"{expr.target.accept(self)}{expr.op}"
@typing.override
def visit_arc4_router(self, expr: nodes.ARC4Router) -> str:
return "arc4_router()"
@typing.override
def visit_emit(self, expr: nodes.Emit) -> str:
return f"emit({expr.signature!r}, {expr.value.accept(self)})"
def _indent(lines: Iterable[str], indent_size: str = " ") -> Iterator[str]:
yield from (f"{indent_size}{line}" for line in lines)
def _bytes_str(b: bytes) -> str:
return repr(b)[1:]
def _collapse_sequential_ranges(nums: Iterable[int]) -> Iterable[str]:
ranges = list[tuple[int, int]]()
for num in sorted(nums):
if ranges and num == ranges[-1][1] + 1:
ranges[-1] = (ranges[-1][0], num)
else:
ranges.append((num, num))
for start, stop in ranges:
if start == stop:
yield str(start)
else:
yield f"{start}..{stop}"
|
algorandfoundation/puya
|
src/puya/awst/to_code_visitor.py
|
Python
|
NOASSERTION
| 25,304 |
# ruff: noqa: PIE796
import enum
import typing
import attrs
from puya.avm import AVMType
from puya.awst import wtypes
__all__ = [
"TxnField",
]
@attrs.frozen(eq=False, hash=False)
class _TxnFieldData:
    # Declarative metadata record used as the value of each TxnField enum member.
    # eq/hash are identity-based so members with identical data remain distinct
    # enum values rather than being aliased together.
    wtype: wtypes.WType
    # number of slots for array fields (e.g. ApplicationArgs); must be >= 1
    num_values: int = attrs.field(default=1, validator=attrs.validators.ge(1), kw_only=True)
    # whether the field may be set when constructing an inner transaction
    is_inner_param: bool = attrs.field(default=True, kw_only=True)
@enum.unique
class TxnField(enum.Enum):
    """Enumeration of AVM transaction fields.

    Each member's value is a _TxnFieldData record; __init__ unpacks that record
    into plain attributes so e.g. TxnField.Fee.wtype can be read directly.
    """
    def __init__(self, data: _TxnFieldData):
        # _name_ is set by the EnumType metaclass during construction,
        # and refers to the class member name
        self.immediate: typing.Final = self._name_
        # every txn field must map to a scalar AVM stack type
        assert data.wtype.scalar_type is not None
        self.avm_type: typing.Final = data.wtype.scalar_type
        typing.assert_type(self.avm_type, typing.Literal[AVMType.uint64, AVMType.bytes])
        self.wtype: typing.Final = data.wtype
        self.num_values: typing.Final = data.num_values
        self.is_inner_param: typing.Final = data.is_inner_param
    @property
    def is_array(self) -> bool:
        # array fields (num_values > 1) are accessed with an index immediate
        return self.num_values > 1
    def valid_argument_type(self, wtype: wtypes.WType) -> bool:
        """Return True if wtype can be assigned to this field.

        Scalar fields accept any wtype whose scalar AVM type matches; array
        fields accept a tuple whose items all match the field's AVM type.
        """
        if not self.is_array:
            return wtype.scalar_type == self.avm_type
        else:
            return isinstance(wtype, wtypes.WTuple) and all(
                item_wtype.scalar_type == self.avm_type for item_wtype in wtype.types
            )
    def __repr__(self) -> str:
        return (
            f"{type(self).__name__}("
            f"immediate={self.immediate!r},"
            f" wtype={self.wtype},"
            f" num_values={self.num_values!r},"
            f" is_inner_param={self.is_inner_param!r}"
            ")"
        )
    # members are grouped by the AVM version that introduced them
    Sender = _TxnFieldData(wtypes.account_wtype)
    Fee = _TxnFieldData(wtypes.uint64_wtype)
    FirstValid = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    FirstValidTime = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    LastValid = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    Note = _TxnFieldData(wtypes.bytes_wtype)
    Lease = _TxnFieldData(wtypes.bytes_wtype, is_inner_param=False)
    Receiver = _TxnFieldData(wtypes.account_wtype)
    Amount = _TxnFieldData(wtypes.uint64_wtype)
    CloseRemainderTo = _TxnFieldData(wtypes.account_wtype)
    VotePK = _TxnFieldData(wtypes.bytes_wtype)
    SelectionPK = _TxnFieldData(wtypes.bytes_wtype)
    VoteFirst = _TxnFieldData(wtypes.uint64_wtype)
    VoteLast = _TxnFieldData(wtypes.uint64_wtype)
    VoteKeyDilution = _TxnFieldData(wtypes.uint64_wtype)
    Type = _TxnFieldData(wtypes.bytes_wtype)
    TypeEnum = _TxnFieldData(wtypes.uint64_wtype)
    XferAsset = _TxnFieldData(wtypes.asset_wtype)
    AssetAmount = _TxnFieldData(wtypes.uint64_wtype)
    AssetSender = _TxnFieldData(wtypes.account_wtype)
    AssetReceiver = _TxnFieldData(wtypes.account_wtype)
    AssetCloseTo = _TxnFieldData(wtypes.account_wtype)
    GroupIndex = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    TxID = _TxnFieldData(wtypes.bytes_wtype, is_inner_param=False)
    # v2
    ApplicationID = _TxnFieldData(wtypes.application_wtype)
    OnCompletion = _TxnFieldData(wtypes.uint64_wtype)
    NumAppArgs = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    NumAccounts = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    ApprovalProgram = _TxnFieldData(wtypes.bytes_wtype)
    ClearStateProgram = _TxnFieldData(wtypes.bytes_wtype)
    RekeyTo = _TxnFieldData(wtypes.account_wtype)
    ConfigAsset = _TxnFieldData(wtypes.asset_wtype)
    ConfigAssetTotal = _TxnFieldData(wtypes.uint64_wtype)
    ConfigAssetDecimals = _TxnFieldData(wtypes.uint64_wtype)
    ConfigAssetDefaultFrozen = _TxnFieldData(wtypes.bool_wtype)
    ConfigAssetUnitName = _TxnFieldData(wtypes.bytes_wtype)
    ConfigAssetName = _TxnFieldData(wtypes.bytes_wtype)
    ConfigAssetURL = _TxnFieldData(wtypes.bytes_wtype)
    ConfigAssetMetadataHash = _TxnFieldData(wtypes.bytes_wtype)
    ConfigAssetManager = _TxnFieldData(wtypes.account_wtype)
    ConfigAssetReserve = _TxnFieldData(wtypes.account_wtype)
    ConfigAssetFreeze = _TxnFieldData(wtypes.account_wtype)
    ConfigAssetClawback = _TxnFieldData(wtypes.account_wtype)
    FreezeAsset = _TxnFieldData(wtypes.asset_wtype)
    FreezeAssetAccount = _TxnFieldData(wtypes.account_wtype)
    FreezeAssetFrozen = _TxnFieldData(wtypes.bool_wtype)
    # v3
    NumAssets = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    NumApplications = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    GlobalNumUint = _TxnFieldData(wtypes.uint64_wtype)
    GlobalNumByteSlice = _TxnFieldData(wtypes.uint64_wtype)
    LocalNumUint = _TxnFieldData(wtypes.uint64_wtype)
    LocalNumByteSlice = _TxnFieldData(wtypes.uint64_wtype)
    # v4
    ExtraProgramPages = _TxnFieldData(wtypes.uint64_wtype)
    # v5
    Nonparticipation = _TxnFieldData(wtypes.bool_wtype)
    NumLogs = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    CreatedAssetID = _TxnFieldData(wtypes.asset_wtype, is_inner_param=False)
    CreatedApplicationID = _TxnFieldData(wtypes.application_wtype, is_inner_param=False)
    # v6
    LastLog = _TxnFieldData(wtypes.bytes_wtype, is_inner_param=False)
    StateProofPK = _TxnFieldData(wtypes.bytes_wtype)
    # v7
    NumApprovalProgramPages = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    NumClearStateProgramPages = _TxnFieldData(wtypes.uint64_wtype, is_inner_param=False)
    # array fields
    # TODO: allow configuring as these are consensus values
    # v2
    ApplicationArgs = _TxnFieldData(wtypes.bytes_wtype, num_values=16)
    Accounts = _TxnFieldData(wtypes.account_wtype, num_values=4)
    # v3
    Assets = _TxnFieldData(wtypes.asset_wtype, num_values=8)
    Applications = _TxnFieldData(wtypes.application_wtype, num_values=8)
    # v5
    Logs = _TxnFieldData(wtypes.bytes_wtype, num_values=32, is_inner_param=False)
    # v7
    ApprovalProgramPages = _TxnFieldData(wtypes.bytes_wtype, num_values=4)
    ClearStateProgramPages = _TxnFieldData(wtypes.bytes_wtype, num_values=4)
|
algorandfoundation/puya
|
src/puya/awst/txn_fields.py
|
Python
|
NOASSERTION
| 6,085 |
algorandfoundation/puya
|
src/puya/awst/validation/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
from collections.abc import Iterator
import attrs
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.awst_traverser import AWSTTraverser
logger = log.get_logger(__name__)
class ARC4CopyValidator(AWSTTraverser):
    """Validates that mutable ARC4-encoded values are explicitly copied.

    Because mutable ARC4 values are reference-like, re-using one in a second
    location without .copy() would silently alias it; this traverser reports
    an error for each such usage.
    """
    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        # walks every root node of the module with a fresh validator instance
        validator = cls()
        for module_statement in module:
            module_statement.accept(validator)
    def __init__(self) -> None:
        super().__init__()
        # the Lvalue of the for-loop currently being entered (exempt from the
        # tuple-expression check, since loop targets are assigned per-iteration)
        self._for_items: awst_nodes.Lvalue | None = None
    # for nodes that can't modify the input don't need to check for copies unless an assignment
    # expression is being used
    def visit_submit_inner_transaction(self, call: awst_nodes.SubmitInnerTransaction) -> None:
        if _HasAssignmentVisitor.check(call):
            super().visit_submit_inner_transaction(call)
    def visit_intrinsic_call(self, call: awst_nodes.IntrinsicCall) -> None:
        if _HasAssignmentVisitor.check(call):
            super().visit_intrinsic_call(call)
    def visit_emit(self, emit: awst_nodes.Emit) -> None:
        if _HasAssignmentVisitor.check(emit):
            super().visit_emit(emit)
    def visit_assert_expression(self, expr: awst_nodes.AssertExpression) -> None:
        if expr.condition and _HasAssignmentVisitor.check(expr.condition):
            super().visit_assert_expression(expr)
    def visit_assignment_statement(self, statement: awst_nodes.AssignmentStatement) -> None:
        _check_assignment(statement.target, statement.value)
        statement.value.accept(self)
    def visit_tuple_expression(self, expr: awst_nodes.TupleExpression) -> None:
        super().visit_tuple_expression(expr)
        # for-loop item tuples are assignment targets, not value constructions
        if expr is not self._for_items:
            for item in expr.items:
                _check_for_arc4_copy(item, "being passed to a tuple expression")
    def visit_for_in_loop(self, statement: awst_nodes.ForInLoop) -> None:
        # statement.items is immediately checked before entering the for body
        # so don't need to worry about preserving _for_items through multiple loops
        self._for_items = statement.items
        super().visit_for_in_loop(statement)
        self._for_items = None
        # looping is essentially assigning so check sequence
        sequence = statement.sequence
        while isinstance(sequence, awst_nodes.Enumeration | awst_nodes.Reversed):
            sequence = sequence.expr
        if (  # mutable tuples cannot be iterated in a semantically correct way
            isinstance(sequence.wtype, wtypes.WTuple)
            and _is_referable_expression(sequence)
            and _is_arc4_mutable(sequence.wtype)
        ):
            logger.error(
                "tuple of mutable ARC4 values cannot be iterated",
                location=sequence.source_location,
            )
        elif (  # arrays of mutable types, must be modified and iterated by index
            isinstance(sequence.wtype, wtypes.ARC4Array)
            and _is_referable_expression(sequence)
            and _is_arc4_mutable(sequence.wtype.element_type)
        ):
            logger.error(
                "cannot directly iterate an ARC4 array of mutable objects,"
                " construct a for-loop over the indexes instead",
                location=sequence.source_location,
            )
    def visit_assignment_expression(self, expr: awst_nodes.AssignmentExpression) -> None:
        _check_assignment(expr.target, expr.value)
        expr.value.accept(self)
    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        for arg_ in expr.args:
            for arg in _expand_tuple_items(arg_.value):
                match arg:
                    case awst_nodes.VarExpression():
                        # Var expressions don't need copy as we implicitly return the latest value
                        # and update the var
                        continue
                    case awst_nodes.AppStateExpression() | awst_nodes.AppAccountStateExpression():
                        message = "being passed to a subroutine from state"
                    case _:
                        message = "being passed to a subroutine"
                _check_for_arc4_copy(arg, message)
    def visit_new_array(self, expr: awst_nodes.NewArray) -> None:
        super().visit_new_array(expr)
        if isinstance(expr.wtype, wtypes.ARC4Array):
            for v in expr.values:
                _check_for_arc4_copy(v, "being passed to an array constructor")
    def visit_new_struct(self, expr: awst_nodes.NewStruct) -> None:
        super().visit_new_struct(expr)
        if isinstance(expr.wtype, wtypes.ARC4Struct):
            for v in expr.values.values():
                _check_for_arc4_copy(v, "being passed to a struct constructor")
    def visit_arc4_encode(self, expr: awst_nodes.ARC4Encode) -> None:
        super().visit_arc4_encode(expr)
        for item in _expand_tuple_items(expr.value):
            _check_for_arc4_copy(item, "being passed to a constructor")
def _is_referable_expression(expr: awst_nodes.Expression) -> bool:
    """
    Returns True if expr represents something that can be referenced multiple times.
    """
    # direct references: variables, app/account state, boxes (and their maybe-reads)
    if isinstance(
        expr,
        awst_nodes.VarExpression
        | awst_nodes.AppStateExpression
        | awst_nodes.AppAccountStateExpression
        | awst_nodes.StateGet
        | awst_nodes.StateGetEx
        | awst_nodes.BoxValueExpression,
    ):
        return True
    # derived accesses are referable iff their base is
    if isinstance(
        expr,
        awst_nodes.IndexExpression
        | awst_nodes.TupleItemExpression
        | awst_nodes.FieldExpression,
    ):
        return _is_referable_expression(expr.base)
    return False
def _check_assignment(target: awst_nodes.Expression, value: awst_nodes.Expression) -> None:
    """Report an error if assigning value to target requires an explicit .copy()."""
    if not isinstance(target, awst_nodes.TupleExpression):
        # plain (non-destructuring) assignment: value itself must be copy-safe
        _check_for_arc4_copy(value, "being assigned to another variable")
        return
    # tuple destructuring of a referable value cannot express per-item copies
    if _is_referable_expression(value) and any(_is_arc4_mutable(i) for i in target.wtype.types):
        logger.error(
            "tuples containing a mutable reference to an ARC4-encoded value cannot be unpacked,"
            " use index access instead",
            location=value.source_location,
        )
def _check_for_arc4_copy(expr: awst_nodes.Expression, context_desc: str) -> None:
    """Log an error if expr is a referable, mutable ARC4 value used without .copy()."""
    if not _is_arc4_mutable(expr.wtype):
        return
    if not _is_referable_expression(expr):
        return
    logger.error(
        "mutable reference to ARC4-encoded value"
        f" must be copied using .copy() when {context_desc}",
        location=expr.source_location,
    )
def _expand_tuple_items(expr: awst_nodes.Expression) -> Iterator[awst_nodes.Expression]:
    """Yield expr itself, or - for (nested) tuple expressions - each leaf item."""
    if isinstance(expr, awst_nodes.TupleExpression):
        for element in expr.items:
            yield from _expand_tuple_items(element)
    else:
        yield expr
def _is_arc4_mutable(wtype: wtypes.WType) -> bool:
    """
    Returns True if expr represents an arc4 type that is mutable
    """
    if isinstance(wtype, wtypes.ARC4Type):
        return not wtype.immutable
    if isinstance(wtype, wtypes.WTuple):
        # a tuple is mutable if any of its element types is
        return any(_is_arc4_mutable(item) for item in wtype.types)
    return False
@attrs.define
class _HasAssignmentVisitor(AWSTTraverser):
    """Traverses an expression, recording whether any assignment occurs within it."""

    has_assignment: bool = False

    @classmethod
    def check(cls, expr: awst_nodes.Expression) -> bool:
        """Return True if expr contains an assignment expression anywhere inside."""
        probe = _HasAssignmentVisitor()
        expr.accept(probe)
        return probe.has_assignment

    def visit_assignment_expression(self, _: awst_nodes.AssignmentExpression) -> None:
        # any single assignment is enough - no need to short-circuit traversal
        self.has_assignment = True
|
algorandfoundation/puya
|
src/puya/awst/validation/arc4_copy.py
|
Python
|
NOASSERTION
| 7,770 |
import contextlib
import typing
from collections.abc import Iterator
from puya import log
from puya.awst import nodes as awst_nodes
from puya.awst.awst_traverser import AWSTTraverser
from puya.awst.nodes import (
ContractMethodTarget,
InstanceMethodTarget,
InstanceSuperMethodTarget,
SubroutineID,
)
logger = log.get_logger(__name__)
class BaseInvokerValidator(AWSTTraverser):
    """Validates that subroutine invocations are legal for their context:
    instance/contract method calls must occur inside a contract method, and a
    contract method call must target a class in the caller's hierarchy."""

    def __init__(self) -> None:
        # the contract currently being traversed, if any
        self._contract: awst_nodes.Contract | None = None

    @property
    def contract(self) -> awst_nodes.Contract | None:
        """The contract whose methods are currently being visited, or None at module level."""
        return self._contract

    @contextlib.contextmanager
    def _enter_contract(self, contract: awst_nodes.Contract) -> Iterator[None]:
        # contracts are not nested, so entering twice indicates a traversal bug
        assert self._contract is None
        self._contract = contract
        try:
            yield
        finally:
            self._contract = None

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        """Entry point: traverse every root node of *module* with a single validator."""
        validator = cls()
        for module_statement in module:
            module_statement.accept(validator)

    @typing.override
    def visit_contract(self, statement: awst_nodes.Contract) -> None:
        with self._enter_contract(statement):
            super().visit_contract(statement)

    @typing.override
    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        super().visit_subroutine_call_expression(expr)
        match expr.target:
            case SubroutineID():
                # always okay
                pass
            case InstanceMethodTarget() | InstanceSuperMethodTarget():
                # instance/super calls require a current contract
                if self.contract is None:
                    logger.error(
                        "invocation of instance method outside of a contract method",
                        location=expr.source_location,
                    )
            case ContractMethodTarget(cref=target_class):
                caller_class = self.contract
                if caller_class is None:
                    logger.error(
                        "invocation of contract method outside of a contract method",
                        location=expr.source_location,
                    )
                else:
                    caller_ref = caller_class.id
                    # target must be the caller itself or appear in its MRO
                    if (
                        target_class != caller_ref
                        and target_class not in caller_class.method_resolution_order
                    ):
                        logger.error(
                            "invocation of a contract method outside of current hierarchy",
                            location=expr.source_location,
                        )
            case invalid:
                typing.assert_never(invalid)
|
algorandfoundation/puya
|
src/puya/awst/validation/base_invoker.py
|
Python
|
NOASSERTION
| 2,702 |
from puya import log
from puya.awst import nodes as awst_nodes
from puya.awst.awst_traverser import AWSTTraverser
logger = log.get_logger(__name__)
class ImmutableValidator(AWSTTraverser):
    """Validates that immutable values are not mutated: assignment targets, and
    array pop/extend operations, must not act on an immutable base object."""

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        validator = cls()
        for module_statement in module:
            module_statement.accept(validator)

    def visit_assignment_expression(self, expr: awst_nodes.AssignmentExpression) -> None:
        super().visit_assignment_expression(expr)
        _validate_lvalue(expr.target)

    def visit_assignment_statement(self, statement: awst_nodes.AssignmentStatement) -> None:
        super().visit_assignment_statement(statement)
        _validate_lvalue(statement.target)

    def visit_array_pop(self, expr: awst_nodes.ArrayPop) -> None:
        super().visit_array_pop(expr)
        # popping mutates the array, so the base must be mutable
        if expr.base.wtype.immutable:
            logger.error(
                "cannot modify - object is immutable",
                location=expr.source_location,
            )

    def visit_array_extend(self, expr: awst_nodes.ArrayExtend) -> None:
        super().visit_array_extend(expr)
        # extending mutates the array, so the base must be mutable
        if expr.base.wtype.immutable:
            logger.error(
                "cannot modify - object is immutable",
                location=expr.source_location,
            )
def _validate_lvalue(lvalue: awst_nodes.Expression) -> None:
    """Recursively check an assignment target, rejecting writes into immutable objects."""
    if isinstance(lvalue, awst_nodes.TupleExpression):
        # validate each element of a tuple-unpacking target independently
        for element in lvalue.items:
            _validate_lvalue(element)
    elif (
        isinstance(lvalue, awst_nodes.FieldExpression | awst_nodes.IndexExpression)
        and lvalue.base.wtype.immutable
    ):
        logger.error(
            "expression is not valid as an assignment target - object is immutable",
            location=lvalue.source_location,
        )
|
algorandfoundation/puya
|
src/puya/awst/validation/immutable.py
|
Python
|
NOASSERTION
| 1,818 |
import contextlib
from collections.abc import Iterator
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.awst_traverser import AWSTTraverser
from puya.awst.wtypes import WInnerTransaction, WInnerTransactionFields
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
# Error / warning message templates for inner-transaction validation.
INNER_TRANSACTION_ASSIGNMENT_EXPRESSION_ERROR = (
    "inner transactions cannot be used in assignment expressions"
)
INNER_TRANSACTION_COPY_REQUIRED_ERROR = (
    "inner transaction fields must be copied using .copy() when assigning to a new local"
)
INNER_TRANSACTION_LOOP_MODIFICATION_ERROR = (
    "inner transaction fields cannot be modified after submission while in a loop"
)
# NOTE: the line break previously contributed both a trailing and a leading space,
# rendering a double space ("transaction  submissions") in the emitted error;
# fixed to a single space.
INNER_TRANSACTION_MAYBE_STALE_ERROR = (
    "inner transaction array field can not be reliably accessed due to other inner transaction"
    " submissions or subroutine calls, move array field access closer to {stale_var!r} definition"
)
INNER_TRANSACTION_MAYBE_STALE_WARNING = (
    "inner transaction {stale_var!r} potentially becomes stale here"
)
INNER_TRANSACTION_SOURCE_ERROR = "inner transactions can not be used like this"
INNER_TRANSACTION_SUBROUTINE_ERROR = (
    "inner transactions cannot be used as a subroutine argument or return value"
)
class InnerTransactionsValidator(AWSTTraverser):
    """
    Validates that expressions of type WInnerTransaction and WInnerTransactionFields are only
    used in the ways currently supported. Emits errors for:

    - reassigning expressions of type WInnerTransaction
    - reassigning expressions of type WInnerTransactionFields without a copy()
    - using WInnerTransaction or WInnerTransactionFields in a subroutine or
      assignment expression
    """

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        # a fresh validator per root node keeps state isolated between constructs
        for module_statement in module:
            validator = cls()
            module_statement.accept(validator)

    def visit_contract_method(self, statement: awst_nodes.ContractMethod) -> None:
        _check_method_types(statement)
        super().visit_contract_method(statement)

    def visit_subroutine(self, statement: awst_nodes.Subroutine) -> None:
        _check_method_types(statement)
        super().visit_subroutine(statement)

    def visit_assignment_statement(self, statement: awst_nodes.AssignmentStatement) -> None:
        self._check_inner_transaction_assignment(statement.value)
        self._check_inner_transaction_fields_assignment(statement.value)
        super().visit_assignment_statement(statement)

    def _check_inner_transaction_assignment(self, value: awst_nodes.Expression) -> None:
        # itxn-typed values may only originate from a small set of safe expression kinds
        if _is_itxn_wtype(value.wtype) and not _is_assignable_itxn_expr(value):
            logger.error(INNER_TRANSACTION_SOURCE_ERROR, location=value.source_location)

    def _check_inner_transaction_fields_assignment(self, value: awst_nodes.Expression) -> None:
        # case order matters: specific node kinds are checked before the generic fallback
        match value:
            case awst_nodes.CreateInnerTransaction() | awst_nodes.Copy():
                pass # ok
            case (
                awst_nodes.VarExpression(wtype=wtype)
                | awst_nodes.TupleItemExpression(wtype=wtype)
            ) if isinstance(wtype, WInnerTransactionFields):
                # re-binding itxn fields requires an explicit .copy()
                logger.error(
                    INNER_TRANSACTION_COPY_REQUIRED_ERROR,
                    location=value.source_location,
                )
            case awst_nodes.Expression(wtype=wtype) if (
                isinstance(wtype, WInnerTransactionFields)
            ):
                logger.error(
                    INNER_TRANSACTION_SOURCE_ERROR,
                    location=value.source_location,
                )

    def visit_assignment_expression(self, expr: awst_nodes.AssignmentExpression) -> None:
        super().visit_assignment_expression(expr)
        if _is_either_itxn_wtype(expr.wtype):
            logger.error(
                INNER_TRANSACTION_ASSIGNMENT_EXPRESSION_ERROR,
                location=expr.source_location,
            )

    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        super().visit_subroutine_call_expression(expr)
        for arg in expr.args:
            if _is_either_itxn_wtype(arg.value.wtype):
                logger.error(
                    INNER_TRANSACTION_SUBROUTINE_ERROR,
                    location=expr.source_location,
                )
class InnerTransactionUsedInALoopValidator(AWSTTraverser):
    """
    Validates that expressions of type WInnerTransactionFields are not modified after
    submission while in a loop.
    """

    def __init__(self) -> None:
        super().__init__()
        # stack of itxn var-name lists, one entry per currently-entered loop
        self._current_itxn_var_stack = list[list[str]]()

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        for module_statement in module:
            validator = cls()
            module_statement.accept(validator)

    @property
    def _current_loop_itxn_vars(self) -> list[str] | None:
        # itxn params submitted within the innermost loop, or None when not in a loop
        return self._current_itxn_var_stack[-1] if self._current_itxn_var_stack else None

    @contextlib.contextmanager
    def _enter_loop(self) -> Iterator[None]:
        # nested loops inherit the vars already submitted by enclosing loops
        self._current_itxn_var_stack.append(
            self._current_loop_itxn_vars.copy() if self._current_loop_itxn_vars else []
        )
        try:
            yield
        finally:
            self._current_itxn_var_stack.pop()

    def visit_for_in_loop(self, statement: awst_nodes.ForInLoop) -> None:
        with self._enter_loop():
            super().visit_for_in_loop(statement)

    def visit_while_loop(self, statement: awst_nodes.WhileLoop) -> None:
        with self._enter_loop():
            super().visit_while_loop(statement)

    def visit_assignment_statement(self, statement: awst_nodes.AssignmentStatement) -> None:
        value = statement.value
        match value:
            case awst_nodes.CreateInnerTransaction() | awst_nodes.Copy():
                # (re)initialising itxn fields counts as modification
                self._check_itxn_params_not_submitted_in_loop(statement.target)
        super().visit_assignment_statement(statement)

    def visit_submit_inner_transaction(self, call: awst_nodes.SubmitInnerTransaction) -> None:
        # record which local vars have been submitted within the current loop
        if self._current_loop_itxn_vars is not None:
            for itxn_params in call.itxns:
                match itxn_params:
                    case awst_nodes.TupleItemExpression(
                        base=awst_nodes.VarExpression(name=var_name)
                    ):
                        self._current_loop_itxn_vars.append(var_name)
                    case awst_nodes.VarExpression(name=var_name):
                        self._current_loop_itxn_vars.append(var_name)
        super().visit_submit_inner_transaction(call)

    def visit_update_inner_transaction(self, call: awst_nodes.UpdateInnerTransaction) -> None:
        super().visit_update_inner_transaction(call)
        self._check_itxn_params_not_submitted_in_loop(call.itxn)

    def _check_itxn_params_not_submitted_in_loop(self, expr: awst_nodes.Expression) -> None:
        # only an error for vars already submitted inside the current loop
        if (
            self._current_loop_itxn_vars
            and isinstance(expr, awst_nodes.VarExpression)
            and expr.name in self._current_loop_itxn_vars
        ):
            logger.error(
                INNER_TRANSACTION_LOOP_MODIFICATION_ERROR,
                location=expr.source_location,
            )
def _check_method_types(stmt: awst_nodes.Function) -> None:
    """Ensure a subroutine/method neither accepts nor returns inner-transaction types."""
    for param in stmt.args:
        if _is_either_itxn_wtype(param.wtype):
            logger.error(
                INNER_TRANSACTION_SUBROUTINE_ERROR,
                location=param.source_location,
            )
    if _is_either_itxn_wtype(stmt.return_type):
        logger.error(
            INNER_TRANSACTION_SUBROUTINE_ERROR,
            location=stmt.source_location,
        )
def _is_assignable_itxn_expr(expr: awst_nodes.Expression) -> bool:
    """Return True if *expr* may appear as the source of an assignment.

    Expressions without an inner-transaction type are always assignable; an
    itxn-typed expression is only assignable for a few known-safe node kinds.
    """
    if not _is_itxn_wtype(expr.wtype):
        return True
    # local variables and freshly-submitted transactions are safe to bind
    if isinstance(expr, awst_nodes.VarExpression | awst_nodes.SubmitInnerTransaction):
        return True
    # a tuple literal is assignable when every item is
    if isinstance(expr, awst_nodes.TupleExpression):
        return all(_is_assignable_itxn_expr(item) for item in expr.items)
    # a tuple item is assignable when its base is
    if isinstance(expr, awst_nodes.TupleItemExpression):
        return _is_assignable_itxn_expr(expr.base)
    if isinstance(expr, awst_nodes.SingleEvaluation):
        return _is_assignable_itxn_expr(expr.source)
    # tuple slices can be assigned if their base can be
    if isinstance(expr, awst_nodes.SliceExpression) and isinstance(expr.wtype, wtypes.WTuple):
        return _is_assignable_itxn_expr(expr.base)
    # anything else is not considered assignable
    return False
def _is_either_itxn_wtype(wtype: wtypes.WType) -> bool:
    """True if *wtype* involves an inner transaction or inner transaction fields."""
    return _is_itxn_fields_wtype(wtype) or _is_itxn_wtype(wtype)
def _is_itxn_wtype(wtype: wtypes.WType) -> bool:
    """True for WInnerTransaction, or a tuple containing one (recursively)."""
    if isinstance(wtype, WInnerTransaction):
        return True
    if isinstance(wtype, wtypes.WTuple):
        return any(_is_itxn_wtype(member) for member in wtype.types)
    return False
def _is_itxn_fields_wtype(wtype: wtypes.WType) -> bool:
    """True for WInnerTransactionFields, or a tuple containing one (recursively)."""
    if isinstance(wtype, WInnerTransactionFields):
        return True
    if isinstance(wtype, wtypes.WTuple):
        return any(_is_itxn_fields_wtype(member) for member in wtype.types)
    return False
class StaleInnerTransactionsValidator(AWSTTraverser):
    """Validates that inner transaction array field accesses are not stale, i.e.
    not performed after a later submission or subroutine call may have
    invalidated the result."""

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        for module_statement in module:
            validator = cls()
            module_statement.accept(validator)

    def __init__(self) -> None:
        super().__init__()
        # vars whose array fields may be stale, mapped to the location that staled them
        self._maybe_stale_itxn_vars = dict[str, SourceLocation]()
        # vars holding the most recent submission result(s)
        self._active_itxn_vars = list[str]()

    def visit_assignment_statement(self, stmt: awst_nodes.AssignmentStatement) -> None:
        super().visit_assignment_statement(stmt)
        match stmt.value:
            case awst_nodes.SubmitInnerTransaction():
                new_itxn_var_names = self._get_var_names(stmt.target)
                self._update_active_var_names(new_itxn_var_names)
            case awst_nodes.TupleExpression(items=items) if any(
                isinstance(item.wtype, WInnerTransaction) for item in items
            ):
                var_names = self._get_var_names(stmt.target)
                self._update_active_var_names(var_names)

    def visit_intrinsic_call(self, call: awst_nodes.IntrinsicCall) -> None:
        super().visit_intrinsic_call(call)
        # a raw itxn_submit stales any previously submitted results
        if call.op_code == "itxn_submit":
            self._update_maybe_stale_itxn_vars(call.source_location)

    def visit_submit_inner_transaction(self, call: awst_nodes.SubmitInnerTransaction) -> None:
        super().visit_submit_inner_transaction(call)
        self._update_maybe_stale_itxn_vars(call.source_location)

    def visit_inner_transaction_field(self, itxn_field: awst_nodes.InnerTransactionField) -> None:
        super().visit_inner_transaction_field(itxn_field)
        # extract the underlying var name; anything else cannot be tracked
        match itxn_field.itxn:
            case awst_nodes.VarExpression(name=stale_var):
                pass
            case awst_nodes.TupleItemExpression(base=awst_nodes.VarExpression(name=stale_var)):
                pass
            case _:
                return
        # only array fields are invalidated by later submissions / subroutine calls
        if itxn_field.field.is_array:
            try:
                stale_var_loc = self._maybe_stale_itxn_vars[stale_var]
            except KeyError:
                return
            logger.error(
                INNER_TRANSACTION_MAYBE_STALE_ERROR.format(stale_var=stale_var),
                location=itxn_field.itxn.source_location,
            )
            logger.warning(
                INNER_TRANSACTION_MAYBE_STALE_WARNING.format(stale_var=stale_var),
                location=stale_var_loc,
            )

    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        super().visit_subroutine_call_expression(expr)
        # a subroutine might itself submit transactions, staling current results
        self._update_maybe_stale_itxn_vars(expr.source_location)

    def _get_var_names(self, expr: awst_nodes.Expression) -> list[str]:
        # NOTE(review): for tuple targets this assumes every item is itself a var
        # expression; a non-var nested item would produce an empty inner list and
        # raise IndexError on [0] - presumably unreachable, verify upstream.
        match expr:
            case awst_nodes.VarExpression(name=name):
                return [name]
            case awst_nodes.TupleExpression(items=items):
                return [self._get_var_names(item)[0] for item in items]
            case _:
                return []

    def _update_active_var_names(self, var_names: list[str]) -> None:
        self._active_itxn_vars = var_names
        # if a var_name is reassigned then it is not considered stale
        for var_name in self._active_itxn_vars:
            with contextlib.suppress(KeyError):
                del self._maybe_stale_itxn_vars[var_name]

    def _update_maybe_stale_itxn_vars(self, staling_expr_loc: SourceLocation) -> None:
        # every currently-active var becomes potentially stale at this location
        for stale_var_name in self._active_itxn_vars:
            self._maybe_stale_itxn_vars[stale_var_name] = staling_expr_loc
        self._active_itxn_vars = []
|
algorandfoundation/puya
|
src/puya/awst/validation/inner_transactions.py
|
Python
|
NOASSERTION
| 13,153 |
from collections import defaultdict
from puya import log
from puya.awst import nodes as awst_nodes
from puya.awst.function_traverser import FunctionTraverser
logger = log.get_logger(__name__)
class LabelsValidator(FunctionTraverser):
    """Per-function label validation: every goto target must exist, and no label
    may be defined on more than one block.

    NOTE(review): visit_goto/visit_block do not call super() - assumes
    FunctionTraverser descends into children independently of these hooks;
    verify nested blocks are still traversed.
    """

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        for module_statement in module:
            if isinstance(module_statement, awst_nodes.Subroutine):
                cls(module_statement)
            elif isinstance(module_statement, awst_nodes.Contract):
                for method in module_statement.all_methods:
                    cls(method)

    def __init__(self, function: awst_nodes.Function) -> None:
        # first block seen for each label
        self._labelled_blocks = dict[awst_nodes.Label, awst_nodes.Block]()
        # all gotos, grouped by target label
        self._seen_targets = defaultdict[awst_nodes.Label, list[awst_nodes.Goto]](list)
        function.body.accept(self)
        # after traversal, report every goto whose target label was never defined
        for target, goto_list in self._seen_targets.items():
            if target not in self._labelled_blocks:
                for goto in goto_list:
                    logger.error(
                        f"label target {target} does not exist", location=goto.source_location
                    )

    def visit_goto(self, goto: awst_nodes.Goto) -> None:
        self._seen_targets[goto.target].append(goto)

    def visit_block(self, block: awst_nodes.Block) -> None:
        if block.label is not None:
            # setdefault keeps the first definition; a different block means a duplicate
            first_seen = self._labelled_blocks.setdefault(block.label, block)
            if block is not first_seen:
                logger.error(
                    f"block has duplicate label {block.label}", location=block.source_location
                )
                logger.info("label first seen here", location=first_seen.source_location)
|
algorandfoundation/puya
|
src/puya/awst/validation/labels.py
|
Python
|
NOASSERTION
| 1,741 |
from puya.awst import nodes as awst_nodes
from puya.awst.validation.arc4_copy import ARC4CopyValidator
from puya.awst.validation.base_invoker import BaseInvokerValidator
from puya.awst.validation.immutable import ImmutableValidator
from puya.awst.validation.inner_transactions import (
InnerTransactionsValidator,
InnerTransactionUsedInALoopValidator,
StaleInnerTransactionsValidator,
)
from puya.awst.validation.labels import LabelsValidator
from puya.awst.validation.scratch_slots import ScratchSlotReservationValidator
from puya.awst.validation.storage import StorageTypesValidator
def validate_awst(module: awst_nodes.AWST) -> None:
    """Run every AWST validation pass over *module*, in the established order."""
    validation_passes = (
        ARC4CopyValidator,
        ScratchSlotReservationValidator,
        InnerTransactionsValidator,
        InnerTransactionUsedInALoopValidator,
        StaleInnerTransactionsValidator,
        BaseInvokerValidator,
        StorageTypesValidator,
        LabelsValidator,
        ImmutableValidator,
    )
    for validation_pass in validation_passes:
        validation_pass.validate(module)
|
algorandfoundation/puya
|
src/puya/awst/validation/main.py
|
Python
|
NOASSERTION
| 1,064 |
from collections.abc import Collection, Iterator
from puya import log
from puya.awst import nodes as awst_nodes
from puya.awst.awst_traverser import AWSTTraverser
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
class ScratchSlotReservationValidator(AWSTTraverser):
    """Validates that any scratch slot accessed via load/store intrinsics with a
    constant slot number has been declared in the contract's reserved scratch space."""

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        for module_statement in module:
            validator = cls()
            module_statement.accept(validator)
            for slot, loc in validator.invalid_slot_usages:
                logger.error(
                    f"Scratch slot {slot} has not been reserved.",
                    location=loc,
                )

    def __init__(self) -> None:
        super().__init__()
        # populated when a contract node is visited
        self._reserved_slots: Collection[int] = ()
        self._used_slots = list[tuple[int, SourceLocation]]()

    @property
    def invalid_slot_usages(self) -> Iterator[tuple[int, SourceLocation]]:
        """Yield (slot, location) for each slot usage not covered by a reservation."""
        for slot, loc in self._used_slots:
            if slot not in self._reserved_slots:
                yield slot, loc

    def visit_contract(self, statement: awst_nodes.Contract) -> None:
        super().visit_contract(statement)
        # recorded after traversal; usages are checked later via invalid_slot_usages
        self._reserved_slots = statement.reserved_scratch_space

    def visit_intrinsic_call(self, call: awst_nodes.IntrinsicCall) -> None:
        super().visit_intrinsic_call(call)
        # stack-argument form (loads/stores) with a constant slot argument
        match call.op_code, call.stack_args:
            case "loads", [awst_nodes.IntegerConstant(value=slot, source_location=loc)]:
                self._used_slots.append((slot, loc))
            case "stores", [awst_nodes.IntegerConstant(value=slot, source_location=loc), *_]:
                self._used_slots.append((slot, loc))
        # immediate form (load/store)
        match call.op_code, call.immediates:
            case "load", [int(slot)]:
                self._used_slots.append((slot, call.source_location))
            case "store", [int(slot)]:
                self._used_slots.append((slot, call.source_location))
|
algorandfoundation/puya
|
src/puya/awst/validation/scratch_slots.py
|
Python
|
NOASSERTION
| 1,972 |
import typing
from collections import defaultdict
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.awst_traverser import AWSTTraverser
from puya.awst.nodes import AppStorageKind
from puya.utils import set_add
logger = log.get_logger(__name__)
class StorageTypesValidator(AWSTTraverser):
    """Validates that every type used in app storage (global state, account
    local state, boxes) is persistable."""

    @classmethod
    def validate(cls, module: awst_nodes.AWST) -> None:
        for module_statement in module:
            # create a new instance for each top level construct,
            # either subroutine or contract class, so that we can de-dupe
            # messages (where possible) there
            validator = cls()
            module_statement.accept(validator)

    def __init__(self) -> None:
        super().__init__()
        # constant keys already validated, per storage kind (for de-duping)
        self._seen_keys = defaultdict[AppStorageKind, set[bytes]](set)

    @typing.override
    def visit_app_storage_definition(self, defn: awst_nodes.AppStorageDefinition) -> None:
        super().visit_app_storage_definition(defn)
        wtypes.validate_persistable(defn.storage_wtype, defn.source_location)
        # map-style storage also has a key type that must be persistable
        if defn.key_wtype is not None:
            wtypes.validate_persistable(defn.key_wtype, defn.source_location)

    @typing.override
    def visit_app_state_expression(self, expr: awst_nodes.AppStateExpression) -> None:
        super().visit_app_state_expression(expr)
        self._validate_usage(expr, AppStorageKind.app_global)

    @typing.override
    def visit_app_account_state_expression(
        self, expr: awst_nodes.AppAccountStateExpression
    ) -> None:
        super().visit_app_account_state_expression(expr)
        self._validate_usage(expr, AppStorageKind.account_local)

    @typing.override
    def visit_box_value_expression(self, expr: awst_nodes.BoxValueExpression) -> None:
        super().visit_box_value_expression(expr)
        self._validate_usage(expr, AppStorageKind.box)

    def _validate_usage(self, expr: awst_nodes.StorageExpression, kind: AppStorageKind) -> None:
        # skip if this constant key was already validated for this storage kind
        if isinstance(expr.key, awst_nodes.BytesConstant) and not set_add(
            self._seen_keys[kind], expr.key.value
        ):
            return
        wtypes.validate_persistable(expr.wtype, expr.source_location)
|
algorandfoundation/puya
|
src/puya/awst/validation/storage.py
|
Python
|
NOASSERTION
| 2,223 |
from __future__ import annotations # needed to break import cycle
import typing as t
from abc import ABC, abstractmethod
if t.TYPE_CHECKING:
import puya.awst.nodes
class StatementVisitor[T](ABC):
    """Abstract visitor over AWST statement nodes; one method per statement kind,
    each returning a value of type T."""

    @abstractmethod
    def visit_block(self, statement: puya.awst.nodes.Block) -> T: ...
    @abstractmethod
    def visit_if_else(self, statement: puya.awst.nodes.IfElse) -> T: ...
    @abstractmethod
    def visit_switch(self, statement: puya.awst.nodes.Switch) -> T: ...
    @abstractmethod
    def visit_while_loop(self, statement: puya.awst.nodes.WhileLoop) -> T: ...
    @abstractmethod
    def visit_loop_exit(self, statement: puya.awst.nodes.LoopExit) -> T: ...
    @abstractmethod
    def visit_return_statement(self, statement: puya.awst.nodes.ReturnStatement) -> T: ...
    @abstractmethod
    def visit_loop_continue(self, statement: puya.awst.nodes.LoopContinue) -> T: ...
    @abstractmethod
    def visit_expression_statement(self, statement: puya.awst.nodes.ExpressionStatement) -> T: ...
    @abstractmethod
    def visit_uint64_augmented_assignment(
        self, statement: puya.awst.nodes.UInt64AugmentedAssignment
    ) -> T: ...
    @abstractmethod
    def visit_biguint_augmented_assignment(
        self, statement: puya.awst.nodes.BigUIntAugmentedAssignment
    ) -> T: ...
    @abstractmethod
    def visit_bytes_augmented_assignment(
        self, statement: puya.awst.nodes.BytesAugmentedAssignment
    ) -> T: ...
    @abstractmethod
    def visit_for_in_loop(self, statement: puya.awst.nodes.ForInLoop) -> T: ...
    @abstractmethod
    def visit_assignment_statement(self, statement: puya.awst.nodes.AssignmentStatement) -> T: ...
    @abstractmethod
    def visit_goto(self, statement: puya.awst.nodes.Goto) -> T: ...
class RootNodeVisitor[T](ABC):
    """Abstract visitor over AWST module-level (root) nodes."""

    @abstractmethod
    def visit_subroutine(self, statement: puya.awst.nodes.Subroutine) -> T: ...
    @abstractmethod
    def visit_contract(self, statement: puya.awst.nodes.Contract) -> T: ...
    @abstractmethod
    def visit_logic_signature(self, statement: puya.awst.nodes.LogicSignature) -> T: ...
class ContractMemberVisitor[T](ABC):
    """Abstract visitor over members declared within a contract."""

    @abstractmethod
    def visit_contract_method(self, statement: puya.awst.nodes.ContractMethod) -> T: ...
    @abstractmethod
    def visit_app_storage_definition(
        self, statement: puya.awst.nodes.AppStorageDefinition
    ) -> T: ...
class ExpressionVisitor[T](ABC):
    """Abstract visitor over AWST expression nodes; one method per expression kind,
    each returning a value of type T."""

    @abstractmethod
    def visit_state_delete(self, expr: puya.awst.nodes.StateDelete) -> T: ...
    @abstractmethod
    def visit_assignment_expression(self, expr: puya.awst.nodes.AssignmentExpression) -> T: ...
    @abstractmethod
    def visit_uint64_binary_operation(self, expr: puya.awst.nodes.UInt64BinaryOperation) -> T: ...
    @abstractmethod
    def visit_biguint_binary_operation(
        self, expr: puya.awst.nodes.BigUIntBinaryOperation
    ) -> T: ...
    @abstractmethod
    def visit_integer_constant(self, expr: puya.awst.nodes.IntegerConstant) -> T: ...
    @abstractmethod
    def visit_decimal_constant(self, expr: puya.awst.nodes.DecimalConstant) -> T: ...
    @abstractmethod
    def visit_bool_constant(self, expr: puya.awst.nodes.BoolConstant) -> T: ...
    @abstractmethod
    def visit_bytes_constant(self, expr: puya.awst.nodes.BytesConstant) -> T: ...
    @abstractmethod
    def visit_string_constant(self, expr: puya.awst.nodes.StringConstant) -> T: ...
    @abstractmethod
    def visit_void_constant(self, expr: puya.awst.nodes.VoidConstant) -> T: ...
    @abstractmethod
    def visit_address_constant(self, expr: puya.awst.nodes.AddressConstant) -> T: ...
    @abstractmethod
    def visit_compiled_contract(self, expr: puya.awst.nodes.CompiledContract) -> T: ...
    @abstractmethod
    def visit_compiled_logicsig(self, expr: puya.awst.nodes.CompiledLogicSig) -> T: ...
    @abstractmethod
    def visit_numeric_comparison_expression(
        self, expr: puya.awst.nodes.NumericComparisonExpression
    ) -> T: ...
    @abstractmethod
    def visit_var_expression(self, expr: puya.awst.nodes.VarExpression) -> T: ...
    @abstractmethod
    def visit_intrinsic_call(self, call: puya.awst.nodes.IntrinsicCall) -> T: ...
    @abstractmethod
    def visit_puya_lib_call(self, call: puya.awst.nodes.PuyaLibCall) -> T: ...
    @abstractmethod
    def visit_group_transaction_reference(
        self, ref: puya.awst.nodes.GroupTransactionReference
    ) -> T: ...
    @abstractmethod
    def visit_create_inner_transaction(
        self, create_itxn: puya.awst.nodes.CreateInnerTransaction
    ) -> T: ...
    @abstractmethod
    def visit_update_inner_transaction(
        self, update_itxn: puya.awst.nodes.UpdateInnerTransaction
    ) -> T: ...
    @abstractmethod
    def visit_submit_inner_transaction(
        self, submit: puya.awst.nodes.SubmitInnerTransaction
    ) -> T: ...
    @abstractmethod
    def visit_inner_transaction_field(
        self, itxn_field: puya.awst.nodes.InnerTransactionField
    ) -> T: ...
    @abstractmethod
    def visit_assert_expression(self, statement: puya.awst.nodes.AssertExpression) -> T: ...
    @abstractmethod
    def visit_checked_maybe(self, call: puya.awst.nodes.CheckedMaybe) -> T: ...
    @abstractmethod
    def visit_arc4_decode(self, expr: puya.awst.nodes.ARC4Decode) -> T: ...
    @abstractmethod
    def visit_arc4_encode(self, expr: puya.awst.nodes.ARC4Encode) -> T: ...
    @abstractmethod
    def visit_array_concat(self, expr: puya.awst.nodes.ArrayConcat) -> T: ...
    @abstractmethod
    def visit_array_extend(self, expr: puya.awst.nodes.ArrayExtend) -> T: ...
    @abstractmethod
    def visit_tuple_expression(self, expr: puya.awst.nodes.TupleExpression) -> T: ...
    @abstractmethod
    def visit_tuple_item_expression(self, expr: puya.awst.nodes.TupleItemExpression) -> T: ...
    @abstractmethod
    def visit_field_expression(self, expr: puya.awst.nodes.FieldExpression) -> T: ...
    @abstractmethod
    def visit_index_expression(self, expr: puya.awst.nodes.IndexExpression) -> T: ...
    @abstractmethod
    def visit_slice_expression(self, expr: puya.awst.nodes.SliceExpression) -> T: ...
    @abstractmethod
    def visit_conditional_expression(self, expr: puya.awst.nodes.ConditionalExpression) -> T: ...
    @abstractmethod
    def visit_single_evaluation(self, expr: puya.awst.nodes.SingleEvaluation) -> T: ...
    @abstractmethod
    def visit_app_state_expression(self, expr: puya.awst.nodes.AppStateExpression) -> T: ...
    @abstractmethod
    def visit_app_account_state_expression(
        self, expr: puya.awst.nodes.AppAccountStateExpression
    ) -> T: ...
    @abstractmethod
    def visit_new_array(self, expr: puya.awst.nodes.NewArray) -> T: ...
    @abstractmethod
    def visit_new_struct(self, expr: puya.awst.nodes.NewStruct) -> T: ...
    @abstractmethod
    def visit_bytes_comparison_expression(
        self, expr: puya.awst.nodes.BytesComparisonExpression
    ) -> T: ...
    @abstractmethod
    def visit_subroutine_call_expression(
        self, expr: puya.awst.nodes.SubroutineCallExpression
    ) -> T: ...
    @abstractmethod
    def visit_bytes_binary_operation(self, expr: puya.awst.nodes.BytesBinaryOperation) -> T: ...
    @abstractmethod
    def visit_boolean_binary_operation(
        self, expr: puya.awst.nodes.BooleanBinaryOperation
    ) -> T: ...
    @abstractmethod
    def visit_uint64_unary_operation(self, expr: puya.awst.nodes.UInt64UnaryOperation) -> T: ...
    @abstractmethod
    def visit_bytes_unary_operation(self, expr: puya.awst.nodes.BytesUnaryOperation) -> T: ...
    @abstractmethod
    def visit_not_expression(self, expr: puya.awst.nodes.Not) -> T: ...
    @abstractmethod
    def visit_reinterpret_cast(self, expr: puya.awst.nodes.ReinterpretCast) -> T: ...
    @abstractmethod
    def visit_enumeration(self, expr: puya.awst.nodes.Enumeration) -> T: ...
    @abstractmethod
    def visit_method_constant(self, expr: puya.awst.nodes.MethodConstant) -> T: ...
    @abstractmethod
    def visit_array_pop(self, expr: puya.awst.nodes.ArrayPop) -> T: ...
    @abstractmethod
    def visit_copy(self, expr: puya.awst.nodes.Copy) -> T: ...
    @abstractmethod
    def visit_reversed(self, expr: puya.awst.nodes.Reversed) -> T: ...
    @abstractmethod
    def visit_state_get(self, expr: puya.awst.nodes.StateGet) -> T: ...
    @abstractmethod
    def visit_state_get_ex(self, expr: puya.awst.nodes.StateGetEx) -> T: ...
    @abstractmethod
    def visit_state_exists(self, expr: puya.awst.nodes.StateExists) -> T: ...
    @abstractmethod
    def visit_template_var(self, expr: puya.awst.nodes.TemplateVar) -> T: ...
    @abstractmethod
    def visit_intersection_slice_expression(
        self, expr: puya.awst.nodes.IntersectionSliceExpression
    ) -> T: ...
    @abstractmethod
    def visit_box_value_expression(self, expr: puya.awst.nodes.BoxValueExpression) -> T: ...
    @abstractmethod
    def visit_uint64_postfix_unary_operation(
        self, expr: puya.awst.nodes.UInt64PostfixUnaryOperation
    ) -> T: ...
    @abstractmethod
    def visit_biguint_postfix_unary_operation(
        self, expr: puya.awst.nodes.BigUIntPostfixUnaryOperation
    ) -> T: ...
    @abstractmethod
    def visit_arc4_router(self, expr: puya.awst.nodes.ARC4Router) -> T: ...
    @abstractmethod
    def visit_range(self, node: puya.awst.nodes.Range) -> T: ...
    @abstractmethod
    def visit_emit(self, emit: puya.awst.nodes.Emit) -> T: ...
|
algorandfoundation/puya
|
src/puya/awst/visitors.py
|
Python
|
NOASSERTION
| 9,483 |
import typing
from collections.abc import Iterable, Mapping
from functools import cached_property
import attrs
from immutabledict import immutabledict
from puya import log
from puya.avm import AVMType, TransactionType
from puya.errors import CodeError, InternalError
from puya.parse import SourceLocation
from puya.utils import unique
logger = log.get_logger(__name__)
@attrs.frozen(kw_only=True)
class WType:
    """Base class for all AWST types ("wtypes"); str() of an instance is its name."""

    name: str
    scalar_type: typing.Literal[AVMType.uint64, AVMType.bytes, None]
    "the (unbound) AVM stack type, if any"
    ephemeral: bool = False
    """ephemeral types are not suitable for naive storage / persistence,
    even if their underlying type is a simple stack value"""
    immutable: bool

    def __str__(self) -> str:
        return self.name
# Singleton instances for the simple / scalar wtypes.
void_wtype: typing.Final = WType(
    name="void",
    scalar_type=None,
    immutable=True,
)
bool_wtype: typing.Final = WType(
    name="bool",
    scalar_type=AVMType.uint64,
    immutable=True,
)
uint64_wtype: typing.Final = WType(
    name="uint64",
    scalar_type=AVMType.uint64,
    immutable=True,
)
biguint_wtype: typing.Final = WType(
    name="biguint",
    scalar_type=AVMType.bytes,
    immutable=True,
)
bytes_wtype: typing.Final = WType(
    name="bytes",
    scalar_type=AVMType.bytes,
    immutable=True,
)
string_wtype: typing.Final = WType(
    name="string",
    scalar_type=AVMType.bytes,
    immutable=True,
)
# reference types are represented by their scalar id / address values
asset_wtype: typing.Final = WType(
    name="asset",
    scalar_type=AVMType.uint64,
    immutable=True,
)
account_wtype: typing.Final = WType(
    name="account",
    scalar_type=AVMType.bytes,
    immutable=True,
)
application_wtype: typing.Final = WType(
    name="application",
    scalar_type=AVMType.uint64,
    immutable=True,
)
# storage key types
state_key: typing.Final = WType(
    name="state_key",
    scalar_type=AVMType.bytes,
    immutable=True,
)
box_key: typing.Final = WType(
    name="box_key",
    scalar_type=AVMType.bytes,
    immutable=True,
)
uint64_range_wtype: typing.Final = WType(
    name="uint64_range",
    scalar_type=None,
    immutable=True,
)
@attrs.frozen
class WEnumeration(WType):
    """Wtype describing an enumeration over a sequence type; has no scalar
    representation (scalar_type is always None) and is always immutable."""

    sequence_type: WType
    name: str = attrs.field(init=False)
    immutable: bool = attrs.field(default=True, init=False)
    scalar_type: None = attrs.field(default=None, init=False)
    ephemeral: bool = attrs.field(default=False, init=False)

    @name.default
    def _name_factory(self) -> str:
        # e.g. "enumerate_uint64"
        return f"enumerate_{self.sequence_type.name}"
@attrs.frozen
class _TransactionRelatedWType(WType):
    """Shared base for transaction-related wtypes: carries an optional
    transaction type, and is always ephemeral and immutable."""

    transaction_type: TransactionType | None
    ephemeral: bool = attrs.field(default=True, init=False)
    immutable: bool = attrs.field(default=True, init=False)
@typing.final
@attrs.frozen
class WGroupTransaction(_TransactionRelatedWType):
    """Wtype for a transaction in the group; represented on-stack as its group index."""

    scalar_type: typing.Literal[AVMType.uint64] = attrs.field(default=AVMType.uint64, init=False)

    @classmethod
    def from_type(cls, transaction_type: TransactionType | None) -> "WGroupTransaction":
        """Construct the wtype for the given transaction type (None = any type)."""
        name = "group_transaction"
        if transaction_type:
            name = f"{name}_{transaction_type.name}"
        return cls(name=name, transaction_type=transaction_type)
@typing.final
@attrs.frozen
class WInnerTransactionFields(_TransactionRelatedWType):
    """Wtype for a set of inner-transaction field parameters (pre-submission);
    has no scalar stack representation."""

    scalar_type: None = attrs.field(default=None, init=False)

    @classmethod
    def from_type(cls, transaction_type: TransactionType | None) -> "WInnerTransactionFields":
        """Construct the wtype for the given transaction type (None = any type)."""
        name = "inner_transaction_fields"
        if transaction_type:
            name = f"{name}_{transaction_type.name}"
        return cls(name=name, transaction_type=transaction_type)
@typing.final
@attrs.frozen
class WInnerTransaction(_TransactionRelatedWType):
    """A submitted inner transaction (result); no scalar representation."""

    scalar_type: None = attrs.field(default=None, init=False)
    @classmethod
    def from_type(cls, transaction_type: TransactionType | None) -> "WInnerTransaction":
        """Construct with a name suffixed by the transaction type, if restricted."""
        name = "inner_transaction"
        if transaction_type:
            name = f"{name}_{transaction_type.name}"
        return cls(name=name, transaction_type=transaction_type)
@typing.final
@attrs.frozen
class WStructType(WType):
    """A native (non-ARC4) struct type.

    NOTE: currently unconstructible — the `immutable` default raises
    NotImplementedError, pending native struct support.
    """

    fields: immutabledict[str, WType] = attrs.field(converter=immutabledict)
    # whether instances are frozen at the language level
    frozen: bool
    immutable: bool = attrs.field(init=False)
    scalar_type: None = attrs.field(default=None, init=False)
    # excluded from equality: identical structs from different locations compare equal
    source_location: SourceLocation | None = attrs.field(eq=False)
    desc: str | None = None
    @immutable.default
    def _immutable(self) -> bool:
        # TODO: determine correct behaviour when implementing native structs
        raise NotImplementedError
    @fields.validator
    def _fields_validator(self, _: object, fields: immutabledict[str, WType]) -> None:
        # a struct must have at least one field, and none of them may be void
        if not fields:
            raise CodeError("struct needs fields", self.source_location)
        if void_wtype in fields.values():
            raise CodeError("struct should not contain void types", self.source_location)
@typing.final
@attrs.frozen
class WArray(WType):
    """A native mutable array of `element_type`; name is derived as array<element>."""

    element_type: WType = attrs.field()
    name: str = attrs.field(init=False)
    scalar_type: None = attrs.field(default=None, init=False)
    # excluded from equality: identical arrays from different locations compare equal
    source_location: SourceLocation | None = attrs.field(eq=False)
    immutable: bool = attrs.field(default=False, init=False)
    @element_type.validator
    def _element_type_validator(self, _: object, element_type: WType) -> None:
        if element_type == void_wtype:
            raise CodeError("array element type cannot be void", self.source_location)
    @name.default
    def _name(self) -> str:
        return f"array<{self.element_type.name}>"
@typing.final
@attrs.frozen(eq=False)
class WTuple(WType):
    """A native tuple type, optionally with named items.

    Equality is customised (hence ``eq=False`` on the decorator): item names
    only participate when both sides define them, so an unnamed tuple compares
    equal to a named tuple of the same item types.
    """

    types: tuple[WType, ...] = attrs.field(converter=tuple[WType, ...])
    source_location: SourceLocation | None = attrs.field(default=None)
    scalar_type: None = attrs.field(default=None, init=False)
    immutable: bool = attrs.field(default=True, init=False)
    name: str = attrs.field(kw_only=True)
    # item names; None means an unnamed tuple
    names: tuple[str, ...] | None = attrs.field(default=None)
    desc: str | None = None
    def __eq__(self, other: object) -> bool:
        # this custom equality check ensures that
        # tuple field names are only considered when both sides
        # have defined names
        if not isinstance(other, WTuple):
            return False
        return self.types == other.types and (
            self.names == other.names or None in (self.names, other.names)
        )
    def __hash__(self) -> int:
        # deliberately ignores `names` so tuples that compare equal
        # under the relaxed __eq__ above also hash equal
        return hash(self.types)
    @types.validator
    def _types_validator(self, _attribute: object, types: tuple[WType, ...]) -> None:
        if void_wtype in types:
            raise CodeError("tuple should not contain void types", self.source_location)
    @name.default
    def _name(self) -> str:
        return f"tuple<{','.join([t.name for t in self.types])}>"
    @names.validator
    def _names_validator(self, _attribute: object, names: tuple[str, ...] | None) -> None:
        # names, when given, must be unique and match the arity of `types`
        if names is None:
            return
        if len(names) != len(self.types):
            raise InternalError("mismatch between tuple item names length and types")
        if len(names) != len(unique(names)):
            raise CodeError("tuple item names are not unique", self.source_location)
    @cached_property
    def fields(self) -> Mapping[str, WType]:
        """Mapping of item names to types if `names` is defined, otherwise empty."""
        if self.names is None:
            return {}
        return dict(zip(self.names, self.types, strict=True))
    def name_to_index(self, name: str, source_location: SourceLocation) -> int:
        """Return the positional index of item `name`; raises CodeError if unnamed/unknown."""
        if self.names is None:
            raise CodeError(
                "cannot access tuple item by name of an unnamed tuple", source_location
            )
        try:
            return self.names.index(name)
        except ValueError:
            raise CodeError(f"{name} is not a member of {self.name}") from None
@attrs.frozen(kw_only=True)
class ARC4Type(WType):
    """Base for all ARC4-encoded types; always byte-encoded on the stack."""

    scalar_type: typing.Literal[AVMType.bytes] = attrs.field(default=AVMType.bytes, init=False)
    arc4_name: str = attrs.field(eq=False)  # exclude from equality in case of aliasing
    # the native wtype this ARC4 type decodes to/encodes from, if any
    native_type: WType | None
    def can_encode_type(self, wtype: WType) -> bool:
        """True if a value of `wtype` can be ARC4-encoded into this type."""
        return wtype == self.native_type
# arc4.bool: encodes the native bool type
arc4_bool_wtype: typing.Final = ARC4Type(
    name="arc4.bool",
    arc4_name="bool",
    immutable=True,
    native_type=bool_wtype,
)
@typing.final
@attrs.frozen(kw_only=True)
class ARC4UIntN(ARC4Type):
    """ARC4 uintN: unsigned integer of `n` bits, n a multiple of 8 in [8, 512]."""

    immutable: bool = attrs.field(default=True, init=False)
    native_type: WType = attrs.field(default=biguint_wtype, init=False)
    n: int = attrs.field()
    arc4_name: str = attrs.field(eq=False)
    name: str = attrs.field(init=False)
    source_location: SourceLocation | None = attrs.field(default=None, eq=False)
    @n.validator
    def _n_validator(self, _attribute: object, n: int) -> None:
        if not (n % 8 == 0):
            raise CodeError("Bit size must be multiple of 8", self.source_location)
        if not (8 <= n <= 512):
            raise CodeError("Bit size must be between 8 and 512 inclusive", self.source_location)
    @arc4_name.default
    def _arc4_name(self) -> str:
        return f"uint{self.n}"
    @name.default
    def _name(self) -> str:
        return f"arc4.{self._arc4_name()}"
    def can_encode_type(self, wtype: WType) -> bool:
        # widens the base rule: any native unsigned integer (or bool) can be encoded
        return wtype in (bool_wtype, uint64_wtype, biguint_wtype)
@typing.final
@attrs.frozen(kw_only=True)
class ARC4UFixedNxM(ARC4Type):
    """ARC4 ufixedNxM: `n`-bit unsigned fixed-point with `m` decimal digits of precision.

    There is no native equivalent type (native_type is None).
    """

    n: int = attrs.field()
    m: int = attrs.field()
    immutable: bool = attrs.field(default=True, init=False)
    arc4_name: str = attrs.field(init=False, eq=False)
    name: str = attrs.field(init=False)
    source_location: SourceLocation | None = attrs.field(default=None, eq=False)
    native_type: None = attrs.field(default=None, init=False)
    @arc4_name.default
    def _arc4_name(self) -> str:
        return f"ufixed{self.n}x{self.m}"
    @name.default
    def _name(self) -> str:
        return f"arc4.{self.arc4_name}"
    @n.validator
    def _n_validator(self, _attribute: object, n: int) -> None:
        if not (n % 8 == 0):
            raise CodeError("Bit size must be multiple of 8", self.source_location)
        if not (8 <= n <= 512):
            raise CodeError("Bit size must be between 8 and 512 inclusive", self.source_location)
    @m.validator
    def _m_validator(self, _attribute: object, m: int) -> None:
        if not (1 <= m <= 160):
            raise CodeError("Precision must be between 1 and 160 inclusive", self.source_location)
def _required_arc4_wtypes(wtypes: Iterable[WType]) -> tuple[ARC4Type, ...]:
    """Converter: narrow an iterable of wtypes to a tuple of ARC4 types.

    Raises:
        CodeError: on the first element that is not an ARC4 type.
    """

    def _checked(candidate: WType) -> ARC4Type:
        if isinstance(candidate, ARC4Type):
            return candidate
        raise CodeError(f"expected ARC4 type: {candidate}")

    return tuple(_checked(item) for item in wtypes)
@typing.final
@attrs.frozen(kw_only=True)
class ARC4Tuple(ARC4Type):
    """ARC4 `(a,b,...)` tuple; immutable iff every element type is immutable."""

    source_location: SourceLocation | None = attrs.field(default=None, eq=False)
    # converter enforces that all elements are ARC4 types
    types: tuple[ARC4Type, ...] = attrs.field(converter=_required_arc4_wtypes)
    name: str = attrs.field(init=False)
    arc4_name: str = attrs.field(init=False, eq=False)
    immutable: bool = attrs.field(init=False)
    native_type: WTuple = attrs.field(init=False)
    @name.default
    def _name(self) -> str:
        return f"arc4.tuple<{','.join(t.name for t in self.types)}>"
    @arc4_name.default
    def _arc4_name(self) -> str:
        return f"({','.join(item.arc4_name for item in self.types)})"
    @immutable.default
    def _immutable(self) -> bool:
        return all(typ.immutable for typ in self.types)
    @native_type.default
    def _native_type(self) -> WTuple:
        # the native equivalent is an unnamed WTuple of the same element types
        return WTuple(self.types, self.source_location)
    def can_encode_type(self, wtype: WType) -> bool:
        # additionally accepts native tuples whose items are element-wise encodable
        return super().can_encode_type(wtype) or _is_arc4_encodeable_tuple(wtype, self.types)
def _is_arc4_encodeable_tuple(
    wtype: WType, target_types: tuple[ARC4Type, ...]
) -> typing.TypeGuard[WTuple]:
    """True if `wtype` is a native tuple whose items can be encoded to `target_types`.

    Each item must either already equal its target ARC4 type, or be accepted by
    that target's `can_encode_type`.
    """
    if not isinstance(wtype, WTuple):
        return False
    if len(wtype.types) != len(target_types):
        return False
    for target, candidate in zip(target_types, wtype.types, strict=True):
        if target != candidate and not target.can_encode_type(candidate):
            return False
    return True
def _expect_arc4_type(wtype: WType) -> ARC4Type:
    """Converter: narrow a single wtype to an ARC4 type or raise CodeError."""
    if isinstance(wtype, ARC4Type):
        return wtype
    raise CodeError(f"expected ARC4 type: {wtype}")
@attrs.frozen(kw_only=True)
class ARC4Array(ARC4Type):
    """Base for ARC4 array types; mutable by default, with no native equivalent."""

    # converter enforces that the element type is an ARC4 type
    element_type: ARC4Type = attrs.field(converter=_expect_arc4_type)
    native_type: WType | None = None
    immutable: bool = False
@typing.final
@attrs.frozen(kw_only=True)
class ARC4DynamicArray(ARC4Array):
    """ARC4 dynamic-length array, `element[]`."""

    name: str = attrs.field(init=False)
    # overridable so aliases like "string" can keep their canonical ARC4 name
    arc4_name: str = attrs.field(eq=False)
    source_location: SourceLocation | None = attrs.field(default=None, eq=False)
    @name.default
    def _name(self) -> str:
        return f"arc4.dynamic_array<{self.element_type.name}>"
    @arc4_name.default
    def _arc4_name(self) -> str:
        return f"{self.element_type.arc4_name}[]"
@typing.final
@attrs.frozen(kw_only=True)
class ARC4StaticArray(ARC4Array):
    """ARC4 fixed-length array, `element[N]`."""

    array_size: int = attrs.field(validator=attrs.validators.ge(0))
    name: str = attrs.field(init=False)
    # overridable so aliases like "address" can keep their canonical ARC4 name
    arc4_name: str = attrs.field(eq=False)
    source_location: SourceLocation | None = attrs.field(default=None, eq=False)
    @name.default
    def _name(self) -> str:
        return f"arc4.static_array<{self.element_type.name}, {self.array_size}>"
    @arc4_name.default
    def _arc4_name(self) -> str:
        return f"{self.element_type.arc4_name}[{self.array_size}]"
def _require_arc4_fields(fields: Mapping[str, WType]) -> immutabledict[str, ARC4Type]:
    """Converter: validate an ARC4 struct's field mapping and freeze it.

    Raises:
        CodeError: if the mapping is empty, or any field type is not ARC4-encoded.
    """
    if not fields:
        raise CodeError("arc4.Struct needs at least one element")
    bad_names = [name for name, typ in fields.items() if not isinstance(typ, ARC4Type)]
    if bad_names:
        raise CodeError(
            "invalid ARC4 Struct declaration,"
            f" the following fields are not ARC4 encoded types: {', '.join(bad_names)}",
        )
    return immutabledict(fields)
@typing.final
@attrs.frozen(kw_only=True)
class ARC4Struct(ARC4Type):
    """ARC4 named struct: encoded identically to a tuple of its field types."""

    # converter enforces non-empty, all-ARC4 fields
    fields: immutabledict[str, ARC4Type] = attrs.field(converter=_require_arc4_fields)
    # whether instances are frozen at the language level
    frozen: bool
    immutable: bool = attrs.field(init=False)
    source_location: SourceLocation | None = attrs.field(default=None, eq=False)
    arc4_name: str = attrs.field(init=False, eq=False)
    native_type: None = attrs.field(default=None, init=False)
    desc: str | None = None
    @immutable.default
    def _immutable(self) -> bool:
        # only immutable when frozen AND every field type is itself immutable
        return self.frozen and all(typ.immutable for typ in self.fields.values())
    @arc4_name.default
    def _arc4_name(self) -> str:
        # same wire-format name as the equivalent ARC4 tuple;
        # relies on the `types` cached_property being usable during attrs init
        # (fields is declared, and therefore set, before this default runs)
        return f"({','.join(item.arc4_name for item in self.types)})"
    @cached_property
    def names(self) -> tuple[str, ...]:
        return tuple(self.fields.keys())
    @cached_property
    def types(self) -> tuple[ARC4Type, ...]:
        return tuple(self.fields.values())
    def can_encode_type(self, wtype: WType) -> bool:
        # additionally accepts native tuples with element-wise encodable items,
        # as long as the tuple is unnamed or its names match exactly
        return super().can_encode_type(wtype) or (
            _is_arc4_encodeable_tuple(wtype, self.types)
            and (wtype.names is None or wtype.names == self.names)
        )
# Canonical ARC4 alias types: byte = uint8, string = byte[] with a native
# string equivalent, address = byte[32] with a native account equivalent.
arc4_byte_alias: typing.Final = ARC4UIntN(
    n=8,
    arc4_name="byte",
    source_location=None,
)
arc4_string_alias: typing.Final = ARC4DynamicArray(
    arc4_name="string",
    element_type=arc4_byte_alias,
    native_type=string_wtype,
    immutable=True,
    source_location=None,
)
arc4_address_alias: typing.Final = ARC4StaticArray(
    arc4_name="address",
    element_type=arc4_byte_alias,
    native_type=account_wtype,
    array_size=32,
    immutable=True,
    source_location=None,
)
def persistable_stack_type(
    wtype: WType, location: SourceLocation
) -> typing.Literal[AVMType.uint64, AVMType.bytes]:
    """Return the scalar storage type for `wtype`.

    Raises:
        CodeError: (at `location`) when the type cannot be persisted.
    """
    result = _storage_type_or_error(wtype)
    if isinstance(result, str):
        raise CodeError(result, location=location)
    return result
def validate_persistable(wtype: WType, location: SourceLocation) -> bool:
    """Log an error and return False if `wtype` cannot be persisted, else True."""
    result = _storage_type_or_error(wtype)
    if isinstance(result, str):
        logger.error(result, location=location)
        return False
    return True
def _storage_type_or_error(wtype: WType) -> str | typing.Literal[AVMType.uint64, AVMType.bytes]:
    """Return the scalar storage type of `wtype`, or an error message string.

    Ephemeral types and types without a scalar representation are not storable.
    """
    if wtype.ephemeral:
        return "ephemeral types (such as transaction related types) are not suitable for storage"
    scalar = wtype.scalar_type
    if scalar is None:
        return "type is not suitable for storage"
    return scalar
|
algorandfoundation/puya
|
src/puya/awst/wtypes.py
|
Python
|
NOASSERTION
| 16,805 |
import abc
import typing
from collections.abc import Mapping, Sequence
import attrs
from puya.artifact_metadata import ContractMetaData, LogicSignatureMetaData
from puya.parse import SourceLocation
from puya.program_refs import ContractReference, LogicSigReference
# a template variable's concrete value (uint64 or bytes) plus an optional source location
TemplateValue = tuple[int | bytes, SourceLocation | None]
class DebugEvent(typing.TypedDict, total=False):
    """Describes various attributes for a particular PC location.

    All keys are optional (``total=False``); only the attributes relevant to a
    given PC are present in a given event.
    """

    subroutine: str
    """Subroutine name"""
    params: Mapping[str, str]
    """Describes a subroutines parameters and their types"""
    block: str
    """Name of a block"""
    stack_in: Sequence[str]
    """Variable names on the stack BEFORE the next op executes"""
    op: str
    """Op description"""
    callsub: str
    """The subroutine that is about to be called"""
    retsub: bool
    """Returns from current subroutine"""
    stack_out: Sequence[str]
    """Variable names on the stack AFTER the next op executes"""
    defined_out: Sequence[str]
    """Variable names that are defined AFTER the next op executes"""
    error: str
    """Error message if failure occurs at this op"""
@attrs.frozen
class DebugInfo:
    """Source-map/debug data for a compiled program."""

    version: int
    # source file paths referenced by the mappings
    sources: list[str]
    mappings: str
    # offset applied to op PCs (see consumers of pc_events)
    op_pc_offset: int
    # per-PC debug events
    pc_events: Mapping[int, DebugEvent]
class CompiledProgram(abc.ABC):
    """Abstract result of compiling a single TEAL program."""

    @property
    @abc.abstractmethod
    def teal_src(self) -> str:
        """TEAL source text of the program."""
        ...
    @property
    @abc.abstractmethod
    def bytecode(self) -> bytes | None:
        """
        bytecode can only be produced if no template variables are used OR template values are
        provided, for this reason bytecode is only provided if output_bytecode is True
        """
    @property
    @abc.abstractmethod
    def debug_info(self) -> DebugInfo | None:
        """Debug/source-map info, when available."""
        ...
    @property
    @abc.abstractmethod
    def template_variables(self) -> Mapping[str, int | bytes | None]:
        """Template variable names mapped to their values (None when unresolved)."""
        ...
class CompiledContract(abc.ABC):
    """Abstract compiled smart contract: approval + clear programs plus metadata."""

    @property
    @abc.abstractmethod
    def source_location(self) -> SourceLocation | None:
        """Where the contract was defined, if known."""
        ...
    @property
    @abc.abstractmethod
    def approval_program(self) -> CompiledProgram:
        """The contract's approval program."""
        ...
    @property
    @abc.abstractmethod
    def clear_program(self) -> CompiledProgram:
        """The contract's clear-state program."""
        ...
    @property
    @abc.abstractmethod
    def metadata(self) -> ContractMetaData:
        """Static contract metadata (name, state totals, ABI info, ...)."""
        ...
    @typing.final
    @property
    def id(self) -> ContractReference:
        # convenience accessor: the contract's unique reference lives in its metadata
        return self.metadata.ref
class CompiledLogicSig(abc.ABC):
    """Abstract compiled logic signature: a single program plus metadata."""

    @property
    @abc.abstractmethod
    def source_location(self) -> SourceLocation | None:
        """Where the logic signature was defined, if known."""
        ...
    @property
    @abc.abstractmethod
    def program(self) -> CompiledProgram:
        """The logic signature's program."""
        ...
    @property
    @abc.abstractmethod
    def metadata(self) -> LogicSignatureMetaData:
        """Static logic signature metadata."""
        ...
    @typing.final
    @property
    def id(self) -> LogicSigReference:
        # convenience accessor: the logic sig's unique reference lives in its metadata
        return self.metadata.ref
# any compilation output is either a contract or a logic signature
CompilationArtifact: typing.TypeAlias = CompiledContract | CompiledLogicSig
|
algorandfoundation/puya
|
src/puya/compilation_artifacts.py
|
Python
|
NOASSERTION
| 2,895 |
import itertools
import shutil
import typing
from collections import defaultdict
from collections.abc import Iterator, Mapping, Sequence, Set
from pathlib import Path
import attrs
from cattrs.preconf.json import make_converter
from immutabledict import immutabledict
from puya import log
from puya.arc32 import create_arc32_json
from puya.arc56 import create_arc56_json
from puya.artifact_metadata import ContractMetaData, LogicSignatureMetaData, StateTotals
from puya.artifact_sorter import Artifact, ArtifactCompilationSorter
from puya.awst.nodes import AWST
from puya.awst.validation.main import validate_awst
from puya.compilation_artifacts import (
CompilationArtifact,
CompiledContract,
CompiledLogicSig,
CompiledProgram,
DebugInfo,
TemplateValue,
)
from puya.context import (
ArtifactCompileContext,
CompileContext,
CompiledProgramProvider,
OutputPathProvider,
)
from puya.errors import CodeError, InternalError
from puya.ir import models as ir_models
from puya.ir.destructure.main import destructure_ssa
from puya.ir.main import awst_to_ir
from puya.ir.optimize.main import optimize_program_ir
from puya.ir.to_text_visitor import render_program
from puya.ir.validation.main import validate_module_artifact
from puya.log import LoggingContext
from puya.mir.main import program_ir_to_mir
from puya.options import PuyaOptions
from puya.parse import SourceLocation
from puya.program_refs import ContractReference, LogicSigReference, ProgramKind
from puya.teal.main import mir_to_teal
from puya.teal.models import TealProgram, TealSubroutine
from puya.teal.output import emit_teal
from puya.ussemble.main import assemble_program
from puya.utils import attrs_extend, make_path_relative_to, make_path_relative_to_cwd
logger = log.get_logger(__name__)
def awst_to_teal(
    log_ctx: LoggingContext,
    options: PuyaOptions,
    compilation_set: Mapping[ContractReference | LogicSigReference, Path],
    sources_by_path: Mapping[Path, Sequence[str] | None],
    awst: AWST,
    *,
    write: bool = True,
) -> list[CompilationArtifact]:
    """Compile validated AWST all the way down to TEAL artifacts.

    Exits the process early (via `log_ctx.exit_if_errors`) as soon as any stage
    has reported errors. When `write` is true, artifacts are also written to
    their configured output directories.
    """
    validate_awst(awst)
    log_ctx.exit_if_errors()
    context = CompileContext(
        options=options,
        compilation_set=compilation_set,
        sources_by_path=sources_by_path,
    )
    log_ctx.exit_if_errors()
    # AWST -> IR for every artifact
    ir = list(awst_to_ir(context, awst))
    log_ctx.exit_if_errors()
    # IR -> (optimized, destructured) -> MIR -> TEAL
    teal = list(_ir_to_teal(log_ctx, context, ir))
    log_ctx.exit_if_errors()
    if write:
        _write_artifacts(context, teal)
    return teal
def _ir_to_teal(
    log_ctx: LoggingContext, context: CompileContext, all_ir: Sequence[ir_models.ModuleArtifact]
) -> Iterator[CompilationArtifact]:
    """Lower each IR artifact (in dependency order) to compiled TEAL programs.

    Output directories are cleared and conflicts on duplicate contract names are
    reported; artifacts whose optimization step produced errors are skipped so
    the remaining artifacts can still be lowered.
    """
    compiled_program_provider = _CompiledProgramProviderImpl(
        compile_context=context,
        state_totals={
            ir.metadata.ref: ir.metadata.state_totals
            for ir in all_ir
            if isinstance(ir, ir_models.Contract)
        },
    )
    # used to check for conflicts that would occur on output
    artifacts_by_output_base = dict[Path, Artifact]()
    for artifact in ArtifactCompilationSorter.sort(all_ir):
        output_path_provider = None
        if out_dir_setting := context.compilation_set.get(artifact.id):
            name = artifact.ir.metadata.name
            maybe_out_dir = out_dir_setting / f"{name}.ir"
            first_seen = artifacts_by_output_base.setdefault(maybe_out_dir, artifact)
            if artifact is not first_seen:
                logger.error(f"duplicate contract name {name}", location=artifact.source_location)
                logger.info(
                    f"contract name {name} first seen here", location=first_seen.source_location
                )
            else:
                out_dir = maybe_out_dir
                # start from a clean output directory for this artifact
                shutil.rmtree(out_dir, ignore_errors=True)
                output_path_provider = _SequentialOutputPathProvider(
                    metadata=artifact.ir.metadata, out_dir=out_dir
                )
        artifact_context = attrs_extend(
            ArtifactCompileContext,
            context,
            output_path_provider=output_path_provider,
            compiled_program_provider=compiled_program_provider,
        )
        num_errors_before_optimization = log_ctx.num_errors
        artifact_ir = _optimize_and_destructure_ir(artifact_context, artifact.ir)
        # IR validation that occurs at the end of optimize_and_destructure_ir may have revealed
        # further errors, add dummy artifacts and continue so other artifacts can still be lowered
        # and report any errors they encounter
        errors_in_optimization = log_ctx.num_errors > num_errors_before_optimization
        # NOTE(review): despite the comment above, no dummy artifact is yielded in
        # this module when errors occur — the artifact is simply skipped; confirm
        # whether _dummy_program (defined below) was intended to be used here
        if not errors_in_optimization:
            compiled: _CompiledContract | _CompiledLogicSig
            if isinstance(artifact_ir, ir_models.Contract):
                compiled = _contract_ir_to_teal(artifact_context, artifact_ir)
            else:
                compiled = _logic_sig_to_teal(artifact_context, artifact_ir)
            yield compiled
            # register so later artifacts can reference this one's bytecode
            compiled_program_provider.add_compiled_result(artifact, compiled)
@attrs.define
class _CompiledProgramProviderImpl(CompiledProgramProvider):
    """Registers compiled artifacts and assembles reference bytecode on demand."""

    compile_context: CompileContext
    state_totals: Mapping[ContractReference, StateTotals]
    # results registered as each artifact is compiled, keyed by its reference
    _compiled_artifacts: dict[
        ContractReference | LogicSigReference, "_CompiledContract | _CompiledLogicSig"
    ] = attrs.field(factory=dict, init=False)
    # memoized assembled bytecode per (ref, program kind, template values)
    _bytecode_cache: dict[
        tuple[
            ContractReference | LogicSigReference, ProgramKind, immutabledict[str, TemplateValue]
        ],
        bytes,
    ] = attrs.field(factory=dict, init=False)
    def add_compiled_result(
        self, artifact: Artifact, result: "_CompiledContract | _CompiledLogicSig"
    ) -> None:
        """Register a finished compilation so later artifacts can reference it."""
        self._compiled_artifacts[artifact.id] = result
    @typing.override
    def build_program_bytecode(
        self,
        ref: ContractReference | LogicSigReference,
        kind: ProgramKind,
        *,
        template_constants: immutabledict[str, TemplateValue],
    ) -> bytes:
        """Assemble (and cache) bytecode for the referenced program.

        Raises:
            CodeError: if `ref` has not been compiled yet / is unknown.
            InternalError: if `kind` doesn't match the referenced artifact's type.
        """
        cache_key = (ref, kind, template_constants)
        try:
            return self._bytecode_cache[cache_key]
        except KeyError:
            pass
        try:
            comp_ref = self._compiled_artifacts[ref]
        except KeyError:
            raise CodeError(f"invalid reference: {ref}") from None
        # the structural match binds `program` to the appropriate compiled program
        match kind, comp_ref:
            case ProgramKind.logic_signature, _CompiledLogicSig(program=program):
                pass
            case ProgramKind.approval, _CompiledContract(approval_program=program):
                pass
            case ProgramKind.clear_state, _CompiledContract(clear_program=program):
                pass
            case _:
                raise InternalError(f"invalid kind: {kind}, {type(comp_ref)}")
        assembled = assemble_program(
            self.compile_context,
            ref,
            program.teal,
            template_constants=template_constants,
            is_reference=True,
        )
        result = assembled.bytecode
        self._bytecode_cache[cache_key] = result
        return result
    @typing.override
    def get_state_totals(self, ref: ContractReference) -> StateTotals:
        return self.state_totals[ref]
@attrs.frozen
class _SequentialOutputPathProvider(OutputPathProvider):
    """Builds sequentially-numbered output paths inside an artifact's directory."""

    _metadata: ContractMetaData | LogicSignatureMetaData
    _out_dir: Path
    # an independent monotonically increasing counter per `kind`
    _output_seq: defaultdict[str, Iterator[int]] = attrs.field(
        factory=lambda: defaultdict(itertools.count), init=False
    )
    @typing.override
    def __call__(self, *, kind: str, qualifier: str, suffix: str) -> Path:
        out_dir = self._out_dir
        out_dir.mkdir(exist_ok=True)
        if qualifier:
            qualifier = f".{qualifier}"
        qualifier = f"{next(self._output_seq[kind])}{qualifier}"
        # NOTE(review): identity comparison of the str-typed `kind` against an enum
        # member — presumably callers always pass ProgramKind values so identity
        # holds for the singleton member; confirm plain strings are never passed
        if kind is not ProgramKind.logic_signature:
            qualifier = f"{kind}.{qualifier}"
        return out_dir / f"{self._metadata.name}.{qualifier}.{suffix}"
def _optimize_and_destructure_ir(
    context: ArtifactCompileContext, artifact_ir: ir_models.ModuleArtifact
) -> ir_models.ModuleArtifact:
    """Optimize and destructure every program of `artifact_ir`, then validate it."""
    if isinstance(artifact_ir, ir_models.LogicSignature):
        # logic sigs have no ARC4 router, so no routable method ids
        routable_method_ids = None
    else:
        routable_method_ids = {a4m.id for a4m in artifact_ir.metadata.arc4_methods}
    for program in artifact_ir.all_programs():
        _optimize_and_destructure_program_ir(
            context, artifact_ir.metadata.ref, program, routable_method_ids=routable_method_ids
        )
    # validation is run as the last step, in case we've accidentally inserted something,
    # and in particular post subroutine removal, because some things that are "linked"
    # are not necessarily used from the current artifact
    validate_module_artifact(context, artifact_ir)
    return artifact_ir
def _optimize_and_destructure_program_ir(
    context: ArtifactCompileContext,
    ref: ContractReference | LogicSigReference,
    program: ir_models.Program,
    routable_method_ids: Set[str] | None = None,
) -> None:
    """Optimize one program's IR in place, then convert it out of SSA form.

    Optionally renders the SSA and destructured forms to output files.
    """
    if context.options.output_ssa_ir:
        render_program(context, program, qualifier="ssa")
    logger.info(
        f"optimizing {program.kind} program of {ref} at level {context.options.optimization_level}"
    )
    optimize_program_ir(context, program, routable_method_ids=routable_method_ids)
    destructure_ssa(context, program)
    if context.options.output_destructured_ir:
        render_program(context, program, qualifier="destructured")
@attrs.frozen
class _CompiledProgram(CompiledProgram):
    """Concrete CompiledProgram also retaining the in-memory TealProgram."""

    teal: TealProgram
    teal_src: str
    template_variables: Mapping[str, int | bytes | None]
    debug_info: DebugInfo | None = None
    bytecode: bytes | None = None
@attrs.frozen
class _CompiledContract(CompiledContract):
    """Concrete CompiledContract implementation."""

    source_location: SourceLocation | None
    approval_program: _CompiledProgram
    clear_program: _CompiledProgram
    metadata: ContractMetaData
@attrs.frozen
class _CompiledLogicSig(CompiledLogicSig):
    """Concrete CompiledLogicSig implementation."""

    source_location: SourceLocation | None
    program: _CompiledProgram
    metadata: LogicSignatureMetaData
def _dummy_program() -> _CompiledProgram:
    """Build an empty placeholder program.

    NOTE(review): presumably intended as a stand-in when compilation of an
    artifact fails — no caller is visible in this module; confirm intent.
    """
    # local import to avoid pulling MIR models in at module import time
    from puya.mir.models import Signature

    return _CompiledProgram(
        teal=TealProgram(
            avm_version=0,
            main=TealSubroutine(
                is_main=True,
                signature=Signature(
                    name="",
                    parameters=(),
                    returns=(),
                ),
                blocks=[],
                source_location=None,
            ),
            subroutines=[],
        ),
        teal_src="",
        template_variables={},
    )
def _contract_ir_to_teal(
    context: ArtifactCompileContext, contract_ir: ir_models.Contract
) -> _CompiledContract:
    """Lower both of a contract's programs from IR through MIR to compiled TEAL."""
    # IR -> MIR for both programs first, then MIR -> TEAL for both,
    # preserving the ordering of any intermediate output files
    mir_approval = program_ir_to_mir(context, contract_ir.approval_program)
    mir_clear = program_ir_to_mir(context, contract_ir.clear_program)
    teal_approval = mir_to_teal(context, mir_approval)
    teal_clear = mir_to_teal(context, mir_clear)
    contract_ref = contract_ir.metadata.ref
    return _CompiledContract(
        approval_program=_compile_program(context, contract_ref, teal_approval),
        clear_program=_compile_program(context, contract_ref, teal_clear),
        metadata=contract_ir.metadata,
        source_location=contract_ir.source_location,
    )
def _logic_sig_to_teal(
    context: ArtifactCompileContext, logic_sig_ir: ir_models.LogicSignature
) -> _CompiledLogicSig:
    """Lower a logic signature's single program from IR through MIR to TEAL."""
    teal = mir_to_teal(context, program_ir_to_mir(context, logic_sig_ir.program))
    return _CompiledLogicSig(
        program=_compile_program(context, logic_sig_ir.metadata.ref, teal),
        metadata=logic_sig_ir.metadata,
        source_location=logic_sig_ir.source_location,
    )
def _compile_program(
    context: ArtifactCompileContext,
    ref: ContractReference | LogicSigReference,
    program: TealProgram,
) -> _CompiledProgram:
    """Assemble a TEAL program and bundle its bytecode, source text and debug info."""
    assembled = assemble_program(context, ref, program)
    return _CompiledProgram(
        teal=program,
        teal_src=emit_teal(context, program),
        bytecode=assembled.bytecode,
        debug_info=assembled.debug_info,
        template_variables=assembled.template_variables,
    )
def _write_artifacts(
    context: CompileContext, compiled_artifacts: list[CompilationArtifact]
) -> None:
    """Write all requested output files (ARC32/ARC56 app specs, TEAL, bytecode,
    source maps) for each artifact that has a configured output directory."""
    if not compiled_artifacts:
        logger.warning("No contracts or logic signatures discovered in any source files")
        return
    for artifact in compiled_artifacts:
        out_dir = context.compilation_set.get(artifact.id)
        if out_dir is None:
            # artifact was not selected for output
            continue
        teal_file_stem = artifact.metadata.name
        artifact_base_path = out_dir / teal_file_stem
        # collect per-program outputs keyed by filename suffix
        match artifact:
            case CompiledLogicSig(program=program):
                programs = {"": program}
            case CompiledContract(approval_program=approval, clear_program=clear) as contract:
                programs = {
                    ".approval": approval,
                    ".clear": clear,
                }
                # app specs only make sense for ARC4 contracts
                if contract.metadata.is_arc4:
                    if context.options.output_arc32:
                        app_spec_json = create_arc32_json(
                            approval.teal_src,
                            clear.teal_src,
                            contract.metadata,
                        )
                        _write_output(
                            artifact_base_path,
                            {".arc32.json": app_spec_json.encode("utf8")},
                        )
                    if context.options.output_arc56:
                        app_spec_json = create_arc56_json(
                            metadata=contract.metadata,
                            approval_program=approval,
                            clear_program=clear,
                            template_prefix=context.options.template_vars_prefix,
                        )
                        _write_output(
                            artifact_base_path,
                            {".arc56.json": app_spec_json.encode("utf8")},
                        )
            case _:
                typing.assert_never(artifact)
        if context.options.output_teal:
            _write_output(
                artifact_base_path,
                {
                    f"{suffix}.teal": program.teal_src.encode("utf8")
                    for suffix, program in programs.items()
                },
            )
        if context.options.output_bytecode:
            # bytecode may be None (e.g. unresolved template variables); _write_output logs that
            _write_output(
                artifact_base_path,
                {f"{suffix}.bin": program.bytecode for suffix, program in programs.items()},
            )
        if context.options.output_source_map:
            _write_output(
                artifact_base_path,
                {
                    f"{suffix}.puya.map": (
                        _debug_info_as_json(program.debug_info, out_dir)
                        if program.debug_info
                        else None
                    )
                    for suffix, program in programs.items()
                },
            )
# cattrs converter for serializing DebugInfo; default values are omitted from output
_debug_info_converter = make_converter(omit_if_default=True)
def _debug_info_as_json(info: DebugInfo, base_path: Path) -> bytes:
    """Serialize `info` to JSON bytes, rewriting source paths relative to `base_path`."""
    # make sources relative to output
    info = attrs.evolve(
        info,
        sources=[
            make_path_relative_to(path=Path(s), to=base_path, walk_up=True) for s in info.sources
        ],
    )
    json = _debug_info_converter.dumps(info, DebugInfo, indent=2)
    return json.encode("utf-8")
def _write_output(base_path: Path, programs: dict[str, bytes | None]) -> None:
    """Write each blob to `base_path` with its suffix; log critically for None blobs."""
    for suffix, content in programs.items():
        target = base_path.with_suffix(suffix)
        if content is None:
            # content unavailable (e.g. bytecode couldn't be produced)
            logger.critical(f"Unable to output {make_path_relative_to_cwd(target)}")
            continue
        logger.info(f"Writing {make_path_relative_to_cwd(target)}")
        target.write_bytes(content)
|
algorandfoundation/puya
|
src/puya/compile.py
|
Python
|
NOASSERTION
| 16,225 |
import typing
from collections.abc import Mapping, Sequence
from pathlib import Path
import attrs
from immutabledict import immutabledict
from puya import log
from puya.artifact_metadata import StateTotals
from puya.compilation_artifacts import TemplateValue
from puya.options import PuyaOptions
from puya.parse import SourceLocation
from puya.program_refs import ContractReference, LogicSigReference, ProgramKind
logger = log.get_logger(__name__)
@attrs.define(kw_only=True)
class CompileContext:
    """Shared state for a compilation run."""

    options: PuyaOptions
    # artifacts selected for output, mapped to their output directories
    compilation_set: Mapping[ContractReference | LogicSigReference, Path]
    # source lines per file (None when unavailable), used for error snippets
    sources_by_path: Mapping[Path, Sequence[str] | None]
    def try_get_source(self, location: SourceLocation | None) -> Sequence[str] | None:
        """Return the source snippet for `location`, or None (see module-level helper)."""
        return try_get_source(self.sources_by_path, location)
def try_get_source(
    sources_by_path: Mapping[Path, Sequence[str] | None], location: SourceLocation | None
) -> Sequence[str] | None:
    """Extract the source lines covered by `location`, trimmed to its columns.

    Returns None when the location is absent, has no file, or the file's source
    lines are unavailable; returns an empty list (with a warning) when the
    location's line range is out of bounds.
    """
    if not location or not location.file:
        return None
    file_lines = sources_by_path.get(location.file)
    if file_lines is None:
        return None
    snippet = list(file_lines[location.line - 1 : location.end_line])
    if not snippet:
        logger.warning(f"could not locate source: {location}", location=None)
        return snippet
    # trim the end first so the start-column trim can't shift what "last line" means
    if location.end_column is not None:
        snippet[-1] = snippet[-1][: location.end_column]
    if location.column is not None:
        snippet[0] = snippet[0][location.column :]
    return snippet
class CompiledProgramProvider(typing.Protocol):
    """Structural interface for resolving compiled-program data by reference."""

    def build_program_bytecode(
        self,
        ref: ContractReference | LogicSigReference,
        kind: ProgramKind,
        *,
        template_constants: immutabledict[str, TemplateValue],
    ) -> bytes:
        """Assemble bytecode for the referenced program with the given template values."""
        ...
    def get_state_totals(self, ref: ContractReference) -> StateTotals:
        """Return the state totals of the referenced contract."""
        ...
class OutputPathProvider(typing.Protocol):
    """Callable that yields an output file path for a given kind/qualifier/suffix."""

    def __call__(self, *, kind: str, qualifier: str, suffix: str) -> Path: ...
@attrs.define(kw_only=True)
class ArtifactCompileContext(CompileContext):
    """Per-artifact compile context: adds output paths and compiled-program access."""

    # frozen after construction: providers must not be swapped mid-compilation
    _compiled_program_provider: CompiledProgramProvider = attrs.field(
        on_setattr=attrs.setters.frozen
    )
    # None when this artifact produces no file output
    _output_path_provider: OutputPathProvider | None = attrs.field(on_setattr=attrs.setters.frozen)
    def build_output_path(self, kind: str, qualifier: str, suffix: str) -> Path | None:
        """Return an output path for this artifact, or None when output is disabled."""
        if self._output_path_provider is None:
            return None
        return self._output_path_provider(kind=kind, qualifier=qualifier, suffix=suffix)
    # overloads pair each reference type with its valid program kinds
    @typing.overload
    def build_program_bytecode(
        self,
        ref: ContractReference,
        kind: typing.Literal[ProgramKind.approval, ProgramKind.clear_state],
        *,
        template_constants: immutabledict[str, TemplateValue],
    ) -> bytes: ...
    @typing.overload
    def build_program_bytecode(
        self,
        ref: LogicSigReference,
        kind: typing.Literal[ProgramKind.logic_signature],
        *,
        template_constants: immutabledict[str, TemplateValue],
    ) -> bytes: ...
    def build_program_bytecode(
        self,
        ref: ContractReference | LogicSigReference,
        kind: ProgramKind,
        *,
        template_constants: immutabledict[str, TemplateValue],
    ) -> bytes:
        """Delegate bytecode assembly to the configured provider."""
        return self._compiled_program_provider.build_program_bytecode(
            ref, kind, template_constants=template_constants
        )
    def get_state_totals(self, ref: ContractReference) -> StateTotals:
        """Delegate state-totals lookup to the configured provider."""
        return self._compiled_program_provider.get_state_totals(ref)
|
algorandfoundation/puya
|
src/puya/context.py
|
Python
|
NOASSERTION
| 3,629 |
import contextlib
import enum
import sys
import traceback
from collections.abc import Iterator
from puya import log
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
class ErrorExitCode(enum.IntEnum):
    """Process exit codes: 1 for errors in the compiled code, 2 for compiler faults."""

    code = 1
    internal = 2
class PuyaError(Exception):
    """Base for compiler-raised errors: a message plus an optional source location."""

    def __init__(self, msg: str, location: SourceLocation | None = None):
        # keep the raw message and location available for structured logging
        self.msg = msg
        self.location = location
        super().__init__(msg)
class InternalError(PuyaError):
    """Base class for all exceptions that indicate a fault in the compiler.

    When caught by `log_exceptions`, terminates with ErrorExitCode.internal.
    """
class CodeError(PuyaError):
    """Base class for all exceptions that indicate a fault in the code being compiled.

    When caught by `log_exceptions`, logged as an error; execution continues.
    """
@contextlib.contextmanager
def log_exceptions(fallback_location: SourceLocation | None = None) -> Iterator[None]:
    """Context manager converting raised exceptions into log output.

    CodeError is logged as an error and execution continues past the block;
    InternalError and any unexpected exception are logged as critical (with a
    debug-level traceback) and terminate the process with exit code `internal`.
    `fallback_location` is used when the exception carries no location.
    """
    try:
        yield
    except CodeError as ex:
        logger.error(ex.msg, location=ex.location or fallback_location)  # noqa: TRY400
    except InternalError as ex:
        _log_traceback()
        logger.critical(ex.msg, location=ex.location or fallback_location)
        sys.exit(ErrorExitCode.internal)
    except Exception as ex:
        _log_traceback()
        logger.critical(f"{type(ex).__name__}: {ex}", location=fallback_location)
        sys.exit(ErrorExitCode.internal)
def _log_traceback() -> None:
    """Emit the active exception's formatted traceback at debug level."""
    logger.debug(traceback.format_exc().rstrip("\n"))
|
algorandfoundation/puya
|
src/puya/errors.py
|
Python
|
NOASSERTION
| 1,408 |
# Identifiers for the IR output stages.
# NOTE(review): presumably these select which IR dumps are emitted
# (initial SSA form / per-optimization-pass / final) — confirm against CLI options.
IR_SSA = "ssa"
IR_OPTIMIZATION = "pass"
IR_FINAL = "final"
IR_ALL = (IR_SSA, IR_OPTIMIZATION, IR_FINAL)
|
algorandfoundation/puya
|
src/puya/ir/__init__.py
|
Python
|
NOASSERTION
| 105 |
import typing
from collections.abc import Mapping
import attrs
import puya.ir.models as ir
from puya import (
artifact_metadata as models,
log,
)
from puya.avm import AVMType, OnCompletionAction
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.arc4_types import maybe_avm_to_arc4_equivalent_type, wtype_to_arc4
from puya.context import CompiledProgramProvider
from puya.errors import CodeError
from puya.ir._utils import make_subroutine
from puya.ir.builder.main import FunctionIRBuilder
from puya.ir.context import IRBuildContext
from puya.ir.optimize.context import IROptimizationContext
from puya.ir.optimize.main import get_subroutine_optimizations
from puya.options import PuyaOptions
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
def convert_default_args(
    ctx: IRBuildContext,
    contract: awst_nodes.Contract,
    m: awst_nodes.ContractMethod,
    config: awst_nodes.ARC4ABIMethodConfig,
) -> Mapping[awst_nodes.SubroutineArgument, models.MethodArgDefault | None]:
    """Resolve the client-default metadata for every argument of an ABI method.

    Arguments without a declared default map to None.
    """
    state_lookup = {state.member_name: state for state in contract.app_state}
    resolved = dict[awst_nodes.SubroutineArgument, models.MethodArgDefault | None]()
    for arg in m.args:
        resolved[arg] = _convert_default_arg(
            ctx,
            contract,
            state_lookup,
            arg,
            config.default_args.get(arg.name),
            method_id=m.full_name,
        )
    return resolved
def _convert_default_arg(
    ctx: IRBuildContext,
    contract: awst_nodes.Contract,
    state_by_name: Mapping[str, awst_nodes.AppStorageDefinition],
    param: awst_nodes.SubroutineArgument,
    default: awst_nodes.ABIMethodArgDefault | None,
    *,
    method_id: str,
) -> models.MethodArgDefault | None:
    """Translate one AWST default-argument declaration into its metadata form.

    Member defaults resolve against state/methods; constant defaults are
    compile-time evaluated. Unresolvable defaults are logged and become None.
    """
    if default is None:
        return None
    if isinstance(default, awst_nodes.ABIMethodArgMemberDefault):
        resolved = _convert_member_arg_default(contract, state_by_name, param, default.name)
        if isinstance(resolved, str):
            # a str result is an error description rather than a resolved default
            logger.error(resolved, location=param.source_location)
            return None
        return resolved
    if isinstance(default, awst_nodes.ABIMethodArgConstantDefault):
        return _compile_arc4_default_constant(ctx, method_id, param, default.value)
    return None
def _convert_member_arg_default(
    contract: awst_nodes.Contract,
    state_by_name: Mapping[str, awst_nodes.AppStorageDefinition],
    param: awst_nodes.SubroutineArgument,
    member_name: str,
) -> models.MethodArgDefaultFromState | models.MethodArgDefaultFromMethod | str:
    """Resolve a "default from member" declaration to state or method metadata.

    Returns a str describing the problem when the named member cannot supply a
    compatible default for the parameter.
    """
    param_arc4_type = wtype_to_arc4(param.wtype)
    # special handling for reference types
    match param_arc4_type:
        case "asset" | "application":
            param_arc4_type = "uint64"
        case "account":
            param_arc4_type = "address"
    if (state_source := state_by_name.get(member_name)) is not None:
        # member is app state: the client reads the stored value as the default
        if state_source.key_wtype is not None:
            return "state member is a map"
        storage_type = wtypes.persistable_stack_type(
            state_source.storage_wtype, param.source_location
        )
        # verify the AVM-level storage type can actually supply the declared ARC-4 type
        if (
            storage_type is AVMType.uint64
            # storage can provide an int to types <= uint64
            # TODO: check what ATC does with ufixed, see if it can be added
            and not (param_arc4_type == "byte" or param_arc4_type.startswith("uint"))
        ) or (
            storage_type is AVMType.bytes
            # storage can provide fixed byte arrays
            and not (
                param_arc4_type == "address"
                or (param_arc4_type.startswith("byte[") and param_arc4_type != "byte[]")
            )
        ):
            return f"{member_name!r} cannot provide {param_arc4_type!r} type"
        return models.MethodArgDefaultFromState(
            kind=state_source.kind,
            key=state_source.key.value,
            key_type=(
                "AVMString"
                if state_source.key.encoding == awst_nodes.BytesEncoding.utf8
                else "AVMBytes"
            ),
        )
    elif (method_source := contract.resolve_contract_method(member_name)) is not None:
        # member is another method: it must be a zero-arg, readonly, no_op-callable
        # ABI method whose return type matches the parameter's ARC-4 type
        abi_method_config = method_source.arc4_method_config
        if not isinstance(abi_method_config, awst_nodes.ARC4ABIMethodConfig):
            return "only ARC-4 ABI methods can be used as default values"
        if OnCompletionAction.NoOp not in abi_method_config.allowed_completion_types:
            return f"{member_name!r} does not allow no_op on completion calls"
        if abi_method_config.create == awst_nodes.ARC4CreateOption.require:
            return f"{member_name!r} can only be used for create calls"
        if not abi_method_config.readonly:
            return f"{member_name!r} is not readonly"
        if method_source.args:
            return f"{member_name!r} does not take zero arguments"
        if method_source.return_type is wtypes.void_wtype:
            return f"{member_name!r} does not provide a value"
        return_type_arc4 = wtype_to_arc4(method_source.return_type)
        if return_type_arc4 != param_arc4_type:
            return f"{member_name!r} does not provide {param_arc4_type!r} type"
        return models.MethodArgDefaultFromMethod(
            name=abi_method_config.name,
            return_type=return_type_arc4,
            readonly=abi_method_config.readonly,
        )
    else:
        return f"{member_name!r} is not a known state or method attribute"
def _compile_arc4_default_constant(
    ctx: IRBuildContext,
    method_id: str,
    param: awst_nodes.SubroutineArgument,
    expr: awst_nodes.Expression,
) -> models.MethodArgDefaultConstant | None:
    """Evaluate a constant default-value expression to its ARC-4 encoded bytes.

    The expression is wrapped in a synthetic zero-arg subroutine, lowered to IR,
    and (if needed) optimized until the body collapses to a single bytes
    constant. Logs an error and returns None if the value cannot be determined.
    """
    location = expr.source_location
    logger.debug("Building IR for ARC4 method argument default constant", location=location)
    if param.wtype != expr.wtype:
        logger.error("mismatch between parameter type and default value type", location=location)
        return None
    if isinstance(expr.wtype, wtypes.ARC4Type):
        arc4_type_name = expr.wtype.arc4_name
    else:
        # native type: must have an ARC-4 equivalent, and needs an explicit encode step
        arc4_type = maybe_avm_to_arc4_equivalent_type(expr.wtype)
        if arc4_type is None:
            logger.error("unsupported type for argument default", location=location)
            return None
        expr = awst_nodes.ARC4Encode(value=expr, wtype=arc4_type, source_location=location)
        arc4_type_name = arc4_type.arc4_name
    # build a synthetic subroutine that just returns the (encoded) expression
    fake_name = f"#default:{param.name}"
    awst_subroutine = awst_nodes.Subroutine(
        id=method_id + fake_name,
        name=fake_name,
        source_location=expr.source_location,
        args=[],
        return_type=expr.wtype,
        body=awst_nodes.Block(
            body=[awst_nodes.ReturnStatement(value=expr, source_location=expr.source_location)],
            source_location=expr.source_location,
        ),
        documentation=awst_nodes.MethodDocumentation(),
        inline=False,
    )
    ir_subroutine = make_subroutine(awst_subroutine, allow_implicits=False)
    FunctionIRBuilder.build_body(ctx, awst_subroutine, ir_subroutine)
    # cheap check first; only run the optimizer when the value isn't already a constant
    bytes_result = _try_extract_byte_constant(ir_subroutine)
    if bytes_result is None:
        _optimize_subroutine(ctx, ir_subroutine, location)
        bytes_result = _try_extract_byte_constant(ir_subroutine)
    if bytes_result is None:
        logger.error("could not determine constant value", location=location)
        return None
    return models.MethodArgDefaultConstant(data=bytes_result, type_=arc4_type_name)
def _optimize_subroutine(
    ctx: IRBuildContext, subroutine: ir.Subroutine, location: SourceLocation
) -> None:
    """Run the subroutine-level optimizer pipeline to a fixpoint.

    Used to fold a default-value expression down to a constant; optimization
    level is forced to at least 2 so the relevant passes are enabled.
    """
    opt_level = max(2, ctx.options.optimization_level)
    logger.debug(
        f"Running optimizer at level {opt_level}"
        f" to encode compile time constant to bytes",
        location=location,
    )
    options = PuyaOptions(
        optimization_level=opt_level,
        target_avm_version=ctx.options.target_avm_version,
    )
    pass_context = IROptimizationContext(
        compilation_set=ctx.compilation_set,
        sources_by_path=ctx.sources_by_path,
        options=options,
        # compilation references can't be resolved here, so any use is an error
        compiled_program_provider=_NoCompiledProgramProvider(location),
        output_path_provider=None,
        expand_all_bytes=True,
    )
    pipeline = get_subroutine_optimizations(optimization_level=opt_level)
    # fixpoint: repeat full passes until no optimizer reports a modification
    # (list comprehension deliberately avoids short-circuiting, so every
    # optimizer runs in every pass)
    while any([optimizer.optimize(pass_context, subroutine) for optimizer in pipeline]):
        pass
def _try_extract_byte_constant(subroutine: ir.Subroutine) -> bytes | None:
    """Return the constant value when the subroutine reduces to `return <bytes constant>`.

    Matches only a body consisting of a single basic block with no phis or ops,
    whose terminator returns exactly one BytesConstant; anything else yields None.
    """
    match subroutine.body:
        case [
            ir.BasicBlock(
                phis=[],
                ops=[],
                terminator=ir.SubroutineReturn(result=[ir.BytesConstant(value=result)]),
            )
        ]:
            return result
    return None
@attrs.frozen
class _NoCompiledProgramProvider(CompiledProgramProvider):
    """CompiledProgramProvider stand-in that rejects every compilation reference.

    Used while evaluating method default constants, where referencing another
    compiled program is not supported.
    """

    source_location: SourceLocation

    def _unsupported(self) -> CodeError:
        # both provider operations fail identically, so build the error in one place
        return CodeError(
            "compilation references are not supported as method default constants",
            self.source_location,
        )

    @typing.override
    def build_program_bytecode(self, *args: object, **kwargs: object) -> typing.Never:
        raise self._unsupported()

    @typing.override
    def get_state_totals(self, *args: object, **kwargs: object) -> typing.Never:
        raise self._unsupported()
|
algorandfoundation/puya
|
src/puya/ir/_arc4_default_args.py
|
Python
|
NOASSERTION
| 9,586 |
import contextlib
import typing
from collections import Counter
from collections.abc import Iterable, Iterator, Mapping, Sequence
from operator import itemgetter
import attrs
from immutabledict import immutabledict
import puya.artifact_metadata as models
from puya import algo_constants, log
from puya.avm import AVMType
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.arc4_types import maybe_avm_to_arc4_equivalent_type, wtype_to_arc4
from puya.awst.function_traverser import FunctionTraverser
from puya.errors import InternalError
from puya.ir._arc4_default_args import convert_default_args
from puya.ir.context import IRBuildContext
from puya.parse import SourceLocation
from puya.utils import StableSet, set_add, unique
__all__ = [
"build_contract_metadata",
]
logger = log.get_logger(__name__)
def build_contract_metadata(
    ctx: IRBuildContext, contract: awst_nodes.Contract
) -> tuple[
    models.ContractMetaData,
    dict[awst_nodes.ContractMethod, models.ARC4Method],
]:
    """Collect contract metadata (state, methods, structs, template vars).

    Returns the assembled metadata plus a mapping of each routable contract
    method to its ARC-4 method metadata.
    """
    # partition state definitions by storage kind
    global_state = dict[str, models.ContractState]()
    local_state = dict[str, models.ContractState]()
    boxes = dict[str, models.ContractState]()
    for state in contract.app_state:
        translated = _translate_state(state)
        match state.kind:
            case awst_nodes.AppStorageKind.app_global:
                global_state[state.member_name] = translated
            case awst_nodes.AppStorageKind.account_local:
                local_state[state.member_name] = translated
            case awst_nodes.AppStorageKind.box:
                boxes[state.member_name] = translated
            case unexpected:
                typing.assert_never(unexpected)
    state_totals = _build_state_totals(
        contract.state_totals,
        global_state=global_state,
        local_state=local_state,
        location=contract.source_location,
    )
    arc4_method_data, type_refs = _extract_arc4_methods_and_type_refs(ctx, contract)
    # any struct types referenced by state or ABI methods get their own definitions
    structs = _extract_structs(type_refs)
    template_var_types = _TemplateVariableTypeCollector.collect(ctx, contract, arc4_method_data)
    metadata = models.ContractMetaData(
        description=contract.description,
        name=contract.name,
        ref=contract.id,
        arc4_methods=list(arc4_method_data.values()),
        global_state=immutabledict(global_state),
        local_state=immutabledict(local_state),
        boxes=immutabledict(boxes),
        state_totals=state_totals,
        structs=immutabledict(structs),
        template_variable_types=immutabledict(template_var_types),
    )
    return metadata, arc4_method_data
def _translate_state(
    state: awst_nodes.AppStorageDefinition,
) -> models.ContractState:
    """Translate an AWST storage definition into its metadata description."""
    storage_type = wtypes.persistable_stack_type(state.storage_wtype, state.source_location)
    if state.key_wtype is not None:
        # a key wtype means this is a map (key prefix + per-key entries);
        # only box storage supports maps currently
        if state.kind is not awst_nodes.AppStorageKind.box:
            raise InternalError(
                f"maps of {state.kind} are not supported by IR backend yet", state.source_location
            )
        arc56_key_type = _get_arc56_type(state.key_wtype, state.source_location)
        is_map = True
    else:
        # plain (non-map) storage: key type is derived from the key's encoding
        arc56_key_type = (
            "AVMString" if state.key.encoding == awst_nodes.BytesEncoding.utf8 else "AVMBytes"
        )
        is_map = False
    arc56_value_type = _get_arc56_type(state.storage_wtype, state.source_location)
    return models.ContractState(
        name=state.member_name,
        source_location=state.source_location,
        key_or_prefix=state.key.value,
        arc56_key_type=arc56_key_type,
        arc56_value_type=arc56_value_type,
        storage_type=storage_type,
        description=state.description,
        is_map=is_map,
    )
def _build_state_totals(
    declared_totals: awst_nodes.StateTotals | None,
    *,
    global_state: Mapping[str, models.ContractState],
    local_state: Mapping[str, models.ContractState],
    location: SourceLocation,
) -> models.StateTotals:
    """Combine calculated state totals with any explicitly declared totals.

    Declared values always win, but a warning is emitted when they are lower
    than what the declared state properties require, or when the combined
    totals exceed consensus limits.
    """
    global_by_type = Counter(s.storage_type for s in global_state.values())
    local_by_type = Counter(s.storage_type for s in local_state.values())
    merged = models.StateTotals(
        global_uints=global_by_type[AVMType.uint64],
        global_bytes=global_by_type[AVMType.bytes],
        local_uints=local_by_type[AVMType.uint64],
        local_bytes=local_by_type[AVMType.bytes],
    )
    if declared_totals is not None:
        insufficient_fields = []
        # only int-valued fields participate; unset/None declarations are skipped
        declared_dict = attrs.asdict(declared_totals, filter=attrs.filters.include(int))
        for field, declared in declared_dict.items():
            calculated = getattr(merged, field)
            if declared < calculated:
                insufficient_fields.append(f"{field}: {declared=}, {calculated=}")
            merged = attrs.evolve(merged, **{field: declared})
        if insufficient_fields:
            logger.warning(
                f"State totals declared on the class are less than totals calculated from"
                f" explicitly declared properties: {', '.join(sorted(insufficient_fields))}.",
                location=location,
            )
    global_total = merged.global_uints + merged.global_bytes
    local_total = merged.local_uints + merged.local_bytes
    if global_total > algo_constants.MAX_GLOBAL_STATE_KEYS:
        logger.warning(
            f"Total global state key count of {global_total}"
            f" exceeds consensus parameter value {algo_constants.MAX_GLOBAL_STATE_KEYS}",
            location=location,
        )
    if local_total > algo_constants.MAX_LOCAL_STATE_KEYS:
        logger.warning(
            f"Total local state key count of {local_total}"
            f" exceeds consensus parameter value {algo_constants.MAX_LOCAL_STATE_KEYS}",
            location=location,
        )
    return merged
class _TemplateVariableTypeCollector(FunctionTraverser):
    """Walks functions (following subroutine calls) to collect template variables.

    Records each TemplateVar by name and reports an error when the same name is
    used with inconsistent types.
    """

    def __init__(self, context: IRBuildContext) -> None:
        self.context = context
        # name -> first TemplateVar seen with that name
        self.vars = dict[str, awst_nodes.TemplateVar]()
        # guards against re-visiting (and infinite recursion on cycles)
        self._seen_functions = set[awst_nodes.Function]()
        self._func_stack = list[awst_nodes.Function]()

    def process_func(self, func: awst_nodes.Function) -> None:
        """Visit func's body once; subsequent calls for the same func are no-ops."""
        if set_add(self._seen_functions, func):
            with self._enter_func(func):
                func.body.accept(self)

    @classmethod
    def collect(
        cls,
        context: IRBuildContext,
        contract: awst_nodes.Contract,
        routable_methods: Iterable[awst_nodes.ContractMethod],
    ) -> dict[str, str]:
        """Return a mapping of template variable name to its ARC-56 type name."""
        collector = cls(context)
        for function in (contract.approval_program, contract.clear_program, *routable_methods):
            collector.process_func(function)
        return {
            name: _get_arc56_type(var.wtype, var.source_location)
            for name, var in collector.vars.items()
        }

    def visit_template_var(self, var: awst_nodes.TemplateVar) -> None:
        try:
            existing = self.vars[var.name]
        except KeyError:
            self.vars[var.name] = var
        else:
            # same name must always carry the same type
            if existing.wtype != var.wtype:
                logger.error(
                    "inconsistent types specified for template var",
                    location=var.source_location,
                )
                logger.info("other template var", location=existing.source_location)

    @contextlib.contextmanager
    def _enter_func(self, func: awst_nodes.Function) -> Iterator[None]:
        self._func_stack.append(func)
        try:
            yield
        finally:
            self._func_stack.pop()

    @property
    def current_func(self) -> awst_nodes.Function:
        # the function currently being traversed (innermost)
        return self._func_stack[-1]

    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        # follow calls so template vars in transitively-called subroutines are found
        target = self.context.resolve_function_reference(
            expr.target,
            expr.source_location,
            caller=self.current_func,
        )
        self.process_func(target)
def _extract_arc4_methods_and_type_refs(
    ctx: IRBuildContext, contract: awst_nodes.Contract
) -> tuple[dict[awst_nodes.ContractMethod, models.ARC4Method], list[wtypes.WType]]:
    """Gather ARC-4 method metadata and every WType referenced by state or methods.

    The returned type refs feed struct extraction; bare methods contribute no
    type refs, ABI methods contribute their return/arg/event types.
    """
    event_collector = _EventCollector(ctx)
    # state key/value types are always referenced
    type_refs = [
        typ
        for state in contract.app_state
        for typ in (state.key_wtype, state.storage_wtype)
        if typ is not None
    ]
    methods = dict[awst_nodes.ContractMethod, models.ARC4Method]()
    for method_name in unique(m.member_name for m in contract.methods):
        m = contract.resolve_contract_method(method_name)
        assert m is not None  # shouldn't logically be possible
        match m.arc4_method_config:
            case None:
                pass  # not routable, no metadata required
            case awst_nodes.ARC4BareMethodConfig() as bare_method_config:
                methods[m] = models.ARC4BareMethod(
                    id=m.full_name, desc=m.documentation.description, config=bare_method_config
                )
            case abi_method_config:
                # events are collected transitively through subroutine calls
                event_wtypes = event_collector.process_func(m)
                events = list(map(_wtype_to_struct, event_wtypes))
                methods[m] = _abi_method_metadata(ctx, contract, m, abi_method_config, events)
                if m.return_type is not None:
                    type_refs.append(m.return_type)
                type_refs.extend(arg.wtype for arg in m.args)
                for event_struct in event_wtypes:
                    type_refs.extend(event_struct.types)
    return methods, type_refs
def _extract_structs(
    typ_refs: Sequence[wtypes.WType],
) -> dict[str, models.ARC4Struct]:
    """Build a unique name -> ARC4Struct mapping for every struct in typ_refs.

    Struct-typed fields are followed transitively via a worklist; the result
    is sorted by struct name.
    """
    pending = [t for t in typ_refs if _is_arc4_struct(t)]
    collected = dict[str, models.ARC4Struct]()
    while pending:
        candidate = pending.pop()
        if candidate.name in collected:
            continue  # already translated via another reference
        for field_wtype in candidate.fields.values():
            if _is_arc4_struct(field_wtype) and field_wtype.name not in collected:
                pending.append(field_wtype)
        collected[candidate.name] = _wtype_to_struct(candidate)
    return {name: collected[name] for name in sorted(collected)}
def _abi_method_metadata(
    ctx: IRBuildContext,
    contract: awst_nodes.Contract,
    m: awst_nodes.ContractMethod,
    config: awst_nodes.ARC4ABIMethodConfig,
    events: Sequence[models.ARC4Struct],
) -> models.ARC4ABIMethod:
    """Assemble the ARC-4 ABI metadata for a single contract method."""
    assert config is m.arc4_method_config
    default_args = convert_default_args(ctx, contract, m, config)
    args = [
        models.ARC4MethodArg(
            name=a.name,
            type_=wtype_to_arc4(a.wtype),
            struct=_get_arc4_struct_name(a.wtype),
            desc=m.documentation.args.get(a.name),
            client_default=default_args[a],
        )
        for a in m.args
    ]
    returns = models.ARC4Returns(
        desc=m.documentation.returns,
        type_=wtype_to_arc4(m.return_type),
        struct=_get_arc4_struct_name(m.return_type),
    )
    return models.ARC4ABIMethod(
        id=m.full_name,
        desc=m.documentation.description,
        args=args,
        returns=returns,
        events=events,
        config=config,
    )
def _is_arc4_struct(
    wtype: wtypes.WType | None,
) -> typing.TypeGuard[wtypes.ARC4Struct | wtypes.WTuple]:
    """True when wtype is struct-like (ARC4Struct or WTuple) with at least one field."""
    if not isinstance(wtype, wtypes.ARC4Struct | wtypes.WTuple):
        return False
    return bool(wtype.fields)
def _get_arc4_struct_name(wtype: wtypes.WType) -> str | None:
    """Return the struct name for struct-like types, otherwise None."""
    if _is_arc4_struct(wtype):
        return wtype.name
    return None
def _wtype_to_struct(s: wtypes.ARC4Struct | wtypes.WTuple) -> models.ARC4Struct:
    """Translate a struct-like wtype into its metadata description.

    Native field types are converted to their ARC-4 equivalents; a field with
    no equivalent indicates a compiler fault.
    """
    fields = []
    assert s.fields
    for field_name, field_wtype in s.fields.items():
        if not isinstance(field_wtype, wtypes.ARC4Type):
            maybe_arc4_field_wtype = maybe_avm_to_arc4_equivalent_type(field_wtype)
            if maybe_arc4_field_wtype is None:
                raise InternalError("expected ARC4 type")
            field_wtype = maybe_arc4_field_wtype
        fields.append(
            models.ARC4StructField(
                name=field_name,
                type=field_wtype.arc4_name,
                struct=_get_arc4_struct_name(field_wtype),
            )
        )
    return models.ARC4Struct(fullname=s.name, desc=s.desc, fields=fields)
@attrs.frozen
class _EventCollector(FunctionTraverser):
    """Collects the ARC-4 event structs emitted by a function, transitively.

    Results are memoized per function in `emits`; the `emits` entry also acts
    as a visited marker, preventing infinite recursion on call cycles.
    """

    context: IRBuildContext
    emits: dict[awst_nodes.Function, StableSet[wtypes.ARC4Struct]] = attrs.field(
        factory=dict, init=False
    )
    _func_stack: list[awst_nodes.Function] = attrs.field(factory=list)

    def process_func(self, func: awst_nodes.Function) -> StableSet[wtypes.ARC4Struct]:
        """Return the set of event structs emitted by func (cached after first visit)."""
        if func not in self.emits:
            self.emits[func] = StableSet[wtypes.ARC4Struct]()
            with self._enter_func(func):
                func.body.accept(self)
        return self.emits[func]

    @contextlib.contextmanager
    def _enter_func(self, func: awst_nodes.Function) -> Iterator[None]:
        self._func_stack.append(func)
        try:
            yield
        finally:
            self._func_stack.pop()

    @property
    def current_func(self) -> awst_nodes.Function:
        # the function currently being traversed (innermost)
        return self._func_stack[-1]

    def visit_emit(self, emit: awst_nodes.Emit) -> None:
        assert isinstance(emit.value.wtype, wtypes.ARC4Struct)
        self.emits[self.current_func].add(emit.value.wtype)

    def visit_subroutine_call_expression(self, expr: awst_nodes.SubroutineCallExpression) -> None:
        # events emitted by callees count towards the caller too
        target = self.context.resolve_function_reference(
            expr.target,
            expr.source_location,
            caller=self.current_func,
        )
        self.emits[self.current_func] |= self.process_func(target)
def _get_arc56_type(wtype: wtypes.WType, loc: SourceLocation) -> str:
if isinstance(wtype, wtypes.ARC4Struct):
return wtype.name
if isinstance(wtype, wtypes.ARC4Type):
return wtype.arc4_name
if wtype == wtypes.string_wtype:
return "AVMString"
storage_type = wtypes.persistable_stack_type(wtype, loc)
match storage_type:
case AVMType.uint64:
return "AVMUint64"
case AVMType.bytes:
return "AVMBytes"
|
algorandfoundation/puya
|
src/puya/ir/_contract_metadata.py
|
Python
|
NOASSERTION
| 14,260 |
from collections import deque
from collections.abc import Iterator
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.ir import models
from puya.ir.models import Parameter, Subroutine
from puya.ir.types_ import wtype_to_ir_type, wtype_to_ir_types
from puya.ir.utils import format_tuple_index
from puya.parse import SourceLocation
from puya.utils import set_add
def bfs_block_order(start: models.BasicBlock) -> Iterator[models.BasicBlock]:
    """Yield blocks reachable from start in breadth-first order, each exactly once."""
    pending = deque((start,))
    seen = {start}
    while pending:
        current = pending.popleft()
        yield current
        for successor in current.successors:
            if set_add(seen, successor):
                pending.append(successor)
def make_subroutine(func: awst_nodes.Function, *, allow_implicits: bool) -> Subroutine:
    """Pre-construct subroutine with an empty body.

    Tuple-typed arguments are expanded into one parameter per element; when
    allow_implicits is set, mutable parameters become implicit returns.
    """
    parameters = [
        param
        for arg in func.args
        for param in _expand_tuple_parameters(
            arg.name,
            arg.wtype,
            allow_implicits=allow_implicits,
            source_location=arg.source_location,
        )
    ]
    returns = wtype_to_ir_types(func.return_type)
    return Subroutine(
        id=func.full_name,
        short_name=func.short_name,
        parameters=parameters,
        returns=returns,
        body=[],
        inline=func.inline,
        source_location=func.source_location,
    )
def _expand_tuple_parameters(
    name: str, typ: wtypes.WType, *, allow_implicits: bool, source_location: SourceLocation | None
) -> Iterator[Parameter]:
    """Recursively flatten a (possibly nested) tuple parameter into scalar Parameters."""
    if isinstance(typ, wtypes.WTuple):
        for item_idx, item_type in enumerate(typ.types):
            # derive a stable per-element name from the tuple name and index
            item_name = format_tuple_index(typ, name, item_idx)
            yield from _expand_tuple_parameters(
                item_name,
                item_type,
                allow_implicits=allow_implicits,
                source_location=source_location,
            )
    else:
        yield Parameter(
            name=name,
            ir_type=wtype_to_ir_type(typ),
            version=0,
            # mutable parameters are implicitly returned so caller copies stay in sync
            implicit_return=allow_implicits and not typ.immutable,
            source_location=source_location,
        )
|
algorandfoundation/puya
|
src/puya/ir/_utils.py
|
Python
|
NOASSERTION
| 2,124 |
import typing
from collections.abc import Iterable, Mapping, Sequence
import attrs
from puya import (
artifact_metadata as md,
log,
)
from puya.avm import OnCompletionAction
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.arc4_types import maybe_avm_to_arc4_equivalent_type
from puya.errors import CodeError
from puya.parse import SourceLocation
from puya.utils import set_add
__all__ = [
"create_abi_router",
"AWSTContractMethodSignature",
]
logger = log.get_logger(__name__)
# OnCompletionActions that can reach an approval program.
# NOTE(review): ClearState is absent — presumably handled by the clear-state
# program rather than routed here; confirm against the router's callers.
ALL_VALID_APPROVAL_ON_COMPLETION_ACTIONS = {
    OnCompletionAction.NoOp,
    OnCompletionAction.OptIn,
    OnCompletionAction.CloseOut,
    OnCompletionAction.UpdateApplication,
    OnCompletionAction.DeleteApplication,
}
@attrs.frozen(kw_only=True)
class AWSTContractMethodSignature:
    """Target and type signature of a contract method, as needed to emit routing calls."""

    target: awst_nodes.ContractMethodTarget
    parameter_types: Sequence[wtypes.WType]
    return_type: wtypes.WType
def _btoi(
    bytes_arg: awst_nodes.Expression, location: SourceLocation | None = None
) -> awst_nodes.IntrinsicCall:
    """Wrap bytes_arg in a `btoi` intrinsic (bytes -> uint64)."""
    loc = location or bytes_arg.source_location
    return awst_nodes.IntrinsicCall(
        op_code="btoi",
        wtype=wtypes.uint64_wtype,
        stack_args=[bytes_arg],
        source_location=loc,
    )
def _txn(
    immediate: str, wtype: wtypes.WType, location: SourceLocation
) -> awst_nodes.IntrinsicCall:
    """Build a `txn <immediate>` intrinsic reading a field of the current transaction."""
    return awst_nodes.IntrinsicCall(
        op_code="txn",
        immediates=[immediate],
        wtype=wtype,
        source_location=location,
    )
def _txn_app_args(index: int, loc: SourceLocation) -> awst_nodes.IntrinsicCall:
    """Build a `txna ApplicationArgs <index>` intrinsic (bytes result)."""
    return awst_nodes.IntrinsicCall(
        op_code="txna",
        immediates=["ApplicationArgs", index],
        source_location=loc,
        wtype=wtypes.bytes_wtype,
    )
def create_block(
    location: SourceLocation, comment: str | None, *stmts: awst_nodes.Statement
) -> awst_nodes.Block:
    """Construct a Block from statements, labelled with an optional comment."""
    return awst_nodes.Block(body=stmts, comment=comment, source_location=location)
def call(
    location: SourceLocation, sig: AWSTContractMethodSignature, *args: awst_nodes.Expression
) -> awst_nodes.SubroutineCallExpression:
    """Build a positional subroutine call to sig with the given argument expressions."""
    call_args = [awst_nodes.CallArg(name=None, value=value) for value in args]
    return awst_nodes.SubroutineCallExpression(
        target=sig.target,
        args=call_args,
        wtype=sig.return_type,
        source_location=location,
    )
def app_arg(
    index: int,
    wtype: wtypes.WType,
    location: SourceLocation,
) -> awst_nodes.Expression:
    """Read ApplicationArgs[index], reinterpreted as wtype when it isn't plain bytes."""
    raw = _txn_app_args(index, location)
    if wtype == wtypes.bytes_wtype:
        return raw
    return awst_nodes.ReinterpretCast(
        expr=raw,
        wtype=wtype,
        source_location=location,
    )
def _non_zero(value: awst_nodes.Expression) -> awst_nodes.Expression:
    """Build the comparison `value != 0`."""
    loc = value.source_location
    return awst_nodes.NumericComparisonExpression(
        operator=awst_nodes.NumericComparison.ne,
        lhs=value,
        rhs=constant(0, loc),
        source_location=loc,
    )
def _is_zero(value: awst_nodes.Expression) -> awst_nodes.Expression:
    """Build the comparison `value == 0`."""
    loc = value.source_location
    return awst_nodes.NumericComparisonExpression(
        operator=awst_nodes.NumericComparison.eq,
        lhs=value,
        rhs=constant(0, loc),
        source_location=loc,
    )
def return_(value: bool, location: SourceLocation) -> awst_nodes.ReturnStatement:  # noqa: FBT001
    """Build `return <value>` as a boolean-constant return statement."""
    return awst_nodes.ReturnStatement(
        value=awst_nodes.BoolConstant(value=value, source_location=location),
        source_location=location,
    )
def reject(location: SourceLocation) -> awst_nodes.ReturnStatement:
    """Build `return False` (transaction rejected)."""
    return return_(False, location)  # noqa: FBT003
def approve(location: SourceLocation) -> awst_nodes.ReturnStatement:
    """Build `return True` (transaction approved)."""
    return return_(True, location)  # noqa: FBT003
def on_completion(location: SourceLocation) -> awst_nodes.Expression:
    """Build a read of the current transaction's OnCompletion field."""
    return _txn("OnCompletion", wtypes.uint64_wtype, location)
def route_bare_methods(
    location: SourceLocation,
    bare_methods: Mapping[md.ARC4BareMethod, AWSTContractMethodSignature],
) -> awst_nodes.Block | None:
    """Build the bare-method routing block: a switch on OnCompletion.

    Each bare method handles one or more OnCompletionActions; an action claimed
    by more than one method is reported as an error.
    """
    bare_blocks = dict[OnCompletionAction, awst_nodes.Block]()
    for method, sig in bare_methods.items():
        bare_location = method.config_location
        # each method body: create-state assertion, the call itself, then approve
        bare_block = create_block(
            bare_location,
            sig.target.member_name,
            *assert_create_state(method),
            awst_nodes.ExpressionStatement(expr=call(bare_location, sig)),
            approve(bare_location),
        )
        for oca in method.allowed_completion_types:
            # setdefault returns the existing block when the action is already taken
            if bare_blocks.setdefault(oca, bare_block) is not bare_block:
                logger.error(
                    f"cannot have multiple bare methods handling the same "
                    f"OnCompletionAction: {oca.name}",
                    location=bare_location,
                )
    return create_block(
        location,
        "bare_routing",
        *_maybe_switch(
            on_completion(location),
            {constant(oca.value, location): block for oca, block in bare_blocks.items()},
        ),
    )
def log_arc4_result(
    location: SourceLocation, result_expression: awst_nodes.Expression
) -> awst_nodes.ExpressionStatement:
    """Build a `log` of the ARC-4 return value, prefixed with the 0x151f7c75 marker."""
    abi_log_prefix = awst_nodes.BytesConstant(
        source_location=location,
        value=0x151F7C75.to_bytes(4),
        encoding=awst_nodes.BytesEncoding.base16,
    )
    # prefix ++ raw bytes of the (already ARC-4 encoded) result
    abi_log = awst_nodes.BytesBinaryOperation(
        source_location=location,
        left=abi_log_prefix,
        op=awst_nodes.BytesBinaryOperator.add,
        right=awst_nodes.ReinterpretCast(
            expr=result_expression,
            wtype=wtypes.bytes_wtype,
            source_location=result_expression.source_location,
        ),
    )
    log_op = awst_nodes.IntrinsicCall(
        op_code="log",
        stack_args=[abi_log],
        wtype=wtypes.void_wtype,
        source_location=location,
    )
    return awst_nodes.ExpressionStatement(log_op)
def assert_create_state(method: md.ARC4Method) -> Sequence[awst_nodes.Statement]:
    """Build the assertion enforcing the method's create requirement.

    Checks ApplicationID: zero during app creation, non-zero otherwise.
    Returns no statements when creation is neither required nor disallowed.
    """
    app_id = _txn("ApplicationID", wtypes.uint64_wtype, method.config_location)
    match method.create:
        case awst_nodes.ARC4CreateOption.allow:
            # if create is allowed but not required, we don't need to check anything
            return ()
        case awst_nodes.ARC4CreateOption.disallow:
            condition = _non_zero(app_id)
            error_message = "can only call when not creating"
        case awst_nodes.ARC4CreateOption.require:
            condition = _is_zero(app_id)
            error_message = "can only call when creating"
        case invalid:
            typing.assert_never(invalid)
    return [
        awst_nodes.ExpressionStatement(
            awst_nodes.AssertExpression(
                condition=condition,
                error_message=error_message,
                source_location=method.config_location,
            )
        )
    ]
def constant(value: int, location: SourceLocation) -> awst_nodes.Expression:
    """Build a uint64 constant expression."""
    return awst_nodes.UInt64Constant(value=value, source_location=location)
def left_shift(value: awst_nodes.Expression, location: SourceLocation) -> awst_nodes.Expression:
    """Build the expression `1 << value`."""
    one = constant(1, location)
    return awst_nodes.UInt64BinaryOperation(
        left=one,
        op=awst_nodes.UInt64BinaryOperator.lshift,
        right=value,
        source_location=location,
    )
def bit_and(
    lhs: awst_nodes.Expression, rhs: awst_nodes.Expression, location: SourceLocation
) -> awst_nodes.Expression:
    """Build the expression `lhs & rhs` (uint64 bitwise and)."""
    return awst_nodes.UInt64BinaryOperation(
        source_location=location,
        left=lhs,
        op=awst_nodes.UInt64BinaryOperator.bit_and,
        right=rhs,
    )
def uint64_sub(
    lhs: awst_nodes.Expression, rhs: awst_nodes.Expression, location: SourceLocation
) -> awst_nodes.Expression:
    """Build the expression `lhs - rhs` (uint64 subtraction)."""
    return awst_nodes.UInt64BinaryOperation(
        source_location=location,
        left=lhs,
        op=awst_nodes.UInt64BinaryOperator.sub,
        right=rhs,
    )
def bit_packed_oca(
    allowed_oca: Iterable[OnCompletionAction], location: SourceLocation
) -> awst_nodes.Expression:
    """Build an integer constant with one bit set per allowed OnCompletionAction.

    A transaction's OnCompletion can then be checked against the whole set with
    a single left-shift and bitwise-and.
    """
    mask = 0
    for action in allowed_oca:
        mask |= 1 << action.value
    return constant(mask, location)
def check_allowed_oca(
    allowed_ocas: Sequence[OnCompletionAction], location: SourceLocation
) -> Sequence[awst_nodes.Statement]:
    """Build the assertion that the transaction's OnCompletion is permitted.

    Emits no statements when all actions are allowed; otherwise picks the
    cheapest check: equality for a single allowed action, inequality for a
    single disallowed action, or a bitmask test for the general case.
    """
    not_allowed_ocas = sorted(
        a for a in ALL_VALID_APPROVAL_ON_COMPLETION_ACTIONS if a not in allowed_ocas
    )
    if not not_allowed_ocas:
        # all actions are allowed, don't need to check
        return ()
    match allowed_ocas, not_allowed_ocas:
        case [[single_allowed], _]:
            # exactly one allowed action: OnCompletion == <allowed>
            condition: awst_nodes.Expression = awst_nodes.NumericComparisonExpression(
                lhs=on_completion(location),
                rhs=awst_nodes.UInt64Constant(
                    source_location=location,
                    value=single_allowed.value,
                    teal_alias=single_allowed.name,
                ),
                operator=awst_nodes.NumericComparison.eq,
                source_location=location,
            )
        case _, [single_disallowed]:
            # exactly one disallowed action: OnCompletion != <disallowed>
            condition = awst_nodes.NumericComparisonExpression(
                lhs=on_completion(location),
                rhs=awst_nodes.UInt64Constant(
                    source_location=location,
                    value=single_disallowed.value,
                    teal_alias=single_disallowed.name,
                ),
                operator=awst_nodes.NumericComparison.ne,
                source_location=location,
            )
        case _:
            # general case: (1 << OnCompletion) & allowed-bitmask, truthy when allowed
            condition = bit_and(
                left_shift(on_completion(location), location),
                bit_packed_oca(allowed_ocas, location),
                location,
            )
    oca_desc = ", ".join(a.name for a in allowed_ocas)
    if len(allowed_ocas) > 1:
        oca_desc = f"one of {oca_desc}"
    return (
        awst_nodes.ExpressionStatement(
            awst_nodes.AssertExpression(
                condition=condition,
                error_message=f"OnCompletion is not {oca_desc}",
                source_location=location,
            )
        ),
    )
def _map_abi_args(
    arg_types: Sequence[wtypes.WType], location: SourceLocation
) -> Iterable[awst_nodes.Expression]:
    """Yield one argument expression per ABI parameter, decoding as needed.

    First pass: determine the ARC-4 wire type of each non-transaction argument
    and read it from ApplicationArgs (arguments beyond the 15th are packed into
    a tuple at index 15, per ARC-4). Second pass: yield, in declaration order,
    group-transaction references (via GroupIndex arithmetic), reference-type
    lookups (via txnas), or decoded values.
    """
    transaction_arg_offset = 0
    incoming_types = []
    for a in arg_types:
        if isinstance(a, wtypes.WGroupTransaction):
            # transactions are not passed in ApplicationArgs; count them so each
            # one's group index can be computed relative to this transaction
            transaction_arg_offset += 1
        else:
            if isinstance(a, wtypes.ARC4Type):
                arc4_type = a
            else:
                converted = maybe_avm_to_arc4_equivalent_type(a)
                if converted is not None:
                    arc4_type = converted
                elif _reference_type_array(a) is not None:
                    # reference types arrive as a single-byte index into a foreign array
                    arc4_type = wtypes.arc4_byte_alias
                else:
                    raise CodeError(f"not an ARC4 type or native equivalent: {a}", location)
            incoming_types.append(arc4_type)
    if len(incoming_types) > 15:
        # ARC-4: args 15+ are packed into a tuple passed as the 16th app arg
        unpacked_types, packed_types = incoming_types[:14], incoming_types[14:]
    else:
        unpacked_types, packed_types = incoming_types, []
    # ApplicationArgs[0] is the method selector, so values start at index 1
    abi_args = [
        app_arg(array_index, arg_wtype, location)
        for array_index, arg_wtype in enumerate(unpacked_types, start=1)
    ]
    if packed_types:
        abi_args.extend(
            awst_nodes.TupleItemExpression(
                base=app_arg(
                    15, wtypes.ARC4Tuple(types=packed_types, source_location=location), location
                ),
                index=tuple_index,
                source_location=location,
            )
            for tuple_index, _ in enumerate(packed_types)
        )
    abi_args.reverse()  # reverse so we can pop off end
    for arg in arg_types:
        if isinstance(arg, wtypes.WGroupTransaction):
            # this txn's group index minus the number of txn args still ahead
            transaction_index = uint64_sub(
                _txn("GroupIndex", wtypes.uint64_wtype, location),
                constant(transaction_arg_offset, location),
                location,
            )
            yield awst_nodes.GroupTransactionReference(
                index=transaction_index, wtype=arg, source_location=location
            )
            transaction_arg_offset -= 1
        else:
            abi_arg = abi_args.pop()
            if (ref_array := _reference_type_array(arg)) is not None:
                # resolve the byte index into the matching foreign array via txnas
                uint64_index = _btoi(abi_arg, location)
                yield awst_nodes.IntrinsicCall(
                    op_code="txnas",
                    immediates=[ref_array],
                    stack_args=[uint64_index],
                    wtype=arg,
                    source_location=location,
                )
            else:
                if abi_arg.wtype != arg:
                    # native parameter: decode from its ARC-4 wire form
                    abi_arg = awst_nodes.ARC4Decode(
                        value=abi_arg, wtype=arg, source_location=location
                    )
                yield abi_arg
def route_abi_methods(
    location: SourceLocation,
    methods: Mapping[md.ARC4ABIMethod, AWSTContractMethodSignature],
) -> awst_nodes.Block:
    """Build the "abi_routing" block: a switch over app arg 0 (the method selector).

    Each case validates OnCompletion / create constraints, invokes the implementation
    with arguments mapped by ``_map_abi_args``, logs an ARC4-encoded return value when
    there is one, and approves.

    Raises CodeError on duplicate method signatures or un-encodable return types.
    """
    method_routing_cases = dict[awst_nodes.Expression, awst_nodes.Block]()
    seen_signatures = set[str]()
    for method, sig in methods.items():
        abi_loc = method.config_location
        abi_args = list(_map_abi_args(sig.parameter_types, location))
        method_result = call(abi_loc, sig, *abi_args)
        # decide whether (and how) the result must be ARC4-encoded and logged
        match sig.return_type:
            case wtypes.void_wtype:
                call_and_maybe_log = awst_nodes.ExpressionStatement(method_result)
            case wtypes.ARC4Type():
                # already in wire format - log as-is
                call_and_maybe_log = log_arc4_result(abi_loc, method_result)
            case _:
                converted_return_type = maybe_avm_to_arc4_equivalent_type(sig.return_type)
                if converted_return_type is None:
                    raise CodeError(f"{sig.return_type} is not a valid ABI return type", abi_loc)
                arc4_encoded = awst_nodes.ARC4Encode(
                    value=method_result,
                    wtype=converted_return_type,
                    source_location=method_result.source_location,
                )
                call_and_maybe_log = log_arc4_result(abi_loc, arc4_encoded)
        arc4_signature = method.signature
        # set_add returns False if the signature was already present
        if not set_add(seen_signatures, arc4_signature):
            raise CodeError(
                f"Cannot have duplicate ARC4 method signatures: {arc4_signature}", abi_loc
            )
        method_routing_cases[
            awst_nodes.MethodConstant(source_location=location, value=arc4_signature)
        ] = create_block(
            abi_loc,
            f"{method.name}_route",
            *check_allowed_oca(method.allowed_completion_types, abi_loc),
            *assert_create_state(method),
            call_and_maybe_log,
            approve(abi_loc),
        )
    return create_block(
        location,
        "abi_routing",
        *_maybe_switch(_txn_app_args(0, location), method_routing_cases),
    )
def _maybe_switch(
    value: awst_nodes.Expression, cases: Mapping[awst_nodes.Expression, awst_nodes.Block]
) -> Sequence[awst_nodes.Statement]:
    """Wrap *cases* in a single Switch over *value*, or produce no statements if empty."""
    if cases:
        switch = awst_nodes.Switch(
            value=value,
            cases=cases,
            default_case=None,
            source_location=value.source_location,
        )
        return [switch]
    return ()
def create_abi_router(
    contract: awst_nodes.Contract,
    arc4_methods_with_signatures: Mapping[md.ARC4Method, AWSTContractMethodSignature],
) -> awst_nodes.ContractMethod:
    """Synthesise the ``__puya_arc4_router__`` method for *contract*.

    Dispatches to ABI routing when there are application args (i.e. a method call),
    otherwise to bare-method routing; rejects if neither branch approves.
    """
    loc = contract.source_location
    # partition methods by kind: bare methods route on OnCompletion, ABI methods on selector
    bare_methods = {
        method: sig
        for method, sig in arc4_methods_with_signatures.items()
        if isinstance(method, md.ARC4BareMethod)
    }
    abi_methods = {
        method: sig
        for method, sig in arc4_methods_with_signatures.items()
        if not isinstance(method, md.ARC4BareMethod)
    }
    abi_routing = route_abi_methods(loc, abi_methods)
    bare_routing = route_bare_methods(loc, bare_methods)
    # NumAppArgs > 0 implies an ABI method call (selector in app arg 0)
    num_app_args = _txn("NumAppArgs", wtypes.uint64_wtype, loc)
    router_statements: list[awst_nodes.Statement] = [
        awst_nodes.IfElse(
            condition=_non_zero(num_app_args),
            if_branch=abi_routing,
            else_branch=bare_routing,
            source_location=loc,
        ),
        reject(loc),
    ]
    return awst_nodes.ContractMethod(
        cref=contract.id,
        member_name="__puya_arc4_router__",
        source_location=loc,
        args=[],
        return_type=wtypes.bool_wtype,
        body=create_block(loc, None, *router_statements),
        documentation=awst_nodes.MethodDocumentation(),
        arc4_method_config=None,
        inline=True,
    )
def _reference_type_array(wtype: wtypes.WType) -> str | None:
    """Map a reference wtype to its foreign-array transaction field name, else None."""
    if wtype == wtypes.asset_wtype:
        return "Assets"
    if wtype == wtypes.account_wtype:
        return "Accounts"
    if wtype == wtypes.application_wtype:
        return "Applications"
    return None
|
algorandfoundation/puya
|
src/puya/ir/arc4_router.py
|
Python
|
NOASSERTION
| 17,332 |
# AUTO GENERATED BY scripts/generate_avm_ops.py, DO NOT EDIT
import enum
from collections.abc import Sequence
from puya.errors import InternalError
from puya.ir.avm_ops_models import (
AVMOpData,
DynamicVariants,
ImmediateKind,
OpSignature,
RunMode,
StackType,
Variant,
)
class AVMOp(enum.StrEnum):
code: str
immediate_types: Sequence[ImmediateKind]
_variants: Variant | DynamicVariants
cost: int | None
min_avm_version: int
    def __new__(cls, data: AVMOpData | str) -> "AVMOp":
        """Create an enum member whose value is the TEAL op code string, caching the
        op's metadata (immediates, variants, cost, min AVM version) on the member."""
        # the weird union type on data && then assert,
        # is to shut mypy up when it wrongly infers the arg type of
        # e.g. AVMOp("+") to be invalid
        assert isinstance(data, AVMOpData)
        op_code = data.op_code
        obj = str.__new__(cls, op_code)
        obj._value_ = op_code  # StrEnum value == the op code itself
        obj.code = op_code
        obj.immediate_types = tuple(data.immediate_types)
        obj._variants = data.variants  # noqa: SLF001
        obj.cost = data.cost
        obj.min_avm_version = data.min_avm_version
        return obj
    def get_variant(self, immediates: Sequence[str | int]) -> Variant:
        """Return this op's signature Variant, selecting by immediate for dynamic ops.

        Raises InternalError if the selecting immediate is not a known variant key.
        """
        if isinstance(self._variants, Variant):
            # fixed-signature op: single variant regardless of immediates
            return self._variants
        # dynamic op (e.g. acct_params_get): an arg-enum immediate picks the variant
        im = immediates[self._variants.immediate_index]
        assert isinstance(im, str)
        try:
            return self._variants.variant_map[im]
        except KeyError as ex:
            raise InternalError(f"Unknown immediate for {self.code}: {im}") from ex
acct_params_get = AVMOpData(
op_code="acct_params_get",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"AcctBalance": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctBalance",
supported_modes=RunMode.app,
min_avm_version=6,
),
"AcctMinBalance": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctMinBalance",
supported_modes=RunMode.app,
min_avm_version=6,
),
"AcctAuthAddr": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.address, StackType.bool],
),
enum="AcctAuthAddr",
supported_modes=RunMode.app,
min_avm_version=6,
),
"AcctTotalNumUint": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalNumUint",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalNumByteSlice": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalNumByteSlice",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalExtraAppPages": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalExtraAppPages",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalAppsCreated": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalAppsCreated",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalAppsOptedIn": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalAppsOptedIn",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalAssetsCreated": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalAssetsCreated",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalAssets": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalAssets",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalBoxes": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalBoxes",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctTotalBoxBytes": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctTotalBoxBytes",
supported_modes=RunMode.app,
min_avm_version=8,
),
"AcctIncentiveEligible": Variant(
signature=OpSignature(
args=[StackType.address_or_index], returns=[StackType.bool, StackType.bool]
),
enum="AcctIncentiveEligible",
supported_modes=RunMode.app,
min_avm_version=11,
),
"AcctLastProposed": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctLastProposed",
supported_modes=RunMode.app,
min_avm_version=11,
),
"AcctLastHeartbeat": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="AcctLastHeartbeat",
supported_modes=RunMode.app,
min_avm_version=11,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=6,
supported_modes=RunMode.app,
)
"""
X is field F from account A. Y is 1 if A owns positive algos, else 0
"""
add = AVMOpData(
op_code="+",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A plus B. Fail on overflow.
Overflow is an error condition which halts execution and fails the transaction. Full precision
is available from `addw`.
"""
add_bytes = AVMOpData(
op_code="b+",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bigint]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=10,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A plus B. A and B are interpreted as big-endian unsigned integers
"""
addw = AVMOpData(
op_code="addw",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64],
returns=[StackType.uint64, StackType.uint64],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.any,
)
"""
A plus B as a 128-bit result. X is the carry-bit, Y is the low-order 64 bits.
"""
and_ = AVMOpData(
op_code="&&",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A is not zero and B is not zero => {0 or 1}
"""
app_global_del = AVMOpData(
op_code="app_global_del",
variants=Variant(
signature=OpSignature(args=[StackType.state_key], returns=[]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
delete key A from the global state of the current application
params: state key.
Deleting a key which is already absent has no effect on the application global state. (In
particular, it does _not_ cause the program to fail.)
"""
app_global_get = AVMOpData(
op_code="app_global_get",
variants=Variant(
signature=OpSignature(args=[StackType.state_key], returns=[StackType.any]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
global state of the key A in the current application
params: state key. Return: value. The value is zero (of type uint64) if the key does not exist.
"""
app_global_get_ex = AVMOpData(
op_code="app_global_get_ex",
variants=Variant(
signature=OpSignature(
args=[StackType.application, StackType.state_key],
returns=[StackType.any, StackType.bool],
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
X is the global state of application A, key B. Y is 1 if key existed, else 0
params: Txn.ForeignApps offset (or, since v4, an _available_ application id), state key.
Return: did_exist flag (top of the stack, 1 if the application and key existed and 0
otherwise), value. The value is zero (of type uint64) if the key does not exist.
"""
app_global_put = AVMOpData(
op_code="app_global_put",
variants=Variant(
signature=OpSignature(args=[StackType.state_key, StackType.any], returns=[]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
write B to key A in the global state of the current application
"""
app_local_del = AVMOpData(
op_code="app_local_del",
variants=Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.state_key], returns=[]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
delete key B from account A's local state of the current application
params: Txn.Accounts offset (or, since v4, an _available_ account address), state key.
Deleting a key which is already absent has no effect on the application local state. (In
particular, it does _not_ cause the program to fail.)
"""
app_local_get = AVMOpData(
op_code="app_local_get",
variants=Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.state_key], returns=[StackType.any]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
local state of the key B in the current application in account A
params: Txn.Accounts offset (or, since v4, an _available_ account address), state key. Return:
value. The value is zero (of type uint64) if the key does not exist.
"""
app_local_get_ex = AVMOpData(
op_code="app_local_get_ex",
variants=Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.application, StackType.state_key],
returns=[StackType.any, StackType.bool],
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
X is the local state of application B, key C in account A. Y is 1 if key existed, else 0
params: Txn.Accounts offset (or, since v4, an _available_ account address), _available_
application id (or, since v4, a Txn.ForeignApps offset), state key. Return: did_exist flag (top
of the stack, 1 if the application and key existed and 0 otherwise), value. The value is zero
(of type uint64) if the key does not exist.
"""
app_local_put = AVMOpData(
op_code="app_local_put",
variants=Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.state_key, StackType.any], returns=[]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
write C to key B in account A's local state of the current application
params: Txn.Accounts offset (or, since v4, an _available_ account address), state key, value.
"""
app_opted_in = AVMOpData(
op_code="app_opted_in",
variants=Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.application], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
1 if account A is opted in to application B, else 0
params: Txn.Accounts offset (or, since v4, an _available_ account address), _available_
application id (or, since v4, a Txn.ForeignApps offset). Return: 1 if opted in and 0 otherwise.
"""
app_params_get = AVMOpData(
op_code="app_params_get",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"AppApprovalProgram": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.bytes, StackType.bool]
),
enum="AppApprovalProgram",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppClearStateProgram": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.bytes, StackType.bool]
),
enum="AppClearStateProgram",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppGlobalNumUint": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.uint64, StackType.bool]
),
enum="AppGlobalNumUint",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppGlobalNumByteSlice": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.uint64, StackType.bool]
),
enum="AppGlobalNumByteSlice",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppLocalNumUint": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.uint64, StackType.bool]
),
enum="AppLocalNumUint",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppLocalNumByteSlice": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.uint64, StackType.bool]
),
enum="AppLocalNumByteSlice",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppExtraProgramPages": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.uint64, StackType.bool]
),
enum="AppExtraProgramPages",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppCreator": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.address, StackType.bool]
),
enum="AppCreator",
supported_modes=RunMode.app,
min_avm_version=5,
),
"AppAddress": Variant(
signature=OpSignature(
args=[StackType.application], returns=[StackType.address, StackType.bool]
),
enum="AppAddress",
supported_modes=RunMode.app,
min_avm_version=5,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=5,
supported_modes=RunMode.app,
)
"""
X is field F from app A. Y is 1 if A exists, else 0
params: Txn.ForeignApps offset or an _available_ app id. Return: did_exist flag (1 if the
application existed and 0 otherwise), value.
"""
arg = AVMOpData(
op_code="arg",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.lsig,
min_avm_version=1,
),
immediate_types=(ImmediateKind.uint8,),
cost=1,
min_avm_version=1,
supported_modes=RunMode.lsig,
)
"""
Nth LogicSig argument
"""
arg_0 = AVMOpData(
op_code="arg_0",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.lsig,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.lsig,
)
"""
LogicSig argument 0
"""
arg_1 = AVMOpData(
op_code="arg_1",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.lsig,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.lsig,
)
"""
LogicSig argument 1
"""
arg_2 = AVMOpData(
op_code="arg_2",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.lsig,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.lsig,
)
"""
LogicSig argument 2
"""
arg_3 = AVMOpData(
op_code="arg_3",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.lsig,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.lsig,
)
"""
LogicSig argument 3
"""
args = AVMOpData(
op_code="args",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.lsig,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.lsig,
)
"""
Ath LogicSig argument
"""
assert_ = AVMOpData(
op_code="assert",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.any,
)
"""
immediately fail unless A is a non-zero number
"""
asset_holding_get = AVMOpData(
op_code="asset_holding_get",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"AssetBalance": Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.asset],
returns=[StackType.uint64, StackType.bool],
),
enum="AssetBalance",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetFrozen": Variant(
signature=OpSignature(
args=[StackType.address_or_index, StackType.asset],
returns=[StackType.bool, StackType.bool],
),
enum="AssetFrozen",
supported_modes=RunMode.app,
min_avm_version=2,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
X is field F from account A's holding of asset B. Y is 1 if A is opted into B, else 0
params: Txn.Accounts offset (or, since v4, an _available_ address), asset id (or, since v4, a
Txn.ForeignAssets offset). Return: did_exist flag (1 if the asset existed and 0 otherwise),
value.
"""
asset_params_get = AVMOpData(
op_code="asset_params_get",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"AssetTotal": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.uint64, StackType.bool]
),
enum="AssetTotal",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetDecimals": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.uint64, StackType.bool]
),
enum="AssetDecimals",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetDefaultFrozen": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.bool, StackType.bool]
),
enum="AssetDefaultFrozen",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetUnitName": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.bytes, StackType.bool]
),
enum="AssetUnitName",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetName": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.bytes, StackType.bool]
),
enum="AssetName",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetURL": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.bytes, StackType.bool]
),
enum="AssetURL",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetMetadataHash": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.bytes, StackType.bool]
),
enum="AssetMetadataHash",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetManager": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.address, StackType.bool]
),
enum="AssetManager",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetReserve": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.address, StackType.bool]
),
enum="AssetReserve",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetFreeze": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.address, StackType.bool]
),
enum="AssetFreeze",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetClawback": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.address, StackType.bool]
),
enum="AssetClawback",
supported_modes=RunMode.app,
min_avm_version=2,
),
"AssetCreator": Variant(
signature=OpSignature(
args=[StackType.asset], returns=[StackType.address, StackType.bool]
),
enum="AssetCreator",
supported_modes=RunMode.app,
min_avm_version=5,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
X is field F from asset A. Y is 1 if A exists, else 0
params: Txn.ForeignAssets offset (or, since v4, an _available_ asset id. Return: did_exist flag
(1 if the asset existed and 0 otherwise), value.
"""
balance = AVMOpData(
op_code="balance",
variants=Variant(
signature=OpSignature(args=[StackType.address_or_index], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.app,
)
"""
balance for account A, in microalgos. The balance is observed after the effects of previous
transactions in the group, and after the fee for the current transaction is deducted. Changes
caused by inner transactions are observable immediately following `itxn_submit`
params: Txn.Accounts offset (or, since v4, an _available_ account address), _available_
application id (or, since v4, a Txn.ForeignApps offset). Return: value.
"""
base64_decode = AVMOpData(
op_code="base64_decode",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=7,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
decode A which was base64-encoded using _encoding_ E. Fail if A is not base64 encoded with
encoding E
*Warning*: Usage should be restricted to very rare use cases. In almost all cases, smart
contracts should directly handle non-encoded byte-strings. This opcode should only be used
in cases where base64 is the only available option, e.g. interoperability with a third-party
that only signs base64 strings.
Decodes A using the base64 encoding E. Specify the encoding with an immediate arg either as
URL and Filename Safe (`URLEncoding`) or Standard (`StdEncoding`). See [RFC 4648 sections 4 and
5](https://rfc-editor.org/rfc/rfc4648.html#section-4). It is assumed that the encoding ends
with the exact number of `=` padding characters as required by the RFC. When padding occurs,
any unused pad bits in the encoding must be set to zero or the decoding will fail. The special
cases of `\n` and `\r` are allowed but completely ignored. An error will result when attempting
to decode a string with a character that is not in the encoding alphabet or not one of `=`,
`\r`, or `\n`.
"""
bitlen = AVMOpData(
op_code="bitlen",
variants=Variant(
signature=OpSignature(args=[StackType.any], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
The highest set bit in A. If A is a byte-array, it is interpreted as a big-endian unsigned
integer. bitlen of 0 is 0, bitlen of 8 is 4
bitlen interprets arrays as big-endian integers, unlike setbit/getbit
"""
bitwise_and = AVMOpData(
op_code="&",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A bitwise-and B
"""
bitwise_and_bytes = AVMOpData(
op_code="b&",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=6,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A bitwise-and B. A and B are zero-left extended to the greater of their lengths
"""
bitwise_not = AVMOpData(
op_code="~",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
bitwise invert value A
"""
bitwise_not_bytes = AVMOpData(
op_code="b~",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=4,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A with all bits inverted
"""
bitwise_or = AVMOpData(
op_code="|",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A bitwise-or B
"""
bitwise_or_bytes = AVMOpData(
op_code="b|",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=6,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A bitwise-or B. A and B are zero-left extended to the greater of their lengths
"""
bitwise_xor = AVMOpData(
op_code="^",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A bitwise-xor B
"""
bitwise_xor_bytes = AVMOpData(
op_code="b^",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=6,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A bitwise-xor B. A and B are zero-left extended to the greater of their lengths
"""
block = AVMOpData(
op_code="block",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"BlkSeed": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="BlkSeed",
supported_modes=RunMode.any,
min_avm_version=7,
),
"BlkTimestamp": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum="BlkTimestamp",
supported_modes=RunMode.any,
min_avm_version=7,
),
"BlkProposer": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.address]),
enum="BlkProposer",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkFeesCollected": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum="BlkFeesCollected",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkBonus": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum="BlkBonus",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkBranch": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="BlkBranch",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkFeeSink": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.address]),
enum="BlkFeeSink",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkProtocol": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="BlkProtocol",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkTxnCounter": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum="BlkTxnCounter",
supported_modes=RunMode.any,
min_avm_version=11,
),
"BlkProposerPayout": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum="BlkProposerPayout",
supported_modes=RunMode.any,
min_avm_version=11,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
field F of block A. Fail unless A falls between txn.LastValid-1002 and txn.FirstValid
(exclusive)
"""
box_create = AVMOpData(
op_code="box_create",
variants=Variant(
signature=OpSignature(
args=[StackType.box_name, StackType.uint64], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
create a box named A, of length B. Fail if the name A is empty or B exceeds 32,768. Returns 0
if A already existed, else 1
Newly created boxes are filled with 0 bytes. `box_create` will fail if the referenced box
already exists with a different size. Otherwise, existing boxes are unchanged by `box_create`.
"""
box_del = AVMOpData(
op_code="box_del",
variants=Variant(
signature=OpSignature(args=[StackType.box_name], returns=[StackType.bool]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
delete box named A if it exists. Return 1 if A existed, 0 otherwise
"""
box_extract = AVMOpData(
op_code="box_extract",
variants=Variant(
signature=OpSignature(
args=[StackType.box_name, StackType.uint64, StackType.uint64],
returns=[StackType.bytes],
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
read C bytes from box A, starting at offset B. Fail if A does not exist, or the byte range is
outside A's size.
"""
box_get = AVMOpData(
op_code="box_get",
variants=Variant(
signature=OpSignature(
args=[StackType.box_name], returns=[StackType.bytes, StackType.bool]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
X is the contents of box A if A exists, else ''. Y is 1 if A exists, else 0.
For boxes that exceed 4,096 bytes, consider `box_create`, `box_extract`, and `box_replace`
"""
box_len = AVMOpData(
op_code="box_len",
variants=Variant(
signature=OpSignature(
args=[StackType.box_name], returns=[StackType.uint64, StackType.bool]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
X is the length of box A if A exists, else 0. Y is 1 if A exists, else 0.
"""
box_put = AVMOpData(
op_code="box_put",
variants=Variant(
signature=OpSignature(args=[StackType.box_name, StackType.bytes], returns=[]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
replaces the contents of box A with byte-array B. Fails if A exists and len(B) != len(box A).
Creates A if it does not exist
For boxes that exceed 4,096 bytes, consider `box_create`, `box_extract`, and `box_replace`
"""
box_replace = AVMOpData(
op_code="box_replace",
variants=Variant(
signature=OpSignature(
args=[StackType.box_name, StackType.uint64, StackType.bytes], returns=[]
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=8,
),
immediate_types=(),
cost=1,
min_avm_version=8,
supported_modes=RunMode.app,
)
"""
write byte-array C into box A, starting at offset B. Fail if A does not exist, or the byte
range is outside A's size.
"""
box_resize = AVMOpData(
op_code="box_resize",
variants=Variant(
signature=OpSignature(args=[StackType.box_name, StackType.uint64], returns=[]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=10,
),
immediate_types=(),
cost=1,
min_avm_version=10,
supported_modes=RunMode.app,
)
"""
change the size of box named A to be of length B, adding zero bytes to end or removing bytes
from the end, as needed. Fail if the name A is empty, A is not an existing box, or B exceeds
32,768.
"""
box_splice = AVMOpData(
op_code="box_splice",
variants=Variant(
signature=OpSignature(
args=[StackType.box_name, StackType.uint64, StackType.uint64, StackType.bytes],
returns=[],
),
enum=None,
supported_modes=RunMode.app,
min_avm_version=10,
),
immediate_types=(),
cost=1,
min_avm_version=10,
supported_modes=RunMode.app,
)
"""
set box A to contain its previous bytes up to index B, followed by D, followed by the original
bytes of A that began at index B+C.
Boxes are of constant length. If C < len(D), then len(D)-C bytes will be removed from the end.
If C > len(D), zero bytes will be appended to the end to reach the box length.
"""
bsqrt = AVMOpData(
op_code="bsqrt",
variants=Variant(
signature=OpSignature(args=[StackType.bigint], returns=[StackType.bigint]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=6,
),
immediate_types=(),
cost=40,
min_avm_version=6,
supported_modes=RunMode.any,
)
"""
The largest integer I such that I^2 <= A. A and I are interpreted as big-endian unsigned
integers
"""
btoi = AVMOpData(
op_code="btoi",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
converts big-endian byte array A to uint64. Fails if len(A) > 8. Padded by leading 0s if len(A)
< 8.
`btoi` fails if the input is longer than 8 bytes.
"""
bzero = AVMOpData(
op_code="bzero",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
zero filled byte-array of length A
"""
concat = AVMOpData(
op_code="concat",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.any,
)
"""
join A and B
`concat` fails if the result would be greater than 4096 bytes.
"""
div_floor = AVMOpData(
op_code="/",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A divided by B (truncated division). Fail if B == 0.
`divmodw` is available to divide the two-element values produced by `mulw` and `addw`.
"""
div_floor_bytes = AVMOpData(
op_code="b/",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bigint]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=20,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A divided by B (truncated division). A and B are interpreted as big-endian unsigned integers.
Fail if B is zero.
"""
divmodw = AVMOpData(
op_code="divmodw",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64, StackType.uint64, StackType.uint64],
returns=[StackType.uint64, StackType.uint64, StackType.uint64, StackType.uint64],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=20,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
W,X = (A,B / C,D); Y,Z = (A,B modulo C,D)
The notation J,K indicates that two uint64 values J and K are interpreted as a uint128 value,
with J as the high uint64 and K the low.
"""
divw = AVMOpData(
op_code="divw",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64, StackType.uint64],
returns=[StackType.uint64],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=6,
),
immediate_types=(),
cost=1,
min_avm_version=6,
supported_modes=RunMode.any,
)
"""
A,B / C. Fail if C == 0 or if result overflows.
The notation A,B indicates that A and B are interpreted as a uint128 value, with A as the high
uint64 and B the low.
"""
ec_add = AVMOpData(
op_code="ec_add",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=10,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=10,
supported_modes=RunMode.any,
)
"""
for curve points A and B, return the curve point A + B
A and B are curve points in affine representation: field element X concatenated with field
element Y. Field element `Z` is encoded as follows.
For the base field elements (Fp), `Z` is encoded as a big-endian number and must be lower than
the field modulus.
For the quadratic field extension (Fp2), `Z` is encoded as the concatenation of the individual
encoding of the coefficients. For an Fp2 element of the form `Z = Z0 + Z1 i`, where `i` is a
formal quadratic non-residue, the encoding of Z is the concatenation of the encoding of `Z0`
and `Z1` in this order. (`Z0` and `Z1` must be less than the field modulus).
The point at infinity is encoded as `(X,Y) = (0,0)`.
Groups G1 and G2 are denoted additively.
Fails if A or B is not in G.
A and/or B are allowed to be the point at infinity.
Does _not_ check if A and B are in the main prime-order subgroup.
"""
ec_map_to = AVMOpData(
op_code="ec_map_to",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=10,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=10,
supported_modes=RunMode.any,
)
"""
maps field element A to group G
BN254 points are mapped by the SVDW map. BLS12-381 points are mapped by the SSWU map.
G1 element inputs are base field elements and G2 element inputs are quadratic field elements,
with nearly the same encoding rules (for field elements) as defined in `ec_add`. There is one
difference of encoding rule: G1 element inputs do not need to be 0-padded if they fit in less
than 32 bytes for BN254 and less than 48 bytes for BLS12-381. (As usual, the empty byte array
represents 0.) G2 elements inputs need to be always have the required size.
"""
ec_multi_scalar_mul = AVMOpData(
op_code="ec_multi_scalar_mul",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=10,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=10,
supported_modes=RunMode.any,
)
"""
for curve points A and scalars B, return curve point B0A0 + B1A1 + B2A2 + ... + BnAn
A is a list of concatenated points, encoded and checked as described in `ec_add`. B is a list
of concatenated scalars which, unlike ec_scalar_mul, must all be exactly 32 bytes long.
The name `ec_multi_scalar_mul` was chosen to reflect common usage, but a more consistent name
would be `ec_multi_scalar_mul`. AVM values are limited to 4096 bytes, so `ec_multi_scalar_mul`
is limited by the size of the points in the group being operated upon.
"""
ec_pairing_check = AVMOpData(
op_code="ec_pairing_check",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=10,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=10,
supported_modes=RunMode.any,
)
"""
1 if the product of the pairing of each point in A with its respective point in B is equal to
the identity element of the target group Gt, else 0
A and B are concatenated points, encoded and checked as described in `ec_add`. A contains
points of the group G, B contains points of the associated group (G2 if G is G1, and vice
versa). Fails if A and B have a different number of points, or if any point is not in its
described group or outside the main prime-order subgroup - a stronger condition than other
opcodes. AVM values are limited to 4096 bytes, so `ec_pairing_check` is limited by the size of
the points in the groups being operated upon.
"""
ec_scalar_mul = AVMOpData(
op_code="ec_scalar_mul",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=10,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=10,
supported_modes=RunMode.any,
)
"""
for curve point A and scalar B, return the curve point BA, the point A multiplied by the scalar
B.
A is a curve point encoded and checked as described in `ec_add`. Scalar B is interpreted as a
big-endian unsigned integer. Fails if B exceeds 32 bytes.
"""
ec_subgroup_check = AVMOpData(
op_code="ec_subgroup_check",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bool]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=10,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=10,
supported_modes=RunMode.any,
)
"""
1 if A is in the main prime-order subgroup of G (including the point at infinity) else 0.
Program fails if A is not in G at all.
"""
ecdsa_pk_decompress = AVMOpData(
op_code="ecdsa_pk_decompress",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes], returns=[StackType.bytes, StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
decompress pubkey A into components X, Y
The 33 byte public key in a compressed form to be decompressed into X and Y (top) components.
All values are big-endian encoded.
"""
ecdsa_pk_recover = AVMOpData(
op_code="ecdsa_pk_recover",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64, StackType.bytes, StackType.bytes],
returns=[StackType.bytes, StackType.bytes],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=2000,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
for (data A, recovery id B, signature C, D) recover a public key
S (top) and R elements of a signature, recovery id and data (bottom) are expected on the stack
and used to deriver a public key. All values are big-endian encoded. The signed data must be 32
bytes long.
"""
ecdsa_verify = AVMOpData(
op_code="ecdsa_verify",
variants=Variant(
signature=OpSignature(
args=[
StackType.bytes,
StackType.bytes,
StackType.bytes,
StackType.bytes,
StackType.bytes,
],
returns=[StackType.bool],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
for (data A, signature B, C and pubkey D, E) verify the signature of the data against the
pubkey => {0 or 1}
The 32 byte Y-component of a public key is the last element on the stack, preceded by
X-component of a pubkey, preceded by S and R components of a signature, preceded by the data
that is fifth element on the stack. All values are big-endian encoded. The signed data must be
32 bytes long, and signatures in lower-S form are only accepted.
"""
ed25519verify = AVMOpData(
op_code="ed25519verify",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes, StackType.bytes], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1900,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
for (data A, signature B, pubkey C) verify the signature of ("ProgData" || program_hash ||
data) against the pubkey => {0 or 1}
The 32 byte public key is the last element on the stack, preceded by the 64 byte signature at
the second-to-last element on the stack, preceded by the data which was signed at the third-to-
last element on the stack.
"""
ed25519verify_bare = AVMOpData(
op_code="ed25519verify_bare",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes, StackType.bytes], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=7,
),
immediate_types=(),
cost=1900,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
for (data A, signature B, pubkey C) verify the signature of the data against the pubkey => {0
or 1}
"""
eq = AVMOpData(
op_code="==",
variants=Variant(
signature=OpSignature(args=[StackType.any, StackType.any], returns=[StackType.bool]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A is equal to B => {0 or 1}
"""
eq_bytes = AVMOpData(
op_code="b==",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
1 if A is equal to B, else 0. A and B are interpreted as big-endian unsigned integers
"""
exp = AVMOpData(
op_code="exp",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A raised to the Bth power. Fail if A == B == 0 and on overflow
"""
expw = AVMOpData(
op_code="expw",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64],
returns=[StackType.uint64, StackType.uint64],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=10,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A raised to the Bth power as a 128-bit result in two uint64s. X is the high 64 bits, Y is the
low. Fail if A == B == 0 or if the results exceeds 2^128-1
"""
extract = AVMOpData(
op_code="extract",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(ImmediateKind.uint8, ImmediateKind.uint8),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
A range of bytes from A starting at S up to but not including S+L. If L is 0, then extract to
the end of the string. If S or S+L is larger than the array length, the program fails
"""
extract3 = AVMOpData(
op_code="extract3",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64, StackType.uint64],
returns=[StackType.bytes],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
A range of bytes from A starting at B up to but not including B+C. If B+C is larger than the
array length, the program fails
`extract3` can be called using `extract` with no immediates.
"""
extract_uint16 = AVMOpData(
op_code="extract_uint16",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
A uint16 formed from a range of big-endian bytes from A starting at B up to but not including
B+2. If B+2 is larger than the array length, the program fails
"""
extract_uint32 = AVMOpData(
op_code="extract_uint32",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
A uint32 formed from a range of big-endian bytes from A starting at B up to but not including
B+4. If B+4 is larger than the array length, the program fails
"""
extract_uint64 = AVMOpData(
op_code="extract_uint64",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
A uint64 formed from a range of big-endian bytes from A starting at B up to but not including
B+8. If B+8 is larger than the array length, the program fails
"""
falcon_verify = AVMOpData(
op_code="falcon_verify",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes, StackType.bytes], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=12,
),
immediate_types=(),
cost=1700,
min_avm_version=12,
supported_modes=RunMode.any,
)
"""
for (data A, compressed-format signature B, pubkey C) verify the signature of data against the
pubkey
"""
gaid = AVMOpData(
op_code="gaid",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=4,
),
immediate_types=(ImmediateKind.uint8,),
cost=1,
min_avm_version=4,
supported_modes=RunMode.app,
)
"""
ID of the asset or application created in the Tth transaction of the current group
`gaid` fails unless the requested transaction created an asset or application and T <
GroupIndex.
"""
gaids = AVMOpData(
op_code="gaids",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.app,
)
"""
ID of the asset or application created in the Ath transaction of the current group
`gaids` fails unless the requested transaction created an asset or application and A <
GroupIndex.
"""
getbit = AVMOpData(
op_code="getbit",
variants=Variant(
signature=OpSignature(
args=[StackType.any, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.any,
)
"""
Bth bit of (byte-array or integer) A. If B is greater than or equal to the bit length of the
value (8*byte length), the program fails
see explanation of bit ordering in setbit
"""
getbyte = AVMOpData(
op_code="getbyte",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.any,
)
"""
Bth byte of A, as an integer. If B is greater than or equal to the array length, the program
fails
"""
gitxn = AVMOpData(
op_code="gitxn",
variants=DynamicVariants(
immediate_index=1,
variant_map={
"Sender": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Sender",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Fee": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Fee",
supported_modes=RunMode.any,
min_avm_version=0,
),
"FirstValid": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="FirstValid",
supported_modes=RunMode.any,
min_avm_version=0,
),
"FirstValidTime": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="FirstValidTime",
supported_modes=RunMode.any,
min_avm_version=7,
),
"LastValid": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="LastValid",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Note": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Note",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Lease": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Lease",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Receiver": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Receiver",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Amount": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Amount",
supported_modes=RunMode.any,
min_avm_version=0,
),
"CloseRemainderTo": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="CloseRemainderTo",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VotePK": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="VotePK",
supported_modes=RunMode.any,
min_avm_version=0,
),
"SelectionPK": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="SelectionPK",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VoteFirst": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="VoteFirst",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VoteLast": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="VoteLast",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VoteKeyDilution": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="VoteKeyDilution",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Type": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Type",
supported_modes=RunMode.any,
min_avm_version=0,
),
"TypeEnum": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="TypeEnum",
supported_modes=RunMode.any,
min_avm_version=0,
),
"XferAsset": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="XferAsset",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetAmount": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="AssetAmount",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetSender": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="AssetSender",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetReceiver": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="AssetReceiver",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetCloseTo": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="AssetCloseTo",
supported_modes=RunMode.any,
min_avm_version=0,
),
"GroupIndex": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="GroupIndex",
supported_modes=RunMode.any,
min_avm_version=0,
),
"TxID": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="TxID",
supported_modes=RunMode.any,
min_avm_version=0,
),
"ApplicationID": Variant(
signature=OpSignature(args=[], returns=[StackType.application]),
enum="ApplicationID",
supported_modes=RunMode.any,
min_avm_version=2,
),
"OnCompletion": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="OnCompletion",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ApplicationArgs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApplicationArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"NumAppArgs": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumAppArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Accounts": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Accounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"NumAccounts": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumAccounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ApprovalProgram": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApprovalProgram",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ClearStateProgram": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ClearStateProgram",
supported_modes=RunMode.any,
min_avm_version=2,
),
"RekeyTo": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="RekeyTo",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAsset": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="ConfigAsset",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetTotal": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="ConfigAssetTotal",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetDecimals": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="ConfigAssetDecimals",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetDefaultFrozen": Variant(
signature=OpSignature(args=[], returns=[StackType.bool]),
enum="ConfigAssetDefaultFrozen",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetUnitName": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetUnitName",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetName": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetName",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetURL": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetURL",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetMetadataHash": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetMetadataHash",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetManager": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetManager",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetReserve": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetReserve",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetFreeze": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetFreeze",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetClawback": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetClawback",
supported_modes=RunMode.any,
min_avm_version=2,
),
"FreezeAsset": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="FreezeAsset",
supported_modes=RunMode.any,
min_avm_version=2,
),
"FreezeAssetAccount": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="FreezeAssetAccount",
supported_modes=RunMode.any,
min_avm_version=2,
),
"FreezeAssetFrozen": Variant(
signature=OpSignature(args=[], returns=[StackType.bool]),
enum="FreezeAssetFrozen",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Assets": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Assets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"NumAssets": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumAssets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Applications": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Applications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"NumApplications": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumApplications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"GlobalNumUint": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="GlobalNumUint",
supported_modes=RunMode.any,
min_avm_version=3,
),
"GlobalNumByteSlice": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="GlobalNumByteSlice",
supported_modes=RunMode.any,
min_avm_version=3,
),
"LocalNumUint": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="LocalNumUint",
supported_modes=RunMode.any,
min_avm_version=3,
),
"LocalNumByteSlice": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="LocalNumByteSlice",
supported_modes=RunMode.any,
min_avm_version=3,
),
"ExtraProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="ExtraProgramPages",
supported_modes=RunMode.any,
min_avm_version=4,
),
"Nonparticipation": Variant(
signature=OpSignature(args=[], returns=[StackType.bool]),
enum="Nonparticipation",
supported_modes=RunMode.any,
min_avm_version=5,
),
"Logs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Logs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"NumLogs": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumLogs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"CreatedAssetID": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="CreatedAssetID",
supported_modes=RunMode.app,
min_avm_version=5,
),
"CreatedApplicationID": Variant(
signature=OpSignature(args=[], returns=[StackType.application]),
enum="CreatedApplicationID",
supported_modes=RunMode.app,
min_avm_version=5,
),
"LastLog": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="LastLog",
supported_modes=RunMode.app,
min_avm_version=6,
),
"StateProofPK": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="StateProofPK",
supported_modes=RunMode.any,
min_avm_version=6,
),
"ApprovalProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"NumApprovalProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"ClearStateProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"NumClearStateProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
},
),
immediate_types=(ImmediateKind.uint8, ImmediateKind.arg_enum),
cost=1,
min_avm_version=6,
supported_modes=RunMode.app,
)
"""
field F of the Tth transaction in the last inner group submitted
"""
gitxna = AVMOpData(
op_code="gitxna",
variants=DynamicVariants(
immediate_index=1,
variant_map={
"ApplicationArgs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApplicationArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Accounts": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Accounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Assets": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="Assets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Applications": Variant(
signature=OpSignature(args=[], returns=[StackType.application]),
enum="Applications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Logs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Logs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"ApprovalProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"ClearStateProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
},
),
immediate_types=(ImmediateKind.uint8, ImmediateKind.arg_enum, ImmediateKind.uint8),
cost=1,
min_avm_version=6,
supported_modes=RunMode.app,
)
"""
Ith value of the array field F from the Tth transaction in the last inner group submitted
"""
# `gitxnas T F`: array-field access into the last submitted inner group; the
# element index A is taken from the stack, so every variant consumes a uint64.
gitxnas = AVMOpData(
    op_code="gitxnas",
    variants=DynamicVariants(
        immediate_index=1,
        variant_map={
            field: Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[result]),
                enum=field,
                supported_modes=mode,
                min_avm_version=version,
            )
            # (field name, result stack type, run mode, min AVM version)
            for field, result, mode, version in (
                ("ApplicationArgs", StackType.bytes, RunMode.any, 2),
                ("Accounts", StackType.address, RunMode.any, 2),
                ("Assets", StackType.asset, RunMode.any, 3),
                ("Applications", StackType.application, RunMode.any, 3),
                ("Logs", StackType.bytes, RunMode.app, 5),
                ("ApprovalProgramPages", StackType.bytes, RunMode.any, 7),
                ("ClearStateProgramPages", StackType.bytes, RunMode.any, 7),
            )
        },
    ),
    immediate_types=(ImmediateKind.uint8, ImmediateKind.arg_enum),
    cost=1,
    min_avm_version=6,
    supported_modes=RunMode.app,
)
"""
Ath value of the array field F from the Tth transaction in the last inner group submitted
"""
# `gload T I`: both the transaction index T and the scratch slot I are
# immediates; nothing is consumed from the stack.
gload = AVMOpData(
    op_code="gload",
    min_avm_version=4,
    supported_modes=RunMode.app,
    cost=1,
    immediate_types=(ImmediateKind.uint8, ImmediateKind.uint8),
    variants=Variant(
        signature=OpSignature(args=[], returns=[StackType.any]),
        enum=None,
        min_avm_version=4,
        supported_modes=RunMode.app,
    ),
)
"""
Ith scratch space value of the Tth transaction in the current group
`gload` fails unless the requested transaction is an ApplicationCall and T < GroupIndex.
"""
# `gloads I`: the scratch slot I is an immediate; the transaction index A is
# popped from the stack.
gloads = AVMOpData(
    op_code="gloads",
    min_avm_version=4,
    supported_modes=RunMode.app,
    cost=1,
    immediate_types=(ImmediateKind.uint8,),
    variants=Variant(
        signature=OpSignature(args=[StackType.uint64], returns=[StackType.any]),
        enum=None,
        min_avm_version=4,
        supported_modes=RunMode.app,
    ),
)
"""
Ith scratch space value of the Ath transaction in the current group
`gloads` fails unless the requested transaction is an ApplicationCall and A < GroupIndex.
"""
# `gloadss`: fully dynamic form — both the transaction index A and the scratch
# slot B come from the stack, so there are no immediates at all.
gloadss = AVMOpData(
    op_code="gloadss",
    min_avm_version=6,
    supported_modes=RunMode.app,
    cost=1,
    immediate_types=(),
    variants=Variant(
        signature=OpSignature(
            args=[StackType.uint64, StackType.uint64],
            returns=[StackType.any],
        ),
        enum=None,
        min_avm_version=6,
        supported_modes=RunMode.app,
    ),
)
"""
Bth scratch space value of the Ath transaction in the current group
"""
# `global F`: reads a global field selected by the single arg-enum immediate.
# Every variant takes no stack args; result type, run mode, and minimum AVM
# version vary per field, tabulated below.
global_ = AVMOpData(
    op_code="global",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            field: Variant(
                signature=OpSignature(args=[], returns=[result]),
                enum=field,
                supported_modes=mode,
                min_avm_version=version,
            )
            # (field name, result stack type, run mode, min AVM version)
            for field, result, mode, version in (
                ("MinTxnFee", StackType.uint64, RunMode.any, 0),
                ("MinBalance", StackType.uint64, RunMode.any, 0),
                ("MaxTxnLife", StackType.uint64, RunMode.any, 0),
                ("ZeroAddress", StackType.address, RunMode.any, 0),
                ("GroupSize", StackType.uint64, RunMode.any, 0),
                ("LogicSigVersion", StackType.uint64, RunMode.any, 2),
                ("Round", StackType.uint64, RunMode.app, 2),
                ("LatestTimestamp", StackType.uint64, RunMode.app, 2),
                ("CurrentApplicationID", StackType.application, RunMode.app, 2),
                ("CreatorAddress", StackType.address, RunMode.app, 3),
                ("CurrentApplicationAddress", StackType.address, RunMode.app, 5),
                ("GroupID", StackType.bytes, RunMode.any, 5),
                ("OpcodeBudget", StackType.uint64, RunMode.any, 6),
                ("CallerApplicationID", StackType.uint64, RunMode.app, 6),
                ("CallerApplicationAddress", StackType.address, RunMode.app, 6),
                ("AssetCreateMinBalance", StackType.uint64, RunMode.any, 10),
                ("AssetOptInMinBalance", StackType.uint64, RunMode.any, 10),
                ("GenesisHash", StackType.bytes, RunMode.any, 10),
                ("PayoutsEnabled", StackType.bool, RunMode.any, 11),
                ("PayoutsGoOnlineFee", StackType.uint64, RunMode.any, 11),
                ("PayoutsPercent", StackType.uint64, RunMode.any, 11),
                ("PayoutsMinBalance", StackType.uint64, RunMode.any, 11),
                ("PayoutsMaxBalance", StackType.uint64, RunMode.any, 11),
            )
        },
    ),
    immediate_types=(ImmediateKind.arg_enum,),
    cost=1,
    min_avm_version=1,
    supported_modes=RunMode.any,
)
"""
global field F
"""
# `>`: unsigned 64-bit comparison, pops two uint64s and pushes a bool.
gt = AVMOpData(
    op_code=">",
    supported_modes=RunMode.any,
    min_avm_version=1,
    cost=1,
    immediate_types=(),
    variants=Variant(
        signature=OpSignature(
            args=[StackType.uint64, StackType.uint64],
            returns=[StackType.bool],
        ),
        enum=None,
        min_avm_version=1,
        supported_modes=RunMode.any,
    ),
)
"""
A greater than B => {0 or 1}
"""
# `b>`: big-integer (byte-array) comparison variant of `>`.
gt_bytes = AVMOpData(
    op_code="b>",
    supported_modes=RunMode.any,
    min_avm_version=4,
    cost=1,
    immediate_types=(),
    variants=Variant(
        signature=OpSignature(
            args=[StackType.bigint, StackType.bigint],
            returns=[StackType.bool],
        ),
        enum=None,
        min_avm_version=4,
        supported_modes=RunMode.any,
    ),
)
"""
1 if A is greater than B, else 0. A and B are interpreted as big-endian unsigned integers
"""
# `>=`: unsigned 64-bit comparison, pops two uint64s and pushes a bool.
gte = AVMOpData(
    op_code=">=",
    supported_modes=RunMode.any,
    min_avm_version=1,
    cost=1,
    immediate_types=(),
    variants=Variant(
        signature=OpSignature(
            args=[StackType.uint64, StackType.uint64],
            returns=[StackType.bool],
        ),
        enum=None,
        min_avm_version=1,
        supported_modes=RunMode.any,
    ),
)
"""
A greater than or equal to B => {0 or 1}
"""
# `b>=`: big-integer (byte-array) comparison variant of `>=`.
gte_bytes = AVMOpData(
    op_code="b>=",
    supported_modes=RunMode.any,
    min_avm_version=4,
    cost=1,
    immediate_types=(),
    variants=Variant(
        signature=OpSignature(
            args=[StackType.bigint, StackType.bigint],
            returns=[StackType.bool],
        ),
        enum=None,
        min_avm_version=4,
        supported_modes=RunMode.any,
    ),
)
"""
1 if A is greater than or equal to B, else 0. A and B are interpreted as big-endian unsigned
integers
"""
# `gtxn T F`: field F of the Tth group transaction, both selected by
# immediates. Every variant takes no stack args; the result type, run mode and
# minimum AVM version depend solely on the field, tabulated below.
gtxn = AVMOpData(
    op_code="gtxn",
    variants=DynamicVariants(
        immediate_index=1,
        variant_map={
            field: Variant(
                signature=OpSignature(args=[], returns=[result]),
                enum=field,
                supported_modes=mode,
                min_avm_version=version,
            )
            # (field name, result stack type, run mode, min AVM version)
            for field, result, mode, version in (
                ("Sender", StackType.address, RunMode.any, 0),
                ("Fee", StackType.uint64, RunMode.any, 0),
                ("FirstValid", StackType.uint64, RunMode.any, 0),
                ("FirstValidTime", StackType.uint64, RunMode.any, 7),
                ("LastValid", StackType.uint64, RunMode.any, 0),
                ("Note", StackType.bytes, RunMode.any, 0),
                ("Lease", StackType.bytes, RunMode.any, 0),
                ("Receiver", StackType.address, RunMode.any, 0),
                ("Amount", StackType.uint64, RunMode.any, 0),
                ("CloseRemainderTo", StackType.address, RunMode.any, 0),
                ("VotePK", StackType.bytes, RunMode.any, 0),
                ("SelectionPK", StackType.bytes, RunMode.any, 0),
                ("VoteFirst", StackType.uint64, RunMode.any, 0),
                ("VoteLast", StackType.uint64, RunMode.any, 0),
                ("VoteKeyDilution", StackType.uint64, RunMode.any, 0),
                ("Type", StackType.bytes, RunMode.any, 0),
                ("TypeEnum", StackType.uint64, RunMode.any, 0),
                ("XferAsset", StackType.asset, RunMode.any, 0),
                ("AssetAmount", StackType.uint64, RunMode.any, 0),
                ("AssetSender", StackType.address, RunMode.any, 0),
                ("AssetReceiver", StackType.address, RunMode.any, 0),
                ("AssetCloseTo", StackType.address, RunMode.any, 0),
                ("GroupIndex", StackType.uint64, RunMode.any, 0),
                ("TxID", StackType.bytes, RunMode.any, 0),
                ("ApplicationID", StackType.application, RunMode.any, 2),
                ("OnCompletion", StackType.uint64, RunMode.any, 2),
                ("ApplicationArgs", StackType.bytes, RunMode.any, 2),
                ("NumAppArgs", StackType.uint64, RunMode.any, 2),
                ("Accounts", StackType.address, RunMode.any, 2),
                ("NumAccounts", StackType.uint64, RunMode.any, 2),
                ("ApprovalProgram", StackType.bytes, RunMode.any, 2),
                ("ClearStateProgram", StackType.bytes, RunMode.any, 2),
                ("RekeyTo", StackType.address, RunMode.any, 2),
                ("ConfigAsset", StackType.asset, RunMode.any, 2),
                ("ConfigAssetTotal", StackType.uint64, RunMode.any, 2),
                ("ConfigAssetDecimals", StackType.uint64, RunMode.any, 2),
                ("ConfigAssetDefaultFrozen", StackType.bool, RunMode.any, 2),
                ("ConfigAssetUnitName", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetName", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetURL", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetMetadataHash", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetManager", StackType.address, RunMode.any, 2),
                ("ConfigAssetReserve", StackType.address, RunMode.any, 2),
                ("ConfigAssetFreeze", StackType.address, RunMode.any, 2),
                ("ConfigAssetClawback", StackType.address, RunMode.any, 2),
                ("FreezeAsset", StackType.asset, RunMode.any, 2),
                ("FreezeAssetAccount", StackType.address, RunMode.any, 2),
                ("FreezeAssetFrozen", StackType.bool, RunMode.any, 2),
                ("Assets", StackType.uint64, RunMode.any, 3),
                ("NumAssets", StackType.uint64, RunMode.any, 3),
                ("Applications", StackType.uint64, RunMode.any, 3),
                ("NumApplications", StackType.uint64, RunMode.any, 3),
                ("GlobalNumUint", StackType.uint64, RunMode.any, 3),
                ("GlobalNumByteSlice", StackType.uint64, RunMode.any, 3),
                ("LocalNumUint", StackType.uint64, RunMode.any, 3),
                ("LocalNumByteSlice", StackType.uint64, RunMode.any, 3),
                ("ExtraProgramPages", StackType.uint64, RunMode.any, 4),
                ("Nonparticipation", StackType.bool, RunMode.any, 5),
                ("Logs", StackType.bytes, RunMode.app, 5),
                ("NumLogs", StackType.uint64, RunMode.app, 5),
                ("CreatedAssetID", StackType.asset, RunMode.app, 5),
                ("CreatedApplicationID", StackType.application, RunMode.app, 5),
                ("LastLog", StackType.bytes, RunMode.app, 6),
                ("StateProofPK", StackType.bytes, RunMode.any, 6),
                ("ApprovalProgramPages", StackType.bytes, RunMode.any, 7),
                ("NumApprovalProgramPages", StackType.uint64, RunMode.any, 7),
                ("ClearStateProgramPages", StackType.bytes, RunMode.any, 7),
                ("NumClearStateProgramPages", StackType.uint64, RunMode.any, 7),
            )
        },
    ),
    immediate_types=(ImmediateKind.uint8, ImmediateKind.arg_enum),
    cost=1,
    min_avm_version=1,
    supported_modes=RunMode.any,
)
"""
field F of the Tth transaction in the current group
for notes on transaction fields available, see `txn`. If this transaction is _i_ in the group,
`gtxn i field` is equivalent to `txn field`.
"""
# `gtxna T F I`: array-field access with all three selectors (transaction,
# field, element index) given as immediates; no stack args are consumed.
gtxna = AVMOpData(
    op_code="gtxna",
    variants=DynamicVariants(
        immediate_index=1,
        variant_map={
            field: Variant(
                signature=OpSignature(args=[], returns=[result]),
                enum=field,
                supported_modes=mode,
                min_avm_version=version,
            )
            # (field name, result stack type, run mode, min AVM version)
            for field, result, mode, version in (
                ("ApplicationArgs", StackType.bytes, RunMode.any, 2),
                ("Accounts", StackType.address, RunMode.any, 2),
                ("Assets", StackType.asset, RunMode.any, 3),
                ("Applications", StackType.application, RunMode.any, 3),
                ("Logs", StackType.bytes, RunMode.app, 5),
                ("ApprovalProgramPages", StackType.bytes, RunMode.any, 7),
                ("ClearStateProgramPages", StackType.bytes, RunMode.any, 7),
            )
        },
    ),
    immediate_types=(ImmediateKind.uint8, ImmediateKind.arg_enum, ImmediateKind.uint8),
    cost=1,
    min_avm_version=2,
    supported_modes=RunMode.any,
)
"""
Ith value of the array field F from the Tth transaction in the current group
`gtxna` can be called using `gtxn` with 3 immediates.
"""
# `gtxnas T F`: like `gtxna`, but the element index A is popped from the
# stack, so every variant consumes a uint64.
gtxnas = AVMOpData(
    op_code="gtxnas",
    variants=DynamicVariants(
        immediate_index=1,
        variant_map={
            field: Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[result]),
                enum=field,
                supported_modes=mode,
                min_avm_version=version,
            )
            # (field name, result stack type, run mode, min AVM version)
            for field, result, mode, version in (
                ("ApplicationArgs", StackType.bytes, RunMode.any, 2),
                ("Accounts", StackType.address, RunMode.any, 2),
                ("Assets", StackType.asset, RunMode.any, 3),
                ("Applications", StackType.application, RunMode.any, 3),
                ("Logs", StackType.bytes, RunMode.app, 5),
                ("ApprovalProgramPages", StackType.bytes, RunMode.any, 7),
                ("ClearStateProgramPages", StackType.bytes, RunMode.any, 7),
            )
        },
    ),
    immediate_types=(ImmediateKind.uint8, ImmediateKind.arg_enum),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.any,
)
"""
Ath value of the array field F from the Tth transaction in the current group
"""
# `gtxns F`: like `gtxn`, but the transaction index A is popped from the
# stack, so every variant consumes a uint64; the field is the sole immediate.
gtxns = AVMOpData(
    op_code="gtxns",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            field: Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[result]),
                enum=field,
                supported_modes=mode,
                min_avm_version=version,
            )
            # (field name, result stack type, run mode, min AVM version)
            for field, result, mode, version in (
                ("Sender", StackType.address, RunMode.any, 0),
                ("Fee", StackType.uint64, RunMode.any, 0),
                ("FirstValid", StackType.uint64, RunMode.any, 0),
                ("FirstValidTime", StackType.uint64, RunMode.any, 7),
                ("LastValid", StackType.uint64, RunMode.any, 0),
                ("Note", StackType.bytes, RunMode.any, 0),
                ("Lease", StackType.bytes, RunMode.any, 0),
                ("Receiver", StackType.address, RunMode.any, 0),
                ("Amount", StackType.uint64, RunMode.any, 0),
                ("CloseRemainderTo", StackType.address, RunMode.any, 0),
                ("VotePK", StackType.bytes, RunMode.any, 0),
                ("SelectionPK", StackType.bytes, RunMode.any, 0),
                ("VoteFirst", StackType.uint64, RunMode.any, 0),
                ("VoteLast", StackType.uint64, RunMode.any, 0),
                ("VoteKeyDilution", StackType.uint64, RunMode.any, 0),
                ("Type", StackType.bytes, RunMode.any, 0),
                ("TypeEnum", StackType.uint64, RunMode.any, 0),
                ("XferAsset", StackType.asset, RunMode.any, 0),
                ("AssetAmount", StackType.uint64, RunMode.any, 0),
                ("AssetSender", StackType.address, RunMode.any, 0),
                ("AssetReceiver", StackType.address, RunMode.any, 0),
                ("AssetCloseTo", StackType.address, RunMode.any, 0),
                ("GroupIndex", StackType.uint64, RunMode.any, 0),
                ("TxID", StackType.bytes, RunMode.any, 0),
                ("ApplicationID", StackType.application, RunMode.any, 2),
                ("OnCompletion", StackType.uint64, RunMode.any, 2),
                ("ApplicationArgs", StackType.bytes, RunMode.any, 2),
                ("NumAppArgs", StackType.uint64, RunMode.any, 2),
                ("Accounts", StackType.address, RunMode.any, 2),
                ("NumAccounts", StackType.uint64, RunMode.any, 2),
                ("ApprovalProgram", StackType.bytes, RunMode.any, 2),
                ("ClearStateProgram", StackType.bytes, RunMode.any, 2),
                ("RekeyTo", StackType.address, RunMode.any, 2),
                ("ConfigAsset", StackType.asset, RunMode.any, 2),
                ("ConfigAssetTotal", StackType.uint64, RunMode.any, 2),
                ("ConfigAssetDecimals", StackType.uint64, RunMode.any, 2),
                ("ConfigAssetDefaultFrozen", StackType.bool, RunMode.any, 2),
                ("ConfigAssetUnitName", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetName", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetURL", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetMetadataHash", StackType.bytes, RunMode.any, 2),
                ("ConfigAssetManager", StackType.address, RunMode.any, 2),
                ("ConfigAssetReserve", StackType.address, RunMode.any, 2),
                ("ConfigAssetFreeze", StackType.address, RunMode.any, 2),
                ("ConfigAssetClawback", StackType.address, RunMode.any, 2),
                ("FreezeAsset", StackType.asset, RunMode.any, 2),
                ("FreezeAssetAccount", StackType.address, RunMode.any, 2),
                ("FreezeAssetFrozen", StackType.bool, RunMode.any, 2),
                ("Assets", StackType.uint64, RunMode.any, 3),
                ("NumAssets", StackType.uint64, RunMode.any, 3),
                ("Applications", StackType.uint64, RunMode.any, 3),
                ("NumApplications", StackType.uint64, RunMode.any, 3),
                ("GlobalNumUint", StackType.uint64, RunMode.any, 3),
                ("GlobalNumByteSlice", StackType.uint64, RunMode.any, 3),
                ("LocalNumUint", StackType.uint64, RunMode.any, 3),
                ("LocalNumByteSlice", StackType.uint64, RunMode.any, 3),
                ("ExtraProgramPages", StackType.uint64, RunMode.any, 4),
                ("Nonparticipation", StackType.bool, RunMode.any, 5),
                ("Logs", StackType.bytes, RunMode.app, 5),
                ("NumLogs", StackType.uint64, RunMode.app, 5),
                ("CreatedAssetID", StackType.asset, RunMode.app, 5),
                ("CreatedApplicationID", StackType.application, RunMode.app, 5),
                ("LastLog", StackType.bytes, RunMode.app, 6),
                ("StateProofPK", StackType.bytes, RunMode.any, 6),
                ("ApprovalProgramPages", StackType.bytes, RunMode.any, 7),
                ("NumApprovalProgramPages", StackType.uint64, RunMode.any, 7),
                ("ClearStateProgramPages", StackType.bytes, RunMode.any, 7),
                ("NumClearStateProgramPages", StackType.uint64, RunMode.any, 7),
            )
        },
    ),
    immediate_types=(ImmediateKind.arg_enum,),
    cost=1,
    min_avm_version=3,
    supported_modes=RunMode.any,
)
"""
field F of the Ath transaction in the current group
for notes on transaction fields available, see `txn`. If top of stack is _i_, `gtxns field` is
equivalent to `gtxn _i_ field`. gtxns exists so that _i_ can be calculated, often based on the
index of the current transaction.
"""
# Op data for `gtxnsa`: indexed array-field access on the group transaction whose
# group index A is popped from the stack. Immediates: field enum F, array index I
# (uint8). Per-field availability is given by each Variant's min_avm_version and
# supported_modes; the op itself needs AVM 3 and runs in any mode.
gtxnsa = AVMOpData(
    op_code="gtxnsa",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            "ApplicationArgs": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="ApplicationArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Accounts": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.address]),
                enum="Accounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Assets": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.asset]),
                enum="Assets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Applications": Variant(
                signature=OpSignature(
                    args=[StackType.uint64], returns=[StackType.application]
                ),
                enum="Applications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Logs": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="Logs",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "ApprovalProgramPages": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="ApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "ClearStateProgramPages": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="ClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
        },
    ),
    immediate_types=(ImmediateKind.arg_enum, ImmediateKind.uint8),
    cost=1,
    min_avm_version=3,
    supported_modes=RunMode.any,
)
"""
Ith value of the array field F from the Ath transaction in the current group
`gtxnsa` can be called using `gtxns` with 2 immediates.
"""
# Op data for `gtxnsas`: fully stack-driven variant of `gtxnsa` — both the group
# transaction index A and the array index B are popped from the stack, so the
# only immediate is the field enum F. Requires AVM 5; runs in any mode.
gtxnsas = AVMOpData(
    op_code="gtxnsas",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            "ApplicationArgs": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.bytes]
                ),
                enum="ApplicationArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Accounts": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.address]
                ),
                enum="Accounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Assets": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.asset]
                ),
                enum="Assets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Applications": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.application]
                ),
                enum="Applications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Logs": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.bytes]
                ),
                enum="Logs",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "ApprovalProgramPages": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.bytes]
                ),
                enum="ApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "ClearStateProgramPages": Variant(
                signature=OpSignature(
                    args=[StackType.uint64, StackType.uint64], returns=[StackType.bytes]
                ),
                enum="ClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
        },
    ),
    immediate_types=(ImmediateKind.arg_enum,),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.any,
)
"""
Bth value of the array field F from the Ath transaction in the current group
"""
# Op data for `itob`: uint64 -> 8-byte big-endian bytes; single fixed variant,
# no immediates, cost 1, available since AVM 1 in any mode.
itob = AVMOpData(
    op_code="itob",
    variants=Variant(
        signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
        enum=None,
        supported_modes=RunMode.any,
        min_avm_version=1,
    ),
    immediate_types=(),
    cost=1,
    min_avm_version=1,
    supported_modes=RunMode.any,
)
"""
converts uint64 A to big-endian byte array, always of length 8
"""
# Op data for `itxn`: reads field F (the arg_enum immediate) of the last inner
# transaction. The op requires AVM 5 and app mode; per-field min_avm_version /
# supported_modes below reflect when each transaction field was introduced.
# NOTE(review): generated table — keep entries in sync with the AVM spec rather
# than editing by hand.
itxn = AVMOpData(
    op_code="itxn",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            "Sender": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="Sender",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Fee": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="Fee",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "FirstValid": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="FirstValid",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "FirstValidTime": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="FirstValidTime",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "LastValid": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="LastValid",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Note": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="Note",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Lease": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="Lease",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Receiver": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="Receiver",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Amount": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="Amount",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "CloseRemainderTo": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="CloseRemainderTo",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VotePK": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="VotePK",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "SelectionPK": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="SelectionPK",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VoteFirst": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="VoteFirst",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VoteLast": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="VoteLast",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VoteKeyDilution": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="VoteKeyDilution",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Type": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="Type",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "TypeEnum": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="TypeEnum",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "XferAsset": Variant(
                signature=OpSignature(args=[], returns=[StackType.asset]),
                enum="XferAsset",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetAmount": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="AssetAmount",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetSender": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="AssetSender",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetReceiver": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="AssetReceiver",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetCloseTo": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="AssetCloseTo",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "GroupIndex": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="GroupIndex",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "TxID": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="TxID",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "ApplicationID": Variant(
                signature=OpSignature(args=[], returns=[StackType.application]),
                enum="ApplicationID",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "OnCompletion": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="OnCompletion",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ApplicationArgs": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ApplicationArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "NumAppArgs": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumAppArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Accounts": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="Accounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "NumAccounts": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumAccounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ApprovalProgram": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ApprovalProgram",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ClearStateProgram": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ClearStateProgram",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "RekeyTo": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="RekeyTo",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAsset": Variant(
                signature=OpSignature(args=[], returns=[StackType.asset]),
                enum="ConfigAsset",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetTotal": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="ConfigAssetTotal",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetDecimals": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="ConfigAssetDecimals",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetDefaultFrozen": Variant(
                signature=OpSignature(args=[], returns=[StackType.bool]),
                enum="ConfigAssetDefaultFrozen",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetUnitName": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ConfigAssetUnitName",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetName": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ConfigAssetName",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetURL": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ConfigAssetURL",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetMetadataHash": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ConfigAssetMetadataHash",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetManager": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="ConfigAssetManager",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetReserve": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="ConfigAssetReserve",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetFreeze": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="ConfigAssetFreeze",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetClawback": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="ConfigAssetClawback",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "FreezeAsset": Variant(
                signature=OpSignature(args=[], returns=[StackType.asset]),
                enum="FreezeAsset",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "FreezeAssetAccount": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="FreezeAssetAccount",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "FreezeAssetFrozen": Variant(
                signature=OpSignature(args=[], returns=[StackType.bool]),
                enum="FreezeAssetFrozen",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Assets": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="Assets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "NumAssets": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumAssets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Applications": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="Applications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "NumApplications": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumApplications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "GlobalNumUint": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="GlobalNumUint",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "GlobalNumByteSlice": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="GlobalNumByteSlice",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "LocalNumUint": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="LocalNumUint",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "LocalNumByteSlice": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="LocalNumByteSlice",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "ExtraProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="ExtraProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=4,
            ),
            "Nonparticipation": Variant(
                signature=OpSignature(args=[], returns=[StackType.bool]),
                enum="Nonparticipation",
                supported_modes=RunMode.any,
                min_avm_version=5,
            ),
            "Logs": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="Logs",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "NumLogs": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumLogs",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "CreatedAssetID": Variant(
                signature=OpSignature(args=[], returns=[StackType.asset]),
                enum="CreatedAssetID",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "CreatedApplicationID": Variant(
                signature=OpSignature(args=[], returns=[StackType.application]),
                enum="CreatedApplicationID",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "LastLog": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="LastLog",
                supported_modes=RunMode.app,
                min_avm_version=6,
            ),
            "StateProofPK": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="StateProofPK",
                supported_modes=RunMode.any,
                min_avm_version=6,
            ),
            "ApprovalProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "NumApprovalProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "ClearStateProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "NumClearStateProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.uint64]),
                enum="NumClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
        },
    ),
    immediate_types=(ImmediateKind.arg_enum,),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.app,
)
"""
field F of the last inner transaction
"""
# Op data for `itxn_begin`: starts a fresh inner-transaction group; pure side
# effect (no stack args or results). App mode only, AVM 5+.
itxn_begin = AVMOpData(
    op_code="itxn_begin",
    variants=Variant(
        signature=OpSignature(args=[], returns=[]),
        enum=None,
        supported_modes=RunMode.app,
        min_avm_version=5,
    ),
    immediate_types=(),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.app,
)
"""
begin preparation of a new inner transaction in a new transaction group
`itxn_begin` initializes Sender to the application address; Fee to the minimum allowable,
taking into account MinTxnFee and credit from overpaying in earlier transactions;
FirstValid/LastValid to the values in the invoking transaction, and all other fields to zero or
empty values.
"""
# Op data for `itxn_field`: sets field F (arg_enum immediate) of the inner
# transaction under construction to stack value A; every variant consumes one
# value and returns nothing. Note this table only contains the *settable*
# fields — read-only fields such as TxID or NumLogs (present in `itxn`) are
# deliberately absent. App mode only, AVM 5+.
itxn_field = AVMOpData(
    op_code="itxn_field",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            "Sender": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="Sender",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Fee": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="Fee",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Note": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="Note",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Receiver": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="Receiver",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Amount": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="Amount",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "CloseRemainderTo": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="CloseRemainderTo",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VotePK": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="VotePK",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "SelectionPK": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="SelectionPK",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VoteFirst": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="VoteFirst",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VoteLast": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="VoteLast",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "VoteKeyDilution": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="VoteKeyDilution",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "Type": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="Type",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "TypeEnum": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="TypeEnum",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "XferAsset": Variant(
                signature=OpSignature(args=[StackType.asset], returns=[]),
                enum="XferAsset",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetAmount": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="AssetAmount",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetSender": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="AssetSender",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetReceiver": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="AssetReceiver",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "AssetCloseTo": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="AssetCloseTo",
                supported_modes=RunMode.any,
                min_avm_version=0,
            ),
            "ApplicationID": Variant(
                signature=OpSignature(args=[StackType.application], returns=[]),
                enum="ApplicationID",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "OnCompletion": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="OnCompletion",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ApplicationArgs": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ApplicationArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Accounts": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="Accounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ApprovalProgram": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ApprovalProgram",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ClearStateProgram": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ClearStateProgram",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "RekeyTo": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="RekeyTo",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAsset": Variant(
                signature=OpSignature(args=[StackType.asset], returns=[]),
                enum="ConfigAsset",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetTotal": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="ConfigAssetTotal",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetDecimals": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="ConfigAssetDecimals",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetDefaultFrozen": Variant(
                signature=OpSignature(args=[StackType.bool], returns=[]),
                enum="ConfigAssetDefaultFrozen",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetUnitName": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ConfigAssetUnitName",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetName": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ConfigAssetName",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetURL": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ConfigAssetURL",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetMetadataHash": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ConfigAssetMetadataHash",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetManager": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="ConfigAssetManager",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetReserve": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="ConfigAssetReserve",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetFreeze": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="ConfigAssetFreeze",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "ConfigAssetClawback": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="ConfigAssetClawback",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "FreezeAsset": Variant(
                signature=OpSignature(args=[StackType.asset], returns=[]),
                enum="FreezeAsset",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "FreezeAssetAccount": Variant(
                signature=OpSignature(args=[StackType.address], returns=[]),
                enum="FreezeAssetAccount",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "FreezeAssetFrozen": Variant(
                signature=OpSignature(args=[StackType.bool], returns=[]),
                enum="FreezeAssetFrozen",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Assets": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="Assets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Applications": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="Applications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "GlobalNumUint": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="GlobalNumUint",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "GlobalNumByteSlice": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="GlobalNumByteSlice",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "LocalNumUint": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="LocalNumUint",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "LocalNumByteSlice": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="LocalNumByteSlice",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "ExtraProgramPages": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[]),
                enum="ExtraProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=4,
            ),
            "Nonparticipation": Variant(
                signature=OpSignature(args=[StackType.bool], returns=[]),
                enum="Nonparticipation",
                supported_modes=RunMode.any,
                min_avm_version=5,
            ),
            "StateProofPK": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="StateProofPK",
                supported_modes=RunMode.any,
                min_avm_version=6,
            ),
            "ApprovalProgramPages": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "ClearStateProgramPages": Variant(
                signature=OpSignature(args=[StackType.bytes], returns=[]),
                enum="ClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
        },
    ),
    immediate_types=(ImmediateKind.arg_enum,),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.app,
)
"""
set field F of the current inner transaction to A
`itxn_field` fails if A is of the wrong type for F, including a byte array of the wrong size
for use as an address when F is an address field. `itxn_field` also fails if A is an account,
asset, or app that is not _available_, or an attempt is made extend an array field beyond the
limit imposed by consensus parameters. (Addresses set into asset params of acfg transactions
need not be _available_.)
"""
# Op data for `itxn_next`: appends another inner transaction to the group begun
# by `itxn_begin`; pure side effect. App mode only, AVM 6+ (unlike itxn_begin's 5).
itxn_next = AVMOpData(
    op_code="itxn_next",
    variants=Variant(
        signature=OpSignature(args=[], returns=[]),
        enum=None,
        supported_modes=RunMode.app,
        min_avm_version=6,
    ),
    immediate_types=(),
    cost=1,
    min_avm_version=6,
    supported_modes=RunMode.app,
)
"""
begin preparation of a new inner transaction in the same transaction group
`itxn_next` initializes the transaction exactly as `itxn_begin` does
"""
# Op data for `itxn_submit`: executes the prepared inner-transaction group;
# pure side effect. App mode only, AVM 5+.
itxn_submit = AVMOpData(
    op_code="itxn_submit",
    variants=Variant(
        signature=OpSignature(args=[], returns=[]),
        enum=None,
        supported_modes=RunMode.app,
        min_avm_version=5,
    ),
    immediate_types=(),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.app,
)
"""
execute the current inner transaction group. Fail if executing this group would exceed the
inner transaction limit, or if any transaction in the group fails.
`itxn_submit` resets the current transaction so that it can not be resubmitted. A new
`itxn_begin` is required to prepare another inner transaction.
"""
# Op data for `itxna`: indexed array-field access on the last inner transaction;
# immediates are the field enum F and array index I (uint8), no stack args.
# App mode only, AVM 5+.
itxna = AVMOpData(
    op_code="itxna",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            "ApplicationArgs": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ApplicationArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Accounts": Variant(
                signature=OpSignature(args=[], returns=[StackType.address]),
                enum="Accounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Assets": Variant(
                signature=OpSignature(args=[], returns=[StackType.asset]),
                enum="Assets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Applications": Variant(
                signature=OpSignature(args=[], returns=[StackType.application]),
                enum="Applications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Logs": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="Logs",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "ApprovalProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "ClearStateProgramPages": Variant(
                signature=OpSignature(args=[], returns=[StackType.bytes]),
                enum="ClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
        },
    ),
    immediate_types=(ImmediateKind.arg_enum, ImmediateKind.uint8),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.app,
)
"""
Ith value of the array field F of the last inner transaction
"""
# Op data for `itxnas`: like `itxna` but the array index A is popped from the
# stack, leaving the field enum F as the only immediate. App mode only, AVM 6+.
itxnas = AVMOpData(
    op_code="itxnas",
    variants=DynamicVariants(
        immediate_index=0,
        variant_map={
            "ApplicationArgs": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="ApplicationArgs",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Accounts": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.address]),
                enum="Accounts",
                supported_modes=RunMode.any,
                min_avm_version=2,
            ),
            "Assets": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.asset]),
                enum="Assets",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Applications": Variant(
                signature=OpSignature(
                    args=[StackType.uint64], returns=[StackType.application]
                ),
                enum="Applications",
                supported_modes=RunMode.any,
                min_avm_version=3,
            ),
            "Logs": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="Logs",
                supported_modes=RunMode.app,
                min_avm_version=5,
            ),
            "ApprovalProgramPages": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="ApprovalProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
            "ClearStateProgramPages": Variant(
                signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
                enum="ClearStateProgramPages",
                supported_modes=RunMode.any,
                min_avm_version=7,
            ),
        },
    ),
    immediate_types=(ImmediateKind.arg_enum,),
    cost=1,
    min_avm_version=6,
    supported_modes=RunMode.app,
)
"""
Ath value of the array field F of the last inner transaction
"""
json_ref = AVMOpData(
op_code="json_ref",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"JSONString": Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum="JSONString",
supported_modes=RunMode.any,
min_avm_version=7,
),
"JSONUint64": Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.uint64]
),
enum="JSONUint64",
supported_modes=RunMode.any,
min_avm_version=7,
),
"JSONObject": Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum="JSONObject",
supported_modes=RunMode.any,
min_avm_version=7,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
key B's value, of type R, from a [valid](jsonspec.md) utf-8 encoded json object A
*Warning*: Usage should be restricted to very rare use cases, as JSON decoding is expensive and
quite limited. In addition, JSON objects are large and not optimized for size.
Almost all smart contracts should use simpler and smaller methods (such as the
[ABI](https://arc.algorand.foundation/ARCs/arc-0004). This opcode should only be used in cases
where JSON is only available option, e.g. when a third-party only signs JSON.
"""
# Op data for `keccak256`: bytes -> 32-byte hash; note the fixed opcode cost of
# 130, far above the usual 1. AVM 1+, any mode.
keccak256 = AVMOpData(
    op_code="keccak256",
    variants=Variant(
        signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
        enum=None,
        supported_modes=RunMode.any,
        min_avm_version=1,
    ),
    immediate_types=(),
    cost=130,
    min_avm_version=1,
    supported_modes=RunMode.any,
)
"""
Keccak256 hash of value A, yields [32]byte
"""
# Op data for the AVM `len` op (trailing underscore avoids shadowing the Python
# builtin): bytes -> length as uint64. AVM 1+, any mode.
len_ = AVMOpData(
    op_code="len",
    variants=Variant(
        signature=OpSignature(args=[StackType.bytes], returns=[StackType.uint64]),
        enum=None,
        supported_modes=RunMode.any,
        min_avm_version=1,
    ),
    immediate_types=(),
    cost=1,
    min_avm_version=1,
    supported_modes=RunMode.any,
)
"""
yields length of byte value A
"""
# Op data for `load`: read scratch slot I (uint8 immediate); pushes an `any`
# typed value since scratch slots are untyped. AVM 1+, any mode.
load = AVMOpData(
    op_code="load",
    variants=Variant(
        signature=OpSignature(args=[], returns=[StackType.any]),
        enum=None,
        supported_modes=RunMode.any,
        min_avm_version=1,
    ),
    immediate_types=(ImmediateKind.uint8,),
    cost=1,
    min_avm_version=1,
    supported_modes=RunMode.any,
)
"""
Ith scratch space value. All scratch spaces are 0 at program start.
"""
# Op data for `loads`: like `load` but the slot index A comes from the stack
# instead of an immediate. AVM 5+, any mode.
loads = AVMOpData(
    op_code="loads",
    variants=Variant(
        signature=OpSignature(args=[StackType.uint64], returns=[StackType.any]),
        enum=None,
        supported_modes=RunMode.any,
        min_avm_version=5,
    ),
    immediate_types=(),
    cost=1,
    min_avm_version=5,
    supported_modes=RunMode.any,
)
"""
Ath scratch space value. All scratch spaces are 0 at program start.
"""
log = AVMOpData(
op_code="log",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.app,
)
"""
write A to log state of the current application
`log` fails if called more than MaxLogCalls times in a program, or if the sum of logged bytes
exceeds 1024 bytes.
"""
lt = AVMOpData(
op_code="<",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A less than B => {0 or 1}
"""
lt_bytes = AVMOpData(
op_code="b<",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
1 if A is less than B, else 0. A and B are interpreted as big-endian unsigned integers
"""
lte = AVMOpData(
op_code="<=",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A less than or equal to B => {0 or 1}
"""
lte_bytes = AVMOpData(
op_code="b<=",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
1 if A is less than or equal to B, else 0. A and B are interpreted as big-endian unsigned
integers
"""
mimc = AVMOpData(
op_code="mimc",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=11,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=None,
min_avm_version=11,
supported_modes=RunMode.any,
)
"""
MiMC hash of scalars A, using curve and parameters specified by configuration C
A is a list of concatenated 32 byte big-endian unsigned integer scalars. Fail if A's length is
not a multiple of 32 or any element exceeds the curve modulus.
The MiMC hash function has known collisions since any input which is a multiple of the elliptic
curve modulus will hash to the same value. MiMC is thus not a general purpose hash function,
but meant to be used in zero knowledge applications to match a zk-circuit implementation.
"""
min_balance = AVMOpData(
op_code="min_balance",
variants=Variant(
signature=OpSignature(args=[StackType.address_or_index], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.app,
)
"""
minimum required balance for account A, in microalgos. Required balance is affected by ASA,
App, and Box usage. When creating or opting into an app, the minimum balance grows before the
app code runs, therefore the increase is visible there. When deleting or closing out, the
minimum balance decreases after the app executes. Changes caused by inner transactions or box
usage are observable immediately following the opcode effecting the change.
params: Txn.Accounts offset (or, since v4, an _available_ account address), _available_
application id (or, since v4, a Txn.ForeignApps offset). Return: value.
"""
mod = AVMOpData(
op_code="%",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A modulo B. Fail if B == 0.
"""
mod_bytes = AVMOpData(
op_code="b%",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bigint]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=20,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A modulo B. A and B are interpreted as big-endian unsigned integers. Fail if B is zero.
"""
mul = AVMOpData(
op_code="*",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A times B. Fail on overflow.
Overflow is an error condition which halts execution and fails the transaction. Full precision
is available from `mulw`.
"""
mul_bytes = AVMOpData(
op_code="b*",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bigint]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=20,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A times B. A and B are interpreted as big-endian unsigned integers.
"""
mulw = AVMOpData(
op_code="mulw",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64],
returns=[StackType.uint64, StackType.uint64],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A times B as a 128-bit result in two uint64s. X is the high 64 bits, Y is the low
"""
neq = AVMOpData(
op_code="!=",
variants=Variant(
signature=OpSignature(args=[StackType.any, StackType.any], returns=[StackType.bool]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A is not equal to B => {0 or 1}
"""
neq_bytes = AVMOpData(
op_code="b!=",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
0 if A is equal to B, else 1. A and B are interpreted as big-endian unsigned integers
"""
not_ = AVMOpData(
op_code="!",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bool]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A == 0 yields 1; else 0
"""
online_stake = AVMOpData(
op_code="online_stake",
variants=Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.app,
min_avm_version=11,
),
immediate_types=(),
cost=1,
min_avm_version=11,
supported_modes=RunMode.app,
)
"""
the total online stake in the agreement round
"""
or_ = AVMOpData(
op_code="||",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.bool]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A is not zero or B is not zero => {0 or 1}
"""
replace2 = AVMOpData(
op_code="replace2",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes], returns=[StackType.bytes]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=7,
),
immediate_types=(ImmediateKind.uint8,),
cost=1,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
Copy of A with the bytes starting at S replaced by the bytes of B. Fails if S+len(B) exceeds
len(A)
`replace2` can be called using `replace` with 1 immediate.
"""
replace3 = AVMOpData(
op_code="replace3",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64, StackType.bytes],
returns=[StackType.bytes],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=7,
),
immediate_types=(),
cost=1,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
Copy of A with the bytes starting at B replaced by the bytes of C. Fails if B+len(C) exceeds
len(A)
`replace3` can be called using `replace` with no immediates.
"""
select = AVMOpData(
op_code="select",
variants=Variant(
signature=OpSignature(
args=[StackType.any, StackType.any, StackType.bool], returns=[StackType.any]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.any,
)
"""
selects one of two values based on top-of-stack: B if C != 0, else A
"""
setbit = AVMOpData(
op_code="setbit",
variants=Variant(
signature=OpSignature(
args=[StackType.any, StackType.uint64, StackType.uint64], returns=[StackType.any]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.any,
)
"""
Copy of (byte-array or integer) A, with the Bth bit set to (0 or 1) C. If B is greater than or
equal to the bit length of the value (8*byte length), the program fails
When A is a uint64, index 0 is the least significant bit. Setting bit 3 to 1 on the integer 0
yields 8, or 2^3. When A is a byte array, index 0 is the leftmost bit of the leftmost byte.
Setting bits 0 through 11 to 1 in a 4-byte-array of 0s yields the byte array 0xfff00000.
Setting bit 3 to 1 on the 1-byte-array 0x00 yields the byte array 0x10.
"""
setbyte = AVMOpData(
op_code="setbyte",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64, StackType.uint64],
returns=[StackType.bytes],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=3,
),
immediate_types=(),
cost=1,
min_avm_version=3,
supported_modes=RunMode.any,
)
"""
Copy of A with the Bth byte set to small integer (between 0..255) C. If B is greater than or
equal to the array length, the program fails
"""
sha256 = AVMOpData(
op_code="sha256",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=35,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
SHA256 hash of value A, yields [32]byte
"""
sha3_256 = AVMOpData(
op_code="sha3_256",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=7,
),
immediate_types=(),
cost=130,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
SHA3_256 hash of value A, yields [32]byte
"""
sha512_256 = AVMOpData(
op_code="sha512_256",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=45,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
SHA512_256 hash of value A, yields [32]byte
"""
shl = AVMOpData(
op_code="shl",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A times 2^B, modulo 2^64
"""
shr = AVMOpData(
op_code="shr",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=1,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A divided by 2^B
"""
sqrt = AVMOpData(
op_code="sqrt",
variants=Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.uint64]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=4,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
The largest integer I such that I^2 <= A
"""
store = AVMOpData(
op_code="store",
variants=Variant(
signature=OpSignature(args=[StackType.any], returns=[]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(ImmediateKind.uint8,),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
store A to the Ith scratch space
"""
stores = AVMOpData(
op_code="stores",
variants=Variant(
signature=OpSignature(args=[StackType.uint64, StackType.any], returns=[]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=5,
),
immediate_types=(),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
store B to the Ath scratch space
"""
sub = AVMOpData(
op_code="-",
variants=Variant(
signature=OpSignature(
args=[StackType.uint64, StackType.uint64], returns=[StackType.uint64]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=1,
),
immediate_types=(),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
A minus B. Fail if B > A.
"""
sub_bytes = AVMOpData(
op_code="b-",
variants=Variant(
signature=OpSignature(
args=[StackType.bigint, StackType.bigint], returns=[StackType.bigint]
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=4,
),
immediate_types=(),
cost=10,
min_avm_version=4,
supported_modes=RunMode.any,
)
"""
A minus B. A and B are interpreted as big-endian unsigned integers. Fail on underflow.
"""
substring = AVMOpData(
op_code="substring",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=2,
),
immediate_types=(ImmediateKind.uint8, ImmediateKind.uint8),
cost=1,
min_avm_version=2,
supported_modes=RunMode.any,
)
"""
A range of bytes from A starting at S up to but not including E. If E < S, or either is larger
than the array length, the program fails
"""
substring3 = AVMOpData(
op_code="substring3",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.uint64, StackType.uint64],
returns=[StackType.bytes],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=2,
),
immediate_types=(),
cost=1,
min_avm_version=2,
supported_modes=RunMode.any,
)
"""
A range of bytes from A starting at B up to but not including C. If C < B, or either is larger
than the array length, the program fails
"""
sumhash512 = AVMOpData(
op_code="sumhash512",
variants=Variant(
signature=OpSignature(args=[StackType.bytes], returns=[StackType.bytes]),
enum=None,
supported_modes=RunMode.any,
min_avm_version=12,
),
immediate_types=(),
cost=None,
min_avm_version=12,
supported_modes=RunMode.any,
)
"""
sumhash512 of value A, yields [64]byte
"""
txn = AVMOpData(
op_code="txn",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"Sender": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Sender",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Fee": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Fee",
supported_modes=RunMode.any,
min_avm_version=0,
),
"FirstValid": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="FirstValid",
supported_modes=RunMode.any,
min_avm_version=0,
),
"FirstValidTime": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="FirstValidTime",
supported_modes=RunMode.any,
min_avm_version=7,
),
"LastValid": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="LastValid",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Note": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Note",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Lease": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Lease",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Receiver": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Receiver",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Amount": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Amount",
supported_modes=RunMode.any,
min_avm_version=0,
),
"CloseRemainderTo": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="CloseRemainderTo",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VotePK": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="VotePK",
supported_modes=RunMode.any,
min_avm_version=0,
),
"SelectionPK": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="SelectionPK",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VoteFirst": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="VoteFirst",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VoteLast": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="VoteLast",
supported_modes=RunMode.any,
min_avm_version=0,
),
"VoteKeyDilution": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="VoteKeyDilution",
supported_modes=RunMode.any,
min_avm_version=0,
),
"Type": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Type",
supported_modes=RunMode.any,
min_avm_version=0,
),
"TypeEnum": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="TypeEnum",
supported_modes=RunMode.any,
min_avm_version=0,
),
"XferAsset": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="XferAsset",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetAmount": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="AssetAmount",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetSender": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="AssetSender",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetReceiver": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="AssetReceiver",
supported_modes=RunMode.any,
min_avm_version=0,
),
"AssetCloseTo": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="AssetCloseTo",
supported_modes=RunMode.any,
min_avm_version=0,
),
"GroupIndex": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="GroupIndex",
supported_modes=RunMode.any,
min_avm_version=0,
),
"TxID": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="TxID",
supported_modes=RunMode.any,
min_avm_version=0,
),
"ApplicationID": Variant(
signature=OpSignature(args=[], returns=[StackType.application]),
enum="ApplicationID",
supported_modes=RunMode.any,
min_avm_version=2,
),
"OnCompletion": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="OnCompletion",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ApplicationArgs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApplicationArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"NumAppArgs": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumAppArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Accounts": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Accounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"NumAccounts": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumAccounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ApprovalProgram": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApprovalProgram",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ClearStateProgram": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ClearStateProgram",
supported_modes=RunMode.any,
min_avm_version=2,
),
"RekeyTo": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="RekeyTo",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAsset": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="ConfigAsset",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetTotal": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="ConfigAssetTotal",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetDecimals": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="ConfigAssetDecimals",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetDefaultFrozen": Variant(
signature=OpSignature(args=[], returns=[StackType.bool]),
enum="ConfigAssetDefaultFrozen",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetUnitName": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetUnitName",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetName": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetName",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetURL": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetURL",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetMetadataHash": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ConfigAssetMetadataHash",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetManager": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetManager",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetReserve": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetReserve",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetFreeze": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetFreeze",
supported_modes=RunMode.any,
min_avm_version=2,
),
"ConfigAssetClawback": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="ConfigAssetClawback",
supported_modes=RunMode.any,
min_avm_version=2,
),
"FreezeAsset": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="FreezeAsset",
supported_modes=RunMode.any,
min_avm_version=2,
),
"FreezeAssetAccount": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="FreezeAssetAccount",
supported_modes=RunMode.any,
min_avm_version=2,
),
"FreezeAssetFrozen": Variant(
signature=OpSignature(args=[], returns=[StackType.bool]),
enum="FreezeAssetFrozen",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Assets": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Assets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"NumAssets": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumAssets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Applications": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="Applications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"NumApplications": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumApplications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"GlobalNumUint": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="GlobalNumUint",
supported_modes=RunMode.any,
min_avm_version=3,
),
"GlobalNumByteSlice": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="GlobalNumByteSlice",
supported_modes=RunMode.any,
min_avm_version=3,
),
"LocalNumUint": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="LocalNumUint",
supported_modes=RunMode.any,
min_avm_version=3,
),
"LocalNumByteSlice": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="LocalNumByteSlice",
supported_modes=RunMode.any,
min_avm_version=3,
),
"ExtraProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="ExtraProgramPages",
supported_modes=RunMode.any,
min_avm_version=4,
),
"Nonparticipation": Variant(
signature=OpSignature(args=[], returns=[StackType.bool]),
enum="Nonparticipation",
supported_modes=RunMode.any,
min_avm_version=5,
),
"Logs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Logs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"NumLogs": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumLogs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"CreatedAssetID": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="CreatedAssetID",
supported_modes=RunMode.app,
min_avm_version=5,
),
"CreatedApplicationID": Variant(
signature=OpSignature(args=[], returns=[StackType.application]),
enum="CreatedApplicationID",
supported_modes=RunMode.app,
min_avm_version=5,
),
"LastLog": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="LastLog",
supported_modes=RunMode.app,
min_avm_version=6,
),
"StateProofPK": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="StateProofPK",
supported_modes=RunMode.any,
min_avm_version=6,
),
"ApprovalProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"NumApprovalProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"ClearStateProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"NumClearStateProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.uint64]),
enum="NumClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=1,
supported_modes=RunMode.any,
)
"""
field F of current transaction
"""
txna = AVMOpData(
op_code="txna",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"ApplicationArgs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApplicationArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Accounts": Variant(
signature=OpSignature(args=[], returns=[StackType.address]),
enum="Accounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Assets": Variant(
signature=OpSignature(args=[], returns=[StackType.asset]),
enum="Assets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Applications": Variant(
signature=OpSignature(args=[], returns=[StackType.application]),
enum="Applications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Logs": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="Logs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"ApprovalProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"ClearStateProgramPages": Variant(
signature=OpSignature(args=[], returns=[StackType.bytes]),
enum="ClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
},
),
immediate_types=(ImmediateKind.arg_enum, ImmediateKind.uint8),
cost=1,
min_avm_version=2,
supported_modes=RunMode.any,
)
"""
Ith value of the array field F of the current transaction
`txna` can be called using `txn` with 2 immediates.
"""
txnas = AVMOpData(
op_code="txnas",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"ApplicationArgs": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="ApplicationArgs",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Accounts": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.address]),
enum="Accounts",
supported_modes=RunMode.any,
min_avm_version=2,
),
"Assets": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.asset]),
enum="Assets",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Applications": Variant(
signature=OpSignature(
args=[StackType.uint64], returns=[StackType.application]
),
enum="Applications",
supported_modes=RunMode.any,
min_avm_version=3,
),
"Logs": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="Logs",
supported_modes=RunMode.app,
min_avm_version=5,
),
"ApprovalProgramPages": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="ApprovalProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
"ClearStateProgramPages": Variant(
signature=OpSignature(args=[StackType.uint64], returns=[StackType.bytes]),
enum="ClearStateProgramPages",
supported_modes=RunMode.any,
min_avm_version=7,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=5,
supported_modes=RunMode.any,
)
"""
Ath value of the array field F of the current transaction
"""
voter_params_get = AVMOpData(
op_code="voter_params_get",
variants=DynamicVariants(
immediate_index=0,
variant_map={
"VoterBalance": Variant(
signature=OpSignature(
args=[StackType.address_or_index],
returns=[StackType.uint64, StackType.bool],
),
enum="VoterBalance",
supported_modes=RunMode.app,
min_avm_version=11,
),
"VoterIncentiveEligible": Variant(
signature=OpSignature(
args=[StackType.address_or_index], returns=[StackType.bool, StackType.bool]
),
enum="VoterIncentiveEligible",
supported_modes=RunMode.app,
min_avm_version=11,
),
},
),
immediate_types=(ImmediateKind.arg_enum,),
cost=1,
min_avm_version=11,
supported_modes=RunMode.app,
)
"""
X is field F from online account A as of the balance round: 320 rounds before the current
round. Y is 1 if A had positive algos online in the agreement round, else Y is 0 and X is a
type specific zero-value
"""
vrf_verify = AVMOpData(
op_code="vrf_verify",
variants=Variant(
signature=OpSignature(
args=[StackType.bytes, StackType.bytes, StackType.bytes],
returns=[StackType.bytes, StackType.bool],
),
enum=None,
supported_modes=RunMode.any,
min_avm_version=7,
),
immediate_types=(ImmediateKind.arg_enum,),
cost=5700,
min_avm_version=7,
supported_modes=RunMode.any,
)
"""
Verify the proof B of message A against pubkey C. Returns vrf output and verification flag.
`VrfAlgorand` is the VRF used in Algorand. It is ECVRF-ED25519-SHA512-Elligator2, specified in
the IETF internet draft [draft-irtf-cfrg-vrf-03](https://datatracker.ietf.org/doc/draft-irtf-
cfrg-vrf/03/).
"""
|
algorandfoundation/puya
|
src/puya/ir/avm_ops.py
|
Python
|
NOASSERTION
| 240,926 |
import enum
from collections.abc import Sequence
import attrs
class StackType(enum.StrEnum):
uint64 = enum.auto()
bytes = "[]byte"
bool = enum.auto()
address = enum.auto()
address_or_index = enum.auto()
any = enum.auto()
bigint = enum.auto()
box_name = "boxName"
asset = enum.auto()
application = enum.auto()
state_key = "stateKey"
def __repr__(self) -> str:
return f"{type(self).__name__}.{self.name}"
class RunMode(enum.StrEnum):
app = enum.auto()
lsig = enum.auto()
any = enum.auto()
def __repr__(self) -> str:
return f"{type(self).__name__}.{self.name}"
class ImmediateKind(enum.StrEnum):
uint8 = enum.auto()
arg_enum = enum.auto()
def __repr__(self) -> str:
return f"{type(self).__name__}.{self.name}"
@attrs.frozen
class OpSignature:
    """Stack signature of a single op variant."""

    args: Sequence[StackType]  # types consumed from the stack
    returns: Sequence[StackType]  # types pushed onto the stack
@attrs.frozen
class Variant:
    """One concrete form of an AVM op."""

    signature: OpSignature
    # name of the arg-enum immediate this variant corresponds to,
    # or None when the op has a single fixed signature
    enum: str | None
    supported_modes: RunMode
    min_avm_version: int
@attrs.frozen
class DynamicVariants:
    """Variants of an op whose signature is selected by one of its immediates."""

    # index (within the op's immediates) of the arg-enum immediate that selects the variant
    immediate_index: int
    # arg-enum value name -> variant for that value
    variant_map: dict[str, Variant]
@attrs.define(kw_only=True)
class AVMOpData:
    """Static metadata describing one AVM opcode and its variant(s)."""

    op_code: str  # the literal opcode mnemonic/symbol, e.g. "sha256" or "b%"
    variants: Variant | DynamicVariants
    # kinds of the op's immediate arguments, in order (empty for ops with none)
    immediate_types: Sequence[ImmediateKind] = attrs.field(default=())
    # opcode cost in budget units; None when the cost is not a fixed constant
    cost: int | None
    min_avm_version: int
    supported_modes: RunMode
|
algorandfoundation/puya
|
src/puya/ir/avm_ops_models.py
|
Python
|
NOASSERTION
| 1,398 |
algorandfoundation/puya
|
src/puya/ir/builder/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
from collections.abc import Sequence
from puya.awst import wtypes
from puya.errors import InternalError
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import Register, Value, ValueProvider, ValueTuple
from puya.ir.types_ import IRType, get_wtype_arity, sum_wtypes_arity, wtype_to_ir_type
from puya.ir.utils import format_tuple_index
from puya.parse import SourceLocation
def get_tuple_item_values(
    *,
    tuple_values: Sequence[Value],
    tuple_wtype: wtypes.WTuple,
    index: int | tuple[int, int | None],
    target_wtype: wtypes.WType,
    source_location: SourceLocation,
) -> ValueProvider:
    """Select the flattened values for one item (or slice) of a tuple.

    ``index`` is either a single item index or a ``(start, stop)`` pair
    describing a slice. Offsets into ``tuple_values`` are computed from the
    flattened arity of the preceding item wtypes.

    Raises InternalError if ``target_wtype``'s arity does not match the
    selected span.
    """
    if isinstance(index, tuple):
        start, stop = index
        offset = sum_wtypes_arity(tuple_wtype.types[:start])
        arity = sum_wtypes_arity(tuple_wtype.types[start:stop])
    else:
        offset = sum_wtypes_arity(tuple_wtype.types[:index])
        arity = get_wtype_arity(tuple_wtype.types[index])
    if get_wtype_arity(target_wtype) != arity:
        raise InternalError(
            "arity difference between result type and expected type", source_location
        )
    selected = tuple_values[offset : offset + arity]
    # a single non-tuple result is returned bare rather than wrapped
    if not isinstance(target_wtype, wtypes.WTuple) and len(selected) == 1:
        return selected[0]
    return ValueTuple(values=selected, source_location=source_location)
def build_tuple_registers(
    context: IRFunctionBuildContext,
    base_name: str,
    wtype: wtypes.WType,
    source_location: SourceLocation | None,
) -> list[Register]:
    """Create a fresh SSA register for each flattened component of ``wtype``."""
    registers = []
    for item_name, item_ir_type in build_tuple_item_names(base_name, wtype, source_location):
        registers.append(context.ssa.new_register(item_name, item_ir_type, source_location))
    return registers
def build_tuple_item_names(
    base_name: str,
    wtype: wtypes.WType,
    source_location: SourceLocation | None,
) -> list[tuple[str, IRType]]:
    """Flatten ``wtype`` into ``(name, ir_type)`` pairs, recursing into nested tuples."""
    # non-tuples flatten to themselves
    if not isinstance(wtype, wtypes.WTuple):
        return [(base_name, wtype_to_ir_type(wtype, source_location))]
    flattened: list[tuple[str, IRType]] = []
    for idx, item_wtype in enumerate(wtype.types):
        item_name = format_tuple_index(wtype, base_name, idx)
        flattened.extend(build_tuple_item_names(item_name, item_wtype, source_location))
    return flattened
|
algorandfoundation/puya
|
src/puya/ir/builder/_tuple_util.py
|
Python
|
NOASSERTION
| 2,203 |
import typing
from collections.abc import Sequence
import attrs
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.errors import InternalError
from puya.ir.avm_ops import AVMOp
from puya.ir.context import TMP_VAR_INDICATOR, IRFunctionBuildContext
from puya.ir.models import (
Assignment,
BytesConstant,
Intrinsic,
InvokeSubroutine,
Register,
UInt64Constant,
Value,
ValueProvider,
)
from puya.ir.types_ import AVMBytesEncoding, IRType
from puya.parse import SourceLocation
def assign(
    context: IRFunctionBuildContext,
    source: ValueProvider,
    *,
    name: str,
    assignment_location: SourceLocation | None,
    register_location: SourceLocation | None = None,
) -> Register:
    """Assign the single-valued ``source`` to a new register named ``name``.

    ``register_location`` defaults to ``assignment_location`` when not given.
    """
    # source must produce exactly one value
    (only_type,) = source.types
    target = context.ssa.new_register(
        name, only_type, register_location or assignment_location
    )
    assign_targets(
        context=context,
        source=source,
        targets=[target],
        assignment_location=assignment_location,
    )
    return target
def new_register_version(context: IRFunctionBuildContext, reg: Register) -> Register:
    """Create the next SSA version of ``reg`` (same name, type, and location)."""
    return context.ssa.new_register(
        name=reg.name,
        ir_type=reg.ir_type,
        location=reg.source_location,
    )
def assign_temp(
    context: IRFunctionBuildContext,
    source: ValueProvider,
    *,
    temp_description: str,
    source_location: SourceLocation | None,
) -> Register:
    """Assign the single-valued ``source`` to a freshly named temporary register."""
    # source must produce exactly one value
    (only_type,) = source.types
    temp = mktemp(context, only_type, source_location, description=temp_description)
    assign_targets(
        context,
        source=source,
        targets=[temp],
        assignment_location=source_location,
    )
    return temp
def assign_targets(
    context: IRFunctionBuildContext,
    *,
    source: ValueProvider,
    targets: list[Register],
    assignment_location: SourceLocation | None,
) -> None:
    """Assign ``source`` to ``targets``, recording SSA state and emitting the IR.

    Registers each target as the current SSA definition in the active block,
    appends the Assignment, and re-emits implicit-return bookkeeping for any
    target that is an implicitly returned parameter of the subroutine.
    """
    # nothing to emit for a void source with no targets
    if not (source.types or targets):
        return
    for target in targets:
        context.ssa.write_variable(target.name, context.block_builder.active_block, target)
    context.block_builder.add(
        Assignment(targets=targets, source=source, source_location=assignment_location)
    )
    # also update any implicitly returned variables
    implicit_params = {p.name for p in context.subroutine.parameters if p.implicit_return}
    for target in targets:
        if target.name in implicit_params:
            _update_implicit_out_var(context, target.name, target.ir_type)
def _update_implicit_out_var(context: IRFunctionBuildContext, var: str, ir_type: IRType) -> None:
    """Copy a newly assigned implicit-return parameter ``var`` into its %out variable.

    Builds and visits AWST equivalent to::

        if var%is_original:
            var%out = var

    The copy is guarded by the synthetic ``%is_original`` flag; presumably this
    prevents clobbering the out-value after the variable has been rebound — see
    the SSA/implicit-return handling elsewhere in the IR builder.
    """
    # emit conditional assignment equivalent to
    # if var%is_original:
    #   var%out = var
    # synthetic node: has no meaningful source position
    loc = SourceLocation(file=None, line=1)
    # only uint64- and bytes-backed values exist at this level
    wtype = wtypes.bytes_wtype if ir_type == IRType.bytes else wtypes.uint64_wtype
    node = awst_nodes.IfElse(
        condition=awst_nodes.VarExpression(
            name=get_implicit_return_is_original(var),
            wtype=wtypes.bool_wtype,
            source_location=loc,
        ),
        if_branch=awst_nodes.Block(
            body=[
                awst_nodes.AssignmentStatement(
                    target=awst_nodes.VarExpression(
                        name=get_implicit_return_out(var),
                        wtype=wtype,
                        source_location=loc,
                    ),
                    value=awst_nodes.VarExpression(
                        name=var,
                        wtype=wtype,
                        source_location=loc,
                    ),
                    source_location=loc,
                )
            ],
            source_location=loc,
        ),
        else_branch=None,
        source_location=loc,
    )
    # lower the synthetic AWST through the normal visitor machinery
    node.accept(context.visitor)
def get_implicit_return_is_original(var_name: str) -> str:
    """Synthetic variable name for the "still the original value" flag of ``var_name``."""
    return var_name + TMP_VAR_INDICATOR + "is_original"
def get_implicit_return_out(var_name: str) -> str:
    """Synthetic variable name holding the implicit-return out-value of ``var_name``."""
    return var_name + TMP_VAR_INDICATOR + "out"
def mktemp(
    context: IRFunctionBuildContext,
    ir_type: IRType,
    source_location: SourceLocation | None,
    *,
    description: str,
) -> Register:
    """Create a new, uniquely named temporary register of ``ir_type``."""
    temp_name = context.next_tmp_name(description)
    return context.ssa.new_register(temp_name, ir_type, source_location)
def assign_intrinsic_op(
    context: IRFunctionBuildContext,
    *,
    target: str | Register,
    op: AVMOp,
    args: Sequence[int | bytes | Value],
    source_location: SourceLocation | None,
    immediates: list[int | str] | None = None,
    return_type: IRType | None = None,
) -> Register:
    """Emit ``op`` with the given args/immediates and assign its single result.

    ``target`` is either a description string (a fresh temporary is created)
    or an existing Register (a new SSA version of it is created). Raw int and
    bytes arguments are wrapped as IR constants.
    """
    intrinsic = Intrinsic(
        op=op,
        immediates=immediates or [],
        args=[_convert_constants(a, source_location) for a in args],
        # when no explicit return_type is given, pass attrs.NOTHING so that the
        # Intrinsic attrs default computes the op's own result types; the cast
        # is purely to satisfy the type checker
        types=(
            [return_type]
            if return_type is not None
            else typing.cast(Sequence[IRType], attrs.NOTHING)
        ),
        source_location=source_location,
    )
    if isinstance(target, str):
        target_reg = mktemp(context, intrinsic.types[0], source_location, description=target)
    else:
        target_reg = new_register_version(context, target)
    assign_targets(
        context,
        targets=[target_reg],
        source=intrinsic,
        assignment_location=source_location,
    )
    return target_reg
def _convert_constants(arg: int | bytes | Value, source_location: SourceLocation | None) -> Value:
    """Wrap raw int/bytes literals as IR constants; pass Value instances through."""
    if isinstance(arg, int):
        return UInt64Constant(value=arg, source_location=source_location)
    if isinstance(arg, bytes):
        return BytesConstant(
            value=arg, encoding=AVMBytesEncoding.unknown, source_location=source_location
        )
    return arg
def invoke_puya_lib_subroutine(
    context: IRFunctionBuildContext,
    *,
    full_name: str,
    args: Sequence[Value | int | bytes],
    source_location: SourceLocation,
) -> InvokeSubroutine:
    """Build a call to an embedded puya library subroutine, looked up by full name.

    Raw int/bytes arguments are wrapped as IR constants.
    """
    subroutine = context.embedded_funcs_lookup[full_name]
    call_args = [_convert_constants(a, source_location) for a in args]
    return InvokeSubroutine(
        target=subroutine,
        args=call_args,
        source_location=source_location,
    )
def assert_value(
    context: IRFunctionBuildContext, value: Value, *, source_location: SourceLocation, comment: str
) -> None:
    """Emit an AVM ``assert`` on ``value``; ``comment`` becomes the failure message."""
    assertion = Intrinsic(
        op=AVMOp.assert_,
        args=[value],
        error_message=comment,
        source_location=source_location,
    )
    context.block_builder.add(assertion)
def extract_const_int(expr: awst_nodes.Expression | int | None) -> int | None:
    """Return the integer value of ``expr``.

    Accepts an IntegerConstant node, a plain int, or None (returned as-is);
    anything else raises InternalError.
    """
    if expr is None:
        return None
    if isinstance(expr, awst_nodes.IntegerConstant):
        return expr.value
    if isinstance(expr, int):
        return expr
    raise InternalError(
        f"Expected either constant or None for index, got {type(expr).__name__}",
        expr.source_location,
    )
@attrs.frozen
class OpFactory:
    """Shorthand for emitting common AVM intrinsics as assigned temporaries.

    All ops emitted through a factory instance are tagged with the factory's
    ``source_location``.
    """

    context: IRFunctionBuildContext
    source_location: SourceLocation | None

    def assign(self, value: ValueProvider, temp_desc: str) -> Register:
        """Materialise ``value`` into a new temporary described by ``temp_desc``."""
        return assign_temp(
            self.context, value, temp_description=temp_desc, source_location=self.source_location
        )

    def assign_multiple(self, **values: ValueProvider) -> Sequence[Register]:
        """Materialise several values; each keyword name is the temp description."""
        return [self.assign(provider, description) for description, provider in values.items()]

    def add(self, a: Value, b: Value | int, temp_desc: str) -> Register:
        """uint64 addition."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.add,
            args=[a, b],
            source_location=self.source_location,
        )

    def sub(self, a: Value, b: Value | int, temp_desc: str) -> Register:
        """uint64 subtraction."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.sub,
            args=[a, b],
            source_location=self.source_location,
        )

    def mul(self, a: Value, b: Value | int, temp_desc: str) -> Register:
        """uint64 multiplication."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.mul,
            args=[a, b],
            source_location=self.source_location,
        )

    def len(self, value: Value, temp_desc: str) -> Register:
        """Byte length of ``value``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.len_,
            args=[value],
            source_location=self.source_location,
        )

    def eq(self, a: Value, b: Value, temp_desc: str) -> Register:
        """Equality comparison."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.eq,
            args=[a, b],
            source_location=self.source_location,
        )

    def select(self, false: Value, true: Value, condition: Value, temp_desc: str) -> Register:
        """``true`` if ``condition`` is non-zero else ``false``; result typed as ``true``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.select,
            args=[false, true, condition],
            return_type=true.ir_type,
            source_location=self.source_location,
        )

    def extract_uint16(self, a: Value, b: Value | int, temp_desc: str) -> Register:
        """Read a big-endian u16 from ``a`` at byte offset ``b``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.extract_uint16,
            args=[a, b],
            source_location=self.source_location,
        )

    def itob(self, value: Value | int, temp_desc: str) -> Register:
        """Convert a uint64 to its 8-byte big-endian representation."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.itob,
            args=[value],
            source_location=self.source_location,
        )

    def as_u16_bytes(self, a: Value | int, temp_desc: str) -> Register:
        """Encode ``a`` as 2 big-endian bytes (the low 2 bytes of its itob form)."""
        full_width = self.itob(a, "as_bytes")
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.extract,
            immediates=[6, 2],
            args=[full_width],
            source_location=self.source_location,
        )

    def concat(self, a: Value | bytes, b: Value | bytes, temp_desc: str) -> Register:
        """Byte concatenation."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.concat,
            args=[a, b],
            source_location=self.source_location,
        )

    def constant(self, value: int | bytes) -> Value:
        """Wrap a literal as an IR constant (bytes rendered as base16)."""
        if isinstance(value, int):
            return UInt64Constant(value=value, source_location=self.source_location)
        return BytesConstant(
            value=value, encoding=AVMBytesEncoding.base16, source_location=self.source_location
        )

    def set_bit(self, *, value: Value, index: int, bit: Value | int, temp_desc: str) -> Register:
        """Set bit ``index`` of ``value`` to ``bit``; result typed as ``value``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.setbit,
            args=[value, index, bit],
            return_type=value.ir_type,
            source_location=self.source_location,
        )

    def get_bit(self, value: Value, index: Value | int, temp_desc: str) -> Register:
        """Read bit ``index`` of ``value``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.getbit,
            args=[value, index],
            source_location=self.source_location,
        )

    def extract_to_end(self, value: Value, start: int, temp_desc: str) -> Register:
        """Bytes of ``value`` from ``start`` to the end.

        Uses ``extract`` with length 0 ("to end"), which limits ``start`` to
        an 8-bit immediate.
        """
        if start > 255:
            raise InternalError(
                "Cannot use extract with a length of 0 if start > 255", self.source_location
            )
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.extract,
            immediates=[start, 0],
            args=[value],
            source_location=self.source_location,
        )

    def substring3(
        self,
        value: Value | bytes,
        start: Value | int,
        end_ex: Value | int,
        temp_desc: str,
    ) -> Register:
        """Bytes of ``value`` in ``[start, end_ex)``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.substring3,
            args=[value, start, end_ex],
            source_location=self.source_location,
        )

    def replace(
        self,
        value: Value | bytes,
        index: Value | int,
        replacement: Value | bytes,
        temp_desc: str,
    ) -> Register:
        """Overwrite bytes of ``value`` at ``index`` with ``replacement``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.replace3,
            args=[value, index, replacement],
            source_location=self.source_location,
        )

    def extract3(
        self,
        value: Value | bytes,
        index: Value | int,
        length: Value | int,
        temp_desc: str,
    ) -> Register:
        """``length`` bytes of ``value`` starting at ``index``."""
        return assign_intrinsic_op(
            self.context,
            target=temp_desc,
            op=AVMOp.extract3,
            args=[value, index, length],
            source_location=self.source_location,
        )
|
algorandfoundation/puya
|
src/puya/ir/builder/_utils.py
|
Python
|
NOASSERTION
| 13,454 |
from collections.abc import Sequence
from itertools import zip_longest
import attrs
from puya import log
from puya.avm import AVMType
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.errors import CodeError, InternalError
from puya.ir.avm_ops import AVMOp
from puya.ir.builder._utils import (
OpFactory,
assert_value,
assign_intrinsic_op,
assign_targets,
assign_temp,
invoke_puya_lib_subroutine,
mktemp,
)
from puya.ir.builder.assignment import handle_assignment
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import (
Intrinsic,
Register,
UInt64Constant,
Value,
ValueProvider,
ValueTuple,
)
from puya.ir.types_ import IRType, get_wtype_arity
from puya.parse import SourceLocation, sequential_source_locations_merge
from puya.utils import bits_to_bytes, round_bits_to_nearest_bytes
logger = log.get_logger(__name__)
@attrs.frozen(kw_only=True)
class ArrayIterator:
    """Iteration state for lowering a for-in loop over an ARC4 array."""

    context: IRFunctionBuildContext
    array_wtype: wtypes.ARC4Array
    # the materialised (encoded) array value
    array: Value
    # element count, materialised once up front by the loop builder
    array_length: Value
    source_location: SourceLocation

    def get_value_at_index(self, index: Value) -> ValueProvider:
        """Read the element at ``index``; skips the bounds assertion since the
        loop itself guarantees the index is in range."""
        return arc4_array_index(
            self.context,
            array_wtype=self.array_wtype,
            array=self.array,
            index=index,
            source_location=self.source_location,
            assert_bounds=False,  # iteration is always within bounds
        )
def decode_expr(context: IRFunctionBuildContext, expr: awst_nodes.ARC4Decode) -> ValueProvider:
    """Lower an ARC4Decode node by decoding its materialised operand."""
    operand = context.visitor.visit_and_materialise_single(expr.value)
    return _decode_arc4_value(
        context, operand, expr.value.wtype, expr.wtype, expr.source_location
    )
def _decode_arc4_value(
    context: IRFunctionBuildContext,
    value: Value,
    arc4_wtype: wtypes.WType,
    target_wtype: wtypes.WType,
    loc: SourceLocation,
) -> ValueProvider:
    """Decode an ARC4-encoded ``value`` into its native ``target_wtype`` form.

    Raises InternalError for unsupported encoding/target combinations.
    """
    match arc4_wtype, target_wtype:
        # ARC4 uintN is big-endian bytes, which is already the biguint representation
        case wtypes.ARC4UIntN(), wtypes.biguint_wtype:
            return value
        case wtypes.ARC4UIntN(), (wtypes.uint64_wtype | wtypes.bool_wtype):
            return Intrinsic(
                op=AVMOp.btoi,
                args=[value],
                source_location=loc,
            )
        # ARC4 bool stores its value in bit 0 (the most significant bit) of one byte
        case wtypes.arc4_bool_wtype, wtypes.bool_wtype:
            return Intrinsic(
                op=AVMOp.getbit,
                args=[value, UInt64Constant(value=0, source_location=None)],
                source_location=loc,
                types=(IRType.bool,),
            )
        # byte arrays / strings: drop the 2-byte length prefix
        case wtypes.ARC4DynamicArray(element_type=wtypes.ARC4UIntN(n=8)), (
            wtypes.bytes_wtype
            | wtypes.string_wtype
        ):
            return Intrinsic(
                op=AVMOp.extract,
                immediates=[2, 0],
                args=[value],
                source_location=loc,
            )
        # tuples/structs decode element-wise, provided the arities line up
        case (
            wtypes.ARC4Tuple()
            | wtypes.ARC4Struct() as arc4_tuple,
            wtypes.WTuple() as native_tuple,
        ) if (len(arc4_tuple.types) == len(native_tuple.types)):
            return _visit_arc4_tuple_decode(
                context, arc4_tuple, value, target_wtype=native_tuple, source_location=loc
            )
    raise InternalError(
        f"unsupported ARC4Decode operation from {arc4_wtype} to {target_wtype}", loc
    )
def encode_arc4_struct(
    context: IRFunctionBuildContext, expr: awst_nodes.NewStruct, wtype: wtypes.ARC4Struct
) -> ValueProvider:
    """Encode a NewStruct literal as an ARC4 struct (laid out like a tuple)."""
    assert expr.wtype == wtype
    field_values = []
    for field_name in expr.wtype.fields:
        field_values.append(
            context.visitor.visit_and_materialise_single(expr.values[field_name])
        )
    return _visit_arc4_tuple_encode(context, field_values, wtype.types, expr.source_location)
def encode_expr(context: IRFunctionBuildContext, expr: awst_nodes.ARC4Encode) -> ValueProvider:
    """Lower an ARC4Encode node by encoding its visited operand."""
    operand = context.visitor.visit_expr(expr.value)
    return _encode_expr(context, operand, expr.value.wtype, expr.wtype, expr.source_location)
def _encode_expr(
context: IRFunctionBuildContext,
value_provider: ValueProvider,
value_wtype: wtypes.WType,
arc4_wtype: wtypes.ARC4Type,
loc: SourceLocation,
) -> ValueProvider:
match arc4_wtype:
case wtypes.arc4_bool_wtype:
(value,) = context.visitor.materialise_value_provider(
value_provider, description="to_encode"
)
return _encode_arc4_bool(context, value, loc)
case wtypes.ARC4UIntN(n=bits):
(value,) = context.visitor.materialise_value_provider(
value_provider, description="to_encode"
)
num_bytes = bits // 8
return _itob_fixed(context, value, num_bytes, loc)
case wtypes.ARC4Tuple(types=arc4_item_types):
assert isinstance(
value_wtype, wtypes.WTuple
), f"expected WTuple argument, got {value_wtype.name}"
elements = context.visitor.materialise_value_provider(
value_provider, description="elements_to_encode"
)
arc4_items = _encode_arc4_tuple_items(
context, elements, value_wtype.types, arc4_item_types, loc
)
return _visit_arc4_tuple_encode(context, arc4_items, arc4_item_types, loc)
case wtypes.ARC4Struct(types=arc4_item_types):
assert isinstance(
value_wtype, wtypes.WTuple
), f"expected WTuple argument, got {value_wtype.name}"
elements = context.visitor.materialise_value_provider(
value_provider, description="elements_to_encode"
)
arc4_items = _encode_arc4_tuple_items(
context, elements, value_wtype.types, arc4_item_types, loc
)
return _visit_arc4_tuple_encode(context, arc4_items, arc4_item_types, loc)
case wtypes.ARC4DynamicArray(element_type=wtypes.ARC4UIntN(n=8)):
(value,) = context.visitor.materialise_value_provider(
value_provider, description="to_encode"
)
factory = OpFactory(context, loc)
length = factory.len(value, "length")
length_uint16 = factory.as_u16_bytes(length, "length_uint16")
return factory.concat(length_uint16, value, "encoded_value")
case wtypes.ARC4DynamicArray() | wtypes.ARC4StaticArray():
raise InternalError(
"NewArray should be used instead of ARC4Encode for arrays",
loc,
)
case _:
raise InternalError(
f"Unsupported wtype for ARC4Encode: {value_wtype}",
location=loc,
)
def _encode_arc4_tuple_items(
    context: IRFunctionBuildContext,
    elements: list[Value],
    item_wtypes: Sequence[wtypes.WType],
    arc4_item_wtypes: Sequence[wtypes.ARC4Type],
    loc: SourceLocation,
) -> Sequence[Value]:
    """Encode each tuple item in the flattened ``elements`` list to its ARC4 wtype.

    Items are consumed from ``elements`` according to each item wtype's arity;
    items already in their target encoding are passed through unchanged.
    """
    arc4_items = []
    for item_wtype, arc4_item_wtype in zip(item_wtypes, arc4_item_wtypes, strict=True):
        item_arity = get_wtype_arity(item_wtype)
        # take this item's (possibly multiple) flattened values off the front
        item_elements = elements[:item_arity]
        elements = elements[item_arity:]
        if item_wtype == arc4_item_wtype:
            # already encoded - no conversion required
            arc4_items.extend(item_elements)
            continue
        # re-bundle multi-value items into a ValueTuple for encoding
        item_value_provider = (
            item_elements[0]
            if item_arity == 1
            else ValueTuple(
                values=item_elements,
                source_location=sequential_source_locations_merge(
                    [e.source_location for e in item_elements]
                ),
            )
        )
        arc4_item_vp = _encode_expr(
            context,
            item_value_provider,
            item_wtype,
            arc4_item_wtype,
            item_value_provider.source_location or loc,
        )
        (arc4_item,) = context.visitor.materialise_value_provider(arc4_item_vp, "arc4_item")
        arc4_items.append(arc4_item)
    return arc4_items
def encode_arc4_array(context: IRFunctionBuildContext, expr: awst_nodes.NewArray) -> ValueProvider:
    """Encode a NewArray literal into ARC4 array bytes.

    Dynamic arrays get a 2-byte big-endian element-count prefix; static arrays
    have none. Bool elements are bit-packed, 8 per byte.
    """
    if not isinstance(expr.wtype, wtypes.ARC4Array):
        raise InternalError("Expected ARC4 Array expression", expr.source_location)
    # element count is known statically, so the length header is a constant
    len_prefix = (
        len(expr.values).to_bytes(2, "big")
        if isinstance(expr.wtype, wtypes.ARC4DynamicArray)
        else b""
    )
    factory = OpFactory(context, expr.source_location)
    elements = [context.visitor.visit_and_materialise_single(value) for value in expr.values]
    element_type = expr.wtype.element_type
    if element_type == wtypes.arc4_bool_wtype:
        # pack bools 8 per byte: append each element's encoded byte at every
        # 8th index, otherwise set the appropriate bit in the accumulated data
        array_head_and_tail = factory.constant(b"")
        for index, el in enumerate(elements):
            if index % 8 == 0:
                array_head_and_tail = factory.concat(
                    array_head_and_tail, el, temp_desc="array_head_and_tail"
                )
            else:
                is_true = factory.get_bit(el, 0, "is_true")
                array_head_and_tail = factory.set_bit(
                    value=array_head_and_tail,
                    index=index,
                    bit=is_true,
                    temp_desc="array_head_and_tail",
                )
    else:
        # every other element type lays out exactly like a homogeneous tuple
        array_head_and_tail = _arc4_items_as_arc4_tuple(
            context, element_type, elements, expr.source_location
        )
    return factory.concat(len_prefix, array_head_and_tail, "array_data")
def arc4_array_index(
    context: IRFunctionBuildContext,
    *,
    array_wtype: wtypes.ARC4Array,
    array: Value,
    index: Value,
    source_location: SourceLocation,
    assert_bounds: bool = True,
) -> ValueProvider:
    """Read element ``index`` from an encoded ARC4 array.

    Dispatches on the element type: dynamically sized elements are located via
    the u16 offset head, bools via bit indexing, and statically sized elements
    via a simple multiply. ``assert_bounds`` controls whether an explicit
    bounds assertion is emitted for the cases where the read itself would not
    already fail on an out-of-range index.
    """
    factory = OpFactory(context, source_location)
    array_length_vp = _get_arc4_array_length(array_wtype, array, source_location)
    array_head_and_tail_vp = _get_arc4_array_head_and_tail(array_wtype, array, source_location)
    array_head_and_tail = factory.assign(array_head_and_tail_vp, "array_head_and_tail")
    item_wtype = array_wtype.element_type
    if is_arc4_dynamic_size(item_wtype):
        inner_element_size = _maybe_get_inner_element_size(item_wtype)
        if inner_element_size is not None:
            # element is itself an array of fixed-size items, so its byte length
            # can be computed from its own length header
            if assert_bounds:
                _assert_index_in_bounds(context, index, array_length_vp, source_location)
            return _read_dynamic_item_using_length_from_arc4_container(
                context,
                array_head_and_tail=array_head_and_tail,
                inner_element_size=inner_element_size,
                index=index,
                source_location=source_location,
            )
        else:
            # no _assert_index_in_bounds here as end offset calculation implicitly checks
            return _read_dynamic_item_using_end_offset_from_arc4_container(
                context,
                array_length_vp=array_length_vp,
                array_head_and_tail=array_head_and_tail,
                index=index,
                source_location=source_location,
            )
    if item_wtype == wtypes.arc4_bool_wtype:
        if assert_bounds:
            # this catches the edge case of bit arrays that are not a multiple of 8
            # e.g. reading index 6 & 7 of an array that has a length of 6
            _assert_index_in_bounds(context, index, array_length_vp, source_location)
        return _read_nth_bool_from_arc4_container(
            context,
            data=array_head_and_tail,
            index=index,
            source_location=source_location,
        )
    else:
        item_bit_size = _get_arc4_fixed_bit_size(item_wtype)
        # no _assert_index_in_bounds here as static items will error on read if past end of array
        return _read_static_item_from_arc4_container(
            data=array_head_and_tail,
            offset=factory.mul(index, item_bit_size // 8, "item_offset"),
            item_wtype=item_wtype,
            source_location=source_location,
        )
def arc4_tuple_index(
    context: IRFunctionBuildContext,
    base: Value,
    index: int,
    wtype: wtypes.ARC4Tuple | wtypes.ARC4Struct,
    source_location: SourceLocation,
) -> ValueProvider:
    """Read element ``index`` from an encoded ARC4 tuple or struct value."""
    return _read_nth_item_of_arc4_heterogeneous_container(
        context,
        array_head_and_tail=base,
        tuple_type=wtype,
        index=index,
        source_location=source_location,
    )
def build_for_in_array(
    context: IRFunctionBuildContext,
    array_wtype: wtypes.ARC4Array,
    array_expr: awst_nodes.Expression,
    source_location: SourceLocation,
) -> ArrayIterator:
    """Prepare an ArrayIterator over ``array_expr`` for for-in lowering.

    Only arrays of immutable elements may be iterated; anything else is an
    internal error.
    """
    if not array_wtype.element_type.immutable:
        raise InternalError(
            "Attempted iteration of an ARC4 array of mutable objects", source_location
        )
    array = context.visitor.visit_and_materialise_single(array_expr)
    # materialise the length once, up front, for the loop condition
    array_length = assign_temp(
        context,
        _get_arc4_array_length(array_wtype, array, source_location),
        temp_description="array_length",
        source_location=source_location,
    )
    return ArrayIterator(
        context=context,
        array_wtype=array_wtype,
        array=array,
        array_length=array_length,
        source_location=source_location,
    )
def handle_arc4_assign(
    context: IRFunctionBuildContext,
    target: awst_nodes.Expression,
    value: ValueProvider,
    source_location: SourceLocation,
    *,
    is_nested_update: bool,
) -> Value:
    """Assign ``value`` to an ARC4 l-value, rebuilding enclosing containers.

    Because ARC4 values are immutable byte strings, writing to an element of
    an array/struct/tuple means producing a new encoding of the container with
    the element replaced, then recursively assigning that container to *its*
    own target (``is_nested_update=True`` on the recursive calls).
    """
    result: Value
    match target:
        # array element write: replace the item, then assign the new array
        case awst_nodes.IndexExpression(
            base=awst_nodes.Expression(
                wtype=wtypes.ARC4DynamicArray() | wtypes.ARC4StaticArray() as array_wtype
            ) as base_expr,
            index=index_value,
        ):
            item = _arc4_replace_array_item(
                context,
                base_expr=base_expr,
                index_value_expr=index_value,
                wtype=array_wtype,
                value=value,
                source_location=source_location,
            )
            return handle_arc4_assign(
                context,
                target=base_expr,
                value=item,
                source_location=source_location,
                is_nested_update=True,
            )
        # struct field write: replace the field, then assign the new struct
        case awst_nodes.FieldExpression(
            base=awst_nodes.Expression(wtype=wtypes.ARC4Struct() as struct_wtype) as base_expr,
            name=field_name,
        ):
            item = _arc4_replace_struct_item(
                context,
                base_expr=base_expr,
                field_name=field_name,
                wtype=struct_wtype,
                value=value,
                source_location=source_location,
            )
            return handle_arc4_assign(
                context,
                target=base_expr,
                value=item,
                source_location=source_location,
                is_nested_update=True,
            )
        # tuple item write: replace the item, then assign the new tuple
        case awst_nodes.TupleItemExpression(
            base=awst_nodes.Expression(wtype=wtypes.ARC4Tuple() as tuple_wtype) as base_expr,
            index=index_value,
        ):
            item = _arc4_replace_tuple_item(
                context,
                base_expr=base_expr,
                index_int=index_value,
                wtype=tuple_wtype,
                value=value,
                source_location=source_location,
            )
            return handle_arc4_assign(
                context,
                target=base_expr,
                value=item,
                source_location=source_location,
                is_nested_update=True,
            )
        # this function is sometimes invoked outside an assignment expr/stmt, which
        # is how a non l-value expression can be possible
        # TODO: refactor this so that this special case is handled where it originates
        case awst_nodes.TupleItemExpression(
            wtype=item_wtype,
        ) if not item_wtype.immutable:
            (result,) = handle_assignment(
                context,
                target,
                value=value,
                assignment_location=source_location,
                is_nested_update=True,
            )
            return result
        # plain variable (or other direct) target: delegate to normal assignment
        case _:
            (result,) = handle_assignment(
                context,
                target,
                value=value,
                assignment_location=source_location,
                is_nested_update=is_nested_update,
            )
            return result
def concat_values(
    context: IRFunctionBuildContext,
    left_expr: awst_nodes.Expression,
    right_expr: awst_nodes.Expression,
    source_location: SourceLocation,
) -> Value:
    """Concatenate an ARC4 dynamic array with another array (or matching tuple).

    The left operand must be an ARC4 dynamic array; the right operand may be
    any ARC4 array, or a WTuple whose items all match the left element type.
    Dispatches to the appropriate embedded puya library routine (or inline
    ops) based on the element encoding.
    """
    factory = OpFactory(context, source_location)
    # check left is a valid ARC4 array to concat with
    left_wtype = left_expr.wtype
    if not isinstance(left_wtype, wtypes.ARC4DynamicArray):
        raise InternalError("Expected left expression to be a dynamic ARC4 array", source_location)
    left_element_type = left_wtype.element_type
    # check right is a valid type to concat
    right_wtype = right_expr.wtype
    if isinstance(right_wtype, wtypes.ARC4Array):
        right_element_type = right_wtype.element_type
    elif isinstance(right_wtype, wtypes.WTuple) and all(
        t == left_element_type for t in right_wtype.types
    ):
        right_element_type = left_element_type
    else:
        right_element_type = None
    if left_element_type != right_element_type:
        raise CodeError(
            f"Unexpected operand types or order for concatenation:"
            f" {left_wtype} and {right_wtype}",
            source_location,
        )
    # bool elements: bit-level concat via library routine; is_packed tells the
    # routine whether the right-hand data is already bit-packed (array) or
    # one-byte-per-bool (tuple)
    if left_element_type == wtypes.arc4_bool_wtype:
        left = context.visitor.visit_and_materialise_single(left_expr)
        (r_data, r_length) = _get_arc4_array_tail_data_and_item_count(
            context, right_expr, source_location
        )
        is_packed = UInt64Constant(
            value=1 if isinstance(right_wtype, wtypes.ARC4Array) else 0,
            source_location=source_location,
        )
        return factory.assign(
            invoke_puya_lib_subroutine(
                context,
                full_name="_puya_lib.arc4.dynamic_array_concat_bits",
                args=[left, r_data, r_length, is_packed],
                source_location=source_location,
            ),
            "concat_result",
        )
    # fixed-size elements: simple byte-level concat
    if is_arc4_static_size(left_element_type):
        element_size = _get_arc4_fixed_bit_size(left_element_type)
        return _concat_dynamic_array_fixed_size(
            context,
            left=left_expr,
            right=right_expr,
            source_location=source_location,
            byte_size=element_size // 8,
        )
    # dynamic elements with a u16 byte-length header (e.g. string[], byte[][])
    if _is_byte_length_header(left_element_type):
        left = context.visitor.visit_and_materialise_single(left_expr)
        (r_data, r_length) = _get_arc4_array_tail_data_and_item_count(
            context, right_expr, source_location
        )
        return factory.assign(
            invoke_puya_lib_subroutine(
                context,
                full_name="_puya_lib.arc4.dynamic_array_concat_byte_length_head",
                args=[left, r_data, r_length],
                source_location=source_location,
            ),
            "concat_result",
        )
    # general dynamically sized elements: concat via head+tail representation
    if is_arc4_dynamic_size(left_element_type):
        assert isinstance(left_wtype, wtypes.ARC4DynamicArray)
        left = context.visitor.visit_and_materialise_single(left_expr)
        if isinstance(right_wtype, wtypes.WTuple):
            right_values = context.visitor.visit_and_materialise(right_expr)
            r_count_vp: ValueProvider = UInt64Constant(
                value=len(right_wtype.types), source_location=source_location
            )
            r_head_and_tail_vp: ValueProvider = _arc4_items_as_arc4_tuple(
                context, left_element_type, right_values, source_location
            )
        elif isinstance(right_wtype, wtypes.ARC4Array):
            right = context.visitor.visit_and_materialise_single(right_expr)
            r_count_vp = _get_arc4_array_length(right_wtype, right, source_location)
            r_head_and_tail_vp = _get_arc4_array_head_and_tail(right_wtype, right, source_location)
        else:
            raise InternalError("Expected array", source_location)
        args = factory.assign_multiple(
            l_count=_get_arc4_array_length(left_wtype, left, source_location),
            l_head_and_tail=_get_arc4_array_head_and_tail(left_wtype, left, source_location),
            r_count=r_count_vp,
            r_head_and_tail=r_head_and_tail_vp,
        )
        return factory.assign(
            invoke_puya_lib_subroutine(
                context,
                full_name="_puya_lib.arc4.dynamic_array_concat_dynamic_element",
                args=list(args),
                source_location=source_location,
            ),
            "concat_result",
        )
    raise InternalError("Unexpected element type", source_location)
def pop_arc4_array(
    context: IRFunctionBuildContext,
    expr: awst_nodes.ArrayPop,
    array_wtype: wtypes.ARC4DynamicArray,
) -> ValueProvider:
    """Lower an ArrayPop: remove and return the last element of a dynamic array.

    Delegates to the element-kind-specific library routine, which returns both
    the popped item and the shortened array; the array is then written back to
    the original target.
    """
    source_location = expr.source_location
    base = context.visitor.visit_and_materialise_single(expr.base)
    args: list[Value | int | bytes] = [base]
    # pick the library routine matching the element encoding
    if array_wtype.element_type == wtypes.arc4_bool_wtype:
        method_name = "dynamic_array_pop_bit"
    elif _is_byte_length_header(array_wtype.element_type):  # TODO: multi_byte_length prefix?
        method_name = "dynamic_array_pop_byte_length_head"
    elif is_arc4_dynamic_size(array_wtype.element_type):
        method_name = "dynamic_array_pop_dynamic_element"
    else:
        # fixed-size elements: the routine needs the element byte size
        fixed_size = _get_arc4_fixed_bit_size(array_wtype.element_type)
        method_name = "dynamic_array_pop_fixed_size"
        args.append(fixed_size // 8)
    # the routine returns (popped_item, remaining_array_data)
    popped = mktemp(context, IRType.bytes, source_location, description="popped")
    data = mktemp(context, IRType.bytes, source_location, description="data")
    assign_targets(
        context,
        targets=[popped, data],
        source=invoke_puya_lib_subroutine(
            context,
            full_name=f"_puya_lib.arc4.{method_name}",
            args=args,
            source_location=source_location,
        ),
        assignment_location=source_location,
    )
    # write the shortened array back to wherever the base expression lives
    handle_arc4_assign(
        context,
        target=expr.base,
        value=data,
        is_nested_update=True,
        source_location=source_location,
    )
    return popped
# canonical single-byte ARC4 bool encodings: the value lives in the top bit
ARC4_TRUE = b"\x80"
ARC4_FALSE = b"\x00"
def _encode_arc4_bool(
    context: IRFunctionBuildContext, bit: Value, source_location: SourceLocation
) -> Value:
    """Encode a 0/1 ``bit`` as a single ARC4 bool byte (value stored in bit 0, the MSB)."""
    factory = OpFactory(context, source_location)
    zero_byte = factory.constant(b"\x00")
    return factory.set_bit(value=zero_byte, index=0, bit=bit, temp_desc="encoded_bool")
def _visit_arc4_tuple_decode(
    context: IRFunctionBuildContext,
    wtype: wtypes.ARC4Tuple | wtypes.ARC4Struct,
    value: Value,
    target_wtype: wtypes.WTuple,
    source_location: SourceLocation,
) -> ValueProvider:
    """Decode an encoded ARC4 tuple/struct ``value`` element-wise into a native tuple."""
    items = list[Value]()
    for index, (target_item_wtype, item_wtype) in enumerate(
        zip(target_wtype.types, wtype.types, strict=True)
    ):
        # read the encoded item out of the container
        item_value = _read_nth_item_of_arc4_heterogeneous_container(
            context,
            array_head_and_tail=value,
            tuple_type=wtype,
            index=index,
            source_location=source_location,
        )
        item = assign_temp(
            context,
            temp_description=f"item{index}",
            source=item_value,
            source_location=source_location,
        )
        # decode further only when the native item type differs from the encoding
        if target_item_wtype != item_wtype:
            decoded_item = _decode_arc4_value(
                context, item, item_wtype, target_item_wtype, source_location
            )
            items.extend(context.visitor.materialise_value_provider(decoded_item, item.name))
        else:
            items.append(item)
    return ValueTuple(source_location=source_location, values=items)
def _is_byte_length_header(wtype: wtypes.ARC4Type) -> bool:
    """True for dynamic arrays of static 8-bit elements (e.g. byte[], string),
    whose u16 length header doubles as a byte-length header."""
    if not isinstance(wtype, wtypes.ARC4DynamicArray):
        return False
    element = wtype.element_type
    return is_arc4_static_size(element) and _get_arc4_fixed_bit_size(element) == 8
def _maybe_get_inner_element_size(item_wtype: wtypes.ARC4Type) -> int | None:
    """If ``item_wtype`` is an array of statically sized elements, return that
    inner element size in bytes; otherwise return None."""
    if not isinstance(item_wtype, wtypes.ARC4Array):
        return None
    inner_element_type = item_wtype.element_type
    if not is_arc4_static_size(inner_element_type):
        return None
    return _get_arc4_fixed_bit_size(inner_element_type) // 8
def _read_dynamic_item_using_length_from_arc4_container(
    context: IRFunctionBuildContext,
    *,
    array_head_and_tail: Value,
    inner_element_size: int,
    index: Value,
    source_location: SourceLocation,
) -> ValueProvider:
    """Read dynamic element ``index`` whose byte length is derivable from its own
    length header (element is an array of fixed-size items of ``inner_element_size``
    bytes each).
    """
    factory = OpFactory(context, source_location)
    # each head entry is a u16 offset, so the entry for `index` is at index*2
    item_offset_offset = factory.mul(index, 2, "item_offset_offset")
    item_start_offset = factory.extract_uint16(
        array_head_and_tail, item_offset_offset, "item_offset"
    )
    # the item starts with its own u16 item count; total bytes = count * size + 2
    item_length = factory.extract_uint16(array_head_and_tail, item_start_offset, "item_length")
    item_length_in_bytes = factory.mul(item_length, inner_element_size, "item_length_in_bytes")
    item_total_length = factory.add(item_length_in_bytes, 2, "item_head_tail_length")
    return Intrinsic(
        op=AVMOp.extract3,
        args=[array_head_and_tail, item_start_offset, item_total_length],
        source_location=source_location,
    )
def _read_dynamic_item_using_end_offset_from_arc4_container(
    context: IRFunctionBuildContext,
    *,
    array_length_vp: ValueProvider,
    array_head_and_tail: Value,
    index: Value,
    source_location: SourceLocation,
) -> ValueProvider:
    """Extract a dynamically sized item by locating where the *next* item starts.

    Used when an item's byte length cannot be derived from its own length prefix:
    the end offset is the next item's start offset, or the end of the array data
    for the final item.
    """
    factory = OpFactory(context, source_location)
    # each head entry is a u16 offset, so the entry for `index` is at byte index * 2
    item_offset_offset = factory.mul(index, 2, "item_offset_offset")
    item_start_offset = factory.extract_uint16(
        array_head_and_tail, item_offset_offset, "item_offset"
    )
    array_length = factory.assign(array_length_vp, "array_length")
    next_item_index = factory.add(index, 1, "next_index")
    # three possible outcomes of this op will determine the end offset
    # next_item_index < array_length -> has_next is true, use next_item_offset
    # next_item_index == array_length -> has_next is false, use end_of_array
    # next_item_index > array_length -> op will fail, comment provides context to error
    has_next = factory.assign(
        Intrinsic(
            op=AVMOp.sub,
            args=[array_length, next_item_index],
            source_location=source_location,
            error_message="Index access is out of bounds",
        ),
        "has_next",
    )
    end_of_array = factory.len(array_head_and_tail, "end_of_array")
    next_item_offset_offset = factory.mul(next_item_index, 2, "next_item_offset_offset")
    # next_item_offset_offset will be past the array head when has_next is false, but this is ok as
    # the value will not be used. Additionally, next_item_offset_offset will always be a valid
    # offset in the overall array, because there will be at least 1 element (due to has_next
    # checking out of bounds) and this element will be dynamically sized,
    # which means its data has at least one u16 in its header
    # e.g. reading here... has at least one u16 ........
    #          v                       v
    # ArrayHead(u16, u16) ArrayTail(DynamicItemHead(... u16, ...), ..., DynamicItemTail, ...)
    next_item_offset = factory.extract_uint16(
        array_head_and_tail, next_item_offset_offset, "next_item_offset"
    )
    item_end_offset = factory.select(end_of_array, next_item_offset, has_next, "end_offset")
    return Intrinsic(
        op=AVMOp.substring3,
        args=[array_head_and_tail, item_start_offset, item_end_offset],
        source_location=source_location,
    )
def _visit_arc4_tuple_encode(
    context: IRFunctionBuildContext,
    elements: Sequence[Value],
    tuple_items: Sequence[wtypes.ARC4Type],
    expr_loc: SourceLocation,
) -> ValueProvider:
    """Encode already-materialised *elements* into an ARC4 tuple: a head section
    (values / bit-packed bools / u16 tail pointers) followed by the tail data of
    any dynamically sized items."""
    header_size = _determine_arc4_tuple_head_size(tuple_items, round_end_result=True)
    factory = OpFactory(context, expr_loc)
    # tail data begins immediately after the head
    current_tail_offset = factory.assign(factory.constant(header_size // 8), "current_tail_offset")
    encoded_tuple_buffer = factory.assign(factory.constant(b""), "encoded_tuple_buffer")
    for index, (element, el_wtype) in enumerate(zip(elements, tuple_items, strict=True)):
        if el_wtype == wtypes.arc4_bool_wtype:
            # Pack boolean: on a byte boundary append as-is, otherwise set the bit
            # within the previously appended byte
            before_header = _determine_arc4_tuple_head_size(
                tuple_items[0:index], round_end_result=False
            )
            if before_header % 8 == 0:
                encoded_tuple_buffer = factory.concat(
                    encoded_tuple_buffer, element, "encoded_tuple_buffer"
                )
            else:
                is_true = factory.get_bit(element, 0, "is_true")
                encoded_tuple_buffer = factory.set_bit(
                    value=encoded_tuple_buffer,
                    index=before_header,
                    bit=is_true,
                    temp_desc="encoded_tuple_buffer",
                )
        elif is_arc4_static_size(el_wtype):
            # Append value directly into the head
            encoded_tuple_buffer = factory.concat(
                encoded_tuple_buffer, element, "encoded_tuple_buffer"
            )
        else:
            # Append pointer (u16 offset of the item's tail data)
            offset_as_uint16 = factory.as_u16_bytes(current_tail_offset, "offset_as_uint16")
            encoded_tuple_buffer = factory.concat(
                encoded_tuple_buffer, offset_as_uint16, "encoded_tuple_buffer"
            )
            # Update Pointer: advance past this item's data for the next dynamic item
            data_length = factory.len(element, "data_length")
            current_tail_offset = factory.add(
                current_tail_offset, data_length, "current_tail_offset"
            )
    # second pass: append the tail data of all dynamically sized items, in order
    for element, el_wtype in zip(elements, tuple_items, strict=True):
        if is_arc4_dynamic_size(el_wtype):
            encoded_tuple_buffer = factory.concat(
                encoded_tuple_buffer, element, "encoded_tuple_buffer"
            )
    return encoded_tuple_buffer
def _arc4_replace_struct_item(
    context: IRFunctionBuildContext,
    base_expr: awst_nodes.Expression,
    field_name: str,
    wtype: wtypes.ARC4Struct,
    value: ValueProvider,
    source_location: SourceLocation,
) -> Value:
    """Replace the named field of an ARC4 struct by resolving the field name to
    its positional index and delegating to tuple-item replacement."""
    if not isinstance(wtype, wtypes.ARC4Struct):
        raise InternalError("Unsupported indexed assignment target", source_location)
    if field_name not in wtype.names:
        raise CodeError(f"Invalid arc4.Struct field name {field_name}", source_location)
    field_index = wtype.names.index(field_name)
    return _arc4_replace_tuple_item(context, base_expr, field_index, wtype, value, source_location)
def _arc4_replace_tuple_item(
    context: IRFunctionBuildContext,
    base_expr: awst_nodes.Expression,
    index_int: int,
    wtype: wtypes.ARC4Struct | wtypes.ARC4Tuple,
    value: ValueProvider,
    source_location: SourceLocation,
) -> Value:
    """Return a copy of the encoded tuple/struct with the item at *index_int*
    replaced by *value*.

    Static items are replaced in place (or bit-set for bools); dynamic items
    require splicing the new data in and then rewriting the u16 head offsets of
    all subsequent dynamic items.
    """
    factory = OpFactory(context, source_location)
    base = context.visitor.visit_and_materialise_single(base_expr)
    value = factory.assign(value, "assigned_value")
    element_type = wtype.types[index_int]
    # bit offset of this item's head entry
    header_up_to_item = _determine_arc4_tuple_head_size(
        wtype.types[0:index_int],
        round_end_result=element_type != wtypes.arc4_bool_wtype,
    )
    if element_type == wtypes.arc4_bool_wtype:
        # Use Set bit
        is_true = factory.get_bit(value, 0, "is_true")
        return factory.set_bit(
            value=base,
            index=header_up_to_item,
            bit=is_true,
            temp_desc="updated_data",
        )
    elif is_arc4_static_size(element_type):
        # fixed-size item: overwrite in place at its byte offset
        return factory.replace(
            base,
            header_up_to_item // 8,
            value,
            "updated_data",
        )
    else:
        dynamic_indices = [index for index, t in enumerate(wtype.types) if is_arc4_dynamic_size(t)]
        # the head entry holds a u16 offset to the item's tail data
        item_offset = factory.extract_uint16(base, header_up_to_item // 8, "item_offset")
        data_up_to_item = factory.extract3(base, 0, item_offset, "data_up_to_item")
        dynamic_indices_after_item = [i for i in dynamic_indices if i > index_int]
        if not dynamic_indices_after_item:
            # This is the last dynamic type in the tuple
            # No need to update headers - just replace the data
            return factory.concat(data_up_to_item, value, "updated_data")
        header_up_to_next_dynamic_item = _determine_arc4_tuple_head_size(
            types=wtype.types[0 : dynamic_indices_after_item[0]],
            round_end_result=True,
        )
        # update tail portion with new item
        next_item_offset = factory.extract_uint16(
            base,
            header_up_to_next_dynamic_item // 8,
            "next_item_offset",
        )
        total_data_length = factory.len(base, "total_data_length")
        data_beyond_item = factory.substring3(
            base,
            next_item_offset,
            total_data_length,
            "data_beyond_item",
        )
        updated_data = factory.concat(data_up_to_item, value, "updated_data")
        updated_data = factory.concat(updated_data, data_beyond_item, "updated_data")
        # loop through head and update any offsets after modified item
        item_length = factory.sub(next_item_offset, item_offset, "item_length")
        new_value_length = factory.len(value, "new_value_length")
        for dynamic_index in dynamic_indices_after_item:
            header_up_to_dynamic_item = _determine_arc4_tuple_head_size(
                types=wtype.types[0:dynamic_index],
                round_end_result=True,
            )
            tail_offset = factory.extract_uint16(
                updated_data, header_up_to_dynamic_item // 8, "tail_offset"
            )
            # have to add the new length and then subtract the original to avoid underflow
            tail_offset = factory.add(tail_offset, new_value_length, "tail_offset")
            tail_offset = factory.sub(tail_offset, item_length, "tail_offset")
            tail_offset_bytes = factory.as_u16_bytes(tail_offset, "tail_offset_bytes")
            updated_data = factory.replace(
                updated_data, header_up_to_dynamic_item // 8, tail_offset_bytes, "updated_data"
            )
        return updated_data
def _read_nth_item_of_arc4_heterogeneous_container(
    context: IRFunctionBuildContext,
    *,
    array_head_and_tail: Value,
    tuple_type: wtypes.ARC4Tuple | wtypes.ARC4Struct,
    index: int,
    source_location: SourceLocation,
) -> ValueProvider:
    """Read the item at (compile-time) *index* from an encoded ARC4 tuple/struct."""
    tuple_item_types = tuple_type.types
    item_wtype = tuple_item_types[index]
    # bit offset of this item's head entry
    head_up_to_item = _determine_arc4_tuple_head_size(
        tuple_item_types[:index], round_end_result=False
    )
    if item_wtype == wtypes.arc4_bool_wtype:
        # bools are bit-packed into the head
        return _read_nth_bool_from_arc4_container(
            context,
            data=array_head_and_tail,
            index=UInt64Constant(
                value=head_up_to_item,
                source_location=source_location,
            ),
            source_location=source_location,
        )
    head_offset = UInt64Constant(
        value=bits_to_bytes(head_up_to_item), source_location=source_location
    )
    if is_arc4_dynamic_size(item_wtype):
        # dynamic item: the head entry is a u16 offset to the tail data
        item_start_offset = assign_intrinsic_op(
            context,
            target="item_start_offset",
            op=AVMOp.extract_uint16,
            args=[array_head_and_tail, head_offset],
            source_location=source_location,
        )
        next_index = index + 1
        # the end offset is the start of the next dynamic item, if there is one...
        for tuple_item_index, tuple_item_type in enumerate(
            tuple_item_types[next_index:], start=next_index
        ):
            if is_arc4_dynamic_size(tuple_item_type):
                head_up_to_next_dynamic_item = _determine_arc4_tuple_head_size(
                    tuple_item_types[:tuple_item_index], round_end_result=False
                )
                next_dynamic_head_offset = UInt64Constant(
                    value=bits_to_bytes(head_up_to_next_dynamic_item),
                    source_location=source_location,
                )
                item_end_offset = assign_intrinsic_op(
                    context,
                    target="item_end_offset",
                    op=AVMOp.extract_uint16,
                    args=[array_head_and_tail, next_dynamic_head_offset],
                    source_location=source_location,
                )
                break
        else:
            # ...otherwise it is the end of the encoded data
            item_end_offset = assign_intrinsic_op(
                context,
                target="item_end_offset",
                op=AVMOp.len_,
                args=[array_head_and_tail],
                source_location=source_location,
            )
        return Intrinsic(
            op=AVMOp.substring3,
            args=[array_head_and_tail, item_start_offset, item_end_offset],
            source_location=source_location,
        )
    else:
        # static item: read fixed-size bytes directly at the head offset
        return _read_static_item_from_arc4_container(
            data=array_head_and_tail,
            offset=head_offset,
            item_wtype=item_wtype,
            source_location=source_location,
        )
def _read_nth_bool_from_arc4_container(
    context: IRFunctionBuildContext,
    *,
    data: Value,
    index: Value,
    source_location: SourceLocation,
) -> ValueProvider:
    """Read the bit at *index* (a bit position) out of *data* and encode the
    result as an ARC4 bool."""
    get_bit = Intrinsic(op=AVMOp.getbit, args=[data, index], source_location=source_location)
    is_true = assign_temp(
        context,
        source=get_bit,
        temp_description="is_true",
        source_location=source_location,
    )
    return _encode_arc4_bool(context, is_true, source_location)
def _read_static_item_from_arc4_container(
    *,
    data: Value,
    offset: Value,
    item_wtype: wtypes.ARC4Type,
    source_location: SourceLocation,
) -> ValueProvider:
    """Extract a statically sized ARC4 item of type *item_wtype* from *data*,
    starting at byte *offset*."""
    size_in_bytes = _get_arc4_fixed_bit_size(item_wtype) // 8
    return Intrinsic(
        op=AVMOp.extract3,
        args=[
            data,
            offset,
            UInt64Constant(value=size_in_bytes, source_location=source_location),
        ],
        source_location=source_location,
        error_message="Index access is out of bounds",
    )
def _get_arc4_array_tail_data_and_item_count(
context: IRFunctionBuildContext, expr: awst_nodes.Expression, source_location: SourceLocation
) -> tuple[Value, Value]:
"""
For ARC4 containers (dynamic array, static array) will return the tail data and item count
For native tuples will return the tuple items packed into the equivalent static array
of tail data and item count
"""
factory = OpFactory(context, source_location)
match expr:
case awst_nodes.Expression(
wtype=wtypes.ARC4DynamicArray() | wtypes.ARC4StaticArray() as arr_wtype
):
array = context.visitor.visit_and_materialise_single(expr)
array_length = factory.assign(
_get_arc4_array_length(arr_wtype, array, source_location),
"array_length",
)
array_head_and_tail = factory.assign(
_get_arc4_array_head_and_tail(arr_wtype, array, source_location),
"array_head_and_tail",
)
array_tail = _get_arc4_array_tail(
context,
element_wtype=arr_wtype.element_type,
array_head_and_tail=array_head_and_tail,
array_length=array_length,
source_location=source_location,
)
return array_tail, array_length
case awst_nodes.TupleExpression() as tuple_expr:
if not all(isinstance(t, wtypes.ARC4Type) for t in tuple_expr.wtype.types):
raise InternalError("Expected tuple to contain only ARC4 types", source_location)
values = context.visitor.visit_and_materialise(tuple_expr)
data = factory.constant(b"")
for val in values:
data = factory.concat(data, val, "data")
tuple_length = UInt64Constant(
value=len(values),
source_location=source_location,
)
return data, tuple_length
case _:
raise InternalError(f"Unsupported array type: {expr.wtype}")
def _itob_fixed(
    context: IRFunctionBuildContext, value: Value, num_bytes: int, source_location: SourceLocation
) -> ValueProvider:
    """Convert *value* to a big-endian byte string of exactly *num_bytes* bytes.

    uint64 inputs are itob'd then truncated or zero-padded as required; bytes
    inputs (i.e. big uints) are asserted to fit and then zero-padded via a
    bitwise-or with a zero buffer of the target length.
    """
    if value.atype == AVMType.uint64:
        val_as_bytes = assign_temp(
            context,
            temp_description="val_as_bytes",
            source=Intrinsic(op=AVMOp.itob, args=[value], source_location=source_location),
            source_location=source_location,
        )
        # itob always yields 8 bytes: return directly, truncate, or fall through to pad
        if num_bytes == 8:
            return val_as_bytes
        if num_bytes < 8:
            return Intrinsic(
                op=AVMOp.extract,
                immediates=[8 - num_bytes, num_bytes],
                args=[val_as_bytes],
                source_location=source_location,
            )
        bytes_value: Value = val_as_bytes
    else:
        # dynamic length: assert the value fits in num_bytes before padding
        len_ = assign_temp(
            context,
            temp_description="len_",
            source=Intrinsic(op=AVMOp.len_, args=[value], source_location=source_location),
            source_location=source_location,
        )
        no_overflow = assign_temp(
            context,
            temp_description="no_overflow",
            source=Intrinsic(
                op=AVMOp.lte,
                args=[
                    len_,
                    UInt64Constant(value=num_bytes, source_location=source_location),
                ],
                source_location=source_location,
            ),
            source_location=source_location,
        )
        context.block_builder.add(
            Intrinsic(
                op=AVMOp.assert_,
                args=[no_overflow],
                source_location=source_location,
                error_message="overflow",
            )
        )
        bytes_value = value
    # b| with a zero buffer of the target size left-pads to exactly num_bytes
    b_zeros = assign_temp(
        context,
        temp_description="b_zeros",
        source=Intrinsic(
            op=AVMOp.bzero,
            args=[UInt64Constant(value=num_bytes, source_location=source_location)],
            source_location=source_location,
        ),
        source_location=source_location,
    )
    return Intrinsic(
        op=AVMOp.bitwise_or_bytes,
        args=[bytes_value, b_zeros],
        source_location=source_location,
    )
def _arc4_replace_array_item(
    context: IRFunctionBuildContext,
    *,
    base_expr: awst_nodes.Expression,
    index_value_expr: awst_nodes.Expression,
    wtype: wtypes.ARC4DynamicArray | wtypes.ARC4StaticArray,
    value: ValueProvider,
    source_location: SourceLocation,
) -> Value:
    """Return a copy of the encoded array with the item at the given index
    replaced by *value*.

    Dynamic elements are delegated to the embedded puya lib subroutines;
    static elements are replaced in place (bit-set for 1-bit elements).
    """
    base = context.visitor.visit_and_materialise_single(base_expr)
    value = assign_temp(
        context, value, temp_description="assigned_value", source_location=source_location
    )
    index = context.visitor.visit_and_materialise_single(index_value_expr)
    def updated_result(method_name: str, args: list[Value | int | bytes]) -> Register:
        # invoke an embedded _puya_lib.arc4 helper and capture its result
        invoke = invoke_puya_lib_subroutine(
            context,
            full_name=f"_puya_lib.arc4.{method_name}",
            args=args,
            source_location=source_location,
        )
        return assign_temp(
            context, invoke, temp_description="updated_value", source_location=source_location
        )
    if _is_byte_length_header(wtype.element_type):
        # element length is derivable from its own u16 prefix
        if isinstance(wtype, wtypes.ARC4DynamicArray):
            return updated_result("dynamic_array_replace_byte_length_head", [base, value, index])
        else:
            return updated_result(
                "static_array_replace_byte_length_head", [base, value, index, wtype.array_size]
            )
    elif is_arc4_dynamic_size(wtype.element_type):
        # general dynamic element: requires head offset rewriting
        if isinstance(wtype, wtypes.ARC4DynamicArray):
            return updated_result("dynamic_array_replace_dynamic_element", [base, value, index])
        else:
            return updated_result(
                "static_array_replace_dynamic_element", [base, value, index, wtype.array_size]
            )
    # static element path: bounds-check, then compute a write offset
    array_length = (
        UInt64Constant(value=wtype.array_size, source_location=source_location)
        if isinstance(wtype, wtypes.ARC4StaticArray)
        else Intrinsic(
            source_location=source_location,
            op=AVMOp.extract_uint16,
            args=[base, UInt64Constant(value=0, source_location=source_location)],
        )
    )
    _assert_index_in_bounds(context, index, array_length, source_location)
    element_size = _get_arc4_fixed_bit_size(wtype.element_type)
    # dynamic arrays have a 2-byte length prefix before the data
    dynamic_offset = 0 if isinstance(wtype, wtypes.ARC4StaticArray) else 2
    if element_size == 1:
        # 1-bit elements: work in bit offsets rather than bytes
        dynamic_offset *= 8
        offset_per_item = element_size
    else:
        offset_per_item = element_size // 8
    if isinstance(index_value_expr, awst_nodes.IntegerConstant):
        # constant index: fold the offset computation at compile time
        write_offset: Value = UInt64Constant(
            value=index_value_expr.value * offset_per_item + dynamic_offset,
            source_location=source_location,
        )
    else:
        write_offset = assign_intrinsic_op(
            context,
            target="write_offset",
            op=AVMOp.mul,
            args=[index, offset_per_item],
            source_location=source_location,
        )
        if dynamic_offset:
            write_offset = assign_intrinsic_op(
                context,
                target=write_offset,
                op=AVMOp.add,
                args=[write_offset, dynamic_offset],
                source_location=source_location,
            )
    if element_size == 1:
        # bools: set the single bit in place
        is_true = assign_intrinsic_op(
            context,
            target="is_true",
            op=AVMOp.getbit,
            args=[value, 0],
            source_location=source_location,
        )
        updated_target = assign_intrinsic_op(
            context,
            target="updated_target",
            op=AVMOp.setbit,
            args=[base, write_offset, is_true],
            return_type=base.ir_type,
            source_location=source_location,
        )
    else:
        updated_target = assign_intrinsic_op(
            context,
            target="updated_target",
            op=AVMOp.replace3,
            args=[base, write_offset, value],
            source_location=source_location,
        )
    return updated_target
def _concat_dynamic_array_fixed_size(
    context: IRFunctionBuildContext,
    *,
    left: awst_nodes.Expression,
    right: awst_nodes.Expression,
    source_location: SourceLocation,
    byte_size: int,
) -> Value:
    """Concatenate two sequences of fixed-size (*byte_size* bytes) elements into
    a new ARC4 dynamic array, recomputing the u16 length prefix."""
    factory = OpFactory(context, source_location)
    def array_data(expr: awst_nodes.Expression) -> Value:
        # normalise each operand to raw element data (no length prefix)
        match expr.wtype:
            case wtypes.ARC4StaticArray():
                return context.visitor.visit_and_materialise_single(expr)
            case wtypes.ARC4DynamicArray():
                # strip the 2-byte length prefix
                expr_value = context.visitor.visit_and_materialise_single(expr)
                return factory.extract_to_end(expr_value, 2, "expr_value_trimmed")
            case wtypes.WTuple():
                # pack native tuple items into contiguous bytes
                values = context.visitor.visit_and_materialise(expr)
                data = factory.constant(b"")
                for val in values:
                    data = factory.concat(data, val, "data")
                return data
            case _:
                raise InternalError(
                    f"Unexpected operand type for concatenation {expr.wtype}", source_location
                )
    left_data = array_data(left)
    right_data = array_data(right)
    concatenated = factory.concat(left_data, right_data, "concatenated")
    if byte_size == 1:
        len_ = factory.len(concatenated, "len_")
    else:
        # element count = total bytes / element size
        byte_len = factory.len(concatenated, "byte_len")
        len_ = assign_intrinsic_op(
            context,
            source_location=source_location,
            op=AVMOp.div_floor,
            args=[byte_len, byte_size],
            target="len_",
        )
    len_16_bit = factory.as_u16_bytes(len_, "len_16_bit")
    return factory.concat(len_16_bit, concatenated, "concat_result")
def _arc4_items_as_arc4_tuple(
    context: IRFunctionBuildContext,
    item_wtype: wtypes.ARC4Type,
    items: Sequence[Value],
    source_location: SourceLocation,
) -> Value:
    """Encode homogeneous ARC4 *items* as a tuple: for dynamic items, a head of
    u16 offsets followed by the item data; for static items, just the data."""
    factory = OpFactory(context, source_location)
    result = factory.constant(b"")
    if is_arc4_dynamic_size(item_wtype):
        # head section: one u16 pointer per item, tail starts after the head
        tail_offset: Value = UInt64Constant(value=len(items) * 2, source_location=source_location)
        for item in items:
            next_item_head = factory.as_u16_bytes(tail_offset, "next_item_head")
            result = factory.concat(result, next_item_head, "result")
            tail_offset = factory.add(
                tail_offset, factory.len(item, "next_item_len"), "tail_offset"
            )
    # tail section (or the entire encoding, for static items)
    for item in items:
        result = factory.concat(result, item, "result")
    return result
def _assert_index_in_bounds(
    context: IRFunctionBuildContext,
    index: Value,
    length: ValueProvider,
    source_location: SourceLocation,
) -> None:
    """Emit a runtime bounds check ``index < length``.

    If both operands are compile-time constants the check is performed
    immediately instead: out-of-bounds becomes a CodeError, in-bounds emits
    nothing.
    """
    if isinstance(index, UInt64Constant) and isinstance(length, UInt64Constant):
        if 0 <= index.value < length.value:
            return
        raise CodeError("Index access is out of bounds", source_location)
    array_length = assign_temp(
        context,
        source_location=source_location,
        temp_description="array_length",
        source=length,
    )
    index_is_in_bounds = assign_temp(
        context,
        source_location=source_location,
        temp_description="index_is_in_bounds",
        source=Intrinsic(
            op=AVMOp.lt,
            args=[index, array_length],
            source_location=source_location,
        ),
    )
    assert_value(
        context,
        index_is_in_bounds,
        source_location=source_location,
        comment="Index access is out of bounds",
    )
def _get_arc4_array_length(
    wtype: wtypes.ARC4Array,
    array: Value,
    source_location: SourceLocation,
) -> ValueProvider:
    """Item count of an ARC4 array: a compile-time constant for static arrays,
    or the leading u16 length prefix for dynamic arrays."""
    if isinstance(wtype, wtypes.ARC4StaticArray):
        return UInt64Constant(value=wtype.array_size, source_location=source_location)
    if isinstance(wtype, wtypes.ARC4DynamicArray):
        zero = UInt64Constant(value=0, source_location=source_location)
        return Intrinsic(
            op=AVMOp.extract_uint16,
            args=[array, zero],
            source_location=source_location,
        )
    raise InternalError("Unexpected ARC4 array type", source_location)
def _get_arc4_array_head_and_tail(
    wtype: wtypes.ARC4Array,
    array: Value,
    source_location: SourceLocation,
) -> ValueProvider:
    """The head+tail portion of an encoded ARC4 array: the full value for static
    arrays, or everything after the 2-byte length prefix for dynamic arrays."""
    if isinstance(wtype, wtypes.ARC4StaticArray):
        return array
    if isinstance(wtype, wtypes.ARC4DynamicArray):
        # extract from byte 2 to the end (immediate length 0 means "to end")
        return Intrinsic(
            op=AVMOp.extract,
            args=[array],
            immediates=[2, 0],
            source_location=source_location,
        )
    raise InternalError("Unexpected ARC4 array type", source_location)
def _get_arc4_array_tail(
    context: IRFunctionBuildContext,
    *,
    element_wtype: wtypes.ARC4Type,
    array_length: Value,
    array_head_and_tail: Value,
    source_location: SourceLocation,
) -> Value:
    """Return just the tail (item data) portion of an ARC4 array's head+tail bytes.

    For statically sized elements there is no head section, so the input is
    returned unchanged; for dynamic elements the head of ``array_length`` u16
    offsets (2 bytes each) is skipped.
    """
    if is_arc4_static_size(element_wtype):
        # no header for static sized elements
        return array_head_and_tail
    factory = OpFactory(context, source_location)
    # special case to use extract with immediate length of 0 where possible
    # TODO: have an IR pseudo op, extract_to_end that handles this for non constant values?
    if isinstance(array_length, UInt64Constant) and array_length.value <= 127:
        # fix: extract from the array bytes, not from the uint64 length value
        return factory.extract_to_end(array_head_and_tail, array_length.value * 2, "data")
    start_of_tail = factory.mul(array_length, 2, "start_of_tail")
    total_length = factory.len(array_head_and_tail, "total_length")
    return factory.substring3(array_head_and_tail, start_of_tail, total_length, "data")
def is_arc4_dynamic_size(wtype: wtypes.ARC4Type) -> bool:
    """True if the encoded size of *wtype* can vary at runtime."""
    if isinstance(wtype, wtypes.ARC4DynamicArray):
        return True
    if isinstance(wtype, wtypes.ARC4StaticArray):
        # static arrays are dynamic iff their element type is
        return is_arc4_dynamic_size(wtype.element_type)
    if isinstance(wtype, (wtypes.ARC4Tuple, wtypes.ARC4Struct)):
        # dynamic iff any member is dynamic
        return any(is_arc4_dynamic_size(t) for t in wtype.types)
    return False
def is_arc4_static_size(wtype: wtypes.ARC4Type) -> bool:
    """True if the encoded size of *wtype* is fixed at compile time."""
    dynamic = is_arc4_dynamic_size(wtype)
    return not dynamic
def _get_arc4_fixed_bit_size(wtype: wtypes.ARC4Type) -> int:
    """Encoded size in bits of a statically sized ARC4 type.

    Raises an InternalError when *wtype* is dynamically sized or unrecognised.
    """
    if is_arc4_dynamic_size(wtype):
        raise InternalError(f"Cannot get fixed bit size for a dynamic ABI type: {wtype}")
    if wtype == wtypes.arc4_bool_wtype:
        return 1
    if isinstance(wtype, (wtypes.ARC4UIntN, wtypes.ARC4UFixedNxM)):
        return wtype.n
    if isinstance(wtype, wtypes.ARC4StaticArray):
        element_bits = _get_arc4_fixed_bit_size(wtype.element_type)
        return round_bits_to_nearest_bytes(wtype.array_size * element_bits)
    if isinstance(wtype, (wtypes.ARC4Tuple, wtypes.ARC4Struct)):
        return _determine_arc4_tuple_head_size(wtype.types, round_end_result=True)
    raise InternalError(f"Unexpected ABI wtype: {wtype}")
def _determine_arc4_tuple_head_size(
    types: Sequence[wtypes.ARC4Type], *, round_end_result: bool
) -> int:
    """Size in bits of the head section for a tuple of *types*.

    Dynamic members contribute a 16-bit pointer; consecutive bools share bytes,
    with padding applied once a run of bools ends (and at the very end, if
    *round_end_result* is set).
    """
    bit_size = 0
    count = len(types)
    for position, current in enumerate(types):
        following = types[position + 1] if position + 1 < count else None
        bit_size += 16 if is_arc4_dynamic_size(current) else _get_arc4_fixed_bit_size(current)
        end_of_bool_run = current == wtypes.arc4_bool_wtype and following != current
        if end_of_bool_run and (round_end_result or following):
            bit_size = round_bits_to_nearest_bytes(bit_size)
    return bit_size
|
algorandfoundation/puya
|
src/puya/ir/builder/arc4.py
|
Python
|
NOASSERTION
| 54,081 |
import typing
from collections.abc import Sequence
from puya import log
from puya.avm import AVMType
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.errors import CodeError, InternalError
from puya.ir.avm_ops import AVMOp
from puya.ir.builder import arc4
from puya.ir.builder._tuple_util import build_tuple_registers
from puya.ir.builder._utils import (
assign,
assign_targets,
assign_temp,
get_implicit_return_is_original,
)
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import (
Intrinsic,
UInt64Constant,
Value,
ValueProvider,
ValueTuple,
)
from puya.ir.types_ import IRType, get_wtype_arity
from puya.ir.utils import format_tuple_index
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
def handle_assignment_expr(
    context: IRFunctionBuildContext,
    target: awst_nodes.Lvalue,
    value: awst_nodes.Expression,
    assignment_location: SourceLocation,
) -> Sequence[Value]:
    """Visit *value* and assign the result to *target* (top-level assignment,
    never a nested write-back)."""
    source = context.visitor.visit_expr(value)
    return handle_assignment(
        context,
        target=target,
        value=source,
        is_nested_update=False,
        assignment_location=assignment_location,
    )
def handle_assignment(
    context: IRFunctionBuildContext,
    target: awst_nodes.Expression,
    value: ValueProvider,
    assignment_location: SourceLocation,
    *,
    is_nested_update: bool,
) -> Sequence[Value]:
    """Assign *value* to the lvalue *target*, dispatching on the target node type.

    Returns the materialised value(s) assigned. *is_nested_update* is set when
    this call is a write-back triggered by mutating a nested (mutable ARC4)
    value rather than an explicit assignment statement.
    """
    match target:
        # special case: a nested update can cause a tuple item to be re-assigned
        # TODO: refactor this so that this special case is handled where it originates
        case (
            awst_nodes.TupleItemExpression(wtype=var_type, source_location=var_loc) as ti_expr
        ) if (
            # including assumptions in condition, so assignment will error if they are not true
            not var_type.immutable # mutable arc4 type
            and is_nested_update # is a reassignment due to a nested update
            and var_type.scalar_type is not None # only updating a scalar value
        ):
            base_name = _get_tuple_var_name(ti_expr)
            return _handle_maybe_implicit_return_assignment(
                context,
                base_name=base_name,
                wtype=var_type,
                value=value,
                var_loc=var_loc,
                assignment_loc=assignment_location,
                is_nested_update=is_nested_update,
            )
        # simple local variable assignment
        case awst_nodes.VarExpression(name=base_name, source_location=var_loc, wtype=var_type):
            return _handle_maybe_implicit_return_assignment(
                context,
                base_name=base_name,
                wtype=var_type,
                value=value,
                var_loc=var_loc,
                assignment_loc=assignment_location,
                is_nested_update=is_nested_update,
            )
        # tuple unpacking: distribute the materialised values across the items
        case awst_nodes.TupleExpression() as tup_expr:
            source = context.visitor.materialise_value_provider(
                value, description="tuple_assignment"
            )
            results = list[Value]()
            for item in tup_expr.items:
                # each item consumes as many values as its wtype's arity
                arity = get_wtype_arity(item.wtype)
                values = source[:arity]
                del source[:arity]
                if len(values) != arity:
                    raise CodeError("not enough values to unpack", assignment_location)
                if arity == 1:
                    nested_value: ValueProvider = values[0]
                else:
                    nested_value = ValueTuple(values=values, source_location=value.source_location)
                results.extend(
                    handle_assignment(
                        context,
                        target=item,
                        value=nested_value,
                        is_nested_update=False,
                        assignment_location=assignment_location,
                    )
                )
            if source:
                raise CodeError("too many values to unpack", assignment_location)
            return results
        # global (application) state write
        case awst_nodes.AppStateExpression(
            key=awst_key, wtype=wtype, source_location=field_location
        ):
            _ = wtypes.persistable_stack_type(wtype, field_location) # double check
            key_value = context.visitor.visit_and_materialise_single(awst_key)
            (mat_value,) = context.visitor.materialise_value_provider(
                value, description="new_state_value"
            )
            context.block_builder.add(
                Intrinsic(
                    op=AVMOp.app_global_put,
                    args=[key_value, mat_value],
                    source_location=assignment_location,
                )
            )
            return [mat_value]
        # local (per-account) state write
        case awst_nodes.AppAccountStateExpression(
            key=awst_key, account=account_expr, wtype=wtype, source_location=field_location
        ):
            _ = wtypes.persistable_stack_type(wtype, field_location) # double check
            account = context.visitor.visit_and_materialise_single(account_expr)
            key_value = context.visitor.visit_and_materialise_single(awst_key)
            (mat_value,) = context.visitor.materialise_value_provider(
                value, description="new_state_value"
            )
            context.block_builder.add(
                Intrinsic(
                    op=AVMOp.app_local_put,
                    args=[account, key_value, mat_value],
                    source_location=assignment_location,
                )
            )
            return [mat_value]
        # box storage write
        case awst_nodes.BoxValueExpression(
            key=awst_key, wtype=wtype, source_location=field_location
        ):
            scalar_type = wtypes.persistable_stack_type(wtype, field_location) # double check
            key_value = context.visitor.visit_and_materialise_single(awst_key)
            (mat_value,) = context.visitor.materialise_value_provider(
                value, description="new_box_value"
            )
            if scalar_type == AVMType.bytes:
                serialized_value = mat_value
                # delete the existing box first, since the value's size may differ
                # unless the type is statically sized
                if not (isinstance(wtype, wtypes.ARC4Type) and arc4.is_arc4_static_size(wtype)):
                    context.block_builder.add(
                        Intrinsic(
                            op=AVMOp.box_del, args=[key_value], source_location=assignment_location
                        )
                    )
            elif scalar_type == AVMType.uint64:
                # uint64 values are stored as their 8-byte big-endian encoding
                serialized_value = assign_temp(
                    context=context,
                    temp_description="new_box_value",
                    source=Intrinsic(
                        op=AVMOp.itob,
                        args=[mat_value],
                        source_location=assignment_location,
                    ),
                    source_location=assignment_location,
                )
            else:
                typing.assert_never(scalar_type)
            context.block_builder.add(
                Intrinsic(
                    op=AVMOp.box_put,
                    args=[key_value, serialized_value],
                    source_location=assignment_location,
                )
            )
            return [mat_value]
        # indexed assignment, e.g. arr[i] = x
        case awst_nodes.IndexExpression() as ix_expr:
            if isinstance(ix_expr.base.wtype, wtypes.WArray):
                raise NotImplementedError
            elif isinstance(ix_expr.base.wtype, wtypes.ARC4Type):  # noqa: RET506
                return (
                    arc4.handle_arc4_assign(
                        context,
                        target=ix_expr,
                        value=value,
                        is_nested_update=is_nested_update,
                        source_location=assignment_location,
                    ),
                )
            else:
                raise InternalError(
                    f"Indexed assignment operation IR lowering"
                    f" not implemented for base type {ix_expr.base.wtype.name}",
                    assignment_location,
                )
        # field assignment, e.g. struct.field = x
        case awst_nodes.FieldExpression() as field_expr:
            if isinstance(field_expr.base.wtype, wtypes.WStructType):
                raise NotImplementedError
            elif isinstance(field_expr.base.wtype, wtypes.ARC4Struct):  # noqa: RET506
                return (
                    arc4.handle_arc4_assign(
                        context,
                        target=field_expr,
                        value=value,
                        is_nested_update=is_nested_update,
                        source_location=assignment_location,
                    ),
                )
            else:
                raise InternalError(
                    f"Field assignment operation IR lowering"
                    f" not implemented for base type {field_expr.base.wtype.name}",
                    assignment_location,
                )
        case _:
            raise CodeError(
                "expression is not valid as an assignment target", target.source_location
            )
def _handle_maybe_implicit_return_assignment(
    context: IRFunctionBuildContext,
    *,
    base_name: str,
    wtype: wtypes.WType,
    value: ValueProvider,
    var_loc: SourceLocation,
    assignment_loc: SourceLocation,
    is_nested_update: bool,
) -> Sequence[Value]:
    """Assign *value* to the registers backing the variable *base_name*,
    additionally tracking explicit reassignment of implicit-return parameters.
    """
    registers = build_tuple_registers(context, base_name, wtype, var_loc)
    for register in registers:
        is_implicit_return = register.name in (
            p.name for p in context.subroutine.parameters if p.implicit_return
        )
        # if an implicitly returned value is explicitly reassigned, then set a register which will
        # prevent the original from being updated any further
        if is_implicit_return and not is_nested_update:
            assign(
                context,
                UInt64Constant(value=0, ir_type=IRType.bool, source_location=None),
                name=get_implicit_return_is_original(register.name),
                assignment_location=None,
            )
    assign_targets(
        context,
        source=value,
        targets=registers,
        assignment_location=assignment_loc,
    )
    return registers
def _get_tuple_var_name(expr: awst_nodes.TupleItemExpression) -> str:
    """Resolve the synthetic register name for a (possibly nested) tuple item
    assignment target, e.g. ``t[0][1]``."""
    base = expr.base
    if isinstance(base.wtype, wtypes.WTuple):
        if isinstance(base, awst_nodes.TupleItemExpression):
            # recurse to resolve nested tuple access
            return format_tuple_index(base.wtype, _get_tuple_var_name(base), expr.index)
        if isinstance(base, awst_nodes.VarExpression):
            return format_tuple_index(base.wtype, base.name, expr.index)
    raise CodeError("invalid assignment target", base.source_location)
|
algorandfoundation/puya
|
src/puya/ir/builder/assignment.py
|
Python
|
NOASSERTION
| 10,805 |
import contextlib
import typing
from collections.abc import Iterator, Sequence
import attrs
from puya import log
from puya.awst import nodes as awst_nodes
from puya.errors import InternalError
from puya.ir.models import (
Assignment,
BasicBlock,
ControlOp,
Goto,
Op,
Register,
)
from puya.ir.ssa import BraunSSA
from puya.parse import SourceLocation
from puya.utils import lazy_setdefault
logger = log.get_logger(__name__)
@attrs.frozen(kw_only=True)
class _LoopTargets:
    """Jump targets for the enclosing loop."""

    # block to jump to on a break statement
    on_break: BasicBlock
    # block to jump to on a continue statement
    on_continue: BasicBlock
class BlocksBuilder:
    def __init__(
        self,
        parameters: Sequence[Register],
        default_source_location: SourceLocation,
    ) -> None:
        """Create the builder with a single (sealed) entry block, id 0."""
        self._loop_targets_stack: list[_LoopTargets] = []
        blocks = [BasicBlock(id=0, source_location=default_source_location)]
        self._blocks = blocks
        # initialize ssa
        self.ssa = BraunSSA(blocks, parameters, self.active_block)
        self.ssa.seal_block(self.active_block)
        # blocks for labels that have been jumped to but not yet created / already created
        self._pending_labelled_blocks = dict[awst_nodes.Label, BasicBlock]()
        self._created_labelled_blocks = dict[awst_nodes.Label, BasicBlock]()
    @property
    def active_block(self) -> BasicBlock:
        """The block currently being appended to (the most recently added one)."""
        return self._blocks[-1]
    def add(self, op: Op) -> None:
        """Add an op to the active block, reporting unreachable code if the
        block is already terminated."""
        curr = self.active_block
        if curr.terminated:
            self._unreachable_error(op)
        else:
            curr.ops.append(op)
            if isinstance(op, Assignment):
                # sanity check: the SSA bookkeeping must be told about every write
                for target in op.targets:
                    if not self.ssa.has_write(target.name, curr):
                        raise InternalError(
                            f"ssa.write_variable not called for {target.name} in block {curr}"
                        )
def maybe_terminate(self, control_op: ControlOp) -> bool:
"""Add the control op for the block, if not already terminated."""
curr = self.active_block
if curr.terminated:
return False
for target in control_op.targets():
if self.ssa.is_sealed(target):
raise InternalError(
f"Cannot add predecessor to block, as it is already sealed: "
f"predecessor={curr}, block={target}"
)
target.predecessors.append(curr)
curr.terminator = control_op
logger.debug(f"Terminated {curr}")
return True
def terminate(self, control_op: ControlOp) -> None:
if not self.maybe_terminate(control_op):
self._unreachable_error(control_op)
def _unreachable_error(self, op: Op | ControlOp) -> None:
if op.source_location:
location = op.source_location
message = "unreachable code"
else:
terminator_location = (
self.active_block.terminator and self.active_block.terminator.source_location
)
location = terminator_location or self.active_block.source_location
message = "unreachable code follows"
logger.error(message, location=location)
def goto(self, target: BasicBlock, source_location: SourceLocation | None = None) -> None:
"""Add goto to a basic block, iff current block is not already terminated"""
self.maybe_terminate(Goto(target=target, source_location=source_location))
def goto_label(self, label: awst_nodes.Label, source_location: SourceLocation) -> None:
try:
target = self._created_labelled_blocks[label]
except KeyError:
target = lazy_setdefault(
self._pending_labelled_blocks,
label,
lambda _: BasicBlock(label=label, source_location=source_location),
)
self.goto(target, source_location)
def activate_block(self, block: BasicBlock) -> None:
self._activate_block(block)
self._seal_block_if_unlabelled(block)
@contextlib.contextmanager
def activate_open_block(self, block: BasicBlock) -> Iterator[None]:
self._activate_block(block)
try:
yield
finally:
self._seal_block_if_unlabelled(block)
def _activate_block(self, block: BasicBlock) -> None:
"""Add a basic block and make it the active one (target of adds)"""
if not self.active_block.terminated:
raise InternalError(
"Attempted to activate a new block when current block has not been terminated"
)
if not block.predecessors:
raise InternalError("Attempted to add a (non-entry) block with no predecessors")
assert block.id is None
block.id = len(self._blocks)
self._blocks.append(block)
def try_activate_block(self, block: BasicBlock) -> bool:
if block.predecessors:
self.activate_block(block)
return True
if not block.is_empty:
# here as a sanity - there shouldn't've been any modifications of "next" block contents
raise InternalError("next block has no predecessors but does have op(s)")
return False
@contextlib.contextmanager
def enter_loop(self, on_continue: BasicBlock, on_break: BasicBlock) -> Iterator[None]:
self._loop_targets_stack.append(_LoopTargets(on_continue=on_continue, on_break=on_break))
try:
yield
finally:
self._loop_targets_stack.pop()
def loop_break(self, source_location: SourceLocation) -> None:
try:
targets = self._loop_targets_stack[-1]
except IndexError as ex:
# TODO: this might be a code error or an internal error
raise InternalError("break outside of loop", source_location) from ex
self.goto(target=targets.on_break, source_location=source_location)
def loop_continue(self, source_location: SourceLocation) -> None:
try:
targets = self._loop_targets_stack[-1]
except IndexError as ex:
# TODO: this might be a code error or an internal error
raise InternalError("continue outside of loop", source_location) from ex
self.goto(target=targets.on_continue, source_location=source_location)
@typing.overload
def mkblock(
self, source_location: SourceLocation, /, description: str | None
) -> BasicBlock: ...
@typing.overload
def mkblock(
self, block: awst_nodes.Block, /, description: str | None = None
) -> BasicBlock: ...
@typing.overload
def mkblock(
self,
block: awst_nodes.Block | None,
/,
description: str,
*,
fallback_location: SourceLocation,
) -> BasicBlock: ...
def mkblock(
self,
block_or_source_location: awst_nodes.Block | SourceLocation | None,
/,
description: str | None = None,
*,
fallback_location: SourceLocation | None = None,
) -> BasicBlock:
if isinstance(block_or_source_location, awst_nodes.Block):
label = block_or_source_location.label
comment = block_or_source_location.comment or description
loc = block_or_source_location.source_location
else:
label = None
comment = description
loc_ = block_or_source_location or fallback_location
assert loc_ is not None
loc = loc_
if label in self._created_labelled_blocks:
raise InternalError(
f"block for label {label} has already been created", fallback_location
)
if (label is not None) and (pending := self._pending_labelled_blocks.pop(label, None)):
result = pending
result.source_location = loc
result.comment = comment
else:
result = BasicBlock(label=label, comment=comment, source_location=loc)
if label is not None:
self._created_labelled_blocks[label] = result
return result
def mkblocks(
self, *descriptions: str, source_location: SourceLocation
) -> Iterator[BasicBlock]:
for description in descriptions:
yield self.mkblock(source_location, description)
def finalise(self) -> list[BasicBlock]:
for pending_label, pending in self._pending_labelled_blocks.items():
logger.error(
f"block with label {pending_label} not found", location=pending.source_location
)
for block in self._created_labelled_blocks.values():
self.ssa.seal_block(block)
self.ssa.verify_complete()
return self._blocks.copy()
def _seal_block_if_unlabelled(self, block: BasicBlock) -> None:
if block.label is None:
self.ssa.seal_block(block)
|
algorandfoundation/puya
|
src/puya/ir/builder/blocks.py
|
Python
|
NOASSERTION
| 8,792 |
from puya.awst import nodes as awst_nodes
from puya.ir.avm_ops import AVMOp
from puya.ir.builder._utils import assign_intrinsic_op, assign_temp
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import Intrinsic, UInt64Constant, Value, ValueProvider
from puya.ir.types_ import IRType
from puya.parse import SourceLocation
def visit_bytes_slice_expression(
    context: IRFunctionBuildContext, expr: awst_nodes.SliceExpression
) -> ValueProvider:
    """Lower a bytes slice expression to AVM substring3/extract intrinsics."""
    target = context.visitor.visit_and_materialise_single(expr.base)
    # a slice with neither bound is the identity
    if expr.begin_index is None and expr.end_index is None:
        return target
    loc = expr.source_location
    if expr.begin_index is None:
        begin: Value = UInt64Constant(value=0, source_location=loc)
    else:
        begin = context.visitor.visit_and_materialise_single(expr.begin_index)
    if expr.end_index is not None:
        end = context.visitor.visit_and_materialise_single(expr.end_index)
        return Intrinsic(
            op=AVMOp.substring3,
            args=[target, begin, end],
            source_location=loc,
        )
    if isinstance(begin, UInt64Constant):
        # we can use extract without computing the length when the start index is
        # a constant value and the end index is None (ie end of array)
        return Intrinsic(
            op=AVMOp.extract,
            immediates=[begin.value, 0],
            args=[target],
            source_location=loc,
        )
    # dynamic start with open end: substring3 up to the computed length
    target_length = assign_temp(
        context,
        source_location=loc,
        source=Intrinsic(op=AVMOp.len_, args=[target], source_location=loc),
        temp_description="base_length",
    )
    return Intrinsic(
        op=AVMOp.substring3,
        args=[target, begin, target_length],
        source_location=loc,
    )
def visit_bytes_intersection_slice_expression(
    context: IRFunctionBuildContext, expr: awst_nodes.IntersectionSliceExpression
) -> ValueProvider:
    """Lower a bytes "intersection" slice, clamping both indices into [0, len(base)].

    Indices are bounded via get_bounded_value (negative int constants count
    back from the end); if the bounded end could precede the bounded start,
    the end is replaced with the start so the result is an empty slice.
    """
    base = context.visitor.visit_and_materialise_single(expr.base)
    # len(base): used both for clamping and as the default end index
    length = assign_intrinsic_op(
        context,
        target="length",
        op=AVMOp.len_,
        args=[base],
        source_location=expr.source_location,
    )
    start = (
        UInt64Constant(value=0, source_location=expr.source_location)
        if expr.begin_index is None
        else get_bounded_value(
            context,
            value=expr.begin_index,
            length=length,
            source_location=expr.source_location,
        )
    )
    end = (
        length
        if expr.end_index is None
        else get_bounded_value(
            context,
            value=expr.end_index,
            length=length,
            source_location=expr.source_location,
        )
    )
    if _is_end_check_required(start_index=expr.begin_index, end_index=expr.end_index):
        # end = max(start, end), via select on (end < start)
        end_before_start = assign_intrinsic_op(
            context,
            target="end_before_start",
            op=AVMOp.lt,
            args=[end, start],
            source_location=expr.source_location,
        )
        end = assign_intrinsic_op(
            context,
            target="end",
            op=AVMOp.select,
            args=[end, start, end_before_start],
            source_location=expr.source_location,
            return_type=IRType.uint64,
        )
    return Intrinsic(
        op=AVMOp.substring3,
        args=[base, start, end],
        source_location=expr.source_location,
    )
def _is_end_check_required(
*,
start_index: awst_nodes.Expression | int | None,
end_index: awst_nodes.Expression | int | None,
) -> bool:
"""
Returns false if we can statically determine the start is less than or equal to the end (or
will be once it is bounded between 0 <= index <= len(target) )
"""
if start_index is None or end_index is None:
return False
match start_index:
case awst_nodes.IntegerConstant(value=start_static):
pass
case int(start_static):
pass
case _:
# Start is not statically known so a check is required
return True
match end_index:
case awst_nodes.IntegerConstant(value=end_static):
pass
case int(end_static):
pass
case _:
# End is not statically known, a check is required if start is not 0
return start_static > 0
# If start is negative
if start_static < 0:
# a check is required if end is more_negative, or not negative at all
return end_static < start_static or end_static > 0
# If end is negative (and start is not), a check is required
if end_static < 0:
return True
# A check is required if start is greater than end
return start_static > end_static
def get_bounded_value(
    context: IRFunctionBuildContext,
    *,
    value: awst_nodes.Expression | int,
    length: Value,
    source_location: SourceLocation,
) -> Value:
    """Clamp a slice index into the range [0, length].

    A negative int constant is interpreted as an offset back from length
    (clamped so the subtraction cannot underflow); any other index is
    capped at length.
    """
    if isinstance(value, int) and value < 0:
        # abs(value) >= length
        is_out_of_bounds = assign_intrinsic_op(
            context,
            target="is_out_of_bounds",
            op=AVMOp.gte,
            args=[abs(value), length],
            source_location=source_location,
        )
        # length if is_out_of_bounds else abs(value)
        bounded_offset = assign_intrinsic_op(
            context,
            op=AVMOp.select,
            args=[abs(value), length, is_out_of_bounds],
            source_location=source_location,
            target="bounded_offset",
            return_type=IRType.uint64,
        )
        # length - bounded_offset
        bounded_index = assign_intrinsic_op(
            context,
            op=AVMOp.sub,
            args=[length, bounded_offset],
            target="bounded_index",
            source_location=source_location,
        )
        return bounded_index
    if isinstance(value, int):
        unbounded: Value = UInt64Constant(value=value, source_location=source_location)
    else:
        unbounded = context.visitor.visit_and_materialise_single(value)
    # unbounded >= length
    is_out_of_bounds = assign_intrinsic_op(
        context,
        target="is_out_of_bounds",
        op=AVMOp.gte,
        args=[unbounded, length],
        source_location=source_location,
    )
    # length if is_out_of_bounds else unbounded
    bounded_index = assign_intrinsic_op(
        context,
        op=AVMOp.select,
        args=[unbounded, length, is_out_of_bounds],
        source_location=source_location,
        target="bounded_index",
        return_type=IRType.uint64,
    )
    return bounded_index
|
algorandfoundation/puya
|
src/puya/ir/builder/bytes.py
|
Python
|
NOASSERTION
| 6,766 |
from collections.abc import Sequence
import attrs
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.errors import CodeError
from puya.ir.builder._tuple_util import build_tuple_item_names
from puya.ir.builder._utils import assign_targets, new_register_version
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import InvokeSubroutine, Register, Subroutine, Value, ValueProvider, ValueTuple
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
def visit_subroutine_call_expression(
    context: IRFunctionBuildContext, expr: awst_nodes.SubroutineCallExpression
) -> ValueProvider | None:
    """Lower a user subroutine call by resolving its target then invoking it."""
    subroutine = context.resolve_subroutine(expr.target, expr.source_location)
    return _call_subroutine(context, subroutine, expr.args, expr.source_location)
def visit_puya_lib_call_expression(
    context: IRFunctionBuildContext, call: awst_nodes.PuyaLibCall
) -> ValueProvider | None:
    """Lower a call to an embedded puya lib function via the embedded funcs lookup."""
    func_id = call.func.value.id
    target = context.embedded_funcs_lookup.get(func_id)
    if target is None:
        raise CodeError(f"invalid puya_lib {call.func.name}", call.source_location) from None
    return _call_subroutine(context, target, call.args, call.source_location)
def _call_subroutine(
    context: IRFunctionBuildContext,
    target: Subroutine,
    args: Sequence[awst_nodes.CallArg],
    call_location: SourceLocation,
) -> ValueProvider | None:
    """Lower a subroutine call, matching args to parameters and handling implicit returns.

    Parameters marked implicit_return produce extra return values that are
    assigned back to the caller's registers here.
    """
    arg_lookup = _build_arg_lookup(context, args, call_location)
    resolved_args = []
    implicit_args = []
    for idx, param in enumerate(target.parameters):
        arg_val = arg_lookup.get(index=idx, param_name=param.name)
        resolved_args.append(arg_val)
        if param.implicit_return:
            # the same value passed twice would produce conflicting write-backs
            if arg_val in implicit_args:
                logger.error(
                    "mutable values cannot be passed more than once to a subroutine",
                    location=arg_val.source_location,
                )
            implicit_args.append(arg_val)
    if not arg_lookup.is_empty:
        # leftover entries mean some args matched no parameter
        raise CodeError("function call arguments do not match signature", call_location) from None
    invoke_expr = InvokeSubroutine(
        source_location=call_location, args=resolved_args, target=target
    )
    if not implicit_args:
        return invoke_expr
    return_values = context.visitor.materialise_value_provider(invoke_expr, target.short_name)
    # pop pairs off the tails of both lists in reverse parameter order
    # NOTE(review): relies on implicit return values forming the tail of
    # return_values, in parameter order - confirm against subroutine lowering
    while implicit_args:
        in_arg = implicit_args.pop()
        out_value = return_values.pop()
        if isinstance(in_arg, Register):
            # write the updated value back to a new version of the caller's register
            out_arg = new_register_version(context, in_arg)
            assign_targets(
                context,
                source=out_value,
                targets=[out_arg],
                assignment_location=call_location,
            )
    # whatever remains are the explicit return value(s), if any
    return (
        ValueTuple(values=return_values, source_location=call_location) if return_values else None
    )
@attrs.define
class _ArgLookup:
    """Call arguments tracked by position and name, consumed as parameters match."""

    _source_location: SourceLocation
    # positional args keyed by their original argument index
    _positional_args: dict[int, Value] = attrs.field(factory=dict, init=False)
    _named_args: dict[str, Value] = attrs.field(factory=dict, init=False)
    # running count of all args added - named args still advance the position
    _arg_idx: int = attrs.field(default=0, init=False)
    @property
    def is_empty(self) -> bool:
        """True once every argument has been consumed via get()."""
        return not self._named_args and not self._positional_args
    def add(self, name: str | None, value: Value) -> None:
        """Record an argument: positional if name is None, otherwise by keyword."""
        if name is None:
            self._positional_args[self._arg_idx] = value
        else:
            self._named_args[name] = value
        self._arg_idx += 1
    def get(self, index: int, param_name: str | None) -> Value:
        """Consume and return the argument for a parameter, by name first then position.

        Raises CodeError if no matching argument was supplied.
        """
        if param_name is not None:
            by_name = self._named_args.pop(param_name, None)
            if by_name is not None:
                return by_name
        try:
            return self._positional_args.pop(index)
        except KeyError:
            raise CodeError(
                "function call arguments do not match signature", self._source_location
            ) from None
def _build_arg_lookup(
    context: IRFunctionBuildContext,
    args: Sequence[awst_nodes.CallArg],
    call_location: SourceLocation,
) -> _ArgLookup:
    """Materialise call arguments into an _ArgLookup, flattening tuple-typed args."""
    result = _ArgLookup(call_location)
    for arg in args:
        arg_wtype = arg.value.wtype
        if isinstance(arg_wtype, wtypes.WTuple):
            # tuple args are flattened into their constituent values
            materialised = context.visitor.visit_and_materialise(arg.value)
            if arg.name is None:
                for single in materialised:
                    result.add(name=None, value=single)
            else:
                # named tuple args expand into one named entry per element
                named_items = build_tuple_item_names(
                    base_name=arg.name,
                    wtype=arg_wtype,
                    source_location=call_location,
                )
                for single, (item_name, _) in zip(materialised, named_items, strict=True):
                    result.add(name=item_name, value=single)
        else:
            result.add(
                name=arg.name,
                value=context.visitor.visit_and_materialise_single(arg.value),
            )
    return result
|
algorandfoundation/puya
|
src/puya/ir/builder/callsub.py
|
Python
|
NOASSERTION
| 5,028 |
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.errors import InternalError
from puya.ir import intrinsic_factory
from puya.ir.builder._tuple_util import build_tuple_registers
from puya.ir.builder._utils import assign_targets, new_register_version
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import (
BasicBlock,
ConditionalBranch,
Switch,
Value,
ValueProvider,
ValueTuple,
)
from puya.ir.types_ import get_wtype_arity, wtype_to_ir_type
from puya.parse import SourceLocation
from puya.utils import lazy_setdefault
logger = log.get_logger(__name__)
def handle_if_else(context: IRFunctionBuildContext, stmt: awst_nodes.IfElse) -> None:
    """Lower an if/else statement into conditionally-branched basic blocks."""
    if_block = context.block_builder.mkblock(stmt.if_branch, "if_body")
    else_block = (
        context.block_builder.mkblock(stmt.else_branch, "else_body") if stmt.else_branch else None
    )
    next_block = context.block_builder.mkblock(stmt.source_location, "after_if_else")
    # with no else branch, a false condition falls straight through
    process_conditional(
        context,
        stmt.condition,
        true=if_block,
        false=else_block if else_block else next_block,
        loc=stmt.source_location,
    )
    _branch(context, if_block, stmt.if_branch, next_block)
    if else_block:
        assert stmt.else_branch
        _branch(context, else_block, stmt.else_branch, next_block)
    # activate the "next" block if it is reachable, which might not be the case
    # if all paths within the "if" and "else" branches return early
    context.block_builder.try_activate_block(next_block)
def handle_switch(context: IRFunctionBuildContext, statement: awst_nodes.Switch) -> None:
    """Lower a switch statement to a Switch terminator with one IR block per distinct case."""
    # multiple case values may share one AWST block; ir_blocks de-duplicates them
    case_blocks = dict[Value, BasicBlock]()
    ir_blocks = dict[awst_nodes.Block | None, BasicBlock]()
    for value, block in statement.cases.items():
        ir_value = context.visitor.visit_and_materialise_single(value)
        if ir_value in case_blocks:
            # duplicate case value - this block can never be selected
            logger.error("code block is unreachable", location=block.source_location)
        else:
            case_blocks[ir_value] = lazy_setdefault(
                ir_blocks,
                block,
                lambda _: context.block_builder.mkblock(
                    block,  # noqa: B023
                    f"switch_case_{len(ir_blocks)}",
                ),
            )
    # default case (keyed on None if absent) shares the de-dupe map
    default_block = lazy_setdefault(
        ir_blocks,
        statement.default_case,
        lambda b: context.block_builder.mkblock(
            b, "switch_case_default", fallback_location=statement.source_location
        ),
    )
    next_block = context.block_builder.mkblock(statement.source_location, "switch_case_next")
    switch_value = context.visitor.visit_and_materialise_single(statement.value)
    context.block_builder.terminate(
        Switch(
            value=switch_value,
            cases=case_blocks,
            default=default_block,
            source_location=statement.source_location,
        )
    )
    # emit each distinct case body, all converging on next_block
    for block_, ir_block in ir_blocks.items():
        _branch(context, ir_block, block_, next_block)
    # activate the "next" block if it is reachable, which might not be the case
    # if all code paths within the cases return early
    context.block_builder.try_activate_block(next_block)
def _branch(
    context: IRFunctionBuildContext,
    ir_block: BasicBlock,
    ast_block: awst_nodes.Block | None,
    next_ir_block: BasicBlock,
) -> None:
    """Activate ir_block, emit ast_block's contents into it (if any), then jump on."""
    builder = context.block_builder
    builder.activate_block(ir_block)
    if ast_block is not None:
        ast_block.accept(context.visitor)
    builder.goto(next_ir_block)
def process_conditional(
    context: IRFunctionBuildContext,
    expr: awst_nodes.Expression,
    *,
    true: BasicBlock,
    false: BasicBlock,
    loc: SourceLocation,
) -> None:
    """Evaluate a boolean expression and branch to the `true` or `false` block.

    Boolean and/or are short-circuited by chaining conditional branches,
    and `not` is lowered by simply swapping the branch targets.
    """
    if expr.wtype != wtypes.bool_wtype:
        raise InternalError(
            "_process_conditional should only be used for boolean conditionals", loc
        )
    match expr:
        case awst_nodes.BooleanBinaryOperation(
            # note: `loc` is rebound here to the operation's own location
            op=bool_op, left=lhs, right=rhs, source_location=loc
        ):
            # Short circuit boolean binary operators in a conditional context.
            contd = context.block_builder.mkblock(loc, f"{bool_op}_contd")
            if bool_op == "and":
                # a false lhs skips evaluating rhs entirely
                process_conditional(context, lhs, true=contd, false=false, loc=loc)
            elif bool_op == "or":
                # a true lhs skips evaluating rhs entirely
                process_conditional(context, lhs, true=true, false=contd, loc=loc)
            else:
                raise InternalError(
                    f"Unhandled boolean operator for short circuiting: {bool_op}", loc
                )
            context.block_builder.activate_block(contd)
            process_conditional(context, rhs, true=true, false=false, loc=loc)
        case awst_nodes.Not(expr=expr, source_location=loc):
            # not(x) is x with the branch targets swapped
            process_conditional(context, expr, true=false, false=true, loc=loc)
        case _:
            condition_value = context.visitor.visit_and_materialise_single(expr)
            context.block_builder.terminate(
                ConditionalBranch(
                    condition=condition_value,
                    non_zero=true,
                    zero=false,
                    source_location=loc,
                )
            )
def handle_while_loop(context: IRFunctionBuildContext, statement: awst_nodes.WhileLoop) -> None:
    """Lower a while loop: condition in an open header block, body looping back to it."""
    top, next_block = context.block_builder.mkblocks(
        "while_top", "after_while", source_location=statement.source_location
    )
    body = context.block_builder.mkblock(statement.loop_body, "while_body")
    context.block_builder.goto(top)
    # the header stays open (unsealed) until the body's back-edge has been added
    with context.block_builder.activate_open_block(top):
        process_conditional(
            context,
            statement.condition,
            true=body,
            false=next_block,
            loc=statement.source_location,
        )
        context.block_builder.activate_block(body)
        with context.block_builder.enter_loop(on_continue=top, on_break=next_block):
            statement.loop_body.accept(context.visitor)
        context.block_builder.goto(top)
    context.block_builder.activate_block(next_block)
def handle_conditional_expression(
    context: IRFunctionBuildContext, expr: awst_nodes.ConditionalExpression
) -> ValueProvider:
    """Lower a ternary expression.

    Uses a single `select` when both arms are side-effect free single values,
    otherwise lowers to a branch where each arm assigns a shared temporary.
    """
    # if lhs and rhs are both guaranteed to not produce side effects, we can use a simple select op
    # TODO: expand detection of side-effect free to include "pure" ops
    if (
        get_wtype_arity(expr.wtype) == 1
        and isinstance(
            expr.true_expr, awst_nodes.VarExpression | awst_nodes.CompileTimeConstantExpression
        )
        and isinstance(
            expr.false_expr, awst_nodes.VarExpression | awst_nodes.CompileTimeConstantExpression
        )
    ):
        false_reg = context.visitor.visit_and_materialise_single(expr.false_expr)
        true_reg = context.visitor.visit_and_materialise_single(expr.true_expr)
        condition_value = context.visitor.visit_and_materialise_single(expr.condition)
        return intrinsic_factory.select(
            condition=condition_value,
            true=true_reg,
            false=false_reg,
            type_=wtype_to_ir_type(expr),
            source_location=expr.source_location,
        )
    true_block, false_block, merge_block = context.block_builder.mkblocks(
        "ternary_true", "ternary_false", "ternary_merge", source_location=expr.source_location
    )
    # both arms assign to the same temporary (the false arm via new SSA versions)
    tmp_var_name = context.next_tmp_name("ternary_result")
    true_registers = build_tuple_registers(context, tmp_var_name, expr.wtype, expr.source_location)
    process_conditional(
        context,
        expr.condition,
        true=true_block,
        false=false_block,
        loc=expr.source_location,
    )
    context.block_builder.activate_block(true_block)
    true_vp = context.visitor.visit_expr(expr.true_expr)
    assign_targets(
        context,
        source=true_vp,
        targets=true_registers,
        assignment_location=expr.true_expr.source_location,
    )
    context.block_builder.goto(merge_block)
    context.block_builder.activate_block(false_block)
    false_vp = context.visitor.visit_expr(expr.false_expr)
    assign_targets(
        context,
        source=false_vp,
        targets=[new_register_version(context, reg) for reg in true_registers],
        assignment_location=expr.false_expr.source_location,
    )
    context.block_builder.goto(merge_block)
    context.block_builder.activate_block(merge_block)
    # read the variable back in the merge block so SSA resolves the two versions
    result = [
        context.ssa.read_variable(variable=r.name, ir_type=r.ir_type, block=merge_block)
        for r in true_registers
    ]
    if len(result) == 1:
        return result[0]
    return ValueTuple(values=result, source_location=expr.source_location)
|
algorandfoundation/puya
|
src/puya/ir/builder/flow_control.py
|
Python
|
NOASSERTION
| 8,727 |
import typing
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.nodes import Expression
from puya.errors import CodeError, InternalError
from puya.ir.avm_ops import AVMOp
from puya.ir.builder import arc4
from puya.ir.builder._tuple_util import build_tuple_registers, get_tuple_item_values
from puya.ir.builder._utils import (
assert_value,
assign_intrinsic_op,
assign_targets,
assign_temp,
)
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import (
ConditionalBranch,
GotoNth,
Intrinsic,
Register,
UInt64Constant,
Value,
ValueProvider,
)
from puya.ir.types_ import IRType
from puya.ir.utils import lvalue_items
from puya.parse import SourceLocation
logger = log.get_logger(__name__)
class LoopVariables(typing.NamedTuple):
    """Registers for the user-visible loop variables: the item(s) plus optional index."""

    item_registers: typing.Sequence[Register]
    index_: Register | None

    def refresh_assignment(self, context: IRFunctionBuildContext) -> "LoopVariables":
        """Return a copy with every register replaced by its latest mutated version."""
        refreshed_items = [_refresh_mutated_variable(context, reg) for reg in self.item_registers]
        refreshed_index = (
            None if self.index_ is None else _refresh_mutated_variable(context, self.index_)
        )
        return LoopVariables(refreshed_items, refreshed_index)
class LoopAssigner:
    """Assigns each iteration's item (and optional enumerate index) to the user's loop vars."""

    def __init__(
        self, context: IRFunctionBuildContext, items: awst_nodes.Lvalue, *, has_enumerate: bool
    ):
        self._context: typing.Final = context
        self._items: typing.Final = items
        self.has_enumerate: typing.Final = has_enumerate
    def assign_user_loop_vars(
        self, item_provider: ValueProvider, index_provider: ValueProvider
    ) -> LoopVariables:
        """Assign the current item/index values to the loop variable registers.

        When enumerating, the first register built from the lvalue receives
        the index; the rest receive the item value(s).
        """
        registers = self._build_registers_from_lvalue(self._items)
        if not self.has_enumerate:
            index_register = None
            item_registers = registers
        else:
            (index_register, *item_registers) = registers
        assign_targets(
            self._context,
            source=item_provider,
            targets=item_registers,
            assignment_location=self._items.source_location,
        )
        if index_register:
            assign_targets(
                self._context,
                source=index_provider,
                targets=[index_register],
                assignment_location=self._items.source_location,
            )
        return LoopVariables(item_registers, index_register)
    def _build_registers_from_lvalue(self, target: awst_nodes.Lvalue) -> list[Register]:
        # flatten the assignment target into registers: plain variables expand to
        # their tuple registers, tuple expressions recurse into each element
        match target:
            case awst_nodes.VarExpression(name=var_name, source_location=var_loc, wtype=var_type):
                return build_tuple_registers(self._context, var_name, var_type, var_loc)
            case awst_nodes.TupleExpression() as tup_expr:
                tuple_items = lvalue_items(tup_expr)
                return [
                    reg for item in tuple_items for reg in self._build_registers_from_lvalue(item)
                ]
            case _:
                raise CodeError(
                    "unsupported assignment target in loop", self._items.source_location
                )
def handle_for_in_loop(context: IRFunctionBuildContext, statement: awst_nodes.ForInLoop) -> None:
    """Lower a for-in loop, dispatching on the type of sequence being iterated.

    Supports urange, tuples, bytes, and ARC4 arrays, with optional Reversed
    and Enumeration wrappers (in either nesting order) around the sequence.
    """
    sequence = statement.sequence
    has_enumerate = False
    reverse_items = False
    reverse_index = False
    # peel off any Enumeration/Reversed wrappers to find the underlying sequence
    while True:
        match sequence:
            case awst_nodes.Enumeration():
                if has_enumerate:
                    raise CodeError(
                        "Nested enumeration is not currently supported", sequence.source_location
                    )
                sequence = sequence.expr
                has_enumerate = True
            case awst_nodes.Reversed():
                sequence = sequence.expr
                reverse_items = not reverse_items
                # a reversal outside of any enumeration reverses the index order too
                if not has_enumerate:
                    reverse_index = not reverse_index
            case _:
                break
    assign_user_loop_vars = LoopAssigner(
        context,
        items=statement.items,
        has_enumerate=has_enumerate,
    )
    match sequence:
        case awst_nodes.Range(
            start=range_start, stop=range_stop, step=range_step, source_location=range_loc
        ):
            _iterate_urange(
                context,
                loop_body=statement.loop_body,
                assigner=assign_user_loop_vars,
                statement_loc=statement.source_location,
                range_start=range_start,
                range_stop=range_stop,
                range_step=range_step,
                range_loc=range_loc,
                reverse_items=reverse_items,
                reverse_index=reverse_index,
            )
        case awst_nodes.Expression(wtype=wtypes.WTuple(types=item_types)) as tuple_expression:
            # tuples are unrolled at compile time; nothing to emit when empty
            if not item_types:
                logger.debug("Skipping ForInStatement which iterates an empty sequence.")
            else:
                _iterate_tuple(
                    context,
                    loop_body=statement.loop_body,
                    assigner=assign_user_loop_vars,
                    tuple_expr=tuple_expression,
                    statement_loc=statement.source_location,
                    reverse_index=reverse_index,
                    reverse_items=reverse_items,
                )
        case awst_nodes.Expression(wtype=wtypes.bytes_wtype):
            # iterate a bytes value one byte at a time via extract3
            bytes_value = context.visitor.visit_and_materialise_single(sequence)
            byte_length = assign_temp(
                context,
                temp_description="bytes_length",
                source=Intrinsic(
                    op=AVMOp.len_,
                    args=[bytes_value],
                    source_location=statement.source_location,
                ),
                source_location=statement.source_location,
            )

            def get_byte_at_index(index_register: Value) -> ValueProvider:
                # extract3 with a constant length of 1 yields the byte at the index
                return Intrinsic(
                    op=AVMOp.extract3,
                    args=[
                        bytes_value,
                        index_register,
                        UInt64Constant(value=1, source_location=None),
                    ],
                    source_location=statement.items.source_location,
                )

            _iterate_indexable(
                context,
                loop_body=statement.loop_body,
                indexable_size=byte_length,
                get_value_at_index=get_byte_at_index,
                assigner=assign_user_loop_vars,
                statement_loc=statement.source_location,
                reverse_index=reverse_index,
                reverse_items=reverse_items,
            )
        case awst_nodes.Expression(wtype=wtypes.ARC4Array() as array_wtype):
            # ARC4 arrays provide their own length/indexing strategy
            iterator = arc4.build_for_in_array(
                context,
                array_wtype,
                sequence,
                statement.source_location,
            )
            _iterate_indexable(
                context,
                loop_body=statement.loop_body,
                indexable_size=iterator.array_length,
                get_value_at_index=iterator.get_value_at_index,
                assigner=assign_user_loop_vars,
                statement_loc=statement.source_location,
                reverse_index=reverse_index,
                reverse_items=reverse_items,
            )
        case _:
            raise InternalError("Unsupported ForInLoop sequence", statement.source_location)
def _iterate_urange(
    context: IRFunctionBuildContext,
    *,
    loop_body: awst_nodes.Block,
    assigner: LoopAssigner,
    statement_loc: SourceLocation,
    range_start: Expression,
    range_stop: Expression,
    range_step: Expression,
    range_loc: SourceLocation,
    reverse_items: bool,
    reverse_index: bool,
) -> None:
    """Lower iteration over a urange, choosing the simple or reversal-capable strategy."""
    # materialise the range arguments (step, stop, start - preserving evaluation order)
    step_value = context.visitor.visit_and_materialise_single(range_step)
    stop_value = context.visitor.visit_and_materialise_single(range_stop)
    start_value = context.visitor.visit_and_materialise_single(range_start)
    assert_value(
        context, step_value, source_location=statement_loc, comment="Step cannot be zero"
    )
    needs_reversal = reverse_items or reverse_index
    if not needs_reversal:
        _iterate_urange_simple(
            context,
            loop_body=loop_body,
            assigner=assigner,
            statement_loc=statement_loc,
            start=start_value,
            stop=stop_value,
            step=step_value,
            range_loc=range_loc,
        )
    else:
        _iterate_urange_with_reversal(
            context,
            loop_body=loop_body,
            assigner=assigner,
            statement_loc=statement_loc,
            start=start_value,
            stop=stop_value,
            step=step_value,
            range_loc=range_loc,
            reverse_items=reverse_items,
            reverse_index=reverse_index,
        )
def _iterate_urange_simple(
    context: IRFunctionBuildContext,
    *,
    loop_body: awst_nodes.Block,
    assigner: LoopAssigner,
    statement_loc: SourceLocation,
    start: Value,
    stop: Value,
    step: Value,
    range_loc: SourceLocation,
) -> None:
    """Lower a forward urange loop: item starts at start, adds step while item < stop."""
    body = context.block_builder.mkblock(loop_body, "for_body")
    header, footer, next_block = context.block_builder.mkblocks(
        "for_header", "for_footer", "after_for", source_location=statement_loc
    )
    # the item register doubles as the range counter; enumerate index starts at 0
    loop_vars = assigner.assign_user_loop_vars(
        start, UInt64Constant(value=0, source_location=None)
    )
    context.block_builder.goto(header)
    # the header stays open until the footer's back-edge has been added
    with context.block_builder.activate_open_block(header):
        (current_range_item,), current_range_index = loop_vars.refresh_assignment(context)
        continue_looping = assign_intrinsic_op(
            context,
            target="continue_looping",
            op=AVMOp.lt,
            args=[current_range_item, stop],
            source_location=range_loc,
        )
        context.block_builder.terminate(
            ConditionalBranch(
                condition=continue_looping,
                non_zero=body,
                zero=next_block,
                source_location=statement_loc,
            )
        )
        context.block_builder.activate_block(body)
        with context.block_builder.enter_loop(on_continue=footer, on_break=next_block):
            loop_body.accept(context.visitor)
        context.block_builder.goto(footer)
        # the footer is unreachable if every path through the body broke/returned
        if context.block_builder.try_activate_block(footer):
            assign_intrinsic_op(
                context,
                target=current_range_item,
                op=AVMOp.add,
                args=[current_range_item, step],
                source_location=range_loc,
            )
            # the index register only exists when enumerating
            if current_range_index:
                assign_intrinsic_op(
                    context,
                    target=current_range_index,
                    op=AVMOp.add,
                    args=[current_range_index, 1],
                    source_location=range_loc,
                )
            context.block_builder.goto(header)
    context.block_builder.activate_block(next_block)
def _iterate_urange_with_reversal(
    context: IRFunctionBuildContext,
    *,
    loop_body: awst_nodes.Block,
    assigner: LoopAssigner,
    statement_loc: SourceLocation,
    start: Value,
    stop: Value,
    step: Value,
    range_loc: SourceLocation,
    reverse_items: bool,
    reverse_index: bool,
) -> None:
    """Lower a urange for-loop where the items and/or the index run in reverse.

    Unlike the simple case, reversal requires the iteration count (and thus the
    maximum range item) to be computed up-front, guarded by a `start < stop`
    pre-check so the count arithmetic cannot underflow on an empty range.
    """
    assert reverse_items or reverse_index
    body = context.block_builder.mkblock(loop_body, "for_body")
    header, footer, increment_block, next_block = context.block_builder.mkblocks(
        "for_header", "for_footer", "for_increment", "after_for", source_location=statement_loc
    )
    # The following code will result in underflow if we don't pre-check the urange
    # params
    should_loop = assign_intrinsic_op(
        context,
        target="should_loop",
        op=AVMOp.lt,
        args=[start, stop],
        source_location=statement_loc,
    )
    context.block_builder.terminate(
        ConditionalBranch(
            condition=should_loop,
            non_zero=header,
            zero=next_block,
            source_location=statement_loc,
        )
    )
    context.block_builder.activate_block(header)
    # iteration_count = ((stop - 1) - start) // step + 1
    # => iteration_count - 1 = (stop - start - 1) // step
    range_length = assign_intrinsic_op(
        context,
        target="range_length",
        op=AVMOp.sub,
        args=[stop, start],
        source_location=range_loc,
    )
    range_length_minus_one = assign_intrinsic_op(
        context,
        target="range_length_minus_one",
        op=AVMOp.sub,
        args=[range_length, 1],
        source_location=range_loc,
    )
    iteration_count_minus_one = assign_intrinsic_op(
        context,
        target="iteration_count_minus_one",
        op=AVMOp.div_floor,
        args=[range_length_minus_one, step],
        source_location=range_loc,
    )
    range_delta = assign_intrinsic_op(
        context,
        target="range_delta",
        op=AVMOp.mul,
        args=[step, iteration_count_minus_one],
        source_location=range_loc,
    )
    # max_range_item is the last item a forward iteration would produce, i.e. the
    # first item when items are reversed
    max_range_item = assign_intrinsic_op(
        context,
        target="max_range_item",
        op=AVMOp.add,
        args=[start, range_delta],
        source_location=range_loc,
    )
    # initial values depend on which of item/index is reversed
    loop_vars = assigner.assign_user_loop_vars(
        start if not reverse_items else max_range_item,
        (
            UInt64Constant(value=0, source_location=None)
            if not reverse_index
            else iteration_count_minus_one
        ),
    )
    context.block_builder.goto(body)
    with context.block_builder.activate_open_block(body):
        # re-read the loop vars through SSA since they are mutated in increment_block
        (current_range_item,), current_range_index = loop_vars.refresh_assignment(context)
        with context.block_builder.enter_loop(on_continue=footer, on_break=next_block):
            loop_body.accept(context.visitor)
        context.block_builder.goto(footer)
        if context.block_builder.try_activate_block(footer):
            # loop-end test happens *after* the body: compare against the terminal
            # item (max_range_item going forwards, start going backwards)
            continue_looping_op = Intrinsic(
                op=AVMOp.lt,
                args=(
                    [current_range_item, max_range_item]
                    if not reverse_items
                    else [start, current_range_item]
                ),
                source_location=range_loc,
            )
            continue_looping = assign_temp(
                context,
                source=continue_looping_op,
                temp_description="continue_looping",
                source_location=range_loc,
            )
            context.block_builder.terminate(
                ConditionalBranch(
                    condition=continue_looping,
                    non_zero=increment_block,
                    zero=next_block,
                    source_location=statement_loc,
                )
            )
            context.block_builder.activate_block(increment_block)
            # step the item (and index, if bound) in the appropriate direction
            assign_intrinsic_op(
                context,
                target=current_range_item,
                op=AVMOp.add if not reverse_items else AVMOp.sub,
                args=[current_range_item, step],
                source_location=range_loc,
            )
            if current_range_index:
                assign_intrinsic_op(
                    context,
                    target=current_range_index,
                    op=AVMOp.add if not reverse_index else AVMOp.sub,
                    args=[current_range_index, 1],
                    source_location=range_loc,
                )
            context.block_builder.goto(body)
    context.block_builder.activate_block(next_block)
def _iterate_indexable(
    context: IRFunctionBuildContext,
    *,
    loop_body: awst_nodes.Block,
    assigner: LoopAssigner,
    statement_loc: SourceLocation,
    indexable_size: Value,
    get_value_at_index: typing.Callable[[Value], ValueProvider],
    reverse_items: bool,
    reverse_index: bool,
) -> None:
    """Lower a for-loop over an indexable of known size, forwards or reversed.

    Two internal counters are maintained: a forward counter starting at 0 and a
    reverse counter starting at `indexable_size` (decremented *inside* the body,
    so within the body it holds `size - 1 - forward_index`). The user-visible
    item/index are drawn from whichever counter matches the requested direction.
    """
    body = context.block_builder.mkblock(loop_body, "for_body")
    header, footer, next_block = context.block_builder.mkblocks(
        "for_header", "for_footer", "after_for", source_location=statement_loc
    )
    index_internal = assign_temp(
        context,
        source=UInt64Constant(value=0, source_location=None),
        temp_description="item_index_internal",
        source_location=None,
    )
    reverse_index_internal = assign_temp(
        context,
        source=indexable_size,
        temp_description="reverse_index_internal",
        source_location=None,
    )
    context.block_builder.goto(header)
    with context.block_builder.activate_open_block(header):
        current_index_internal = _refresh_mutated_variable(context, index_internal)
        # loop condition uses whichever counter drives the iteration:
        # forward counter < size, or reverse counter > 0
        if not (reverse_items or reverse_index):
            continue_looping = assign_intrinsic_op(
                context,
                target="continue_looping",
                op=AVMOp.lt,
                args=[current_index_internal, indexable_size],
                source_location=statement_loc,
            )
        else:
            continue_looping = assign_intrinsic_op(
                context,
                target="continue_looping",
                op=AVMOp.gt,
                args=[_refresh_mutated_variable(context, reverse_index_internal), 0],
                source_location=statement_loc,
            )
        context.block_builder.terminate(
            ConditionalBranch(
                condition=continue_looping,
                non_zero=body,
                zero=next_block,
                source_location=statement_loc,
            )
        )
        context.block_builder.activate_block(body)
        if reverse_items or reverse_index:
            # decrement before use: size..1 in the header becomes size-1..0 here
            reverse_index_internal = assign_intrinsic_op(
                context,
                target=reverse_index_internal,
                op=AVMOp.sub,
                args=[_refresh_mutated_variable(context, reverse_index_internal), 1],
                source_location=None,
            )
        assigner.assign_user_loop_vars(
            get_value_at_index(
                reverse_index_internal if reverse_items else current_index_internal
            ),
            reverse_index_internal if reverse_index else current_index_internal,
        )
        with context.block_builder.enter_loop(on_continue=footer, on_break=next_block):
            loop_body.accept(context.visitor)
        context.block_builder.goto(footer)
        if context.block_builder.try_activate_block(footer):
            # the forward counter is only needed unless *both* item and index are
            # reversed (in which case the reverse counter drives everything)
            if not (reverse_items and reverse_index):
                assign_intrinsic_op(
                    context,
                    target=index_internal,
                    op=AVMOp.add,
                    args=[current_index_internal, 1],
                    source_location=None,
                )
            context.block_builder.goto(header)
    context.block_builder.activate_block(next_block)
def _iterate_tuple(
    context: IRFunctionBuildContext,
    *,
    loop_body: awst_nodes.Block,
    assigner: LoopAssigner,
    tuple_expr: awst_nodes.Expression,
    statement_loc: SourceLocation,
    reverse_index: bool,
    reverse_items: bool,
) -> None:
    """Lower a for-loop over a (heterogeneous) WTuple.

    Since each tuple element may have a different type, the loop is unrolled at
    the block level: one header per element (after the first) assigns that
    element's value, and a GotoNth on a loop counter dispatches from the shared
    footer to the next element's header, or to after_for when exhausted.
    """
    tuple_values = context.visitor.visit_and_materialise(tuple_expr)
    assert isinstance(tuple_expr.wtype, wtypes.WTuple), "tuple_expr wtype must be WTuple"
    tuple_wtype = tuple_expr.wtype
    max_index = len(tuple_wtype.types) - 1
    loop_counter_name = context.next_tmp_name("loop_counter")
    def assign_counter_and_user_vars(loop_count: int) -> Register:
        # assigns the internal counter plus the user's item/index vars for one
        # element, and returns the register the footer dispatch should read
        counter_reg = context.ssa.new_register(loop_counter_name, IRType.uint64, None)
        assign_targets(
            context,
            source=UInt64Constant(value=loop_count, source_location=None),
            targets=[counter_reg],
            assignment_location=None,
        )
        item_index = loop_count if not reverse_items else (max_index - loop_count)
        item_reg, index_reg = assigner.assign_user_loop_vars(
            get_tuple_item_values(
                tuple_values=tuple_values,
                tuple_wtype=tuple_wtype,
                index=item_index,
                target_wtype=tuple_wtype.types[item_index],
                source_location=statement_loc,
            ),
            UInt64Constant(
                value=loop_count if not reverse_index else (max_index - loop_count),
                source_location=None,
            ),
        )
        # a forward user index doubles as the dispatch counter; otherwise use the
        # dedicated internal counter
        if index_reg and not reverse_index:
            return index_reg
        else:
            return counter_reg
    # construct basic blocks
    body = context.block_builder.mkblock(loop_body, "for_body")
    footer, next_block = context.block_builder.mkblocks(
        "for_footer", "after_for", source_location=statement_loc
    )
    headers = {
        idx: context.block_builder.mkblock(statement_loc, f"for_header_{idx}")
        for idx in range(1, len(tuple_wtype.types))
    }
    # first item - assigned in current block
    loop_counter = assign_counter_and_user_vars(0)
    # body
    context.block_builder.goto(body)
    with context.block_builder.activate_open_block(body):
        current_loop_counter = _refresh_mutated_variable(context, loop_counter)
        with context.block_builder.enter_loop(on_continue=footer, on_break=next_block):
            loop_body.accept(context.visitor)
        # footer + follow-up headers, iff the loop body doesn't exit unconditionally on first item
        context.block_builder.goto(footer)
        if context.block_builder.try_activate_block(footer):
            # footer
            context.block_builder.terminate(
                GotoNth(
                    value=current_loop_counter,
                    blocks=list(headers.values()),
                    default=next_block,
                    source_location=statement_loc,
                )
            )
            # headers for remaining items
            for idx, header in headers.items():
                context.block_builder.activate_block(header)
                assign_counter_and_user_vars(idx)
                context.block_builder.goto(body)
    context.block_builder.activate_block(next_block)
def _refresh_mutated_variable(context: IRFunctionBuildContext, reg: Register) -> Register:
    """
    Perform an SSA read of *reg*'s underlying root operand (i.e. its name) in the
    currently active block, yielding the most recent definition of a mutated variable.
    Only needed when the generated IR contains actual control flow: if only the
    builder itself loops, the latest assigned register is still valid because the
    assignment happened within the same block.
    """
    active_block = context.block_builder.active_block
    return context.ssa.read_variable(reg.name, reg.ir_type, active_block)
|
algorandfoundation/puya
|
src/puya/ir/builder/iteration.py
|
Python
|
NOASSERTION
| 22,778 |
import typing
from collections.abc import Mapping, Sequence
import attrs
import puya.awst.txn_fields
from puya import log
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.function_traverser import FunctionTraverser
from puya.awst.to_code_visitor import ToCodeVisitor
from puya.awst.wtypes import WInnerTransactionFields
from puya.errors import CodeError, InternalError
from puya.ir.avm_ops import AVMOp
from puya.ir.builder._tuple_util import build_tuple_item_names
from puya.ir.builder._utils import assign, assign_intrinsic_op
from puya.ir.builder.blocks import BlocksBuilder
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import (
ConditionalBranch,
InnerTransactionField,
Intrinsic,
ITxnConstant,
Register,
UInt64Constant,
Value,
ValueProvider,
ValueTuple,
)
from puya.ir.ssa import BraunSSA
from puya.ir.types_ import IRType, wtype_to_ir_type
from puya.ir.utils import format_tuple_index
from puya.parse import SourceLocation
from puya.utils import StableSet, positive_index
logger = log.get_logger(__name__)
# every non-array TxnField: these are the results cached into registers after submit
# (array fields can't be cached the same way - see add_inner_transaction_submit_result_assignments)
_INNER_TRANSACTION_NON_ARRAY_FIELDS = [f for f in puya.awst.txn_fields.TxnField if not f.is_array]
@attrs.frozen(kw_only=True)
class CreateInnerTransactionFieldData:
    """Tracks the SSA register names used to stage values of one field of an itxn
    field set (identified by its variable name prefix)."""

    var_name: str
    field: puya.awst.txn_fields.TxnField
    field_counts: set[int] = attrs.field(factory=set)
    """The observed number of values for this field
    For non-array fields this will be either 0 or 1
    For array fields this will be 0 -> N
    Capturing these ranges allows generating much simpler IR
    """
    field_count_register_name: str

    @property
    def value_registers(self) -> dict[int, str]:
        """Map of value index -> staging register name, covering the largest
        observed value count.

        Uses default=0 so an empty field_counts set yields no registers instead of
        max() raising ValueError.
        """
        return {
            idx: self.get_value_register_name(idx)
            for idx in range(max(self.field_counts, default=0))
        }

    def get_value_register_name(self, index: int) -> str:
        """Return the staging register name for the index-th value of this field."""
        return f"{self.var_name}%%param_{self.field.immediate}_idx_{index}"
@attrs.frozen(kw_only=True)
class CreateInnerTransactionData:
    """Collects per-field staging data for a single itxn field-set variable."""

    var_name: str
    fields: dict[puya.awst.txn_fields.TxnField, CreateInnerTransactionFieldData] = attrs.field(
        factory=dict, init=False
    )

    def get_or_add_field_data(
        self, field: puya.awst.txn_fields.TxnField
    ) -> CreateInnerTransactionFieldData:
        """Return the staging data for *field*, creating and caching it on first use."""
        existing = self.fields.get(field)
        if existing is None:
            existing = CreateInnerTransactionFieldData(
                var_name=self.var_name,
                field=field,
                field_count_register_name=f"{self.var_name}%%{field.immediate}_length",
            )
            self.fields[field] = existing
        return existing
class InnerTransactionBuilder:
def __init__(self, context: IRFunctionBuildContext):
self.context = context
self._inner_txn_fields_data = dict[str, CreateInnerTransactionData]()
self._create_itxn_counter = iter(range(2**64 - 1))
@property
def ssa(self) -> BraunSSA:
return self.context.block_builder.ssa
@property
def block_builder(self) -> BlocksBuilder:
return self.context.block_builder
def handle_inner_transaction_field_assignments(
self, stmt: awst_nodes.AssignmentStatement
) -> bool:
# awst_nodes.CreateInnerTransaction is used to create a set of fields used in an inner txn
# When lowering this to IR field values and sizes are stored to local registers using
# the top level variable name as a prefix.
# For unused fields the size is 0
# For array fields the size goes up to the length of the array
# Other fields will have a size of 1
#
# Then itxn field sets are referenced the values are read using normal SSA functions
# this allows for complex use cases such as branches and loops to still function
value = stmt.value
source_location = stmt.source_location
target = stmt.target
match value:
case awst_nodes.CreateInnerTransaction(fields=fields):
((var_name, var_loc),) = _get_assignment_target_local_names(target, 1)
self._set_inner_transaction_fields(var_name, fields, var_loc)
return True
case awst_nodes.Copy(
value=awst_nodes.Expression(wtype=wtypes.WInnerTransactionFields()) as copy_source
):
((var_name, var_loc),) = _get_assignment_target_local_names(target, 1)
src_var_name = self._resolve_inner_txn_params_var_name(copy_source)
self._copy_inner_transaction_fields(var_name, src_var_name, var_loc)
return True
case awst_nodes.TupleExpression(items=tuple_items) as tuple_source if any(
isinstance(t, WInnerTransactionFields) for t in tuple_source.wtype.types
):
names = _get_assignment_target_local_names(target, len(tuple_items))
for (item_name, item_loc), item_value in zip(names, tuple_items, strict=True):
match item_value:
case awst_nodes.CreateInnerTransaction(fields=fields):
self._set_inner_transaction_fields(item_name, fields, item_loc)
case awst_nodes.Copy(
value=awst_nodes.Expression(
wtype=wtypes.WInnerTransactionFields()
) as copy_source
):
src_var_name = self._resolve_inner_txn_params_var_name(copy_source)
self._copy_inner_transaction_fields(item_name, src_var_name, item_loc)
case awst_nodes.Expression(wtype=wtypes.WInnerTransactionFields()):
raise CodeError(
"Unexpected Inner Transaction encountered in tuple", item_loc
)
case _:
value_provider = self.context.visitor.visit_expr(item_value)
assign(
self.context,
value_provider,
name=item_name,
register_location=item_loc,
assignment_location=source_location,
)
return True
case awst_nodes.Expression(wtype=wtypes.WInnerTransactionFields()):
raise CodeError(
"Inner Transaction params can only be reassigned using copy()",
source_location,
)
case _:
return False
def _visit_submit_expr(self, expr: awst_nodes.Expression) -> Sequence[Value]:
value_provider = self.context.visitor.visit_expr(expr)
match value_provider:
case ValueTuple(values=values):
return values
case Value() as value:
return (value,)
raise InternalError(
"Unexpected result for SubmitInnerTransaction expr", expr.source_location
)
def add_inner_transaction_submit_result_assignments(
self,
targets: Sequence[Value],
source_expr: awst_nodes.Expression,
ass_loc: SourceLocation | None,
) -> None:
"""Performs special handling for inner transaction related assignments"""
# awst_nodes.SubmitInnerTransaction is used to submit itxn field sets as an inner
# transaction group
# Then all non aray fields are assigned to local registers and cached
# This allows these fields to be read later even if additional itxn submissions have been
# performed.
# Array fields can not be cached in the same way currently, due to the lack of an
# efficient array type.
# targets are the assignment results, which will all be registers here,
# the only case they aren't registers is when assigning to storage, which will
# never be supported for itxn's because they're ephemeral
itx_targets = [
t for t in targets if isinstance(t, Register) and t.ir_type == IRType.itxn_group_idx
]
source_actions = SourceActionExtractor.visit(source_expr)
if len(itx_targets) != len(source_actions):
raise CodeError("unsupported inner transaction assignment", ass_loc)
for itxn_target, source_action in zip(itx_targets, source_actions, strict=True):
match source_action:
case _CopySource(var_name=src_var_name):
self._copy_submit_inner_transaction_fields(itxn_target.name, src_var_name)
case _AssignSubmit(is_last=is_last):
self._assign_submit_inner_transaction_fields(itxn_target, is_last=is_last)
case unexpected:
typing.assert_never(unexpected)
def handle_update_inner_transaction(self, call: awst_nodes.UpdateInnerTransaction) -> None:
var_name = self._resolve_inner_txn_params_var_name(call.itxn)
self._set_inner_transaction_fields(
var_name, call.fields, call.source_location, update=True
)
def handle_inner_transaction_field(
self, itxn_field: awst_nodes.InnerTransactionField
) -> ValueProvider:
src_loc = itxn_field.source_location
field = itxn_field.field
if field.is_array != bool(itxn_field.array_index):
raise InternalError(
"inconsistent array_index for inner transaction field",
src_loc,
)
itxn = self.context.visitor.visit_expr(itxn_field.itxn)
if not isinstance(itxn, Register | ITxnConstant):
itxn_field_desc = {itxn_field.itxn.accept(ToCodeVisitor())}
raise CodeError(
f"Could not resolve inner transaction group index for {itxn_field_desc}",
src_loc,
)
# use cached field if available
if isinstance(itxn, Register):
field_var_name = _get_txn_field_var_name(itxn.name, field.immediate)
if self.ssa.has_version(field_var_name):
return self.ssa.read_variable(
field_var_name, wtype_to_ir_type(field.wtype), self.block_builder.active_block
)
match itxn:
# use is_last register if it is defined
case Register(name=itxn_name) if self.ssa.has_version(_get_txn_is_last(itxn_name)):
is_last_in_group: Value = self.ssa.read_variable(
_get_txn_is_last(itxn_name),
IRType.bool,
self.block_builder.active_block,
)
# otherwise infer based on itxn expr
case _:
is_last_in_group = UInt64Constant(
value=int(_is_last_itxn(itxn_field.itxn)),
ir_type=IRType.bool,
source_location=src_loc,
)
return InnerTransactionField(
group_index=itxn,
is_last_in_group=is_last_in_group,
array_index=(
self.context.visitor.visit_and_materialise_single(itxn_field.array_index)
if itxn_field.array_index
else None
),
field=field.immediate,
type=wtype_to_ir_type(field.wtype),
source_location=src_loc,
)
def handle_submit_inner_transaction(
self, submit: awst_nodes.SubmitInnerTransaction
) -> Sequence[ITxnConstant]:
src_loc = submit.source_location
self.block_builder.add(
Intrinsic(
op=AVMOp.itxn_begin,
source_location=src_loc,
)
)
group_indexes = []
for group_index, param in enumerate(submit.itxns):
submit_var_loc = param.source_location
if group_index > 0:
self.block_builder.add(
Intrinsic(
op=AVMOp.itxn_next,
source_location=submit_var_loc,
)
)
param_var_name = self._resolve_inner_txn_params_var_name(param)
next_txn = self.block_builder.mkblock(submit_var_loc, "next_txn")
param_data = self._inner_txn_fields_data[param_var_name]
# with the current implementation, reversing the order itxn_field is called
# results in less stack manipulations as most values are naturally in the
# required order when stack allocation occurs
for field, field_data in reversed(param_data.fields.items()):
field_value_counts = sorted(field_data.field_counts)
if not field_value_counts or field_value_counts == [0]:
# nothing to do
continue
min_num_values, *remaining_values = field_value_counts
# values 0 -> min_num_values do not need to test
# values min_num_values -> max_num_values need to check if they are set
next_field = self.block_builder.mkblock(submit_var_loc, "next_field")
self._set_field_values(field_data, 0, min_num_values)
if remaining_values:
last_num_values = min_num_values
for next_num_values in remaining_values:
set_fields_blk = self.block_builder.mkblock(
submit_var_loc,
f"set_{field.immediate}_{last_num_values}_to_{next_num_values - 1}",
)
self.block_builder.terminate(
ConditionalBranch(
condition=self._get_is_field_count_gte(
field_data, next_num_values
),
non_zero=set_fields_blk,
zero=next_field,
source_location=submit_var_loc,
)
)
self.block_builder.activate_block(set_fields_blk)
self._set_field_values(field_data, last_num_values, next_num_values)
last_num_values = next_num_values
self.block_builder.goto(next_field)
self.block_builder.activate_block(next_field)
group_indexes.append(
ITxnConstant(
value=group_index,
source_location=submit_var_loc,
ir_type=IRType.itxn_group_idx,
)
)
self.block_builder.goto(next_txn)
self.block_builder.activate_block(next_txn)
self.block_builder.add(
Intrinsic(
op=AVMOp.itxn_submit,
source_location=src_loc,
)
)
return group_indexes
def _assign_submit_inner_transaction_fields(
self,
target: Register,
*,
is_last: bool,
) -> None:
var_name = target.name
is_last_in_group = assign(
self.context,
source=UInt64Constant(
value=int(is_last),
ir_type=IRType.bool,
source_location=None,
),
name=_get_txn_is_last(var_name),
register_location=None,
assignment_location=None,
)
for field in _INNER_TRANSACTION_NON_ARRAY_FIELDS:
field_reg = _get_txn_field_var_name(var_name, field.immediate)
assign(
context=self.context,
source=InnerTransactionField(
field=field.immediate,
group_index=target,
is_last_in_group=is_last_in_group,
type=wtype_to_ir_type(field.wtype),
array_index=None,
source_location=None,
),
name=field_reg,
register_location=None,
assignment_location=None,
)
def _copy_submit_inner_transaction_fields(self, dest_var_name: str, src_var_name: str) -> None:
active_block = self.context.block_builder.active_block
for field in _INNER_TRANSACTION_NON_ARRAY_FIELDS:
src_field = _get_txn_field_var_name(src_var_name, field.immediate)
dest_field = _get_txn_field_var_name(dest_var_name, field.immediate)
assign(
context=self.context,
source=self.context.ssa.read_variable(
src_field, wtype_to_ir_type(field.wtype), active_block
),
name=dest_field,
register_location=None,
assignment_location=None,
)
def _set_field_values(
self,
field_data: CreateInnerTransactionFieldData,
idx_from: int,
idx_to: int,
) -> None:
field = field_data.field
field_ir_type = wtype_to_ir_type(field.wtype)
for idx in range(idx_from, idx_to):
field_value = self.ssa.read_variable(
field_data.get_value_register_name(idx),
field_ir_type,
self.block_builder.active_block,
)
self.block_builder.add(
Intrinsic(
op=AVMOp.itxn_field,
source_location=None,
immediates=[field.immediate],
args=[field_value],
)
)
def _get_is_field_count_gte(
self, field_data: CreateInnerTransactionFieldData, count: int
) -> Register:
field = field_data.field
len_register = self.ssa.read_variable(
field_data.field_count_register_name,
IRType.uint64,
self.block_builder.active_block,
)
is_field_count_gte = assign_intrinsic_op(
self.context,
target=f"is_{field.immediate}_count_gte_{count}",
op=AVMOp.gte,
args=[len_register, count],
source_location=None,
)
return is_field_count_gte
def _set_inner_transaction_fields(
self,
var_name: str,
inner_txn_fields: Mapping[puya.awst.txn_fields.TxnField, awst_nodes.Expression],
var_loc: SourceLocation,
*,
update: bool = False,
) -> None:
param_data = self._inner_txn_fields_data.setdefault(
var_name, CreateInnerTransactionData(var_name=var_name)
)
# assign a unique constant to var_name, not used for anything directly, but prevents
# an undefined variable warning
assign(
context=self.context,
source=ITxnConstant(
value=next(self._create_itxn_counter),
source_location=var_loc,
ir_type=IRType.itxn_field_set,
),
name=var_name,
assignment_location=var_loc,
)
fields = StableSet.from_iter(inner_txn_fields)
if not update:
# add missing fields to end
for field in puya.awst.txn_fields.TxnField:
if field.is_inner_param and field not in fields:
fields.add(field)
for field in fields:
field_data = param_data.get_or_add_field_data(field)
arg_expr = inner_txn_fields.get(field)
values: Sequence[ValueProvider] = []
count_loc = arg_expr.source_location if arg_expr else var_loc
if arg_expr:
match self.context.visitor.visit_expr(arg_expr):
case ValueTuple(values=values):
pass
case ValueProvider() as vp:
values = [vp]
field_data.field_counts.add(len(values))
for idx, value in enumerate(values):
assign(
context=self.context,
source=value,
name=field_data.get_value_register_name(idx),
register_location=var_loc,
assignment_location=value.source_location,
)
assign(
context=self.context,
source=UInt64Constant(
value=len(values),
source_location=count_loc,
),
name=field_data.field_count_register_name,
register_location=var_loc,
assignment_location=count_loc,
)
def _copy_inner_transaction_fields(
self, dest_var_name: str, src_var_name: str, var_loc: SourceLocation
) -> None:
src_params_data = self._inner_txn_fields_data[src_var_name]
dest_params_data = self._inner_txn_fields_data.setdefault(
dest_var_name, CreateInnerTransactionData(var_name=dest_var_name)
)
for field in puya.awst.txn_fields.TxnField:
if not field.is_inner_param:
continue
src_field_data = src_params_data.get_or_add_field_data(field)
dest_field_data = dest_params_data.get_or_add_field_data(field)
dest_field_data.field_counts.update(src_field_data.field_counts)
for idx, src_field_register in src_field_data.value_registers.items():
dest_field_register = dest_field_data.get_value_register_name(idx)
assign(
context=self.context,
source=self.ssa.read_variable(
src_field_register,
wtype_to_ir_type(field.wtype),
self.block_builder.active_block,
),
name=dest_field_register,
assignment_location=var_loc,
)
assign(
context=self.context,
source=self.ssa.read_variable(
src_field_data.field_count_register_name,
IRType.uint64,
self.block_builder.active_block,
),
name=dest_field_data.field_count_register_name,
assignment_location=var_loc,
)
def _resolve_inner_txn_params_var_name(self, params: awst_nodes.Expression) -> str:
match params:
case awst_nodes.CreateInnerTransaction() as itxn:
var_name = self.context.next_tmp_name(description="inner_txn_params")
self._set_inner_transaction_fields(
var_name=var_name, inner_txn_fields=itxn.fields, var_loc=itxn.source_location
)
case awst_nodes.VarExpression(name=var_name):
pass
case awst_nodes.TupleItemExpression(
base=awst_nodes.VarExpression(name=name, wtype=wtypes.WTuple() as base_wtype),
index=index,
):
return format_tuple_index(base_wtype, name, index)
case awst_nodes.Copy(value=value):
return self._resolve_inner_txn_params_var_name(value)
case _:
raise InternalError(
"Could not resolve var_name for inner transaction params",
params.source_location,
)
return var_name
def _get_assignment_target_local_names(
    target: awst_nodes.Expression, expected_number: int
) -> Sequence[tuple[str, SourceLocation]]:
    """Resolve an assignment target to exactly `expected_number` local variable
    names (with locations).

    Raises CodeError for anything other than (tuples of) local variables, since
    inner transactions can only live in locals.
    """
    match target:
        case awst_nodes.VarExpression(name=var_name) if expected_number == 1:
            return [(var_name, target.source_location)]
        case awst_nodes.VarExpression(name=var_name, wtype=wtypes.WTuple() as var_wtype):
            # a single tuple-typed variable expands to its per-element names
            return [
                (format_tuple_index(var_wtype, var_name, idx), target.source_location)
                for idx in range(expected_number)
            ]
        case awst_nodes.TupleExpression(items=items) if expected_number == len(items) and all(
            isinstance(i, awst_nodes.VarExpression) for i in items
        ):
            items = typing.cast(Sequence[awst_nodes.VarExpression], items)
            return [(expr.name, expr.source_location) for expr in items]
        case awst_nodes.TupleItemExpression(
            base=awst_nodes.TupleExpression(wtype=tuple_wtype) as base, index=index
        ):
            # recurse into the base tuple and select the single targeted element
            tuple_names = _get_assignment_target_local_names(base, len(tuple_wtype.types))
            return [tuple_names[index]]
        case awst_nodes.FieldExpression(
            base=awst_nodes.TupleExpression(wtype=tuple_wtype) as base,
            name=name,
            source_location=name_loc,
        ):
            # named-tuple field access: translate the field name to an index first
            tuple_names = _get_assignment_target_local_names(base, len(tuple_wtype.types))
            index = tuple_wtype.name_to_index(name, name_loc)
            return [tuple_names[index]]
    raise CodeError(
        "Inner Transactions can only be assigned to local variables",
        target.source_location,
    )
@attrs.frozen
class _CopySource:
    """Submit-result assignment action: copy cached result fields from an
    existing itxn variable (identified by its register-name prefix)."""

    var_name: str
@attrs.frozen
class _AssignSubmit:
    """Submit-result assignment action: cache the fields of the index-th
    transaction of a freshly submitted group (is_last marks the final txn)."""

    index: int
    is_last: bool
# a submit-expression assignment decomposes into an ordered list of these actions
_SourceAction = _CopySource | _AssignSubmit
class SourceActionExtractor(FunctionTraverser):
    """Traverses an assignment's source expression and collects, in order, the
    _SourceAction required for each inner-transaction result being assigned."""

    def __init__(self) -> None:
        self._actions = list[_SourceAction]()

    @classmethod
    def visit(cls, node: awst_nodes.Expression) -> list[_SourceAction]:
        # convenience entry point: traverse node and return the collected actions
        visitor = cls()
        node.accept(visitor)
        return visitor._actions  # noqa: SLF001

    def visit_submit_inner_transaction(self, call: awst_nodes.SubmitInnerTransaction) -> None:
        # one assign-from-submit action per transaction in the submitted group
        itxns = len(call.itxns)
        self._actions.extend(
            _AssignSubmit(index=idx, is_last=idx == itxns - 1) for idx in range(itxns)
        )

    def visit_var_expression(self, expr: awst_nodes.VarExpression) -> None:
        # one copy action per itxn-group-index component of the variable
        # (a tuple-typed variable may contain several)
        self._actions.extend(
            [
                _CopySource(var_name=name)
                for name, ir_type in build_tuple_item_names(
                    expr.name, expr.wtype, expr.source_location
                )
                if ir_type == IRType.itxn_group_idx
            ]
        )

    def visit_inner_transaction_field(self, itxn_field: awst_nodes.InnerTransactionField) -> None:
        # this will consume any referenced inner transaction, so don't need to traverse it
        pass

    def visit_tuple_item_expression(self, expr: awst_nodes.TupleItemExpression) -> None:
        # record how many actions the base contributes, then narrow to the one
        # selected by the item index (when the result is itself an inner txn)
        start_len = len(self._actions)
        super().visit_tuple_item_expression(expr)
        added = self._actions[start_len:]
        # only keep the relevant action
        if isinstance(expr.wtype, wtypes.WInnerTransaction):
            self._actions[start_len:] = [added[expr.index]]

    def visit_slice_expression(self, expr: awst_nodes.SliceExpression) -> None:
        # narrow the base tuple's actions down to the sliced range, but only when
        # both slice bounds are compile-time uint64 constants
        start_len = len(self._actions)
        super().visit_slice_expression(expr)
        added = self._actions[start_len:]
        if not added or not isinstance(expr.base.wtype, wtypes.WTuple):
            return
        # determine constant indexes
        tuple_size = len(added)
        begin_index = 0 if expr.begin_index is None else _get_uint64_const(expr.begin_index)
        if begin_index is None:
            return
        begin_index = positive_index(begin_index, added)
        end_index = tuple_size if expr.end_index is None else _get_uint64_const(expr.end_index)
        if end_index is None:
            return
        end_index = positive_index(end_index, added)
        # include relevant items from sliced tuple
        self._actions[start_len:] = [
            added[idx]
            for idx in range(begin_index, end_index)
            if isinstance(expr.base.wtype.types[idx], wtypes.WInnerTransaction)
        ]
def _get_uint64_const(expr: awst_nodes.Expression) -> int | None:
if isinstance(expr, awst_nodes.IntegerConstant) and expr.wtype == wtypes.uint64_wtype:
return expr.value
return None
def _is_last_itxn(expr: awst_nodes.Expression) -> bool:
    """Determine whether `expr` refers to the final transaction of a submitted group."""
    # is last itxn if expr is a submit expr of size 1 OR
    if not isinstance(expr, awst_nodes.TupleItemExpression | awst_nodes.FieldExpression):
        return _is_submit_expr_of_size(expr, 1)
    # if expr is a tuple item expression with an index into the last item of a submit expr
    base = expr.base
    if not isinstance(base.wtype, wtypes.WTuple):
        return False
    index = (
        expr.index
        if isinstance(expr, awst_nodes.TupleItemExpression)
        else base.wtype.name_to_index(expr.name, expr.source_location)
    )
    tuple_size = len(base.wtype.types)
    # index may be negative (-1 = last element), so check both spellings
    if index == -1 or (index + 1) == tuple_size:
        return _is_submit_expr_of_size(base, tuple_size)
    else:
        return False
def _is_submit_expr_of_size(expr: awst_nodes.Expression, expected_group_size: int) -> bool:
    """Check whether `expr` submits an inner transaction group of exactly
    `expected_group_size` transactions (looking through SingleEvaluation wrappers)."""
    if isinstance(expr, awst_nodes.SubmitInnerTransaction):
        return len(expr.itxns) == expected_group_size
    if isinstance(expr, awst_nodes.SingleEvaluation):
        return _is_submit_expr_of_size(expr.source, expected_group_size)
    return False
def _get_txn_field_var_name(var_name: str, field: str) -> str:
return f"{var_name}.{field}"
def _get_txn_is_last(var_name: str) -> str:
return f"{var_name}._is_last"
|
algorandfoundation/puya
|
src/puya/ir/builder/itxn.py
|
Python
|
NOASSERTION
| 29,467 |
import typing
from collections.abc import Iterator, Sequence
import attrs
import puya.awst.visitors
import puya.ir.builder.storage
from puya import algo_constants, log, utils
from puya.avm import AVMType
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.awst.nodes import BigUIntBinaryOperator, UInt64BinaryOperator
from puya.awst.to_code_visitor import ToCodeVisitor
from puya.awst.txn_fields import TxnField
from puya.awst.wtypes import WInnerTransaction, WInnerTransactionFields
from puya.errors import CodeError, InternalError
from puya.ir.avm_ops import AVMOp
from puya.ir.builder import arc4, flow_control, storage
from puya.ir.builder._tuple_util import get_tuple_item_values
from puya.ir.builder._utils import (
OpFactory,
assert_value,
assign,
assign_intrinsic_op,
assign_targets,
assign_temp,
extract_const_int,
get_implicit_return_is_original,
get_implicit_return_out,
mktemp,
)
from puya.ir.builder.arc4 import ARC4_FALSE, ARC4_TRUE
from puya.ir.builder.assignment import (
handle_assignment,
handle_assignment_expr,
)
from puya.ir.builder.bytes import (
visit_bytes_intersection_slice_expression,
visit_bytes_slice_expression,
)
from puya.ir.builder.callsub import (
visit_puya_lib_call_expression,
visit_subroutine_call_expression,
)
from puya.ir.builder.iteration import handle_for_in_loop
from puya.ir.builder.itxn import InnerTransactionBuilder
from puya.ir.context import IRBuildContext
from puya.ir.models import (
AddressConstant,
BigUIntConstant,
BytesConstant,
CompiledContractReference,
CompiledLogicSigReference,
Fail,
Intrinsic,
InvokeSubroutine,
MethodConstant,
Op,
ProgramExit,
Subroutine,
SubroutineReturn,
TemplateVar,
UInt64Constant,
Value,
ValueProvider,
ValueTuple,
)
from puya.ir.types_ import (
AVMBytesEncoding,
IRType,
bytes_enc_to_avm_bytes_enc,
wtype_to_ir_type,
wtype_to_ir_types,
)
from puya.ir.utils import format_tuple_index
from puya.parse import SourceLocation
# IR lowering result types: expression visitors produce a ValueProvider
# (or None for void results); statement visitors produce no value.
TExpression: typing.TypeAlias = ValueProvider | None
TStatement: typing.TypeAlias = None
logger = log.get_logger(__name__)
class FunctionIRBuilder(
puya.awst.visitors.ExpressionVisitor[TExpression],
puya.awst.visitors.StatementVisitor[TStatement],
):
    def __init__(
        self, context: IRBuildContext, function: awst_nodes.Function, subroutine: Subroutine
    ):
        # function-scoped build context, linked back to this visitor
        self.context = context.for_function(function, subroutine, self)
        # helper encapsulating inner-transaction lowering state
        self._itxn = InnerTransactionBuilder(self.context)
        # cache so each SingleEvaluation node is lowered at most once
        self._single_eval_cache = dict[awst_nodes.SingleEvaluation, TExpression]()
        # memo of expression nodes already visited (int component paired with
        # the node — presumably an identity key; TODO confirm against usage)
        self._visited_exprs = dict[tuple[int, awst_nodes.Expression], TExpression]()
    @classmethod
    def build_body(
        cls,
        ctx: IRBuildContext,
        function: awst_nodes.Function,
        subroutine: Subroutine,
    ) -> None:
        """Lower an AWST function body into `subroutine.body` as SSA IR.

        Seeds implicit-return tracking variables for parameters, visits the
        body, terminates any fall-through path, then finalises and validates
        the SSA form.
        """
        logger.debug(f"Building IR for function {function.full_name}")
        builder = cls(ctx, function, subroutine)
        func_ctx = builder.context
        with func_ctx.log_exceptions():
            block_builder = func_ctx.block_builder
            for p in subroutine.parameters:
                if p.implicit_return:
                    # flag starts true: value is still the caller's original
                    assign(
                        func_ctx,
                        UInt64Constant(value=1, ir_type=IRType.bool, source_location=None),
                        name=get_implicit_return_is_original(p.name),
                        assignment_location=None,
                    )
                    # seed the output variable with the incoming parameter value
                    assign(
                        func_ctx,
                        p,
                        name=get_implicit_return_out(p.name),
                        assignment_location=None,
                    )
            function.body.accept(builder)
            final_block = block_builder.active_block
            if not final_block.terminated:
                if function.return_type != wtypes.void_wtype:
                    raise CodeError("not all paths return a value", function.body.source_location)
                # void function fell off the end: synthesise a return carrying
                # only the implicit-return outputs
                block_builder.terminate(
                    SubroutineReturn(
                        result=[
                            block_builder.ssa.read_variable(
                                get_implicit_return_out(p.name), p.ir_type, final_block
                            )
                            for p in subroutine.parameters
                            if p.implicit_return
                        ],
                        source_location=None,
                    )
                )
            subroutine.body = block_builder.finalise()
            subroutine.validate_with_ssa()
    def visit_copy(self, expr: awst_nodes.Copy) -> TExpression:
        """Lower a Copy node by materialising the value into a fresh temporary."""
        # For reference types, we need to clone the data
        # For value types, we can just visit the expression and the resulting read
        # will effectively be a copy. We assign the copy to a new register in case it is
        # mutated.
        match expr.value.wtype:
            case wtypes.ARC4Type(immutable=False):
                # Arc4 encoded types are value types
                original_value = self.visit_and_materialise_single(expr.value)
                return assign_temp(
                    temp_description="copy",
                    source=original_value,
                    source_location=expr.source_location,
                    context=self.context,
                )
        # any other wtype should not appear under a Copy node
        raise InternalError(
            f"Invalid source wtype for Copy {expr.value.wtype}", expr.source_location
        )
def visit_arc4_decode(self, expr: awst_nodes.ARC4Decode) -> TExpression:
return arc4.decode_expr(self.context, expr)
def visit_arc4_encode(self, expr: awst_nodes.ARC4Encode) -> TExpression:
return arc4.encode_expr(self.context, expr)
    def visit_compiled_contract(self, expr: awst_nodes.CompiledContract) -> TExpression:
        """Lower a compile-time contract reference.

        Produces a ValueTuple of the four program page references followed by
        the five allocation parameters, with caller-supplied overrides taking
        precedence over compiler-derived values.
        """
        prefix = self.context.options.template_vars_prefix if expr.prefix is None else expr.prefix
        template_variables = {
            prefix + k: self.visit_and_materialise_single(v)
            for k, v in expr.template_variables.items()
        }
        # TODO: remove implicit coupling
        # the coupling here is between the order of values in the ValueTuple
        # and the structure of the high level python type
        # once we support nested tuples, this coupling can be removed
        # and instead support names on WTuple, then each value can be accessed and lowered
        # via a FieldExpression
        program_pages = [
            CompiledContractReference(
                artifact=expr.contract,
                field=field,
                program_page=page,
                ir_type=IRType.bytes,
                source_location=expr.source_location,
                template_variables=template_variables,
            )
            for field in (
                TxnField.ApprovalProgramPages,
                TxnField.ClearStateProgramPages,
            )
            for page in (0, 1)
        ]
        return ValueTuple(
            values=program_pages
            + [
                (
                    self.visit_and_materialise_single(expr.allocation_overrides[field])
                    if field in expr.allocation_overrides
                    else CompiledContractReference(
                        artifact=expr.contract,
                        field=field,
                        ir_type=IRType.uint64,
                        source_location=expr.source_location,
                        template_variables=template_variables,
                    )
                )
                for field in (
                    TxnField.ExtraProgramPages,
                    TxnField.GlobalNumUint,
                    TxnField.GlobalNumByteSlice,
                    TxnField.LocalNumUint,
                    TxnField.LocalNumByteSlice,
                )
            ],
            source_location=expr.source_location,
        )
    def visit_compiled_logicsig(self, expr: awst_nodes.CompiledLogicSig) -> TExpression:
        """Lower a compile-time logicsig reference to a single-value tuple
        containing the program bytes reference."""
        prefix = self.context.options.template_vars_prefix if expr.prefix is None else expr.prefix
        template_variables = {
            prefix + k: self.visit_and_materialise_single(v)
            for k, v in expr.template_variables.items()
        }
        return ValueTuple(
            values=[
                CompiledLogicSigReference(
                    artifact=expr.logic_sig,
                    ir_type=IRType.bytes,
                    source_location=expr.source_location,
                    template_variables=template_variables,
                )
            ],
            source_location=expr.source_location,
        )
    def visit_assignment_statement(self, stmt: awst_nodes.AssignmentStatement) -> TStatement:
        """Lower an assignment statement.

        Inner-transaction field assignments are intercepted by the itxn
        builder; otherwise the generic assignment handler runs and any itxn
        submit results among the targets are then registered.
        """
        if not self._itxn.handle_inner_transaction_field_assignments(stmt):
            targets = handle_assignment_expr(
                self.context,
                target=stmt.target,
                value=stmt.value,
                assignment_location=stmt.source_location,
            )
            self._itxn.add_inner_transaction_submit_result_assignments(
                targets, stmt.value, stmt.source_location
            )
        return None
def visit_assignment_expression(self, expr: awst_nodes.AssignmentExpression) -> TExpression:
result = handle_assignment_expr(
self.context,
target=expr.target,
value=expr.value,
assignment_location=expr.source_location,
)
if not result:
# HOW DID YOU GET HERE
raise CodeError("Assignment expression did not return a result", expr.source_location)
if len(result) == 1:
return result[0]
else:
return ValueTuple(expr.source_location, list(result))
    def visit_biguint_postfix_unary_operation(
        self, expr: awst_nodes.BigUIntPostfixUnaryOperation
    ) -> TExpression:
        """Lower biguint `x++` / `x--`: assign the new value, return the old."""
        target_value = self.visit_and_materialise_single(expr.target)
        rhs = BigUIntConstant(value=1, source_location=expr.source_location)
        match expr.op:
            case awst_nodes.BigUIntPostfixUnaryOperator.increment:
                binary_op = awst_nodes.BigUIntBinaryOperator.add
            case awst_nodes.BigUIntPostfixUnaryOperator.decrement:
                binary_op = awst_nodes.BigUIntBinaryOperator.sub
            case never:
                typing.assert_never(never)
        new_value = create_biguint_binary_op(binary_op, target_value, rhs, expr.source_location)
        handle_assignment(
            self.context,
            target=expr.target,
            value=new_value,
            is_nested_update=False,
            assignment_location=expr.source_location,
        )
        # postfix semantics: the expression's value is the pre-mutation value
        return target_value
    def visit_uint64_postfix_unary_operation(
        self, expr: awst_nodes.UInt64PostfixUnaryOperation
    ) -> TExpression:
        """Lower uint64 `x++` / `x--`: assign the new value, return the old."""
        target_value = self.visit_and_materialise_single(expr.target)
        rhs = UInt64Constant(value=1, source_location=expr.source_location)
        match expr.op:
            case awst_nodes.UInt64PostfixUnaryOperator.increment:
                binary_op = awst_nodes.UInt64BinaryOperator.add
            case awst_nodes.UInt64PostfixUnaryOperator.decrement:
                binary_op = awst_nodes.UInt64BinaryOperator.sub
            case never:
                typing.assert_never(never)
        new_value = create_uint64_binary_op(binary_op, target_value, rhs, expr.source_location)
        handle_assignment(
            self.context,
            target=expr.target,
            value=new_value,
            is_nested_update=False,
            assignment_location=expr.source_location,
        )
        # postfix semantics: the expression's value is the pre-mutation value
        return target_value
def visit_uint64_binary_operation(self, expr: awst_nodes.UInt64BinaryOperation) -> TExpression:
left = self.visit_and_materialise_single(expr.left)
right = self.visit_and_materialise_single(expr.right)
return create_uint64_binary_op(expr.op, left, right, expr.source_location)
def visit_biguint_binary_operation(
self, expr: awst_nodes.BigUIntBinaryOperation
) -> TExpression:
left = self.visit_and_materialise_single(expr.left)
right = self.visit_and_materialise_single(expr.right)
return create_biguint_binary_op(expr.op, left, right, expr.source_location)
def visit_uint64_unary_operation(self, expr: awst_nodes.UInt64UnaryOperation) -> TExpression:
return Intrinsic(
op=AVMOp(expr.op),
args=[self.visit_and_materialise_single(expr.expr)],
source_location=expr.source_location,
)
def visit_bytes_unary_operation(self, expr: awst_nodes.BytesUnaryOperation) -> TExpression:
return Intrinsic(
op=AVMOp(f"b{expr.op}"),
args=[self.visit_and_materialise_single(expr.expr)],
source_location=expr.source_location,
)
    def visit_integer_constant(self, expr: awst_nodes.IntegerConstant) -> TExpression:
        """Lower an integer constant according to its wtype.

        Raises:
            CodeError: if the value is out of range for the target type.
            InternalError: if the wtype is not an integer-backed type.
        """
        match expr.wtype:
            case wtypes.uint64_wtype:
                if expr.value < 0 or expr.value.bit_length() > 64:
                    raise CodeError(f"invalid {expr.wtype} value", expr.source_location)
                return UInt64Constant(
                    value=expr.value,
                    source_location=expr.source_location,
                    teal_alias=expr.teal_alias,
                )
            case wtypes.biguint_wtype:
                if expr.value < 0 or expr.value.bit_length() > algo_constants.MAX_BIGUINT_BITS:
                    raise CodeError(f"invalid {expr.wtype} value", expr.source_location)
                return BigUIntConstant(value=expr.value, source_location=expr.source_location)
            case wtypes.ARC4UIntN(n=bit_size):
                # ARC-4 unsigned ints encode as fixed-width big-endian bytes
                num_bytes = bit_size // 8
                try:
                    arc4_result = expr.value.to_bytes(num_bytes, "big", signed=False)
                except OverflowError:
                    raise CodeError(f"invalid {expr.wtype} value", expr.source_location) from None
                return BytesConstant(
                    value=arc4_result,
                    encoding=AVMBytesEncoding.base16,
                    source_location=expr.source_location,
                )
            case _:
                raise InternalError(
                    f"Unhandled wtype {expr.wtype} for integer constant {expr.value}",
                    expr.source_location,
                )
    def visit_decimal_constant(self, expr: awst_nodes.DecimalConstant) -> TExpression:
        """Lower a decimal constant to its fixed-point ARC-4 byte encoding.

        Raises:
            CodeError: if the value is negative, non-finite, too precise, or
                too large for the target UFixedNxM type.
        """
        match expr.wtype:
            case wtypes.ARC4UFixedNxM(n=bit_size, m=precision):
                num_bytes = bit_size // 8
                sign, digits, exponent = expr.value.as_tuple()
                # the stored integer is the decimal scaled by 10**m
                adjusted_int = int("".join(map(str, digits)))
                if (
                    sign != 0  # negative
                    or not isinstance(exponent, int)  # infinite
                    or -exponent > precision  # too precise
                    or adjusted_int.bit_length() > bit_size  # too big
                ):
                    raise CodeError(f"invalid {expr.wtype} value", expr.source_location)
                return BytesConstant(
                    source_location=expr.source_location,
                    encoding=AVMBytesEncoding.base16,
                    value=adjusted_int.to_bytes(num_bytes, "big", signed=False),
                )
            case _:
                raise InternalError(
                    f"Unhandled wtype {expr.wtype} for decimal constant {expr.value}",
                    expr.source_location,
                )
    def visit_bool_constant(self, expr: awst_nodes.BoolConstant) -> TExpression:
        """Lower a bool constant: native bools become 0/1 uint64s, ARC-4 bools
        become their single-byte encodings."""
        match expr.wtype:
            case wtypes.bool_wtype:
                return UInt64Constant(
                    value=int(expr.value),
                    ir_type=IRType.bool,
                    source_location=expr.source_location,
                )
            case wtypes.arc4_bool_wtype:
                return BytesConstant(
                    value=(ARC4_TRUE if expr.value else ARC4_FALSE),
                    encoding=AVMBytesEncoding.base16,
                    ir_type=IRType.bytes,
                    source_location=expr.source_location,
                )
            case _:
                raise InternalError(
                    f"Unexpected wtype {expr.wtype} for BoolConstant", expr.source_location
                )
    def visit_bytes_constant(self, expr: awst_nodes.BytesConstant) -> BytesConstant:
        """Lower a bytes constant, enforcing the AVM maximum bytes length.

        Raises:
            CodeError: if the constant exceeds MAX_BYTES_LENGTH.
        """
        if len(expr.value) > algo_constants.MAX_BYTES_LENGTH:
            raise CodeError(f"invalid {expr.wtype} value", expr.source_location)
        return BytesConstant(
            value=expr.value,
            encoding=bytes_enc_to_avm_bytes_enc(expr.encoding),
            ir_type=wtype_to_ir_type(expr),
            source_location=expr.source_location,
        )
    def visit_string_constant(self, expr: awst_nodes.StringConstant) -> BytesConstant:
        """Lower a string constant to a UTF-8 bytes constant.

        ARC-4 strings additionally get a 2-byte big-endian length prefix.

        Raises:
            CodeError: if the value cannot be UTF-8 encoded or is too long.
        """
        try:
            value = expr.value.encode("utf8")
        except UnicodeError:
            value = None
        if value is None:
            raise CodeError(f"invalid {expr.wtype} value", expr.source_location)
        match expr.wtype:
            case wtypes.string_wtype:
                encoding = AVMBytesEncoding.utf8
            case wtypes.arc4_string_alias:
                encoding = AVMBytesEncoding.base16
                # ARC-4 dynamic-length prefix (to_bytes defaults to big-endian)
                value = len(value).to_bytes(2) + value
            case _:
                raise InternalError(
                    f"Unexpected wtype {expr.wtype} for StringConstant", expr.source_location
                )
        if len(value) > algo_constants.MAX_BYTES_LENGTH:
            raise CodeError(f"invalid {expr.wtype} value", expr.source_location)
        return BytesConstant(
            value=value,
            encoding=encoding,
            source_location=expr.source_location,
        )
@typing.override
def visit_void_constant(self, expr: awst_nodes.VoidConstant) -> TExpression:
return None
    def visit_address_constant(self, expr: awst_nodes.AddressConstant) -> TExpression:
        """Lower an address constant, validating the address format first.

        Raises:
            CodeError: if the value is not a valid Algorand address.
        """
        if not utils.valid_address(expr.value):
            # TODO: should this be here, or on IR model? there's pros and cons to each
            raise CodeError("invalid Algorand address", expr.source_location)
        return AddressConstant(
            value=expr.value,
            source_location=expr.source_location,
        )
    def visit_numeric_comparison_expression(
        self, expr: awst_nodes.NumericComparisonExpression
    ) -> TExpression:
        """Lower a numeric comparison to the matching AVM comparison op.

        Bytes-typed (biguint) operands use the `b`-prefixed byte-math variant
        of the operator.
        """
        left = self.visit_and_materialise_single(expr.lhs)
        right = self.visit_and_materialise_single(expr.rhs)
        if left.atype != right.atype:
            raise InternalError(
                "Numeric comparison between different numeric types", expr.source_location
            )
        op_code = expr.operator.value
        if left.atype == AVMType.bytes:
            # big-endian byte-array comparison variant
            op_code = "b" + op_code
        try:
            avm_op = AVMOp(op_code)
        except ValueError as ex:
            raise InternalError(
                f"Unmapped numeric comparison operator {expr.operator}", expr.source_location
            ) from ex
        return Intrinsic(
            op=avm_op,
            args=[left, right],
            source_location=expr.source_location,
        )
def visit_checked_maybe(self, expr: awst_nodes.CheckedMaybe) -> TExpression:
value, check = self.visit_and_materialise(expr.expr, ("value", "check"))
assert_value(
self.context,
check,
comment=expr.comment,
source_location=expr.source_location,
)
return value
    def _expand_tuple_var(self, name: str, wtype: wtypes.WTuple) -> Iterator[Value]:
        """Yield the SSA values backing each (recursively flattened) element of
        a tuple variable."""
        for idx, wt in enumerate(wtype.types):
            item_name = format_tuple_index(wtype, name, idx)
            if isinstance(wt, wtypes.WTuple):
                # nested tuples are stored flattened, so recurse
                yield from self._expand_tuple_var(item_name, wt)
            else:
                yield self.context.ssa.read_variable(
                    variable=item_name,
                    ir_type=wtype_to_ir_type(wt),
                    block=self.context.block_builder.active_block,
                )
    def visit_var_expression(self, expr: awst_nodes.VarExpression) -> TExpression:
        """Lower a variable read: tuples expand to all their backing values,
        scalars become a single SSA read."""
        if isinstance(expr.wtype, wtypes.WTuple):
            values = tuple(self._expand_tuple_var(expr.name, expr.wtype))
            return ValueTuple(values=values, source_location=expr.source_location)
        ir_type = wtype_to_ir_type(expr)
        variable = self.context.ssa.read_variable(
            expr.name, ir_type, self.context.block_builder.active_block
        )
        # attach this usage's source location to the returned value
        variable = attrs.evolve(variable, source_location=expr.source_location)
        return variable
    def _add_assert(
        self,
        condition_expr: awst_nodes.Expression | None,
        error_message: str | None,
        loc: SourceLocation,
    ) -> Intrinsic | None:
        """Build an assert op for condition_expr, or terminate with Fail.

        Returns None and emits nothing when the condition is constant-true;
        terminates the active block with Fail when there is no condition or
        it is constant-false; otherwise returns an (un-added) assert op.
        """
        condition_value = (
            self.visit_and_materialise_single(condition_expr) if condition_expr else None
        )
        if isinstance(condition_value, UInt64Constant):
            if condition_value.value:
                logger.warning("assertion is always true, ignoring", location=loc)
                return None
            else:
                # constant-false folds to an unconditional failure below
                condition_value = None
        if condition_value is None:
            self.context.block_builder.terminate(
                Fail(source_location=loc, error_message=error_message)
            )
            return None
        else:
            return Intrinsic(
                op=AVMOp("assert"),
                source_location=loc,
                args=[condition_value],
                error_message=error_message,
            )
    def visit_intrinsic_call(self, call: awst_nodes.IntrinsicCall) -> TExpression:
        """Lower an intrinsic call.

        `err`, `return` and `assert` are control-flow ops and handled
        specially; everything else becomes a plain Intrinsic node.
        """
        match call.op_code:
            case "err":
                # unconditional failure
                return self._add_assert(
                    condition_expr=None, error_message=None, loc=call.source_location
                )
            case "return":
                assert not call.immediates, f"return intrinsic had immediates: {call.immediates}"
                (arg_expr,) = call.stack_args
                exit_value = self.visit_and_materialise_single(arg_expr)
                self.context.block_builder.terminate(
                    ProgramExit(source_location=call.source_location, result=exit_value)
                )
                return None
            case "assert":
                (condition_expr,) = call.stack_args
                return self._add_assert(
                    condition_expr=condition_expr, error_message=None, loc=call.source_location
                )
            case _:
                args = [self.visit_and_materialise_single(arg) for arg in call.stack_args]
                return Intrinsic(
                    op=AVMOp(call.op_code),
                    source_location=call.source_location,
                    args=args,
                    immediates=list(call.immediates),
                    types=wtype_to_ir_types(call.wtype),
                )
    def visit_group_transaction_reference(
        self, ref: awst_nodes.GroupTransactionReference
    ) -> TExpression:
        """Lower a group transaction reference to its group index.

        If the reference is typed, also emits a runtime assertion that the
        transaction at that index has the expected TypeEnum.
        """
        index = self.visit_and_materialise_single(ref.index, "gtxn_idx")
        if (txn_type := ref.wtype.transaction_type) is not None:
            actual_type = assign_intrinsic_op(
                self.context,
                target="gtxn_type",
                op=AVMOp.gtxns,
                immediates=["TypeEnum"],
                args=[index],
                source_location=ref.source_location,
            )
            type_constant = UInt64Constant(
                value=txn_type.value, teal_alias=txn_type.name, source_location=ref.source_location
            )
            type_matches = assign_intrinsic_op(
                self.context,
                target="gtxn_type_matches",
                op=AVMOp.eq,
                args=[actual_type, type_constant],
                source_location=ref.source_location,
            )
            assert_value(
                self.context,
                type_matches,
                comment=f"transaction type is {txn_type.name}",
                source_location=ref.source_location,
            )
        return index
    def visit_create_inner_transaction(self, call: awst_nodes.CreateInnerTransaction) -> None:
        """A bare itxn creation (not assigned or submitted) is reported as an error."""
        # for semantic compatibility, this is an error, since we don't evaluate the args
        # here (there would be no point, if we hit this node on its own and not as part
        # of a submit or an assigment, it does nothing)
        logger.error(
            "statement has no effect, did you forget to submit?", location=call.source_location
        )
def visit_submit_inner_transaction(
self, submit: awst_nodes.SubmitInnerTransaction
) -> TExpression:
result = self._itxn.handle_submit_inner_transaction(submit)
if len(result) == 1:
return result[0]
return ValueTuple(
values=list(result),
source_location=submit.source_location,
)
def visit_update_inner_transaction(self, call: awst_nodes.UpdateInnerTransaction) -> None:
self._itxn.handle_update_inner_transaction(call)
def visit_inner_transaction_field(
self, itxn_field: awst_nodes.InnerTransactionField
) -> TExpression:
return self._itxn.handle_inner_transaction_field(itxn_field)
def visit_method_constant(self, expr: awst_nodes.MethodConstant) -> TExpression:
return MethodConstant(value=expr.value, source_location=expr.source_location)
def visit_tuple_expression(self, expr: awst_nodes.TupleExpression) -> TExpression:
items = list[Value]()
for item in expr.items:
nested_values = self.visit_and_materialise(item)
items.extend(nested_values)
return ValueTuple(
source_location=expr.source_location,
values=items,
)
    def visit_tuple_item_expression(self, expr: awst_nodes.TupleItemExpression) -> TExpression:
        """Lower indexing into a native tuple (compile-time value selection)
        or an ARC-4 tuple (runtime byte extraction)."""
        if isinstance(expr.base.wtype, wtypes.WTuple):
            # native tuple: select the right slice of the flattened values
            tup = self.visit_and_materialise(expr.base)
            return get_tuple_item_values(
                tuple_values=tup,
                tuple_wtype=expr.base.wtype,
                index=expr.index,
                target_wtype=expr.wtype,
                source_location=expr.source_location,
            )
        elif isinstance(expr.base.wtype, wtypes.ARC4Tuple):
            # ARC-4 tuple: decode the element from the encoded bytes
            base = self.visit_and_materialise_single(expr.base)
            return arc4.arc4_tuple_index(
                self.context,
                base=base,
                index=expr.index,
                wtype=expr.base.wtype,
                source_location=expr.source_location,
            )
        else:
            raise InternalError(
                f"Tuple indexing operation IR lowering"
                f" not implemented for base type {expr.base.wtype.name}",
                expr.source_location,
            )
    def visit_field_expression(self, expr: awst_nodes.FieldExpression) -> TExpression:
        """Lower named-field access on named tuples and ARC-4 structs by
        translating the field name to an index."""
        if isinstance(expr.base.wtype, wtypes.WStructType):
            raise NotImplementedError
        if isinstance(expr.base.wtype, wtypes.WTuple):
            index = expr.base.wtype.name_to_index(expr.name, expr.source_location)
            tup = self.visit_and_materialise(expr.base)
            return get_tuple_item_values(
                tuple_values=tup,
                tuple_wtype=expr.base.wtype,
                index=index,
                target_wtype=expr.wtype,
                source_location=expr.source_location,
            )
        if isinstance(expr.base.wtype, wtypes.ARC4Struct):
            # struct fields map positionally onto an ARC-4 tuple
            base = self.visit_and_materialise_single(expr.base)
            index = expr.base.wtype.names.index(expr.name)
            return arc4.arc4_tuple_index(
                self.context,
                base=base,
                index=index,
                wtype=expr.base.wtype,
                source_location=expr.source_location,
            )
        else:
            raise InternalError(
                f"Field access IR lowering"
                f" not implemented for base type {expr.base.wtype.name}",
                expr.source_location,
            )
def visit_intersection_slice_expression(
self, expr: awst_nodes.IntersectionSliceExpression
) -> TExpression:
if isinstance(expr.base.wtype, wtypes.WTuple):
return self._visit_tuple_slice(expr, expr.base.wtype)
elif expr.base.wtype == wtypes.bytes_wtype:
return visit_bytes_intersection_slice_expression(self.context, expr)
else:
raise InternalError(
f"IntersectionSlice operation IR lowering not implemented for {expr.wtype.name}",
expr.source_location,
)
def visit_slice_expression(self, expr: awst_nodes.SliceExpression) -> TExpression:
"""Slices an enumerable type."""
if isinstance(expr.base.wtype, wtypes.WTuple):
return self._visit_tuple_slice(expr, expr.base.wtype)
elif expr.base.wtype == wtypes.bytes_wtype:
return visit_bytes_slice_expression(self.context, expr)
else:
raise InternalError(
f"Slice operation IR lowering not implemented for {expr.wtype.name}",
expr.source_location,
)
    def _visit_tuple_slice(
        self,
        expr: awst_nodes.SliceExpression | awst_nodes.IntersectionSliceExpression,
        base_wtype: wtypes.WTuple,
    ) -> TExpression:
        """Select a contiguous run of a tuple's flattened values.

        Slice bounds must be compile-time constants; a missing begin index
        defaults to 0, a missing end index means "to the end".
        """
        tup = self.visit_and_materialise(expr.base)
        start_i = extract_const_int(expr.begin_index) or 0
        end_i = extract_const_int(expr.end_index)
        return get_tuple_item_values(
            tuple_values=tup,
            tuple_wtype=base_wtype,
            index=(start_i, end_i),
            target_wtype=expr.wtype,
            source_location=expr.source_location,
        )
    def visit_index_expression(self, expr: awst_nodes.IndexExpression) -> TExpression:
        """Lower runtime indexing of Bytes and ARC-4 arrays.

        Bytes indexing extracts a single byte; small constant indices use the
        cheaper immediate form of `extract`.
        """
        index = self.visit_and_materialise_single(expr.index)
        base = self.visit_and_materialise_single(expr.base)
        if expr.base.wtype == wtypes.bytes_wtype:
            # note: the below works because Bytes is immutable, so this index expression
            # can never appear as an assignment target
            if isinstance(index, UInt64Constant) and index.value <= 255:
                # extract's immediate start byte only holds values up to 255
                return Intrinsic(
                    op=AVMOp.extract,
                    args=[base],
                    immediates=[index.value, 1],
                    source_location=expr.source_location,
                )
            else:
                return Intrinsic(
                    op=AVMOp.extract3,
                    args=[
                        base,
                        index,
                        UInt64Constant(value=1, source_location=expr.source_location),
                    ],
                    source_location=expr.source_location,
                )
        elif isinstance(expr.base.wtype, wtypes.WArray):
            raise NotImplementedError
        elif isinstance(expr.base.wtype, wtypes.ARC4StaticArray | wtypes.ARC4DynamicArray):
            return arc4.arc4_array_index(
                self.context,
                array_wtype=expr.base.wtype,
                array=base,
                index=index,
                source_location=expr.source_location,
            )
        else:
            raise InternalError(
                f"Indexing operation IR lowering not implemented for {expr.wtype.name}",
                expr.source_location,
            )
def visit_conditional_expression(self, expr: awst_nodes.ConditionalExpression) -> TExpression:
return flow_control.handle_conditional_expression(self.context, expr)
    def visit_single_evaluation(self, expr: awst_nodes.SingleEvaluation) -> TExpression:
        """Lower a SingleEvaluation node at most once, caching its result.

        Repeated visits return the cached value(s) instead of re-emitting the
        source expression's side effects.
        """
        try:
            return self._single_eval_cache[expr]
        except KeyError:
            pass
        source = expr.source.accept(self)
        if not (source and source.types):
            # void result: nothing to materialise, cache None
            result: TExpression = None
        else:
            values = self.materialise_value_provider(source, description="awst_tmp")
            if len(values) == 1:
                (result,) = values
            else:
                result = ValueTuple(values=values, source_location=expr.source_location)
        self._single_eval_cache[expr] = result
        return result
def visit_app_state_expression(self, expr: awst_nodes.AppStateExpression) -> TExpression:
return storage.visit_app_state_expression(self.context, expr)
def visit_app_account_state_expression(
self, expr: awst_nodes.AppAccountStateExpression
) -> TExpression:
return storage.visit_app_account_state_expression(self.context, expr)
def visit_box_value_expression(self, expr: awst_nodes.BoxValueExpression) -> TExpression:
return puya.ir.builder.storage.visit_box_value(self.context, expr)
def visit_state_get_ex(self, expr: awst_nodes.StateGetEx) -> TExpression:
return storage.visit_state_get_ex(self.context, expr)
def visit_state_delete(self, statement: awst_nodes.StateDelete) -> TExpression:
return storage.visit_state_delete(self.context, statement)
def visit_state_get(self, expr: awst_nodes.StateGet) -> TExpression:
return storage.visit_state_get(self.context, expr)
def visit_state_exists(self, expr: awst_nodes.StateExists) -> TExpression:
return storage.visit_state_exists(self.context, expr)
    def visit_new_array(self, expr: awst_nodes.NewArray) -> TExpression:
        """Lower array construction; only ARC-4 arrays are currently supported."""
        match expr.wtype:
            case wtypes.ARC4Array():
                return arc4.encode_arc4_array(self.context, expr)
            case wtypes.WArray():
                raise NotImplementedError
            case _:
                typing.assert_never(expr.wtype)
def visit_bytes_comparison_expression(
self, expr: awst_nodes.BytesComparisonExpression
) -> TExpression:
left = self.visit_and_materialise_single(expr.lhs)
right = self.visit_and_materialise_single(expr.rhs)
op_code = expr.operator.value
try:
avm_op = AVMOp(op_code)
except ValueError as ex:
raise InternalError(
f"Unmapped bytes comparison operator {expr.operator}", expr.source_location
) from ex
return Intrinsic(
op=avm_op,
args=[left, right],
source_location=expr.source_location,
)
def visit_subroutine_call_expression(
self, expr: awst_nodes.SubroutineCallExpression
) -> TExpression:
return visit_subroutine_call_expression(self.context, expr)
def visit_puya_lib_call(self, call: awst_nodes.PuyaLibCall) -> TExpression:
return visit_puya_lib_call_expression(self.context, call)
def visit_bytes_binary_operation(self, expr: awst_nodes.BytesBinaryOperation) -> TExpression:
left = self.visit_and_materialise_single(expr.left)
right = self.visit_and_materialise_single(expr.right)
return create_bytes_binary_op(expr.op, left, right, expr.source_location)
    def visit_boolean_binary_operation(
        self, expr: awst_nodes.BooleanBinaryOperation
    ) -> TExpression:
        """Lower `and` / `or`.

        When the right operand is anything but a plain variable or bool
        constant, evaluating it unconditionally could change behaviour, so
        short-circuit evaluation is implemented with conditional branches
        writing a temporary. Otherwise both operands are evaluated eagerly
        and combined with a single AVM op.
        """
        if not isinstance(expr.right, awst_nodes.VarExpression | awst_nodes.BoolConstant):
            true_block, false_block, merge_block = self.context.block_builder.mkblocks(
                "bool_true", "bool_false", "bool_merge", source_location=expr.source_location
            )
            tmp_name = self.context.next_tmp_name(f"{expr.op}_result")
            flow_control.process_conditional(
                self.context, expr, true=true_block, false=false_block, loc=expr.source_location
            )
            self.context.block_builder.activate_block(true_block)
            assign(
                self.context,
                UInt64Constant(value=1, ir_type=IRType.bool, source_location=None),
                name=tmp_name,
                assignment_location=None,
            )
            self.context.block_builder.goto(merge_block)
            self.context.block_builder.activate_block(false_block)
            assign(
                self.context,
                UInt64Constant(value=0, ir_type=IRType.bool, source_location=None),
                name=tmp_name,
                assignment_location=None,
            )
            self.context.block_builder.goto(merge_block)
            self.context.block_builder.activate_block(merge_block)
            # the result is whichever assignment reached the merge block
            return self.context.ssa.read_variable(
                variable=tmp_name, ir_type=IRType.bool, block=merge_block
            )
        left = self.visit_and_materialise_single(expr.left)
        right = self.visit_and_materialise_single(expr.right)
        match expr.op:
            case "and":
                op = AVMOp.and_
            case "or":
                op = AVMOp.or_
            case _:
                raise InternalError(
                    f"Unexpected/unimplemented boolean operator in IR builder: {expr.op}",
                    expr.source_location,
                )
        return Intrinsic(
            op=op,
            args=[left, right],
            source_location=expr.source_location,
        )
def visit_not_expression(self, expr: awst_nodes.Not) -> TExpression:
negated = self.visit_and_materialise_single(expr.expr)
return Intrinsic(
op=AVMOp("!"),
args=[negated],
source_location=expr.source_location,
)
    def visit_reinterpret_cast(self, expr: awst_nodes.ReinterpretCast) -> TExpression:
        """Re-type a value without changing its runtime representation.

        Validates the underlying AVM types are compatible, then re-registers
        the value under the target IR type (no runtime op is emitted).
        """
        # should be a no-op for us, but we validate the cast here too
        source = self.visit_expr(expr.expr)
        (inner_ir_type,) = source.types
        outer_ir_type = wtype_to_ir_type(expr)
        # don't need to do anything further if ir types are the same
        if inner_ir_type == outer_ir_type:
            return source
        inner_avm_type = inner_ir_type.avm_type
        outer_avm_type = outer_ir_type.avm_type
        if inner_avm_type != outer_avm_type:
            raise InternalError(
                f"Tried to reinterpret {expr.expr.wtype} as {expr.wtype},"
                " but resulting AVM types are incompatible:"
                f" {inner_avm_type} and {outer_avm_type}, respectively",
                expr.source_location,
            )
        # assign to a fresh register carrying the new IR type
        target = mktemp(
            self.context,
            outer_ir_type,
            description=f"reinterpret_{outer_ir_type.name}",
            source_location=expr.source_location,
        )
        assign_targets(
            self.context,
            source=source,
            targets=[target],
            assignment_location=expr.source_location,
        )
        return target
    def visit_block(self, block: awst_nodes.Block) -> TStatement:
        """Lower a block statement; labelled blocks start a new IR block so
        they can be targeted by goto."""
        if block.label:
            ir_block = self.context.block_builder.mkblock(block)
            self.context.block_builder.goto(ir_block)
            self.context.block_builder.activate_block(ir_block)
        for stmt in block.body:
            stmt.accept(self)
def visit_goto(self, statement: awst_nodes.Goto) -> TStatement:
self.context.block_builder.goto_label(statement.target, statement.source_location)
def visit_if_else(self, stmt: awst_nodes.IfElse) -> TStatement:
flow_control.handle_if_else(self.context, stmt)
def visit_switch(self, statement: awst_nodes.Switch) -> TStatement:
flow_control.handle_switch(self.context, statement)
def visit_while_loop(self, statement: awst_nodes.WhileLoop) -> TStatement:
flow_control.handle_while_loop(self.context, statement)
def visit_loop_exit(self, statement: awst_nodes.LoopExit) -> TStatement:
self.context.block_builder.loop_break(statement.source_location)
    def visit_return_statement(self, statement: awst_nodes.ReturnStatement) -> TStatement:
        """Lower a return, appending implicit-return parameter outputs.

        Raises:
            CodeError: if the resulting value types don't match the
                subroutine's declared return types.
        """
        if statement.value is not None:
            result = list(self.visit_and_materialise(statement.value))
        else:
            result = []
        # implicit-return parameters are passed back after the explicit result
        for param in self.context.subroutine.parameters:
            if param.implicit_return:
                result.append(
                    self.context.ssa.read_variable(
                        param.name,
                        param.ir_type,
                        self.context.block_builder.active_block,
                    )
                )
        return_types = [r.ir_type for r in result]
        if return_types != self.context.subroutine.returns:
            raise CodeError(
                f"invalid return type {return_types}, expected {self.context.subroutine.returns}",
                statement.source_location,
            )
        self.context.block_builder.terminate(
            SubroutineReturn(
                source_location=statement.source_location,
                result=result,
            )
        )
def visit_assert_expression(self, expr: awst_nodes.AssertExpression) -> TStatement:
op = self._add_assert(
condition_expr=expr.condition,
error_message=expr.error_message,
loc=expr.source_location,
)
if op:
self.context.block_builder.add(op)
def visit_template_var(self, expr: awst_nodes.TemplateVar) -> TExpression:
return TemplateVar(
name=expr.name,
ir_type=wtype_to_ir_type(expr.wtype),
source_location=expr.source_location,
)
def visit_loop_continue(self, statement: awst_nodes.LoopContinue) -> TStatement:
self.context.block_builder.loop_continue(statement.source_location)
    def visit_expression_statement(self, statement: awst_nodes.ExpressionStatement) -> TStatement:
        """Lower a bare expression statement, adding any resulting op to the
        active block and sanity-checking void results."""
        # NOTE: popping of ignored return values should happen at code gen time
        result = self._visit_and_check_for_double_eval(statement.expr)
        if result is None:
            wtype = statement.expr.wtype
            match wtype:
                case wtypes.void_wtype:
                    pass
                case _ if (isinstance(wtype, WInnerTransaction | WInnerTransactionFields)):
                    # inner transaction wtypes aren't true expressions
                    pass
                case _:
                    raise InternalError(
                        f"Expression statement with type {statement.expr.wtype} "
                        f"generated no result",
                        statement.source_location,
                    )
        elif isinstance(result, Op):
            self.context.block_builder.add(result)
        # If we get a Value (e.g. a Register or some such) it's something that's being
        # discarded effectively.
        # The frontend should have already warned about this
    def visit_uint64_augmented_assignment(
        self, statement: awst_nodes.UInt64AugmentedAssignment
    ) -> TStatement:
        """Lower `target op= value` for uint64: compute then reassign."""
        target_value = self.visit_and_materialise_single(statement.target)
        rhs = self.visit_and_materialise_single(statement.value)
        expr = create_uint64_binary_op(statement.op, target_value, rhs, statement.source_location)
        handle_assignment(
            self.context,
            target=statement.target,
            value=expr,
            is_nested_update=False,
            assignment_location=statement.source_location,
        )
def visit_biguint_augmented_assignment(
self, statement: awst_nodes.BigUIntAugmentedAssignment
) -> TStatement:
target_value = self.visit_and_materialise_single(statement.target)
rhs = self.visit_and_materialise_single(statement.value)
expr = create_biguint_binary_op(statement.op, target_value, rhs, statement.source_location)
handle_assignment(
self.context,
target=statement.target,
value=expr,
is_nested_update=False,
assignment_location=statement.source_location,
)
    def visit_bytes_augmented_assignment(
        self, statement: awst_nodes.BytesAugmentedAssignment
    ) -> TStatement:
        """Lower `target <op>= value` for bytes-like operands.

        ARC-4 strings route through the specialised ARC-4 concat helper rather
        than the plain AVM bytes ops.
        """
        if statement.target.wtype == wtypes.arc4_string_alias:
            value: ValueProvider = arc4.concat_values(
                self.context, statement.target, statement.value, statement.source_location
            )
        else:
            # evaluate target before value, matching source order
            target_value = self.visit_and_materialise_single(statement.target)
            rhs = self.visit_and_materialise_single(statement.value)
            value = create_bytes_binary_op(
                statement.op, target_value, rhs, statement.source_location
            )
        handle_assignment(
            self.context,
            target=statement.target,
            value=value,
            is_nested_update=False,
            assignment_location=statement.source_location,
        )
    def visit_enumeration(self, expr: awst_nodes.Enumeration) -> TStatement:
        # enumeration nodes are only handled directly by for-in-loop lowering
        raise CodeError("Nested enumeration is not currently supported", expr.source_location)
    def visit_reversed(self, expr: awst_nodes.Reversed) -> TExpression:
        # reversed nodes are only handled directly by for-in-loop lowering
        raise CodeError("Reversed is not valid outside of an enumeration", expr.source_location)
    def visit_for_in_loop(self, statement: awst_nodes.ForInLoop) -> TStatement:
        """Lower a for-in loop; delegated entirely to the dedicated loop builder."""
        handle_for_in_loop(self.context, statement)
    def visit_new_struct(self, expr: awst_nodes.NewStruct) -> TExpression:
        """Lower struct construction; only ARC-4 structs are currently supported."""
        match expr.wtype:
            case wtypes.WStructType():
                # native structs have no IR lowering yet
                raise NotImplementedError
            case wtypes.ARC4Struct() as arc4_struct_wtype:
                return arc4.encode_arc4_struct(self.context, expr, arc4_struct_wtype)
            case _:
                typing.assert_never(expr.wtype)
def visit_array_pop(self, expr: awst_nodes.ArrayPop) -> TExpression:
source_location = expr.source_location
match expr.base.wtype:
case wtypes.ARC4DynamicArray() as array_wtype:
return arc4.pop_arc4_array(self.context, expr, array_wtype)
case _:
raise InternalError(
f"Unsupported target for array pop: {expr.base.wtype}", source_location
)
def visit_array_concat(self, expr: awst_nodes.ArrayConcat) -> TExpression:
return arc4.concat_values(
self.context,
left_expr=expr.left,
right_expr=expr.right,
source_location=expr.source_location,
)
def visit_array_extend(self, expr: awst_nodes.ArrayExtend) -> TExpression:
concat_result = arc4.concat_values(
self.context,
left_expr=expr.base,
right_expr=expr.other,
source_location=expr.source_location,
)
return arc4.handle_arc4_assign(
self.context,
target=expr.base,
value=concat_result,
is_nested_update=True,
source_location=expr.source_location,
)
def visit_arc4_router(self, expr: awst_nodes.ARC4Router) -> TExpression:
root = self.context.root
if not isinstance(root, awst_nodes.Contract):
raise CodeError(
"cannot create ARC4 router outside of a contract", expr.source_location
)
return InvokeSubroutine(
target=self.context.routers[root.id],
args=[],
source_location=expr.source_location,
)
    def visit_emit(self, expr: awst_nodes.Emit) -> TExpression:
        """Lower an event emission to a `log` op.

        The logged payload is the event selector (method constant derived from
        the signature) concatenated with the materialised event data.
        """
        factory = OpFactory(self.context, expr.source_location)
        value = self.context.visitor.visit_and_materialise_single(expr.value)
        prefix = MethodConstant(value=expr.signature, source_location=expr.source_location)
        event = factory.concat(prefix, value, "event")
        self.context.block_builder.add(
            Intrinsic(
                op=AVMOp("log"),
                args=[event],
                source_location=expr.source_location,
            )
        )
        return None
    def visit_range(self, node: awst_nodes.Range) -> TExpression:
        # range nodes are only valid as the sequence of a for-in loop
        raise CodeError("unexpected range location", node.source_location)
    def visit_and_materialise_single(
        self, expr: awst_nodes.Expression, temp_description: str = "tmp"
    ) -> Value:
        """Translate an AWST Expression into a single Value.

        Must only be used for expressions known to be single-valued; a
        multi-valued result raises InternalError.
        """
        values = self.visit_and_materialise(expr, temp_description=temp_description)
        try:
            (value,) = values
        except ValueError as ex:
            raise InternalError(
                "visit_and_materialise_single should not be used when"
                f" an expression could be multi-valued, expression was: {expr}",
                expr.source_location,
            ) from ex
        return value
    def visit_and_materialise(
        self, expr: awst_nodes.Expression, temp_description: str | Sequence[str] = "tmp"
    ) -> Sequence[Value]:
        """Translate an AWST expression and materialise all of its results as Values."""
        value_seq_or_provider = self._visit_and_check_for_double_eval(
            expr, materialise_as=temp_description
        )
        if value_seq_or_provider is None:
            raise InternalError(
                "No value produced by expression IR conversion", expr.source_location
            )
        return self.materialise_value_provider(value_seq_or_provider, description=temp_description)
def visit_expr(self, expr: awst_nodes.Expression) -> ValueProvider:
"""Visit the expression and ensure result is not None"""
value_seq_or_provider = self._visit_and_check_for_double_eval(expr)
if value_seq_or_provider is None:
raise InternalError(
"No value produced by expression IR conversion", expr.source_location
)
return value_seq_or_provider
    def _visit_and_check_for_double_eval(
        self, expr: awst_nodes.Expression, *, materialise_as: str | Sequence[str] | None = None
    ) -> ValueProvider | None:
        """Visit `expr` once, caching results so revisits can safely reuse them.

        Revisiting an expression whose prior result was NOT materialised would
        re-emit its side effects, so that case raises an InternalError.
        """
        # explicit SingleEvaluation nodes already handle this
        if isinstance(expr, awst_nodes.SingleEvaluation):
            return expr.accept(self)
        # include the expression in the key to ensure the lifetime of the
        # expression is as long as the cache.
        # Temporary nodes may end up with the same id if nothing is referencing them
        # e.g. such as used in _update_implicit_out_var
        expr_id = id(expr), expr
        try:
            result = self._visited_exprs[expr_id]
        except KeyError:
            pass
        else:
            # cache hit: only materialised results (Value / ValueTuple) are safe to reuse
            if isinstance(result, ValueProvider) and not isinstance(result, ValueTuple | Value):
                raise InternalError(
                    "double evaluation of expression without materialization", expr.source_location
                )
            expr_str = expr.accept(ToCodeVisitor())
            logger.debug(
                f"encountered already materialized expression ({expr_str}),"
                f" reusing result: {result!s}",
                location=expr.source_location,
            )
            return result
        source = expr.accept(self)
        if materialise_as is None or not (source and source.types):
            result = source
        else:
            # materialise now so any later revisit reuses the assigned registers
            values = self.materialise_value_provider(source, description=materialise_as)
            if len(values) == 1:
                (result,) = values
            else:
                result = ValueTuple(values=values, source_location=expr.source_location)
        self._visited_exprs[expr_id] = result
        return result
    def materialise_value_provider(
        self, provider: ValueProvider, description: str | Sequence[str]
    ) -> list[Value]:
        """
        Given a ValueProvider with arity of N, return a Value sequence of length N.

        Anything which is already a Value is passed through without change.

        Otherwise, the result is assigned to temporary register(s), which are returned.
        `description` seeds the temporary names: a single string is replicated across
        all N results, otherwise one description per result is expected.
        """
        if isinstance(provider, Value):
            return [provider]
        if isinstance(provider, ValueTuple):
            return list(provider.values)
        # not yet materialised: assign each result to a fresh temporary
        ir_types = provider.types
        if not ir_types:
            raise InternalError(
                "Attempted to assign from expression that has no result", provider.source_location
            )
        if isinstance(description, str):
            temp_description: Sequence[str] = [description] * len(ir_types)
        else:
            temp_description = description
        targets = [
            mktemp(self.context, ir_type, provider.source_location, description=descr)
            for ir_type, descr in zip(ir_types, temp_description, strict=True)
        ]
        assign_targets(
            context=self.context,
            source=provider,
            targets=targets,
            # TODO: should this be the source location of the site forcing materialisation?
            assignment_location=provider.source_location,
        )
        return list[Value](targets)
def create_uint64_binary_op(
    op: UInt64BinaryOperator, left: Value, right: Value, source_location: SourceLocation
) -> Intrinsic:
    """Map a uint64 AWST binary operator onto the corresponding AVM intrinsic.

    A few operators have AVM names that differ from the AWST operator value;
    everything else maps directly via the operator's string value.
    """
    special_cases = {
        UInt64BinaryOperator.floor_div: AVMOp.div_floor,
        UInt64BinaryOperator.pow: AVMOp.exp,
        UInt64BinaryOperator.lshift: AVMOp.shl,
        UInt64BinaryOperator.rshift: AVMOp.shr,
    }
    avm_op = special_cases.get(op)
    if avm_op is None:
        try:
            avm_op = AVMOp(op.value)
        except ValueError as ex:
            raise InternalError(
                f"Unhandled uint64 binary operator: {op}", source_location
            ) from ex
    return Intrinsic(op=avm_op, args=[left, right], source_location=source_location)
def create_biguint_binary_op(
op: BigUIntBinaryOperator, left: Value, right: Value, source_location: SourceLocation
) -> Intrinsic:
avm_op: AVMOp
match op:
case BigUIntBinaryOperator.floor_div:
avm_op = AVMOp.div_floor_bytes
case _:
try:
avm_op = AVMOp("b" + op.value)
except ValueError as ex:
raise InternalError(
f"Unhandled uint64 binary operator: {op}", source_location
) from ex
return Intrinsic(
op=avm_op, args=[left, right], types=(IRType.biguint,), source_location=source_location
)
def create_bytes_binary_op(
op: awst_nodes.BytesBinaryOperator, lhs: Value, rhs: Value, source_location: SourceLocation
) -> ValueProvider:
match op:
case awst_nodes.BytesBinaryOperator.add:
return Intrinsic(
op=AVMOp.concat,
args=[lhs, rhs],
source_location=source_location,
)
case awst_nodes.BytesBinaryOperator.bit_and:
return Intrinsic(
op=AVMOp.bitwise_and_bytes,
args=[lhs, rhs],
source_location=source_location,
)
case awst_nodes.BytesBinaryOperator.bit_or:
return Intrinsic(
op=AVMOp.bitwise_or_bytes,
args=[lhs, rhs],
source_location=source_location,
)
case awst_nodes.BytesBinaryOperator.bit_xor:
return Intrinsic(
op=AVMOp.bitwise_xor_bytes,
args=[lhs, rhs],
source_location=source_location,
)
raise InternalError("Unsupported BytesBinaryOperator: " + op)
def get_comparison_op_for_wtype(
    numeric_comparison_equivalent: awst_nodes.NumericComparison, wtype: wtypes.WType
) -> AVMOp:
    """Select the AVM comparison op for the given operand type.

    biguint comparisons use the "b"-prefixed byte-math variants; uint64 maps
    directly; bytes supports only (in)equality. Anything else is an internal error.
    """
    if wtype == wtypes.biguint_wtype:
        return AVMOp("b" + numeric_comparison_equivalent)
    if wtype == wtypes.uint64_wtype:
        return AVMOp(numeric_comparison_equivalent)
    if wtype == wtypes.bytes_wtype:
        if numeric_comparison_equivalent == awst_nodes.NumericComparison.eq:
            return AVMOp.eq
        if numeric_comparison_equivalent == awst_nodes.NumericComparison.ne:
            return AVMOp.neq
    raise InternalError(
        f"unsupported operation of {numeric_comparison_equivalent} on type of {wtype}"
    )
|
algorandfoundation/puya
|
src/puya/ir/builder/main.py
|
Python
|
NOASSERTION
| 56,636 |
import typing
from puya.avm import AVMType
from puya.awst import (
nodes as awst_nodes,
wtypes,
)
from puya.ir import intrinsic_factory
from puya.ir.avm_ops import AVMOp
from puya.ir.builder._utils import assert_value, assign_targets, mktemp
from puya.ir.context import IRFunctionBuildContext
from puya.ir.models import Intrinsic, UInt64Constant, Value, ValueProvider, ValueTuple
from puya.ir.types_ import IRType, wtype_to_ir_type
from puya.parse import SourceLocation
def visit_app_state_expression(
    context: IRFunctionBuildContext, expr: awst_nodes.AppStateExpression
) -> ValueProvider:
    """Read a global-state value, asserting at runtime that the key exists."""
    loc = expr.source_location
    maybe_value, exists = _build_state_get_ex(context, expr, loc)
    # TODO: add specific (unsafe) optimisation flag to allow skipping this check
    assert_value(
        context,
        value=exists,
        comment=expr.exists_assertion_message or "state exists",
        source_location=loc,
    )
    return maybe_value
def visit_app_account_state_expression(
    context: IRFunctionBuildContext, expr: awst_nodes.AppAccountStateExpression
) -> ValueProvider:
    """Read a local (per-account) state value, asserting at runtime that it exists."""
    loc = expr.source_location
    maybe_value, exists = _build_state_get_ex(context, expr, loc)
    # TODO: add specific (unsafe) optimisation flag to allow skipping this check
    assert_value(
        context,
        value=exists,
        comment=expr.exists_assertion_message or "state exists for account",
        source_location=loc,
    )
    return maybe_value
def visit_box_value(
    context: IRFunctionBuildContext, expr: awst_nodes.BoxValueExpression
) -> ValueProvider:
    """Read a box's contents, asserting at runtime that the box exists."""
    loc = expr.source_location
    maybe_value, exists = _build_state_get_ex(context, expr, loc)
    # TODO: add specific (unsafe) optimisation flag to allow skipping this check
    assert_value(
        context,
        value=exists,
        comment=expr.exists_assertion_message or "box exists",
        source_location=loc,
    )
    return maybe_value
def visit_state_exists(
    context: IRFunctionBuildContext, expr: awst_nodes.StateExists
) -> ValueProvider:
    """Lower a state-existence check; only the exists flag of the get_ex result is used."""
    maybe_result = _build_state_get_ex(
        context, expr.field, expr.source_location, for_existence_check=True
    )
    return maybe_result[1]
def visit_state_get(context: IRFunctionBuildContext, expr: awst_nodes.StateGet) -> ValueProvider:
    """Lower `state.get(default)`: select the stored value if present, else the default."""
    # the default is evaluated unconditionally, before the state read
    default = context.visitor.visit_and_materialise_single(expr.default)
    maybe_value, exists = _build_state_get_ex(context, expr.field, expr.source_location)
    return intrinsic_factory.select(
        condition=exists,
        true=maybe_value,
        false=default,
        type_=wtype_to_ir_type(expr.wtype),
        source_location=expr.source_location,
    )
def visit_state_get_ex(
    context: IRFunctionBuildContext, expr: awst_nodes.StateGetEx
) -> ValueProvider:
    """Lower a maybe-get, producing a (value, exists) tuple."""
    maybe_value, exists = _build_state_get_ex(context, expr.field, expr.source_location)
    return ValueTuple(
        values=[maybe_value, exists],
        source_location=expr.source_location,
    )
def visit_state_delete(
    context: IRFunctionBuildContext, statement: awst_nodes.StateDelete
) -> ValueProvider | None:
    """Lower deletion of a box / global-state / local-state entry.

    Local-state deletion additionally passes the target account before the key.
    """
    match statement.field:
        case awst_nodes.BoxValueExpression(key=awst_key):
            op = AVMOp.box_del
            awst_account = None
        case awst_nodes.AppStateExpression(key=awst_key):
            op = AVMOp.app_global_del
            awst_account = None
        case awst_nodes.AppAccountStateExpression(key=awst_key, account=awst_account):
            op = AVMOp.app_local_del
        case _:
            typing.assert_never(statement.field)
    args = []
    if awst_account is not None:
        # app_local_del expects (account, key)
        account_value = context.visitor.visit_and_materialise_single(awst_account)
        args.append(account_value)
    key_value = context.visitor.visit_and_materialise_single(awst_key)
    args.append(key_value)
    context.block_builder.add(
        Intrinsic(op=op, args=args, source_location=statement.source_location)
    )
    return None
def _build_state_get_ex(
    context: IRFunctionBuildContext,
    expr: (
        awst_nodes.AppAccountStateExpression
        | awst_nodes.AppStateExpression
        | awst_nodes.BoxValueExpression
    ),
    source_location: SourceLocation,
    *,
    for_existence_check: bool = False,
) -> tuple[Value, Value]:
    """Emit the appropriate *_get_ex style AVM op for a storage expression.

    Returns a pair of temporaries: (maybe_value, exists). For boxes,
    `for_existence_check=True` uses the cheaper `box_len` (the value slot then
    holds the length instead of the contents).
    """
    key = context.visitor.visit_and_materialise_single(expr.key)
    args: list[Value]
    true_value_ir_type = get_ex_value_ir_type = wtype_to_ir_type(expr.wtype)
    convert_op: AVMOp | None = None
    if isinstance(expr, awst_nodes.AppStateExpression):
        # application offset 0 == the currently executing application
        current_app_offset = UInt64Constant(value=0, source_location=expr.source_location)
        args = [current_app_offset, key]
        op = AVMOp.app_global_get_ex
    elif isinstance(expr, awst_nodes.AppAccountStateExpression):
        current_app_offset = UInt64Constant(value=0, source_location=expr.source_location)
        op = AVMOp.app_local_get_ex
        account = context.visitor.visit_and_materialise_single(expr.account)
        args = [account, current_app_offset, key]
    else:
        args = [key]
        if for_existence_check:
            # box_len's value result is the (uint64) length, which is discarded anyway
            get_ex_value_ir_type = IRType.uint64
            op = AVMOp.box_len
        else:
            op = AVMOp.box_get
            match wtypes.persistable_stack_type(expr.wtype, source_location):
                case AVMType.uint64:
                    # box contents are always raw bytes; decode to uint64 after reading
                    get_ex_value_ir_type = IRType.bytes
                    convert_op = AVMOp.btoi
                case AVMType.bytes:
                    pass
                case invalid:
                    typing.assert_never(invalid)
    get_ex = Intrinsic(
        op=op,
        args=args,
        types=[get_ex_value_ir_type, IRType.bool],
        source_location=source_location,
    )
    value_tmp, did_exist_tmp = context.visitor.materialise_value_provider(
        get_ex, ("maybe_value", "maybe_exists")
    )
    if convert_op is None:
        return value_tmp, did_exist_tmp
    # apply the deferred bytes -> uint64 conversion to the value temporary
    convert = Intrinsic(op=convert_op, args=[value_tmp], source_location=source_location)
    value_tmp_converted = mktemp(
        context,
        ir_type=true_value_ir_type,
        description="maybe_value_converted",
        source_location=expr.source_location,
    )
    assign_targets(
        context, source=convert, targets=[value_tmp_converted], assignment_location=source_location
    )
    return value_tmp_converted, did_exist_tmp
|
algorandfoundation/puya
|
src/puya/ir/builder/storage.py
|
Python
|
NOASSERTION
| 6,336 |
import contextlib
import itertools
import typing
from collections import defaultdict
from collections.abc import Iterator, Mapping
from functools import cached_property
import attrs
import puya.awst.nodes as awst_nodes
from puya.context import CompileContext
from puya.errors import CodeError, log_exceptions
from puya.ir.builder.blocks import BlocksBuilder
from puya.ir.models import Subroutine
from puya.ir.ssa import BraunSSA
from puya.parse import SourceLocation
from puya.program_refs import ContractReference
from puya.utils import attrs_extend
if typing.TYPE_CHECKING:
from puya.ir.builder.main import FunctionIRBuilder
# marker embedded in generated temporary-variable names; it cannot appear in
# user-written identifiers, so generated names can never collide with them
TMP_VAR_INDICATOR = "%"
@attrs.frozen(kw_only=True)
class IRBuildContext(CompileContext):
    """Program-wide context for lowering AWST into IR."""

    awst: awst_nodes.AWST
    # IR subroutine for each AWST function (copied per root, see for_root)
    subroutines: dict[awst_nodes.Function, Subroutine]
    embedded_funcs_lookup: Mapping[str, Subroutine]
    # the contract or logic-signature currently being lowered (None at program scope)
    root: awst_nodes.Contract | awst_nodes.LogicSignature | None = None
    # generated ARC-4 router subroutine per contract
    routers: dict[ContractReference, Subroutine] = attrs.field(factory=dict)

    @cached_property
    def _awst_lookup(self) -> Mapping[str, awst_nodes.RootNode]:
        # index of all root nodes by their fully-qualified id
        return {node.id: node for node in self.awst}

    def for_root(self, root: awst_nodes.Contract | awst_nodes.LogicSignature) -> typing.Self:
        """Return a copy of this context scoped to the given root node."""
        return attrs.evolve(
            self,
            root=root,
            # copy subroutines so that contract specific subroutines do not pollute other passes
            subroutines=self.subroutines.copy(),
        )

    def for_function(
        self, function: awst_nodes.Function, subroutine: Subroutine, visitor: "FunctionIRBuilder"
    ) -> "IRFunctionBuildContext":
        """Return a function-scoped context for building `subroutine` from `function`."""
        return attrs_extend(
            IRFunctionBuildContext, self, visitor=visitor, function=function, subroutine=subroutine
        )

    def resolve_function_reference(
        self,
        target: awst_nodes.SubroutineTarget,
        source_location: SourceLocation,
        caller: awst_nodes.Function,
    ) -> awst_nodes.Function:
        """Resolve a subroutine target to its AWST Function definition.

        Free subroutines resolve via the module-level lookup; contract-method
        targets resolve against the current contract's method resolution order.

        Raises CodeError if the target cannot be resolved to a Function.
        """
        if isinstance(target, awst_nodes.SubroutineID):
            func: awst_nodes.Node | None = self._awst_lookup.get(target.target)
        else:
            contract = self.root
            if not (
                isinstance(contract, awst_nodes.Contract)
                and isinstance(caller, awst_nodes.ContractMethod)
            ):
                raise CodeError(
                    "call to contract method from outside of contract class",
                    source_location,
                )
            match target:
                case awst_nodes.InstanceMethodTarget(member_name=member_name):
                    func = contract.resolve_contract_method(member_name)
                case awst_nodes.ContractMethodTarget(cref=start_at, member_name=member_name):
                    func = contract.resolve_contract_method(
                        member_name, source_location, start=start_at
                    )
                case awst_nodes.InstanceSuperMethodTarget(member_name=member_name):
                    # skip=True: begin the search after the caller's own class (super call)
                    func = contract.resolve_contract_method(
                        member_name, source_location, start=caller.cref, skip=True
                    )
                case unexpected:
                    typing.assert_never(unexpected)
        if func is None:
            raise CodeError(f"unable to resolve function reference {target}", source_location)
        if not isinstance(func, awst_nodes.Function):
            raise CodeError(
                f"function reference {target} resolved to non-function {func}", source_location
            )
        return func

    @property
    def default_fallback(self) -> SourceLocation | None:
        # fallback error-reporting location when nothing more specific is available
        if self.root:
            return self.root.source_location
        return None

    @contextlib.contextmanager
    def log_exceptions(self, fallback_location: SourceLocation | None = None) -> Iterator[None]:
        """Log (rather than propagate) recoverable errors raised in the managed scope."""
        with log_exceptions(fallback_location or self.default_fallback):
            yield
@attrs.frozen(kw_only=True)
class IRFunctionBuildContext(IRBuildContext):
    """Context when building from an awst Function node"""

    function: awst_nodes.Function
    subroutine: Subroutine
    visitor: "FunctionIRBuilder"
    block_builder: BlocksBuilder = attrs.field()
    # per-description counters used to generate unique temporary names
    _tmp_counters: defaultdict[str, Iterator[int]] = attrs.field(
        factory=lambda: defaultdict(itertools.count)
    )

    @property
    def ssa(self) -> BraunSSA:
        # convenience accessor for the block builder's SSA bookkeeping
        return self.block_builder.ssa

    @block_builder.default
    def _block_builder_factory(self) -> BlocksBuilder:
        return BlocksBuilder(self.subroutine.parameters, self.function.source_location)

    def next_tmp_name(self, description: str) -> str:
        """Return a unique temporary-variable name incorporating `description`."""
        counter_value = next(self._tmp_counters[description])
        return f"{description}{TMP_VAR_INDICATOR}{counter_value}"

    @property
    def default_fallback(self) -> SourceLocation | None:
        # errors default to the location of the function being built
        return self.function.source_location

    def resolve_subroutine(
        self,
        target: awst_nodes.SubroutineTarget,
        source_location: SourceLocation,
        *,
        caller: awst_nodes.Function | None = None,
    ) -> Subroutine:
        """Resolve a subroutine target to its IR Subroutine (caller defaults to this function)."""
        func = self.resolve_function_reference(
            target=target, source_location=source_location, caller=caller or self.function
        )
        return self.subroutines[func]
|
algorandfoundation/puya
|
src/puya/ir/context.py
|
Python
|
NOASSERTION
| 5,307 |
algorandfoundation/puya
|
src/puya/ir/destructure/__init__.py
|
Python
|
NOASSERTION
| 0 |
|
import itertools
import typing
import typing as t
from collections.abc import Iterable
import attrs
from puya import log
from puya.avm import AVMType
from puya.ir import models
from puya.ir.types_ import IRType
from puya.ir.visitor_mem_replacer import MemoryReplacer
from puya.ir.vla import VariableLifetimeAnalysis
from puya.options import LocalsCoalescingStrategy
from puya.utils import StableSet
logger = log.get_logger(__name__)
@attrs.define
class MemoryReplacerWithRedundantAssignmentRemoval(MemoryReplacer):
    """MemoryReplacer that also elides assignments made redundant by the replacement."""

    def visit_assignment(self, op: models.Assignment) -> models.Assignment | None:
        ass = super().visit_assignment(op)
        # after replacement, an assignment whose sole target equals its source
        # is a no-op self-copy and can be dropped entirely
        if ass is None or ass.targets == (ass.source,):
            return None
        return ass
class CoalesceGroupStrategy(t.Protocol):
    """Strategy controlling which registers may be coalesced together.

    Registers are first partitioned by `get_group_key`; only registers sharing
    a key are candidates for merging. `determine_group_replacement` then picks
    the register that survives as the merged name.
    """

    def get_group_key(self, reg: models.Register) -> object: ...

    def determine_group_replacement(self, regs: Iterable[models.Register]) -> models.Register: ...
def coalesce_registers(group_strategy: CoalesceGroupStrategy, sub: models.Subroutine) -> int:
    """
    A local can be merged with another local if they are never live at the same time.

    For each local that is being defined, check to see what the live-out locals are.
    It can be merged with another local set if:
    - This local is not in the "live-out" of any local in set
    - The "live-out" of this local does not intersect the local set

    Returns the number of register replacements performed.
    """
    vla = VariableLifetimeAnalysis.analyze(sub)
    # TODO: this uses a basic definition of interference by looking at live-ranges,
    # a better option is to continue with https://inria.hal.science/inria-00349925v1/document
    # which has already been partially implemented (phase 1 + 4 have been, anyway)
    variables_live_at_definition = dict[models.Register, StableSet[models.Register]]()
    # parameters are all defined on entry, so each sees every parameter as live
    for param in sub.parameters:
        variables_live_at_definition[param] = StableSet.from_iter(sub.parameters)
    # record, per defined register, the union of live-out sets at its definition site(s)
    for block in sub.body:
        for op in block.ops:
            match op:
                case models.Assignment(targets=targets):
                    op_live_out = vla.get_live_out_variables(op)
                    for defined_reg in targets:
                        live_set = variables_live_at_definition.setdefault(
                            defined_reg, StableSet()
                        )
                        live_set |= op_live_out
    coalescable_groups_by_key = dict[
        object, list[tuple[StableSet[models.Register], StableSet[models.Register]]]
    ]()
    for defined_reg, live_set in variables_live_at_definition.items():
        coalescable_groups = coalescable_groups_by_key.setdefault(
            group_strategy.get_group_key(defined_reg), []
        )
        for coalescable_register_set, combined_live_out in coalescable_groups:
            # conditions:
            # 1) this register/variable must not be "alive" _after_ the
            #    definition of any other variable in this set
            # 2) no register already in the set should be live out at the
            #    definition of this register
            # regardless of the order the definitions are processed in, this guarantees that:
            #   for all A and B in coalescable_register_set such that A != B:
            #     A is not live-out whenever B is assigned
            #     AND B is not live-out whenever A is assigned
            if defined_reg not in combined_live_out and live_set.isdisjoint(
                coalescable_register_set
            ):
                coalescable_register_set.add(defined_reg)
                combined_live_out |= live_set
                break
        else:
            # no compatible set found: start a new singleton group
            coalescable_groups.append((StableSet(defined_reg), StableSet.from_iter(live_set)))
    replacements = dict[models.Register, models.Register]()
    for group in coalescable_groups_by_key.values():
        for coalescable_register_set, _ in group:
            if len(coalescable_register_set) < 2:
                continue
            replacement = group_strategy.determine_group_replacement(coalescable_register_set)
            find = coalescable_register_set - {replacement}
            logger.debug(f"Coalescing {replacement} with [{', '.join(map(str, find))}]")
            replacements.update({to_find: replacement for to_find in find})
    total_replacements = MemoryReplacerWithRedundantAssignmentRemoval.apply(
        sub.body, replacements=replacements
    )
    return total_replacements
class RootOperandGrouping(CoalesceGroupStrategy):
    """Groups registers by root operand (name + IR type), optionally isolating some."""

    def __init__(self, isolate: frozenset[models.Register] | None = None) -> None:
        # registers in `isolate` each form their own singleton group
        self._isolate = isolate if isolate else frozenset()

    def get_group_key(self, reg: models.Register) -> object:
        if reg in self._isolate:
            return reg
        return reg.name, reg.ir_type

    def determine_group_replacement(self, regs: Iterable[models.Register]) -> models.Register:
        # the earliest SSA version survives as the merged register
        return min(regs, key=lambda reg: reg.version)
class AggressiveGrouping(CoalesceGroupStrategy):
    """Groups registers solely by AVM stack type, replacing with anonymous registers.

    Parameters are never merged (each is its own group), since their slots are
    fixed by the calling convention.
    """

    def __init__(self, sub: models.Subroutine) -> None:
        self._params = frozenset(sub.parameters)
        # provides unique version numbers for the anonymous replacement registers
        self._counter = itertools.count()

    def get_group_key(self, reg: models.Register) -> object:
        if reg in self._params:
            return reg
        else:
            return reg.atype

    def determine_group_replacement(self, regs: Iterable[models.Register]) -> models.Register:
        next_id = next(self._counter)
        # all members share one AVM type by construction of the group key
        (atype,) = {r.atype for r in regs}
        match atype:
            case AVMType.uint64:
                ir_type = IRType.uint64
            case AVMType.bytes:
                ir_type = IRType.bytes
            case _:
                typing.assert_never(atype)
        # anonymous register: empty name, uniqueness comes from the version number
        return models.Register(
            name="",
            version=next_id,
            ir_type=ir_type,
            source_location=None,
        )
def coalesce_locals(subroutine: models.Subroutine, strategy: LocalsCoalescingStrategy) -> None:
    """Merge non-interfering local registers in `subroutine`, per the chosen strategy."""
    if strategy == LocalsCoalescingStrategy.root_operand:
        group_strategy: CoalesceGroupStrategy = RootOperandGrouping()
    elif strategy == LocalsCoalescingStrategy.root_operand_excluding_args:
        # same as root_operand, but subroutine parameters are never merged
        group_strategy = RootOperandGrouping(isolate=frozenset(subroutine.parameters))
    elif strategy == LocalsCoalescingStrategy.aggressive:
        group_strategy = AggressiveGrouping(subroutine)
    logger.debug(
        f"Coalescing local variables in {subroutine.id}"
        f" using strategy {type(group_strategy).__name__}"
    )
    replacements = coalesce_registers(group_strategy, subroutine)
    logger.debug(f"Coalescing resulted in {replacements} replacement/s")
|
algorandfoundation/puya
|
src/puya/ir/destructure/coalesce_locals.py
|
Python
|
NOASSERTION
| 6,658 |
import attrs
from puya import log
from puya.context import CompileContext
from puya.ir import models
from puya.ir.destructure.coalesce_locals import coalesce_locals
from puya.ir.destructure.optimize import post_ssa_optimizer
from puya.ir.destructure.parcopy import sequentialize_parallel_copies
from puya.ir.destructure.remove_phi import convert_to_cssa, destructure_cssa
logger = log.get_logger(__name__)
def destructure_ssa(context: CompileContext, program: models.Program) -> None:
    """Translate every subroutine in `program` out of SSA form, in place."""
    for sub in program.all_subroutines:
        logger.debug(f"Performing SSA IR destructuring for {sub.id}")
        # conventional SSA first, so phi operands can be replaced by parallel copies
        convert_to_cssa(sub)
        sub.validate_with_ssa()
        destructure_cssa(sub)
        coalesce_locals(sub, context.options.locals_coalescing_strategy)
        sequentialize_parallel_copies(sub)
        post_ssa_optimizer(sub, context.options.optimization_level)
        # final structural sanity check on the mutated subroutine
        attrs.validate(sub)
|
algorandfoundation/puya
|
src/puya/ir/destructure/main.py
|
Python
|
NOASSERTION
| 965 |
import contextlib
import itertools
from puya import log
from puya.ir import models
from puya.ir.optimize.collapse_blocks import BlockReferenceReplacer
from puya.utils import unique
logger = log.get_logger(__name__)
def post_ssa_optimizer(sub: models.Subroutine, optimization_level: int) -> None:
    """Run cheap post-SSA cleanup passes gated on the optimization level."""
    logger.debug(f"Performing post-SSA optimizations at level {optimization_level}")
    if optimization_level >= 1:
        _remove_linear_jumps(sub)
    if optimization_level >= 2:
        _block_deduplication(sub)
def _remove_linear_jumps(subroutine: models.Subroutine) -> None:
    """Remove blocks that are just an unconditional jump, rewiring predecessors."""
    # P = {p0, p1, ..., pn} -> {j} -> {t}
    # remove {j} from subroutine
    # point P at t:
    #   update references within P from j to t
    #   ensure P are all in predecessors of t
    # This exists here and not in main IR optimization loop because we only want to do it for
    # blocks that are _truly_ empty, not ones that contain phi-node magic that results in copies
    # build a map of any blocks that are just an unconditional branch to their targets
    jumps = dict[models.BasicBlock, models.BasicBlock]()
    for block in subroutine.body.copy():
        match block:
            case models.BasicBlock(
                ops=[], terminator=models.Goto(target=target)
            ) if target is not block:
                jumps[block] = target
                logger.debug(f"Removing jump block {block}")
                with contextlib.suppress(ValueError):
                    target.predecessors.remove(block)
                subroutine.body.remove(block)
    # now back-propagate any chains
    replacements = dict[models.BasicBlock, models.BasicBlock]()
    for src, target in jumps.items():
        # follow chains of removed jump blocks to the final surviving target
        while True:
            try:
                target = jumps[target]
            except KeyError:
                break
        logger.debug(f"branching to {src} will be replaced with {target}")
        replacements[src] = target
        BlockReferenceReplacer.apply(find=src, replacement=target, blocks=subroutine.body)
        # the surviving target inherits the removed block's predecessors
        for pred in src.predecessors:
            if pred not in target.predecessors:
                target.predecessors.append(pred)
def _block_deduplication(subroutine: models.Subroutine) -> None:
    """Merge structurally identical blocks, redirecting references to the first occurrence."""
    seen = dict[tuple[object, ...], models.BasicBlock]()
    for block in subroutine.body.copy():
        # freeze ops into a hashable structural fingerprint of the block
        all_ops = tuple(op.freeze() for op in block.all_ops)
        if existing := seen.get(all_ops):
            logger.debug(
                f"Removing duplicated block {block} and updating references to {existing}"
            )
            BlockReferenceReplacer.apply(find=block, replacement=existing, blocks=subroutine.body)
            subroutine.body.remove(block)
            # surviving block inherits the removed duplicate's predecessors
            existing.predecessors = unique(
                itertools.chain(existing.predecessors, block.predecessors)
            )
        else:
            seen[all_ops] = block
|
algorandfoundation/puya
|
src/puya/ir/destructure/optimize.py
|
Python
|
NOASSERTION
| 2,880 |
import itertools
from collections.abc import Callable, Iterable
from puya import log
from puya.ir import models
from puya.ir.context import TMP_VAR_INDICATOR
logger = log.get_logger(__name__)
def sequentialize_parallel_copies(sub: models.Subroutine) -> None:
    """Rewrite tuple assignments (parallel copies) as sequences of single assignments.

    Temporaries named `parcopy%N` are introduced only where copy cycles require them.
    """
    logger.debug(f"Sequentializing parallel copies in {sub.id}")
    our_tmp_prefix = f"parcopy{TMP_VAR_INDICATOR}"
    # continue temp numbering after any pre-existing parcopy temporaries
    max_tmp_id = max(
        (
            int(r.name.split(TMP_VAR_INDICATOR)[1])
            for r in sub.get_assigned_registers()
            if r.name.startswith(our_tmp_prefix)
        ),
        default=-1,
    )
    next_tmp_id = itertools.count(max_tmp_id + 1)

    def make_temp(x: models.Value | models.Register) -> models.Register:
        # fresh temporary with the same IR type as the value it shelters
        return models.Register(
            ir_type=x.ir_type,
            name=f"{our_tmp_prefix}{next(next_tmp_id)}",
            version=0,
            source_location=x.source_location,
        )

    for block in sub.body:
        ops = list[models.Op]()
        for op in block.ops:
            match op:
                case models.Assignment(targets=targets, source=models.ValueTuple(values=sources)):
                    seqd = _sequentialize(zip(targets, sources, strict=True), mktmp=make_temp)
                    for dst, src in seqd:
                        assert isinstance(dst, models.Register)  # TODO: this is bad
                        ops.append(
                            models.Assignment(
                                targets=[dst],
                                source=src,
                                source_location=op.source_location,
                            )
                        )
                case _:
                    ops.append(op)
        block.ops = ops
def _sequentialize[T](
    copies: Iterable[tuple[T, T]],
    mktmp: Callable[[T], T],
    *,
    filter_dup_dests: bool = True,
    allow_fan_out: bool = True,
) -> list[tuple[T, T]]:
    """Order parallel copies `(dest, src)` into an equivalent sequential list.

    Copy cycles (e.g. swaps) are broken by sheltering one value in a temporary
    obtained from `mktmp`. Fan-out (one source copied to several destinations)
    is handled when `allow_fan_out` is set.
    """
    # If filter_dup_dests is True, consider pairs ordered, and if multiple
    # pairs have the same dest var, the last one takes effect. Otherwise,
    # such duplicate dest vars is an error.
    if filter_dup_dests:
        # If there're multiple assignments to the same var, keep only the latest
        copies = list(dict(copies).items())
    ready = []  # destinations that can be written immediately
    to_do = []  # destinations still awaiting their copy
    pred = {}  # pred[b] == a for each pending copy b <- a
    loc = dict[T, T | None]()  # loc[a]: where a's (original) value currently lives
    res = []  # output sequence of (dest, src)
    for b, _ in copies:
        loc[b] = None
    for b, a in copies:
        loc[a] = a
        pred[b] = a
        # Extra check
        if not filter_dup_dests and b in to_do:
            raise ValueError(f"Conflicting assignments to destination {b}, latest: {(b, a)}")
        to_do.append(b)
    for b, _ in copies:
        # a destination that is never read as a source can be overwritten at once
        if loc[b] is None:
            ready.append(b)
    logger.debug("loc: %s", "{" + ", ".join(f"{k}={v}" for k, v in loc.items()) + "}")
    logger.debug("pred: %s", "{" + ", ".join(f"{k}={v}" for k, v in pred.items()) + "}")
    logger.debug("ready: %s", ", ".join(map(str, ready)))
    logger.debug("to_do: %s", ", ".join(map(str, to_do)))
    while to_do:
        while ready:
            b = ready.pop()
            logger.debug("* avail %s", b)
            if b not in pred:
                # b is only a source/temporary, not the destination of any copy
                continue
            a = pred[b]
            c = loc[a]
            assert c is not None
            # print("%s <- %s" % (b, a))
            res.append((b, c))
            # Addition by Paul Sokolovsky to handle fan-out case (when same
            # source is assigned to multiple destinations).
            if allow_fan_out and c in to_do:
                to_do.remove(c)
            loc[a] = b
            if a == c:
                ready.append(a)
        # Addition to handle fan-out.
        # NOTE(review): this early exit must sit at the outer-loop level, after the
        # ready queue has fully drained — inside the inner loop it would drop copies
        # still pending in `ready` (and then pop from an empty to_do)
        if allow_fan_out and not to_do:
            break
        b = to_do.pop()
        logger.debug("* to_do %s", b)
        if b != loc[pred[b]]:
            # b's value is still needed as a source elsewhere: shelter it in a temp
            tmp = mktmp(b)
            # print("%s <- %s" % (b, a))
            res.append((tmp, b))
            loc[b] = tmp
            ready.append(b)
    return res
|
algorandfoundation/puya
|
src/puya/ir/destructure/parcopy.py
|
Python
|
NOASSERTION
| 3,997 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.