Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@
"pytest-timeout>=2.2.0,<3", # For avoiding timing out during tests
"hypothesis>=6.2.0,<7.0", # Strategy-based fuzzer
"hypothesis-jsonschema==0.19.0", # JSON Schema fuzzer extension
"pandas", # Needed to test w/ narwhals
],
"lint": [
"ruff>=0.10.0", # Unified linter and formatter
Expand Down Expand Up @@ -92,10 +93,9 @@
"lazyasd>=0.1.4",
"asttokens>=2.4.1,<3", # Peer dependency; w/o pin container build fails.
"cchecksum>=0.0.3,<1",
# Pandas peer-dep: Numpy 2.0 causes issues for some users.
"numpy<2",
"more-itertools; python_version<'3.10'", # backport for `itertools.pairwise`
"narwhals>=1.29,<2",
"packaging>=23.0,<24",
"pandas>=2.2.2,<3",
"pluggy>=1.3,<2",
"pydantic>=2.10.0,<3",
"pydantic-settings>=2.5.2,<3",
Expand Down
16 changes: 15 additions & 1 deletion src/ape/api/config.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,8 @@

import yaml
from ethpm_types import PackageManifest, PackageMeta, Source
from pydantic import ConfigDict, Field, ValidationError, model_validator
from narwhals.stable.v1 import Implementation as DataframeImplementation
from pydantic import ConfigDict, Field, ValidationError, field_validator, model_validator
from pydantic_settings import BaseSettings, SettingsConfigDict

from ape.exceptions import ConfigError
Expand Down Expand Up @@ -202,6 +203,17 @@ class DeploymentConfig(PluginConfig):
"""


class QueryConfig(PluginConfig):
"""Add 'query:' key to your config."""

backend: DataframeImplementation = DataframeImplementation.PANDAS
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

What happens if the query manager gets used w/o Pandas installed?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

So, narwhals is an abstraction package that has code to translate operations on your choice of dataframe implementation, so if you don't have pandas installed but pick pandas then it will raise (hence the config option)

Many people prefer polars over pandas, and this would allow us to support both without having to ship with either package (meaning you would install your preferred package and set the config to use it as default)

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

One option: we could have a dynamic default that prefers polars if it is installed but uses pandas otherwise

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Yeah we can do that, because it's unlikely that someone would have 2 or more installed, and if they do they can just set it via config (or kwarg)

"""Which Narwhals backend implementation to use."""

@field_validator("backend", mode="before")
def convert_backend_str(cls, value: Any) -> DataframeImplementation:
return DataframeImplementation.from_backend(value)


def _get_problem_with_config(errors: list, path: Path) -> Optional[str]:
# Attempt to find line numbers in the config matching.
cfg_content = Source(content=path.read_text(encoding="utf8")).content
Expand Down Expand Up @@ -368,6 +380,8 @@ def __init__(self, *args, **kwargs):
The version of the project.
"""

query: QueryConfig = QueryConfig()

# NOTE: Plugin configs are technically "extras".
model_config = SettingsConfigDict(extra="allow", env_prefix="APE_")

Expand Down
19 changes: 14 additions & 5 deletions src/ape/api/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,9 +19,6 @@
from eth_utils import to_hex
from pydantic import Field, computed_field, field_serializer, model_validator

from ape.api.networks import NetworkAPI
from ape.api.query import BlockTransactionQuery
from ape.api.transactions import ReceiptAPI, TransactionAPI
from ape.exceptions import (
APINotImplementedError,
ProviderError,
Expand All @@ -44,6 +41,9 @@
from ape.utils.process import JoinableQueue, spawn
from ape.utils.rpc import RPCHeaders

from .networks import NetworkAPI
from .transactions import ReceiptAPI, TransactionAPI

if TYPE_CHECKING:
from eth_pydantic_types import HexBytes
from ethpm_types.abi import EventABI
Expand Down Expand Up @@ -151,12 +151,21 @@ def transactions(self) -> list[TransactionAPI]:
"""
All transactions in a block.
"""
from ape.api.query import BlockTransactionQuery

if self.hash is None:
# Unable to query transactions.
# NOTE: Only "unsealed" blocks do not have a hash
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not every provider Ape talks to is a blockchain. Made up dev blocks from providers like boa don't necessarily need hashes even though they have associated transactions.

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

As an aside, boa could also not use a real hash for block hash (e.g. block 1 is hash 0x1, etc)

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think we already made this change, but it was during development that led to the discovery of the bug leading to the test.

I think I am good with adjusting the test to catch an exception (instead of just completely deleting the test), as it was found with purpose.

raise ProviderError("Unable to find block transactions: not sealed yet")
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Would it be bad to logger.error() here and just return an empty list?

Copy link
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Not sure if there's a use case to work with that, but potentially that might be better


elif self.num_transactions == 0:
return []

try:
query = BlockTransactionQuery(columns=["*"], block_id=self.hash)
query = BlockTransactionQuery(
columns=["*"],
num_transactions=self.num_transactions,
block_id=self.hash,
)
return cast(list[TransactionAPI], list(self.query_manager.query(query)))
except QueryEngineError as err:
# NOTE: Re-raising a better error here because was confusing
Expand Down
Loading
Loading