Usage Examples
:::note[Range Limits] The API enforces per-provider range limits. ERC-20 and Native Token queries are limited to 7 days / 1M blocks (10M for ARB). All other protocols (Aave, Uniswap, Lido, Stader, Threshold) allow up to 31 days / 10M blocks. JSON format is always limited to 10,000 blocks. Exceeding these limits returns a 400 error. :::
Builder Pattern
The client uses a fluent builder pattern. The query is only executed when you call a terminal method like as_df(), as_file(), or as_dict().
from defistream import DeFiStream
client = DeFiStream()
# Build query step by step
query = client.erc20.transfers("USDT")
query = query.network("ETH")
query = query.block_range(21000000, 21010000)
query = query.min_amount(1000)
# Execute and get DataFrame
df = query.as_df()
# Or chain everything
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.min_amount(1000)
.as_df()
) ERC20 Transfers
# Get USDT transfers over 10,000 USDT
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.min_amount(10000)
.as_df()
)
# Query multiple tokens at once (known symbols only, not contract addresses)
df = (
client.erc20.transfers("USDT", "USDC", "DAI")
.network("ETH")
.block_range(21000000, 21010000)
.as_df()
)
# Or set multiple tokens via chain method
df = (
client.erc20.transfers()
.token("USDT", "USDC")
.network("ETH")
.block_range(21000000, 21010000)
.as_df()
)
# Filter by sender
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.sender("0x28c6c06298d514db089934071355e5743bf21d60")
.as_df()
) AAVE Events
# Get deposits
df = (
client.aave_v3.deposits()
.network("ETH")
.block_range(21000000, 21010000)
.as_df()
)
# Use a specific market type on ETH (Core, Prime, or EtherFi)
df = (
client.aave_v3.deposits()
.network("ETH")
.block_range(21000000, 21010000)
.eth_market_type("Prime")
.as_df()
) Uniswap Swaps
# Get swaps for WETH/USDC pool with 0.05% fee tier
df = (
client.uniswap_v3.swaps("WETH", "USDC", 500)
.network("ETH")
.block_range(21000000, 21010000)
.as_df()
)
# Or build with chain methods
df = (
client.uniswap_v3.swaps()
.symbol0("WETH")
.symbol1("USDC")
.fee(500)
.network("ETH")
.block_range(21000000, 21010000)
.as_df()
) Native Token Transfers
# Get ETH transfers >= 1 ETH
df = (
client.native_token.transfers()
.network("ETH")
.block_range(21000000, 21010000)
.min_amount(1.0)
.as_df()
) Binance Exchange Data
# OHLCV candles (1h, 4h, 1d, etc.)
df = (
client.binance.ohlcv("BTC")
.window("1h")
.time_range("2025-01-01", "2025-01-08")
.as_df()
)
# Raw tick trades (31-day max range)
df = (
client.binance.trades("BTC")
.time_range("2025-01-01", "2025-01-02")
.as_df()
)
# Book depth snapshots (365-day max range)
df = (
client.binance.book_depth()
.token("BTC")
.time_range("2025-01-01", "2025-02-01")
.as_df()
)
# Open interest (max 31 days)
df = (
client.binance.open_interest()
.token("ETH")
.time_range("2024-12-01", "2025-01-01")
.as_df()
)
# Funding rate
df = (
client.binance.funding_rate()
.token("BTC")
.time_range("2024-01-01", "2025-01-01")
.as_df()
)
# Long/short ratios
df = (
client.binance.long_short_ratios()
.token("SOL")
.time_range("2024-06-01", "2025-01-01")
.as_df()
) Label & Category Filters
# Get USDT transfers involving Binance wallets
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.involving_label("Binance")
.as_df()
)
# Get USDT transfers FROM exchanges TO DeFi protocols
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.sender_category("exchange")
.receiver_category("defi")
.as_df()
)
# Get AAVE deposits involving exchange addresses
df = (
client.aave_v3.deposits()
.network("ETH")
.block_range(21000000, 21010000)
.involving_category("exchange")
.as_df()
)
# Get native ETH transfers FROM Binance or Coinbase (multi-value)
df = (
client.native_token.transfers()
.network("ETH")
.block_range(21000000, 21010000)
.sender_label("Binance,Coinbase")
.as_df()
) Aggregate Queries
Use .aggregate() to bucket raw events into time or block intervals with summary statistics. All existing filters work before .aggregate() is called.
# Aggregate USDT transfers into 2-hour buckets
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21100000)
.aggregate(group_by="time", period="2h")
.as_df()
)
# Aggregate by block intervals
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21100000)
.aggregate(group_by="block", period="100b")
.as_df()
)
# Combine with filters — large transfers from exchanges, bucketed hourly
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21100000)
.sender_category("exchange")
.min_amount(10000)
.aggregate(group_by="time", period="1h")
.as_df()
)
# Aggregate Uniswap swaps
df = (
client.uniswap_v3.swaps("WETH", "USDC", 500)
.network("ETH")
.block_range(21000000, 21100000)
.aggregate(group_by="time", period="1h")
.as_df()
) You can also discover what aggregate fields are available for a protocol:
schema = client.aggregate_schema("erc20")
print(schema) Verbose Mode
By default, responses omit metadata fields to reduce payload size. Use .verbose() to include all fields:
# Default: compact response (no tx_hash, tx_id, log_index, network, name)
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.as_df()
)
# Verbose: includes all metadata fields
df = (
client.erc20.transfers("USDT")
.network("ETH")
.block_range(21000000, 21010000)
.verbose()
.as_df()
) Value Column
Use .with_value() to add USD value data to applicable events. This adds a value_usd (amount x price) column to individual events. On aggregate endpoints, it produces an agg_value_usd (sum) column.
Supported protocols: AAVE, Uniswap, Lido, ERC20, Native Token.
# Individual events with value data
df = (
client.aave_v3.deposits()
.network("ETH")
.block_range(21000000, 21010000)
.with_value()
.as_df()
)
# df now includes 'value_usd' column
# Aggregate with value — adds agg_value_usd column
df = (
client.aave_v3.deposits()
.network("ETH")
.block_range(21000000, 21100000)
.with_value()
.aggregate(group_by="time", period="2h")
.as_df()
)
# df now includes 'agg_value_usd' column