eth_getLogs
Returns an array of all logs matching a given filter object. Essential for querying historical events and monitoring smart contract activity.
When to Use This Method​
eth_getLogs
is crucial for:
- Event Monitoring - Track smart contract events and state changes
- Historical Queries - Search past events within a block range
- DeFi Analytics - Monitor trades, liquidity events, and protocol activity
- Indexing - Build databases of on-chain events
Parameters​
- Filter Object
- `fromBlock` - (optional) Starting block number or tag
- `toBlock` - (optional) Ending block number or tag
- `address` - (optional) Contract address or array of addresses
- `topics` - (optional) Array of topic filters (event signatures and indexed parameters)
- `blockHash` - (optional) Restrict to a single block (incompatible with fromBlock/toBlock)
{
"jsonrpc": "2.0",
"method": "eth_getLogs",
"params": [{
"fromBlock": "0x1",
"toBlock": "latest",
"address": "0x7d1afa7b718fb893db30a3abc0cfc608aacfebb0",
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
null,
"0x000000000000000000000000a7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270"
]
}],
"id": 1
}
Returns​
Array of log objects with:
- `removed` - `true` if the log was removed due to a chain reorganization
- `logIndex` - Position of the log in the block
- `transactionIndex` - Position of the transaction in the block
- `transactionHash` - Hash of the transaction
- `blockHash` - Hash of the block
- `blockNumber` - Block number
- `address` - Contract address that emitted the log
- `data` - Non-indexed log parameters
- `topics` - Array of indexed log parameters
Implementation Examples​
- curl
- JavaScript
- Python
curl -X POST https://api-arbitrum-mainnet-archive.n.dwellir.com/YOUR_API_KEY \
-H "Content-Type: application/json" \
-d '{
"jsonrpc": "2.0",
"method": "eth_getLogs",
"params": [{
"fromBlock": "0x1",
"toBlock": "latest",
"address": "0xaf88d065e77c8cc2239327c5edb3a432268e5831",
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
]
}],
"id": 1
}'
import { JsonRpcProvider, Interface, id, zeroPadValue, toBeHex } from 'ethers';
const provider = new JsonRpcProvider('https://api-arbitrum-mainnet-archive.n.dwellir.com/YOUR_API_KEY');
// Advanced event monitoring system
// Advanced event monitoring system built on eth_getLogs.
class EventMonitor {
  /**
   * @param {JsonRpcProvider} provider - Connected ethers v6 provider.
   */
  constructor(provider) {
    this.provider = provider;
    this.filters = new Map();
  }

  /**
   * Fetch and decode ERC20 Transfer events for a token contract.
   * @param {string} tokenAddress - ERC20 contract address.
   * @param {number} fromBlock - First block to search (inclusive).
   * @param {number|string} toBlock - Last block to search, or 'latest'.
   * @returns {Promise<object[]>} Decoded transfers:
   *   {from, to, value, blockNumber, transactionHash, logIndex}.
   */
  async getTransferEvents(tokenAddress, fromBlock, toBlock) {
    // topic0 is the keccak256 hash of the canonical event signature.
    const transferTopic = id('Transfer(address,address,uint256)');
    const filter = {
      fromBlock: toBeHex(fromBlock),
      toBlock: toBlock === 'latest' ? 'latest' : toBeHex(toBlock),
      address: tokenAddress,
      topics: [transferTopic]
    };
    const logs = await this.provider.getLogs(filter);
    const iface = new Interface([
      'event Transfer(address indexed from, address indexed to, uint256 value)'
    ]);
    return logs.map((log) => {
      const decoded = iface.parseLog({ topics: log.topics, data: log.data });
      return {
        from: decoded.args.from,
        to: decoded.args.to,
        value: decoded.args.value.toString(), // BigInt → string for safe serialization
        blockNumber: log.blockNumber,
        transactionHash: log.transactionHash,
        logIndex: log.logIndex
      };
    });
  }

  /**
   * Fetch raw logs matching an arbitrary event signature.
   * @param {string} signature - Canonical signature, e.g. 'Transfer(address,address,uint256)'.
   * @param {string} address - Contract address to filter on.
   */
  async getEventsBySignature(signature, address, fromBlock, toBlock) {
    const eventTopic = id(signature);
    const filter = {
      fromBlock: toBeHex(fromBlock),
      toBlock: toBlock === 'latest' ? 'latest' : toBeHex(toBlock),
      address: address,
      topics: [eventTopic]
    };
    return await this.provider.getLogs(filter);
  }

  /**
   * Query one event across several contracts in a single request and group
   * the resulting logs by emitting address.
   * @param {string[]} addresses - Contract addresses (eth_getLogs accepts an array).
   * @returns {Promise<Record<string, object[]>>} Logs keyed by contract address.
   */
  async queryWithMultipleAddresses(addresses, eventSignature, fromBlock, toBlock) {
    const eventTopic = id(eventSignature);
    const filter = {
      fromBlock: toBeHex(fromBlock),
      toBlock: toBlock === 'latest' ? 'latest' : toBeHex(toBlock),
      address: addresses, // Array of addresses
      topics: [eventTopic]
    };
    const logs = await this.provider.getLogs(filter);
    const grouped = {};
    for (const log of logs) {
      if (!grouped[log.address]) {
        grouped[log.address] = [];
      }
      grouped[log.address].push(log);
    }
    return grouped;
  }

  /**
   * Fetch ERC20 Transfer logs touching a specific address.
   * @param {'from'|'to'|string} direction - 'from', 'to', or anything else for both.
   * @returns {Promise<object[]>} Raw logs (ordered by block when both directions are merged).
   */
  async getFilteredTransfers(tokenAddress, specificAddress, direction, fromBlock, toBlock) {
    const transferTopic = id('Transfer(address,address,uint256)');
    let topics;
    if (direction === 'from') {
      // topic1 = indexed `from`; null = wildcard for topic2 (`to`).
      topics = [transferTopic, zeroPadValue(specificAddress, 32), null];
    } else if (direction === 'to') {
      // topic2 = indexed `to`.
      topics = [transferTopic, null, zeroPadValue(specificAddress, 32)];
    } else {
      // Both directions: two parallel queries merged and ordered by block.
      const [fromLogs, toLogs] = await Promise.all([
        this.getFilteredTransfers(tokenAddress, specificAddress, 'from', fromBlock, toBlock),
        this.getFilteredTransfers(tokenAddress, specificAddress, 'to', fromBlock, toBlock)
      ]);
      return [...fromLogs, ...toLogs].sort((a, b) => a.blockNumber - b.blockNumber);
    }
    const filter = {
      fromBlock: toBeHex(fromBlock),
      toBlock: toBlock === 'latest' ? 'latest' : toBeHex(toBlock),
      address: tokenAddress,
      topics: topics
    };
    return await this.provider.getLogs(filter);
  }

  /**
   * Probe a protocol contract for common DeFi events. Signatures the contract
   * never emitted simply produce no entry in the result. The seven probes are
   * independent, so they run in parallel instead of sequentially.
   * @returns {Promise<Record<string, object[]>>} Non-empty log arrays keyed by event name.
   */
  async searchDeFiEvents(protocolAddress, fromBlock, toBlock) {
    const eventSignatures = {
      swap: 'Swap(address,uint256,uint256,uint256,uint256,address)',
      addLiquidity: 'Mint(address,uint256,uint256)',
      removeLiquidity: 'Burn(address,uint256,uint256,address)',
      deposit: 'Deposit(address,uint256,uint256)',
      withdraw: 'Withdrawal(address,uint256,uint256)',
      borrow: 'Borrow(address,address,uint256,uint256,uint256)',
      repay: 'Repay(address,address,uint256,uint256)'
    };
    const entries = await Promise.all(
      Object.entries(eventSignatures).map(async ([name, signature]) => {
        try {
          const logs = await this.getEventsBySignature(
            signature,
            protocolAddress,
            fromBlock,
            toBlock
          );
          return [name, logs];
        } catch {
          // Event might not exist in this contract — treat as "no results".
          return [name, []];
        }
      })
    );
    const events = {};
    for (const [name, logs] of entries) {
      if (logs.length > 0) {
        events[name] = logs;
      }
    }
    return events;
  }

  /**
   * Fetch logs in fixed-size block chunks to stay under node result limits.
   *
   * Fix over the previous version: when a chunk overflows the node's result
   * limit, only the REMAINING range is retried at half the page size — logs
   * already fetched are kept instead of being discarded — and a floor of one
   * block per page prevents unbounded recursion.
   *
   * @param {object} filter - getLogs filter; fromBlock/toBlock as hex strings or numbers.
   * @param {number} [pageSize=1000] - Blocks per request.
   * @returns {Promise<object[]>} All logs across the full range, in block order.
   */
  async getPaginatedLogs(filter, pageSize = 1000) {
    const results = [];
    // Number() accepts both plain numbers and 0x-prefixed hex strings.
    let currentBlock = Number(filter.fromBlock);
    const endBlock = filter.toBlock === 'latest'
      ? await this.provider.getBlockNumber()
      : Number(filter.toBlock);
    while (currentBlock <= endBlock) {
      const chunkEnd = Math.min(currentBlock + pageSize - 1, endBlock);
      const chunkFilter = {
        ...filter,
        fromBlock: toBeHex(currentBlock),
        toBlock: toBeHex(chunkEnd)
      };
      try {
        const logs = await this.provider.getLogs(chunkFilter);
        results.push(...logs);
        console.log(`Fetched blocks ${currentBlock} to ${chunkEnd}: ${logs.length} logs`);
      } catch (error) {
        if (error.message.includes('query returned more than')) {
          if (pageSize <= 1) {
            // A single block already exceeds the node's limit — cannot split further.
            throw new Error(
              `Too many logs in single block ${currentBlock}`,
              { cause: error }
            );
          }
          // Retry only the unfetched tail at half the page size; keep progress.
          const tail = await this.getPaginatedLogs(
            { ...filter, fromBlock: toBeHex(currentBlock) },
            Math.floor(pageSize / 2)
          );
          return [...results, ...tail];
        }
        throw error;
      }
      currentBlock = chunkEnd + 1;
    }
    return results;
  }

  /**
   * Invoke `callback(logs)` for every new block that contains matching logs.
   * @returns {() => void} Unsubscribe function that detaches the block listener.
   */
  createLiveEventStream(filter, callback) {
    const processNewBlock = async (blockNumber) => {
      const logs = await this.provider.getLogs({
        ...filter,
        fromBlock: toBeHex(blockNumber),
        toBlock: toBeHex(blockNumber)
      });
      if (logs.length > 0) {
        callback(logs);
      }
    };
    // Listen for new blocks
    this.provider.on('block', processNewBlock);
    // Return unsubscribe function
    return () => {
      this.provider.off('block', processNewBlock);
    };
  }
}
// NFT event monitoring
// NFT event monitoring
class NFTEventTracker {
  constructor(provider) {
    this.provider = provider;
  }

  /** Build a getLogs filter for one topic0 over a block range. */
  #rangeFilter(contractAddress, topic0, fromBlock, toBlock) {
    return {
      fromBlock: toBeHex(fromBlock),
      toBlock: toBlock === 'latest' ? 'latest' : toBeHex(toBlock),
      address: contractAddress,
      topics: [topic0]
    };
  }

  /**
   * Fetch and decode ERC721 Transfer events, flagging mints (from the zero
   * address) and burns (to the zero address).
   */
  async getERC721Transfers(contractAddress, fromBlock, toBlock) {
    const topic0 = id('Transfer(address,address,uint256)');
    const rawLogs = await this.provider.getLogs(
      this.#rangeFilter(contractAddress, topic0, fromBlock, toBlock)
    );
    const abi = new Interface([
      'event Transfer(address indexed from, address indexed to, uint256 indexed tokenId)'
    ]);
    const transfers = [];
    for (const entry of rawLogs) {
      const parsed = abi.parseLog({ topics: entry.topics, data: entry.data });
      transfers.push({
        from: parsed.args.from,
        to: parsed.args.to,
        tokenId: parsed.args.tokenId.toString(),
        blockNumber: entry.blockNumber,
        transactionHash: entry.transactionHash,
        isMint: parsed.args.from === '0x0000000000000000000000000000000000000000',
        isBurn: parsed.args.to === '0x0000000000000000000000000000000000000000'
      });
    }
    return transfers;
  }

  /**
   * Fetch raw ERC1155 TransferSingle and TransferBatch logs in parallel.
   * @returns {Promise<{single: object[], batch: object[], total: number}>}
   */
  async getERC1155Transfers(contractAddress, fromBlock, toBlock) {
    const topicSingle = id('TransferSingle(address,address,address,uint256,uint256)');
    const topicBatch = id('TransferBatch(address,address,address,uint256[],uint256[])');
    const [single, batch] = await Promise.all(
      [topicSingle, topicBatch].map((topic0) =>
        this.provider.getLogs(
          this.#rangeFilter(contractAddress, topic0, fromBlock, toBlock)
        )
      )
    );
    return {
      single: single,
      batch: batch,
      total: single.length + batch.length
    };
  }
}
// Usage examples
const monitor = new EventMonitor(provider);
// Get transfer events for a token
const transfers = await monitor.getTransferEvents(
'0xTokenAddress',
12345678,
'latest'
);
// Monitor specific address activity
const userTransfers = await monitor.getFilteredTransfers(
'0xTokenAddress',
'0xUserAddress',
'both', // 'from', 'to', or 'both'
12345678,
'latest'
);
// Search DeFi protocol events
const defiEvents = await monitor.searchDeFiEvents(
'0xProtocolAddress',
12345678,
12345778
);
// Create live event stream
const unsubscribe = monitor.createLiveEventStream(
{
address: '0xTokenAddress',
topics: [id('Transfer(address,address,uint256)')]
},
(logs) => {
console.log('New events:', logs);
}
);
from web3 import Web3
from eth_utils import event_signature_to_log_topic, to_hex
import json
from typing import Dict, List, Any, Optional
from datetime import datetime, timedelta
w3 = Web3(Web3.HTTPProvider('https://api-arbitrum-mainnet-archive.n.dwellir.com/YOUR_API_KEY'))
class LogAnalyzer:
    """Analyze and query event logs on Arbitrum L2 via eth_getLogs."""

    # Canonical zero address, used to flag mints and burns.
    ZERO_ADDRESS = '0x0000000000000000000000000000000000000000'

    def __init__(self, w3_instance):
        # w3_instance: a connected web3.Web3 (or None when only the pure
        # helpers — topic conversion, pattern analysis, CSV export — are used).
        self.w3 = w3_instance

    def get_transfer_events(
        self,
        token_address: str,
        from_block: int,
        to_block: str = 'latest',
        from_address: Optional[str] = None,
        to_address: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """Get ERC20 Transfer events, optionally filtered by sender/recipient.

        Returns a list of dicts with from/to/value/block_number/
        transaction_hash/log_index/address keys.
        """
        # topic0 = keccak256 of the canonical event signature
        transfer_topic = self.w3.keccak(text='Transfer(address,address,uint256)').hex()
        # Topic positions: [signature, indexed `from`, indexed `to`]
        topics: List[Optional[str]] = [transfer_topic]
        if from_address:
            topics.append(self._address_to_topic(from_address))
        else:
            # Wildcard for `from` so a `to` filter lands in position 2.
            topics.append(None)
        if to_address:
            topics.append(self._address_to_topic(to_address))
        filter_params = {
            'fromBlock': from_block,
            'toBlock': to_block,
            'address': token_address,
            'topics': topics
        }
        logs = self.w3.eth.get_logs(filter_params)
        transfers = []
        for log in logs:
            transfers.append({
                'from': self._topic_to_address(log['topics'][1]),
                'to': self._topic_to_address(log['topics'][2]),
                # `data` may be HexBytes or a hex string depending on web3 version.
                'value': self._decode_uint(log['data']),
                'block_number': log['blockNumber'],
                'transaction_hash': log['transactionHash'].hex(),
                'log_index': log['logIndex'],
                'address': log['address']
            })
        return transfers

    @staticmethod
    def _decode_uint(data) -> int:
        """Decode an unsigned integer from log data/topics.

        Accepts HexBytes/bytes or a '0x'-prefixed hex string; empty payloads
        decode to 0.
        """
        if isinstance(data, (bytes, bytearray)):
            return int.from_bytes(data, 'big') if data else 0
        return int(data, 16) if data not in (None, '', '0x') else 0

    def _address_to_topic(self, address: str) -> str:
        """Left-pad a 20-byte address to the 32-byte (64 hex char) topic format."""
        return '0x' + address[2:].lower().zfill(64)

    def _topic_to_address(self, topic) -> str:
        """Extract the 20-byte address from a 32-byte topic.

        Accepts HexBytes/bytes or a hex string (the previous version called
        .hex() unconditionally, which fails for plain strings).
        """
        hex_str = topic.hex() if hasattr(topic, 'hex') else topic
        return '0x' + hex_str[-40:]

    def get_events_by_signature(
        self,
        event_signature: str,
        contract_address: Optional[str] = None,
        from_block: int = 0,
        to_block: str = 'latest'
    ) -> List[Dict[str, Any]]:
        """Get raw logs matching an event signature, optionally one contract only."""
        event_topic = self.w3.keccak(text=event_signature).hex()
        filter_params = {
            'fromBlock': from_block,
            'toBlock': to_block,
            'topics': [event_topic]
        }
        if contract_address:
            filter_params['address'] = contract_address
        logs = self.w3.eth.get_logs(filter_params)
        return [{
            'address': log['address'],
            'topics': [t.hex() for t in log['topics']],
            'data': log['data'],
            'block_number': log['blockNumber'],
            'transaction_hash': log['transactionHash'].hex(),
            'log_index': log['logIndex']
        } for log in logs]

    def search_defi_activity(
        self,
        protocol_address: str,
        from_block: int,
        to_block: str = 'latest'
    ) -> Dict[str, List[Dict]]:
        """Probe a protocol contract for common DeFi events.

        Signatures the contract never emitted (or that the node rejects)
        simply produce no entry in the result.
        """
        event_signatures = {
            'swaps': 'Swap(address,uint256,uint256,uint256,uint256,address)',
            'liquidity_added': 'Mint(address,uint256,uint256)',
            'liquidity_removed': 'Burn(address,uint256,uint256,address)',
            'deposits': 'Deposit(address,uint256)',
            'withdrawals': 'Withdrawal(address,uint256)',
            'borrows': 'Borrow(address,address,uint256,uint256,uint256)',
            'repayments': 'Repay(address,address,uint256,uint256)'
        }
        results = {}
        for event_type, signature in event_signatures.items():
            try:
                logs = self.get_events_by_signature(
                    signature,
                    protocol_address,
                    from_block,
                    to_block
                )
                if logs:
                    results[event_type] = logs
            except Exception:
                # Event might not exist in this contract — best-effort probe.
                continue
        return results

    def get_nft_transfers(
        self,
        nft_address: str,
        from_block: int,
        to_block: str = 'latest',
        token_id: Optional[int] = None
    ) -> List[Dict[str, Any]]:
        """Get ERC721 Transfer events, optionally restricted to one token id."""
        transfer_topic = self.w3.keccak(text='Transfer(address,address,uint256)').hex()
        # [signature, from (any), to (any)]; tokenId filter goes in position 3.
        topics: List[Optional[str]] = [transfer_topic, None, None]
        if token_id is not None:
            # Pad the hex digits to 64 chars, THEN add the '0x' prefix.
            # (The previous to_hex(token_id).zfill(66) padded zeros *before*
            # the '0x' prefix, producing an invalid topic like '000...0x1'.)
            topics.append('0x' + format(token_id, 'x').zfill(64))
        filter_params = {
            'fromBlock': from_block,
            'toBlock': to_block,
            'address': nft_address,
            'topics': topics
        }
        logs = self.w3.eth.get_logs(filter_params)
        transfers = []
        for log in logs:
            from_addr = self._topic_to_address(log['topics'][1])
            to_addr = self._topic_to_address(log['topics'][2])
            # ERC721 indexes tokenId (topic 3); non-standard contracts may
            # leave it unindexed, in which case it sits in `data`.
            if len(log['topics']) > 3:
                tid = self._decode_uint(log['topics'][3])
            else:
                tid = self._decode_uint(log['data'])
            transfers.append({
                'from': from_addr,
                'to': to_addr,
                'token_id': tid,
                'is_mint': from_addr == self.ZERO_ADDRESS,
                'is_burn': to_addr == self.ZERO_ADDRESS,
                'block_number': log['blockNumber'],
                'transaction_hash': log['transactionHash'].hex(),
                'log_index': log['logIndex']
            })
        return transfers

    def get_logs_paginated(
        self,
        filter_params: Dict,
        page_size: int = 1000
    ) -> List[Dict[str, Any]]:
        """Get logs in block chunks to avoid node result limits.

        On a "query returned more than ..." error, retries with half the page
        size, down to a floor of 10 blocks per page.
        """
        all_logs = []
        from_block = filter_params['fromBlock']
        to_block = filter_params['toBlock']
        if to_block == 'latest':
            to_block = self.w3.eth.block_number
        current_block = from_block
        while current_block <= to_block:
            chunk_end = min(current_block + page_size - 1, to_block)
            chunk_filter = {
                **filter_params,
                'fromBlock': current_block,
                'toBlock': chunk_end
            }
            try:
                logs = self.w3.eth.get_logs(chunk_filter)
                all_logs.extend(logs)
                print(f"Fetched blocks {current_block} to {chunk_end}: {len(logs)} logs")
            except Exception as e:
                if 'query returned more than' in str(e):
                    # Reduce page size and retry
                    if page_size > 10:
                        return self.get_logs_paginated(filter_params, page_size // 2)
                    raise Exception("Block range too large even with minimum page size")
                raise
            current_block = chunk_end + 1
        return all_logs

    def analyze_event_patterns(
        self,
        logs: List[Dict]
    ) -> Dict[str, Any]:
        """Summarize a log set: counts, most active addresses, density stats."""
        if not logs:
            return {'error': 'No logs to analyze'}
        # Group by block
        blocks: Dict[Any, List[Dict]] = {}
        for log in logs:
            blocks.setdefault(log['blockNumber'], []).append(log)
        # Group by transaction (hashes may be HexBytes or plain strings)
        transactions: Dict[str, List[Dict]] = {}
        for log in logs:
            tx_hash = log['transactionHash']
            tx = tx_hash.hex() if hasattr(tx_hash, 'hex') else tx_hash
            transactions.setdefault(tx, []).append(log)
        # Count logs per emitting address
        addresses: Dict[str, int] = {}
        for log in logs:
            addresses[log['address']] = addresses.get(log['address'], 0) + 1
        sorted_addresses = sorted(addresses.items(), key=lambda x: x[1], reverse=True)
        return {
            'total_logs': len(logs),
            'unique_blocks': len(blocks),
            'unique_transactions': len(transactions),
            'unique_addresses': len(addresses),
            'most_active_addresses': sorted_addresses[:10],
            'logs_per_block': {
                'average': len(logs) / len(blocks) if blocks else 0,
                'max': max(len(b) for b in blocks.values()) if blocks else 0,
                'min': min(len(b) for b in blocks.values()) if blocks else 0
            },
            'logs_per_transaction': {
                'average': len(logs) / len(transactions) if transactions else 0,
                'max': max(len(t) for t in transactions.values()) if transactions else 0,
                'min': min(len(t) for t in transactions.values()) if transactions else 0
            }
        }

    def export_logs_to_csv(
        self,
        logs: List[Dict],
        filename: str
    ):
        """Export logs to a CSV file; bytes values are written as hex strings."""
        import csv
        if not logs:
            return
        # Union of keys across all logs, so rows with differing shapes still fit.
        keys = set()
        for log in logs:
            keys.update(log.keys())
        with open(filename, 'w', newline='') as csvfile:
            writer = csv.DictWriter(csvfile, fieldnames=sorted(keys))
            writer.writeheader()
            for log in logs:
                row = {}
                for key, value in log.items():
                    row[key] = value.hex() if isinstance(value, bytes) else value
                writer.writerow(row)
        # Fix: previous version printed a literal placeholder instead of the path.
        print(f"Exported {len(logs)} logs to {filename}")
# Usage examples
# NOTE(review): the '0x...Address' strings below are placeholders, not valid
# addresses — substitute real addresses before running.
analyzer = LogAnalyzer(w3)
# Get token transfers
transfers = analyzer.get_transfer_events(
    token_address='0xTokenAddress',
    from_block=12345678,
    to_block='latest',
    from_address='0xUserAddress'  # Optional filter
)
print(f"Found {len(transfers)} transfers")
for transfer in transfers[:5]:
    print(f"From: {transfer['from'][:10]}... To: {transfer['to'][:10]}... Value: {transfer['value']}")
# Search DeFi activity
defi_events = analyzer.search_defi_activity(
    protocol_address='0xProtocolAddress',
    from_block=12345678
)
for event_type, events in defi_events.items():
    print(f"{event_type}: {len(events)} events")
# Get NFT transfers
nft_transfers = analyzer.get_nft_transfers(
    nft_address='0xNFTAddress',
    from_block=12345678
)
# Mints are transfers from the zero address
mints = [t for t in nft_transfers if t['is_mint']]
print(f"Found {len(mints)} NFT mints")
# Analyze event patterns
pattern_analysis = analyzer.analyze_event_patterns(transfers)
print(f"Analysis: {json.dumps(pattern_analysis, indent=2)}")
Response Example​
{
"jsonrpc": "2.0",
"id": 1,
"result": [
{
"address": "0xaf88d065e77c8cc2239327c5edb3a432268e5831",
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
"0x000000000000000000000000a7d8d9ef8d8ce8992df33d8b8cf4aebabd5bd270",
"0x0000000000000000000000004e6b5f1abfb9aa3cc5b3d8f4a7b8c2d9e3f0a1b2"
],
"data": "0x000000000000000000000000000000000000000000000000016345785d8a0000",
"blockNumber": "0x123abc",
"blockHash": "0x1234567890abcdef1234567890abcdef1234567890abcdef1234567890abcdef",
"transactionHash": "0xabcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890",
"transactionIndex": "0x0",
"logIndex": "0x0",
"removed": false
}
]
}
Advanced Usage​
Multiple Address Filtering​
{
"jsonrpc": "2.0",
"method": "eth_getLogs",
"params": [{
"fromBlock": "0x1",
"toBlock": "latest",
"address": [
"0xaf88d065e77c8cc2239327c5edb3a432268e5831",
"0x82af49447d8a07e3bd95bd0d56f35241523fbab1"
],
"topics": [
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef"
]
}],
"id": 1
}
Topic Filtering with OR Logic​
{
"jsonrpc": "2.0",
"method": "eth_getLogs",
"params": [{
"fromBlock": "0x1",
"toBlock": "latest",
"topics": [
[
"0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef",
"0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925"
]
]
}],
"id": 1
}
Notes​
- Arbitrum supports full archive access with complete historical data
- Query limits: Maximum 10,000 results per request
- Block range limits: Maximum 1,000 blocks per query for optimal performance
- Topic arrays: Support up to 4 topics with OR logic within each position
- Performance tip: Use specific address filters to improve query speed
- Event signatures: First topic is always the keccak256 hash of the event signature
- Archive node: Full historical state available for deep blockchain analysis
Common Use Cases​
1. Token Balance History​
// Track historical token balance changes
async function getBalanceHistory(tokenAddress, userAddress, fromBlock) {
const monitor = new EventMonitor(provider);
// Get all transfers involving the user
const logs = await monitor.getFilteredTransfers(
tokenAddress,
userAddress,
'both',
fromBlock,
'latest'
);
// Calculate balance changes
const balanceChanges = [];
let currentBalance = 0n;
for (const log of logs) {
const iface = new Interface([
'event Transfer(address indexed from, address indexed to, uint256 value)'
]);
const decoded = iface.parseLog({
topics: log.topics,
data: log.data
});
const change = decoded.args.from.toLowerCase() === userAddress.toLowerCase() ?
-decoded.args.value : decoded.args.value;
currentBalance += change;
balanceChanges.push({
blockNumber: log.blockNumber,
transactionHash: log.transactionHash,
change: change.toString(),
balance: currentBalance.toString(),
type: change < 0 ? 'outgoing' : 'incoming'
});
}
return balanceChanges;
}
2. DEX Trade Monitoring​
// Monitor DEX trading activity
async function monitorDEXTrades(dexAddress, fromBlock) {
const swapTopic = id('Swap(address,address,int256,int256,uint160,uint128,int24)');
const logs = await provider.getLogs({
fromBlock: toBeHex(fromBlock),
toBlock: 'latest',
address: dexAddress,
topics: [swapTopic]
});
const trades = logs.map(log => {
// Decode Uniswap V3 swap event
const iface = new Interface([
'event Swap(address indexed sender, address indexed recipient, int256 amount0, int256 amount1, uint160 sqrtPriceX96, uint128 liquidity, int24 tick)'
]);
const decoded = iface.parseLog({
topics: log.topics,
data: log.data
});
return {
sender: decoded.args.sender,
recipient: decoded.args.recipient,
amount0: decoded.args.amount0.toString(),
amount1: decoded.args.amount1.toString(),
price: Number(decoded.args.sqrtPriceX96) ** 2 / (2 ** 192),
blockNumber: log.blockNumber,
transactionHash: log.transactionHash
};
});
return trades;
}
3. Contract Event Indexer​
// Build an event index for a contract
class EventIndexer {
constructor(provider, contractAddress, abi) {
this.provider = provider;
this.contractAddress = contractAddress;
this.iface = new Interface(abi);
this.eventIndex = new Map();
}
async indexEvents(fromBlock, toBlock) {
// Get all events for the contract
const logs = await this.provider.getLogs({
fromBlock: toBeHex(fromBlock),
toBlock: toBlock === 'latest' ? 'latest' : toBeHex(toBlock),
address: this.contractAddress
});
for (const log of logs) {
try {
const parsed = this.iface.parseLog({
topics: log.topics,
data: log.data
});
if (!this.eventIndex.has(parsed.name)) {
this.eventIndex.set(parsed.name, []);
}
this.eventIndex.get(parsed.name).push({
args: Object.fromEntries(
Object.entries(parsed.args).filter(([key]) => isNaN(key))
),
blockNumber: log.blockNumber,
transactionHash: log.transactionHash,
logIndex: log.logIndex
});
} catch {
// Unknown event
}
}
return this.getStatistics();
}
getStatistics() {
const stats = {};
for (const [eventName, events] of this.eventIndex) {
stats[eventName] = {
count: events.length,
firstBlock: Math.min(...events.map(e => e.blockNumber)),
lastBlock: Math.max(...events.map(e => e.blockNumber))
};
}
return stats;
}
}
Error Handling​
Error Type | Description | Solution |
---|---|---|
Query limit exceeded | Too many logs in range | Use pagination or reduce block range |
Invalid block range | fromBlock > toBlock | Validate block parameters |
Node timeout | Large query timeout | Break into smaller queries |
/**
 * eth_getLogs with input validation, automatic pagination fallback on
 * oversized result sets, and exponential-backoff retries on timeouts.
 * @param {object} filter - getLogs filter (fromBlock/toBlock hex or 'latest').
 * @param {number} [maxRetries=3] - Attempts before giving up on timeouts.
 */
async function robustGetLogs(filter, maxRetries = 3) {
  // Validate numerically. The previous version compared hex STRINGS, which
  // is lexicographic — '0x2' > '0x10' is true — so valid ranges could be
  // rejected. Number() parses 0x-prefixed hex correctly.
  if (filter.toBlock !== 'latest' && Number(filter.fromBlock) > Number(filter.toBlock)) {
    throw new Error('Invalid block range: fromBlock must be <= toBlock');
  }
  for (let attempt = 0; attempt < maxRetries; attempt++) {
    try {
      return await provider.getLogs(filter);
    } catch (error) {
      if (error.message.includes('query returned more than')) {
        // Too many results — fall back to chunked pagination.
        const monitor = new EventMonitor(provider);
        return await monitor.getPaginatedLogs(filter, 500);
      }
      if (error.message.includes('timeout') && attempt < maxRetries - 1) {
        // Exponential backoff: 1s, 2s, 4s, ...
        await new Promise((resolve) =>
          setTimeout(resolve, Math.pow(2, attempt) * 1000)
        );
        continue;
      }
      throw error;
    }
  }
}
Need help? Contact our support team or check the Arbitrum documentation.