suix_queryEvents
Queries and filters events emitted by transactions on the Sui blockchain with support for various search criteria and pagination.
Overview​
The suix_queryEvents
method is essential for monitoring smart contract activity, tracking specific events, and building event-driven applications on Sui. Events are emitted by Move modules during transaction execution and provide structured data about what happened during contract interactions. This method enables filtering by event type, sender, package, time range, and more.
Parameters​
Parameter | Type | Required | Description |
---|---|---|---|
query | object | Yes | Query criteria to filter events |
cursor | string | No | Pagination cursor from previous response |
limit | number | No | Maximum number of events to return (default: 50, max: 1000) |
descendingOrder | boolean | No | Return results in descending order by sequence number (default: false) |
Query Object​
The query object supports various filter types:
Filter Type | Description | Example |
---|---|---|
All | Match all events (no filter) | { "All": [] } |
Transaction | Events from specific transaction | { "Transaction": "0x123..." } |
MoveModule | Events from specific Move module | { "MoveModule": { "package": "0x2", "module": "coin" } } |
MoveEventType | Events of specific Move event type | { "MoveEventType": "0x2::coin::Deposit" } |
Sender | Events from transactions by specific sender | { "Sender": "0x456..." } |
Package | Events from any module in package | { "Package": "0x789..." } |
TimeRange | Events within time range | { "TimeRange": { "startTime": 1234567890, "endTime": 1234567900 } } |
Complex Query Filters​
Filter Type | Description | Structure |
---|---|---|
And | All conditions must match | { "And": [filter1, filter2] } |
Or | Any condition must match | { "Or": [filter1, filter2] } |
Returns​
Returns a paginated response containing matching events.
Field | Type | Description |
---|---|---|
data | array | Array of event objects matching the query |
nextCursor | string | Cursor for next page (null if no more pages) |
hasNextPage | boolean | Whether more pages are available |
Event Object Structure​
Field | Type | Description |
---|---|---|
id | object | Event identifier with transaction digest and event sequence |
packageId | string | Package that emitted the event |
transactionModule | string | Module name that emitted the event |
sender | string | Address of transaction sender |
type | string | Full event type (package::module::EventName) |
parsedJson | object | Event data parsed as JSON |
bcs | string | BCS-encoded event data |
timestampMs | string | Event timestamp in milliseconds |
Code Examples​
- cURL
- JavaScript
- Python
# suix_queryEvents takes POSITIONAL params: [query, cursor, limit, descending_order]
#   query  - filter object (see the filter tables above)
#   cursor - null for the first page; pass "nextCursor" from a prior response to continue
#   limit  - page size
#   descending_order - true returns newest events first

# Query all events (with limit)
curl -X POST https://sui-mainnet.dwellir.com/YOUR_API_KEY \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "method": "suix_queryEvents",
    "params": [
      {
        "All": []
      },
      null,
      10,
      true
    ],
    "id": 1
  }'

# Query events from specific package
curl -X POST https://sui-mainnet.dwellir.com/YOUR_API_KEY \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "method": "suix_queryEvents",
    "params": [
      {
        "Package": "0x2"
      },
      null,
      50,
      true
    ],
    "id": 1
  }'

# Query specific event type
curl -X POST https://sui-mainnet.dwellir.com/YOUR_API_KEY \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "method": "suix_queryEvents",
    "params": [
      {
        "MoveEventType": "0x2::coin::CoinCreated"
      },
      null,
      100,
      true
    ],
    "id": 1
  }'

# Query events from specific sender
curl -X POST https://sui-mainnet.dwellir.com/YOUR_API_KEY \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "method": "suix_queryEvents",
    "params": [
      {
        "Sender": "0xd77955e670601c2c2e6e8637e383695c166aac0a86b741c266bdfb23c2e3369f"
      },
      null,
      25,
      true
    ],
    "id": 1
  }'

# Query events with complex filter (AND condition)
curl -X POST https://sui-mainnet.dwellir.com/YOUR_API_KEY \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "method": "suix_queryEvents",
    "params": [
      {
        "And": [
          {
            "Package": "0x2"
          },
          {
            "Sender": "0xd77955e670601c2c2e6e8637e383695c166aac0a86b741c266bdfb23c2e3369f"
          }
        ]
      },
      null,
      30,
      true
    ],
    "id": 1
  }'
import { SuiClient } from '@mysten/sui.js/client';
// SDK client pointed at the Dwellir Sui mainnet RPC endpoint.
// Replace YOUR_API_KEY with a real key before running the examples below.
const client = new SuiClient({
  url: 'https://sui-mainnet.dwellir.com/YOUR_API_KEY'
});
// Query events with various filters
/**
 * Run one suix_queryEvents call through the SDK.
 *
 * @param {object} filter - Event filter object (e.g. `{ Package: '0x2' }`).
 * @param {number} [limit=50] - Maximum events to return in this page.
 * @param {boolean} [descending=true] - Newest-first when true.
 * @returns {Promise<object|null>} The paginated result, or null on failure.
 */
async function queryEvents(filter, limit = 50, descending = true) {
  // The SDK expects a named `order` string rather than a boolean flag.
  const order = descending ? 'descending' : 'ascending';
  try {
    const result = await client.queryEvents({ query: filter, limit, order });
    console.log(`Found ${result.data.length} events`);
    return result;
  } catch (error) {
    // Swallow the error by design: callers treat null as "query failed".
    console.error('Failed to query events:', error);
    return null;
  }
}
/** Fetch up to `limit` events emitted by any module of `packageId`. */
async function getPackageEvents(packageId, limit = 100) {
  const filter = { Package: packageId };
  return queryEvents(filter, limit);
}
/** Fetch up to `limit` events whose full Move type equals `eventType`. */
async function getEventsByType(eventType, limit = 100) {
  const filter = { MoveEventType: eventType };
  return queryEvents(filter, limit);
}
/** Fetch up to `limit` events emitted by transactions signed by `sender`. */
async function getSenderEvents(sender, limit = 100) {
  const filter = { Sender: sender };
  return queryEvents(filter, limit);
}
/**
 * Fetch the events of a single transaction. Uses the maximum page size
 * (1000) so one call captures every event the transaction emitted.
 */
async function getTransactionEvents(transactionDigest) {
  const filter = { Transaction: transactionDigest };
  return queryEvents(filter, 1000);
}
/**
 * Collect events across pages by following `nextCursor`, newest first.
 *
 * @param {object} filter - Event filter object.
 * @param {number} [maxPages=10] - Safety cap on the number of pages fetched.
 * @returns {Promise<Array>} All events gathered before the cap or the last page.
 */
async function getAllEventsPaginated(filter, maxPages = 10) {
  const collected = [];
  let cursor = null;
  for (let page = 0; page < maxPages; page++) {
    const result = await client.queryEvents({
      query: filter,
      cursor: cursor,
      limit: 50,
      order: 'descending'
    });
    // A failed or empty page ends the walk.
    if (!result || result.data.length === 0) break;
    collected.push(...result.data);
    console.log(`Page ${page + 1}: ${result.data.length} events`);
    if (!result.hasNextPage) break;
    cursor = result.nextCursor;
  }
  return collected;
}
// Event monitoring and analysis.
// Polls suix_queryEvents on a timer and dispatches newly-seen events to
// registered handlers. There is no websocket subscription here — detection
// latency is bounded by `intervalMs`, and events arriving faster than one
// poll window of 100 may be missed.
class EventMonitor {
  constructor(client) {
    this.client = client;
    // event type string (or '*') -> array of handler callbacks
    this.eventHandlers = new Map();
    // Most recent event id { txDigest, eventSeq } already delivered.
    this.lastEventId = null;
    this.isMonitoring = false;
  }
  // Register event handler for specific event types ('*' matches all events).
  onEvent(eventType, handler) {
    if (!this.eventHandlers.has(eventType)) {
      this.eventHandlers.set(eventType, []);
    }
    this.eventHandlers.get(eventType).push(handler);
  }
  // Start monitoring for new events. Idempotent: a second call while
  // running is a no-op. The first poll fires after `intervalMs`, not
  // immediately.
  async startMonitoring(filter, intervalMs = 5000) {
    if (this.isMonitoring) return;
    console.log('Starting event monitoring...');
    this.isMonitoring = true;
    // Get initial state: remember the newest existing event so historical
    // events are not replayed to handlers.
    try {
      const initial = await this.client.queryEvents({
        query: filter,
        limit: 1,
        order: 'descending'
      });
      if (initial.data.length > 0) {
        this.lastEventId = initial.data[0].id;
      }
    } catch (error) {
      console.error('Failed to get initial event state:', error);
    }
    const monitorLoop = async () => {
      if (!this.isMonitoring) return;
      try {
        // Results are requested newest-first, so scanning from the front
        // until we hit lastEventId yields exactly the unseen events.
        const result = await this.client.queryEvents({
          query: filter,
          limit: 100,
          order: 'descending'
        });
        if (result.data.length > 0) {
          const newEvents = [];
          for (const event of result.data) {
            // Stop when we reach previously seen events
            if (this.lastEventId &&
                event.id.txDigest === this.lastEventId.txDigest &&
                event.id.eventSeq === this.lastEventId.eventSeq) {
              break;
            }
            newEvents.push(event);
          }
          if (newEvents.length > 0) {
            // newEvents[0] is the newest; remember it as the new high-water mark.
            this.lastEventId = newEvents[0].id;
            // Deliver in chronological order (oldest first).
            await this.processNewEvents(newEvents.reverse());
          }
        }
      } catch (error) {
        console.error('Error in event monitoring:', error);
      }
      // Re-arm the timer only after this poll finished, so polls never overlap.
      setTimeout(monitorLoop, intervalMs);
    };
    setTimeout(monitorLoop, intervalMs);
  }
  // Invoke type-specific handlers, then wildcard handlers, for each event.
  // Handler errors are logged and do not stop delivery of later events.
  async processNewEvents(events) {
    for (const event of events) {
      console.log(`📡 New event: ${event.type}`);
      // Call registered handlers
      const handlers = this.eventHandlers.get(event.type) || [];
      const allHandlers = this.eventHandlers.get('*') || [];
      for (const handler of [...handlers, ...allHandlers]) {
        try {
          await handler(event);
        } catch (error) {
          console.error('Error in event handler:', error);
        }
      }
    }
  }
  // Stop polling. An in-flight poll finishes, then the loop exits on its
  // next wakeup because isMonitoring is false.
  stopMonitoring() {
    this.isMonitoring = false;
    console.log('Event monitoring stopped');
  }
}
// Event analytics: offline aggregation over suix_queryEvents results
// (per-type/per-sender/per-module counts, hourly buckets, sequence patterns).
class EventAnalytics {
  constructor(client) {
    // NOTE(review): the methods below call the module-level
    // getAllEventsPaginated(), which uses the module-level `client`;
    // this.client is stored but not used by this class.
    this.client = client;
  }
  // Summarize activity for one package over the last `hours` hours.
  // Returns count maps plus per-hour trend buckets.
  async analyzePackageActivity(packageId, hours = 24) {
    const events = await getAllEventsPaginated(
      { Package: packageId },
      20 // max pages
    );
    if (events.length === 0) {
      return { error: 'No events found for package' };
    }
    const analysis = {
      packageId: packageId,
      // totalEvents counts ALL fetched events; the maps below only cover
      // events inside the time window (recentEvents).
      totalEvents: events.length,
      timeRange: hours,
      eventTypes: {},
      sendersActivity: {},
      moduleActivity: {},
      hourlyDistribution: {},
      recentTrends: []
    };
    const cutoffTime = Date.now() - (hours * 60 * 60 * 1000);
    // timestampMs is a decimal string per the response schema; missing
    // timestamps default to 0 and are filtered out.
    const recentEvents = events.filter(e =>
      parseInt(e.timestampMs || '0') > cutoffTime
    );
    recentEvents.forEach(event => {
      // Event type analysis
      const eventType = event.type;
      analysis.eventTypes[eventType] = (analysis.eventTypes[eventType] || 0) + 1;
      // Sender analysis
      const sender = event.sender;
      if (!analysis.sendersActivity[sender]) {
        analysis.sendersActivity[sender] = {
          eventCount: 0,
          eventTypes: new Set()
        };
      }
      analysis.sendersActivity[sender].eventCount++;
      analysis.sendersActivity[sender].eventTypes.add(eventType);
      // Module analysis
      const module = event.transactionModule;
      analysis.moduleActivity[module] = (analysis.moduleActivity[module] || 0) + 1;
      // Hourly distribution (local-time hour of day, 0-23)
      const timestamp = parseInt(event.timestampMs || '0');
      const hour = new Date(timestamp).getHours();
      analysis.hourlyDistribution[hour] = (analysis.hourlyDistribution[hour] || 0) + 1;
    });
    // Convert sets to counts (so the result is JSON-serializable)
    Object.values(analysis.sendersActivity).forEach(activity => {
      activity.uniqueEventTypes = activity.eventTypes.size;
      delete activity.eventTypes;
    });
    // Calculate trends: one bucket per hour from the cutoff to now.
    const timeWindow = 60 * 60 * 1000; // 1 hour windows
    const windows = Math.ceil((Date.now() - cutoffTime) / timeWindow);
    for (let i = 0; i < windows; i++) {
      const windowStart = cutoffTime + (i * timeWindow);
      const windowEnd = windowStart + timeWindow;
      const windowEvents = recentEvents.filter(e => {
        const timestamp = parseInt(e.timestampMs || '0');
        return timestamp >= windowStart && timestamp < windowEnd;
      });
      analysis.recentTrends.push({
        window: new Date(windowStart).toISOString(),
        eventCount: windowEvents.length,
        uniqueTypes: new Set(windowEvents.map(e => e.type)).size
      });
    }
    return analysis;
  }
  // Find runs of `patternSize` consecutive event types that repeat, grouped
  // per sender (events are time-sorted within each sender before scanning).
  async findEventPatterns(filter, patternSize = 3) {
    const events = await getAllEventsPaginated(filter, 50);
    if (events.length < patternSize) {
      return { patterns: [], analysis: 'Insufficient events for pattern analysis' };
    }
    const patterns = new Map();
    const eventSequences = new Map();
    // Group events by sender for sequence analysis
    events.forEach(event => {
      const sender = event.sender;
      if (!eventSequences.has(sender)) {
        eventSequences.set(sender, []);
      }
      eventSequences.get(sender).push(event);
    });
    // Find patterns within each sender's events
    for (const [sender, senderEvents] of eventSequences) {
      if (senderEvents.length < patternSize) continue;
      // Sort by timestamp
      senderEvents.sort((a, b) =>
        parseInt(a.timestampMs || '0') - parseInt(b.timestampMs || '0')
      );
      // Look for repeating patterns: slide a window of patternSize types.
      for (let i = 0; i <= senderEvents.length - patternSize; i++) {
        const pattern = senderEvents
          .slice(i, i + patternSize)
          .map(e => e.type)
          .join(' -> ');
        if (!patterns.has(pattern)) {
          patterns.set(pattern, {
            pattern: pattern,
            count: 0,
            senders: new Set(),
            examples: []
          });
        }
        const patternData = patterns.get(pattern);
        patternData.count++;
        patternData.senders.add(sender);
        // Keep at most 3 concrete examples per pattern.
        if (patternData.examples.length < 3) {
          patternData.examples.push({
            sender: sender,
            events: senderEvents.slice(i, i + patternSize).map(e => ({
              type: e.type,
              timestamp: e.timestampMs,
              txDigest: e.id.txDigest
            }))
          });
        }
      }
    }
    // Convert to array and sort by frequency
    const patternArray = Array.from(patterns.values())
      .filter(p => p.count > 1) // Only patterns that repeat
      .sort((a, b) => b.count - a.count);
    // Convert sets to counts
    patternArray.forEach(pattern => {
      pattern.uniqueSenders = pattern.senders.size;
      delete pattern.senders;
    });
    return {
      patterns: patternArray,
      analysis: {
        totalUniquePatterns: patternArray.length,
        mostCommonPattern: patternArray[0]?.pattern || 'None',
        avgPatternFrequency: patternArray.length > 0 ?
          patternArray.reduce((sum, p) => sum + p.count, 0) / patternArray.length : 0
      }
    };
  }
  // Run the same summary stats for several named filters side by side.
  // Per-filter failures are reported inline instead of aborting the loop.
  async compareEventActivity(filters, timeRangeHours = 24) {
    const comparisons = {};
    for (const [name, filter] of Object.entries(filters)) {
      try {
        const events = await getAllEventsPaginated(filter, 10);
        const cutoffTime = Date.now() - (timeRangeHours * 60 * 60 * 1000);
        const recentEvents = events.filter(e =>
          parseInt(e.timestampMs || '0') > cutoffTime
        );
        comparisons[name] = {
          totalEvents: recentEvents.length,
          uniqueEventTypes: new Set(recentEvents.map(e => e.type)).size,
          uniqueSenders: new Set(recentEvents.map(e => e.sender)).size,
          // `|| 0` maps the 0-events/0-senders NaN case to 0.
          avgEventsPerSender: recentEvents.length / new Set(recentEvents.map(e => e.sender)).size || 0,
          topEventTypes: this.getTopEventTypes(recentEvents, 5)
        };
      } catch (error) {
        comparisons[name] = { error: error.message };
      }
    }
    return comparisons;
  }
  // Return the `topN` most frequent event types as [{ type, count }, ...],
  // sorted by descending count.
  getTopEventTypes(events, topN = 5) {
    const typeCounts = {};
    events.forEach(event => {
      typeCounts[event.type] = (typeCounts[event.type] || 0) + 1;
    });
    return Object.entries(typeCounts)
      .sort(([,a], [,b]) => b - a)
      .slice(0, topN)
      .map(([type, count]) => ({ type, count }));
  }
}
// Real-time event dashboard.
// Periodically re-analyzes each package and keeps aggregate stats.
//
// FIX: the previous version stored `totalUniqueTypes`/`totalUniqueSenders`
// as Sets, then overwrote them with `.size` numbers after the first update —
// so every later interval tick called `.add()` on a number and threw.
// It also kept `totalEvents += ...` across updates, double-counting.
// Global stats are now recomputed from scratch on every pass.
async function createEventDashboard(packageIds, updateIntervalMs = 10000) {
  const analytics = new EventAnalytics(client);
  const dashboard = {
    packages: {},
    globalStats: {
      totalEvents: 0,
      totalUniqueTypes: 0,
      totalUniqueSenders: 0,
      lastUpdate: null
    }
  };
  const updateDashboard = async () => {
    console.log('📊 Updating event dashboard...');
    // Fresh accumulators each pass — stats reflect the current state,
    // not a running total across updates.
    const uniqueTypes = new Set();
    const uniqueSenders = new Set();
    let totalEvents = 0;
    for (const packageId of packageIds) {
      try {
        const analysis = await analytics.analyzePackageActivity(packageId, 1); // Last 1 hour
        dashboard.packages[packageId] = analysis;
        totalEvents += analysis.totalEvents || 0;
        Object.keys(analysis.eventTypes || {}).forEach(type => {
          uniqueTypes.add(type);
        });
        Object.keys(analysis.sendersActivity || {}).forEach(sender => {
          uniqueSenders.add(sender);
        });
      } catch (error) {
        // One failing package should not take down the whole dashboard.
        console.error(`Error updating dashboard for ${packageId}:`, error);
        dashboard.packages[packageId] = { error: error.message };
      }
    }
    dashboard.globalStats.totalEvents = totalEvents;
    dashboard.globalStats.totalUniqueTypes = uniqueTypes.size;
    dashboard.globalStats.totalUniqueSenders = uniqueSenders.size;
    dashboard.globalStats.lastUpdate = new Date().toISOString();
    // Log summary
    console.log(`Dashboard updated:`);
    console.log(` Total Events: ${dashboard.globalStats.totalEvents}`);
    console.log(` Unique Event Types: ${dashboard.globalStats.totalUniqueTypes}`);
    console.log(` Unique Senders: ${dashboard.globalStats.totalUniqueSenders}`);
    return dashboard;
  };
  // Initial update
  await updateDashboard();
  // Set up periodic updates
  const intervalId = setInterval(updateDashboard, updateIntervalMs);
  return {
    dashboard,
    stop: () => {
      clearInterval(intervalId);
      console.log('Event dashboard stopped');
    }
  };
}
// Usage examples
// NOTE: top-level `await` below requires an ES module context
// (or wrap these calls in an async main()).
const commonPackages = ['0x2', '0x1']; // System packages
// Query specific event types (`?.` guards the null returned on failure)
const coinEvents = await getEventsByType('0x2::coin::CoinCreated', 50);
console.log(`Found ${coinEvents?.data.length || 0} coin creation events`);
// Monitor events: register handlers before starting the poll loop
const monitor = new EventMonitor(client);
monitor.onEvent('0x2::coin::CoinCreated', async (event) => {
  console.log('💰 New coin created:', event.parsedJson);
});
// '*' handler receives every event regardless of type
monitor.onEvent('*', async (event) => {
  console.log('📨 Event:', event.type);
});
await monitor.startMonitoring({ Package: '0x2' });
// Analytics over the last 6 hours of package 0x2 activity
const analytics = new EventAnalytics(client);
const packageAnalysis = await analytics.analyzePackageActivity('0x2', 6);
console.log('Package Analysis:', packageAnalysis);
// Event patterns
const patterns = await analytics.findEventPatterns({ Package: '0x2' });
console.log('Found patterns:', patterns.patterns.length);
// Event dashboard refreshing every 30 seconds
const { dashboard, stop } = await createEventDashboard(commonPackages, 30000);
// Stop after 10 minutes
setTimeout(stop, 10 * 60 * 1000);
import requests
import json
from typing import Dict, List, Any, Optional, Union, Callable, Set
from dataclasses import dataclass, field
import time
from collections import defaultdict, Counter
from datetime import datetime, timedelta
import threading
@dataclass
class EventFilter:
    """Declarative description of one suix_queryEvents filter.

    Set exactly one field; SuiEventQueryClient._build_filter_dict picks the
    first populated field (in declaration order) and ignores the rest. An
    instance with no fields set falls back to the match-everything filter.
    """
    # True -> {"All": []} (match every event).
    all_events: bool = False
    # Transaction digest -> {"Transaction": ...}.
    transaction: Optional[str] = None
    # {"package": ..., "module": ...} -> {"MoveModule": ...}.
    move_module: Optional[Dict[str, str]] = None
    # Full Move event type string -> {"MoveEventType": ...}.
    move_event_type: Optional[str] = None
    # Sender address -> {"Sender": ...}.
    sender: Optional[str] = None
    # Package id -> {"Package": ...}.
    package: Optional[str] = None
    # {"startTime": ms, "endTime": ms} -> {"TimeRange": ...}.
    time_range: Optional[Dict[str, int]] = None
    # Lists of raw RPC filter dicts -> {"And": [...]} / {"Or": [...]}.
    and_filters: Optional[List[Dict]] = None
    or_filters: Optional[List[Dict]] = None
class SuiEventQueryClient:
    """Minimal JSON-RPC client for Sui's suix_queryEvents method."""

    def __init__(self, rpc_url: str):
        # Full endpoint URL, including any API-key path segment.
        self.rpc_url = rpc_url

    def _build_filter_dict(self, event_filter: EventFilter) -> Dict[str, Any]:
        """Translate an EventFilter into the wire-format filter object.

        The first populated field wins, checked in the dataclass's
        declaration order; an empty filter degrades to {"All": []}.
        """
        if event_filter.all_events:
            return {"All": []}
        candidates = (
            ("Transaction", event_filter.transaction),
            ("MoveModule", event_filter.move_module),
            ("MoveEventType", event_filter.move_event_type),
            ("Sender", event_filter.sender),
            ("Package", event_filter.package),
            ("TimeRange", event_filter.time_range),
            ("And", event_filter.and_filters),
            ("Or", event_filter.or_filters),
        )
        for rpc_key, value in candidates:
            if value:
                return {rpc_key: value}
        return {"All": []}

    def query_events(
        self,
        event_filter: EventFilter,
        cursor: Optional[str] = None,
        limit: int = 50,
        descending: bool = True
    ) -> Optional[Dict[str, Any]]:
        """Issue one suix_queryEvents call.

        Returns the RPC `result` dict, or None on transport/RPC errors
        (errors are printed, never raised).
        """
        request_body = {
            "jsonrpc": "2.0",
            "method": "suix_queryEvents",
            "params": [self._build_filter_dict(event_filter), cursor, limit, descending],
            "id": 1
        }
        try:
            http_response = requests.post(
                self.rpc_url,
                headers={'Content-Type': 'application/json'},
                data=json.dumps(request_body),
                timeout=30
            )
            reply = http_response.json()
            if 'error' in reply:
                print(f"RPC Error: {reply['error']}")
                return None
            return reply['result']
        except Exception as e:
            print(f"Error querying events: {e}")
            return None

    def get_all_events_paginated(
        self,
        event_filter: EventFilter,
        max_pages: int = 50,
        limit: int = 50
    ) -> List[Dict[str, Any]]:
        """Walk the cursor chain, collecting up to max_pages pages of events."""
        collected: List[Dict[str, Any]] = []
        cursor = None
        for page_index in range(max_pages):
            page = self.query_events(
                event_filter,
                cursor=cursor,
                limit=limit,
                descending=True
            )
            # A failed or empty page ends the walk.
            if not page or not page['data']:
                break
            collected.extend(page['data'])
            print(f"Page {page_index + 1}: {len(page['data'])} events")
            if not page['hasNextPage']:
                break
            cursor = page['nextCursor']
        print(f"Total events retrieved: {len(collected)}")
        return collected

    def get_package_events(self, package_id: str, limit: int = 100) -> List[Dict[str, Any]]:
        """Events emitted by any module in the given package."""
        page = self.query_events(EventFilter(package=package_id), limit=limit)
        return page['data'] if page else []

    def get_sender_events(self, sender: str, limit: int = 100) -> List[Dict[str, Any]]:
        """Events emitted by transactions signed by `sender`."""
        page = self.query_events(EventFilter(sender=sender), limit=limit)
        return page['data'] if page else []

    def get_event_type_events(self, event_type: str, limit: int = 100) -> List[Dict[str, Any]]:
        """Events whose full Move type matches `event_type` exactly."""
        page = self.query_events(EventFilter(move_event_type=event_type), limit=limit)
        return page['data'] if page else []
class EventAnalytics:
    """Offline aggregation over suix_queryEvents results: per-type/sender/
    module counts, hourly buckets, and repeated event-type sequences."""

    def __init__(self, client: SuiEventQueryClient):
        self.client = client

    def analyze_package_activity(
        self,
        package_id: str,
        hours: int = 24
    ) -> Dict[str, Any]:
        """Analyze event activity for a package.

        Fetches up to 20 pages of package events, keeps those inside the
        last `hours` hours, and returns JSON-serializable count maps plus
        per-hour trend buckets (newest window first).
        """
        events = self.client.get_all_events_paginated(
            EventFilter(package=package_id),
            max_pages=20
        )
        if not events:
            return {'error': 'No events found for package', 'package_id': package_id}
        # Filter by time range. timestampMs is a decimal string in the
        # response schema; missing timestamps default to '0' and drop out.
        cutoff_time = int((datetime.now() - timedelta(hours=hours)).timestamp() * 1000)
        recent_events = [
            event for event in events
            if int(event.get('timestampMs', '0')) > cutoff_time
        ]
        analysis = {
            'package_id': package_id,
            'total_events': len(recent_events),
            'time_range_hours': hours,
            'event_types': Counter(),
            'sender_activity': defaultdict(lambda: {'event_count': 0, 'event_types': set()}),
            'module_activity': Counter(),
            'hourly_distribution': defaultdict(int),
            'recent_trends': []
        }
        for event in recent_events:
            # Event type analysis
            event_type = event['type']
            analysis['event_types'][event_type] += 1
            # Sender analysis
            sender = event['sender']
            analysis['sender_activity'][sender]['event_count'] += 1
            analysis['sender_activity'][sender]['event_types'].add(event_type)
            # Module analysis
            module = event['transactionModule']
            analysis['module_activity'][module] += 1
            # Hourly distribution (local-time hour of day, 0-23)
            timestamp = int(event.get('timestampMs', '0'))
            if timestamp > 0:
                hour = datetime.fromtimestamp(timestamp / 1000).hour
                analysis['hourly_distribution'][hour] += 1
        # Convert sets to counts for JSON serialization
        for sender_data in analysis['sender_activity'].values():
            sender_data['unique_event_types'] = len(sender_data['event_types'])
            del sender_data['event_types']
        # Calculate hourly trends: one window per hour, walking back from now.
        time_window_ms = 60 * 60 * 1000  # 1 hour
        current_time = int(datetime.now().timestamp() * 1000)
        for i in range(hours):
            window_end = current_time - (i * time_window_ms)
            window_start = window_end - time_window_ms
            window_events = [
                event for event in recent_events
                if window_start <= int(event.get('timestampMs', '0')) < window_end
            ]
            analysis['recent_trends'].append({
                'hour': datetime.fromtimestamp(window_start / 1000).isoformat(),
                'event_count': len(window_events),
                'unique_types': len(set(event['type'] for event in window_events))
            })
        # Convert Counters to regular dicts
        analysis['event_types'] = dict(analysis['event_types'])
        analysis['module_activity'] = dict(analysis['module_activity'])
        analysis['hourly_distribution'] = dict(analysis['hourly_distribution'])
        analysis['sender_activity'] = dict(analysis['sender_activity'])
        return analysis

    def find_event_patterns(
        self,
        event_filter: EventFilter,
        pattern_size: int = 3
    ) -> Dict[str, Any]:
        """Find repeating patterns in event sequences.

        A "pattern" is a run of `pattern_size` consecutive event types from
        the same sender (events time-sorted per sender). Only patterns seen
        more than once are returned, most frequent first.
        """
        events = self.client.get_all_events_paginated(event_filter, max_pages=50)
        if len(events) < pattern_size:
            return {
                'patterns': [],
                'analysis': 'Insufficient events for pattern analysis'
            }
        # Group events by sender
        sender_sequences = defaultdict(list)
        for event in events:
            sender_sequences[event['sender']].append(event)
        # Sort each sender's events by timestamp
        for sender_events in sender_sequences.values():
            sender_events.sort(key=lambda e: int(e.get('timestampMs', '0')))
        patterns = defaultdict(lambda: {
            'pattern': '',
            'count': 0,
            'senders': set(),
            'examples': []
        })
        # Find patterns within each sender's sequence (sliding window)
        for sender, sender_events in sender_sequences.items():
            if len(sender_events) < pattern_size:
                continue
            for i in range(len(sender_events) - pattern_size + 1):
                pattern_events = sender_events[i:i + pattern_size]
                pattern = ' -> '.join([event['type'] for event in pattern_events])
                pattern_data = patterns[pattern]
                pattern_data['pattern'] = pattern
                pattern_data['count'] += 1
                pattern_data['senders'].add(sender)
                # Keep at most 3 concrete examples per pattern.
                if len(pattern_data['examples']) < 3:
                    pattern_data['examples'].append({
                        'sender': sender,
                        'events': [
                            {
                                'type': event['type'],
                                'timestamp': event.get('timestampMs'),
                                'tx_digest': event['id']['txDigest']
                            }
                            for event in pattern_events
                        ]
                    })
        # Convert to list and filter/sort.
        # NOTE(review): list(set)[:10] makes the truncated senders list
        # arbitrary in order — fine for examples, not for stable output.
        pattern_list = [
            {
                **data,
                'unique_senders': len(data['senders']),
                'senders': list(data['senders'])[:10]  # Limit senders list
            }
            for pattern, data in patterns.items()
            if data['count'] > 1  # Only patterns that repeat
        ]
        pattern_list.sort(key=lambda x: x['count'], reverse=True)
        return {
            'patterns': pattern_list,
            'analysis': {
                'total_unique_patterns': len(pattern_list),
                'most_common_pattern': pattern_list[0]['pattern'] if pattern_list else 'None',
                'avg_pattern_frequency': (
                    sum(p['count'] for p in pattern_list) / len(pattern_list)
                    if pattern_list else 0
                )
            }
        }

    def compare_event_activity(
        self,
        filters: Dict[str, EventFilter],
        time_range_hours: int = 24
    ) -> Dict[str, Any]:
        """Compare event activity across different filters.

        Runs the same summary stats for each named filter; a failing filter
        is reported inline as {'error': ...} instead of aborting the loop.
        """
        comparisons = {}
        for name, event_filter in filters.items():
            try:
                events = self.client.get_all_events_paginated(event_filter, max_pages=10)
                cutoff_time = int((datetime.now() - timedelta(hours=time_range_hours)).timestamp() * 1000)
                recent_events = [
                    event for event in events
                    if int(event.get('timestampMs', '0')) > cutoff_time
                ]
                unique_senders = set(event['sender'] for event in recent_events)
                event_type_counts = Counter(event['type'] for event in recent_events)
                comparisons[name] = {
                    'total_events': len(recent_events),
                    'unique_event_types': len(event_type_counts),
                    'unique_senders': len(unique_senders),
                    'avg_events_per_sender': len(recent_events) / len(unique_senders) if unique_senders else 0,
                    'top_event_types': [
                        {'type': event_type, 'count': count}
                        for event_type, count in event_type_counts.most_common(5)
                    ]
                }
            except Exception as e:
                comparisons[name] = {'error': str(e)}
        return comparisons
class EventMonitor:
    """Polling-based event monitor.

    Runs a daemon thread that re-queries the filter every
    `interval_seconds` and dispatches newly-seen events to registered
    handlers. Not a subscription: latency is bounded by the poll interval,
    and bursts larger than the 100-event poll window may be missed.
    """

    def __init__(self, client: SuiEventQueryClient):
        self.client = client
        # event type string (or '*') -> list of handler callables
        self.event_handlers = defaultdict(list)
        # Most recent event id {'txDigest': ..., 'eventSeq': ...} delivered.
        self.last_event_id = None
        self.is_monitoring = False
        self.monitor_thread = None

    def on_event(self, event_type: str, handler: Callable):
        """Register event handler for specific event type or '*' for all events"""
        self.event_handlers[event_type].append(handler)

    def start_monitoring(
        self,
        event_filter: EventFilter,
        interval_seconds: int = 10,
        max_duration_minutes: int = 60
    ):
        """Start monitoring for new events.

        Idempotent while running. The loop stops after
        `max_duration_minutes` or when stop_monitoring() is called.
        """
        if self.is_monitoring:
            print("Monitoring already in progress")
            return
        print(f"Starting event monitoring (max {max_duration_minutes} minutes)")
        self.is_monitoring = True
        # Get initial state: remember the newest existing event so history
        # is not replayed to handlers.
        try:
            initial_result = self.client.query_events(
                event_filter,
                limit=1,
                descending=True
            )
            if initial_result and initial_result['data']:
                self.last_event_id = initial_result['data'][0]['id']
        except Exception as e:
            print(f"Error getting initial event state: {e}")

        def monitor_loop():
            # Runs on the daemon thread until stopped or timed out.
            start_time = time.time()
            max_duration = max_duration_minutes * 60
            while self.is_monitoring and (time.time() - start_time) < max_duration:
                try:
                    # Newest-first, so scanning from the front until
                    # last_event_id yields exactly the unseen events.
                    result = self.client.query_events(
                        event_filter,
                        limit=100,
                        descending=True
                    )
                    if result and result['data']:
                        new_events = []
                        for event in result['data']:
                            # Stop when we reach previously seen events
                            if (self.last_event_id and
                                    event['id']['txDigest'] == self.last_event_id['txDigest'] and
                                    event['id']['eventSeq'] == self.last_event_id['eventSeq']):
                                break
                            new_events.append(event)
                        if new_events:
                            # new_events[0] is the newest; deliver oldest-first.
                            self.last_event_id = new_events[0]['id']
                            self._process_new_events(list(reversed(new_events)))
                    time.sleep(interval_seconds)
                except Exception as e:
                    # Keep polling after transient failures.
                    print(f"Error in event monitoring: {e}")
                    time.sleep(interval_seconds)
            self.is_monitoring = False
            print("Event monitoring completed")

        self.monitor_thread = threading.Thread(target=monitor_loop, daemon=True)
        self.monitor_thread.start()

    def _process_new_events(self, events: List[Dict[str, Any]]):
        """Process new events and call registered handlers.

        Type-specific handlers run before '*' handlers; a handler raising
        is logged and does not block delivery of the remaining events.
        """
        for event in events:
            print(f"📡 New event: {event['type']}")
            # Call handlers for specific event type
            handlers = self.event_handlers.get(event['type'], [])
            # Call handlers for all events
            all_handlers = self.event_handlers.get('*', [])
            for handler in handlers + all_handlers:
                try:
                    handler(event)
                except Exception as e:
                    print(f"Error in event handler: {e}")

    def stop_monitoring(self):
        """Stop event monitoring (waits up to 5s for the poll thread)."""
        self.is_monitoring = False
        if self.monitor_thread:
            self.monitor_thread.join(timeout=5)
        print("Event monitoring stopped")
class EventDashboard:
    """Periodically refreshed activity summary for a set of packages,
    built on EventAnalytics.analyze_package_activity (last-hour window)."""

    def __init__(self, client: SuiEventQueryClient):
        self.client = client
        self.analytics = EventAnalytics(client)
        # Latest snapshot; per-package analyses plus recomputed global stats.
        self.dashboard_data = {
            'packages': {},
            'global_stats': {
                'total_events': 0,
                'total_unique_types': 0,
                'total_unique_senders': 0,
                'last_update': None
            }
        }
        self.is_updating = False
        self.update_thread = None

    def start_dashboard(
        self,
        package_ids: List[str],
        update_interval_seconds: int = 30
    ):
        """Start real-time event dashboard.

        Spawns a daemon thread that refreshes every
        `update_interval_seconds`; idempotent while running.
        """
        if self.is_updating:
            print("Dashboard already running")
            return
        print(f"Starting event dashboard for {len(package_ids)} packages")
        self.is_updating = True

        def update_loop():
            while self.is_updating:
                try:
                    self._update_dashboard(package_ids)
                    time.sleep(update_interval_seconds)
                except Exception as e:
                    # Keep the loop alive through transient failures.
                    print(f"Error updating dashboard: {e}")
                    time.sleep(update_interval_seconds)

        self.update_thread = threading.Thread(target=update_loop, daemon=True)
        self.update_thread.start()
        # Initial update (synchronous, so callers see data immediately)
        self._update_dashboard(package_ids)

    def _update_dashboard(self, package_ids: List[str]):
        """Update dashboard data.

        Global stats are rebuilt from scratch each pass, so they reflect
        the current state rather than accumulating across updates.
        """
        print("📊 Updating event dashboard...")
        global_types = set()
        global_senders = set()
        total_events = 0
        for package_id in package_ids:
            try:
                analysis = self.analytics.analyze_package_activity(package_id, 1)  # Last 1 hour
                self.dashboard_data['packages'][package_id] = analysis
                if not analysis.get('error'):
                    total_events += analysis.get('total_events', 0)
                    global_types.update(analysis.get('event_types', {}).keys())
                    global_senders.update(analysis.get('sender_activity', {}).keys())
            except Exception as e:
                # One failing package should not take down the whole refresh.
                print(f"Error updating package {package_id}: {e}")
                self.dashboard_data['packages'][package_id] = {'error': str(e)}
        # Update global stats
        self.dashboard_data['global_stats'] = {
            'total_events': total_events,
            'total_unique_types': len(global_types),
            'total_unique_senders': len(global_senders),
            'last_update': datetime.now().isoformat()
        }
        # Log summary
        stats = self.dashboard_data['global_stats']
        print(f"Dashboard updated:")
        print(f" Total Events: {stats['total_events']}")
        print(f" Unique Event Types: {stats['total_unique_types']}")
        print(f" Unique Senders: {stats['total_unique_senders']}")

    def get_dashboard_data(self) -> Dict[str, Any]:
        """Get current dashboard data (shallow copy of the snapshot)."""
        return self.dashboard_data.copy()

    def stop_dashboard(self):
        """Stop dashboard updates (waits up to 5s for the update thread)."""
        self.is_updating = False
        if self.update_thread:
            self.update_thread.join(timeout=5)
        print("Event dashboard stopped")
# Usage examples
# Replace YOUR_API_KEY with a real key before running.
client = SuiEventQueryClient('https://sui-mainnet.dwellir.com/YOUR_API_KEY')

# Example 1: Query package events
package_events = client.get_package_events('0x2', limit=50)
print(f"Found {len(package_events)} events from package 0x2")

# Example 2: Query specific event type
coin_events = client.get_event_type_events('0x2::coin::CoinCreated', limit=30)
print(f"Found {len(coin_events)} coin creation events")

# Example 3: Event analytics over the last 6 hours
analytics = EventAnalytics(client)
analysis = analytics.analyze_package_activity('0x2', hours=6)
print(f"\nPackage Analysis:")
print(f"Total events: {analysis.get('total_events', 0)}")
print(f"Unique event types: {len(analysis.get('event_types', {}))}")

# Example 4: Find patterns (repeating 3-event type sequences per sender)
patterns = analytics.find_event_patterns(EventFilter(package='0x2'))
print(f"\nFound {len(patterns['patterns'])} event patterns")
for pattern in patterns['patterns'][:3]:
    print(f" {pattern['pattern']}: {pattern['count']} occurrences")

# Example 5: Event monitoring — handlers must be registered before starting
monitor = EventMonitor(client)

def on_coin_event(event):
    # Handler for one specific event type.
    print(f"💰 Coin event: {event['type']}")
    if event.get('parsedJson'):
        print(f" Data: {event['parsedJson']}")

def on_any_event(event):
    # Wildcard handler: called for every new event.
    print(f"📨 Event: {event['type']} from {event['sender'][:16]}...")

monitor.on_event('0x2::coin::CoinCreated', on_coin_event)
monitor.on_event('*', on_any_event)
# Monitor package events for 5 minutes (runs on a daemon thread)
monitor.start_monitoring(
    EventFilter(package='0x2'),
    interval_seconds=10,
    max_duration_minutes=5
)

# Example 6: Dashboard
dashboard = EventDashboard(client)
dashboard.start_dashboard(['0x2', '0x1'], update_interval_seconds=60)
# Stop dashboard after 10 minutes (sleep blocks the main thread meanwhile)
time.sleep(600)
dashboard.stop_dashboard()
Response Example​
{
"jsonrpc": "2.0",
"id": 1,
"result": {
"data": [
{
"id": {
"txDigest": "0x8c123c0b23c456789abcdef0123456789abcdef0123456789abcdef0123456789a",
"eventSeq": "0"
},
"packageId": "0x2",
"transactionModule": "coin",
"sender": "0xd77955e670601c2c2e6e8637e383695c166aac0a86b741c266bdfb23c2e3369f",
"type": "0x2::coin::CoinCreated",
"parsedJson": {
"coinType": "0x2::sui::SUI",
"creator": "0xd77955e670601c2c2e6e8637e383695c166aac0a86b741c266bdfb23c2e3369f",
"initialSupply": "1000000000"
},
"bcs": "AQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiM=",
"timestampMs": "1703097600000"
},
{
"id": {
"txDigest": "0x7d456e78f90123456789abcdef0123456789abcdef0123456789abcdef012345",
"eventSeq": "1"
},
"packageId": "0x549e8b69270defbfafd4f94e17ec44cdbdd99820b33bda2278dea3b9a32d3f55",
"transactionModule": "dex",
"sender": "0x1a2b3c4d5e6f789012345678901234567890123456789012345678901234567890",
"type": "0x549e8b69270defbfafd4f94e17ec44cdbdd99820b33bda2278dea3b9a32d3f55::dex::SwapExecuted",
"parsedJson": {
"trader": "0x1a2b3c4d5e6f789012345678901234567890123456789012345678901234567890",
"tokenIn": "0x2::sui::SUI",
"tokenOut": "0xdba34672e30cb065b1f93e3ab55318768fd6fef66c15942c9f7cb846e2f900e7::usdc::USDC",
"amountIn": "500000000",
"amountOut": "125000",
"feeAmount": "1500000"
},
"bcs": "HGERFOWPgergpowejrgpowejrgpowejrgpowerjgpowerjgpowejrgp",
"timestampMs": "1703097610000"
}
],
"nextCursor": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...",
"hasNextPage": true
}
}
Common Use Cases​
1. DeFi Protocol Monitoring​
/**
 * Subscribe to swap and liquidity events from a DEX package and log them,
 * flagging unusually large swaps.
 *
 * @param {string} dexPackageId - Package ID of the DEX to monitor.
 * @returns {Promise<void>} Resolves when monitoring has been started.
 */
async function monitorDEXActivity(dexPackageId) {
  const monitor = new EventMonitor(client);

  // Alert threshold in MIST (1 SUI = 1e9 MIST). BigInt is used because
  // on-chain amounts arrive as decimal strings that can exceed
  // Number.MAX_SAFE_INTEGER, where parseInt would silently lose precision.
  const LARGE_TRADE_THRESHOLD = 1000000000n;

  // Monitor swap events
  monitor.onEvent(`${dexPackageId}::dex::SwapExecuted`, async (event) => {
    const swap = event.parsedJson;
    console.log(`🔄 Swap: ${swap.amountIn} ${swap.tokenIn} -> ${swap.amountOut} ${swap.tokenOut}`);

    // Check for large trades
    if (BigInt(swap.amountIn) > LARGE_TRADE_THRESHOLD) {
      console.log('🚨 Large trade detected!');
      // Send alert to monitoring system
    }
  });

  // Monitor liquidity events
  monitor.onEvent(`${dexPackageId}::pool::LiquidityAdded`, async (event) => {
    console.log('💧 Liquidity added:', event.parsedJson);
  });

  await monitor.startMonitoring({ Package: dexPackageId });
}
2. NFT Marketplace Tracking​
/**
 * Aggregate sale statistics for a marketplace package from its ItemSold events.
 *
 * @param {string} marketplacePackage - Package ID of the marketplace.
 * @returns {Promise<object>} Totals, average price, per-collection counts
 *   and a coarse price distribution (prices in SUI).
 */
async function trackNFTSales(marketplacePackage) {
  const analytics = new EventAnalytics(client);
  const salesEvents = await analytics.client.getEventsByType(
    `${marketplacePackage}::marketplace::ItemSold`
  );

  const salesAnalysis = {
    totalSales: salesEvents.length,
    totalVolume: 0,
    averagePrice: 0,
    topCollections: {},
    priceDistribution: { under1: 0, under10: 0, under100: 0, over100: 0 }
  };

  salesEvents.forEach((event) => {
    const sale = event.parsedJson;
    const price = Number(sale.price) / 1000000000; // Convert MIST to SUI
    salesAnalysis.totalVolume += price;

    // Count sales per collection
    const collection = sale.collection || 'Unknown';
    salesAnalysis.topCollections[collection] =
      (salesAnalysis.topCollections[collection] || 0) + 1;

    // Coarse price buckets (in SUI)
    if (price < 1) salesAnalysis.priceDistribution.under1++;
    else if (price < 10) salesAnalysis.priceDistribution.under10++;
    else if (price < 100) salesAnalysis.priceDistribution.under100++;
    else salesAnalysis.priceDistribution.over100++;
  });

  // Bug fix: the original divided unconditionally, yielding NaN when no
  // sales matched the query.
  salesAnalysis.averagePrice = salesAnalysis.totalSales > 0
    ? salesAnalysis.totalVolume / salesAnalysis.totalSales
    : 0;
  return salesAnalysis;
}
3. User Activity Analysis​
/**
 * Summarize on-chain activity for one sender address.
 *
 * @param {string} userAddress - Sender address to analyze.
 * @returns {Promise<object>} Event count, contract usage, per-type counts,
 *   and an hour-of-day activity histogram.
 */
async function analyzeUserActivity(userAddress) {
  // Pull every event sent by this address, 20 per page.
  const userEvents = await getAllEventsPaginated(
    { Sender: userAddress },
    20
  );

  const contractsSeen = new Set();
  const contractUsage = {};
  const typeCounts = {};
  const hourlyCounts = {};

  for (const evt of userEvents) {
    // Which packages this user touched, and how often
    contractsSeen.add(evt.packageId);
    contractUsage[evt.packageId] = (contractUsage[evt.packageId] || 0) + 1;

    // Per-event-type frequency
    typeCounts[evt.type] = (typeCounts[evt.type] || 0) + 1;

    // Hour-of-day histogram (skips events without a timestamp)
    const ts = parseInt(evt.timestampMs || '0');
    if (ts > 0) {
      const hour = new Date(ts).getHours();
      hourlyCounts[hour] = (hourlyCounts[hour] || 0) + 1;
    }
  }

  return {
    totalTransactions: userEvents.length,
    uniqueContracts: contractsSeen.size,
    activityByHour: hourlyCounts,
    mostUsedContracts: contractUsage,
    eventTypes: typeCounts
  };
}
4. Smart Contract Audit Trail​
/**
 * Build an audit trail for the given modules of a package: event counts,
 * unique senders, admin/upgrade actions, and activity from the last 24h.
 *
 * @param {string} packageId - Package to audit.
 * @param {string[]} moduleNames - Module names within the package.
 * @returns {Promise<object>} Audit trail keyed by module, plus package-wide
 *   adminActions / upgradeEvents / totalInteractions.
 */
async function auditSmartContract(packageId, moduleNames) {
  const auditTrail = {
    package: packageId,
    modules: {},
    securityEvents: [],
    adminActions: [],
    upgradeEvents: [],
    totalInteractions: 0
  };

  // Hoisted: the original recomputed this cutoff once per event.
  const dayAgo = Date.now() - 24 * 60 * 60 * 1000;

  for (const moduleName of moduleNames) {
    const moduleEvents = await getAllEventsPaginated({
      MoveModule: { package: packageId, module: moduleName }
    }, 50);

    const moduleAudit = {
      totalEvents: moduleEvents.length,
      uniqueSenders: new Set(moduleEvents.map((e) => e.sender)).size,
      eventTypes: {},
      recentActivity: []
    };
    auditTrail.modules[moduleName] = moduleAudit;

    for (const event of moduleEvents) {
      auditTrail.totalInteractions++;

      // Short event name, e.g. "0x2::coin::Deposit" -> "Deposit"
      const eventType = event.type.split('::').pop();
      const lowered = eventType.toLowerCase(); // computed once, not 4x
      moduleAudit.eventTypes[eventType] =
        (moduleAudit.eventTypes[eventType] || 0) + 1;

      // Security-relevant events: admin/owner actions and upgrades
      if (lowered.includes('admin') ||
          lowered.includes('owner') ||
          lowered.includes('upgrade')) {
        const record = {
          timestamp: event.timestampMs,
          event: eventType,
          sender: event.sender,
          data: event.parsedJson
        };
        if (lowered.includes('upgrade')) {
          auditTrail.upgradeEvents.push(record);
        } else {
          auditTrail.adminActions.push(record);
        }
      }

      // Recent activity (last 24 hours)
      if (parseInt(event.timestampMs || '0', 10) > dayAgo) {
        moduleAudit.recentActivity.push({
          timestamp: event.timestampMs,
          type: eventType,
          sender: event.sender
        });
      }
    }
  }
  return auditTrail;
}
Best Practices​
1. Efficient Event Filtering​
// Use specific filters to reduce data transfer and improve performance
const specificFilter = {
  And: [
    { Package: '0x2' },
    { MoveEventType: '0x2::coin::CoinCreated' }
  ]
};

// Avoid overly broad queries (typo fix: was `tooBoard`)
const tooBroad = { All: [] }; // Can return massive amounts of data
2. Event Data Parsing​
/**
 * Safely extract the commonly-used fields from a raw event object.
 *
 * @param {object|null|undefined} event - Raw event from suix_queryEvents.
 * @returns {object} Normalized record, or a fallback with `error` set when
 *   the event is missing or malformed.
 */
function safeParseEventData(event) {
  try {
    return {
      type: event.type,
      data: event.parsedJson,
      timestamp: parseInt(event.timestampMs || '0', 10),
      sender: event.sender,
      package: event.packageId,
      module: event.transactionModule
    };
  } catch (error) {
    console.warn('Failed to parse event:', error);
    return {
      // Bug fix: `event.type` here rethrew when `event` itself was
      // null/undefined -- optional chaining keeps the fallback path safe.
      type: event?.type ?? 'Unknown',
      data: null,
      error: error.message
    };
  }
}
3. Performance Optimization​
// Use pagination for large datasets
/**
 * Page through queryEvents until `maxEvents` are collected or the result
 * set is exhausted, with a small inter-batch delay to spare the RPC node.
 *
 * @param {object} filter - suix_queryEvents filter object.
 * @param {number} [maxEvents=10000] - Upper bound on events returned.
 * @returns {Promise<Array>} Collected event objects (newest first).
 */
async function efficientEventQuery(filter, maxEvents = 10000) {
  const events = [];
  let cursor = null;

  while (events.length < maxEvents) {
    const batchSize = Math.min(100, maxEvents - events.length);
    const result = await client.queryEvents({
      query: filter,
      cursor: cursor,
      limit: batchSize,
      // Bug fix: the API parameter is `descendingOrder` (boolean), not
      // `order: 'descending'` -- see this method's Parameters table.
      descendingOrder: true
    });

    if (!result.data.length) break;
    events.push(...result.data);

    if (!result.hasNextPage) break;
    cursor = result.nextCursor;

    // Small delay to prevent overwhelming the RPC
    await new Promise((resolve) => setTimeout(resolve, 100));
  }
  return events;
}
Error Handling​
/**
 * Query events with retries and exponential backoff.
 *
 * @param {object} filter - suix_queryEvents filter.
 * @param {number} [retries=3] - Maximum number of attempts (>= 1).
 * @returns {Promise<{success: boolean, data?: object, error?: string, retryExhausted?: boolean}>}
 */
async function robustEventQuery(filter, retries = 3) {
  // Bug fix: the original fell off the loop and resolved to `undefined`
  // when retries <= 0; return an explicit failure result instead.
  if (retries < 1) {
    return { success: false, error: 'retries must be at least 1', retryExhausted: false };
  }

  for (let i = 0; i < retries; i++) {
    try {
      const result = await client.queryEvents({ query: filter });
      return { success: true, data: result };
    } catch (error) {
      console.warn(`Query attempt ${i + 1} failed:`, error.message);
      if (i === retries - 1) {
        return {
          success: false,
          error: error.message,
          retryExhausted: true
        };
      }
      // Exponential backoff: 1s, 2s, 4s, ...
      await new Promise((resolve) =>
        setTimeout(resolve, Math.pow(2, i) * 1000)
      );
    }
  }
}
Related Methods​
- sui_getTransactionBlock - Get transaction details including events
- suix_queryTransactionBlocks - Query transactions that emitted events
- sui_getObject - Get object details related to events
Notes​
- Events are immutable once emitted by transactions
- Pagination is essential for large result sets to avoid timeouts
- Event data structure depends on the Move module that emitted it
- Timestamps are in milliseconds since Unix epoch
- BCS encoding provides raw event data for custom parsing
- Filter combinations using And/Or can create complex queries
Need help? Contact our support team or check the Sui documentation.