state_getKeysPaged - JSON-RPC Method
Description
Returns storage keys matching a prefix with pagination support. This JSON-RPC method is essential for querying large storage maps without overwhelming the node or client with massive result sets. It allows iterating through storage keys in manageable chunks.
Parameters
Parameter | Type | Required | Description |
---|---|---|---|
prefix | string | Yes | Hex-encoded storage key prefix to match |
count | number | Yes | Maximum number of keys to return |
startKey | string | No | Hex-encoded key to start from (exclusive). Used for pagination |
blockHash | string | No | Block hash to query at. If omitted, uses the latest block |
Returns
Field | Type | Description |
---|---|---|
result | array | Array of hex-encoded storage keys matching the prefix |
Request Example
{
"jsonrpc": "2.0",
"method": "state_getKeysPaged",
"params": [
"0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9",
10,
null
],
"id": 1
}
Response Example
{
"jsonrpc": "2.0",
"result": [
"0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da900000001",
"0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da900000002",
"0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da900000003"
],
"id": 1
}
Code Examples
JavaScript
const getKeysPagedRPC = async (prefix, count, startKey = null, blockHash = null) => {
const params = blockHash
? [prefix, count, startKey, blockHash]
: [prefix, count, startKey];
const response = await fetch('https://api-polkadot.n.dwellir.com', {
method: 'POST',
headers: {
'Authorization': 'Bearer YOUR_API_KEY',
'Content-Type': 'application/json'
},
body: JSON.stringify({
jsonrpc: '2.0',
method: 'state_getKeysPaged',
params: params,
id: 1
})
});
const data = await response.json();
return data.result;
};
// Paginate through all keys
async function getAllKeysPaginated(prefix, pageSize = 100) {
let allKeys = [];
let startKey = null;
let hasMore = true;
while (hasMore) {
const keys = await getKeysPagedRPC(prefix, pageSize, startKey);
if (keys.length === 0) {
hasMore = false;
break;
}
allKeys = allKeys.concat(keys);
// Use last key as start for next page
startKey = keys[keys.length - 1];
console.log(`Fetched ${keys.length} keys, total: ${allKeys.length}`);
// Stop if we got less than requested
if (keys.length < pageSize) {
hasMore = false;
}
}
return allKeys;
}
// Example: Paginate through accounts
// NOTE(review): this constant is presumably the system.account storage prefix —
// verify against the chain's metadata before relying on it.
const accountPrefix = '0x26aa394eea5630e07c48ae0c9558cef7b99d880ec681799c0cf30e8886371da9';
// Top-level await: this snippet must run in an ES-module (or async) context.
const allAccounts = await getAllKeysPaginated(accountPrefix, 50);
console.log(`Total accounts found: ${allAccounts.length}`);
Python
import requests
import json
from typing import List, Optional
class StorageKeyPaginator:
    """Iterates Substrate storage keys via the state_getKeysPaged JSON-RPC method."""

    def __init__(self, rpc_url: str, api_key: str):
        # Endpoint and auth headers are fixed for the paginator's lifetime.
        self.rpc_url = rpc_url
        self.headers = {
            "Authorization": f"Bearer {api_key}",
            "Content-Type": "application/json"
        }

    def get_keys_paged(
        self,
        prefix: str,
        count: int,
        start_key: Optional[str] = None,
        block_hash: Optional[str] = None,
        timeout: float = 30.0
    ) -> List[str]:
        """Fetch one page of storage keys matching ``prefix``.

        Args:
            prefix: Hex-encoded storage key prefix to match.
            count: Maximum number of keys to return.
            start_key: Exclusive pagination cursor; results start after it.
            block_hash: Optional block hash to query at (latest when omitted).
            timeout: HTTP timeout in seconds; without one, requests can hang forever.

        Returns:
            List of hex-encoded storage keys.

        Raises:
            requests.HTTPError: On non-2xx HTTP responses.
            RuntimeError: If the node returns a JSON-RPC error object.
        """
        # block_hash is positional and must come last, so only append it when set.
        params = [prefix, count, start_key]
        if block_hash:
            params.append(block_hash)
        payload = {
            "jsonrpc": "2.0",
            "method": "state_getKeysPaged",
            "params": params,
            "id": 1
        }
        # `json=` serializes the payload; explicit headers keep the auth token.
        response = requests.post(
            self.rpc_url,
            headers=self.headers,
            json=payload,
            timeout=timeout
        )
        response.raise_for_status()
        body = response.json()
        # A JSON-RPC failure has an "error" member instead of "result";
        # surface it explicitly rather than crashing with a KeyError.
        if "error" in body:
            raise RuntimeError(f"RPC error: {body['error']}")
        return body["result"]

    def iterate_all_keys(self, prefix: str, page_size: int = 100):
        """Generator that yields all keys matching prefix, one page at a time."""
        start_key = None
        while True:
            keys = self.get_keys_paged(prefix, page_size, start_key)
            if not keys:
                break
            for key in keys:
                yield key
            # A short page means the map is exhausted; skip the extra round trip.
            if len(keys) < page_size:
                break
            # Last key of this page is the exclusive cursor for the next.
            start_key = keys[-1]

    def get_all_keys(self, prefix: str, page_size: int = 100) -> List[str]:
        """Get all keys matching ``prefix`` as a single list."""
        return list(self.iterate_all_keys(prefix, page_size))
# Usage example
# Build a paginator pointed at the Dwellir Polkadot endpoint.
paginator = StorageKeyPaginator(
    "https://api-polkadot.n.dwellir.com",
    "YOUR_API_KEY"
)
# Get validator keys with pagination
# NOTE(review): prefix presumably targets a staking storage map — verify
# against the chain's metadata before relying on it.
validator_prefix = "0x5f3e4907f716ac89b6347d15ececedca9320c2dc4f5d7af5b320b04e2d1a3ff3"
# Method 1: Get all at once
all_validators = paginator.get_all_keys(validator_prefix, page_size=50)
print(f"Total validators: {len(all_validators)}")
# Method 2: Process in batches
for i, key in enumerate(paginator.iterate_all_keys(validator_prefix, page_size=10)):
    if i >= 100:  # Process first 100 only
        break
    # NOTE(review): assumes the trailing 64 hex chars are the account part — confirm
    print(f"Validator {i}: {key[-64:]}")  # Print account part
TypeScript (@polkadot/api)
import { ApiPromise, WsProvider } from '@polkadot/api';
/**
 * Walks paged storage keys through @polkadot/api's state.getKeysPaged RPC.
 */
class StorageKeysPaginator {
  constructor(private readonly api: ApiPromise) {}

  /** Yields every hex-encoded key under `prefix`, fetched `pageSize` at a time. */
  async *iterateKeys(
    prefix: string,
    pageSize: number = 100
  ): AsyncGenerator<string> {
    let cursor: string | null = null;
    for (;;) {
      const page = await this.api.rpc.state.getKeysPaged(prefix, pageSize, cursor);
      if (page.length === 0) {
        break; // nothing left under this prefix
      }
      for (const k of page) {
        yield k.toHex();
      }
      // A short page means the map is exhausted — skip the extra round trip.
      if (page.length < pageSize) {
        break;
      }
      // Last key of this page is the exclusive cursor for the next.
      cursor = page[page.length - 1].toHex();
    }
  }

  /** Collects every matching key into a single array. */
  async getAllKeys(prefix: string, pageSize: number = 100): Promise<string[]> {
    const collected: string[] = [];
    for await (const k of this.iterateKeys(prefix, pageSize)) {
      collected.push(k);
    }
    return collected;
  }

  /** Fetches each key's raw storage value; returns a key -> value map. */
  async getKeysWithValues(
    prefix: string,
    pageSize: number = 100
  ): Promise<Map<string, any>> {
    const valueByKey = new Map();
    for await (const k of this.iterateKeys(prefix, pageSize)) {
      valueByKey.set(k, await this.api.rpc.state.getStorage(k));
    }
    return valueByKey;
  }
}
// Usage
// End-to-end demo: connects to a node, counts every account key, then
// decodes the first ten account records into human-readable balances.
async function paginationExample() {
  const provider = new WsProvider('wss://api-polkadot.n.dwellir.com');
  const api = await ApiPromise.create({ provider });
  const paginator = new StorageKeysPaginator(api);

  // Get account storage prefix
  const prefix = api.query.system.account.keyPrefix();

  // Example 1: Count total keys
  let count = 0;
  for await (const _key of paginator.iterateKeys(prefix, 50)) {
    count += 1;
    // Progress marker every 100 keys so long scans show signs of life.
    if (count % 100 === 0) {
      console.log(`Processed ${count} keys...`);
    }
  }
  console.log(`Total keys: ${count}`);

  // Example 2: Get first 10 accounts with balances
  const accountData = new Map();
  let fetched = 0;
  for await (const key of paginator.iterateKeys(prefix, 10)) {
    const raw = await api.rpc.state.getStorage(key);
    const info = api.createType('AccountInfo', raw);
    accountData.set(key, {
      free: info.data.free.toHuman(),
      reserved: info.data.reserved.toHuman()
    });
    fetched += 1;
    if (fetched >= 10) break;
  }
  console.log('First 10 accounts:', accountData);

  await api.disconnect();
}
Memory-Efficient Processing
// Process large datasets without loading all into memory
async function processLargeStorage(prefix, processor, batchSize = 100) {
let startKey = null;
let totalProcessed = 0;
while (true) {
const keys = await getKeysPagedRPC(prefix, batchSize, startKey);
if (keys.length === 0) break;
// Process batch
for (const key of keys) {
await processor(key);
totalProcessed++;
}
console.log(`Processed batch of ${keys.length}, total: ${totalProcessed}`);
if (keys.length < batchSize) break;
startKey = keys[keys.length - 1];
// Optional: Add delay to avoid overwhelming the node
await new Promise(resolve => setTimeout(resolve, 100));
}
return totalProcessed;
}
// Example processor
const analyzeAccounts = async (key) => {
  // Process each account key
  // NOTE(review): assumes the trailing 64 hex chars are the account part — confirm
  const address = key.substring(key.length - 64);
  // Do something with the address
};
// Scan the whole account map 50 keys at a time, running analyzeAccounts on each.
// Top-level await: this snippet must run in an ES-module (or async) context.
const total = await processLargeStorage(accountPrefix, analyzeAccounts, 50);
console.log(`Processed ${total} accounts`);
Use Cases
- Large-Scale Analysis: Process millions of storage entries
- Data Export: Export blockchain data in batches
- Memory Management: Handle large datasets with limited memory
- Progressive Loading: Load data progressively in UIs
- Background Processing: Process storage in background tasks
Notes
- The startKey parameter is exclusive: results start after this key
- Results are returned in lexicographical order
- An empty result indicates no more keys are available
- Consider rate limiting when processing large datasets
- Use appropriate page sizes based on your use case (10-1000)
Related Methods
- state_getKeys - Get all keys without pagination
- state_getStorage - Get individual storage values
- state_getMetadata - Get metadata to decode storage