sui_multiGetObjects

Efficiently retrieves information about multiple objects on the Sui blockchain in a single request, reducing network overhead and improving application performance.

Overview

The sui_multiGetObjects method is a batch version of sui_getObject, allowing you to query multiple objects simultaneously. This is particularly useful for applications that need to fetch data from numerous objects, such as displaying NFT collections, checking multiple coin balances, or analyzing complex object relationships. The method maintains the same response structure as single object queries but returns an array of results.
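
All of the JavaScript examples below assume a configured client. A minimal setup sketch using the official @mysten/sui TypeScript SDK might look like this (the endpoint URL and YOUR_API_KEY are placeholders for your Dwellir credentials):

// Minimal setup sketch; assumes an ESM context where top-level await is available
import { SuiClient } from '@mysten/sui/client';

const client = new SuiClient({ url: 'https://sui-mainnet.dwellir.com/YOUR_API_KEY' });

// One round trip for several objects instead of one request per object
const results = await client.multiGetObjects({
  ids: [
    '0x5d3c87e88bc566e3f10c66e0275a366001ffa8b86142adc78c744de6afffeb34'
    // ...up to 50 object IDs per request
  ],
  options: { showType: true, showContent: true }
});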

Parameters

| Parameter | Type | Required | Description |
| --- | --- | --- | --- |
| objectIds | array | Yes | Array of object IDs to query (up to 50 objects per request) |
| options | object | No | Options controlling the response content (same as sui_getObject) |

Object IDs Array

  • Maximum of 50 object IDs per request
  • Each ID must be a 0x-prefixed, 64-character hex string (see the validation sketch below)
  • Invalid or non-existent object IDs return error objects
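
These constraints can be checked client-side before sending a request. A minimal sketch, assuming the strict 0x-prefixed, 64-hex-character form described above (the helper name is ours, not part of the API):

// Hypothetical client-side validator for the constraints above
const OBJECT_ID_RE = /^0x[0-9a-fA-F]{64}$/;
const MAX_BATCH_SIZE = 50;

function validateObjectIds(objectIds) {
  if (objectIds.length === 0 || objectIds.length > MAX_BATCH_SIZE) {
    throw new Error(`Expected 1-${MAX_BATCH_SIZE} object IDs, got ${objectIds.length}`);
  }
  const invalid = objectIds.filter(id => !OBJECT_ID_RE.test(id));
  if (invalid.length > 0) {
    throw new Error(`Malformed object IDs: ${invalid.join(', ')}`);
  }
  return objectIds;
}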

Options Object

| Field | Type | Default | Description |
| --- | --- | --- | --- |
| showType | boolean | false | Include the object's Move type information |
| showOwner | boolean | false | Include object ownership details |
| showPreviousTransaction | boolean | false | Include the previous transaction digest |
| showDisplay | boolean | false | Include display metadata if available |
| showContent | boolean | false | Include the object's content fields |
| showBcs | boolean | false | Include BCS-encoded object data |
| showStorageRebate | boolean | false | Include storage rebate information |

Returns

Returns an array with one entry per queried object, in the same order as the request.

| Field | Type | Description |
| --- | --- | --- |
| data | object | The object's data, including content and metadata |
| error | object | Error information if the object doesn't exist or is invalid |

Each array element follows the same structure as sui_getObject responses, containing either a data field with object information or an error field for failed queries.
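
When consuming the array, it is often convenient to split successes from failures up front. A minimal sketch (the helper name is ours, not part of the API):

// Partition a sui_multiGetObjects result array into successes and failures
function partitionResults(results) {
  const ok = [];
  const failed = [];
  for (const result of results) {
    if (result.data) {
      ok.push(result.data);
    } else {
      failed.push(result.error);
    }
  }
  return { ok, failed };
}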

Code Examples

curl -X POST https://sui-mainnet.dwellir.com/YOUR_API_KEY \
  -H "Content-Type: application/json" \
  -d '{
    "jsonrpc": "2.0",
    "method": "sui_multiGetObjects",
    "params": [
      [
        "0x5d3c87e88bc566e3f10c66e0275a366001ffa8b86142adc78c744de6afffeb34",
        "0x1a2b3c4d5e6f7890123456789012345678901234567890123456789012345678",
        "0x9876543210abcdef0123456789abcdef0123456789abcdef0123456789abcdef"
      ],
      {
        "showType": true,
        "showOwner": true,
        "showContent": true,
        "showDisplay": true
      }
    ],
    "id": 1
  }'

Response Example

{
  "jsonrpc": "2.0",
  "id": 1,
  "result": [
    {
      "data": {
        "objectId": "0x5d3c87e88bc566e3f10c66e0275a366001ffa8b86142adc78c744de6afffeb34",
        "version": "31823924",
        "digest": "HpSeCiMLG53N9FcHDrRTxwGhc4RVJa1seZhXYJ7KFpJe",
        "type": "0x2::coin::Coin<0x2::sui::SUI>",
        "owner": {
          "AddressOwner": "0xd77955e670601c2c2e6e8637e383695c166aac0a86b741c266bdfb23c2e3369f"
        },
        "content": {
          "dataType": "moveObject",
          "type": "0x2::coin::Coin<0x2::sui::SUI>",
          "hasPublicTransfer": true,
          "fields": {
            "balance": "1000000000",
            "id": {
              "id": "0x5d3c87e88bc566e3f10c66e0275a366001ffa8b86142adc78c744de6afffeb34"
            }
          }
        }
      }
    },
    {
      "data": {
        "objectId": "0x1a2b3c4d5e6f7890123456789012345678901234567890123456789012345678",
        "version": "12345",
        "digest": "AbC123XyZ456",
        "type": "0x123::nft::NFT",
        "owner": {
          "AddressOwner": "0xd77955e670601c2c2e6e8637e383695c166aac0a86b741c266bdfb23c2e3369f"
        },
        "display": {
          "data": {
            "name": "Sui Warrior #1234",
            "description": "A fierce warrior from the Sui realm",
            "image_url": "https://example.com/nft/1234.png",
            "creator": "Sui Studios"
          }
        },
        "content": {
          "dataType": "moveObject",
          "type": "0x123::nft::NFT",
          "hasPublicTransfer": true,
          "fields": {
            "id": {
              "id": "0x1a2b3c4d5e6f7890123456789012345678901234567890123456789012345678"
            },
            "name": "Sui Warrior #1234",
            "rarity": "Legendary",
            "power": 95
          }
        }
      }
    },
    {
      "error": {
        "code": "objectNotFound",
        "message": "Object 0x9876543210abcdef0123456789abcdef0123456789abcdef0123456789abcdef not found"
      }
    }
  ]
}

Common Use Cases

1. NFT Collection Display

async function displayNFTCollection(nftIds) {
  const results = await client.multiGetObjects({
    ids: nftIds,
    options: {
      showContent: true,
      showDisplay: true,
      showOwner: true
    }
  });

  // Keep only objects that resolved successfully and carry display metadata
  const nfts = results
    .filter(result => result.data && result.data.display)
    .map(result => {
      const data = result.data;
      return {
        id: data.objectId,
        name: data.display.data.name,
        image: data.display.data.image_url,
        description: data.display.data.description,
        owner: data.owner?.AddressOwner,
        attributes: data.content?.fields || {}
      };
    });

  return nfts;
}

2. Portfolio Balance Aggregation

async function getPortfolioBalances(coinObjectIds) {
  const results = await client.multiGetObjects({
    ids: coinObjectIds,
    options: {
      showType: true,
      showContent: true
    }
  });

  const balances = {};

  results.forEach(result => {
    if (!result.data || !result.data.type?.includes('::coin::Coin<')) {
      return;
    }

    const coinType = result.data.type;
    // Coin balances are u64 strings and can exceed Number.MAX_SAFE_INTEGER,
    // so aggregate with BigInt rather than parseInt
    const balance = BigInt(result.data.content?.fields?.balance || '0');

    if (!balances[coinType]) {
      balances[coinType] = {
        type: coinType,
        totalBalance: 0n,
        objectCount: 0,
        objects: []
      };
    }

    balances[coinType].totalBalance += balance;
    balances[coinType].objectCount += 1;
    balances[coinType].objects.push({
      objectId: result.data.objectId,
      balance: balance.toString()
    });
  });

  return balances;
}

3. Object Ownership Verification

async function verifyObjectOwnership(objectIds, expectedOwner) {
  const results = await client.multiGetObjects({
    ids: objectIds,
    options: { showOwner: true }
  });

  const ownership = {
    owned: [],
    notOwned: [],
    errors: []
  };

  results.forEach((result, index) => {
    const objectId = objectIds[index];

    if (result.error) {
      ownership.errors.push({
        objectId,
        error: result.error
      });
      return;
    }

    const owner = result.data?.owner?.AddressOwner;
    if (owner === expectedOwner) {
      ownership.owned.push(objectId);
    } else {
      ownership.notOwned.push({
        objectId,
        actualOwner: owner
      });
    }
  });

  return ownership;
}

4. Batch Object Type Analysis

async function analyzeObjectTypes(objectIds) {
  const results = await client.multiGetObjects({
    ids: objectIds,
    options: {
      showType: true,
      showContent: true
    }
  });

  const analysis = {
    byType: {},
    byPackage: {},
    summary: {
      total: objectIds.length,
      found: 0,
      notFound: 0
    }
  };

  results.forEach(result => {
    if (result.error) {
      analysis.summary.notFound++;
      return;
    }

    analysis.summary.found++;
    const type = result.data?.type || 'Unknown';

    // Analyze by full type
    if (!analysis.byType[type]) {
      analysis.byType[type] = {
        count: 0,
        objects: []
      };
    }
    analysis.byType[type].count++;
    analysis.byType[type].objects.push(result.data.objectId);

    // Analyze by package
    const packageMatch = type.match(/^(0x[a-f0-9]+)::/);
    if (packageMatch) {
      const packageId = packageMatch[1];
      if (!analysis.byPackage[packageId]) {
        analysis.byPackage[packageId] = {
          count: 0,
          types: new Set()
        };
      }
      analysis.byPackage[packageId].count++;
      analysis.byPackage[packageId].types.add(type);
    }
  });

  // Convert Sets to Arrays for JSON serialization
  Object.values(analysis.byPackage).forEach(pkg => {
    pkg.types = Array.from(pkg.types);
  });

  return analysis;
}

Advanced Usage Patterns

1. Chunked Processing for Large Sets

async function processLargeObjectSet(objectIds, chunkSize = 50) {
  const results = [];
  const chunks = [];

  // Split into chunks
  for (let i = 0; i < objectIds.length; i += chunkSize) {
    chunks.push(objectIds.slice(i, i + chunkSize));
  }

  console.log(`Processing ${objectIds.length} objects in ${chunks.length} chunks`);

  // Process chunks with rate limiting
  for (let i = 0; i < chunks.length; i++) {
    const chunk = chunks[i];

    try {
      const chunkResults = await client.multiGetObjects({
        ids: chunk,
        options: {
          showType: true,
          showContent: true,
          showOwner: true
        }
      });

      results.push(...chunkResults);
      console.log(`Completed chunk ${i + 1}/${chunks.length}`);

      // Rate limiting - wait between chunks
      if (i < chunks.length - 1) {
        await new Promise(resolve => setTimeout(resolve, 100));
      }
    } catch (error) {
      console.error(`Error processing chunk ${i + 1}:`, error);
      // Add placeholder errors for the chunk
      results.push(...chunk.map(id => ({
        error: { code: 'chunkError', message: error.message }
      })));
    }
  }

  return results;
}

2. Smart Caching with Selective Updates

class ObjectCache {
  constructor(client) {
    this.client = client;
    this.cache = new Map();
    this.lastFetch = new Map();
  }

  async getObjects(objectIds, options = {}, maxAge = 60000) {
    const now = Date.now();
    const fresh = [];
    const stale = [];

    // Determine which objects need fetching
    objectIds.forEach(id => {
      const cached = this.cache.get(id);
      const lastFetch = this.lastFetch.get(id) || 0;

      if (cached && (now - lastFetch) < maxAge) {
        fresh.push({ id, data: cached });
      } else {
        stale.push(id);
      }
    });

    let newData = [];
    if (stale.length > 0) {
      console.log(`Fetching ${stale.length} objects, using ${fresh.length} from cache`);

      const results = await this.client.multiGetObjects({
        ids: stale,
        options
      });

      // Update cache
      results.forEach((result, index) => {
        const id = stale[index];
        if (result.data) {
          this.cache.set(id, result.data);
          this.lastFetch.set(id, now);
        }
        newData.push({ id, data: result.data, error: result.error });
      });
    }

    // Combine fresh and new data
    const allResults = [...fresh, ...newData];

    // Sort to match original order
    return objectIds.map(id =>
      allResults.find(result => result.id === id) ||
      { id, error: { code: 'notFound', message: 'Object not found in results' } }
    );
  }

  clearCache() {
    this.cache.clear();
    this.lastFetch.clear();
  }

  getCacheStats() {
    return {
      size: this.cache.size,
      entries: Array.from(this.cache.keys())
    };
  }
}
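
Usage might look like this (assuming the client from the setup sketch above):

const cache = new ObjectCache(client);

// First call hits the RPC endpoint; a repeat within maxAge is served from cache
const firstPass = await cache.getObjects(objectIds, { showContent: true });
const secondPass = await cache.getObjects(objectIds, { showContent: true });
console.log(cache.getCacheStats());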

3. Parallel Processing with Error Recovery

async function resilientMultiGetObjects(objectIds, options = {}, maxRetries = 3) {
  let attempt = 0;
  let lastError;

  while (attempt < maxRetries) {
    try {
      const results = await client.multiGetObjects({
        ids: objectIds,
        options
      });

      // Check for partial failures
      const failures = results.filter(r => r.error);
      if (failures.length > 0 && failures.length < results.length) {
        console.warn(`Partial failure: ${failures.length}/${results.length} objects failed`);
      }

      return results;
    } catch (error) {
      attempt++;
      lastError = error;

      console.warn(`Attempt ${attempt} failed:`, error.message);

      if (attempt < maxRetries) {
        const delay = Math.min(1000 * Math.pow(2, attempt - 1), 5000);
        console.log(`Retrying in ${delay}ms...`);
        await new Promise(resolve => setTimeout(resolve, delay));
      }
    }
  }

  throw new Error(`Failed after ${maxRetries} attempts. Last error: ${lastError?.message}`);
}

Performance Optimization

1. Request Batching Strategy

class OptimizedObjectFetcher {
  constructor(client, batchSize = 50) {
    this.client = client;
    this.batchSize = batchSize;
    this.requestQueue = [];
    this.processing = false;
  }

  async getObject(objectId, options = {}) {
    return new Promise((resolve, reject) => {
      this.requestQueue.push({ objectId, options, resolve, reject });
      // Defer by one microtask so requests issued in the same tick
      // accumulate in the queue and are batched together
      queueMicrotask(() => this.processQueue());
    });
  }

  async processQueue() {
    if (this.processing || this.requestQueue.length === 0) {
      return;
    }

    this.processing = true;

    while (this.requestQueue.length > 0) {
      const batch = this.requestQueue.splice(0, this.batchSize);

      try {
        const objectIds = batch.map(req => req.objectId);
        const options = this.mergeOptions(batch.map(req => req.options));

        const results = await this.client.multiGetObjects({
          ids: objectIds,
          options
        });

        // Resolve individual promises
        batch.forEach((req, index) => {
          const result = results[index];
          if (result.data) {
            req.resolve(result.data);
          } else {
            req.reject(new Error(result.error?.message || 'Object not found'));
          }
        });

      } catch (error) {
        // Reject all requests in the batch
        batch.forEach(req => req.reject(error));
      }
    }

    this.processing = false;
  }

  mergeOptions(optionsArray) {
    // Merge all options to include maximum requested data
    const merged = {};
    const booleanFields = [
      'showType', 'showOwner', 'showPreviousTransaction',
      'showDisplay', 'showContent', 'showBcs', 'showStorageRebate'
    ];

    booleanFields.forEach(field => {
      merged[field] = optionsArray.some(opts => opts[field]);
    });

    return merged;
  }
}
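
With the queue deferred by a microtask, getObject calls issued in the same tick are coalesced into a single sui_multiGetObjects request. For example (object IDs reused from the curl example above):

const fetcher = new OptimizedObjectFetcher(client);

// Both lookups are served by one batched RPC call
const [coin, warrior] = await Promise.all([
  fetcher.getObject('0x5d3c87e88bc566e3f10c66e0275a366001ffa8b86142adc78c744de6afffeb34', { showContent: true }),
  fetcher.getObject('0x1a2b3c4d5e6f7890123456789012345678901234567890123456789012345678', { showDisplay: true })
]);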

Error Handling Best Practices

1. Comprehensive Error Categorization

function categorizeErrors(results) {
  const errorTypes = {
    notFound: [],
    deleted: [],
    invalidId: [],
    networkError: [],
    other: []
  };

  results.forEach((result, index) => {
    if (!result.error) return;

    const error = result.error;
    const objectId = result.objectId || `index_${index}`;

    switch (error.code) {
      case 'objectNotFound':
        errorTypes.notFound.push({ objectId, error });
        break;
      case 'objectDeleted':
        errorTypes.deleted.push({ objectId, error });
        break;
      case 'invalidObjectId':
        errorTypes.invalidId.push({ objectId, error });
        break;
      default:
        if (error.message?.includes('network')) {
          errorTypes.networkError.push({ objectId, error });
        } else {
          errorTypes.other.push({ objectId, error });
        }
    }
  });

  return errorTypes;
}

2. Retry Logic for Failed Objects

async function retryFailedObjects(failedObjectIds, options = {}, maxRetries = 2) {
  let attempt = 0;
  let remainingIds = [...failedObjectIds];
  const successfulResults = [];

  while (attempt < maxRetries && remainingIds.length > 0) {
    attempt++;
    console.log(`Retry attempt ${attempt} for ${remainingIds.length} objects`);

    const results = await client.multiGetObjects({
      ids: remainingIds,
      options
    });

    const stillFailed = [];

    results.forEach((result, index) => {
      const objectId = remainingIds[index];

      if (result.data) {
        successfulResults.push({ objectId, data: result.data });
      } else {
        stillFailed.push(objectId);
      }
    });

    remainingIds = stillFailed;

    if (remainingIds.length > 0 && attempt < maxRetries) {
      await new Promise(resolve => setTimeout(resolve, 1000 * attempt));
    }
  }

  return {
    successful: successfulResults,
    stillFailed: remainingIds
  };
}

Best Practices

1. Batch Size Management

  • Maximum 50 objects per request to avoid rate limiting
  • Use chunking for larger datasets
  • Implement progressive loading for user interfaces

2. Selective Data Fetching

  • Only request the data fields you need (showContent, showDisplay, etc.)
  • Use different option sets for different use cases, as in the presets sketch below
  • Cache frequently accessed object data
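
One way to keep option sets consistent across a codebase is a small preset map; the preset names here are illustrative, not part of the API:

// Hypothetical presets tailored to the use cases above
const OPTION_PRESETS = {
  ownershipCheck: { showOwner: true },
  nftGallery: { showDisplay: true, showContent: true, showOwner: true },
  balanceScan: { showType: true, showContent: true }
};

const coins = await client.multiGetObjects({
  ids: coinObjectIds,
  options: OPTION_PRESETS.balanceScan
});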

3. Error Handling

  • Always check for both data and error fields in results
  • Implement retry logic for network failures
  • Categorize errors for appropriate handling

4. Performance Optimization

  • Use caching for immutable objects
  • Implement request deduplication
  • Consider parallel processing for independent operations

5. Rate Limiting

  • Respect API rate limits with appropriate delays
  • Use exponential backoff for retries
  • Monitor request frequency in production

Need help? Contact our support team or check the Sui documentation.