Docs

sui_multiGetObjects - Batch Query Multiple Objects

Efficiently retrieve multiple Sui objects in a single request using sui_multiGetObjects RPC method. Batch query object data, ownership, and metadata with Dwellir's high-performance Sui infrastructure.

Efficiently retrieves information about multiple objects on the Sui blockchain in a single request, reducing network overhead and improving application performance.

Overview

The sui_multiGetObjects method is a batch version of sui_getObject, allowing you to query multiple objects simultaneously. This is particularly useful for applications that need to fetch data from numerous objects, such as displaying NFT collections, checking multiple coin balances, or analyzing complex object relationships. The method maintains the same response structure as single object queries but returns an array of results.

Code Examples

Common Use Cases

1. NFT Collection Display

JavaScript
/**
 * Fetch a batch of NFTs and map them into a display-friendly shape.
 *
 * @param {string[]} nftIds - Object IDs of the NFTs to load.
 * @returns {Promise<object[]>} One entry per object with usable display data
 *   ({id, name, image, description, owner, attributes}); objects that errored
 *   or lack display metadata are skipped.
 */
async function displayNFTCollection(nftIds) {
  const results = await client.multiGetObjects({
    ids: nftIds,
    options: {
      showContent: true,
      showDisplay: true,
      showOwner: true,
    },
  });

  return results
    // Fix: `display.data` can be null when Display resolution fails, so the
    // original filter (data && data.display) still let the .map() below throw
    // on `display.data.name`. Guard the nested field as well.
    .filter((result) => result.data?.display?.data)
    .map(({ data }) => ({
      id: data.objectId,
      name: data.display.data.name,
      image: data.display.data.image_url,
      description: data.display.data.description,
      // AddressOwner is undefined for shared/immutable objects.
      owner: data.owner?.AddressOwner,
      attributes: data.content?.fields || {},
    }));
}

2. Portfolio Balance Aggregation

JavaScript
/**
 * Aggregate a set of Coin objects into per-coin-type balance totals.
 *
 * @param {string[]} coinObjectIds - Object IDs expected to be Coin<T> objects;
 *   non-coin objects and errored results are silently skipped.
 * @returns {Promise<Object<string, {type: string, totalBalance: number,
 *   objectCount: number, objects: Array<{objectId: string, balance: number}>}>>}
 *   Map keyed by full coin type.
 *
 * NOTE(review): balances are summed as JS numbers; totals above
 * Number.MAX_SAFE_INTEGER (2^53 - 1) lose precision — switch to BigInt if
 * that range is possible for your coins.
 */
async function getPortfolioBalances(coinObjectIds) {
  const results = await client.multiGetObjects({
    ids: coinObjectIds,
    options: {
      showType: true,
      showContent: true,
    },
  });

  const balances = {};

  results.forEach((result) => {
    // Skip errored results and anything that is not a Coin<T>.
    if (!result.data || !result.data.type?.includes('::coin::Coin<')) {
      return;
    }

    const coinType = result.data.type;
    // Fix: always pass an explicit radix to parseInt.
    const balance = Number.parseInt(result.data.content?.fields?.balance || '0', 10);

    if (!balances[coinType]) {
      balances[coinType] = {
        type: coinType,
        totalBalance: 0,
        objectCount: 0,
        objects: [],
      };
    }

    balances[coinType].totalBalance += balance;
    balances[coinType].objectCount += 1;
    balances[coinType].objects.push({
      objectId: result.data.objectId,
      balance: balance,
    });
  });

  return balances;
}

3. Object Ownership Verification

JavaScript
/**
 * Check which of the given objects are address-owned by a specific owner.
 *
 * @param {string[]} objectIds - Object IDs to check.
 * @param {string} expectedOwner - Address the objects should belong to.
 * @returns {Promise<{owned: string[],
 *   notOwned: Array<{objectId: string, actualOwner: string|undefined}>,
 *   errors: Array<{objectId: string, error: object}>}>}
 */
async function verifyObjectOwnership(objectIds, expectedOwner) {
  const results = await client.multiGetObjects({
    ids: objectIds,
    options: { showOwner: true },
  });

  const owned = [];
  const notOwned = [];
  const errors = [];

  // Results come back in the same order as the requested IDs.
  for (const [index, result] of results.entries()) {
    const objectId = objectIds[index];

    if (result.error) {
      errors.push({ objectId, error: result.error });
      continue;
    }

    // AddressOwner is undefined for shared or immutable objects, so those
    // land in notOwned with actualOwner undefined.
    const actualOwner = result.data?.owner?.AddressOwner;
    if (actualOwner === expectedOwner) {
      owned.push(objectId);
    } else {
      notOwned.push({ objectId, actualOwner });
    }
  }

  return { owned, notOwned, errors };
}

4. Batch Object Type Analysis

JavaScript
/**
 * Group a batch of objects by their full Move type and by publishing package.
 *
 * @param {string[]} objectIds - Object IDs to analyze.
 * @returns {Promise<{byType: object, byPackage: object,
 *   summary: {total: number, found: number, notFound: number}}>}
 *   byType maps full type -> {count, objects}; byPackage maps package ID ->
 *   {count, types} (types converted to an array for JSON serialization).
 */
async function analyzeObjectTypes(objectIds) {
  const results = await client.multiGetObjects({
    ids: objectIds,
    options: {
      showType: true,
      showContent: true,
    },
  });

  const analysis = {
    byType: {},
    byPackage: {},
    summary: {
      total: objectIds.length,
      found: 0,
      notFound: 0,
    },
  };

  results.forEach((result) => {
    if (result.error) {
      analysis.summary.notFound++;
      return;
    }

    analysis.summary.found++;
    const type = result.data?.type || 'Unknown';

    // Analyze by full type
    if (!analysis.byType[type]) {
      analysis.byType[type] = {
        count: 0,
        objects: [],
      };
    }
    analysis.byType[type].count++;
    // Fix: optional chaining — a result can lack `data` without carrying an
    // explicit error (the `type` fallback above already assumed this); the
    // original threw here in that case.
    analysis.byType[type].objects.push(result.data?.objectId);

    // Analyze by package. Fix: accept upper-case hex digits too — type
    // strings are not guaranteed to be lower-case normalized by every client.
    const packageMatch = type.match(/^(0x[0-9a-fA-F]+)::/);
    if (packageMatch) {
      const packageId = packageMatch[1];
      if (!analysis.byPackage[packageId]) {
        analysis.byPackage[packageId] = {
          count: 0,
          types: new Set(),
        };
      }
      analysis.byPackage[packageId].count++;
      analysis.byPackage[packageId].types.add(type);
    }
  });

  // Convert Sets to Arrays for JSON serialization
  Object.values(analysis.byPackage).forEach((pkg) => {
    pkg.types = Array.from(pkg.types);
  });

  return analysis;
}

Advanced Usage Patterns

1. Chunked Processing for Large Sets

JavaScript
/**
 * Fetch an arbitrarily large set of objects by splitting the IDs into
 * fixed-size chunks and issuing one multiGetObjects call per chunk, with a
 * short delay between chunks for rate limiting.
 *
 * @param {string[]} objectIds - All object IDs to fetch.
 * @param {number} [chunkSize=50] - Max IDs per request; keep at or below the
 *   node's batch limit.
 * @returns {Promise<object[]>} One result per input ID, in input order. A
 *   chunk-level RPC failure yields placeholder entries of the form
 *   {objectId, error: {code: 'chunkError', message}} for every ID in the chunk.
 */
async function processLargeObjectSet(objectIds, chunkSize = 50) {
  const results = [];
  const chunks = [];

  // Split into chunks
  for (let i = 0; i < objectIds.length; i += chunkSize) {
    chunks.push(objectIds.slice(i, i + chunkSize));
  }

  console.log(`Processing ${objectIds.length} objects in ${chunks.length} chunks`);

  // Process chunks with rate limiting
  for (let i = 0; i < chunks.length; i++) {
    const chunk = chunks[i];

    try {
      const chunkResults = await client.multiGetObjects({
        ids: chunk,
        options: {
          showType: true,
          showContent: true,
          showOwner: true,
        },
      });

      results.push(...chunkResults);
      console.log(`Completed chunk ${i + 1}/${chunks.length}`);

      // Rate limiting - wait between chunks
      if (i < chunks.length - 1) {
        await new Promise((resolve) => setTimeout(resolve, 100));
      }
    } catch (error) {
      console.error(`Error processing chunk ${i + 1}:`, error);
      // Fix: include the objectId in each placeholder so callers can tell
      // WHICH objects failed, not just how many.
      results.push(
        ...chunk.map((id) => ({
          objectId: id,
          error: { code: 'chunkError', message: error.message },
        })),
      );
    }
  }

  return results;
}

2. Smart Caching with Selective Updates

JavaScript
/**
 * Time-based read-through cache in front of multiGetObjects: recently fetched
 * objects are served from memory, everything else is re-queried in one batch.
 */
class ObjectCache {
  constructor(client) {
    this.client = client;
    this.cache = new Map(); // objectId -> object data
    this.lastFetch = new Map(); // objectId -> timestamp of last successful fetch
  }

  /**
   * Resolve objectIds to results, serving cached entries younger than maxAge.
   *
   * @param {string[]} objectIds - IDs to resolve, order preserved in output.
   * @param {object} [options] - multiGetObjects options for the fetch.
   * @param {number} [maxAge=60000] - Cache freshness window in milliseconds.
   * @returns {Promise<object[]>} Per-ID entries: {id, data} for cache hits,
   *   {id, data, error} for fresh fetches.
   */
  async getObjects(objectIds, options = {}, maxAge = 60000) {
    const now = Date.now();
    const cachedEntries = [];
    const toFetch = [];

    // Partition into cache hits and IDs that need a network round trip.
    for (const id of objectIds) {
      const entry = this.cache.get(id);
      const fetchedAt = this.lastFetch.get(id) || 0;
      if (entry && now - fetchedAt < maxAge) {
        cachedEntries.push({ id, data: entry });
      } else {
        toFetch.push(id);
      }
    }

    const fetchedEntries = [];
    if (toFetch.length > 0) {
      console.log(`Fetching ${toFetch.length} objects, using ${cachedEntries.length} from cache`);

      const results = await this.client.multiGetObjects({
        ids: toFetch,
        options,
      });

      results.forEach((result, i) => {
        const id = toFetch[i];
        // Only successful fetches are cached; failures remain eligible for
        // retry on the next call.
        if (result.data) {
          this.cache.set(id, result.data);
          this.lastFetch.set(id, now);
        }
        fetchedEntries.push({ id, data: result.data, error: result.error });
      });
    }

    const combined = [...cachedEntries, ...fetchedEntries];

    // Restore the caller's input order.
    return objectIds.map(
      (id) =>
        combined.find((entry) => entry.id === id) || {
          id,
          error: { code: 'notFound', message: 'Object not found in results' },
        },
    );
  }

  /** Drop all cached data and fetch timestamps. */
  clearCache() {
    this.cache.clear();
    this.lastFetch.clear();
  }

  /** @returns {{size: number, entries: string[]}} current cache contents. */
  getCacheStats() {
    return {
      size: this.cache.size,
      entries: [...this.cache.keys()],
    };
  }
}

3. Parallel Processing with Error Recovery

JavaScript
/**
 * multiGetObjects with whole-request retry and exponential backoff.
 *
 * @param {string[]} objectIds - Object IDs to fetch.
 * @param {object} [options] - multiGetObjects display options.
 * @param {number} [maxRetries=3] - Maximum number of attempts.
 * @returns {Promise<object[]>} The raw results array; per-object errors are
 *   logged as a warning but still returned to the caller.
 * @throws {Error} When every attempt throws at the RPC level.
 */
async function resilientMultiGetObjects(objectIds, options = {}, maxRetries = 3) {
  let lastError;

  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      const results = await client.multiGetObjects({
        ids: objectIds,
        options,
      });

      // Surface partial failures without aborting the call.
      const failures = results.filter((r) => r.error);
      if (failures.length > 0 && failures.length < results.length) {
        console.warn(`Partial failure: ${failures.length}/${results.length} objects failed`);
      }

      return results;
    } catch (error) {
      lastError = error;
      console.warn(`Attempt ${attempt} failed:`, error.message);

      if (attempt < maxRetries) {
        // Exponential backoff, capped at 5 seconds.
        const delay = Math.min(1000 * 2 ** (attempt - 1), 5000);
        console.log(`Retrying in ${delay}ms...`);
        await new Promise((resolve) => setTimeout(resolve, delay));
      }
    }
  }

  throw new Error(`Failed after ${maxRetries} attempts. Last error: ${lastError?.message}`);
}

Performance Optimization

1. Request Batching Strategy

JavaScript
/**
 * Coalesces many single-object lookups into batched multiGetObjects calls.
 * Callers use getObject() as if it were a one-off request; pending requests
 * are drained behind the scenes in batches of up to batchSize.
 */
class OptimizedObjectFetcher {
  constructor(client, batchSize = 50) {
    this.client = client;
    this.batchSize = batchSize;
    this.requestQueue = [];
    this.processing = false;
  }

  /**
   * Queue a single-object lookup.
   *
   * @param {string} objectId - ID of the object to fetch.
   * @param {object} [options] - Display options; unioned across the batch.
   * @returns {Promise<object>} Resolves with the object data, rejects when
   *   the object errored or was not found.
   */
  async getObject(objectId, options = {}) {
    return new Promise((resolve, reject) => {
      this.requestQueue.push({ objectId, options, resolve, reject });
      // Fire-and-forget: the drain loop settles each queued promise itself.
      this.processQueue();
    });
  }

  /** Drain the queue in batches; only one drain loop runs at a time. */
  async processQueue() {
    if (this.processing || this.requestQueue.length === 0) {
      return;
    }

    this.processing = true;

    while (this.requestQueue.length > 0) {
      const pending = this.requestQueue.splice(0, this.batchSize);

      try {
        const ids = pending.map(({ objectId }) => objectId);
        const options = this.mergeOptions(pending.map((request) => request.options));

        const results = await this.client.multiGetObjects({ ids, options });

        // Settle each caller's promise from its positional result.
        for (const [i, request] of pending.entries()) {
          const outcome = results[i];
          if (outcome.data) {
            request.resolve(outcome.data);
          } else {
            request.reject(new Error(outcome.error?.message || 'Object not found'));
          }
        }
      } catch (error) {
        // A batch-level RPC failure fails every request in the batch.
        for (const request of pending) {
          request.reject(error);
        }
      }
    }

    this.processing = false;
  }

  /**
   * Union of all requested option flags, so one batched request satisfies
   * every caller in the batch.
   */
  mergeOptions(optionsArray) {
    const flags = [
      'showType',
      'showOwner',
      'showPreviousTransaction',
      'showDisplay',
      'showContent',
      'showBcs',
      'showStorageRebate',
    ];

    return Object.fromEntries(
      flags.map((flag) => [flag, optionsArray.some((opts) => opts[flag])]),
    );
  }
}

Error Handling Best Practices

1. Comprehensive Error Categorization

JavaScript
/**
 * Bucket the failed entries of a multiGetObjects result array by error kind.
 *
 * @param {object[]} results - Raw results; entries without an `error` field
 *   are ignored.
 * @returns {{notFound: object[], deleted: object[], invalidId: object[],
 *   networkError: object[], other: object[]}} Each bucket entry is
 *   {objectId, error}.
 */
function categorizeErrors(results) {
  const errorTypes = {
    notFound: [],
    deleted: [],
    invalidId: [],
    networkError: [],
    other: [],
  };

  results.forEach((result, index) => {
    if (!result.error) return;

    const error = result.error;
    // Fix: result entries carry the ID on `data.objectId` (or inside the
    // error payload, e.g. `object_id` for objectNotFound) — `result.objectId`
    // is never set, so the original ALWAYS fell back to the index label.
    const objectId = result.data?.objectId ?? error.object_id ?? `index_${index}`;

    switch (error.code) {
      case 'objectNotFound':
        errorTypes.notFound.push({ objectId, error });
        break;
      case 'objectDeleted':
        errorTypes.deleted.push({ objectId, error });
        break;
      case 'invalidObjectId':
        errorTypes.invalidId.push({ objectId, error });
        break;
      default:
        // Best-effort classification by message text for unknown codes.
        if (error.message?.includes('network')) {
          errorTypes.networkError.push({ objectId, error });
        } else {
          errorTypes.other.push({ objectId, error });
        }
    }
  });

  return errorTypes;
}

2. Retry Logic for Failed Objects

JavaScript
/**
 * Re-query objects that previously failed, in up to maxRetries rounds, with
 * linear backoff between rounds.
 *
 * @param {string[]} failedObjectIds - IDs to retry.
 * @param {object} [options] - multiGetObjects display options.
 * @param {number} [maxRetries=2] - Maximum number of retry rounds.
 * @returns {Promise<{successful: Array<{objectId: string, data: object}>,
 *   stillFailed: string[]}>}
 */
async function retryFailedObjects(failedObjectIds, options = {}, maxRetries = 2) {
  const successful = [];
  let pending = [...failedObjectIds];

  for (let round = 1; round <= maxRetries && pending.length > 0; round++) {
    console.log(`Retry attempt ${round} for ${pending.length} objects`);

    const results = await client.multiGetObjects({
      ids: pending,
      options,
    });

    const failedAgain = [];

    results.forEach((result, i) => {
      const objectId = pending[i];

      if (result.data) {
        successful.push({ objectId, data: result.data });
      } else {
        failedAgain.push(objectId);
      }
    });

    pending = failedAgain;

    // Linear backoff before the next round.
    if (pending.length > 0 && round < maxRetries) {
      await new Promise((resolve) => setTimeout(resolve, 1000 * round));
    }
  }

  return {
    successful,
    stillFailed: pending,
  };
}

Best Practices

1. Batch Size Management

  • Maximum 50 objects per request to avoid rate limiting
  • Use chunking for larger datasets
  • Implement progressive loading for user interfaces

2. Selective Data Fetching

  • Only request data fields you need (showContent, showDisplay, etc.)
  • Use different option sets for different use cases
  • Cache frequently accessed object data

3. Error Handling

  • Always check for both data and error fields in results
  • Implement retry logic for network failures
  • Categorize errors for appropriate handling

4. Performance Optimization

  • Use caching for immutable objects
  • Implement request deduplication
  • Consider parallel processing for independent operations

5. Rate Limiting

  • Respect API rate limits with appropriate delays
  • Use exponential backoff for retries
  • Monitor request frequency in production

Need help? Contact our support team or check the Sui documentation.