Webhook delivery is at-least-once. Use each event's id field as an idempotency key and design handlers to safely process duplicate deliveries. See Vault Webhooks for signing and retry details.
import Casedev from 'casedev';import fs from 'fs';import path from 'path';const client = new Casedev({ apiKey: process.env.CASEDEV_API_KEY });async function main() { const documentsDir = './discovery_dump'; // 1. Create vault const vault = await client.vault.create({ name: 'Matter 2024-1234 - Discovery', description: 'Documents from opposing counsel' }); // 2. Subscribe to ingestion events await client.vault.events.subscriptions.create(vault.id, { callbackUrl: 'https://your-app.com/webhooks/case-vault', eventTypes: ['vault.ingest.completed', 'vault.ingest.failed'] }); // 3. Upload and ingest all documents const files = fs.readdirSync(documentsDir); for (const file of files) { const filePath = path.join(documentsDir, file); if (!fs.statSync(filePath).isFile()) continue; const upload = await client.vault.upload(vault.id, { filename: file, contentType: 'application/pdf' }); await fetch(upload.uploadUrl, { method: 'PUT', body: fs.readFileSync(filePath) }); await client.vault.ingest(vault.id, upload.objectId); console.log(`Queued: ${file}`); } // 4. Search (after webhook confirms ingestion is complete) const results = await client.vault.search(vault.id, { query: 'evidence of safety violations in 2023', method: 'hybrid', limit: 10 }); console.log(results.chunks);}main();
Production tip: For large document sets (1000+), use parallel uploads with a concurrency limit of 10-20 to maximize throughput while avoiding rate limits.