---
title: Batch Operations
description: Perform efficient bulk read and write operations with DynamoDB Tooling.
---

# Batch Operations

DynamoDB Tooling provides efficient batch operations for bulk reads and writes, handling DynamoDB's limits and throttling automatically.

## Batch Get

Retrieve multiple items efficiently:

```ts
import { User } from './models/User'

// Get multiple items by ID
const users = await User.batchGet([
  'user-1',
  'user-2',
  'user-3',
])

// With consistent read
const consistentUsers = await User.batchGet(['user-1', 'user-2'], {
  consistentRead: true,
})

// With projection
const projectedUsers = await User.batchGet(['user-1', 'user-2'], {
  projection: ['id', 'name', 'email'],
})
```

### Cross-Model Batch Get

Retrieve items from multiple models:

```ts
import { batchGet } from 'dynamodb-tooling'

const results = await batchGet([
  { model: User, keys: ['user-1', 'user-2'] },
  { model: Post, keys: ['post-1', 'post-2'] },
  { model: Comment, keys: ['comment-1'] },
])

console.log(results.users) // User[]
console.log(results.posts) // Post[]
console.log(results.comments) // Comment[]
```

### Handling Large Batches

DynamoDB limits batch get to 100 items per request. The library handles this automatically:

```ts
// Automatically splits into multiple requests
const users = await User.batchGet(arrayOf500UserIds)

// With custom concurrency
const throttledUsers = await User.batchGet(arrayOf500UserIds, {
  concurrency: 5, // Number of parallel requests
})
```

## Batch Write

Write multiple items efficiently:

```ts
// Batch create
await User.batchCreate([
  { email: 'user1@example.com', name: 'User 1' },
  { email: 'user2@example.com', name: 'User 2' },
  { email: 'user3@example.com', name: 'User 3' },
])

// Batch update
await User.batchUpdate([
  { id: 'user-1', status: 'active' },
  { id: 'user-2', status: 'active' },
  { id: 'user-3', status: 'inactive' },
])

// Batch delete
await User.batchDelete(['user-1', 'user-2', 'user-3'])
```

### Mixed Batch Operations

Combine different operations in one batch:

```ts
import { batchWrite } from 'dynamodb-tooling'

await batchWrite([
  { type: 'put', model: User, item: { id: 'user-new', name: 'New User' } },
  { type: 'put', model: Post, item: { id: 'post-new', title: 'New Post' } },
  { type: 'delete', model: User, key: 'user-old' },
  { type: 'delete', model: Comment, key: 'comment-old' },
])
```

### Handling Large Writes

DynamoDB limits batch write to 25 items per request. The library splits larger batches for you:

```ts
// Automatically splits into multiple requests
await User.batchCreate(arrayOf1000Users)

// With retry handling for unprocessed items
await User.batchCreate(arrayOf1000Users, {
  retryUnprocessed: true,
  retryDelay: 100, // ms
  maxRetries: 5,
})
```

## Transactions

For atomic operations across multiple items:

### Transaction Write

```ts
import { transactWrite } from 'dynamodb-tooling'

await transactWrite([
  {
    type: 'put',
    model: User,
    item: { id: 'user-1', name: 'John', balance: 100 },
  },
  {
    type: 'update',
    model: Account,
    key: 'account-1',
    update: { balance: { $add: -50 } },
  },
  {
    type: 'conditionCheck',
    model: Order,
    key: 'order-1',
    condition: { status: 'pending' },
  },
  {
    type: 'delete',
    model: TempRecord,
    key: 'temp-1',
  },
])
```

### Transaction Get

```ts
import { transactGet } from 'dynamodb-tooling'

const [user, account, order] = await transactGet([
  { model: User, key: 'user-1' },
  { model: Account, key: 'account-1' },
  { model: Order, key: 'order-1' },
])
```

### Idempotent Transactions

Use client tokens for idempotency:

```ts
await transactWrite([
  // ... operations
], {
  clientRequestToken: 'unique-request-id-123',
})
```

## Bulk Import

For large-scale data imports:

```ts
import { createDataImporter } from 'dynamodb-tooling'

const importer = createDataImporter({
  tableName: 'MyApp',
  batchSize: 25,
  concurrency: 5,