@@ -84,14 +84,23 @@ pip install acai_aws
8484# app.py (entry point for your Lambda)
8585from acai_aws.apigateway.router import Router
8686
87+
88+ def authenticate(request, response, requirements):
89+     if request.headers.get('x-api-key') != 'secret-key':
90+         response.code = 401
91+         response.set_error('auth', 'Unauthorized')
92+
93+
8794router = Router(
8895    base_path='api/v1',
8996    handlers='handlers',        # directory mode
9097    schema='openapi.yml',       # optional OpenAPI document
9198    auto_validate=True,
9299    validate_response=True,
93-     before_all=lambda request, response, _: request.context.update({'trace_id': request.headers.get('trace-id')})
100+    with_auth=authenticate
94101)
102+ router.auto_load()
103+
95104
96105def handler(event, context):
97106    return router.route(event, context)
@@ -101,14 +110,8 @@ def handler(event, context):
101110# handlers/users.py
102111from acai_aws.apigateway.requirements import requirements
103112
104- def authenticate(request, response, _requirements):
105-     if request.headers.get('x-api-key') != 'secret':
106-         response.code = 401
107-         response.set_error('auth', 'Unauthorized')
108-
109113@requirements(
110114    auth_required=True,
111-     before=authenticate,
112115    required_body={
113116        'type': 'object',
114117        'required': ['email', 'name'],
@@ -179,31 +182,129 @@ pipenv run generate
179182
180183## 🔄 Event Processing
181184
182- Acai AWS provides consistent event objects for AWS stream and queue services. Decorate your handler with `acai_aws.common.records.requirements.requirements` to auto-detect the source and normalize each record.
185+ Acai AWS provides consistent event objects for AWS stream and queue services. Decorate your handler with `acai_aws.common.records.requirements.requirements` to auto-detect the source and wrap records.
183186
184187```python
185188from acai_aws.dynamodb.requirements import requirements
186189
190+ class ProductRecord:
191+     def __init__(self, record):
192+         self.id = record.body['id']
193+         self.payload = record.body
194+
187195@requirements(
188196    operations=['created', 'updated'],
189197    timeout=10,
190-     data_class=lambda record: record.body
198+     data_class=ProductRecord
191199)
192- def handler(event):
193-     for record in event.records:
194-         process(record)  # record is dict from the stream, filtered and validated
195-     return {'processed': len(event.records)}
200+ def handler(records):
201+     for record in records.records:
202+         process_product(record.id, record.payload)
203+     return {'processed': len(records.records)}
196204```
197205
198206Supported services include:
199207
200- - **DynamoDB Streams** (`acai_aws.dynamodb.event.Event`)
201- - **SQS** (`acai_aws.sqs.event.Event`)
202- - **SNS** (`acai_aws.sns.event.Event`)
203- - **S3** (optional `get_object` helper to pull objects)
204- - **Kinesis**, **Firehose**, **MSK**, **MQ**, **DocumentDB**
208+ **DynamoDB Streams**
209+
210+ ```python
211+ from acai_aws.dynamodb.requirements import requirements as ddb_requirements
212+
213+ @ddb_requirements()
214+ def dynamodb_handler(records):
215+ for record in records.records:
216+ handle_ddb_change(record.operation, record.body)
217+ ```
218+
219+ **Amazon SQS**
220+
221+ ```python
222+ from acai_aws.sqs.requirements import requirements as sqs_requirements
223+
224+ @sqs_requirements()
225+ def sqs_handler(records):
226+ for record in records.records:
227+ handle_message(record.body, record.attributes)
228+ ```
229+
230+ **Amazon SNS**
231+
232+ ```python
233+ from acai_aws.sns.requirements import requirements as sns_requirements
234+
235+ @sns_requirements()
236+ def sns_handler(records):
237+ for record in records.records:
238+ handle_notification(record.body, record.subject)
239+ ```
240+
241+ **Amazon S3**
242+
243+ ```python
244+ from acai_aws.s3.requirements import requirements as s3_requirements
245+
246+ @s3_requirements(get_object=True, data_type='json')
247+ def s3_handler(records):
248+ for record in records.records:
249+ handle_object(record.bucket, record.key, record.body)
250+ ```
251+
252+ **Amazon Kinesis**
253+
254+ ```python
255+ from acai_aws.kinesis.requirements import requirements as kinesis_requirements
256+
257+ @kinesis_requirements()
258+ def kinesis_handler(records):
259+ for record in records.records:
260+ handle_stream_event(record.partition_key, record.body)
261+ ```
262+
263+ **Amazon Firehose**
264+
265+ ```python
266+ from acai_aws.firehose.requirements import requirements as firehose_requirements
267+
268+ @firehose_requirements()
269+ def firehose_handler(records):
270+ for record in records.records:
271+ handle_delivery(record.record_id, record.body)
272+ ```
273+
274+ **Amazon MSK**
275+
276+ ```python
277+ from acai_aws.msk.requirements import requirements as msk_requirements
278+
279+ @msk_requirements()
280+ def msk_handler(records):
281+ for record in records.records:
282+ handle_msk_message(record.topic, record.body)
283+ ```
284+
285+ **Amazon MQ**
286+
287+ ```python
288+ from acai_aws.mq.requirements import requirements as mq_requirements
289+
290+ @mq_requirements()
291+ def mq_handler(records):
292+ for record in records.records:
293+ handle_mq_message(record.message_id, record.body)
294+ ```
295+
296+ **Amazon DocumentDB Change Streams**
297+
298+ ```python
299+ from acai_aws.documentdb.requirements import requirements as docdb_requirements
300+
301+ @docdb_requirements()
302+ def docdb_handler(records):
303+ for record in records.records:
304+ handle_docdb_change(record.operation, record.full_document)
305+ ```
205306
206- Each record exposes intuitive properties like `record.operation`, `record.body`, `record.headers`, or service-specific fields.
307+ Each record exposes intuitive properties like `record.operation`, `record.body`, or service-specific metadata (bucket, partition, headers, etc.).
207308
208309---
209310
0 commit comments