add missing files
This commit is contained in:
4
streams-events/app/config.py
Normal file
4
streams-events/app/config.py
Normal file
@@ -0,0 +1,4 @@
|
||||
"""Runtime configuration sourced from environment variables."""
import os

# Meilisearch connection settings. Each resolves to None when the
# variable is unset; the `type: ignore` silences the resulting
# Optional[str] -> str mismatch rather than failing at import time.
MEILISEARCH_HOST: str = os.environ.get('MEILISEARCH_HOST')  # type: ignore
MEILISEARCH_API_KEY: str = os.environ.get('MEILISEARCH_API_KEY')  # type: ignore
|
||||
0
streams-events/app/events/__init__.py
Normal file
0
streams-events/app/events/__init__.py
Normal file
34
streams-events/app/events/index_docs_into_meili.py
Normal file
34
streams-events/app/events/index_docs_into_meili.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from arnparse import arnparse
|
||||
from aws_lambda_powertools import Logger
|
||||
from aws_lambda_powertools.utilities.data_classes import (
|
||||
DynamoDBStreamEvent,
|
||||
event_source,
|
||||
)
|
||||
from aws_lambda_powertools.utilities.typing import LambdaContext
|
||||
from config import MEILISEARCH_API_KEY, MEILISEARCH_HOST
|
||||
from meili import Op
|
||||
from meilisearch import Client as Meilisearch
|
||||
|
||||
# Module-level singletons, created once per Lambda execution environment
# and reused across warm invocations.
logger = Logger(__name__)
# NOTE(review): MEILISEARCH_HOST/API_KEY come from os.getenv and may be
# None if the environment is misconfigured — the client is constructed
# here regardless; verify deployment sets both variables.
meili_client = Meilisearch(MEILISEARCH_HOST, MEILISEARCH_API_KEY)
|
||||
|
||||
|
||||
@event_source(data_class=DynamoDBStreamEvent)
@logger.inject_lambda_context
def lambda_handler(event: DynamoDBStreamEvent, context: LambdaContext):
    """Fan DynamoDB stream records out to batched Meilisearch operations.

    Each record is buffered on the ``Op`` context manager keyed by the
    source table name; the buffer is flushed when the ``with`` block exits.
    For REMOVE events ``new_image`` is absent, so the record's primary key
    is buffered instead (``image or key``).
    """
    with Op(meili_client) as batch:
        for rec in event.records:
            dynamo = rec.dynamodb
            key = dynamo.keys['id']  # type: ignore
            image = dynamo.new_image  # type: ignore
            batch.append(
                table_from_arn(rec.event_source_arn),  # type: ignore
                op=rec.event_name,  # type: ignore
                data=image or key,
            )
|
||||
|
||||
|
||||
def table_from_arn(arn: str) -> str:
    """Return the DynamoDB table name encoded in a stream ARN.

    After ``arnparse`` strips the ``table`` resource-type prefix, the
    resource reads ``<TableName>/stream/<label>``; the table name is the
    segment before the first slash.
    """
    parsed = arnparse(arn)
    table_name, _, _ = parsed.resource.partition('/')
    return table_name
|
||||
62
streams-events/app/meili.py
Normal file
62
streams-events/app/meili.py
Normal file
@@ -0,0 +1,62 @@
|
||||
from typing import Self
|
||||
|
||||
from aws_lambda_powertools.shared.json_encoder import Encoder
|
||||
from aws_lambda_powertools.utilities.data_classes.dynamo_db_stream_event import (
|
||||
DynamoDBRecordEventName,
|
||||
)
|
||||
from meilisearch import Client
|
||||
|
||||
|
||||
class Op:
|
||||
def __init__(self, client: Client) -> None:
|
||||
self.op = {}
|
||||
self.client = client
|
||||
|
||||
def __enter__(self) -> Self:
|
||||
return self
|
||||
|
||||
def __exit__(self, *exc_details) -> None:
|
||||
# When we exit, we need to keep flushing whatever's left
|
||||
# until there's nothing left in our items buffer.
|
||||
while self.op:
|
||||
self._flush()
|
||||
|
||||
def _flush(self):
|
||||
op = self.op
|
||||
client = self.client
|
||||
|
||||
for index_, ops in op.items():
|
||||
index = client.index(index_)
|
||||
|
||||
for op, doc in ops.items():
|
||||
match op:
|
||||
case DynamoDBRecordEventName.INSERT:
|
||||
index.add_documents(doc, serializer=JSONEncoder)
|
||||
case DynamoDBRecordEventName.MODIFY:
|
||||
index.update_documents(doc, serializer=JSONEncoder)
|
||||
case DynamoDBRecordEventName.REMOVE:
|
||||
index.delete_documents(doc)
|
||||
|
||||
self.op = {}
|
||||
|
||||
def append(
|
||||
self,
|
||||
index: str,
|
||||
/,
|
||||
op: DynamoDBRecordEventName,
|
||||
data: dict | str,
|
||||
) -> bool:
|
||||
if index not in self.op:
|
||||
self.op[index] = {}
|
||||
|
||||
if op not in self.op[index]:
|
||||
self.op[index][op] = []
|
||||
|
||||
return self.op[index][op].append(data)
|
||||
|
||||
|
||||
class JSONEncoder(Encoder):
    """Powertools JSON encoder extended to serialize ``set`` values."""

    def default(self, obj):
        # DynamoDB string/number sets deserialize to Python sets, which
        # plain JSON encoders reject; emit them as lists instead and
        # defer everything else to the base encoder.
        return list(obj) if isinstance(obj, set) else super().default(obj)
|
||||
Reference in New Issue
Block a user