add
This commit is contained in:
0
users-events/app/events/__init__.py
Normal file
0
users-events/app/events/__init__.py
Normal file
0
users-events/app/events/batch/__init__.py
Normal file
0
users-events/app/events/batch/__init__.py
Normal file
20
users-events/app/events/batch/csv_into_chunks.py
Normal file
20
users-events/app/events/batch/csv_into_chunks.py
Normal file
@@ -0,0 +1,20 @@
|
||||
from aws_lambda_powertools.utilities.data_classes import (
|
||||
EventBridgeEvent,
|
||||
event_source,
|
||||
)
|
||||
from aws_lambda_powertools.utilities.typing import LambdaContext
|
||||
|
||||
from boto3clients import s3_client
|
||||
from config import CHUNK_SIZE
|
||||
from csv_utils import byte_ranges
|
||||
|
||||
# smart_open-style transport parameters: reuse the module-level boto3 client.
transport_params = {'client': s3_client}


@event_source(data_class=EventBridgeEvent)
def lambda_handler(event: EventBridgeEvent, context: LambdaContext) -> bool:
    """Split the CSV named in an EventBridge event into byte-range chunks.

    Reads the S3 URI from the event's ``detail.new_image`` payload and asks
    ``byte_ranges`` for CHUNK_SIZE-sized (start, end) pairs. The pairs are
    not yet consumed — presumably downstream fan-out is still TODO.

    Returns:
        True unconditionally (Lambda success marker).
    """
    new_image = event.detail['new_image']
    # NOTE(review): this key is spelled 's3uri' here but 's3_uri' in
    # read_csv_chunk.py — confirm which spelling the producer actually emits.
    csvfile = new_image['s3uri']
    # Result deliberately discarded for now (the original bound it to an
    # unused local); the call is kept so errors computing the ranges still
    # surface. TODO: dispatch the chunk ranges to the chunk readers.
    byte_ranges(csvfile, CHUNK_SIZE, transport_params=transport_params)

    return True
|
||||
14
users-events/app/events/batch/excel_to_csv.py
Normal file
14
users-events/app/events/batch/excel_to_csv.py
Normal file
@@ -0,0 +1,14 @@
|
||||
from aws_lambda_powertools.utilities.data_classes import (
|
||||
EventBridgeEvent,
|
||||
event_source,
|
||||
)
|
||||
from aws_lambda_powertools.utilities.typing import LambdaContext
|
||||
|
||||
from boto3clients import s3_client
|
||||
|
||||
# smart_open-style transport parameters built around the shared boto3 client.
# NOTE(review): currently unused by the handler below — presumably reserved
# for the upcoming Excel-to-CSV conversion; confirm before removing.
transport_params = {'client': s3_client}


@event_source(data_class=EventBridgeEvent)
def lambda_handler(event: EventBridgeEvent, context: LambdaContext) -> bool:
    """Placeholder handler for Excel-to-CSV conversion events.

    Currently a no-op that simply acknowledges the EventBridge event.

    Returns:
        True unconditionally (Lambda success marker).
    """
    return True
|
||||
55
users-events/app/events/batch/read_csv_chunk.py
Normal file
55
users-events/app/events/batch/read_csv_chunk.py
Normal file
@@ -0,0 +1,55 @@
|
||||
import csv
|
||||
from io import StringIO
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from aws_lambda_powertools.utilities.data_classes import (
|
||||
EventBridgeEvent,
|
||||
event_source,
|
||||
)
|
||||
from aws_lambda_powertools.utilities.typing import LambdaContext
|
||||
|
||||
from boto3clients import s3_client
|
||||
|
||||
if TYPE_CHECKING:
    # Real S3 client type, visible only to the type checker; the boto3 stub
    # package (mypy_boto3_s3) is not a runtime dependency.
    from mypy_boto3_s3.client import S3Client
else:
    # Runtime stand-in so the S3Client annotation below still resolves.
    S3Client = object

# smart_open-style transport parameters: reuse the shared boto3 client.
transport_params = {'client': s3_client}
||||
@event_source(data_class=EventBridgeEvent)
def lambda_handler(event: EventBridgeEvent, context: LambdaContext) -> bool:
    """Read one byte-range chunk of a CSV from S3 and iterate its rows.

    Chunk boundaries (``start_byte``/``end_byte``) and the object URI come
    from the event's ``detail.new_image`` payload. Rows are currently just
    printed — presumably real per-row processing is still to come.

    Returns:
        True unconditionally (Lambda success marker).
    """
    record = event.detail['new_image']
    # NOTE(review): key is 's3_uri' here but 's3uri' in csv_into_chunks.py —
    # confirm which spelling the producer actually emits.
    uri = record['s3_uri']

    chunk = _get_s3_object_range(
        uri,
        start_byte=record['start_byte'],
        end_byte=record['end_byte'],
        s3_client=s3_client,
    )
    # NOTE(review): an arbitrary byte range can start or end mid-line;
    # assumes the chunking step aligned ranges to newline boundaries —
    # confirm against csv_utils.byte_ranges.
    for row in csv.reader(chunk):
        print(row)

    return True
|
||||
|
||||
|
||||
def _get_s3_object_range(
|
||||
s3_uri: str,
|
||||
*,
|
||||
start_byte: int,
|
||||
end_byte: int,
|
||||
s3_client: S3Client,
|
||||
) -> StringIO:
|
||||
bucket, key = s3_uri.replace('s3://', '').split('/', 1)
|
||||
|
||||
response = s3_client.get_object(
|
||||
Bucket=bucket,
|
||||
Key=key,
|
||||
Range=f'bytes={start_byte}-{end_byte}',
|
||||
)
|
||||
|
||||
return StringIO(response['Body'].read().decode('utf-8'))
|
||||
40
users-events/app/events/email_receiving.py
Normal file
40
users-events/app/events/email_receiving.py
Normal file
@@ -0,0 +1,40 @@
|
||||
import urllib.parse as urllib_parse
|
||||
from email.utils import parseaddr
|
||||
|
||||
from aws_lambda_powertools import Logger
|
||||
from aws_lambda_powertools.utilities.data_classes import SESEvent, event_source
|
||||
from aws_lambda_powertools.utilities.typing import LambdaContext
|
||||
from layercake.dynamodb import DynamoDBPersistenceLayer, KeyPair, SortKey
|
||||
|
||||
from boto3clients import dynamodb_client
|
||||
from config import USER_TABLE
|
||||
from ses_utils import get_header_value
|
||||
|
||||
logger = Logger(__name__)
# Persistence layer over the user table; used to resolve recipient -> owner.
user_layer = DynamoDBPersistenceLayer(USER_TABLE, dynamodb_client)


@logger.inject_lambda_context
@event_source(data_class=SESEvent)
def lambda_handler(event: SESEvent, context: LambdaContext) -> dict:
    """Decide whether an inbound SES message belongs to a known mailbox.

    Looks the (lowercased, URL-decoded) recipient address up in the user
    table and returns an SES receipt-rule disposition.

    Returns:
        ``{'disposition': 'STOP_RULE_SET'}`` when the recipient is unknown,
        ``{'disposition': 'CONTINUE'}`` otherwise.
    """
    ses = event.record.ses
    # Recipient addresses can arrive URL-encoded; normalise before lookup.
    to = urllib_parse.unquote(ses.receipt.recipients[0]).lower()
    # Parsed but currently unused (underscored); the call is kept so a
    # missing/malformed From header still surfaces here.
    _name, _email_from = parseaddr(get_header_value(ses.mail.headers, 'from'))

    # NOTE(review): path_spec='user_id' suggests this lookup yields a user
    # id, yet the variable is named org_id — confirm against the table
    # schema which attribute is actually returned.
    org_id = user_layer.collection.get_item(
        KeyPair('email', SortKey(to, path_spec='user_id')),
        raise_on_error=False,
        default={},
    )

    if not org_id:
        # Unknown recipient: stop processing the SES rule set.
        return {'disposition': 'STOP_RULE_SET'}

    # Structured log (was a bare print) so the mailbox routing record goes
    # through the injected Lambda context logger like everything else here.
    logger.info(
        {
            'id': f'mailbox#{org_id}',
            'sk': ses.mail.message_id,
        }
    )

    return {'disposition': 'CONTINUE'}
|
||||
Reference in New Issue
Block a user