add user-management

This commit is contained in:
2025-05-29 13:28:54 -03:00
parent 797a325cb0
commit 590cf10777
35 changed files with 5275 additions and 223 deletions

View File

View File

@@ -0,0 +1,20 @@
from aws_lambda_powertools.utilities.data_classes import (
EventBridgeEvent,
event_source,
)
from aws_lambda_powertools.utilities.typing import LambdaContext
from boto3clients import s3_client
from config import CHUNK_SIZE
from csv_utils import byte_ranges
# Presumably smart_open-style transport parameters that reuse the shared
# boto3 S3 client — TODO confirm against byte_ranges' expected signature.
transport_params = {'client': s3_client}
@event_source(data_class=EventBridgeEvent)
def lambda_handler(event: EventBridgeEvent, context: LambdaContext) -> bool:
    """Compute byte-range chunk boundaries for the CSV named in the event.

    The EventBridge detail carries a ``new_image`` payload whose ``s3uri``
    points at the CSV to split; ``byte_ranges`` is invoked with the
    configured ``CHUNK_SIZE``. Always reports success to the invoker.
    """
    image = event.detail['new_image']
    # Result is not consumed here; presumably byte_ranges has side effects
    # or this handler is still a work in progress — TODO confirm.
    byte_ranges(image['s3uri'], CHUNK_SIZE, transport_params=transport_params)
    return True

View File

@@ -0,0 +1,14 @@
from aws_lambda_powertools.utilities.data_classes import (
EventBridgeEvent,
event_source,
)
from aws_lambda_powertools.utilities.typing import LambdaContext
from boto3clients import s3_client
# NOTE(review): defined but unused in the visible handler below —
# presumably reserved for a later smart_open/boto3 call; confirm.
transport_params = {'client': s3_client}
@event_source(data_class=EventBridgeEvent)
def lambda_handler(event: EventBridgeEvent, context: LambdaContext) -> bool:
    """Placeholder EventBridge handler.

    The event payload is ignored; the handler unconditionally reports
    success so the triggering rule completes cleanly.
    """
    # Nothing to process yet — acknowledge and succeed.
    return True

View File

@@ -0,0 +1,49 @@
import csv
from io import StringIO
from aws_lambda_powertools.utilities.data_classes import (
EventBridgeEvent,
event_source,
)
from aws_lambda_powertools.utilities.typing import LambdaContext
from boto3clients import s3_client
# NOTE(review): unused in this module's visible code (the handler passes
# s3_client directly to _get_s3_object_range) — presumably kept for parity
# with the sibling lambdas; confirm before removing.
transport_params = {'client': s3_client}
@event_source(data_class=EventBridgeEvent)
def lambda_handler(event: EventBridgeEvent, context: LambdaContext) -> bool:
    """Fetch one byte range of a CSV object from S3 and echo its rows.

    The EventBridge detail's ``new_image`` payload supplies the object's
    ``s3_uri`` plus the ``start_byte``/``end_byte`` offsets of the chunk
    this invocation should read. Each parsed row is printed (ends up in
    CloudWatch Logs) and success is always reported.
    """
    image = event.detail['new_image']
    chunk = _get_s3_object_range(
        image['s3_uri'],
        start_byte=image['start_byte'],
        end_byte=image['end_byte'],
        s3_client=s3_client,
    )
    for row in csv.reader(chunk):
        print(row)
    return True
def _get_s3_object_range(
s3_uri: str,
*,
start_byte: int,
end_byte: int,
s3_client,
) -> StringIO:
bucket, key = s3_uri.replace('s3://', '').split('/', 1)
response = s3_client.get_object(
Bucket=bucket,
Key=key,
Range=f'bytes={start_byte}-{end_byte}',
)
return StringIO(response['Body'].read().decode('utf-8'))

View File

@@ -0,0 +1,45 @@
import urllib.parse as urllib_parse
from email.utils import parseaddr
from typing import Any, Iterator
from aws_lambda_powertools import Logger
from aws_lambda_powertools.utilities.data_classes import SESEvent, event_source
from aws_lambda_powertools.utilities.typing import LambdaContext
# Powertools structured logger, named after this module; paired with the
# inject_lambda_context decorator on the handler below.
logger = Logger(__name__)
@logger.inject_lambda_context
@event_source(data_class=SESEvent)
def lambda_handler(event: SESEvent, context: LambdaContext) -> dict:
    """Gate an inbound SES message on its From address.

    Mail from the allow-listed sender continues through the SES receipt
    rule set; everything else stops it.
    """
    ses = event.record.ses
    # NOTE(review): recipient and subject are computed but never used below;
    # the From lookup also raises ValueError when the header is missing —
    # these calls are kept for behavioral parity.
    recipient = urllib_parse.unquote(ses.receipt.recipients[0]).lower()
    _display_name, sender = parseaddr(get_header_value(ses.mail.headers, 'from'))
    subject = get_header_value(
        ses.mail.headers,
        'subject',
        default='',
        raise_on_missing=False,
    )
    verdict = 'CONTINUE' if sender == 'sergio@somosbeta.com.br' else 'STOP_RULE_SET'
    return {'disposition': verdict}
def get_header_value(
    headers: Iterator,
    header_name: str,
    *,
    default: Any = None,
    raise_on_missing: bool = True,
) -> Any:
    """Return the value of the first header whose name matches ``header_name``.

    The comparison is case-insensitive on BOTH sides (previously only the
    header's own name was lowered, so a caller passing 'Subject' would
    silently miss; lowercase callers behave exactly as before).

    Parameters
    ----------
    headers : Iterator
        Iterable of header objects exposing ``name`` and ``value``.
    header_name : str
        Header to look up, any casing.
    default : Any
        Value returned when the header is absent and ``raise_on_missing``
        is False.
    raise_on_missing : bool
        When True (the default), a missing header raises instead of
        returning ``default``.

    Raises
    ------
    ValueError
        If the header is absent and ``raise_on_missing`` is True.
    """
    wanted = header_name.lower()
    for header in headers:
        if header.name.lower() == wanted:
            return header.value
    if raise_on_missing:
        raise ValueError(f'{header_name} not found.')
    return default