update comment
This commit is contained in:
@@ -1,284 +0,0 @@
|
||||
import base64
|
||||
import json
|
||||
import os
|
||||
import urllib.parse as urlparse
|
||||
from datetime import datetime
|
||||
from enum import Enum
|
||||
from typing import Any, Type
|
||||
|
||||
from glom import glom
|
||||
from layercake.dateutils import now, timestamp
|
||||
from layercake.dynamodb import DynamoDBPersistenceLayer, Key
|
||||
|
||||
DELIMITER = '#'
|
||||
TZ = os.getenv('TZ', 'UTC')
|
||||
|
||||
|
||||
def json_b64encode(obj: dict) -> str:
    """Serialize *obj* to JSON, URL-safe base64-encode it, and percent-quote the result.

    The output is safe to embed in a query string (used for pagination tokens).
    Inverse of :func:`json_b64decode`.
    """
    payload = json.dumps(obj).encode('utf-8')
    token = base64.urlsafe_b64encode(payload).decode('utf-8')
    return urlparse.quote(token)
|
||||
|
||||
|
||||
def json_b64decode(s: str) -> dict:
    """Decode a token produced by :func:`json_b64encode` back into a dict.

    Percent-unquotes, URL-safe base64-decodes, then parses the JSON payload.
    """
    raw = urlparse.unquote(s).encode('utf-8')
    text = base64.urlsafe_b64decode(raw).decode('utf-8')
    return json.loads(text)
|
||||
|
||||
|
||||
class KeyLoc(Enum):
    """Which DynamoDB key a prefix is attached to when composing item keys."""

    PK = 'pk'  # Partition Key
    SK = 'sk'  # Sort Key
|
||||
|
||||
|
||||
def _join_prefix(
|
||||
prefix: str | tuple[str, ...],
|
||||
delimiter: str,
|
||||
*args,
|
||||
) -> str:
|
||||
"""
|
||||
Joins a prefix (or prefixes) with additional arguments using a specified delimiter.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
prefix : str | tuple[str, ...]
|
||||
A prefix or a tuple of prefixes to be added.
|
||||
delimiter : str
|
||||
Delimiter to use for joining the strings.
|
||||
*args
|
||||
Additional strings to be joined with the prefix.
|
||||
|
||||
Returns
|
||||
-------
|
||||
str
|
||||
A single string with the prefix and additional arguments joined by the delimiter.
|
||||
"""
|
||||
if isinstance(prefix, str):
|
||||
prefix = (prefix,)
|
||||
|
||||
return delimiter.join(prefix + args)
|
||||
|
||||
|
||||
def _keys(
    *,
    pk: str,
    sk: str,
    prefix: str | tuple[str, ...],
    keyloc_prefix: KeyLoc,
    delimiter: str,
) -> dict:
    """
    Build the DynamoDB item keys, prefixing either the partition or sort key.

    Parameters
    ----------
    pk : str
        Partition Key value.
    sk : str
        Sort Key value.
    prefix : str | tuple[str, ...]
        Prefix (or prefixes) joined onto the selected key.
    keyloc_prefix : KeyLoc
        Specifies whether to add the prefix to the Partition Key or Sort Key.
    delimiter : str
        Delimiter used to join the prefix and the key.

    Returns
    -------
    dict
        A dict with keys id and sk representing the constructed keys for the DynamoDB item.
    """
    if keyloc_prefix == KeyLoc.PK:
        return {
            'id': _join_prefix(prefix, delimiter, pk),
            'sk': sk,
        }

    return {
        'id': pk,
        'sk': _join_prefix(prefix, delimiter, sk),
    }
|
||||
|
||||
|
||||
def _ttl_kwargs(ttl: datetime | int | None = None, tz=None) -> dict:
|
||||
if not ttl:
|
||||
return {}
|
||||
|
||||
if isinstance(ttl, int):
|
||||
return {
|
||||
'ttl': ttl,
|
||||
'ttl_date': datetime.fromtimestamp(ttl, tz),
|
||||
}
|
||||
|
||||
return {
|
||||
'ttl': timestamp(ttl),
|
||||
'ttl_date': ttl,
|
||||
}
|
||||
|
||||
|
||||
class MissingRecordError(ValueError):
    """Raised when a requested DynamoDB record does not exist."""
|
||||
|
||||
|
||||
def get_record(
    pk: str,
    sk: str | tuple[str, ...],
    *,
    glom_spec: str | None = None,
    raise_on_missing: bool = True,
    default_on_missing: Any = None,
    missing_cls: Type[Exception] = MissingRecordError,
    delimiter: str = DELIMITER,
    persistence_layer: DynamoDBPersistenceLayer,
) -> Any:
    """
    Fetch a single item by key, optionally projecting a value out of it.

    Parameters
    ----------
    pk : str
        Partition Key value.
    sk : str | tuple[str, ...]
        Sort Key value; a tuple is joined with *delimiter*.
    glom_spec : str | None
        Optional glom spec applied to the found record.
    raise_on_missing : bool
        When True, raise *missing_cls* if no record is found.
    default_on_missing : Any
        Value returned (or used as glom default) when nothing is found.
    missing_cls : Type[Exception]
        Exception class raised on a missing record; must subclass Exception.
    delimiter : str
        Delimiter used to join a tuple sort key.

    Returns
    -------
    Any
        The record, the glom projection, or *default_on_missing*.

    Raises
    ------
    TypeError
        If *missing_cls* is not an Exception subclass.
    """
    if not issubclass(missing_cls, Exception):
        raise TypeError(
            f'missing_cls must be a subclass of Exception, got {missing_cls}'
        )

    sort_key = sk if isinstance(sk, str) else delimiter.join(sk)
    record = persistence_layer.get_item(Key(pk, sort_key))

    if not record:
        if raise_on_missing:
            raise missing_cls(f'Record with Key({pk}, {sk}) not found.')
        return default_on_missing

    if glom_spec:
        return glom(record, glom_spec, default=default_on_missing)

    return record
|
||||
|
||||
|
||||
def add_record(
    pk: str,
    sk: str,
    prefix: str | tuple[str, ...],
    *,
    keyloc_prefix: KeyLoc = KeyLoc.SK,
    ttl: datetime | int | None = None,
    delimiter: str = DELIMITER,
    persistence_layer: DynamoDBPersistenceLayer,
    **kwargs: Any,
) -> bool:
    """
    Write an item whose keys are built from *pk*, *sk* and *prefix*.

    Parameters
    ----------
    keyloc_prefix: str | tuple[str, ...]
        Specifies whether to add the prefix to the Partition Key or Sort Key.
    ttl: datetime | int | None
        Optional expiry; adds ttl/ttl_date attributes when given.
    delimiter: str
        Delimiter used to join the prefix and the key.
    **kwargs
        Extra attributes stored on the item.

    Returns
    -------
    bool
    """
    created_at = now(TZ)
    item = _keys(
        pk=pk,
        sk=sk,
        prefix=prefix,
        keyloc_prefix=keyloc_prefix,
        delimiter=delimiter,
    )
    # Merge order matters: caller attributes may not clobber create_date
    # or the TTL attributes, which are applied last.
    item = item | kwargs | {'create_date': created_at} | _ttl_kwargs(ttl, created_at.tzinfo)
    return persistence_layer.put_item(item=item)
|
||||
|
||||
|
||||
def del_record(
    pk: str,
    sk: str,
    prefix: str | tuple[str, ...],
    *,
    keyloc_prefix: KeyLoc = KeyLoc.SK,
    delimiter: str = DELIMITER,
    persistence_layer: DynamoDBPersistenceLayer,
) -> bool:
    """
    Delete the item whose keys are built from *pk*, *sk* and *prefix*.

    Parameters
    ----------
    keyloc_prefix: str | tuple[str, ...]
        Specifies whether to add the prefix to the Partition Key or Sort Key.
    delimiter: str
        Delimiter used to join the prefix and the key.

    Returns
    -------
    bool
    """
    key = _keys(
        pk=pk,
        sk=sk,
        prefix=prefix,
        keyloc_prefix=keyloc_prefix,
        delimiter=delimiter,
    )
    return persistence_layer.delete_item(key=key)
|
||||
|
||||
|
||||
def get_records(
    pk: str,
    prefix: str | tuple[str, ...],
    *,
    keyloc_prefix: KeyLoc = KeyLoc.SK,
    expr_attr_name: dict | None = None,
    expr_attr_values: dict | None = None,
    limit: int = 10,
    start_key: str | None = None,
    filter_expr: str | None = None,
    delimiter: str = DELIMITER,
    persistence_layer: DynamoDBPersistenceLayer,
) -> dict[str, Any]:
    """
    Query a page of records under a prefixed partition or sort key.

    Parameters
    ----------
    pk: str
        Partition Key value.
    keyloc_prefix: str | tuple[str, ...]
        Specifies whether to add the prefix to the Partition Key or Sort Key.
    expr_attr_name: dict | None
        Extra expression attribute names merged into the query.
    expr_attr_values: dict | None
        Extra expression attribute values merged into the query.
    start_key: str | None
        Opaque pagination token produced by a previous call (see last_key).
    delimiter: str
        Delimiter used to join the prefix and the key.

    Returns
    -------
    dict
        {'items': [...], 'last_key': <token or None>}.
    """
    # `None` sentinels instead of mutable `{}` defaults (shared across calls).
    expr_attr_name = expr_attr_name if expr_attr_name is not None else {}
    expr_attr_values = expr_attr_values if expr_attr_values is not None else {}

    prefix_ = _join_prefix(prefix, delimiter)
    pk = f'{prefix_}{delimiter}{pk}' if keyloc_prefix == KeyLoc.PK else pk
    sk = f'{prefix_}{delimiter}'

    key_cond_expr = (
        'id = :pk'
        if keyloc_prefix == KeyLoc.PK
        else 'id = :pk AND begins_with(sk, :sk)'
    )
    expr_attr_values_ = (
        {':pk': pk}
        if keyloc_prefix == KeyLoc.PK
        else {
            ':pk': pk,
            ':sk': sk,
        }
    )

    result = persistence_layer.query(
        key_cond_expr=key_cond_expr,
        expr_attr_name=expr_attr_name,
        expr_attr_values=expr_attr_values_ | expr_attr_values,
        filter_expr=filter_expr,
        limit=limit,
        index_forward=False,
        start_key=json_b64decode(start_key) if start_key else {},
    )

    items = (
        result['items']
        if keyloc_prefix == KeyLoc.PK
        # Removes the prefix from sk
        else [x | {'sk': x['sk'].removeprefix(sk)} for x in result['items']]
    )

    return {
        'items': items,
        'last_key': json_b64encode(result['last_key']) if result['last_key'] else None,
    }
|
||||
@@ -1,5 +1,5 @@
|
||||
import math
|
||||
from typing import Any
|
||||
from typing import TypedDict
|
||||
|
||||
from elasticsearch import Elasticsearch
|
||||
from elasticsearch_dsl import Search
|
||||
@@ -7,13 +7,19 @@ from elasticsearch_dsl import Search
|
||||
MAX_PAGE_SIZE = 100
|
||||
|
||||
|
||||
class PaginatedResult(TypedDict):
    """Shape of a paginated search response."""

    total_items: int  # total matching documents
    total_pages: int  # total pages at the requested page size
    items: list[dict]  # documents on the current page
|
||||
|
||||
|
||||
def search(
|
||||
index: str,
|
||||
*,
|
||||
query: dict,
|
||||
page_size: int = 25,
|
||||
elastic_client: Elasticsearch,
|
||||
) -> dict[str, Any]:
|
||||
) -> PaginatedResult:
|
||||
s = Search(
|
||||
using=elastic_client,
|
||||
index=index,
|
||||
@@ -36,5 +42,5 @@ def search(
|
||||
return {
|
||||
'total_items': r.hits.total.value, # type: ignore
|
||||
'total_pages': math.ceil(r.hits.total.value / page_size), # type: ignore
|
||||
'hits': [hit.to_dict() for hit in r],
|
||||
'items': [hit.to_dict() for hit in r],
|
||||
}
|
||||
|
||||
@@ -8,28 +8,33 @@ from aws_lambda_powertools.event_handler.api_gateway import (
|
||||
Router,
|
||||
)
|
||||
from elasticsearch import Elasticsearch
|
||||
from layercake.dynamodb import DynamoDBPersistenceLayer
|
||||
from layercake.dynamodb import (
|
||||
ComposeKey,
|
||||
DynamoDBCollection,
|
||||
DynamoDBPersistenceLayer,
|
||||
KeyPair,
|
||||
PartitionKey,
|
||||
)
|
||||
from pydantic import UUID4, BaseModel, StringConstraints
|
||||
|
||||
import elastic
|
||||
from dynamodb import KeyLoc, get_records
|
||||
from http_models import RecordResponse, SearchResponse
|
||||
from models import User
|
||||
from settings import ELASTIC_CONN, USER_TABLE
|
||||
|
||||
router = Router()
|
||||
dynamodb_client = boto3.client('dynamodb')
|
||||
user_layer = DynamoDBPersistenceLayer(USER_TABLE, dynamodb_client)
|
||||
collect = DynamoDBCollection(user_layer)
|
||||
elastic_client = Elasticsearch(**ELASTIC_CONN)
|
||||
|
||||
|
||||
@router.get('/', compress=True, tags=['User'], summary='Get users')
|
||||
def get_users() -> SearchResponse:
|
||||
def get_users():
|
||||
event = router.current_event
|
||||
query = event.get_query_string_value('query', '{}')
|
||||
page_size = event.get_query_string_value('page_size', '25')
|
||||
|
||||
return elastic.search( # type: ignore
|
||||
return elastic.search(
|
||||
index=USER_TABLE,
|
||||
page_size=int(page_size),
|
||||
query=json.loads(query),
|
||||
@@ -53,38 +58,30 @@ def patch_reset(id: str, payload: ResetPasswordPayload):
|
||||
|
||||
|
||||
@router.get('/<id>/emails', compress=True, tags=['User'], summary='Get user emails')
|
||||
def get_emails(id: str) -> RecordResponse:
|
||||
def get_emails(id: str):
|
||||
start_key = router.current_event.get_query_string_value('start_key', None)
|
||||
|
||||
return get_records( # type: ignore
|
||||
pk=id,
|
||||
prefix='emails',
|
||||
return collect.get_items(
|
||||
key=KeyPair(id, 'emails'),
|
||||
start_key=start_key,
|
||||
persistence_layer=user_layer,
|
||||
)
|
||||
|
||||
|
||||
@router.get('/<id>/logs', compress=True, tags=['User'], summary='Get user logs')
|
||||
def get_logs(id: str) -> RecordResponse:
|
||||
def get_logs(id: str):
|
||||
start_key = router.current_event.get_query_string_value('start_key', None)
|
||||
|
||||
return get_records( # type: ignore
|
||||
pk=id,
|
||||
prefix='log',
|
||||
delimiter=':',
|
||||
keyloc_prefix=KeyLoc.PK,
|
||||
return collect.get_items(
|
||||
key=PartitionKey(ComposeKey(id, prefix='log', delimiter=':')),
|
||||
start_key=start_key,
|
||||
persistence_layer=user_layer,
|
||||
)
|
||||
|
||||
|
||||
@router.get('/<id>/orgs', compress=True, tags=['User'], summary='Get user orgs')
|
||||
def get_orgs(id: str) -> RecordResponse:
|
||||
def get_orgs(id: str):
|
||||
start_key = router.current_event.get_query_string_value('start_key', None)
|
||||
|
||||
return get_records( # type: ignore
|
||||
pk=id,
|
||||
prefix='orgs',
|
||||
return collect.get_items(
|
||||
key=KeyPair(id, 'orgs'),
|
||||
start_key=start_key,
|
||||
persistence_layer=user_layer,
|
||||
)
|
||||
|
||||
@@ -5,6 +5,7 @@ from dataclasses import dataclass
|
||||
from http import HTTPMethod
|
||||
|
||||
import boto3
|
||||
import layercake.jsonl as jsonl
|
||||
import pytest
|
||||
from layercake.dynamodb import DynamoDBPersistenceLayer
|
||||
|
||||
@@ -123,10 +124,16 @@ def dynamodb_persistence_layer(dynamodb_client) -> DynamoDBPersistenceLayer:
|
||||
return DynamoDBPersistenceLayer(PYTEST_TABLE_NAME, dynamodb_client)
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
def dynamodb_seeds(dynamodb_client):
|
||||
with jsonl.readlines('tests/seeds.jsonl') as lines:
|
||||
for line in lines:
|
||||
dynamodb_client.put_item(TableName=PYTEST_TABLE_NAME, Item=line)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_app(monkeypatch):
|
||||
monkeypatch.setattr('settings.ELASTIC_CONN', {'hosts': 'http://127.0.0.1:9200'})
|
||||
|
||||
import app
|
||||
|
||||
return app
|
||||
|
||||
@@ -1,8 +1,46 @@
|
||||
import json
|
||||
from http import HTTPMethod, HTTPStatus
|
||||
|
||||
from layercake.dynamodb import DynamoDBCollection, DynamoDBPersistenceLayer
|
||||
|
||||
from ..conftest import HttpApiProxy, LambdaContext
|
||||
|
||||
|
||||
def test_get_emails(
    mock_app,
    monkeypatch,
    dynamodb_seeds,
    dynamodb_persistence_layer: DynamoDBPersistenceLayer,
    http_api_proxy: HttpApiProxy,
    lambda_context: LambdaContext,
):
    """GET /users/<id>/emails returns the seeded email records for that user."""
    # Swap the users router's collection for one backed by the test table
    # so the handler reads the seeded data instead of the real table.
    mock_app.users.collect = DynamoDBCollection(dynamodb_persistence_layer)

    r = mock_app.lambda_handler(
        http_api_proxy(
            raw_path='/users/5OxmMjL-ujoR5IMGegQz/emails',
            method=HTTPMethod.GET,
        ),
        lambda_context,
    )

    # This data was added from seeds
    assert json.loads(r['body']) == {
        'items': [
            {
                'email_verified': True,
                'mx_record_exists': True,
                'sk': 'emails#sergio@somosbeta.com.br',
                'email_primary': True,
                'id': '5OxmMjL-ujoR5IMGegQz',
                'create_date': '2019-03-25T00:00:00-03:00',
                'update_date': '2023-11-09T12:13:04.308986-03:00',
            }
        ],
        'last_key': None,
    }
|
||||
|
||||
|
||||
def test_post_user(
|
||||
mock_app,
|
||||
http_api_proxy: HttpApiProxy,
|
||||
|
||||
4
http-api/tests/seeds.jsonl
Normal file
4
http-api/tests/seeds.jsonl
Normal file
@@ -0,0 +1,4 @@
|
||||
{"id": {"S": "5OxmMjL-ujoR5IMGegQz"}, "sk": {"S": "0"}, "update_date": {"S": "2024-02-08T16:42:33.776409-03:00"}, "create_date": {"S": "2019-03-25T00:00:00-03:00"}, "email_verified": {"BOOL": true}, "cognito:sub": {"S": "58efed8d-d276-41a8-8502-4ab8b5a6415e"}, "cpf": {"S": "07879819908"}, "email": {"S": "sergio@somosbeta.com.br"}, "name": {"S": "S\u00e9rgio Rafael de Siqueira"}, "last_login": {"S": "2024-02-08T20:53:45.818126-03:00"}, "tenant:org_id": {"L": [{"S": "cJtK9SsnJhKPyxESe7g3DG"}, {"S": "edp8njvgQuzNkLx2ySNfAD"}, {"S": "8TVSi5oACLxTiT8ycKPmaQ"}]}}
|
||||
{"id": {"S": "5OxmMjL-ujoR5IMGegQz"}, "sk": {"S": "emails#sergio@somosbeta.com.br"}, "email_verified": {"BOOL": true}, "update_date": {"S": "2024-02-08T16:42:33.776409-03:00"}, "create_date": {"S": "2019-03-25T00:00:00-03:00"}, "email_primary": {"BOOL": true}, "mx_record_exists": {"BOOL": true}, "update_date": {"S": "2023-11-09T12:13:04.308986-03:00"}}
|
||||
{"id": {"S": "logs#5OxmMjL-ujoR5IMGegQz"}, "sk": {"S": "2024-02-08T16:42:33.776409-03:00"}, "action": {"S": "OPEN_EMAIL"}}
|
||||
{"id": {"S": "logs#5OxmMjL-ujoR5IMGegQz"}, "sk": {"S": "2019-03-25T00:00:00-03:00"}, "action": {"S": "CLICK_EMAIL"}}
|
||||
Reference in New Issue
Block a user