update
This commit is contained in:
@@ -1,5 +0,0 @@
|
|||||||
# Build the SAM application inside a Lambda-compatible container image,
# so native dependencies match the Lambda runtime.
build:
	sam build --use-container

# Deploy the application (always rebuilds first); --debug prints the
# underlying CloudFormation activity for troubleshooting.
deploy: build
	sam deploy --debug
@@ -1,83 +0,0 @@
|
|||||||
import csv
|
|
||||||
from typing import TextIO
|
|
||||||
|
|
||||||
from smart_open import open
|
|
||||||
|
|
||||||
|
|
||||||
def byte_ranges(
    csvfile: str,
    chunk_size: int = 100,
    **kwargs,
) -> list[tuple[int, int]]:
    """Compute byte ranges for reading a CSV file in fixed-size line chunks.

    Returns pairs (start_byte, end_byte) for each fixed-size group of lines.
    Ranges are inclusive on both ends; end_byte is the index of the chunk's
    final byte (the newline terminating its last line, when present).

    Parameters
    ----------
    csvfile : str
        Path to the CSV file, opened in binary mode internally.
    chunk_size : int, optional
        Number of lines per chunk. Default is 100. Must be >= 1.
    **kwargs :
        Extra options passed to `open()`, e.g., buffering.

    Returns
    -------
    list of tuple[int, int]
        Byte ranges covering each chunk of lines. Empty for an empty file.

    Raises
    ------
    ValueError
        If `chunk_size` is less than 1.

    Example
    -------
    >>> byte_ranges("users.csv", chunk_size=500)
    [(0, 3125), (3126, 6150), (6151, 9124)]
    """
    if chunk_size < 1:
        raise ValueError('chunk_size must be a positive integer')

    # line_offsets[i] is the byte offset where line i+1 starts; the final
    # entry is the end-of-file offset.
    line_offsets = [0]
    with open(csvfile, 'rb', **kwargs) as fp:
        while fp.readline():
            line_offsets.append(fp.tell())

    total_lines = len(line_offsets) - 1
    ranges: list[tuple[int, int]] = []

    for start_line in range(1, total_lines + 1, chunk_size):
        # End line index of this chunk, bounded by the total line count.
        end_line = min(start_line + chunk_size - 1, total_lines)
        start_byte = line_offsets[start_line - 1]
        # Next line starts at line_offsets[end_line], so the chunk's last
        # byte is one before it.
        end_byte = line_offsets[end_line] - 1
        ranges.append((start_byte, end_byte))

    return ranges
|
|
||||||
|
|
||||||
|
|
||||||
def detect_delimiter(sample: TextIO) -> str:
    """Detect the delimiter character used in a CSV file.

    Parameters
    ----------
    sample : TextIO
        A file-like object opened in text mode (e.g., from `open('file.csv')`).
        Must be readable and at position 0. The stream is rewound to
        position 0 before detection, even when detection fails.

    Returns
    -------
    str
        The detected delimiter character (e.g., ',', ';', '\\t').

    Raises
    ------
    csv.Error
        If the file cannot be parsed as CSV or delimiter detection fails.
    ValueError
        If the file is empty.
    """
    data = sample.read()
    # Rewind before sniffing so the caller's stream is restored even when
    # sniff() raises (the original only rewound on success).
    sample.seek(0)

    # Sniffer raises a generic csv.Error on empty input; surface the
    # documented ValueError instead.
    if not data:
        raise ValueError('cannot detect delimiter in an empty file')

    sniffer = csv.Sniffer()
    dialect = sniffer.sniff(data)

    return dialect.delimiter
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
[project]
|
|
||||||
name = "batch-jobs"
|
|
||||||
version = "0.1.0"
|
|
||||||
description = ""
|
|
||||||
readme = ""
|
|
||||||
requires-python = ">=3.13"
|
|
||||||
dependencies = ["layercake"]
|
|
||||||
|
|
||||||
[dependency-groups]
|
|
||||||
dev = [
|
|
||||||
"pytest>=8.3.4",
|
|
||||||
"pytest-cov>=6.0.0",
|
|
||||||
"ruff>=0.9.1",
|
|
||||||
]
|
|
||||||
|
|
||||||
[tool.pytest.ini_options]
|
|
||||||
pythonpath = ["app/"]
|
|
||||||
addopts = "--cov --cov-report html -v"
|
|
||||||
|
|
||||||
[tool.ruff]
|
|
||||||
target-version = "py311"
|
|
||||||
src = ["app"]
|
|
||||||
|
|
||||||
[tool.ruff.format]
|
|
||||||
quote-style = "single"
|
|
||||||
|
|
||||||
[tool.ruff.lint]
|
|
||||||
select = ["E", "F", "I"]
|
|
||||||
|
|
||||||
[tool.uv.sources]
|
|
||||||
layercake = { path = "../layercake" }
|
|
||||||
@@ -1,3 +0,0 @@
|
|||||||
{
|
|
||||||
"extraPaths": ["app/"]
|
|
||||||
}
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
version = 0.1
|
|
||||||
[default.deploy.parameters]
|
|
||||||
stack_name = "saladeaula-batch-jobs"
|
|
||||||
resolve_s3 = true
|
|
||||||
s3_prefix = "batch_jobs"
|
|
||||||
region = "sa-east-1"
|
|
||||||
confirm_changeset = false
|
|
||||||
capabilities = "CAPABILITY_IAM"
|
|
||||||
image_repositories = []
|
|
||||||
@@ -1,44 +0,0 @@
|
|||||||
AWSTemplateFormatVersion: 2010-09-09
|
|
||||||
Transform: AWS::Serverless-2016-10-31
|
|
||||||
|
|
||||||
Globals:
|
|
||||||
Function:
|
|
||||||
CodeUri: app/
|
|
||||||
Runtime: python3.13
|
|
||||||
Tracing: Active
|
|
||||||
Architectures:
|
|
||||||
- x86_64
|
|
||||||
Layers:
|
|
||||||
- !Sub arn:aws:lambda:sa-east-1:336641857101:layer:layercake:55
|
|
||||||
Environment:
|
|
||||||
Variables:
|
|
||||||
TZ: America/Sao_Paulo
|
|
||||||
LOG_LEVEL: DEBUG
|
|
||||||
POWERTOOLS_LOGGER_SAMPLE_RATE: 0.1
|
|
||||||
POWERTOOLS_LOGGER_LOG_EVENT: true
|
|
||||||
|
|
||||||
Resources:
|
|
||||||
EventLog:
|
|
||||||
Type: AWS::Logs::LogGroup
|
|
||||||
Properties:
|
|
||||||
RetentionInDays: 90
|
|
||||||
|
|
||||||
EventCsvChunksFunction:
|
|
||||||
Type: AWS::Serverless::Function
|
|
||||||
Properties:
|
|
||||||
Handler: events.csv_chunks.lambda_handler
|
|
||||||
LoggingConfig:
|
|
||||||
LogGroup: !Ref EventLog
|
|
||||||
Policies:
|
|
||||||
- S3CrudPolicy:
|
|
||||||
BucketName: saladeaula.digital
|
|
||||||
Events:
|
|
||||||
DynamoDBEvent:
|
|
||||||
Type: EventBridgeRule
|
|
||||||
Properties:
|
|
||||||
Pattern:
|
|
||||||
resources: [betaeducacao-prod-users_d2o3r5gmm4it7j]
|
|
||||||
detail:
|
|
||||||
new_image:
|
|
||||||
sk:
|
|
||||||
- prefix: batch_jobs#
|
|
||||||
@@ -1,16 +0,0 @@
|
|||||||
from dataclasses import dataclass
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
|
|
||||||
@dataclass
class LambdaContext:
    """Minimal stand-in for the AWS Lambda context object used in tests."""

    # Fields mirror the attributes a real Lambda context exposes; the
    # values are fixed fakes suitable for unit tests.
    function_name: str = 'test'
    memory_limit_in_mb: int = 128
    invoked_function_arn: str = 'arn:aws:lambda:eu-west-1:809313241:function:test'
    aws_request_id: str = '52fdfc07-2182-154f-163f-5f0f9a621d72'
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
def lambda_context() -> LambdaContext:
    """Provide a fake Lambda context with default test values."""
    return LambdaContext()
|
|
||||||
@@ -1,13 +0,0 @@
|
|||||||
import events.chunk_csv as app
|
|
||||||
|
|
||||||
|
|
||||||
def test_chunk_csv(lambda_context):
    """Invoke the handler with an EventBridge-style event pointing at S3.

    NOTE(review): integration-style test — the handler reads a real object
    from S3 (s3://saladeaula.digital/...), so it needs network access and
    AWS credentials. Only absence of an exception is verified; nothing is
    asserted about the result.
    """
    event = {
        'detail': {
            'new_image': {
                's3uri': 's3://saladeaula.digital/samples/large_users.csv',
            },
        },
    }

    app.lambda_handler(event, lambda_context)  # type: ignore
|
|
||||||
File diff suppressed because it is too large
Load Diff
@@ -1,28 +0,0 @@
|
|||||||
CADASTRO DE COLABORADOR,,,,
|
|
||||||
,NOME COMPLETO,EMAIL (letra minúscula),CPF,TREINAMENTO
|
|
||||||
,ANDRE HENRIQUE LOPES ZAFALON,henrique.zafalon@fanucamerica.com,261.955.138-22,NR-35 (RECICLAGEM)
|
|
||||||
,SERGIO DA SILVA CUPERTINO,sergio.cupertino@fanucamerica.com,066.945.708-64,NR-10 (RECICLAGEM)
|
|
||||||
,SERGIO DA SILVA CUPERTINO,sergio.cupertino@fanucamerica.com,066.945.708-64,NR-35 (RECICLAGEM)
|
|
||||||
,ROVANE CAMPOS,rovane.campos@fanucamerica.com,095.958.578-82,NR-10 (RECICLAGEM)
|
|
||||||
,ROVANE CAMPOS,rovane.campos@fanucamerica.com,095.958.578-82,NR-35 (RECICLAGEM)
|
|
||||||
,MARCIO ATSUSHI KANEKO MASUDA,marcio.masuda@fanucamerica.com,293.042.798-10,NR-10 (RECICLAGEM)
|
|
||||||
,FABIO AKIRA HARAGUCHI,fabio.haraguchi@fanucamerica.com,287.018.428-03,NR-10 (RECICLAGEM)
|
|
||||||
,EMIDIO YOITI MOCHIZUKI,emidio.mochizuki@fanucamerica.com,268.579.208-26,NR-10 (RECICLAGEM)
|
|
||||||
,EMIDIO YOITI MOCHIZUKI,emidio.mochizuki@fanucamerica.com,268.579.208-26,NR-35 (RECICLAGEM)
|
|
||||||
,ERIC HIDEKI MORIKIO,eric.morikio@fanucamerica.com,417.359.838-61,NR-10 (RECICLAGEM)
|
|
||||||
,HENRIQUE DE FIGUEIREDO BASTOS FERRAZ,henrique.ferraz@fanucamerica.com,417.059.788-51,NR-10 (RECICLAGEM)
|
|
||||||
,LAYS MORETTI DA SILVA,lays.silva@fanucamerica.com,013.107.662-07,NR-10 (RECICLAGEM)
|
|
||||||
,LAYS MORETTI DA SILVA,lays.silva@fanucamerica.com,013.107.662-07,NR-12
|
|
||||||
,ANDRE DE SOUZA,andre.souza@fanucamerica.com,290.688.648-31,NR-10 (RECICLAGEM)
|
|
||||||
,ANDRE DE SOUZA,andre.souza@fanucamerica.com,290.688.648-31,NR-12
|
|
||||||
,RAFAEL TOSHIO BURATO MAEDA,rafael.maeda@fanucamerica.com,394.153.268-59,NR-10 (RECICLAGEM)
|
|
||||||
,RAFAEL TOSHIO BURATO MAEDA,rafael.maeda@fanucamerica.com,394.153.268-59,NR-12
|
|
||||||
,RAFAEL TOSHIO BURATO MAEDA,rafael.maeda@fanucamerica.com,394.153.268-59,NR-35 (RECICLAGEM)
|
|
||||||
,RICARDO GALLES BONET,ricardo.bonet@fanucamerica.com,424.430.528-93,NR-10 (RECICLAGEM)
|
|
||||||
,RULIO SIEFERT SERA,rulio.sera@fanucamerica.com,063.916.859-08,NR-10 (RECICLAGEM)
|
|
||||||
,MACIEL FERREIRA BOMFIM,maciel.bomfim@fanucamerica.com,334.547.088-85,NR-10 (RECICLAGEM)
|
|
||||||
,JAIME EDUARDO GALVEZ AVILES,jaime.galvez@fanucamerica.com,280.238.818-50,NR-12
|
|
||||||
,JAIME EDUARDO GALVEZ AVILES,jaime.galvez@fanucamerica.com,280.238.818-50,NR-35 (RECICLAGEM)
|
|
||||||
,HIGOR MACHADO SILVA,higor.silva@fanucamerica.com,419.879.878-88,NR-12
|
|
||||||
,LÁZARO SOUZA DIAS,lazaro.dias@fanucamerica.com,067.179.825-19,NR-12
|
|
||||||
,JOÃO PEDRO AGUIAR GALASSO,joao.pedro@fanucamerica.com,570.403.588-40,NR-12
|
|
||||||
|
@@ -1,29 +0,0 @@
|
|||||||
from csv_utils import byte_ranges, detect_delimiter
|
|
||||||
|
|
||||||
|
|
||||||
def test_detect_delimiter():
    """The bundled sample file is comma-separated, so ',' is expected."""
    with open('tests/samples/users.csv') as fp:
        assert detect_delimiter(fp) == ','
|
|
||||||
|
|
||||||
|
|
||||||
def test_byte_ranges():
    """byte_ranges splits the sample file into 10-line chunks.

    Besides checking the exact offsets, the last chunk is read back from
    the file and compared with the expected raw text to prove the byte
    ranges are accurate.
    """
    csvpath = 'tests/samples/users.csv'
    ranges = byte_ranges(csvpath, 10)
    # Keep only the final (start, end) pair for the read-back check below.
    *_, pair = ranges
    start_byte, end_byte = pair

    assert ranges == [(0, 808), (809, 1655), (1656, 2303)]

    expected = """,RICARDO GALLES BONET,ricardo.bonet@fanucamerica.com,424.430.528-93,NR-10 (RECICLAGEM)
,RULIO SIEFERT SERA,rulio.sera@fanucamerica.com,063.916.859-08,NR-10 (RECICLAGEM)
,MACIEL FERREIRA BOMFIM,maciel.bomfim@fanucamerica.com,334.547.088-85,NR-10 (RECICLAGEM)
,JAIME EDUARDO GALVEZ AVILES,jaime.galvez@fanucamerica.com,280.238.818-50,NR-12
,JAIME EDUARDO GALVEZ AVILES,jaime.galvez@fanucamerica.com,280.238.818-50,NR-35 (RECICLAGEM)
,HIGOR MACHADO SILVA,higor.silva@fanucamerica.com,419.879.878-88,NR-12
,LÁZARO SOUZA DIAS,lazaro.dias@fanucamerica.com,067.179.825-19,NR-12
,JOÃO PEDRO AGUIAR GALASSO,joao.pedro@fanucamerica.com,570.403.588-40,NR-12"""

    with open(csvpath, 'rb') as f:
        f.seek(start_byte)
        # end_byte is inclusive, hence the +1 on the read length.
        data = f.read(end_byte - start_byte + 1)
        assert data.decode('utf-8') == expected
|
|
||||||
1039
batch-jobs/uv.lock
generated
1039
batch-jobs/uv.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -91,6 +91,7 @@ def get_course(id: str):
|
|||||||
)
|
)
|
||||||
def put_course(id: str, payload: Course):
|
def put_course(id: str, payload: Course):
|
||||||
update_course(id, payload, persistence_layer=course_layer)
|
update_course(id, payload, persistence_layer=course_layer)
|
||||||
|
|
||||||
return JSONResponse(
|
return JSONResponse(
|
||||||
body=payload,
|
body=payload,
|
||||||
status_code=HTTPStatus.OK,
|
status_code=HTTPStatus.OK,
|
||||||
|
|||||||
@@ -15,7 +15,7 @@ def create_course(
|
|||||||
transact.put(
|
transact.put(
|
||||||
item={
|
item={
|
||||||
'sk': '0',
|
'sk': '0',
|
||||||
'tenant__org_id': {org.id},
|
'metadata__tenant_id': org.id,
|
||||||
'create_date': now_,
|
'create_date': now_,
|
||||||
**course.model_dump(),
|
**course.model_dump(),
|
||||||
}
|
}
|
||||||
@@ -24,7 +24,7 @@ def create_course(
|
|||||||
item={
|
item={
|
||||||
'id': course.id,
|
'id': course.id,
|
||||||
'sk': 'metadata#tenant',
|
'sk': 'metadata#tenant',
|
||||||
'org_id': org.id,
|
'tenant_id': f'ORG#{org.id}',
|
||||||
'name': org.name,
|
'name': org.name,
|
||||||
'create_date': now_,
|
'create_date': now_,
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -5,7 +5,6 @@ from uuid import uuid4
|
|||||||
|
|
||||||
from layercake.dateutils import now, ttl
|
from layercake.dateutils import now, ttl
|
||||||
from layercake.dynamodb import DynamoDBPersistenceLayer, KeyPair, TransactItems
|
from layercake.dynamodb import DynamoDBPersistenceLayer, KeyPair, TransactItems
|
||||||
from layercake.strutils import md5_hash
|
|
||||||
|
|
||||||
from conf import ORDER_TABLE
|
from conf import ORDER_TABLE
|
||||||
from models import Course, Enrollment
|
from models import Course, Enrollment
|
||||||
@@ -30,7 +29,7 @@ class Rel(TypedDict):
|
|||||||
|
|
||||||
|
|
||||||
class LifecycleEvents(str, Enum):
|
class LifecycleEvents(str, Enum):
|
||||||
"""Schedules lifecycle events."""
|
"""Lifecycle events related to scheduling actions."""
|
||||||
|
|
||||||
# Reminder if the user does not access within 3 days
|
# Reminder if the user does not access within 3 days
|
||||||
REMINDER_NO_ACCESS_3_DAYS = 'schedules#reminder_no_access_3_days'
|
REMINDER_NO_ACCESS_3_DAYS = 'schedules#reminder_no_access_3_days'
|
||||||
@@ -38,11 +37,11 @@ class LifecycleEvents(str, Enum):
|
|||||||
# When there is no activity 7 days after the first access
|
# When there is no activity 7 days after the first access
|
||||||
NO_ACTIVITY_7_DAYS = 'schedules#no_activity_7_days'
|
NO_ACTIVITY_7_DAYS = 'schedules#no_activity_7_days'
|
||||||
|
|
||||||
# When the access period expires
|
# Reminder 30 days before the access period expires
|
||||||
ACCESS_PERIOD_EXPIRED = 'schedules#access_period_expired'
|
ACCESS_PERIOD_REMINDER_30_DAYS = 'schedules#access_period_reminder_30_days'
|
||||||
|
|
||||||
# When the course certificate expires
|
# Reminder for certificate expiration set to 30 days from now
|
||||||
CERTIFICATE_EXPIRATION = 'schedules#certificate_expiration'
|
CERT_EXPIRATION_REMINDER_30_DAYS = 'schedules#cert_expiration_reminder_30_days'
|
||||||
|
|
||||||
# Archive the course after the certificate expires
|
# Archive the course after the certificate expires
|
||||||
COURSE_ARCHIVED = 'schedules#course_archived'
|
COURSE_ARCHIVED = 'schedules#course_archived'
|
||||||
@@ -52,19 +51,12 @@ def enroll(
|
|||||||
enrollment: Enrollment,
|
enrollment: Enrollment,
|
||||||
*,
|
*,
|
||||||
tenant: Tenant,
|
tenant: Tenant,
|
||||||
rel: tuple[Rel, ...] | Rel = (),
|
|
||||||
author: Author | None = None,
|
|
||||||
vacancy: Vacancy | None = None,
|
|
||||||
ensure_vacancy: bool = True,
|
|
||||||
persistence_layer: DynamoDBPersistenceLayer,
|
persistence_layer: DynamoDBPersistenceLayer,
|
||||||
) -> bool:
|
) -> bool:
|
||||||
"""Enrolls a user into a course and schedules lifecycle events."""
|
"""Enrolls a user into a course and schedules lifecycle events."""
|
||||||
now_ = now()
|
now_ = now()
|
||||||
user = enrollment.user
|
user = enrollment.user
|
||||||
course = enrollment.course
|
course = enrollment.course
|
||||||
exp_interval = course.exp_interval
|
|
||||||
lock_hash = md5_hash('%s%s' % (user.id, course.id))
|
|
||||||
ttl_date = now_ + timedelta(days=exp_interval - 30)
|
|
||||||
|
|
||||||
transact = TransactItems(persistence_layer.table_name)
|
transact = TransactItems(persistence_layer.table_name)
|
||||||
transact.put(
|
transact.put(
|
||||||
@@ -72,6 +64,7 @@ def enroll(
|
|||||||
'sk': '0',
|
'sk': '0',
|
||||||
'create_date': now_,
|
'create_date': now_,
|
||||||
'metadata__tenant_id': tenant['id'],
|
'metadata__tenant_id': tenant['id'],
|
||||||
|
'metadata__related_ids': {tenant['id'], user.id},
|
||||||
**enrollment.model_dump(),
|
**enrollment.model_dump(),
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
@@ -79,19 +72,11 @@ def enroll(
|
|||||||
item={
|
item={
|
||||||
'id': enrollment.id,
|
'id': enrollment.id,
|
||||||
'sk': 'metadata#tenant',
|
'sk': 'metadata#tenant',
|
||||||
'org_id': tenant['id'],
|
'tenant_id': f'ORG#{tenant["id"]}',
|
||||||
'name': tenant['name'],
|
'name': tenant['name'],
|
||||||
'create_date': now_,
|
'create_date': now_,
|
||||||
},
|
},
|
||||||
)
|
)
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': enrollment.id,
|
|
||||||
'sk': LifecycleEvents.COURSE_ARCHIVED,
|
|
||||||
'create_date': now_,
|
|
||||||
'ttl': ttl(days=exp_interval, start_dt=now_),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
transact.put(
|
transact.put(
|
||||||
item={
|
item={
|
||||||
'id': enrollment.id,
|
'id': enrollment.id,
|
||||||
@@ -106,7 +91,7 @@ def enroll(
|
|||||||
transact.put(
|
transact.put(
|
||||||
item={
|
item={
|
||||||
'id': enrollment.id,
|
'id': enrollment.id,
|
||||||
'sk': LifecycleEvents.ACCESS_PERIOD_EXPIRED,
|
'sk': LifecycleEvents.ACCESS_PERIOD_REMINDER_30_DAYS,
|
||||||
'name': user.name,
|
'name': user.name,
|
||||||
'email': user.email,
|
'email': user.email,
|
||||||
'course': course.name,
|
'course': course.name,
|
||||||
@@ -115,75 +100,6 @@ def enroll(
|
|||||||
},
|
},
|
||||||
)
|
)
|
||||||
|
|
||||||
for r in rel:
|
|
||||||
print(r['id'])
|
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': enrollment.id,
|
|
||||||
# 'sk': 'rel#{}' % r['id'],
|
|
||||||
'create_date': now_,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
if author:
|
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': enrollment.id,
|
|
||||||
'sk': 'metadata#author',
|
|
||||||
'user_id': author['id'],
|
|
||||||
'name': author['name'],
|
|
||||||
'create_date': now_,
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
if vacancy:
|
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': enrollment.id,
|
|
||||||
'sk': 'parent_vacancy',
|
|
||||||
# 'vacancy': vacancy.model_dump(),
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
if ensure_vacancy:
|
|
||||||
# Ensures that there's a vacancy
|
|
||||||
transact.delete(
|
|
||||||
key=vacancy.model_dump(),
|
|
||||||
cond_expr='attribute_exists(sk)',
|
|
||||||
)
|
|
||||||
|
|
||||||
# Add cancel policy if there is a vacancy
|
|
||||||
if vacancy:
|
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': enrollment.id,
|
|
||||||
'sk': 'metadata#cancel_policy',
|
|
||||||
'create_date': now_,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
|
|
||||||
# To ensure that the user does not enroll in the same course again until
|
|
||||||
# the certificate expires.
|
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': 'metadata#lock',
|
|
||||||
'sk': lock_hash,
|
|
||||||
'enrollment_id': enrollment.id,
|
|
||||||
'create_date': vacancy,
|
|
||||||
'ttl': ttl(start_dt=ttl_date),
|
|
||||||
},
|
|
||||||
cond_expr='attribute_not_exists(sk)',
|
|
||||||
)
|
|
||||||
transact.put(
|
|
||||||
item={
|
|
||||||
'id': enrollment.id,
|
|
||||||
'sk': 'lock',
|
|
||||||
'hash': lock_hash,
|
|
||||||
'create_date': vacancy,
|
|
||||||
'ttl': ttl(start_dt=ttl_date),
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
return persistence_layer.transact_write_items(transact)
|
return persistence_layer.transact_write_items(transact)
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
2
layercake/uv.lock
generated
2
layercake/uv.lock
generated
@@ -589,7 +589,7 @@ wheels = [
|
|||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "layercake"
|
name = "layercake"
|
||||||
version = "0.3.4"
|
version = "0.4.0"
|
||||||
source = { editable = "." }
|
source = { editable = "." }
|
||||||
dependencies = [
|
dependencies = [
|
||||||
{ name = "arnparse" },
|
{ name = "arnparse" },
|
||||||
|
|||||||
@@ -21,4 +21,4 @@ def test_set_as_paid(
|
|||||||
doc = dynamodb_persistence_layer.get_item(
|
doc = dynamodb_persistence_layer.get_item(
|
||||||
key=KeyPair('9omWNKymwU5U4aeun6mWzZ', '0'),
|
key=KeyPair('9omWNKymwU5U4aeun6mWzZ', '0'),
|
||||||
)
|
)
|
||||||
print(doc)
|
assert doc['status'] == 'PAID'
|
||||||
|
|||||||
Reference in New Issue
Block a user