From 34fe12a7dd6af2a163ec521d3b2054c7951ed6a6 Mon Sep 17 00:00:00 2001
From: Anderson Banihirwe <13301940+andersy005@users.noreply.github.com>
Date: Wed, 28 Aug 2024 17:56:05 -0700
Subject: [PATCH] Add beneficiary data to Credit Model (#118)

---
 ...4_add_beneficiary_data_to_credit_model.py} | 14 +++++++----
 offsets_db_api/models.py                      |  4 +++
 requirements.txt                              |  2 +-
 tests/conftest.py                             |  8 +++---
 tests/test_files.py                           |  8 +++---
 update_database.py                            | 25 ++++++++-----------
 6 files changed, 33 insertions(+), 28 deletions(-)
 rename migrations/versions/{895a2d11e837_reset_migrations_and_add_new_fields_to_.py => 6c863cafdf94_add_beneficiary_data_to_credit_model.py} (90%)

diff --git a/migrations/versions/895a2d11e837_reset_migrations_and_add_new_fields_to_.py b/migrations/versions/6c863cafdf94_add_beneficiary_data_to_credit_model.py
similarity index 90%
rename from migrations/versions/895a2d11e837_reset_migrations_and_add_new_fields_to_.py
rename to migrations/versions/6c863cafdf94_add_beneficiary_data_to_credit_model.py
index ae5174b..8791859 100644
--- a/migrations/versions/895a2d11e837_reset_migrations_and_add_new_fields_to_.py
+++ b/migrations/versions/6c863cafdf94_add_beneficiary_data_to_credit_model.py
@@ -1,8 +1,8 @@
-"""reset migrations and add new fields to project
+"""add beneficiary data to credit model
 
-Revision ID: 895a2d11e837
+Revision ID: 6c863cafdf94
 Revises:
-Create Date: 2023-12-06 04:34:16.583574
+Create Date: 2024-08-28 16:25:27.798958
 
 """
 
@@ -12,7 +12,7 @@ from sqlalchemy.dialects import postgresql
 
 
 # revision identifiers, used by Alembic.
-revision = '895a2d11e837'
+revision = '6c863cafdf94'
 down_revision = None
 branch_labels = None
 depends_on = None
@@ -29,7 +29,7 @@ def upgrade() -> None:
         sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
         sa.Column('notes', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
         sa.Column('is_waybacked', sa.Boolean(), nullable=True),
-        sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=False),
+        sa.Column('type', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
         sa.Column('id', sa.Integer(), nullable=False),
         sa.PrimaryKeyConstraint('id'),
     )
@@ -91,6 +91,10 @@ def upgrade() -> None:
         sa.Column('vintage', sa.Integer(), nullable=True),
         sa.Column('transaction_date', sa.Date(), nullable=True),
         sa.Column('transaction_type', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+        sa.Column('account', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+        sa.Column('beneficiary', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+        sa.Column('reason', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
+        sa.Column('note', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
         sa.Column('id', sa.Integer(), nullable=False),
         sa.Column('project_id', sqlmodel.sql.sqltypes.AutoString(), nullable=True),
         sa.ForeignKeyConstraint(
diff --git a/offsets_db_api/models.py b/offsets_db_api/models.py
index b1c731f..6bdc1ea 100644
--- a/offsets_db_api/models.py
+++ b/offsets_db_api/models.py
@@ -119,6 +119,10 @@ class CreditBase(SQLModel):
         description='Date of transaction', strict=False
     )
     transaction_type: str | None = Field(description='Type of transaction')
+    account: str | None = Field(description='Account used for the transaction')
+    beneficiary: str | None = Field(description='Beneficiary of credits')
+    reason: str | None = Field(description='Reason for transaction')
+    note: str | None = Field(description='Note')
 
 
 class Credit(CreditBase, table=True):
diff --git a/requirements.txt b/requirements.txt
index 3db1ccf..ba03568 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -5,7 +5,7 @@ fastapi==0.109.1
 fastparquet
 gunicorn
 httpx
-offsets-db-data>=2024.6.0
+offsets-db-data>=2024.8.0
 pandas>=1.5.3
 psycopg2-binary==2.9.9
 pydantic-settings>=2.1
diff --git a/tests/conftest.py b/tests/conftest.py
index 15ed6b3..604c2ab 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -50,19 +50,19 @@ def wait_for_file_processing(test_app: TestClient, file_ids: list[str], timeout:
 def setup_post(test_app: TestClient):
     payload: list[dict[str, str]] = [
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/credits-augmented.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-28/credits-augmented.parquet',
             'category': 'credits',
         },
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/projects-augmented.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-28/projects-augmented.parquet',
             'category': 'projects',
         },
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/curated-clips.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-28/curated-clips.parquet',
             'category': 'clips',
         },
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/weekly-summary-clips.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-27/weekly-summary-clips.parquet',
             'category': 'clips',
         },
     ]
diff --git a/tests/test_files.py b/tests/test_files.py
index 383143b..b70f847 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -27,19 +27,19 @@ def test_submit_bad_file(test_app: TestClient, url: str, category: str):
 def file_urls():
     return [
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/credits-augmented.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-28/credits-augmented.parquet',
             'category': 'credits',
         },
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/projects-augmented.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-28/projects-augmented.parquet',
             'category': 'projects',
         },
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/curated-clips.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-28/curated-clips.parquet',
             'category': 'clips',
         },
         {
-            'url': 's3://carbonplan-offsets-db/final/2024-08-13/weekly-summary-clips.parquet',
+            'url': 's3://carbonplan-offsets-db/final/2024-08-27/weekly-summary-clips.parquet',
             'category': 'clips',
         },
     ]
diff --git a/update_database.py b/update_database.py
index f5ebd43..9b91889 100644
--- a/update_database.py
+++ b/update_database.py
@@ -13,7 +13,7 @@ def generate_path(*, date: datetime.date, bucket: str, category: str) -> str:
 
 
 def calculate_date(*, days_back: int) -> datetime.date:
-    return datetime.datetime.utcnow().date() - datetime.timedelta(days=days_back)
+    return datetime.datetime.now(datetime.timezone.utc).date() - datetime.timedelta(days=days_back)
 
 
 def get_latest(*, bucket: str):
@@ -41,7 +41,7 @@ def get_latest(*, bucket: str):
             data.append({'category': key, 'url': entry_url})
 
     weekly_summary_start = datetime.date(year=2024, month=2, day=6)
-    weekly_summary_end = datetime.datetime.utcnow().date()
+    weekly_summary_end = datetime.datetime.now(datetime.timezone.utc).date()
     date_ranges = pd.date_range(
         start=weekly_summary_start, end=weekly_summary_end, freq='W-TUE', inclusive='both'
     )
@@ -62,31 +62,25 @@ def get_latest(*, bucket: str):
 
 
 def post_data_to_environment(*, env: str, bucket: str) -> None:
-    # Set up the headers for the request
-    headers = {
-        'accept': 'application/json',
-        'Content-Type': 'application/json',
-    }
-
     if env == 'production':
         files = get_latest(bucket=bucket)
     else:
         files = [
             {
-                'url': 's3://carbonplan-offsets-db/final/2024-03-05/credits-augmented.parquet',
+                'url': 's3://carbonplan-offsets-db/final/2024-08-28/credits-augmented.parquet',
                 'category': 'credits',
             },
             {
-                'url': 's3://carbonplan-offsets-db/final/2024-03-05/projects-augmented.parquet',
+                'url': 's3://carbonplan-offsets-db/final/2024-08-28/projects-augmented.parquet',
                 'category': 'projects',
             },
             {
-                'url': 's3://carbonplan-offsets-db/final/2024-03-05/curated-clips.parquet',
+                'url': 's3://carbonplan-offsets-db/final/2024-08-28/curated-clips.parquet',
                 'category': 'clips',
             },
             {
-                'url': 's3://carbonplan-offsets-db/final/2024-03-05/weekly-summary-clips.parquet',
+                'url': 's3://carbonplan-offsets-db/final/2024-08-27/weekly-summary-clips.parquet',
                 'category': 'clips',
             },
         ]
@@ -104,8 +98,11 @@ def post_data_to_environment(*, env: str, bucket: str) -> None:
 
     if api_key is None:
         raise ValueError('OFFSETS_DB_API_KEY_STAGING environment variable not set')
 
-    headers['X-API-KEY'] = api_key
-
+    headers = {
+        'accept': 'application/json',
+        'Content-Type': 'application/json',
+        'X-API-KEY': api_key,
+    }
     # Send the request
     response = requests.post(url, headers=headers, data=json.dumps(files))
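
The migration above recreates the schema from scratch (down_revision = None). For a deployment that already has the credit table, a minimal incremental sketch is below; it assumes the table is named 'credit', omits the Alembic revision-identifier boilerplate, and is illustrative only, not the migration shipped in this patch.

# Hypothetical incremental variant, not the migration in this patch.
# Assumes the Credit model maps to a table named 'credit'.
import sqlalchemy as sa
import sqlmodel
from alembic import op


def upgrade() -> None:
    # Add the four new nullable string columns introduced on CreditBase.
    for column in ('account', 'beneficiary', 'reason', 'note'):
        op.add_column('credit', sa.Column(column, sqlmodel.sql.sqltypes.AutoString(), nullable=True))


def downgrade() -> None:
    # Remove them in reverse order.
    for column in ('note', 'reason', 'beneficiary', 'account'):
        op.drop_column('credit', column)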
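
Because account, beneficiary, reason, and note are plain nullable strings on CreditBase, they can be filtered like any other text column once the migration has run. A usage sketch, assuming a placeholder database URL and that offsets_db_api is importable:

# Usage sketch: filter credits on the new beneficiary fields (placeholder DSN).
from sqlmodel import Session, col, create_engine, select

from offsets_db_api.models import Credit

engine = create_engine('postgresql+psycopg2://user:password@localhost:5432/offsets')  # placeholder

with Session(engine) as session:
    statement = (
        select(Credit)
        .where(col(Credit.beneficiary).ilike('%forest%'))  # '%forest%' is only an example pattern
        .limit(10)
    )
    for credit in session.exec(statement):
        print(credit.transaction_date, credit.transaction_type, credit.beneficiary, credit.reason)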
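
update_database.py also swaps the deprecated datetime.datetime.utcnow() for datetime.datetime.now(datetime.timezone.utc). Both resolve to the same UTC calendar day, but the replacement returns a timezone-aware value and avoids the deprecation warning on Python 3.12+. A standalone sketch of the patched calculate_date:

# Standalone sketch mirroring the patched calculate_date in update_database.py.
import datetime


def calculate_date(*, days_back: int) -> datetime.date:
    # now(timezone.utc) is timezone-aware; utcnow() is naive and deprecated in Python 3.12+.
    return datetime.datetime.now(datetime.timezone.utc).date() - datetime.timedelta(days=days_back)


print(calculate_date(days_back=1))  # yesterday's date in UTC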
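
Finally, the header dictionary is now built in one place, immediately before the request, so X-API-KEY is added alongside the static headers rather than patched onto a dict defined earlier in the function. A sketch of the resulting request flow; the endpoint URL is a placeholder because the patch does not show how url is constructed:

# Sketch of the POST flow after the patch; the endpoint URL is a placeholder.
import json
import os

import requests

api_key = os.environ.get('OFFSETS_DB_API_KEY_STAGING')
if api_key is None:
    raise ValueError('OFFSETS_DB_API_KEY_STAGING environment variable not set')

files = [
    {
        'url': 's3://carbonplan-offsets-db/final/2024-08-28/credits-augmented.parquet',
        'category': 'credits',
    },
]

headers = {
    'accept': 'application/json',
    'Content-Type': 'application/json',
    'X-API-KEY': api_key,
}
url = 'https://offsets-db-staging.example.com/files'  # placeholder endpoint
response = requests.post(url, headers=headers, data=json.dumps(files))
response.raise_for_status()
print(response.json())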