Skip to content

Commit 6f8d44f

Browse files
[DEV-14824] Handle merge conflict
2 parents 0b443a7 + 60cb39d commit 6f8d44f

92 files changed

Lines changed: 1571 additions & 7356 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ dependencies = [
5050
"openpyxl==3.1.*",
5151
"pandas==2.2.*",
5252
"psutil==5.9.*",
53-
"psycopg2==2.9.9", # Pinning exact version because this package will drop support for Python versions in patches
53+
"psycopg>=3.3.3",
5454
"py-gfm==2.0.0",
5555
"pydantic[dotenv]==1.9.*",
5656
"python-json-logger==2.0.7",

usaspending_api/awards/v2/views/accounts.py

Lines changed: 7 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,15 +1,19 @@
11
from collections import OrderedDict
22

3-
from psycopg2.sql import Identifier, Literal, SQL
3+
from psycopg.sql import SQL, Identifier, Literal
44
from rest_framework.request import Request
55
from rest_framework.response import Response
66
from rest_framework.views import APIView
77

88
from usaspending_api.common.cache_decorator import cache_response
99
from usaspending_api.common.helpers.generic_helper import get_pagination
1010
from usaspending_api.common.helpers.sql_helpers import execute_sql_to_ordered_dictionary
11-
from usaspending_api.common.validator.award import get_internal_or_generated_award_id_model
12-
from usaspending_api.common.validator.pagination import customize_pagination_with_sort_columns
11+
from usaspending_api.common.validator.award import (
12+
get_internal_or_generated_award_id_model,
13+
)
14+
from usaspending_api.common.validator.pagination import (
15+
customize_pagination_with_sort_columns,
16+
)
1317
from usaspending_api.common.validator.tinyshield import validate_post_request
1418
from usaspending_api.references.helpers import generate_agency_slugs_for_agency_list
1519

usaspending_api/awards/v2/views/funding.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -2,16 +2,23 @@
22
from copy import deepcopy
33
from itertools import chain
44

5-
from psycopg2.sql import Identifier, Literal, SQL
5+
from psycopg.sql import SQL, Identifier, Literal
66
from rest_framework.request import Request
77
from rest_framework.response import Response
88
from rest_framework.views import APIView
99

1010
from usaspending_api.common.cache_decorator import cache_response
1111
from usaspending_api.common.helpers.generic_helper import get_simple_pagination_metadata
12-
from usaspending_api.common.helpers.sql_helpers import build_composable_order_by, execute_sql_to_ordered_dictionary
13-
from usaspending_api.common.validator.award import get_internal_or_generated_award_id_model
14-
from usaspending_api.common.validator.pagination import customize_pagination_with_sort_columns
12+
from usaspending_api.common.helpers.sql_helpers import (
13+
build_composable_order_by,
14+
execute_sql_to_ordered_dictionary,
15+
)
16+
from usaspending_api.common.validator.award import (
17+
get_internal_or_generated_award_id_model,
18+
)
19+
from usaspending_api.common.validator.pagination import (
20+
customize_pagination_with_sort_columns,
21+
)
1522
from usaspending_api.common.validator.tinyshield import validate_post_request
1623
from usaspending_api.references.helpers import generate_agency_slugs_for_agency_list
1724

usaspending_api/awards/v2/views/funding_rollup.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,17 @@
11
from collections import OrderedDict
22

3-
from psycopg2.sql import Identifier, Literal, SQL
3+
from psycopg.sql import SQL, Identifier, Literal
44
from rest_framework.request import Request
55
from rest_framework.response import Response
66
from rest_framework.views import APIView
77

88
from usaspending_api.common.cache_decorator import cache_response
99
from usaspending_api.common.helpers.sql_helpers import execute_sql_to_ordered_dictionary
10-
from usaspending_api.common.validator.award import get_internal_or_generated_award_id_model
10+
from usaspending_api.common.validator.award import (
11+
get_internal_or_generated_award_id_model,
12+
)
1113
from usaspending_api.common.validator.tinyshield import validate_post_request
1214

13-
1415
ROLLUP_SQL = SQL(
1516
"""
1617
with gather_financial_accounts_by_awards as (
@@ -21,7 +22,8 @@
2122
from vw_awards a
2223
inner join financial_accounts_by_awards faba on faba.award_id = a.id
2324
INNER JOIN submission_attributes sa ON faba.submission_id = sa.submission_id
24-
INNER JOIN dabs_submission_window_schedule dabs ON sa.submission_window_id = dabs.id and dabs.submission_reveal_date <= now()
25+
INNER JOIN dabs_submission_window_schedule dabs
26+
ON sa.submission_window_id = dabs.id and dabs.submission_reveal_date <= now()
2527
where {award_id_column} = {award_id}
2628
)
2729
select

usaspending_api/broker/helpers/delete_fabs_transactions.py

Lines changed: 9 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -2,15 +2,15 @@
22

33
from django.conf import settings
44
from django.db import connections
5+
from psycopg.sql import SQL, Placeholder
56

67
from usaspending_api.broker.helpers.delete_stale_fabs import delete_stale_fabs
78
from usaspending_api.common.helpers.timing_helpers import timer
89

9-
1010
logger = logging.getLogger("script")
1111

1212

13-
def delete_fabs_transactions(ids_to_delete):
13+
def delete_fabs_transactions(ids_to_delete: list) -> list:
1414
"""ids_to_delete are published_fabs_ids"""
1515
if ids_to_delete:
1616
with timer(f"deleting {len(ids_to_delete)} stale FABS data", logger.info):
@@ -23,7 +23,7 @@ def delete_fabs_transactions(ids_to_delete):
2323
return update_and_delete_award_ids
2424

2525

26-
def get_delete_pks_for_afa_keys(afa_ids_to_delete):
26+
def get_delete_pks_for_afa_keys(afa_ids_to_delete: list) -> list:
2727
"""
2828
When we read from FABS delete files, we are only reading in afa_generated_unique keys (AFA). Unfortunately,
2929
AFAs on their own do not give us enough information to delete records since AFAs are reused in Broker. This
@@ -35,16 +35,18 @@ def get_delete_pks_for_afa_keys(afa_ids_to_delete):
3535
return []
3636

3737
uppercased = tuple(afa.upper() for afa in afa_ids_to_delete)
38-
39-
sql = """
38+
placeholders = SQL(",").join(Placeholder() * len(uppercased))
39+
sql = SQL(
40+
"""
4041
select published_fabs_id
4142
from published_fabs
42-
where upper(afa_generated_unique) in %s and
43+
where upper(afa_generated_unique) in ({}) and
4344
is_active is not true
4445
"""
46+
).format(placeholders)
4547

4648
with connections[settings.BROKER_DB_ALIAS].cursor() as cursor:
47-
cursor.execute(sql, [uppercased])
49+
cursor.execute(sql, uppercased)
4850
rows = cursor.fetchall()
4951

5052
return [row[0] for row in rows]

usaspending_api/broker/management/commands/load_broker_table.py

Lines changed: 12 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,11 @@
1+
import argparse
12
import csv
23
import logging
4+
from io import StringIO
35

46
from django.conf import settings
57
from django.core.management import BaseCommand
68
from django.db import connections
7-
from io import StringIO
89

910
logger = logging.getLogger(__name__)
1011

@@ -15,7 +16,7 @@ class Command(BaseCommand):
1516

1617
CHUNK_SIZE = 50_000
1718

18-
def add_arguments(self, parser):
19+
def add_arguments(self, parser: argparse.ArgumentParser) -> None:
1920

2021
broker_group = parser.add_argument_group(
2122
title="broker",
@@ -37,7 +38,8 @@ def add_arguments(self, parser):
3738

3839
usaspending_group = parser.add_argument_group(
3940
title="usaspending",
40-
description="Optional fields that reference components of the USAspending DB. Defaults to Broker DB counterpart",
41+
description="Optional fields that reference components of the USAspending DB. "
42+
"Defaults to Broker DB counterpart",
4143
)
4244
usaspending_group.add_argument(
4345
"--usaspending-table-name",
@@ -52,7 +54,7 @@ def add_arguments(self, parser):
5254
help="Schema name in the USAspending DB to load data into",
5355
)
5456

55-
def handle(self, *args, **options):
57+
def handle(self, *args, **options) -> None:
5658

5759
broker_table_name = options["table_name"]
5860
broker_schema_name = options["schema_name"]
@@ -101,8 +103,10 @@ def handle(self, *args, **options):
101103
writer = csv.writer(f)
102104
writer.writerows(rows)
103105
f.seek(0)
104-
usas_cursor.copy_expert(
105-
f"COPY {usas_schema_name}.{usas_table_name} ({usas_table_columns}) FROM STDIN WITH (DELIMITER ',', FORMAT CSV)",
106-
f,
107-
)
106+
with usas_cursor.copy(
107+
f"COPY {usas_schema_name}.{usas_table_name} ({usas_table_columns}) FROM STDIN "
108+
"WITH (DELIMITER ',', FORMAT CSV)",
109+
) as copy:
110+
for row in f:
111+
copy.write(row)
108112
usas_conn.commit()

usaspending_api/broker/management/commands/load_fpds_transactions.py

Lines changed: 39 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,35 @@
11
import logging
2-
import psycopg2
32
import re
4-
53
from datetime import datetime, timezone
4+
from typing import IO, AnyStr, List, Optional
5+
6+
import psycopg
7+
from django.core.management import CommandParser
68
from django.core.management.base import BaseCommand
7-
from typing import IO, List, AnyStr, Optional
89

9-
from usaspending_api.broker.helpers.last_load_date import get_last_load_date, update_last_load_date
10-
from usaspending_api.common.helpers.date_helper import datetime_command_line_argument_type
10+
from usaspending_api.broker.helpers.last_load_date import (
11+
get_last_load_date,
12+
update_last_load_date,
13+
)
14+
from usaspending_api.common.helpers.date_helper import (
15+
datetime_command_line_argument_type,
16+
)
1117
from usaspending_api.common.helpers.etl_helpers import update_c_to_d_linkages
1218
from usaspending_api.common.helpers.sql_helpers import get_database_dsn_string
1319
from usaspending_api.common.retrieve_file_from_uri import RetrieveFileFromUri
14-
from usaspending_api.etl.award_helpers import update_awards, update_procurement_awards, prune_empty_awards
15-
from usaspending_api.etl.transaction_loaders.fpds_loader import load_fpds_transactions, failed_ids, delete_stale_fpds
16-
from usaspending_api.transactions.transaction_delete_journal_helpers import retrieve_deleted_fpds_transactions
20+
from usaspending_api.etl.award_helpers import (
21+
prune_empty_awards,
22+
update_awards,
23+
update_procurement_awards,
24+
)
25+
from usaspending_api.etl.transaction_loaders.fpds_loader import (
26+
delete_stale_fpds,
27+
failed_ids,
28+
load_fpds_transactions,
29+
)
30+
from usaspending_api.transactions.transaction_delete_journal_helpers import (
31+
retrieve_deleted_fpds_transactions,
32+
)
1733

1834
logger = logging.getLogger("script")
1935

@@ -28,12 +44,13 @@ class Command(BaseCommand):
2844
modified_award_ids = []
2945

3046
@staticmethod
31-
def get_cursor_for_date_query(connection, date, count=False):
47+
def get_cursor_for_date_query(
48+
connection: psycopg.Connection, date: datetime, count: bool = False
49+
) -> psycopg.Cursor:
50+
db_cursor = connection.cursor()
3251
if count:
33-
db_cursor = connection.cursor()
3452
db_query = ALL_FPDS_QUERY.format("COUNT(*)")
3553
else:
36-
db_cursor = connection.cursor("fpds_load", cursor_factory=psycopg2.extras.DictCursor)
3754
db_query = ALL_FPDS_QUERY.format("detached_award_procurement_id")
3855

3956
if date:
@@ -54,7 +71,7 @@ def load_fpds_incrementally(self, date: Optional[datetime], chunk_size: int = CH
5471
stale_awards = delete_stale_fpds(detached_award_procurement_ids)
5572
self.modified_award_ids.extend(stale_awards)
5673

57-
with psycopg2.connect(dsn=get_database_dsn_string()) as connection:
74+
with psycopg.connect(get_database_dsn_string()) as connection:
5875
logger.info("Fetching records to update")
5976
total_records = self.get_cursor_for_date_query(connection, date, True).fetchall()[0][0]
6077
records_processed = 0
@@ -93,7 +110,7 @@ def load_fpds_from_file(self, file_path: str) -> None:
93110
logger.info(f"Total transaction IDs in file: {total_count}")
94111

95112
@staticmethod
96-
def update_award_records(awards, skip_cd_linkage=True):
113+
def update_award_records(awards: list, skip_cd_linkage: bool = True) -> None:
97114
if awards:
98115
unique_awards = set(awards)
99116
logger.info(f"{len(unique_awards)} award records impacted by transaction DML operations")
@@ -107,7 +124,7 @@ def update_award_records(awards, skip_cd_linkage=True):
107124
else:
108125
logger.info("No award records to update")
109126

110-
def add_arguments(self, parser):
127+
def add_arguments(self, parser: CommandParser) -> None:
111128
mutually_exclusive_group = parser.add_mutually_exclusive_group(required=True)
112129

113130
mutually_exclusive_group.add_argument(
@@ -131,16 +148,17 @@ def add_arguments(self, parser):
131148
"--file",
132149
metavar="FILEPATH",
133150
type=str,
134-
help="Load/Reload transactions using the detached_award_procurement_id list stored at this file path (one ID per line)"
135-
"to reload, one ID per line. Nonexistent IDs will be ignored.",
151+
help="Load/Reload transactions using the detached_award_procurement_id list stored at this file path "
152+
"(one ID per line) to reload, one ID per line. Nonexistent IDs will be ignored.",
136153
)
137154
mutually_exclusive_group.add_argument(
138155
"--reload-all",
139156
action="store_true",
140-
help="Script will load or reload all FPDS records in source tables, from all time. This does NOT clear the USAspending database first",
157+
help="Script will load or reload all FPDS records in source tables, from all time. "
158+
"This does NOT clear the USAspending database first",
141159
)
142160

143-
def handle(self, *args, **options):
161+
def handle(self, *args, **options) -> None:
144162

145163
# Record script execution start time to update the FPDS last updated date in DB as appropriate
146164
update_time = datetime.now(timezone.utc)
@@ -173,7 +191,8 @@ def handle(self, *args, **options):
173191
raise SystemExit(1)
174192

175193
if options["reload_all"] or options["since_last_load"]:
176-
# we wait until after the load finishes to update the load date because if this crashes we'll need to load again
194+
# we wait until after the load finishes to update the load date
195+
# because if this crashes we'll need to load again
177196
update_last_load_date("fpds", update_time)
178197

179-
logger.info(f"Successfully Completed")
198+
logger.info("Successfully Completed")

0 commit comments

Comments (0)