From 6e674e96a4c1ab54754c19040d993e071ee2f922 Mon Sep 17 00:00:00 2001 From: maxachis Date: Wed, 11 Dec 2024 14:59:08 -0500 Subject: [PATCH] Rename batch to bulk across endpoints, logic, schemas, and tests --- app.py | 4 +- .../{batch_logic.py => bulk_logic.py} | 66 +++++++++---------- ...namic_schema_request_content_population.py | 6 +- .../{batch_dtos.py => bulk_dtos.py} | 2 +- .../{batch_schemas.py => bulk_schemas.py} | 0 resources/Batch.py | 52 +++++++-------- resources/endpoint_schema_config.py | 14 ++-- .../helper_classes/RequestValidator.py | 34 +++++----- .../{test_batch.py => test_bulk.py} | 30 ++++----- utilities/namespace.py | 2 +- 10 files changed, 105 insertions(+), 105 deletions(-) rename middleware/primary_resource_logic/{batch_logic.py => bulk_logic.py} (82%) rename middleware/schema_and_dto_logic/primary_resource_dtos/{batch_dtos.py => bulk_dtos.py} (91%) rename middleware/schema_and_dto_logic/primary_resource_schemas/{batch_schemas.py => bulk_schemas.py} (100%) rename tests/integration/{test_batch.py => test_bulk.py} (96%) diff --git a/app.py b/app.py index 92eb14c4..c8011659 100644 --- a/app.py +++ b/app.py @@ -6,7 +6,7 @@ from flask_cors import CORS from middleware.util import get_env_variable -from resources.Batch import namespace_batch +from resources.Batch import namespace_bulk from resources.Callback import namespace_auth from resources.DataRequests import namespace_data_requests from resources.GithubDataRequests import namespace_github @@ -66,7 +66,7 @@ namespace_notifications, namespace_map, namespace_signup, - namespace_batch, + namespace_bulk, namespace_match, ] diff --git a/middleware/primary_resource_logic/batch_logic.py b/middleware/primary_resource_logic/bulk_logic.py similarity index 82% rename from middleware/primary_resource_logic/batch_logic.py rename to middleware/primary_resource_logic/bulk_logic.py index 15a159ec..b374f630 100644 --- a/middleware/primary_resource_logic/batch_logic.py +++ b/middleware/primary_resource_logic/bulk_logic.py @@ 
-27,8 +27,8 @@ setup_dto_class, ) -from middleware.schema_and_dto_logic.primary_resource_dtos.batch_dtos import ( - BatchRequestDTO, +from middleware.schema_and_dto_logic.primary_resource_dtos.bulk_dtos import ( + BulkRequestDTO, ) from csv import DictReader @@ -65,7 +65,7 @@ def _abort_if_csv(file): ) -class BatchRequestManager: +class BulkRequestManager: def __init__(self): self.requests = [] @@ -92,7 +92,7 @@ def all_requests_errored_out(self): return len(self.get_requests_without_error()) == 0 -class BatchRowProcessor: +class BulkRowProcessor: def __init__(self, raw_row: dict, request_id: int): self.raw_row = raw_row @@ -139,7 +139,7 @@ def create_completed_request(self, inner_dto): ) -class AgenciesPostBRP(BatchRowProcessor): +class AgenciesPostBRP(BulkRowProcessor): def create_completed_request(self, inner_dto): return AgencyPostRequestInfo( @@ -148,10 +148,10 @@ def create_completed_request(self, inner_dto): @dataclass -class BatchConfig: - dto: BatchRequestDTO +class BulkConfig: + dto: BulkRequestDTO handler: PostPutHandler - brp_class: type[BatchRowProcessor] + brp_class: type[BulkRowProcessor] schema: Schema @@ -162,32 +162,32 @@ def listify_strings(raw_rows: list[dict]): raw_row[k] = v.split(",") -def run_batch( - batch_config: BatchConfig, +def run_bulk( + bulk_config: BulkConfig, ): unflattener = SchemaUnflattener( - flat_schema_class=batch_config.dto.csv_schema.__class__ + flat_schema_class=bulk_config.dto.csv_schema.__class__ ) - raw_rows = _get_raw_rows_from_csv(file=batch_config.dto.file) + raw_rows = _get_raw_rows_from_csv(file=bulk_config.dto.file) listify_strings(raw_rows) - schema = batch_config.schema - brm = BatchRequestManager() + schema = bulk_config.schema + brm = BulkRequestManager() for idx, raw_row in enumerate(raw_rows): - brp = batch_config.brp_class(raw_row=raw_row, request_id=idx) + brp = bulk_config.brp_class(raw_row=raw_row, request_id=idx) brp.process( unflattener=unflattener, - inner_dto_class=batch_config.dto.inner_dto_class, + 
inner_dto_class=bulk_config.dto.inner_dto_class, schema=schema, ) brm.add_request(request=brp.request) - handler = batch_config.handler + handler = bulk_config.handler handler.mass_execute(requests=brm.get_requests_without_error()) return brm def manage_response( - brm: BatchRequestManager, resource_name: str, verb: str, include_ids: bool = True + brm: BulkRequestManager, resource_name: str, verb: str, include_ids: bool = True ): errors = brm.get_error_dict() if brm.all_requests_errored_out(): @@ -216,9 +216,9 @@ def manage_response( ) -def batch_post_agencies(db_client: DatabaseClient, dto: BatchRequestDTO): - brm = run_batch( - batch_config=BatchConfig( +def bulk_post_agencies(db_client: DatabaseClient, dto: BulkRequestDTO): + brm = run_bulk( + bulk_config=BulkConfig( dto=dto, handler=AgencyPostHandler(), brp_class=AgenciesPostBRP, @@ -228,12 +228,12 @@ def batch_post_agencies(db_client: DatabaseClient, dto: BatchRequestDTO): return manage_response(brm=brm, resource_name="agencies", verb="created") -def batch_put_agencies(db_client: DatabaseClient, dto: BatchRequestDTO): - brm = run_batch( - batch_config=BatchConfig( +def bulk_put_agencies(db_client: DatabaseClient, dto: BulkRequestDTO): + brm = run_bulk( + bulk_config=BulkConfig( dto=dto, handler=AgencyPutHandler(), - brp_class=BatchRowProcessor, + brp_class=BulkRowProcessor, schema=dto.csv_schema.__class__(exclude=["file"]), ) ) @@ -242,24 +242,24 @@ def batch_put_agencies(db_client: DatabaseClient, dto: BatchRequestDTO): ) -def batch_post_data_sources(db_client: DatabaseClient, dto: BatchRequestDTO): - brm = run_batch( - batch_config=BatchConfig( +def bulk_post_data_sources(db_client: DatabaseClient, dto: BulkRequestDTO): + brm = run_bulk( + bulk_config=BulkConfig( dto=dto, handler=DataSourcesPostHandler(), - brp_class=BatchRowProcessor, + brp_class=BulkRowProcessor, schema=dto.csv_schema.__class__(exclude=["file"]), ) ) return manage_response(brm=brm, resource_name="data sources", verb="created") -def 
batch_put_data_sources(db_client: DatabaseClient, dto: BatchRequestDTO): - brm = run_batch( - batch_config=BatchConfig( +def bulk_put_data_sources(db_client: DatabaseClient, dto: BulkRequestDTO): + brm = run_bulk( + bulk_config=BulkConfig( dto=dto, handler=DataSourcesPutHandler(), - brp_class=BatchRowProcessor, + brp_class=BulkRowProcessor, schema=dto.csv_schema.__class__(exclude=["file"]), ) ) diff --git a/middleware/schema_and_dto_logic/dynamic_logic/dynamic_schema_request_content_population.py b/middleware/schema_and_dto_logic/dynamic_logic/dynamic_schema_request_content_population.py index 11aecc7c..a37634b2 100644 --- a/middleware/schema_and_dto_logic/dynamic_logic/dynamic_schema_request_content_population.py +++ b/middleware/schema_and_dto_logic/dynamic_logic/dynamic_schema_request_content_population.py @@ -8,8 +8,8 @@ from middleware.flask_response_manager import FlaskResponseManager from middleware.schema_and_dto_logic.custom_types import SchemaTypes, DTOTypes -from middleware.schema_and_dto_logic.primary_resource_dtos.batch_dtos import ( - BatchRequestDTO, +from middleware.schema_and_dto_logic.primary_resource_dtos.bulk_dtos import ( + BulkRequestDTO, ) from middleware.schema_and_dto_logic.util import ( _get_required_argument, @@ -42,7 +42,7 @@ def populate_schema_with_request_content( """ # Get all declared fields from the schema if load_file: - return BatchRequestDTO( + return BulkRequestDTO( file=request.files.get("file"), csv_schema=schema, inner_dto_class=dto_class ) fields = schema.fields diff --git a/middleware/schema_and_dto_logic/primary_resource_dtos/batch_dtos.py b/middleware/schema_and_dto_logic/primary_resource_dtos/bulk_dtos.py similarity index 91% rename from middleware/schema_and_dto_logic/primary_resource_dtos/batch_dtos.py rename to middleware/schema_and_dto_logic/primary_resource_dtos/bulk_dtos.py index 29e367cb..5d18eb31 100644 --- a/middleware/schema_and_dto_logic/primary_resource_dtos/batch_dtos.py +++ 
b/middleware/schema_and_dto_logic/primary_resource_dtos/bulk_dtos.py @@ -7,7 +7,7 @@ @dataclass -class BatchRequestDTO: +class BulkRequestDTO: file: FileStorage csv_schema: Schema inner_dto_class: Any diff --git a/middleware/schema_and_dto_logic/primary_resource_schemas/batch_schemas.py b/middleware/schema_and_dto_logic/primary_resource_schemas/bulk_schemas.py similarity index 100% rename from middleware/schema_and_dto_logic/primary_resource_schemas/batch_schemas.py rename to middleware/schema_and_dto_logic/primary_resource_schemas/bulk_schemas.py diff --git a/resources/Batch.py b/resources/Batch.py index 35deedd7..ffc12ac3 100644 --- a/resources/Batch.py +++ b/resources/Batch.py @@ -6,18 +6,18 @@ AccessInfoPrimary, ) from middleware.decorators import endpoint_info -from middleware.primary_resource_logic.batch_logic import ( - batch_post_agencies, - batch_post_data_sources, - batch_put_agencies, - batch_put_data_sources, +from middleware.primary_resource_logic.bulk_logic import ( + bulk_post_agencies, + bulk_post_data_sources, + bulk_put_agencies, + bulk_put_data_sources, ) from resources.PsycopgResource import PsycopgResource from resources.endpoint_schema_config import SchemaConfigs from resources.resource_helpers import ResponseInfo from utilities.namespace import create_namespace, AppNamespaces -namespace_batch = create_namespace(AppNamespaces.BATCH) +namespace_bulk = create_namespace(AppNamespaces.BULK) def add_csv_description(initial_description: str) -> str: @@ -28,13 +28,13 @@ def add_csv_description(initial_description: str) -> str: ) -@namespace_batch.route("/agencies") -class AgenciesBatch(PsycopgResource): +@namespace_bulk.route("/agencies") +class AgenciesBulk(PsycopgResource): @endpoint_info( - namespace=namespace_batch, + namespace=namespace_bulk, auth_info=STANDARD_JWT_AUTH_INFO, - schema_config=SchemaConfigs.BATCH_AGENCIES_POST, + schema_config=SchemaConfigs.BULK_AGENCIES_POST, description=add_csv_description( initial_description="Adds multiple 
agencies from a CSV file." ), @@ -44,14 +44,14 @@ class AgenciesBatch(PsycopgResource): ) def post(self, access_info: AccessInfoPrimary) -> Response: return self.run_endpoint( - wrapper_function=batch_post_agencies, - schema_populate_parameters=SchemaConfigs.BATCH_AGENCIES_POST.value.get_schema_populate_parameters(), + wrapper_function=bulk_post_agencies, + schema_populate_parameters=SchemaConfigs.BULK_AGENCIES_POST.value.get_schema_populate_parameters(), ) @endpoint_info( - namespace=namespace_batch, + namespace=namespace_bulk, auth_info=WRITE_ONLY_AUTH_INFO, - schema_config=SchemaConfigs.BATCH_AGENCIES_PUT, + schema_config=SchemaConfigs.BULK_AGENCIES_PUT, description=add_csv_description( initial_description="Updates multiple agencies from a CSV file." ), @@ -61,18 +61,18 @@ def post(self, access_info: AccessInfoPrimary) -> Response: ) def put(self, access_info: AccessInfoPrimary): return self.run_endpoint( - wrapper_function=batch_put_agencies, - schema_populate_parameters=SchemaConfigs.BATCH_AGENCIES_PUT.value.get_schema_populate_parameters(), + wrapper_function=bulk_put_agencies, + schema_populate_parameters=SchemaConfigs.BULK_AGENCIES_PUT.value.get_schema_populate_parameters(), ) -@namespace_batch.route("/data-sources") -class DataSourcesBatch(PsycopgResource): +@namespace_bulk.route("/data-sources") +class DataSourcesBulk(PsycopgResource): @endpoint_info( - namespace=namespace_batch, + namespace=namespace_bulk, auth_info=STANDARD_JWT_AUTH_INFO, - schema_config=SchemaConfigs.BATCH_DATA_SOURCES_POST, + schema_config=SchemaConfigs.BULK_DATA_SOURCES_POST, description=add_csv_description( initial_description="Adds multiple data sources from a CSV file." 
), @@ -82,14 +82,14 @@ class DataSourcesBatch(PsycopgResource): ) def post(self, access_info: AccessInfoPrimary): return self.run_endpoint( - wrapper_function=batch_post_data_sources, - schema_populate_parameters=SchemaConfigs.BATCH_DATA_SOURCES_POST.value.get_schema_populate_parameters(), + wrapper_function=bulk_post_data_sources, + schema_populate_parameters=SchemaConfigs.BULK_DATA_SOURCES_POST.value.get_schema_populate_parameters(), ) @endpoint_info( - namespace=namespace_batch, + namespace=namespace_bulk, auth_info=WRITE_ONLY_AUTH_INFO, - schema_config=SchemaConfigs.BATCH_DATA_SOURCES_PUT, + schema_config=SchemaConfigs.BULK_DATA_SOURCES_PUT, description=add_csv_description( initial_description="Updates multiple data sources from a CSV file." ), @@ -100,6 +100,6 @@ def post(self, access_info: AccessInfoPrimary): def put(self, access_info: AccessInfoPrimary): return self.run_endpoint( - wrapper_function=batch_put_data_sources, - schema_populate_parameters=SchemaConfigs.BATCH_DATA_SOURCES_PUT.value.get_schema_populate_parameters(), + wrapper_function=bulk_put_data_sources, + schema_populate_parameters=SchemaConfigs.BULK_DATA_SOURCES_PUT.value.get_schema_populate_parameters(), ) diff --git a/resources/endpoint_schema_config.py b/resources/endpoint_schema_config.py index 5805fc85..3db2f2ee 100644 --- a/resources/endpoint_schema_config.py +++ b/resources/endpoint_schema_config.py @@ -12,8 +12,8 @@ PermissionsRequestDTO, PermissionsGetRequestSchema, ) -from middleware.schema_and_dto_logic.primary_resource_dtos.batch_dtos import ( - BatchRequestDTO, +from middleware.schema_and_dto_logic.primary_resource_dtos.bulk_dtos import ( + BulkRequestDTO, ) from middleware.schema_and_dto_logic.primary_resource_dtos.match_dtos import AgencyMatchDTO from middleware.schema_and_dto_logic.primary_resource_dtos.reset_token_dtos import ( @@ -23,7 +23,7 @@ ArchivesGetResponseSchema, ArchivesPutRequestSchema, ) -from middleware.schema_and_dto_logic.primary_resource_schemas.batch_schemas 
import ( +from middleware.schema_and_dto_logic.primary_resource_schemas.bulk_schemas import ( BatchRequestSchema, BatchPostResponseSchema, BatchPutResponseSchema, @@ -502,22 +502,22 @@ class SchemaConfigs(Enum): # endregion # region Batch - BATCH_DATA_SOURCES_POST = EndpointSchemaConfig( + BULK_DATA_SOURCES_POST = EndpointSchemaConfig( input_schema=DataSourcesPostBatchRequestSchema(), input_dto_class=DataSourcesPostDTO, primary_output_schema=BatchPostResponseSchema(), ) - BATCH_DATA_SOURCES_PUT = EndpointSchemaConfig( + BULK_DATA_SOURCES_PUT = EndpointSchemaConfig( input_schema=DataSourcesPutBatchRequestSchema(), input_dto_class=DataSourcesPutDTO, primary_output_schema=BatchPutResponseSchema(), ) - BATCH_AGENCIES_POST = EndpointSchemaConfig( + BULK_AGENCIES_POST = EndpointSchemaConfig( input_schema=AgenciesPostBatchRequestSchema(), input_dto_class=AgenciesPostDTO, primary_output_schema=BatchPostResponseSchema(), ) - BATCH_AGENCIES_PUT = EndpointSchemaConfig( + BULK_AGENCIES_PUT = EndpointSchemaConfig( input_schema=AgenciesPutBatchRequestSchema(), input_dto_class=AgenciesPutDTO, primary_output_schema=BatchPutResponseSchema(), diff --git a/tests/helper_scripts/helper_classes/RequestValidator.py b/tests/helper_scripts/helper_classes/RequestValidator.py index 5c0b4bc6..c82db5df 100644 --- a/tests/helper_scripts/helper_classes/RequestValidator.py +++ b/tests/helper_scripts/helper_classes/RequestValidator.py @@ -468,57 +468,57 @@ def get_api_spec( ) @dataclass - class BatchOperationParams: + class BulkOperationParams: file: BytesIO headers: dict expected_response_status: HTTPStatus = HTTPStatus.OK - def insert_agencies_batch( + def insert_agencies_bulk( self, - bop: BatchOperationParams, - expected_schema=SchemaConfigs.BATCH_AGENCIES_POST.value.primary_output_schema, + bop: BulkOperationParams, + expected_schema=SchemaConfigs.BULK_AGENCIES_POST.value.primary_output_schema, ): return self.post( - endpoint="/api/batch/agencies", + endpoint="/api/bulk/agencies", 
headers=bop.headers, file=bop.file, expected_schema=expected_schema, expected_response_status=bop.expected_response_status, ) - def update_agencies_batch( + def update_agencies_bulk( self, - bop: BatchOperationParams, - expected_schema=SchemaConfigs.BATCH_AGENCIES_PUT.value.primary_output_schema, + bop: BulkOperationParams, + expected_schema=SchemaConfigs.BULK_AGENCIES_PUT.value.primary_output_schema, ): return self.put( - endpoint="/api/batch/agencies", + endpoint="/api/bulk/agencies", headers=bop.headers, file=bop.file, expected_schema=expected_schema, expected_response_status=bop.expected_response_status, ) - def insert_data_sources_batch( + def insert_data_sources_bulk( self, - bop: BatchOperationParams, - expected_schema=SchemaConfigs.BATCH_DATA_SOURCES_POST.value.primary_output_schema, + bop: BulkOperationParams, + expected_schema=SchemaConfigs.BULK_DATA_SOURCES_POST.value.primary_output_schema, ): return self.post( - endpoint="/api/batch/data-sources", + endpoint="/api/bulk/data-sources", headers=bop.headers, file=bop.file, expected_schema=expected_schema, expected_response_status=bop.expected_response_status, ) - def update_data_sources_batch( + def update_data_sources_bulk( self, - bop: BatchOperationParams, - expected_schema=SchemaConfigs.BATCH_DATA_SOURCES_PUT.value.primary_output_schema, + bop: BulkOperationParams, + expected_schema=SchemaConfigs.BULK_DATA_SOURCES_PUT.value.primary_output_schema, ): return self.put( - endpoint="/api/batch/data-sources", + endpoint="/api/bulk/data-sources", headers=bop.headers, file=bop.file, expected_schema=expected_schema, diff --git a/tests/integration/test_batch.py b/tests/integration/test_bulk.py similarity index 96% rename from tests/integration/test_batch.py rename to tests/integration/test_bulk.py index 9ace80ca..32e964bb 100644 --- a/tests/integration/test_batch.py +++ b/tests/integration/test_bulk.py @@ -8,12 +8,12 @@ from conftest import test_data_creator_flask, monkeysession from database_client.enums import 
LocationType -from middleware.primary_resource_logic.batch_logic import listify_strings +from middleware.primary_resource_logic.bulk_logic import listify_strings from middleware.schema_and_dto_logic.common_response_schemas import MessageSchema from middleware.schema_and_dto_logic.dynamic_logic.dynamic_csv_to_schema_conversion_logic import ( SchemaUnflattener, ) -from middleware.schema_and_dto_logic.primary_resource_schemas.batch_schemas import ( +from middleware.schema_and_dto_logic.primary_resource_schemas.bulk_schemas import ( AgenciesPostRequestFlatBaseSchema, DataSourcesPostRequestFlatBaseSchema, AgenciesPutRequestFlatBaseSchema, @@ -140,7 +140,7 @@ def create_csv_and_run( with SimpleTempFile(suffix=suffix) as temp_file: runner.csv_creator.create_csv(file=temp_file, rows=rows) return request_validator_method( - bop=RequestValidator.BatchOperationParams( + bop=RequestValidator.BulkOperationParams( file=temp_file, headers=runner.tdc.get_admin_tus().jwt_authorization_header, expected_response_status=expected_response_status, @@ -167,7 +167,7 @@ def test_batch_agencies_insert_happy_path( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.insert_agencies_batch, + request_validator_method=runner.tdc.request_validator.insert_agencies_bulk, ) ids = data["ids"] @@ -198,7 +198,7 @@ def test_batch_agencies_insert_some_errors( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.insert_agencies_batch, + request_validator_method=runner.tdc.request_validator.insert_agencies_bulk, ) check_for_errors(data) @@ -212,7 +212,7 @@ def test_batch_agencies_insert_wrong_file_type( runner=agencies_post_runner, rows=[], suffix=".json", - request_validator_method=runner.tdc.request_validator.insert_agencies_batch, + request_validator_method=runner.tdc.request_validator.insert_agencies_bulk, expected_response_status=HTTPStatus.UNSUPPORTED_MEDIA_TYPE, 
expected_schema=MessageSchema(), ) @@ -231,7 +231,7 @@ def test_batch_agencies_update_happy_path( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.update_agencies_batch, + request_validator_method=runner.tdc.request_validator.update_agencies_bulk, ) ids = [agencies[i].id for i in range(3)] @@ -265,7 +265,7 @@ def test_batch_agencies_update_some_errors( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.update_agencies_batch, + request_validator_method=runner.tdc.request_validator.update_agencies_bulk, ) check_for_errors(data, check_ids=False) @@ -278,7 +278,7 @@ def test_batch_agencies_update_wrong_file_type( runner=agencies_put_runner, rows=[], suffix=".json", - request_validator_method=runner.tdc.request_validator.update_agencies_batch, + request_validator_method=runner.tdc.request_validator.update_agencies_bulk, expected_response_status=HTTPStatus.UNSUPPORTED_MEDIA_TYPE, expected_schema=MessageSchema(), ) @@ -295,7 +295,7 @@ def test_batch_data_sources_insert_happy_path( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.insert_data_sources_batch, + request_validator_method=runner.tdc.request_validator.insert_data_sources_bulk, ) ids = data["ids"] unflattener = SchemaUnflattener( @@ -326,7 +326,7 @@ def test_batch_data_sources_insert_some_errors( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.insert_data_sources_batch, + request_validator_method=runner.tdc.request_validator.insert_data_sources_bulk, ) check_for_errors(data) @@ -339,7 +339,7 @@ def test_batch_data_sources_insert_wrong_file_type( runner=data_sources_post_runner, rows=[], suffix=".json", - request_validator_method=runner.tdc.request_validator.insert_data_sources_batch, + request_validator_method=runner.tdc.request_validator.insert_data_sources_bulk, 
expected_response_status=HTTPStatus.UNSUPPORTED_MEDIA_TYPE, expected_schema=MessageSchema(), ) @@ -356,7 +356,7 @@ def test_batch_data_sources_update_happy_path( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.update_data_sources_batch, + request_validator_method=runner.tdc.request_validator.update_data_sources_bulk, ) ids = [data_source.id for data_source in data_sources] @@ -395,7 +395,7 @@ def test_batch_data_sources_update_some_errors( data = create_csv_and_run( runner=runner, rows=rows, - request_validator_method=runner.tdc.request_validator.update_data_sources_batch, + request_validator_method=runner.tdc.request_validator.update_data_sources_bulk, ) check_for_errors(data, check_ids=False) @@ -408,7 +408,7 @@ def test_batch_data_sources_update_wrong_file_type( runner=data_sources_put_runner, rows=[], suffix=".json", - request_validator_method=runner.tdc.request_validator.update_data_sources_batch, + request_validator_method=runner.tdc.request_validator.update_data_sources_bulk, expected_response_status=HTTPStatus.UNSUPPORTED_MEDIA_TYPE, expected_schema=MessageSchema(), ) diff --git a/utilities/namespace.py b/utilities/namespace.py index e2450309..67474e9e 100644 --- a/utilities/namespace.py +++ b/utilities/namespace.py @@ -25,7 +25,7 @@ class AppNamespaces(Enum): path="notifications", description="Notifications Namespace" ) MAP = NamespaceAttributes(path="map", description="Map Namespace") - BATCH = NamespaceAttributes(path="batch", description="Batch Namespace") + BULK = NamespaceAttributes(path="bulk", description="Bulk Namespace") MATCH = NamespaceAttributes(path="match", description="Match Namespace")