From f15211c4676d65a42678d9502b7ad21ec6934555 Mon Sep 17 00:00:00 2001 From: James Kent Date: Tue, 24 Oct 2023 10:01:10 -0500 Subject: [PATCH] update connexion to 3.0.0 --- store/neurostore/core.py | 26 +-- store/neurostore/openapi | 2 +- store/neurostore/requirements.txt | 3 +- store/neurostore/resources/base.py | 4 +- store/neurostore/resources/data.py | 28 +-- store/neurostore/resources/utils.py | 6 +- store/neurostore/schemas/data.py | 185 ++++++++++-------- store/neurostore/tests/api/test_analyses.py | 19 +- .../neurostore/tests/api/test_annotations.py | 80 ++++---- .../neurostore/tests/api/test_base_studies.py | 24 +-- store/neurostore/tests/api/test_conditions.py | 6 +- store/neurostore/tests/api/test_crud.py | 8 +- store/neurostore/tests/api/test_images.py | 9 +- store/neurostore/tests/api/test_points.py | 9 +- .../neurostore/tests/api/test_query_params.py | 14 +- store/neurostore/tests/api/test_studies.py | 39 ++-- store/neurostore/tests/api/test_studysets.py | 32 +-- store/neurostore/tests/conftest.py | 1 + store/neurostore/tests/request_utils.py | 17 +- 19 files changed, 264 insertions(+), 248 deletions(-) diff --git a/store/neurostore/core.py b/store/neurostore/core.py index 7a59fc2a7..92477f9e2 100644 --- a/store/neurostore/core.py +++ b/store/neurostore/core.py @@ -4,11 +4,10 @@ from authlib.integrations.flask_client import OAuth import connexion -from connexion.json_schema import default_handlers as json_schema_handlers -from connexion.resolver import MethodViewResolver +# from connexion.json_schema import default_handlers as json_schema_handlers +from connexion.resolver import MethodResolver from flask_caching import Cache from flask_cors import CORS -import prance import sqltap.wsgi from .or_json import ORJSONDecoder, ORJSONEncoder @@ -16,7 +15,7 @@ connexion_app = connexion.FlaskApp( - __name__, specification_dir="openapi/", debug=os.getenv("DEBUG", False) == "True" + __name__, specification_dir="openapi/" ) app = connexion_app.app @@ -34,29 +33,14 @@ options = {"swagger_ui": True} -# https://github.com/spec-first/connexion/issues/254#issuecomment-1133843523 -json_schema_handlers[""] = lambda uri: ( - json_schema_handlers["file"](str(connexion_app.specification_dir / uri)) -) - - -# https://github.com/spec-first/connexion/issues/254#issuecomment-504699959 -def get_bundled_specs(main_file): - parser = prance.ResolvingParser( - str(main_file.absolute()), lazy=True, backend="openapi-spec-validator" - ) - parser.parse() - return parser.specification - - openapi_file = Path(os.path.dirname(__file__) + "/openapi/neurostore-openapi.yml") connexion_app.add_api( - get_bundled_specs(openapi_file), + openapi_file, base_path="/api", options=options, arguments={"title": "NeuroStore API"}, - resolver=MethodViewResolver("neurostore.resources"), + resolver=MethodResolver("neurostore.resources"), strict_validation=os.getenv("DEBUG", False) == "True", validate_responses=os.getenv("DEBUG", False) == "True", ) diff --git a/store/neurostore/openapi b/store/neurostore/openapi index e588a84e7..08e4af955 160000 --- a/store/neurostore/openapi +++ b/store/neurostore/openapi @@ -1 +1 @@ -Subproject commit e588a84e73e4ca12e97ca0df34e2be1999a2ab45 +Subproject commit 08e4af955691ecfe6e967d11b648e2234f4b5401 diff --git a/store/neurostore/requirements.txt b/store/neurostore/requirements.txt index 584cdf036..5153a015a 100644 --- a/store/neurostore/requirements.txt +++ b/store/neurostore/requirements.txt @@ -1,7 +1,7 @@ aniso8601~=8.1 auth0-python~=3.16 authlib~=0.15 -connexion[swagger-ui]~=2.7 
+connexion[swagger-ui,uvicorn]==3.0.0a7 email-validator~=1.1 flake8~=3.8 flask~=2.0 @@ -31,4 +31,3 @@ openapi-spec-validator~=0.3 scipy~=1.9 pytest~=7.1 orjson~=3.8 -prance diff --git a/store/neurostore/resources/base.py b/store/neurostore/resources/base.py index f66e38064..c5adcdf74 100644 --- a/store/neurostore/resources/base.py +++ b/store/neurostore/resources/base.py @@ -3,7 +3,7 @@ """ import re -import connexion +from connexion.context import context from flask import abort, request, current_app # jsonify from flask.views import MethodView @@ -45,7 +45,7 @@ def create_user(): # user signed up with auth0, but has not made any queries yet... # should have endpoint to "create user" after sign on with auth0 current_user = User( - external_id=connexion.context["user"], name=profile_info.get("name", "Unknown") + external_id=context["user"], name=profile_info.get("name", "Unknown") ) return current_user diff --git a/store/neurostore/resources/data.py b/store/neurostore/resources/data.py index e6ddbb5ff..8a7132b61 100644 --- a/store/neurostore/resources/data.py +++ b/store/neurostore/resources/data.py @@ -44,13 +44,13 @@ ] LIST_CLONE_ARGS = { - "source_id": fields.String(missing=None), - "source": fields.String(missing=None), - "unique": BooleanOrString(missing=False), + "source_id": fields.String(load_default=None), + "source": fields.String(load_default=None), + "unique": BooleanOrString(load_default=False), } LIST_NESTED_ARGS = { - "nested": fields.Boolean(load_default=False, missing=False), + "nested": fields.Boolean(load_default=False), } @@ -91,7 +91,7 @@ def serialize_records(self, records, args): @view_maker class AnnotationsView(ObjectView, ListView): - _view_fields = {**LIST_CLONE_ARGS, "studyset_id": fields.String(missing=None)} + _view_fields = {**LIST_CLONE_ARGS, "studyset_id": fields.String(load_default=None)} _nested = {"annotation_analyses": "AnnotationAnalysesResource"} _linked = { "studyset": "StudysetsView", @@ -154,10 +154,10 @@ class BaseStudiesView(ObjectView, ListView): _nested = {"versions": "StudiesView"} _view_fields = { - "level": fields.String(default="group", missing="group"), - "flat": fields.Boolean(load_default=False, missing=False), - "info": fields.Boolean(load_default=False, missing=False), - "data_type": fields.String(missing=None), + "level": fields.String(default="group", load_default="group"), + "flat": fields.Boolean(load_default=False), + "info": fields.Boolean(load_default=False), + "data_type": fields.String(load_default=None), } _multi_search = ("name", "description") @@ -272,11 +272,11 @@ def post(self): class StudiesView(ObjectView, ListView): _view_fields = { **{ - "data_type": fields.String(missing=None), - "studyset_owner": fields.String(missing=None), - "level": fields.String(default="group", missing="group"), - "flat": fields.Boolean(load_default=False, missing=False), - "info": fields.Boolean(load_default=False, missing=False), + "data_type": fields.String(load_default=None), + "studyset_owner": fields.String(load_default=None), + "level": fields.String(default="group", load_default="group"), + "flat": fields.Boolean(load_default=False), + "info": fields.Boolean(load_default=False), }, **LIST_NESTED_ARGS, **LIST_CLONE_ARGS, diff --git a/store/neurostore/resources/utils.py b/store/neurostore/resources/utils.py index a60c90d37..7538b61c4 100644 --- a/store/neurostore/resources/utils.py +++ b/store/neurostore/resources/utils.py @@ -3,7 +3,7 @@ """ import re -import connexion +from connexion.context import context from .. import models from .. 
import schemas @@ -16,10 +16,10 @@ def camel_case_split(str): def get_current_user(): - user = connexion.context.get("user") + user = context.get("user") if user: return models.User.query.filter_by( - external_id=connexion.context["user"] + external_id=user ).first() return None diff --git a/store/neurostore/schemas/data.py b/store/neurostore/schemas/data.py index 1826b900a..e872f56d1 100644 --- a/store/neurostore/schemas/data.py +++ b/store/neurostore/schemas/data.py @@ -524,14 +524,120 @@ class JSONLSAnalysisSchema(AnalysisSchema): study = fields.Function(lambda analysis: analysis.study.IRI, dump_only=True) -class StudysetSnapshot(object): +class BaseSnapshot(object): def __init__(self): pass def _serialize_dt(self, dt): return dt.isoformat() if dt else dt + def serialize(self, resource_dict): + return orjson.dumps(resource_dict) + + def dump_and_serialize(self, resource): + return self.serialize(self.dump(resource)) + + +class ImageSnapshot(BaseSnapshot): + def dump(self, i): + return { + "id": i.id, + "user": i.user_id, + "url": i.url, + "space": i.space, + "value_type": i.value_type, + "filename": i.filename, + "add_date": i.add_date, + } + + +class PointValueSnapshot(BaseSnapshot): + def dump(self, v): + return { + "kind": v.kind, + "value": v.value, + } + + +class PointSnapshot(BaseSnapshot): + def dump(self, p): + v_schema = PointValueSnapshot() + return { + "id": p.id, + "coordinates": p.coordinates, + "kind": p.kind, + "space": p.space, + "image": p.image, + "label_id": p.label_id, + "values": [ + v_schema.dump(v) + for v in p.values + ], + } + + +class ConditionSnapshot(BaseSnapshot): + def dump(self, ac): + return { + "id": ac.condition_id, + "user": ac.condition.user_id, + "name": ac.condition.name, + "description": ac.condition.description, + } + + +class AnalysisSnapshot(BaseSnapshot): + def dump(self, a): + ac_schema = ConditionSnapshot() + p_schema = PointSnapshot() + i_schema = ImageSnapshot() + return { + "id": a.id, + "user": a.user_id, + "name": a.name, + "description": a.description, + "conditions": [ + ac_schema.dump(ac) + for ac in a.analysis_conditions + ], + "weights": list(a.weights), + "points": [ + p_schema.dump(p) + for p in a.points + ], + "images": [ + i_schema.dump(i) + for i in a.images + ], + } + + +class StudySnapshot(BaseSnapshot): + def dump(self, s): + a_schema = AnalysisSnapshot() + return { + "id": s.id, + "created_at": self._serialize_dt(s.created_at), + "updated_at": self._serialize_dt(s.updated_at), + "user": s.user_id, + "name": s.name, + "description": s.description, + "publication": s.publication, + "doi": s.doi, + "pmid": s.pmid, + "authors": s.authors, + "year": s.year, + "metadata": s.metadata_, + "source": s.source, + "source_id": s.source_id, + "source_updated_at": self._serialize_dt(s.source_updated_at), + "analyses": [a_schema.dump(a) for a in s.analyses], + } + + +class StudysetSnapshot(BaseSnapshot): def dump(self, studyset): + s_schema = StudySnapshot() return { "id": studyset.id, "name": studyset.name, @@ -543,82 +649,7 @@ def dump(self, studyset): "created_at": self._serialize_dt(studyset.created_at), "updated_at": self._serialize_dt(studyset.updated_at), "studies": [ - { - "id": s.id, - "created_at": self._serialize_dt(s.created_at), - "updated_at": self._serialize_dt(s.updated_at), - "user": s.user_id, - "name": s.name, - "description": s.description, - "publication": s.publication, - "doi": s.doi, - "pmid": s.pmid, - "authors": s.authors, - "year": s.year, - "metadata": s.metadata_, - "source": s.source, - "source_id": 
s.source_id, - "source_updated_at": self._serialize_dt(s.source_updated_at), - "analyses": [ - { - "id": a.id, - "user": a.user_id, - "study": s.id, - "name": a.name, - "description": a.description, - "conditions": [ - { - "id": ac.condition_id, - "user": ac.condition.user_id, - "name": ac.condition.name, - "description": ac.condition.description, - } - for ac in a.analysis_conditions - ], - "weights": list(a.weights), - "points": [ - { - "id": p.id, - "coordinates": p.coordinates, - "analysis": a.id, - "kind": p.kind, - "space": p.space, - "image": p.image, - "label_id": p.label_id, - "values": [ - { - "kind": v.kind, - "value": v.value, - } - for v in p.values - ], - } - for p in a.points - ], - "images": [ - { - "id": i.id, - "user": i.user_id, - "analysis": a.id, - "analysis_name": a.name, - "url": i.url, - "space": i.space, - "value_type": i.value_type, - "filename": i.filename, - "add_date": i.add_date, - } - for i in a.images - ], - } - for a in s.analyses - ], - } + s_schema.dump(s) for s in studyset.studies ], } - - def serialize(self, studyset_dict): - return orjson.dumps(studyset_dict) - - def dump_and_serialize(self, studyset): - return self.serialize(self.dump(studyset)) diff --git a/store/neurostore/tests/api/test_analyses.py b/store/neurostore/tests/api/test_analyses.py index 4c194c838..821408c8d 100644 --- a/store/neurostore/tests/api/test_analyses.py +++ b/store/neurostore/tests/api/test_analyses.py @@ -1,4 +1,3 @@ -from ..request_utils import decode_json from ...models import Analysis, User, Point, Image from ...schemas import AnalysisSchema @@ -8,15 +7,15 @@ def test_get_nested_and_not_nested_analyses(auth_client, ingest_neurosynth, sess non_nested = auth_client.get(f"/api/analyses/{analysis_id}?nested=false") nested = auth_client.get(f"/api/analyses/{analysis_id}?nested=true") - assert isinstance(non_nested.json["points"][0], str) - assert isinstance(nested.json["points"][0], dict) + assert isinstance(non_nested.json()["points"][0], str) + assert isinstance(nested.json()["points"][0], dict) def test_get_analyses(auth_client, ingest_neurosynth, session): # List of analyses resp = auth_client.get("/api/analyses/") assert resp.status_code == 200 - analysis_list = decode_json(resp)["results"] + analysis_list = resp.json()["results"] assert isinstance(analysis_list, list) assert len(analysis_list) == Analysis.query.count() @@ -41,9 +40,9 @@ def test_get_analyses(auth_client, ingest_neurosynth, session): # Query specify analysis ID resp = auth_client.get(f"/api/analyses/{a_id}") assert resp.status_code == 200 - assert decode_json(resp) == analysis + assert resp.json() == analysis - assert decode_json(resp)["id"] == a_id + assert resp.json()["id"] == a_id def test_post_analyses(auth_client, ingest_neurosynth, session): @@ -111,7 +110,7 @@ def test_update_points_analyses(auth_client, ingest_neurovault, session): update_points = auth_client.put(f"/api/analyses/{analysis_db.id}", data=payload) assert update_points.status_code == 200 - assert payload["points"] == update_points.json["points"] + assert payload["points"] == update_points.json()["points"] # see if cache updated nested_get = auth_client.get(f"/api/analyses/{analysis_db.id}?nested=false") @@ -119,8 +118,8 @@ def test_update_points_analyses(auth_client, ingest_neurovault, session): get = auth_client.get(f"/api/analyses/{analysis_db.id}") assert ( - set(p["id"] for p in nested_get.json["points"]) - == set(p for p in nonnested_get.json["points"]) - == set(p for p in get.json["points"]) + set(p["id"] for p in 
nested_get.json()["points"]) + == set(p for p in nonnested_get.json()["points"]) + == set(p for p in get.json()["points"]) == set(p for p in payload["points"]) ) diff --git a/store/neurostore/tests/api/test_annotations.py b/store/neurostore/tests/api/test_annotations.py index 7f668c15c..ede237a0e 100644 --- a/store/neurostore/tests/api/test_annotations.py +++ b/store/neurostore/tests/api/test_annotations.py @@ -12,7 +12,7 @@ def test_post_blank_annotation(auth_client, ingest_neurosynth, session): resp = auth_client.post("/api/annotations/", data=payload) assert resp.status_code == 200 # assert there exists an annotation analysis for every analysis - assert len(resp.json["notes"]) == len( + assert len(resp.json()["notes"]) == len( [a for study in dset.studies for a in study.analyses] ) @@ -45,7 +45,7 @@ def test_get_annotations(auth_client, ingest_neurosynth, session): resp = auth_client.get(f"/api/annotations/?studyset_id={dset.id}") assert resp.status_code == 200 - annot_id = resp.json["results"][0]["id"] + annot_id = resp.json()["results"][0]["id"] annot = auth_client.get(f"/api/annotations/{annot_id}") assert annot.status_code == 200 @@ -54,7 +54,7 @@ def test_get_annotations(auth_client, ingest_neurosynth, session): assert annot_export.status_code == 200 - df = pd.read_csv(StringIO(annot_export.json["annotation_csv"])) + df = pd.read_csv(StringIO(annot_export.json()["annotation_csv"])) assert isinstance(df, pd.DataFrame) @@ -65,7 +65,7 @@ def test_clone_annotation(auth_client, simple_neurosynth_annotation, session): f"/api/annotations/?source_id={annotation_entry.id}", data={} ) assert resp.status_code == 200 - data = resp.json + data = resp.json() assert data["name"] == annotation_entry.name assert data["source_id"] == annotation_entry.id assert data["source"] == "neurostore" @@ -75,26 +75,26 @@ def test_single_analysis_delete(auth_client, user_data, session): user = User.query.filter_by(name="user1").first() # get relevant studyset studysets = auth_client.get(f"/api/studysets/?user_id={user.external_id}") - studyset_id = studysets.json["results"][0]["id"] + studyset_id = studysets.json()["results"][0]["id"] studyset = auth_client.get(f"/api/studysets/{studyset_id}") # get relevant annotation annotations = auth_client.get(f"/api/annotations/?studyset_id={studyset_id}") - annotation_id = annotations.json["results"][0]["id"] + annotation_id = annotations.json()["results"][0]["id"] annotation = auth_client.get(f"/api/annotations/{annotation_id}") # pick study to edit - study_id = studyset.json["studies"][0] + study_id = studyset.json()["studies"][0] study = auth_client.get(f"/api/studies/{study_id}") # select analysis to delete - analysis_id = study.json["analyses"][0] + analysis_id = study.json()["analyses"][0] auth_client.delete(f"/api/analyses/{analysis_id}") # test if annotations were updated updated_annotation = auth_client.get(f"/api/annotations/{annotation_id}") assert updated_annotation.status_code == 200 - assert (len(annotation.json["notes"]) - 1) == ( - len(updated_annotation.json["notes"]) + assert (len(annotation.json()["notes"]) - 1) == ( + len(updated_annotation.json()["notes"]) ) @@ -102,14 +102,14 @@ def test_study_removal_from_studyset(auth_client, session, user_data): user = User.query.filter_by(name="user1").first() # get relevant studyset studysets = auth_client.get(f"/api/studysets/?user_id={user.external_id}") - studyset_id = studysets.json["results"][0]["id"] + studyset_id = studysets.json()["results"][0]["id"] studyset = 
auth_client.get(f"/api/studysets/{studyset_id}") # get relevant annotation annotations = auth_client.get(f"/api/annotations/?studyset_id={studyset_id}") - annotation_id = annotations.json["results"][0]["id"] + annotation_id = annotations.json()["results"][0]["id"] annotation = auth_client.get(f"/api/annotations/{annotation_id}") # remove study from studyset - studies = studyset.json["studies"] + studies = studyset.json()["studies"] studies.pop() # update studyset @@ -119,8 +119,8 @@ def test_study_removal_from_studyset(auth_client, session, user_data): updated_annotation = auth_client.get(f"/api/annotations/{annotation_id}") assert updated_annotation.status_code == 200 - assert (len(annotation.json["notes"]) - 1) == ( - len(updated_annotation.json["notes"]) + assert (len(annotation.json()["notes"]) - 1) == ( + len(updated_annotation.json()["notes"]) ) @@ -128,17 +128,17 @@ def test_study_addition_to_studyset(auth_client, session, user_data): user = User.query.filter_by(name="user1").first() # get relevant studyset studysets = auth_client.get(f"/api/studysets/?user_id={user.external_id}") - studyset_id = studysets.json["results"][0]["id"] + studyset_id = studysets.json()["results"][0]["id"] studyset = auth_client.get(f"/api/studysets/{studyset_id}") # get relevant annotation annotations = auth_client.get(f"/api/annotations/?studyset_id={studyset_id}") - annotation_id = annotations.json["results"][0]["id"] + annotation_id = annotations.json()["results"][0]["id"] annotation = auth_client.get(f"/api/annotations/{annotation_id}") # add a new study - studies = studyset.json["studies"] + studies = studyset.json()["studies"] user2 = User.query.filter_by(name="user2").first() studies_u2 = auth_client.get(f"/api/studies/?user_id={user2.external_id}") - studies_u2_ids = [s["id"] for s in studies_u2.json["results"]] + studies_u2_ids = [s["id"] for s in studies_u2.json()["results"]] studies.extend(studies_u2_ids) # update studyset @@ -148,8 +148,8 @@ def test_study_addition_to_studyset(auth_client, session, user_data): updated_annotation = auth_client.get(f"/api/annotations/{annotation_id}") assert updated_annotation.status_code == 200 - assert (len(annotation.json["notes"]) + 1) == ( - len(updated_annotation.json["notes"]) + assert (len(annotation.json()["notes"]) + 1) == ( + len(updated_annotation.json()["notes"]) ) @@ -157,7 +157,7 @@ def test_blank_slate_creation(auth_client, session): # create empty studyset studyset_data = {"name": "test studyset"} studyset_post = auth_client.post("/api/studysets/", data=studyset_data) - ss_id = studyset_post.json["id"] + ss_id = studyset_post.json()["id"] # create annotation annotation_data = { "studyset": ss_id, @@ -169,7 +169,7 @@ def test_blank_slate_creation(auth_client, session): # create study study_data = {"name": "fake study"} study_post = auth_client.post("/api/studies/", data=study_data) - s_id = study_post.json["id"] + s_id = study_post.json()["id"] # add study to studyset studyset_put_data = {"studies": [s_id]} @@ -179,10 +179,10 @@ def test_blank_slate_creation(auth_client, session): study_put_data = {"analyses": [{"name": "analysis1"}, {"name": "analysis2"}]} _ = auth_client.put(f"/api/studies/{s_id}", data=study_put_data) - annotation_get = auth_client.get(f"/api/annotations/{annotation_post.json['id']}") + annotation_get = auth_client.get(f"/api/annotations/{annotation_post.json()['id']}") - assert len(annotation_get.json["notes"]) == ( - (len(annotation_post.json["notes"]) + 2) + assert len(annotation_get.json()["notes"]) == ( + 
(len(annotation_post.json()["notes"]) + 2) ) @@ -190,28 +190,28 @@ def test_analysis_addition_to_studyset(auth_client, session, user_data): user = User.query.filter_by(name="user1").first() # get relevant studyset studysets = auth_client.get(f"/api/studysets/?user_id={user.external_id}") - studyset_id = studysets.json["results"][0]["id"] + studyset_id = studysets.json()["results"][0]["id"] studyset = auth_client.get(f"/api/studysets/{studyset_id}") # get relevant annotation annotations = auth_client.get(f"/api/annotations/?studyset_id={studyset_id}") - annotation_id = annotations.json["results"][0]["id"] + annotation_id = annotations.json()["results"][0]["id"] annotation = auth_client.get(f"/api/annotations/{annotation_id}") # add a new analysis - study_id = studyset.json["studies"][0] - analysis = {"id": auth_client.get(f"/api/studies/{study_id}").json["analyses"][0]} + study_id = studyset.json()["studies"][0] + analysis = {"id": auth_client.get(f"/api/studies/{study_id}").json()["analyses"][0]} analysis_new = {"name": "new_analysis"} analyses = [analysis, analysis_new] updated_study = auth_client.put( f"/api/studies/{study_id}", data={"analyses": [analysis, analysis_new]} ) - assert len(updated_study.json["analyses"]) == len(analyses) + assert len(updated_study.json()["analyses"]) == len(analyses) # test if annotations were updated updated_annotation = auth_client.get(f"/api/annotations/{annotation_id}") assert updated_annotation.status_code == 200 - assert (len(annotation.json["notes"]) + 1) == ( - len(updated_annotation.json["notes"]) + assert (len(annotation.json()["notes"]) + 1) == ( + len(updated_annotation.json()["notes"]) ) @@ -275,7 +275,7 @@ def test_put_nonexistent_analysis(auth_client, ingest_neurosynth, session): assert ( auth_client.put( - f"/api/annotations/{annot.json['id']}", data=bad_payload + f"/api/annotations/{annot.json()['id']}", data=bad_payload ).status_code == 400 ) @@ -303,14 +303,14 @@ def test_correct_note_overwrite(auth_client, ingest_neurosynth, session): new_value = "something new" data[0]["note"]["doo"] = new_value doo_payload = {"notes": data} - put_resp = auth_client.put(f"/api/annotations/{annot.json['id']}", data=doo_payload) + put_resp = auth_client.put(f"/api/annotations/{annot.json()['id']}", data=doo_payload) - get_resp = auth_client.get(f"/api/annotations/{annot.json['id']}") + get_resp = auth_client.get(f"/api/annotations/{annot.json()['id']}") - assert len(put_resp.json["notes"]) == len(data) - assert get_resp.json == put_resp.json + assert len(put_resp.json()["notes"]) == len(data) + assert get_resp.json() == put_resp.json() assert ( - get_resp.json["notes"][0]["note"]["doo"] - == put_resp.json["notes"][0]["note"]["doo"] + get_resp.json()["notes"][0]["note"]["doo"] + == put_resp.json()["notes"][0]["note"]["doo"] == new_value ) diff --git a/store/neurostore/tests/api/test_base_studies.py b/store/neurostore/tests/api/test_base_studies.py index 5def52dac..1f70a7946 100644 --- a/store/neurostore/tests/api/test_base_studies.py +++ b/store/neurostore/tests/api/test_base_studies.py @@ -35,8 +35,8 @@ def test_flat_base_study(auth_client, ingest_neurosynth, session): assert flat_resp.status_code == reg_resp.status_code == 200 - assert "versions" not in flat_resp.json["results"][0] - assert "versions" in reg_resp.json["results"][0] + assert "versions" not in flat_resp.json()["results"][0] + assert "versions" in reg_resp.json()["results"][0] def test_info_base_study(auth_client, ingest_neurosynth, session): @@ -46,19 +46,19 @@ def 
test_info_base_study(auth_client, ingest_neurosynth, session): assert info_resp.status_code == 200 assert reg_resp.status_code == 200 - assert "updated_at" in info_resp.json["results"][0]["versions"][0] - assert isinstance(reg_resp.json["results"][0]["versions"][0], str) + assert "updated_at" in info_resp.json()["results"][0]["versions"][0] + assert isinstance(reg_resp.json()["results"][0]["versions"][0], str) # test specific base-study - base_study_id = reg_resp.json["results"][0]["id"] + base_study_id = reg_resp.json()["results"][0]["id"] single_info_resp = auth_client.get(f"/api/base-studies/{base_study_id}?info=true") single_reg_resp = auth_client.get(f"/api/base-studies/{base_study_id}?info=false") assert single_info_resp.status_code == 200 assert single_reg_resp.status_code == 200 - assert "id" in single_info_resp.json["versions"][0] - assert isinstance(single_reg_resp.json["versions"][0], str) + assert "id" in single_info_resp.json()["versions"][0] + assert isinstance(single_reg_resp.json()["versions"][0], str) def test_has_coordinates_images(auth_client, session): @@ -104,7 +104,7 @@ def test_has_coordinates_images(auth_client, session): assert base_study.has_images is False # get the analysis - analysis_id = create_study.json["analyses"][0] + analysis_id = create_study.json()["analyses"][0] # update analysis with points analysis_point = auth_client.put( @@ -124,7 +124,7 @@ def test_has_coordinates_images(auth_client, session): assert base_study.has_images is True # delete point - point_id = analysis_point.json["points"][0] + point_id = analysis_point.json()["points"][0] del_point = auth_client.delete(f"/api/points/{point_id}") @@ -132,7 +132,7 @@ def test_has_coordinates_images(auth_client, session): assert base_study.has_coordinates is False # delete image - image_id = analysis_image.json["images"][0] + image_id = analysis_image.json()["images"][0] del_image = auth_client.delete(f"/api/images/{image_id}") @@ -166,7 +166,7 @@ def test_has_coordinates_images(auth_client, session): assert base_study_2.has_images is True # delete analysis a - analysis_ids = create_full_study.json["analyses"] + analysis_ids = create_full_study.json()["analyses"] analyses = [Analysis.query.filter_by(id=id_).one() for id_ in analysis_ids] point_analysis = image_analysis = None @@ -219,7 +219,7 @@ def test_has_coordinates_images(auth_client, session): # delete the full study delete_study = auth_client.delete( - f"/api/studies/{create_full_study_again.json['id']}" + f"/api/studies/{create_full_study_again.json()['id']}" ) assert delete_study.status_code == 200 diff --git a/store/neurostore/tests/api/test_conditions.py b/store/neurostore/tests/api/test_conditions.py index 2c904f98a..3ee038147 100644 --- a/store/neurostore/tests/api/test_conditions.py +++ b/store/neurostore/tests/api/test_conditions.py @@ -1,15 +1,15 @@ def test_get_conditions(auth_client, ingest_neurovault, session): resp = auth_client.get("/api/conditions/") assert resp.status_code == 200 - assert len(resp.json["results"]) > 1 + assert len(resp.json()["results"]) > 1 def test_post_conditions(auth_client, ingest_neurovault, session): my_condition = {"name": "ice cream", "description": "surprise, it's rocky road!"} post_resp = auth_client.post("/api/conditions/", data=my_condition) assert post_resp.status_code == 200 - post_data = post_resp.json - get_data = auth_client.get(f"/api/conditions/{post_data['id']}").json + post_data = post_resp.json() + get_data = auth_client.get(f"/api/conditions/{post_data['id']}").json() for attr in 
my_condition.keys(): assert post_data[attr] == get_data[attr] == my_condition[attr] diff --git a/store/neurostore/tests/api/test_crud.py b/store/neurostore/tests/api/test_crud.py index 3ce21979f..1a43883e7 100644 --- a/store/neurostore/tests/api/test_crud.py +++ b/store/neurostore/tests/api/test_crud.py @@ -61,7 +61,7 @@ def test_create(auth_client, user_data, endpoint, model, schema, session): d_key_sf.get(k), (StringOrNested, fields.Nested), ): - assert v == resp.json[k] + assert v == resp.json()[k] @pytest.mark.parametrize( @@ -90,10 +90,10 @@ def test_read(auth_client, user_data, endpoint, model, schema, session): resp = auth_client.get(f"/api/{endpoint}/") assert resp.status_code == 200 - assert len(expected_results) == len(resp.json["results"]) + assert len(expected_results) == len(resp.json()["results"]) query_ids = set([res.id for res in expected_results]) - resp_ids = set([res["id"] for res in resp.json["results"]]) + resp_ids = set([res["id"] for res in resp.json()["results"]]) assert query_ids == resp_ids @@ -119,7 +119,7 @@ def test_update(auth_client, user_data, endpoint, model, schema, update, session assert resp.status_code == 200 k, v = list(update.items())[0] - assert resp.json[k] == getattr(record, k) == v + assert resp.json()[k] == getattr(record, k) == v @pytest.mark.parametrize( diff --git a/store/neurostore/tests/api/test_images.py b/store/neurostore/tests/api/test_images.py index bd365c884..866cd0ec3 100644 --- a/store/neurostore/tests/api/test_images.py +++ b/store/neurostore/tests/api/test_images.py @@ -1,4 +1,3 @@ -from ..request_utils import decode_json from ...models import Study, Analysis, User, Image @@ -6,7 +5,7 @@ def test_get_images(auth_client, ingest_neurovault, session): # List of studysets resp = auth_client.get("/api/images/") assert resp.status_code == 200 - images_list = decode_json(resp)["results"] + images_list = resp.json()["results"] assert isinstance(images_list, list) @@ -28,8 +27,8 @@ def test_post_images(auth_client, session): resp = auth_client.post("/api/images/", data=payload) assert resp.status_code == 200 - assert resp.json["url"] == payload["url"] - assert resp.json["filename"] == payload["filename"] + assert resp.json()["url"] == payload["url"] + assert resp.json()["filename"] == payload["filename"] def test_put_images(auth_client, session): @@ -59,7 +58,7 @@ def test_put_images(auth_client, session): new_data = {"url": "new fake"} resp = auth_client.put(f"/api/images/{image_id}", data=new_data) - assert resp.json["url"] == new_data["url"] + assert resp.json()["url"] == new_data["url"] def test_delete_images(auth_client, session): diff --git a/store/neurostore/tests/api/test_points.py b/store/neurostore/tests/api/test_points.py index fb9e3ac65..25c556acb 100644 --- a/store/neurostore/tests/api/test_points.py +++ b/store/neurostore/tests/api/test_points.py @@ -1,4 +1,3 @@ -from ..request_utils import decode_json from ...models import Point from ...schemas import PointSchema from ...models import User, Analysis, Study @@ -7,13 +6,13 @@ def test_get_points(auth_client, ingest_neurosynth, session): # Get an analysis resp = auth_client.get("/api/analyses/") - analysis = decode_json(resp)["results"][0] + analysis = resp.json()["results"][0] point_id = analysis["points"][0] # Get a point resp = auth_client.get(f"/api/points/{point_id}") - point = decode_json(resp) + point = resp.json() # Test a few fields db_point = Point.query.filter_by(id=point_id).first() @@ -51,7 +50,7 @@ def test_put_points(auth_client, session): new_data = {"x": 10} 
resp = auth_client.put(f"/api/points/{point_id}", data=new_data) - assert resp.json["coordinates"][0] == new_data["x"] + assert resp.json()["coordinates"][0] == new_data["x"] def test_post_points(auth_client, ingest_neurosynth, session): @@ -69,7 +68,7 @@ def test_post_points(auth_client, ingest_neurosynth, session): assert resp.status_code == 200 - assert resp.json["coordinates"] == point["coordinates"] + assert resp.json()["coordinates"] == point["coordinates"] def test_delete_points(auth_client, session): diff --git a/store/neurostore/tests/api/test_query_params.py b/store/neurostore/tests/api/test_query_params.py index 5d685eb0a..9f6d466f8 100644 --- a/store/neurostore/tests/api/test_query_params.py +++ b/store/neurostore/tests/api/test_query_params.py @@ -23,12 +23,12 @@ def test_nested(auth_client, ingest_neurosynth, nested, resource_schema, session for field in fields: if nested == "true": try: - assert isinstance(resp.json["results"][0][field][0], dict) + assert isinstance(resp.json()["results"][0][field][0], dict) except IndexError: continue else: try: - assert isinstance(resp.json["results"][0][field][0], str) + assert isinstance(resp.json()["results"][0][field][0], str) except IndexError: continue @@ -39,7 +39,7 @@ def test_user_id(auth_client, user_data, session): id_ = auth_client.username user = User.query.filter_by(external_id=id_).first() resp = auth_client.get(f"/api/studies/?user_id={user.external_id}") - for study in resp.json["results"]: + for study in resp.json()["results"]: assert study["user"] == user.external_id @@ -50,7 +50,7 @@ def test_source_id(auth_client, ingest_neurosynth, session): post = auth_client.post(f"/api/studies/?source_id={study.id}", data={}) get = auth_client.get(f"/api/studies/?source_id={study.id}&nested=true") - assert post.json == get.json["results"][0] + assert post.json() == get.json()["results"][0] @pytest.mark.parametrize("endpoint", ["studies", "base-studies"]) @@ -64,8 +64,8 @@ def test_data_type( get_both = auth_client.get(f"/api/{endpoint}/?data_type=both") assert get_both.status_code == 200 assert ( - len(get_coord.json["results"]) + len(get_img.json["results"]) - == len(get_both.json["results"]) + len(get_coord.json()["results"]) + len(get_img.json()["results"]) + == len(get_both.json()["results"]) != 0 ) @@ -83,7 +83,7 @@ def test_common_queries(auth_client, ingest_neurosynth, session): total_search = auth_client.get(f"/api/studies/?search={study.pmid}") assert pmid_search.status_code == total_search.status_code == 200 - assert len(pmid_search.json["results"]) == len(total_search.json["results"]) + assert len(pmid_search.json()["results"]) == len(total_search.json()["results"]) def test_multiword_queries(auth_client, ingest_neurosynth, session): diff --git a/store/neurostore/tests/api/test_studies.py b/store/neurostore/tests/api/test_studies.py index 7fa51f004..f634db466 100644 --- a/store/neurostore/tests/api/test_studies.py +++ b/store/neurostore/tests/api/test_studies.py @@ -1,6 +1,5 @@ import pytest -from ..request_utils import decode_json from ...models import Studyset, Study, User, Analysis @@ -9,7 +8,7 @@ def test_create_study_as_user_and_analysis_as_bot(auth_clients, session): user_auth_client = next(ac for ac in auth_clients if ac.username == "user1-id") study_resp = user_auth_client.post("/api/studies/", data={"name": "test"}) - study_id = study_resp.json["id"] + study_id = study_resp.json()["id"] bot_auth_client = next(ac for ac in auth_clients if "clients" in ac.username) analysis_resp = bot_auth_client.post( @@ -23,11 
+22,11 @@ def test_get_studies(auth_client, ingest_neurosynth, ingest_neuroquery, session) # List of studies resp = auth_client.get("/api/studies/?nested=true&level=group") assert resp.status_code == 200 - studies_list = decode_json(resp)["results"] + studies_list = resp.json()["results"] assert isinstance(studies_list, list) - assert len(studies_list) == resp.json["metadata"]["total_count"] + assert len(studies_list) == resp.json()["metadata"]["total_count"] # Check study keys study = studies_list[0] @@ -37,7 +36,7 @@ def test_get_studies(auth_client, ingest_neurosynth, ingest_neuroquery, session) # Query specify analysis ID resp = auth_client.get(f"/api/studies/{s_id}") assert resp.status_code == 200 - full_study = decode_json(resp) + full_study = resp.json() # Check extra keys for k in ["analyses", "created_at", "doi", "name"]: @@ -66,14 +65,14 @@ def test_put_studies(auth_client, ingest_neurosynth, data, session): study_entry = Study.query.first() study_clone = auth_client.post( f"/api/studies/?source_id={study_entry.id}", data={} - ).json + ).json() study_clone_id = study_clone["id"] payload = data if payload.get("analyses"): if payload["analyses"][0].get("conditions"): conditions = [] for cond in payload["analyses"][0]["conditions"]: - conditions.append(auth_client.post("/api/conditions/", data=cond).json) + conditions.append(auth_client.post("/api/conditions/", data=cond).json()) payload["analyses"][0]["conditions"] = [ {"id": cond["id"]} for cond in conditions ] @@ -84,13 +83,13 @@ def test_put_studies(auth_client, ingest_neurosynth, data, session): updated_study_entry = Study.query.filter_by(id=study_clone_id).first() - assert put_resp.json["metadata"] == updated_study_entry.metadata_ + assert put_resp.json()["metadata"] == updated_study_entry.metadata_ def test_clone_studies(auth_client, ingest_neurosynth, ingest_neurovault, session): study_entry = Study.query.filter(Study.metadata_.isnot(None)).first() resp = auth_client.post(f"/api/studies/?source_id={study_entry.id}", data={}) - data = resp.json + data = resp.json() assert data["name"] == study_entry.name assert data["source_id"] == study_entry.id assert data["source"] == "neurostore" @@ -100,7 +99,7 @@ def test_clone_studies(auth_client, ingest_neurosynth, ingest_neurovault, sessio # a clone of a clone should reference the original parent resp2 = auth_client.post(f"/api/studies/?source_id={data['id']}", data={}) - data2 = resp2.json + data2 = resp2.json() assert data2["name"] == study_entry.name assert data2["source_id"] == study_entry.id @@ -120,15 +119,15 @@ def test_private_studies(user_data, auth_clients, session): user2 = User.query.filter_by(external_id=id2).first() resp1 = client1.get("/api/studies/") resp2 = client2.get("/api/studies/") - name_set1 = set(s["name"] for s in resp1.json["results"]) - name_set2 = set(s["name"] for s in resp2.json["results"]) - assert len(resp1.json["results"]) == len(resp2.json["results"]) == 4 + name_set1 = set(s["name"] for s in resp1.json()["results"]) + name_set2 = set(s["name"] for s in resp2.json()["results"]) + assert len(resp1.json()["results"]) == len(resp2.json()["results"]) == 4 assert f"{user1.id}'s private study" in (name_set1 - name_set2) assert f"{user2.id}'s private study" in (name_set2 - name_set1) # but users can still access private studies with given link user2_private_study = next( - (s["id"] for s in resp2.json["results"] if "private" in s["name"]) + (s["id"] for s in resp2.json()["results"] if "private" in s["name"]) ) user1_get = 
client1.get(f"/api/studies/{user2_private_study}") @@ -137,7 +136,7 @@ def test_private_studies(user_data, auth_clients, session): def test_post_studies(auth_client, ingest_neurosynth, session): - payload = auth_client.get("/api/analyses/").json["results"] + payload = auth_client.get("/api/analyses/").json()["results"] analyses = [analysis["id"] for analysis in payload] my_study = { "name": "bomb", @@ -160,7 +159,7 @@ def test_delete_studies(auth_client, ingest_neurosynth, session): auth_client.delete(f"/api/studies/{study_db.id}") - for analysis in get.json["analyses"]: + for analysis in get.json()["analyses"]: assert Analysis.query.filter_by(id=analysis).first() is None @@ -179,12 +178,12 @@ def test_getting_studysets_by_owner(auth_clients, user_data, session): non_user_studysets_db = list(set(all_studysets_db) - set(user_studysets_db)) all_studysets = client1.get("/api/studies/") - for study in all_studysets.json["results"]: + for study in all_studysets.json()["results"]: for studyset in study["studysets"]: assert studyset["id"] in [as_db.id for as_db in all_studysets_db] filtered_studysets = client1.get(f"/api/studies/?studyset_owner={id1}") - for study in filtered_studysets.json["results"]: + for study in filtered_studysets.json()["results"]: for studyset in study["studysets"]: assert studyset["id"] in [us_db.id for us_db in user_studysets_db] assert studyset["id"] not in [nus_db.id for nus_db in non_user_studysets_db] @@ -250,5 +249,5 @@ def test_studies_flat(auth_client, ingest_neurosynth, session): assert flat_resp.status_code == reg_resp.status_code == 200 - assert "analyses" not in flat_resp.json["results"][0] - assert "analyses" in reg_resp.json["results"][0] + assert "analyses" not in flat_resp.json()["results"][0] + assert "analyses" in reg_resp.json()["results"][0] diff --git a/store/neurostore/tests/api/test_studysets.py b/store/neurostore/tests/api/test_studysets.py index 9947f1b10..b853f5ade 100644 --- a/store/neurostore/tests/api/test_studysets.py +++ b/store/neurostore/tests/api/test_studysets.py @@ -3,7 +3,7 @@ def test_post_and_get_studysets(auth_client, ingest_neurosynth, session): # create a studyset - payload = auth_client.get("/api/studies/").json + payload = auth_client.get("/api/studies/").json() study_ids = [study["id"] for study in payload["results"]] post_data = { "name": "rock road", @@ -16,13 +16,13 @@ def test_post_and_get_studysets(auth_client, ingest_neurosynth, session): get_resp = auth_client.get("/api/studysets/") assert ( - next(d for d in get_resp.json["results"] if d["name"] == "rock road") - == post_resp.json + next(d for d in get_resp.json()["results"] if d["name"] == "rock road") + == post_resp.json() ) def test_add_study_to_studyset(auth_client, ingest_neurosynth, session): - payload = auth_client.get("/api/studies/").json + payload = auth_client.get("/api/studies/").json() study_ids = [study["id"] for study in payload["results"]] post_data = { "name": "rock road", @@ -32,7 +32,7 @@ def test_add_study_to_studyset(auth_client, ingest_neurosynth, session): post_resp = auth_client.post("/api/studysets/", data=post_data) assert post_resp.status_code == 200 - dset_id = post_resp.json["id"] + dset_id = post_resp.json()["id"] auth_client.get(f"/api/studysets/{dset_id}?nested=true") auth_client.get(f"/api/studysets/{dset_id}?nested=false") @@ -43,7 +43,7 @@ def test_add_study_to_studyset(auth_client, ingest_neurosynth, session): nested_resp = auth_client.get(f"/api/studysets/{dset_id}?nested=true") non_nested_resp = 
auth_client.get(f"/api/studysets/{dset_id}?nested=false") - assert len(nested_resp.json["studies"]) == len(non_nested_resp.json["studies"]) + assert len(nested_resp.json()["studies"]) == len(non_nested_resp.json()["studies"]) def test_get_nested_nonnested_studysets(auth_client, ingest_neurosynth, session): @@ -51,15 +51,15 @@ def test_get_nested_nonnested_studysets(auth_client, ingest_neurosynth, session) non_nested = auth_client.get(f"/api/studysets/{studyset_id}?nested=false") nested = auth_client.get(f"/api/studysets/{studyset_id}?nested=true") - assert isinstance(non_nested.json["studies"][0], str) - assert isinstance(nested.json["studies"][0], dict) + assert isinstance(non_nested.json()["studies"][0], str) + assert isinstance(nested.json()["studies"][0], dict) def test_hot_swap_study_in_studyset(auth_client, ingest_neurosynth, session): # create studyset create_ss = auth_client.post("/api/studysets/", data={"name": "test"}) - ss_test = create_ss.json["id"] + ss_test = create_ss.json()["id"] # cache studyset endpoint auth_client.get(f"/api/studysets/{ss_test}") auth_client.get(f"/api/studysets/{ss_test}?nested=false") @@ -77,16 +77,16 @@ def test_hot_swap_study_in_studyset(auth_client, ingest_neurosynth, session): assert ( set(study_ids) - == set([s for s in add_study.json["studies"]]) - == set([s for s in add_study_non_nested.json["studies"]]) - == set([s["id"] for s in add_study_nested.json["studies"]]) + == set([s for s in add_study.json()["studies"]]) + == set([s for s in add_study_non_nested.json()["studies"]]) + == set([s["id"] for s in add_study_nested.json()["studies"]]) ) # clone study clone_study = auth_client.post(f"/api/studies/?source_id={study_ids[0]}", data={}) assert clone_study.status_code == 200 - clone_study_id = clone_study.json["id"] + clone_study_id = clone_study.json()["id"] new_study_ids = [clone_study_id, study_ids[1]] # swap out cloned study put_resp = auth_client.put( @@ -99,7 +99,7 @@ def test_hot_swap_study_in_studyset(auth_client, ingest_neurosynth, session): assert ( set(new_study_ids) - == set(s for s in clone_ss.json["studies"]) - == set(s for s in clone_ss_non_nested.json["studies"]) - == set(s["id"] for s in clone_ss_nested.json["studies"]) + == set(s for s in clone_ss.json()["studies"]) + == set(s for s in clone_ss_non_nested.json()["studies"]) + == set(s["id"] for s in clone_ss_nested.json()["studies"]) ) diff --git a/store/neurostore/tests/conftest.py b/store/neurostore/tests/conftest.py index 96d3aca0a..8558fd0c7 100644 --- a/store/neurostore/tests/conftest.py +++ b/store/neurostore/tests/conftest.py @@ -140,6 +140,7 @@ def app(mock_auth): "pool_timeout": 5, "pool_size": 0, } + cache.clear() # Establish an application context before running the tests. 
ctx = _app.app_context() diff --git a/store/neurostore/tests/request_utils.py b/store/neurostore/tests/request_utils.py index 81b1f0b06..2730893c1 100644 --- a/store/neurostore/tests/request_utils.py +++ b/store/neurostore/tests/request_utils.py @@ -5,10 +5,10 @@ class Client(object): def __init__(self, token, test_client=None, prepend="", username=None): if test_client is None: - from ..core import app + from ..core import connexion_app as app if not getattr(app, "test_client", None): - app = app._app + app = app.app._app test_client = app.test_client() self.client_flask = True else: @@ -42,18 +42,23 @@ def _make_request( if content_type is None: content_type = "application/json" + headers["Content-Type"] = content_type + route = self.prepend + route if self.client_flask: + kwargs = { + "headers": headers, + "params": params, + } + if data is not None and json_dump is True: data = json.dumps(data) + kwargs['data'] = data return request_function( route, - data=data, - headers=headers, - content_type=content_type, - query_string=params, + **kwargs, ) else: return request_function(route, json=data, headers=headers, params=params)
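
The bulk of the test churn above comes from the new test client: connexion 3 runs the app as ASGI, and FlaskApp.test_client() is assumed to return a Starlette/httpx-style TestClient whose responses expose json() as a method rather than Werkzeug's json property (hence decode_json is dropped and every .json becomes .json()). A minimal sketch of the difference, reusing the connexion_app object and the /api/studies/ route from this patch rather than introducing any new API:

    # Hedged sketch: assumes connexion 3's FlaskApp.test_client() returns a
    # Starlette/httpx TestClient, as the updated request_utils.py and tests imply.
    from neurostore.core import connexion_app

    client = connexion_app.test_client()
    resp = client.get("/api/studies/")

    assert resp.status_code == 200
    results = resp.json()["results"]    # connexion 3 / httpx: json() is a method call
    # results = resp.json["results"]    # connexion 2 / Werkzeug: json was a property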