Skip to content

Commit

Permalink
update connexion to 3.0.0a7 (pre-release)
Browse files Browse the repository at this point in the history
  • Loading branch information
jdkent committed Oct 24, 2023
1 parent d59a5c6 commit f15211c
Show file tree
Hide file tree
Showing 19 changed files with 264 additions and 248 deletions.
26 changes: 5 additions & 21 deletions store/neurostore/core.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,19 +4,18 @@

from authlib.integrations.flask_client import OAuth
import connexion
from connexion.json_schema import default_handlers as json_schema_handlers
from connexion.resolver import MethodViewResolver
# from connexion.json_schema import default_handlers as json_schema_handlers
from connexion.resolver import MethodResolver
from flask_caching import Cache
from flask_cors import CORS
import prance
import sqltap.wsgi

from .or_json import ORJSONDecoder, ORJSONEncoder
from .database import init_db


connexion_app = connexion.FlaskApp(
__name__, specification_dir="openapi/", debug=os.getenv("DEBUG", False) == "True"
__name__, specification_dir="openapi/"
)

app = connexion_app.app
Expand All @@ -34,29 +33,14 @@

options = {"swagger_ui": True}

# https://github.com/spec-first/connexion/issues/254#issuecomment-1133843523
json_schema_handlers[""] = lambda uri: (
json_schema_handlers["file"](str(connexion_app.specification_dir / uri))
)


# https://github.com/spec-first/connexion/issues/254#issuecomment-504699959
def get_bundled_specs(main_file):
parser = prance.ResolvingParser(
str(main_file.absolute()), lazy=True, backend="openapi-spec-validator"
)
parser.parse()
return parser.specification


openapi_file = Path(os.path.dirname(__file__) + "/openapi/neurostore-openapi.yml")

connexion_app.add_api(
get_bundled_specs(openapi_file),
openapi_file,
base_path="/api",
options=options,
arguments={"title": "NeuroStore API"},
resolver=MethodViewResolver("neurostore.resources"),
resolver=MethodResolver("neurostore.resources"),
strict_validation=os.getenv("DEBUG", False) == "True",
validate_responses=os.getenv("DEBUG", False) == "True",
)
Expand Down
2 changes: 1 addition & 1 deletion store/neurostore/openapi
Submodule openapi updated 1 files
+13 −9 neurostore-openapi.yml
3 changes: 1 addition & 2 deletions store/neurostore/requirements.txt
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
aniso8601~=8.1
auth0-python~=3.16
authlib~=0.15
connexion[swagger-ui]~=2.7
connexion[swagger-ui,uvicorn]==3.0.0a7
email-validator~=1.1
flake8~=3.8
flask~=2.0
Expand Down Expand Up @@ -31,4 +31,3 @@ openapi-spec-validator~=0.3
scipy~=1.9
pytest~=7.1
orjson~=3.8
prance
4 changes: 2 additions & 2 deletions store/neurostore/resources/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"""
import re

import connexion
from connexion.context import context
from flask import abort, request, current_app # jsonify
from flask.views import MethodView

Expand Down Expand Up @@ -45,7 +45,7 @@ def create_user():
# user signed up with auth0, but has not made any queries yet...
# should have endpoint to "create user" after sign on with auth0
current_user = User(
external_id=connexion.context["user"], name=profile_info.get("name", "Unknown")
external_id=context["user"], name=profile_info.get("name", "Unknown")
)

return current_user
Expand Down
28 changes: 14 additions & 14 deletions store/neurostore/resources/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,13 +44,13 @@
]

LIST_CLONE_ARGS = {
"source_id": fields.String(missing=None),
"source": fields.String(missing=None),
"unique": BooleanOrString(missing=False),
"source_id": fields.String(load_default=None),
"source": fields.String(load_default=None),
"unique": BooleanOrString(load_default=False),
}

LIST_NESTED_ARGS = {
"nested": fields.Boolean(load_default=False, missing=False),
"nested": fields.Boolean(load_default=False),
}


Expand Down Expand Up @@ -91,7 +91,7 @@ def serialize_records(self, records, args):

@view_maker
class AnnotationsView(ObjectView, ListView):
_view_fields = {**LIST_CLONE_ARGS, "studyset_id": fields.String(missing=None)}
_view_fields = {**LIST_CLONE_ARGS, "studyset_id": fields.String(load_default=None)}
_nested = {"annotation_analyses": "AnnotationAnalysesResource"}
_linked = {
"studyset": "StudysetsView",
Expand Down Expand Up @@ -154,10 +154,10 @@ class BaseStudiesView(ObjectView, ListView):
_nested = {"versions": "StudiesView"}

_view_fields = {
"level": fields.String(default="group", missing="group"),
"flat": fields.Boolean(load_default=False, missing=False),
"info": fields.Boolean(load_default=False, missing=False),
"data_type": fields.String(missing=None),
"level": fields.String(default="group", load_default="group"),
"flat": fields.Boolean(load_default=False),
"info": fields.Boolean(load_default=False),
"data_type": fields.String(load_default=None),
}

_multi_search = ("name", "description")
Expand Down Expand Up @@ -272,11 +272,11 @@ def post(self):
class StudiesView(ObjectView, ListView):
_view_fields = {
**{
"data_type": fields.String(missing=None),
"studyset_owner": fields.String(missing=None),
"level": fields.String(default="group", missing="group"),
"flat": fields.Boolean(load_default=False, missing=False),
"info": fields.Boolean(load_default=False, missing=False),
"data_type": fields.String(load_default=None),
"studyset_owner": fields.String(load_default=None),
"level": fields.String(default="group", load_default="group"),
"flat": fields.Boolean(load_default=False),
"info": fields.Boolean(load_default=False),
},
**LIST_NESTED_ARGS,
**LIST_CLONE_ARGS,
Expand Down
6 changes: 3 additions & 3 deletions store/neurostore/resources/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
"""
import re

import connexion
from connexion.context import context

from .. import models
from .. import schemas
Expand All @@ -16,10 +16,10 @@ def camel_case_split(str):


def get_current_user():
    """Return the User model for the authenticated request, or None.

    Reads the user identifier that Connexion's security handler placed on
    the request ``context`` and looks up the matching ``User`` row.

    Returns:
        models.User | None: the matching user, or None when the request is
        unauthenticated or no user row exists yet.
    """
    user = context.get("user")
    if not user:
        # Unauthenticated request: no context entry to look up.
        return None
    # Reuse the already-fetched id instead of re-reading the context.
    return models.User.query.filter_by(external_id=user).first()

Expand Down
185 changes: 108 additions & 77 deletions store/neurostore/schemas/data.py
Original file line number Diff line number Diff line change
Expand Up @@ -524,14 +524,120 @@ class JSONLSAnalysisSchema(AnalysisSchema):
study = fields.Function(lambda analysis: analysis.study.IRI, dump_only=True)


class BaseSnapshot(object):
    """Shared helpers for the lightweight snapshot serializers.

    Subclasses implement ``dump(resource)`` returning a plain dict; this
    base class supplies datetime formatting and JSON serialization.
    """

    # NOTE: the previous no-op ``__init__`` was removed — object's default
    # initializer is sufficient and subclasses define no state.

    def _serialize_dt(self, dt):
        """Return ``dt`` as an ISO-8601 string, passing falsy values through."""
        return dt.isoformat() if dt else dt

    def serialize(self, resource_dict):
        """Serialize a dumped dict to JSON. orjson.dumps returns *bytes*."""
        return orjson.dumps(resource_dict)

    def dump_and_serialize(self, resource):
        """Convenience wrapper: ``dump`` to a dict, then ``serialize`` it."""
        return self.serialize(self.dump(resource))

class ImageSnapshot(BaseSnapshot):
    """Render an Image record as a plain dict for snapshotting."""

    def dump(self, i):
        # NOTE(review): the pre-refactor inline dict also carried the parent
        # "analysis" and "analysis_name" keys — confirm dropping them here
        # was intentional.
        snapshot = dict(id=i.id, user=i.user_id)
        snapshot["url"] = i.url
        snapshot["space"] = i.space
        snapshot["value_type"] = i.value_type
        snapshot["filename"] = i.filename
        snapshot["add_date"] = i.add_date
        return snapshot


class PointValueSnapshot(BaseSnapshot):
    """Render a point's associated value (kind/value pair) as a plain dict."""

    def dump(self, v):
        # Only the kind and value are captured for snapshots.
        return dict(kind=v.kind, value=v.value)


class PointSnapshot(BaseSnapshot):
    """Render a Point record, including its nested values, as a plain dict."""

    def dump(self, p):
        # One reusable serializer for every value attached to this point.
        value_schema = PointValueSnapshot()
        dumped_values = [value_schema.dump(v) for v in p.values]
        # NOTE(review): the pre-refactor inline dict also included the parent
        # "analysis" id — confirm dropping it here was intentional.
        return {
            "id": p.id,
            "coordinates": p.coordinates,
            "kind": p.kind,
            "space": p.space,
            "image": p.image,
            "label_id": p.label_id,
            "values": dumped_values,
        }


class ConditionSnapshot(BaseSnapshot):
    """Render an analysis-condition link as a plain dict.

    The name/description/user come from the linked Condition row, while the
    id is the association's ``condition_id``.
    """

    def dump(self, ac):
        condition = ac.condition
        snapshot = {"id": ac.condition_id}
        snapshot["user"] = condition.user_id
        snapshot["name"] = condition.name
        snapshot["description"] = condition.description
        return snapshot


class AnalysisSnapshot(BaseSnapshot):
    """Render an Analysis with its nested conditions, points, and images."""

    def dump(self, a):
        # One serializer instance per nested collection, reused across items.
        condition_schema = ConditionSnapshot()
        point_schema = PointSnapshot()
        image_schema = ImageSnapshot()
        # NOTE(review): the pre-refactor inline dict also included the parent
        # "study" id on each analysis — confirm dropping it was intentional.
        return {
            "id": a.id,
            "user": a.user_id,
            "name": a.name,
            "description": a.description,
            "conditions": [
                condition_schema.dump(ac) for ac in a.analysis_conditions
            ],
            "weights": list(a.weights),
            "points": [point_schema.dump(p) for p in a.points],
            "images": [image_schema.dump(i) for i in a.images],
        }


class StudySnapshot(BaseSnapshot):
    """Render a Study and all of its analyses as a plain dict."""

    def dump(self, s):
        analysis_schema = AnalysisSnapshot()
        snapshot = {
            "id": s.id,
            "created_at": self._serialize_dt(s.created_at),
            "updated_at": self._serialize_dt(s.updated_at),
            "user": s.user_id,
            "name": s.name,
            "description": s.description,
            "publication": s.publication,
            "doi": s.doi,
            "pmid": s.pmid,
            "authors": s.authors,
            "year": s.year,
            # model attribute is "metadata_" — presumably to avoid a reserved
            # attribute name; confirm against the model definition.
            "metadata": s.metadata_,
            "source": s.source,
            "source_id": s.source_id,
            "source_updated_at": self._serialize_dt(s.source_updated_at),
        }
        snapshot["analyses"] = [analysis_schema.dump(a) for a in s.analyses]
        return snapshot


class StudysetSnapshot(BaseSnapshot):
def dump(self, studyset):
s_schema = StudySnapshot()
return {
"id": studyset.id,
"name": studyset.name,
Expand All @@ -543,82 +649,7 @@ def dump(self, studyset):
"created_at": self._serialize_dt(studyset.created_at),
"updated_at": self._serialize_dt(studyset.updated_at),
"studies": [
{
"id": s.id,
"created_at": self._serialize_dt(s.created_at),
"updated_at": self._serialize_dt(s.updated_at),
"user": s.user_id,
"name": s.name,
"description": s.description,
"publication": s.publication,
"doi": s.doi,
"pmid": s.pmid,
"authors": s.authors,
"year": s.year,
"metadata": s.metadata_,
"source": s.source,
"source_id": s.source_id,
"source_updated_at": self._serialize_dt(s.source_updated_at),
"analyses": [
{
"id": a.id,
"user": a.user_id,
"study": s.id,
"name": a.name,
"description": a.description,
"conditions": [
{
"id": ac.condition_id,
"user": ac.condition.user_id,
"name": ac.condition.name,
"description": ac.condition.description,
}
for ac in a.analysis_conditions
],
"weights": list(a.weights),
"points": [
{
"id": p.id,
"coordinates": p.coordinates,
"analysis": a.id,
"kind": p.kind,
"space": p.space,
"image": p.image,
"label_id": p.label_id,
"values": [
{
"kind": v.kind,
"value": v.value,
}
for v in p.values
],
}
for p in a.points
],
"images": [
{
"id": i.id,
"user": i.user_id,
"analysis": a.id,
"analysis_name": a.name,
"url": i.url,
"space": i.space,
"value_type": i.value_type,
"filename": i.filename,
"add_date": i.add_date,
}
for i in a.images
],
}
for a in s.analyses
],
}
s_schema.dump(s)
for s in studyset.studies
],
}

def serialize(self, studyset_dict):
return orjson.dumps(studyset_dict)

def dump_and_serialize(self, studyset):
return self.serialize(self.dump(studyset))
Loading

0 comments on commit f15211c

Please sign in to comment.