fix cors issue (#611)
* fix cors issue

* style change with black

* do not test options in compose
jdkent authored Nov 3, 2023
1 parent cd943a0 commit 007a84c
Showing 15 changed files with 71 additions and 57 deletions.
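The substantive change is in store/neurostore/core.py: with the move to connexion 3 (an ASGI app whose routing happens in middleware before a request ever reaches Flask), the Flask-level flask-cors extension can no longer reliably answer preflight OPTIONS requests, so CORS is now handled by Starlette's CORSMiddleware registered before routing. A minimal standalone sketch of that pattern (not the full Neurostore app; the spec filename below is a placeholder):

```python
# Sketch of the CORS setup this commit introduces (connexion 3 + starlette).
# "example-openapi.yml" is a placeholder spec, not Neurostore's actual file.
import connexion
from connexion.middleware import MiddlewarePosition
from starlette.middleware.cors import CORSMiddleware

connexion_app = connexion.FlaskApp(__name__, specification_dir="openapi/")

# Register CORS before routing so preflight OPTIONS requests are answered
# even for paths whose spec only defines GET/POST/PUT operations.
connexion_app.add_middleware(
    CORSMiddleware,
    position=MiddlewarePosition.BEFORE_ROUTING,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

connexion_app.add_api("example-openapi.yml", base_path="/api")
```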
2 changes: 1 addition & 1 deletion store/manage.py
@@ -10,7 +10,7 @@
 from neurostore import ingest
 from neurostore import models

-if not getattr(app, 'config', None):
+if not getattr(app, "config", None):
     app = app._app

 app.config.from_object(os.environ["APP_SETTINGS"])
20 changes: 14 additions & 6 deletions store/neurostore/core.py
@@ -2,21 +2,21 @@
 from pathlib import Path
 from werkzeug.middleware.profiler import ProfilerMiddleware

+from connexion.middleware import MiddlewarePosition
+from starlette.middleware.cors import CORSMiddleware
 from authlib.integrations.flask_client import OAuth
 import connexion

 # from connexion.json_schema import default_handlers as json_schema_handlers
 from connexion.resolver import MethodResolver
 from flask_caching import Cache
-from flask_cors import CORS
 import sqltap.wsgi

 from .or_json import ORJSONDecoder, ORJSONEncoder
 from .database import init_db


-connexion_app = connexion.FlaskApp(
-    __name__, specification_dir="openapi/"
-)
+connexion_app = connexion.FlaskApp(__name__, specification_dir="openapi/")

 app = connexion_app.app

@@ -35,6 +35,16 @@

 openapi_file = Path(os.path.dirname(__file__) + "/openapi/neurostore-openapi.yml")

+# Enable CORS
+connexion_app.add_middleware(
+    CORSMiddleware,
+    position=MiddlewarePosition.BEFORE_ROUTING,
+    allow_origins=["*"],
+    allow_credentials=True,
+    allow_methods=["*"],
+    allow_headers=["*"],
+)
+
 connexion_app.add_api(
     openapi_file,
     base_path="/api",
@@ -45,8 +55,6 @@
     validate_responses=os.getenv("DEBUG", False) == "True",
 )

-# Enable CORS
-cors = CORS(app)

 auth0 = oauth.register(
     "auth0",
3 changes: 1 addition & 2 deletions store/neurostore/requirements.txt
@@ -1,12 +1,11 @@
 aniso8601~=8.1
 auth0-python~=3.16
 authlib~=0.15
-connexion[swagger-ui,uvicorn,flask]==3.0.0a7
+connexion[swagger-ui,uvicorn,flask]==3.0.0
 email-validator~=1.1
 flake8~=3.8
 flask~=2.0
 flask-caching~=2.0
-flask-cors~=3.0
 flask-dance~=3.2
 flask-shell-ipython~=0.4
 flask-migrate~=2.5
10 changes: 7 additions & 3 deletions store/neurostore/resources/base.py
@@ -304,9 +304,13 @@ def get(self, id):
             snapshot = StudysetSnapshot()
             return snapshot.dump(record), 200, {"Content-Type": "application/json"}
         else:
-            return self._schema(
-                context=dict(args),
-            ).dump(record), 200, {"Content-Type": "application/json"}
+            return (
+                self._schema(
+                    context=dict(args),
+                ).dump(record),
+                200,
+                {"Content-Type": "application/json"},
+            )

     def put(self, id):
         request_data = self.insert_data(id, request.json)
6 changes: 2 additions & 4 deletions store/neurostore/resources/data.py
@@ -218,9 +218,7 @@ def post(self):
         to_commit = []
         for study_data in data:
             filter_params = {
-                k: study_data.get(k)
-                for k in search_keys
-                if study_data.get(k)
+                k: study_data.get(k) for k in search_keys if study_data.get(k)
             }
             if "name" in filter_params and (set(filter_params) - {"name"}) != set():
                 del filter_params["name"]
@@ -408,7 +406,7 @@ def pre_nested_record_update(record):
             has_pmid = True
         if record.name and not record.doi and not record.pmid:
             name_search = func.regexp_replace(
-                record.name, r'[' + string.punctuation + ']', '', 'g'
+                record.name, r"[" + string.punctuation + "]", "", "g"
             )
             query = query.filter(BaseStudy.name.ilike(f"%{name_search}%"))
             has_name = True
4 changes: 1 addition & 3 deletions store/neurostore/resources/utils.py
@@ -18,9 +18,7 @@ def camel_case_split(str):
 def get_current_user():
     user = context.get("user")
     if user:
-        return models.User.query.filter_by(
-            external_id=user
-        ).first()
+        return models.User.query.filter_by(external_id=user).first()
     return None

4 changes: 3 additions & 1 deletion store/neurostore/schemas/auth.py
@@ -8,7 +8,9 @@

 class UserSchema(BaseSchema):
     name = fields.Str(metadata={"description": "User full name"})
-    external_id = fields.Str(metadata={"description": "External authentication service user ID"})
+    external_id = fields.Str(
+        metadata={"description": "External authentication service user ID"}
+    )

     class Meta:
         unknown = EXCLUDE
29 changes: 8 additions & 21 deletions store/neurostore/schemas/data.py
@@ -330,7 +330,9 @@ class StudySchema(BaseDataSchema):
         dump_only=True, metadata={"db_only": True}, allow_none=True
     )
     studysets = fields.Pluck("StudysetSchema", "_id", many=True, dump_only=True)
-    base_study = fields.Pluck("BaseStudySchema", "_id", dump_only=True, metadata={"db_only": True})
+    base_study = fields.Pluck(
+        "BaseStudySchema", "_id", dump_only=True, metadata={"db_only": True}
+    )
     has_coordinates = fields.Bool(dump_only=True)
     has_images = fields.Bool(dump_only=True)
     # studysets = fields.Nested(
@@ -569,10 +571,7 @@ def dump(self, p):
             "space": p.space,
             "image": p.image,
             "label_id": p.label_id,
-            "values": [
-                v_schema.dump(v)
-                for v in p.values
-            ],
+            "values": [v_schema.dump(v) for v in p.values],
         }


@@ -596,19 +595,10 @@ def dump(self, a):
             "user": a.user_id,
             "name": a.name,
             "description": a.description,
-            "conditions": [
-                ac_schema.dump(ac)
-                for ac in a.analysis_conditions
-            ],
+            "conditions": [ac_schema.dump(ac) for ac in a.analysis_conditions],
             "weights": list(a.weights),
-            "points": [
-                p_schema.dump(p)
-                for p in a.points
-            ],
-            "images": [
-                i_schema.dump(i)
-                for i in a.images
-            ],
+            "points": [p_schema.dump(p) for p in a.points],
+            "images": [i_schema.dump(i) for i in a.images],
         }


@@ -648,8 +638,5 @@ def dump(self, studyset):
             "pmid": studyset.pmid,
             "created_at": self._serialize_dt(studyset.created_at),
             "updated_at": self._serialize_dt(studyset.updated_at),
-            "studies": [
-                s_schema.dump(s)
-                for s in studyset.studies
-            ],
+            "studies": [s_schema.dump(s) for s in studyset.studies],
         }
4 changes: 3 additions & 1 deletion store/neurostore/tests/api/test_annotations.py
@@ -303,7 +303,9 @@ def test_correct_note_overwrite(auth_client, ingest_neurosynth, session):
     new_value = "something new"
     data[0]["note"]["doo"] = new_value
     doo_payload = {"notes": data}
-    put_resp = auth_client.put(f"/api/annotations/{annot.json()['id']}", data=doo_payload)
+    put_resp = auth_client.put(
+        f"/api/annotations/{annot.json()['id']}", data=doo_payload
+    )

     get_resp = auth_client.get(f"/api/annotations/{annot.json()['id']}")

8 changes: 8 additions & 0 deletions store/neurostore/tests/api/test_crud.py
@@ -87,6 +87,14 @@ def test_read(auth_client, user_data, endpoint, model, schema, session):

     expected_results = model.query.filter(query).all()

+    pre = auth_client.client.options(
+        f"/api/{endpoint}",
+        headers={
+            "Origin": "http://example.com",
+            "Access-Control-Request-Method": "GET",
+        },
+    )
+    assert pre.status_code == 200
     resp = auth_client.get(f"/api/{endpoint}/")

     assert resp.status_code == 200
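The new assertions above send a CORS preflight (an OPTIONS request carrying Origin and Access-Control-Request-Method headers) through the test client and expect a 200 now that the middleware sits in front of routing. Outside the pytest fixtures, roughly the same check might look like the sketch below; it assumes the connexion_app object from store/neurostore/core.py is importable with a configured environment, that connexion 3's Starlette-backed test_client() is available, and that /api/studies/ is a valid endpoint.

```python
# Hypothetical standalone preflight check, mirroring the new test above.
from neurostore.core import connexion_app  # assumes app config/env is set up

client = connexion_app.test_client()  # connexion 3: a Starlette TestClient
resp = client.options(
    "/api/studies/",
    headers={
        "Origin": "http://example.com",
        "Access-Control-Request-Method": "GET",
    },
)
assert resp.status_code == 200
assert "access-control-allow-origin" in resp.headers  # set by CORSMiddleware
```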
4 changes: 3 additions & 1 deletion store/neurostore/tests/api/test_studies.py
@@ -72,7 +72,9 @@ def test_put_studies(auth_client, ingest_neurosynth, data, session):
     if payload["analyses"][0].get("conditions"):
         conditions = []
         for cond in payload["analyses"][0]["conditions"]:
-            conditions.append(auth_client.post("/api/conditions/", data=cond).json())
+            conditions.append(
+                auth_client.post("/api/conditions/", data=cond).json()
+            )
         payload["analyses"][0]["conditions"] = [
             {"id": cond["id"]} for cond in conditions
         ]
2 changes: 1 addition & 1 deletion store/neurostore/tests/request_utils.py
@@ -54,7 +54,7 @@ def _make_request(

        if data is not None and json_dump is True:
            data = json.dumps(data)
-            kwargs['data'] = data
+            kwargs["data"] = data

        return request_function(
            route,
4 changes: 3 additions & 1 deletion store/scripts/ace_ingestion.py
@@ -4,6 +4,8 @@
 metadata_file = "/ace/no-pubmed-central-july-11-2023/metadata.csv"
 text_file = "/ace/no-pubmed-central-july-11-2023/text.csv"

-coordinates_df, metadata_df, text_df = load_ace_files(coordinates_file, metadata_file, text_file)
+coordinates_df, metadata_df, text_df = load_ace_files(
+    coordinates_file, metadata_file, text_file
+)

 ace_ingestion_logic(coordinates_df, metadata_df, text_df)
22 changes: 13 additions & 9 deletions store/scripts/create_neurostore_studyset.py
@@ -2,9 +2,11 @@
 from sqlalchemy.orm import joinedload


-base_studies = BaseStudy.query.options(
-    joinedload("versions")
-).filter_by(has_coordinates=True).all()
+base_studies = (
+    BaseStudy.query.options(joinedload("versions"))
+    .filter_by(has_coordinates=True)
+    .all()
+)

 neurostore_studyset = []
 for bs in base_studies:
@@ -20,12 +22,14 @@
             if selected_study.user is None:
                 selected_study = v
             else:
-                if (
-                    selected_study.updated_at or selected_study.created_at
-                ) <= (v.updated_at or v.created_at):
+                if (selected_study.updated_at or selected_study.created_at) <= (
+                    v.updated_at or v.created_at
+                ):
                     selected_study = v
     neurostore_studyset.append(selected_study)

-ss = Studyset(name="Neurostore Studyset", description="aggregation of studies on the neurostore database. Ran periodically, may not represent the latest state of the database.", studies=neurostore_studyset)
+
+
+ss = Studyset(
+    name="Neurostore Studyset",
+    description="aggregation of studies on the neurostore database. Ran periodically, may not represent the latest state of the database.",
+    studies=neurostore_studyset,
+)
6 changes: 3 additions & 3 deletions store/scripts/load_testing/locustfile.py
@@ -6,7 +6,7 @@
 with open("concepts.json", "r") as c_j:
     concepts = json.load(c_j)

-CONCEPT_NAMES = [c['name'] for c in concepts]
+CONCEPT_NAMES = [c["name"] for c in concepts]


 class QuickstartUser(HttpUser):
@@ -24,12 +24,12 @@ def view_studyset_and_studies(self):
         result = self.client.get("/studysets/?page_size=100")
         if result.status_code != 200:
             return
-        studyset_ids = [ss['id'] for ss in result.json()['results']]
+        studyset_ids = [ss["id"] for ss in result.json()["results"]]
         studyset_id = random.choice(studyset_ids)
         studyset = self.client.get(f"/studysets/{studyset_id}")
         if studyset.status_code != 200:
             return
-        study_ids = [s for s in studyset.json()['studies']]
+        study_ids = [s for s in studyset.json()["studies"]]
         for study_id in study_ids:
             self.client.get(f"/studies/{study_id}?nested=true")

