Commit

wip: cache/search studysets
jdkent committed Aug 18, 2023
1 parent a54f50b commit bedad2c
Showing 6 changed files with 112 additions and 14 deletions.
2 changes: 2 additions & 0 deletions store/neurostore/models/data.py
@@ -129,6 +129,8 @@ class BaseStudy(BaseMixin, db.Model):
public = db.Column(db.Boolean, default=True)
level = db.Column(db.String)
metadata_ = db.Column(JSONB)
has_coordinates = db.Column(db.Boolean, default=False, nullable=False)
has_images = db.Column(db.Boolean, default=False, nullable=False)
user_id = db.Column(db.Text, db.ForeignKey("users.external_id"), index=True)
__ts_vector__ = db.Column(
TSVector(),
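Note: the two new Boolean columns would also need a companion Alembic migration, which is not included in this commit. A minimal sketch, assuming the table is named base_studies; the revision identifiers are placeholders:

from alembic import op
import sqlalchemy as sa

# placeholder revision identifiers (hypothetical, not taken from this commit)
revision = "xxxx_add_base_study_flags"
down_revision = "yyyy_previous_revision"
branch_labels = None
depends_on = None


def upgrade():
    # server_default backfills existing rows with false so NOT NULL can be applied
    op.add_column(
        "base_studies",
        sa.Column("has_coordinates", sa.Boolean(), nullable=False, server_default=sa.text("false")),
    )
    op.add_column(
        "base_studies",
        sa.Column("has_images", sa.Boolean(), nullable=False, server_default=sa.text("false")),
    )


def downgrade():
    op.drop_column("base_studies", "has_images")
    op.drop_column("base_studies", "has_coordinates")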
41 changes: 40 additions & 1 deletion store/neurostore/models/event_listeners.py
@@ -1,6 +1,8 @@
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import event
from .data import AnnotationAnalysis, Annotation, Studyset, Study, _check_type
from sqlalchemy.orm.attributes import OP_REMOVE
from .data import (
    AnnotationAnalysis, Annotation, Studyset, Study, Analysis, Point, Image, _check_type
)
from ..database import db


@@ -119,3 +121,40 @@ def add_annotation_analyses_study(study, analyses, collection_adapter):
event.listen(Studyset.studies, "bulk_replace", add_annotation_analyses_studyset)

event.listen(Study.analyses, "bulk_replace", add_annotation_analyses_study)


# Event listener to keep the BaseStudy coordinate/image flags in sync
@event.listens_for(Analysis.points, 'append')
@event.listens_for(Analysis.points, 'remove')
@event.listens_for(Analysis.images, 'append')
@event.listens_for(Analysis.images, 'remove')
def update_base_study_flags(target, value, initiator):
    """Recompute has_coordinates/has_images on the BaseStudy that owns ``target``."""
    base_study = getattr(getattr(target, 'study', None), 'base_study', None)
    updated = False
    if base_study is not None:
        # collection events fire before the collection itself is mutated, so
        # count ``value`` directly unless it is being removed or deleted
        removing = initiator == 'DELETE' or getattr(initiator, 'op', None) is OP_REMOVE
        base_study.has_coordinates = (not removing and isinstance(value, Point)) or any(
            point is not value
            for study in base_study.versions
            for analysis in study.analyses
            for point in analysis.points
        )
        base_study.has_images = (not removing and isinstance(value, Image)) or any(
            image is not value
            for study in base_study.versions
            for analysis in study.analyses
            for image in analysis.images
        )
        db.session.add(base_study)
        updated = True

    return updated


@event.listens_for(db.session, 'before_flush')
def update_base_study_flags_item_delete(session, flush_context, instances):
    # deleting a Point or Image directly does not fire a collection 'remove'
    # event, so recompute the flags for the parent analysis here; changes made
    # in before_flush are picked up by the same flush, so no extra commit is needed
    for obj in session.deleted:
        if isinstance(obj, (Point, Image)):
            update_base_study_flags(obj.analysis, obj, 'DELETE')
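Note: the listeners above only react to changes made after deployment; BaseStudy rows that already exist keep the default False flags until their points or images are touched. A hedged backfill sketch, assuming it is run once inside the Flask application context (the function name is illustrative, not part of this commit):

from neurostore.database import db
from neurostore.models import BaseStudy


def backfill_base_study_flags():
    # recompute has_coordinates/has_images for every existing BaseStudy
    for base_study in BaseStudy.query.all():
        base_study.has_coordinates = any(
            analysis.points
            for study in base_study.versions
            for analysis in study.analyses
        )
        base_study.has_images = any(
            analysis.images
            for study in base_study.versions
            for analysis in study.analyses
        )
        db.session.add(base_study)
    db.session.commit()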
8 changes: 8 additions & 0 deletions store/neurostore/resources/base.py
@@ -27,6 +27,9 @@
BaseStudy,
User,
Annotation,
Image,
Point,
Analysis,
)
from ..schemas.data import StudysetSnapshot
from . import data as viewdata
@@ -146,6 +149,11 @@ def update_or_create(cls, data, id=None, commit=True):

if k not in cls._nested and k not in ["id", "user"]:
try:
# preload images and points so they are already available to the event
# listeners; the relationships cannot be lazily loaded while the events fire
if isinstance(record, (Image, Point)) and isinstance(v, Analysis):
v.images
v.points
setattr(record, k, v)
except AttributeError:
print(k)
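Note: touching v.images and v.points here issues the lazy loads up front. An alternative sketch would be to eager-load both collections when the Analysis is queried, so nothing needs loading while the listeners run; the surrounding query and the analysis_id variable are assumptions, not part of this diff:

from sqlalchemy.orm import selectinload

analysis = (
    Analysis.query
    .options(selectinload(Analysis.points), selectinload(Analysis.images))
    .filter_by(id=analysis_id)
    .one()
)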
2 changes: 2 additions & 0 deletions store/neurostore/resources/data.py
@@ -398,6 +398,8 @@ class ImagesView(ObjectView, ListView):
}
_search_fields = ("filename", "space", "value_type", "analysis_name")

def custom_record_update(record):
pass

@view_maker
class PointsView(ObjectView, ListView):
13 changes: 0 additions & 13 deletions store/neurostore/schemas/data.py
@@ -529,8 +529,6 @@ def dump(self, studyset):
"analyses": [
{
"id": a.id,
"created_at": self._serialize_dt(a.created_at),
"updated_at": self._serialize_dt(a.updated_at),
"user": a.user_id,
"study": s.id,
"name": a.name,
@@ -541,22 +539,13 @@
"user": ac.condition.user_id,
"name": ac.condition.name,
"description": ac.condition.description,
"created_at": self._serialize_dt(
ac.condition.created_at
),
"updated_at": self._serialize_dt(
ac.condition.updated_at
),
}
for ac in a.analysis_conditions
],
"weights": list(a.weights),
"points": [
{
"id": p.id,
"created_at": self._serialize_dt(p.created_at),
"updated_at": self._serialize_dt(p.updated_at),
"user": p.user_id,
"coordinates": p.coordinates,
"analysis": a.id,
"kind": p.kind,
@@ -576,8 +565,6 @@
"images": [
{
"id": i.id,
"created_at": self._serialize_dt(i.created_at),
"updated_at": self._serialize_dt(i.updated_at),
"user": i.user_id,
"analysis": a.id,
"analysis_name": a.name,
60 changes: 60 additions & 0 deletions store/neurostore/tests/api/test_base_studies.py
@@ -1,4 +1,5 @@
"""Test Base Study Endpoint"""
from neurostore.models import BaseStudy


def test_flat_base_study(auth_client, ingest_neurosynth, session):
@@ -17,3 +18,62 @@ def test_info_base_study(auth_client, ingest_neurosynth, session):
assert info_resp.status_code == 200

assert "updated_at" in info_resp.json["results"][0]["versions"][0]


def test_has_coordinates_images(auth_client, session):
# create an empty study
doi = "abcd"
pmid = "1234"
create_study = auth_client.post(
"/api/studies/",
data={
"name": "yeah",
"pmid": "1234",
"doi": "abcd",
"analyses": [
{
"name": "is this it?",
},
],
},
)
assert create_study.status_code == 200
# get base study
base_study = BaseStudy.query.filter_by(doi=doi, pmid=pmid).one()
assert base_study.has_coordinates is False
assert base_study.has_images is False

# get the analysis
analysis_id = create_study.json["analyses"][0]

# update analysis with points
analysis_point = auth_client.put(
f"/api/analyses/{analysis_id}", data={"points": [{"x": 1, "y": 2, "z": 3}]}
)

assert analysis_point.status_code == 200
assert base_study.has_coordinates is True

# update analysis with an image
analysis_image = auth_client.put(
f"/api/analyses/{analysis_id}", data={"images": [{"filename": "my_fake_image.nii.gz"}]}
)

assert analysis_image.status_code == 200
assert base_study.has_images is True

# delete point
point_id = analysis_point.json["points"][0]

del_point = auth_client.delete(f"/api/points/{point_id}")

assert del_point.status_code == 200
assert base_study.has_coordinates is False

# delete image
image_id = analysis_image.json["images"][0]

del_image = auth_client.delete(f"/api/points/{image_id}")

assert del_image.status_code == 200
assert base_study.has_images is False
