Skip to content

Commit

Permalink
Merge pull request #592 from chuguoxipo/develop
Browse files Browse the repository at this point in the history
Add exportTimelineForStudy API & Unit Test
  • Loading branch information
DwayneJengSage authored Sep 18, 2023
2 parents 131e454 + d33b1c3 commit 4c7433d
Show file tree
Hide file tree
Showing 5 changed files with 309 additions and 6 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ public final class Exporter3Configuration {
private String projectId;
private String rawDataFolderId;
private Long storageLocationId;
private String wikiPageId;

/** Helper method that returns true if all configuration attributes are specified. */
public boolean isConfigured() {
Expand Down Expand Up @@ -112,11 +113,19 @@ public void setStorageLocationId(Long storageLocationId) {
this.storageLocationId = storageLocationId;
}

/**
 * Synapse ID of the wiki page that hosts the study's exported timeline, or null if the timeline has
 * never been exported for this study (see Exporter3Service.exportTimelineForStudy).
 */
public String getWikiPageId() {
return wikiPageId;
}

/** @see #getWikiPageId */
public void setWikiPageId(String wikiPageId) {
this.wikiPageId = wikiPageId;
}

@Override
public int hashCode() {
return Objects.hash(createStudyNotificationTopicArn, dataAccessTeamId, exportNotificationTopicArn, participantVersionDemographicsTableId,
participantVersionDemographicsViewId, participantVersionTableId, projectId, rawDataFolderId,
storageLocationId);
storageLocationId, wikiPageId);
}

@Override
Expand All @@ -135,7 +144,8 @@ public boolean equals(Object obj) {
&& Objects.equals(participantVersionDemographicsViewId, other.participantVersionDemographicsViewId)
&& Objects.equals(participantVersionTableId, other.participantVersionTableId)
&& Objects.equals(projectId, other.projectId) && Objects.equals(rawDataFolderId, other.rawDataFolderId)
&& Objects.equals(storageLocationId, other.storageLocationId);
&& Objects.equals(storageLocationId, other.storageLocationId)
&& Objects.equals(wikiPageId, other.wikiPageId);
}

@Override
Expand All @@ -145,6 +155,6 @@ public String toString() {
+ participantVersionDemographicsTableId + ", participantVersionDemographicsViewId="
+ participantVersionDemographicsViewId + ", participantVersionTableId=" + participantVersionTableId
+ ", projectId=" + projectId + ", rawDataFolderId=" + rawDataFolderId + ", storageLocationId="
+ storageLocationId + "]";
+ storageLocationId + ", wikiPageId=" + wikiPageId + "]";
}
}
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
package org.sagebionetworks.bridge.services;

import java.io.File;
import java.io.IOException;
import java.util.HashMap;
import java.util.HashSet;
Expand All @@ -20,20 +21,31 @@
import com.amazonaws.services.sqs.AmazonSQS;
import com.amazonaws.services.sqs.model.SendMessageResult;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Charsets;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableSet;
import com.google.common.io.CharSink;
import com.google.common.io.FileWriteMode;
import com.google.common.io.Files;
import org.apache.commons.lang3.StringUtils;
import org.sagebionetworks.client.SynapseClient;
import org.sagebionetworks.client.exceptions.SynapseException;
import org.sagebionetworks.repo.model.Folder;
import org.sagebionetworks.repo.model.ObjectType;
import org.sagebionetworks.repo.model.Project;
import org.sagebionetworks.repo.model.Team;
import org.sagebionetworks.repo.model.dao.WikiPageKey;
import org.sagebionetworks.repo.model.dao.WikiPageKeyHelper;
import org.sagebionetworks.repo.model.file.CloudProviderFileHandleInterface;
import org.sagebionetworks.repo.model.annotation.v2.AnnotationsValue;
import org.sagebionetworks.repo.model.annotation.v2.AnnotationsValueType;
import org.sagebionetworks.repo.model.project.ExternalS3StorageLocationSetting;
import org.sagebionetworks.repo.model.table.ColumnModel;
import org.sagebionetworks.repo.model.table.ColumnType;
import org.sagebionetworks.repo.model.table.EntityView;
import org.sagebionetworks.repo.model.v2.wiki.V2WikiPage;
import org.sagebionetworks.repo.model.table.MaterializedView;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
Expand Down Expand Up @@ -66,6 +78,8 @@
import org.sagebionetworks.bridge.models.exporter.ExporterSubscriptionRequest;
import org.sagebionetworks.bridge.models.exporter.ExporterSubscriptionResult;
import org.sagebionetworks.bridge.models.healthdata.HealthDataRecordEx3;
import org.sagebionetworks.bridge.models.schedules2.Schedule2;
import org.sagebionetworks.bridge.models.schedules2.timelines.Timeline;
import org.sagebionetworks.bridge.models.studies.Study;
import org.sagebionetworks.bridge.models.upload.Upload;
import org.sagebionetworks.bridge.models.worker.Exporter3Request;
Expand Down Expand Up @@ -265,6 +279,9 @@ public class Exporter3Service {
private AmazonSQS sqsClient;
private StudyService studyService;
private SynapseHelper synapseHelper;
private Schedule2Service schedule2Service;
private SynapseClient synapseClient;
private FileService fileService;

@Autowired
public final void setConfig(BridgeConfig config) {
Expand Down Expand Up @@ -340,11 +357,26 @@ final void setStudyService(StudyService studyService) {
this.studyService = studyService;
}

/** Synapse client bound to the exporter's Synapse account; used for wiki page and file upload calls. */
@Resource(name="exporterSynapseClient")
public final void setSynapseClient(SynapseClient synapseClient) {
this.synapseClient = synapseClient;
}

/** Helper that wraps Synapse calls (e.g. the writable-state check used before exporting). */
@Resource(name="exporterSynapseHelper")
public final void setSynapseHelper(SynapseHelper synapseHelper) {
this.synapseHelper = synapseHelper;
}

/** Service used to resolve a study's schedule GUID into its Timeline for export. */
@Autowired
final void setSchedule2Service(Schedule2Service schedule2Service) {
this.schedule2Service = schedule2Service;
}

// NOTE(review): fileService is injected here but not referenced anywhere in the visible code of this
// change — confirm it is actually needed, or remove the field and this setter.
@Autowired
public final void setFileService(FileService fileService) {
this.fileService = fileService;
}

/**
* Initializes configs and Synapse resources for Exporter 3.0. Note that if any config already exists, this API
* will simply ignore them. This allows for two notable scenarios
Expand Down Expand Up @@ -438,7 +470,6 @@ public Exporter3Configuration initExporter3ForStudy(String appId, String studyId
sendNotification(appId, studyId, "create study", createStudyNotificationTopicArn, notification);
}
}

return ex3Config;
}

Expand Down Expand Up @@ -934,4 +965,55 @@ private void exportUpload(String appId, String recordId) {
LOG.info("Sent export request for app " + appId + " record " + recordId + "; received message ID=" +
sqsResult.getMessageId());
}

/**
 * Exports the study's schedule timeline to Synapse as a wiki page in JSON format. Some researchers only
 * have access to Synapse, not Bridge, so they need access to the Timeline information there.
 *
 * If Exporter 3.0 is not yet enabled for the study, it is initialized first. The first export creates a
 * new wiki page and stores its ID in the study's Exporter3Configuration; subsequent exports update the
 * existing page in place.
 *
 * @param appId app the study lives in
 * @param studyId study whose timeline should be exported
 * @return the study's Exporter3Configuration, including the wiki page ID
 * @throws EntityNotFoundException if the study has no schedule
 * @throws IOException if writing the timeline JSON to the temp file fails
 */
public Exporter3Configuration exportTimelineForStudy(String appId, String studyId) throws BridgeSynapseException,
        SynapseException, IOException {
    // Get Timeline to export for the study.
    Study study = studyService.getStudy(appId, studyId, true);
    if (study.getScheduleGuid() == null) {
        throw new EntityNotFoundException(Schedule2.class);
    }
    Timeline timeline = schedule2Service.getTimelineForSchedule(appId, study.getScheduleGuid());

    // Fail fast if Synapse is down or in read-only mode.
    synapseHelper.checkSynapseWritableOrThrow();

    // If Exporter3 is not enabled for the study, initialize it, then re-load the study so we pick up the
    // newly created Exporter3Configuration.
    if (!study.isExporter3Enabled()) {
        initExporter3ForStudy(appId, studyId);
        study = studyService.getStudy(appId, studyId, true);
    }
    Exporter3Configuration exporter3Config = study.getExporter3Configuration();

    // Serialize the timeline to a temp file so it can be multipart-uploaded to Synapse. If we get an
    // IOException here, the upload would almost certainly fail anyway, so just let it propagate.
    JsonNode node = BridgeObjectMapper.get().valueToTree(timeline);
    File outputFile = File.createTempFile("timelineFor" + studyId, ".txt");
    try {
        CharSink charSink = Files.asCharSink(outputFile, Charsets.UTF_8, FileWriteMode.APPEND);
        charSink.write(node.toString());

        CloudProviderFileHandleInterface markdown = synapseClient.multipartUpload(outputFile,
                exporter3Config.getStorageLocationId(), false, false);

        // First time exporting the timeline for this study: create a new wiki page in Synapse and persist
        // its ID on the study. Otherwise: re-point the existing wiki page at the new file handle.
        if (exporter3Config.getWikiPageId() == null) {
            V2WikiPage wiki = new V2WikiPage();
            wiki.setTitle("Exported Timeline for " + studyId);
            wiki.setMarkdownFileHandleId(markdown.getId());
            wiki = synapseClient.createV2WikiPage(exporter3Config.getProjectId(), ObjectType.ENTITY, wiki);
            exporter3Config.setWikiPageId(wiki.getId());
            studyService.updateStudy(appId, study);
        } else {
            WikiPageKey key = WikiPageKeyHelper.createWikiPageKey(exporter3Config.getProjectId(),
                    ObjectType.ENTITY, exporter3Config.getWikiPageId());
            V2WikiPage existingWiki = synapseClient.getV2WikiPage(key);
            existingWiki.setMarkdownFileHandleId(markdown.getId());
            synapseClient.updateV2WikiPage(exporter3Config.getProjectId(), ObjectType.ENTITY, existingWiki);
        }
        return exporter3Config;
    } finally {
        // Bug fix: the original leaked the temp file on every call. Best-effort cleanup; fall back to
        // deleteOnExit() if the immediate delete fails (e.g. file still mapped on Windows).
        if (!outputFile.delete()) {
            outputFile.deleteOnExit();
        }
    }
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -96,6 +96,15 @@ public Exporter3Configuration initExporter3ForStudy(@PathVariable String studyId
return exporter3Service.initExporter3ForStudy(session.getAppId(), studyId);
}

/**
 * Exports the study's schedule timeline to the study's Synapse project as a wiki page. Caller must be a
 * study designer or developer with permission to update the study.
 */
@PostMapping("/v5/studies/{studyId}/timeline/export")
@ResponseStatus(HttpStatus.CREATED)
public Exporter3Configuration exportTimelineForStudy(@PathVariable String studyId)
throws BridgeSynapseException, SynapseException, IOException {
UserSession session = getAuthenticatedSession(STUDY_DESIGNER, DEVELOPER);
CAN_UPDATE_STUDIES.checkAndThrow(STUDY_ID, studyId);
return exporter3Service.exportTimelineForStudy(session.getAppId(), studyId);
}

/** Subscribe to be notified when health data is exported to the study-specific Synapse project. */
@PostMapping(path = "/v5/studies/{studyId}/exporter3/notifications/export/subscribe")
@ResponseStatus(HttpStatus.CREATED)
Expand Down
Loading

0 comments on commit 4c7433d

Please sign in to comment.