Commit

Merge branch 'master' into pr-osparc-toolin-update
GitHK authored Mar 4, 2025
2 parents 8664f51 + 27f40cc commit d715acd
Showing 8 changed files with 82 additions and 33 deletions.
@@ -191,8 +191,8 @@ qx.Class.define("osparc.dashboard.TemplateBrowser", {
const workbench = newStudyData["workbench"];
const nodesIdsListed = [];
Object.keys(workbench).forEach(nodeId => {
-       const node = workbench[nodeId];
-       if (osparc.study.StudyPricingUnits.includeInList(node)) {
+       const nodeData = workbench[nodeId];
+       if (osparc.study.StudyPricingUnits.includeInList(nodeData)) {
nodesIdsListed.push(nodeId);
}
});
@@ -487,7 +487,7 @@ qx.Class.define("osparc.data.model.Study", {
// Do not listen to output related backend updates if the node is a frontend node.
// The frontend controls its output values, progress and states.
// If a File Picker is uploading a file, the backend could override the current state with some older state.
-       if (node && nodeData && !osparc.data.model.Node.isFrontend(node)) {
+       if (node && nodeData && !osparc.data.model.Node.isFrontend(node.getMetaData())) {
node.setOutputData(nodeData.outputs);
if ("progress" in nodeData) {
const progress = Number.parseInt(nodeData["progress"]);
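Both changes above feed osparc.data.model.Node.isFrontend with serialized node information: the workbench entry itself (nodeData) in TemplateBrowser, or node.getMetaData() when only a Node model instance is at hand in Study. A minimal sketch of that distinction follows; the "/frontend/" key prefix and the object shapes are illustrative assumptions, not taken from this commit.

// Illustrative sketch only, not part of this commit.
// Assumption: frontend services are recognizable from their service "key"
// (e.g. a "simcore/services/frontend/..." prefix); the real check lives in
// osparc.data.model.Node.isFrontend and may differ.
const isFrontendSketch = metadata =>
  Boolean(metadata && typeof metadata["key"] === "string" && metadata["key"].includes("/frontend/"));

// Serialized workbench entries (nodeData) can be passed directly:
const workbench = {
  "node-uuid-1": { key: "simcore/services/frontend/file-picker", version: "1.0.0" },
  "node-uuid-2": { key: "simcore/services/comp/solver", version: "2.1.0" }
};
Object.entries(workbench).forEach(([nodeId, nodeData]) => {
  console.log(nodeId, isFrontendSketch(nodeData)); // true, then false
});

// A Node model instance, by contrast, is unwrapped first:
// osparc.data.model.Node.isFrontend(node.getMetaData());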
@@ -414,7 +414,18 @@ qx.Class.define("osparc.file.FilePicker", {
if (files.length === 1) {
const fileUploader = new osparc.file.FileUploader(this.getNode());
fileUploader.addListener("uploadAborted", () => this.__resetOutput());
-       fileUploader.addListener("fileUploaded", () => {
+       fileUploader.addListener("fileUploaded", e => {
+         const fileMetadata = e.getData();
+         if (
+           "location" in fileMetadata &&
+           "dataset" in fileMetadata &&
+           "path" in fileMetadata &&
+           "name" in fileMetadata
+         ) {
+           osparc.file.FilePicker.setOutputValueFromStore(this.getNode(), fileMetadata["location"], fileMetadata["dataset"], fileMetadata["path"], fileMetadata["name"]);
+         } else {
+           console.error("metadata info missing", fileMetadata);
+         }
this.fireEvent("fileUploaded");
this.getNode().fireEvent("fileUploaded");
}, this);
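The listener now takes the event argument (e => instead of () =>) because fileUploaded is fired as a data event by osparc.file.FileUploader (changed below), so e.getData() carries the upload result. A sketch of the payload shape the guard above expects; the field values are hypothetical.

// Hypothetical example payload, for illustration only; real values come from
// the storage upload response assembled by osparc.file.FileUploader.
const fileMetadata = {
  location: 0,                            // storage location id
  dataset: "study-uuid",                  // study uuid, used as the dataset
  path: "study-uuid/node-uuid/data.csv",  // file uuid/path in storage
  name: "data.csv"
};

// Same completeness check as in the listener above:
const complete = ["location", "dataset", "path", "name"].every(field => field in fileMetadata);
console.log(complete); // true, so the node output can be set; otherwise an error is logged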
@@ -39,7 +39,7 @@ qx.Class.define("osparc.file.FileUploader", {

events: {
"uploadAborted": "qx.event.type.Event",
"fileUploaded": "qx.event.type.Event"
"fileUploaded": "qx.event.type.Data",
},

statics: {
@@ -60,6 +60,7 @@
members: {
__presignedLinkData: null,
__uploadedParts: null,
+     __fileMetadata: null,

// Request to the server an upload URL.
retrieveUrlAndUpload: function(file) {
@@ -80,6 +81,14 @@
.then(presignedLinkData => {
if (presignedLinkData.resp.urls) {
this.__presignedLinkData = presignedLinkData;

+           this.__fileMetadata = {
+             location: presignedLinkData.locationId,
+             dataset: studyId,
+             path: presignedLinkData.fileUuid,
+             name: file.name
+           };

try {
this.__uploadFile(file);
} catch (error) {
@@ -124,7 +133,7 @@
const nProgress = Math.min(Math.max(100*progress-min, min), max);
this.getNode().getStatus().setProgress(nProgress);
if (this.__uploadedParts.every(uploadedPart => uploadedPart["e_tag"] !== null)) {
-           this.__checkCompleteUpload(file);
+           this.__checkCompleteUpload();
}
}
} catch (err) {
@@ -153,7 +162,7 @@
},

// Use XMLHttpRequest to complete the upload to S3
-     __checkCompleteUpload: function(file) {
+     __checkCompleteUpload: function() {
if (this.getNode()["fileUploadAbortRequested"]) {
this.__abortUpload();
return;
@@ -162,29 +171,21 @@
const presignedLinkData = this.__presignedLinkData;
this.getNode().getStatus().setProgress(this.self().PROGRESS_VALUES.COMPLETING);
const completeUrl = presignedLinkData.resp.links.complete_upload;
-       const location = presignedLinkData.locationId;
-       const path = presignedLinkData.fileUuid;
const xhr = new XMLHttpRequest();
xhr.onloadend = () => {
-         const fileMetadata = {
-           location,
-           dataset: this.getNode().getStudy().getUuid(),
-           path,
-           name: file.name
-         };
const resp = JSON.parse(xhr.responseText);
if ("error" in resp && resp["error"]) {
console.error(resp["error"]);
this.__abortUpload();
} else if ("data" in resp) {
if (xhr.status == 202) {
console.log("waiting for completion", file.name);
console.log("waiting for completion", this.__fileMetadata.name);
// @odeimaiz: we need to poll the received new location in the response
// we do have links.state -> poll that link until it says ok
// right now this kind of work if files are small and this happens fast
-             this.__pollFileUploadState(resp["data"]["links"]["state"], fileMetadata);
+             this.__pollFileUploadState(resp["data"]["links"]["state"]);
} else if (xhr.status == 200) {
-             this.__completeUpload(fileMetadata);
+             this.__completeUpload();
}
}
};
@@ -196,30 +197,27 @@
xhr.send(JSON.stringify(body));
},

-     __pollFileUploadState: function(stateLink, fileMetadata) {
+     __pollFileUploadState: function(stateLink) {
const xhr = new XMLHttpRequest();
xhr.open("POST", stateLink, true);
xhr.setRequestHeader("Content-Type", "application/json");
xhr.onloadend = () => {
const resp = JSON.parse(xhr.responseText);
if ("data" in resp && resp["data"] && resp["data"]["state"] === "ok") {
-           this.__completeUpload(fileMetadata);
+           this.__completeUpload();
} else {
const interval = 2000;
-           qx.event.Timer.once(() => this.__pollFileUploadState(stateLink, fileMetadata), this, interval);
+           qx.event.Timer.once(() => this.__pollFileUploadState(stateLink), this, interval);
}
};
xhr.send();
},

-     __completeUpload: function(fileMetadata) {
+     __completeUpload: function() {
this.getNode()["fileUploadAbortRequested"] = false;

if ("location" in fileMetadata && "dataset" in fileMetadata && "path" in fileMetadata && "name" in fileMetadata) {
osparc.file.FilePicker.setOutputValueFromStore(this.getNode(), fileMetadata["location"], fileMetadata["dataset"], fileMetadata["path"], fileMetadata["name"]);
}
this.__presignedLinkData = null;
-       this.fireEvent("fileUploaded");
+       this.fireDataEvent("fileUploaded", this.__fileMetadata);
},

__abortUpload: function() {
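Taken together, these FileUploader changes stop threading file/fileMetadata through __checkCompleteUpload, __pollFileUploadState and __completeUpload; instead, __fileMetadata is built once when the presigned link arrives and handed to listeners through a data event on completion. A minimal qooxdoo-style sketch of that producer/consumer wiring; the class name and payload values are hypothetical.

// Illustrative sketch only, not part of this commit.
qx.Class.define("sketch.Uploader", {
  extend: qx.core.Object,

  events: {
    // a Data event carries a payload, unlike a plain qx.event.type.Event
    "fileUploaded": "qx.event.type.Data"
  },

  members: {
    __fileMetadata: null,

    completeUpload: function() {
      // cached once (e.g. when the presigned link is obtained), emitted on completion
      this.__fileMetadata = { location: 0, dataset: "study-uuid", path: "study-uuid/data.csv", name: "data.csv" };
      this.fireDataEvent("fileUploaded", this.__fileMetadata);
    }
  }
});

// Consumer side, as FilePicker now does: read the payload with e.getData()
const uploader = new sketch.Uploader();
uploader.addListener("fileUploaded", e => console.log("uploaded", e.getData().name));
uploader.completeUpload();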
@@ -38,8 +38,8 @@ qx.Class.define("osparc.study.StudyPricingUnits", {
},

statics: {
-     includeInList: function(node) {
-       return !osparc.data.model.Node.isFrontend(node);
+     includeInList: function(nodeData) {
+       return !osparc.data.model.Node.isFrontend(nodeData);
},
},

@@ -61,9 +61,9 @@
if ("workbench" in this.__studyData) {
const workbench = this.__studyData["workbench"];
Object.keys(workbench).forEach(nodeId => {
-         const node = workbench[nodeId];
-         if (this.self().includeInList(node)) {
-           const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, node);
+         const nodeData = workbench[nodeId];
+         if (this.self().includeInList(nodeData)) {
+           const nodePricingUnits = new osparc.study.NodePricingUnits(this.__studyData["uuid"], nodeId, nodeData);
this.__nodePricingUnits.push(nodePricingUnits);
this._add(nodePricingUnits);
promises.push(nodePricingUnits.showPricingUnits());
@@ -359,7 +359,7 @@ qx.Class.define("osparc.study.Utils", {
},

getNonFrontendNodes: function(studyData) {
-       return Object.values(studyData["workbench"]).filter(node => !osparc.data.model.Node.isFrontend(node));
+       return Object.values(studyData["workbench"]).filter(nodeData => !osparc.data.model.Node.isFrontend(nodeData));
},

guessIcon: function(studyData) {
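For reference, a hypothetical call site; the studyData literal is invented for illustration, and only getNonFrontendNodes itself comes from the code above.

// Hypothetical study data, invented for illustration.
const studyData = {
  "workbench": {
    "node-uuid-1": { key: "simcore/services/frontend/file-picker", version: "1.0.0" },
    "node-uuid-2": { key: "simcore/services/comp/solver", version: "2.1.0" }
  }
};
// After this commit the filter receives the serialized nodeData entries directly.
const computationalNodes = osparc.study.Utils.getNonFrontendNodes(studyData);
computationalNodes.forEach(nodeData => console.log(nodeData.key)); // "simcore/services/comp/solver"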
@@ -145,7 +145,7 @@ qx.Class.define("osparc.ui.basic.NodeStatusUI", {
this.getNode().getStatus().addListener("changeProgress", e => {
const progress = e.getData();
if (progress > 0 && progress < 100) {
-           this.getChildControl("label").setValue(this.tr("Uploading"));
+           this.getChildControl("label").setValue(this.tr("Uploading..."));
}
});
}
40 changes: 40 additions & 0 deletions services/web/server/tests/unit/with_dbs/01/test_api_keys.py
@@ -10,19 +10,26 @@
from http.client import HTTPException

import pytest
import tenacity
from aiohttp.test_utils import TestClient
from faker import Faker
from models_library.products import ProductName
from pytest_mock import MockerFixture
from pytest_simcore.helpers.assert_checks import assert_status
from pytest_simcore.helpers.monkeypatch_envs import setenvs_from_dict
from pytest_simcore.helpers.typing_env import EnvVarsDict
from pytest_simcore.helpers.webserver_login import NewUser, UserInfoDict
from servicelib.aiohttp import status
from servicelib.aiohttp.application_keys import APP_SETTINGS_KEY
from simcore_service_webserver.api_keys import _repository as repo
from simcore_service_webserver.api_keys._models import ApiKey
from simcore_service_webserver.api_keys._service import (
get_or_create_api_key,
prune_expired_api_keys,
)
from simcore_service_webserver.application_settings import GarbageCollectorSettings
from simcore_service_webserver.db.models import UserRole
from tenacity import retry_if_exception_type, stop_after_attempt, wait_fixed


@pytest.fixture
@@ -215,3 +222,36 @@ async def test_get_not_existing_api_key(

if not errors:
assert data is None


@pytest.fixture
async def app_environment(
app_environment: EnvVarsDict,
monkeypatch: pytest.MonkeyPatch,
) -> EnvVarsDict:
return app_environment | setenvs_from_dict(
monkeypatch,
{
"WEBSERVER_GARBAGE_COLLECTOR": '{"GARBAGE_COLLECTOR_INTERVAL_S": 30, "GARBAGE_COLLECTOR_PRUNE_APIKEYS_INTERVAL_S": 1}'
},
)


async def test_prune_expired_api_keys_task_is_triggered(
app_environment: EnvVarsDict, mocker: MockerFixture, client: TestClient
):
mock = mocker.patch(
"simcore_service_webserver.api_keys._service._repository.prune_expired"
)
settings = client.server.app[ # type: ignore
APP_SETTINGS_KEY
].WEBSERVER_GARBAGE_COLLECTOR
assert isinstance(settings, GarbageCollectorSettings)
async for attempt in tenacity.AsyncRetrying(
stop=stop_after_attempt(5),
wait=wait_fixed(1),
retry=retry_if_exception_type(AssertionError),
reraise=True,
):
with attempt:
mock.assert_called()
