diff --git a/docker/main/dataservice/entrypoint.sh b/docker/main/dataservice/entrypoint.sh
index e42ae462d..433ecc801 100644
--- a/docker/main/dataservice/entrypoint.sh
+++ b/docker/main/dataservice/entrypoint.sh
@@ -62,7 +62,7 @@ if [ -d ${UPDATED_PACKAGES_DIR:=/updated_packages} ]; then
for srv in $(pip -qq freeze | grep dmod | awk -F= '{print $1}' | awk -F- '{print $2}'); do
if [ $(ls ${UPDATED_PACKAGES_DIR} | grep dmod.${srv}- | wc -l) -eq 1 ]; then
pip uninstall -y --no-input $(pip -qq freeze | grep dmod.${srv} | awk -F= '{print $1}')
- pip install $(ls ${UPDATED_PACKAGES_DIR}/*.whl | grep dmod.${srv}-)
+ pip install --no-deps $(ls ${UPDATED_PACKAGES_DIR}/*.whl | grep dmod.${srv}-)
fi
done
#pip install ${UPDATED_PACKAGES_DIR}/*.whl
diff --git a/docker/nwm_gui/docker-compose.yml b/docker/nwm_gui/docker-compose.yml
index 509441649..7eb8ef4fb 100644
--- a/docker/nwm_gui/docker-compose.yml
+++ b/docker/nwm_gui/docker-compose.yml
@@ -34,6 +34,7 @@ services:
args:
docker_internal_registry: ${DOCKER_INTERNAL_REGISTRY:?Missing DOCKER_INTERNAL_REGISTRY value (see 'Private Docker Registry ' section in example.env)}
comms_package_name: ${PYTHON_PACKAGE_DIST_NAME_COMMS:?}
+ client_package_name: ${PYTHON_PACKAGE_DIST_NAME_CLIENT:?}
networks:
- request-listener-net
# Call this when starting the container
@@ -62,6 +63,8 @@ services:
volumes:
- ${DMOD_APP_STATIC:?}:/usr/maas_portal/static
- ${DMOD_SSL_DIR}/requestservice:/usr/maas_portal/ssl
+      # Needed only to speed up debugging
+ #- ${DOCKER_GUI_HOST_SRC:?GUI sources path not configured in environment}/MaaS:/usr/maas_portal/MaaS
#- ${DOCKER_GUI_HOST_VENV_DIR:-/tmp/blah}:${DOCKER_GUI_CONTAINER_VENV_DIR:-/tmp/blah}
# Expose Django's port to the internal network so that the web server may access it
expose:
diff --git a/example.env b/example.env
index dd98d004c..ed8baf63f 100644
--- a/example.env
+++ b/example.env
@@ -101,6 +101,11 @@ NGEN_BRANCH=master
## Python Packages Settings ##
########################################################################
+## The "name" of the built client Python distribution package, for purposes of installing (e.g., via pip)
+PYTHON_PACKAGE_DIST_NAME_CLIENT=dmod-client
+## The name of the actual Python client package (i.e., for importing or specifying as a module on the command line)
+PYTHON_PACKAGE_NAME_CLIENT=dmod.client
+
## The "name" of the built communication Python distribution package, for purposes of installing (e.g., via pip)
PYTHON_PACKAGE_DIST_NAME_COMMS=dmod-communication
## The name of the actual Python communication package (i.e., for importing or specifying as a module on the command line)
diff --git a/python/gui/MaaS/cbv/AbstractDatasetView.py b/python/gui/MaaS/cbv/AbstractDatasetView.py
new file mode 100644
index 000000000..e1ae493c3
--- /dev/null
+++ b/python/gui/MaaS/cbv/AbstractDatasetView.py
@@ -0,0 +1,29 @@
+import logging
+from abc import ABC
+from typing import Dict
+
+from django.views.generic.base import View
+
+from dmod.client.request_clients import DatasetExternalClient
+
+from .DMODProxy import DMODMixin, GUI_STATIC_SSL_DIR
+
+logger = logging.getLogger("gui_log")
+
+
+class AbstractDatasetView(View, DMODMixin, ABC):
+    """
+    Abstract base for dataset-related views, providing a lazily initialized ``DatasetExternalClient``.
+    """
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+ self._dataset_client = None
+
+ async def get_dataset(self, dataset_name: str) -> Dict[str, dict]:
+ serial_dataset = await self.dataset_client.get_serialized_datasets(dataset_name=dataset_name)
+ return serial_dataset
+
+ async def get_datasets(self) -> Dict[str, dict]:
+ serial_datasets = await self.dataset_client.get_serialized_datasets()
+ return serial_datasets
+
+ @property
+ def dataset_client(self) -> DatasetExternalClient:
+ if self._dataset_client is None:
+ self._dataset_client = DatasetExternalClient(endpoint_uri=self.maas_endpoint_uri,
+ ssl_directory=GUI_STATIC_SSL_DIR)
+ return self._dataset_client
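+
+
+# A minimal subclassing sketch (hypothetical view; concrete views such as DatasetApiView follow
+# this same pattern of driving the async client methods from synchronous handlers):
+#
+#     import asyncio
+#
+#     class MyDatasetView(AbstractDatasetView):
+#         def get(self, request, *args, **kwargs):
+#             serial_map = asyncio.get_event_loop().run_until_complete(self.get_datasets())
+#             ...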
diff --git a/python/gui/MaaS/cbv/DMODProxy.py b/python/gui/MaaS/cbv/DMODProxy.py
index fc0fcb7a1..e9e308664 100644
--- a/python/gui/MaaS/cbv/DMODProxy.py
+++ b/python/gui/MaaS/cbv/DMODProxy.py
@@ -16,6 +16,8 @@
from pathlib import Path
from typing import List, Optional, Tuple, Type
+GUI_STATIC_SSL_DIR = Path('/usr/maas_portal/ssl')
+
class RequestFormProcessor(ABC):
@@ -209,7 +211,7 @@ class PostFormRequestClient(ModelExecRequestClient):
def _bootstrap_ssl_dir(cls, ssl_dir: Optional[Path] = None):
if ssl_dir is None:
ssl_dir = Path(__file__).resolve().parent.parent.parent.joinpath('ssl')
- ssl_dir = Path('/usr/maas_portal/ssl') #Fixme
+            ssl_dir = GUI_STATIC_SSL_DIR  # FIXME: this unconditionally overrides the default computed above
return ssl_dir
def __init__(self, endpoint_uri: str, http_request: HttpRequest, ssl_dir: Optional[Path] = None):
@@ -315,6 +317,7 @@ def forward_request(self, request: HttpRequest, event_type: MessageEventType) ->
client = PostFormRequestClient(endpoint_uri=self.maas_endpoint_uri, http_request=request)
if event_type == MessageEventType.MODEL_EXEC_REQUEST:
form_processor_type = ModelExecRequestFormProcessor
+ # TODO: need a new type of form processor here (or 3 more, for management, uploading, and downloading)
else:
raise RuntimeError("{} got unsupported event type: {}".format(self.__class__.__name__, str(event_type)))
diff --git a/python/gui/MaaS/cbv/DatasetApiView.py b/python/gui/MaaS/cbv/DatasetApiView.py
new file mode 100644
index 000000000..6da6eff65
--- /dev/null
+++ b/python/gui/MaaS/cbv/DatasetApiView.py
@@ -0,0 +1,62 @@
+import asyncio
+from django.http import JsonResponse
+from wsgiref.util import FileWrapper
+from django.http.response import StreamingHttpResponse
+from .AbstractDatasetView import AbstractDatasetView
+from .DatasetFileWebsocketFilelike import DatasetFileWebsocketFilelike
+import logging
+logger = logging.getLogger("gui_log")
+
+
+class DatasetApiView(AbstractDatasetView):
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def _get_dataset_content_details(self, dataset_name: str):
+ result = asyncio.get_event_loop().run_until_complete(self.dataset_client.get_dataset_content_details(name=dataset_name))
+ logger.info(result)
+ return JsonResponse({"contents": result}, status=200)
+
+ def _delete_dataset(self, dataset_name: str) -> JsonResponse:
+ result = asyncio.get_event_loop().run_until_complete(self.dataset_client.delete_dataset(name=dataset_name))
+ return JsonResponse({"successful": result}, status=200)
+
+ def _get_datasets_json(self) -> JsonResponse:
+ serial_dataset_map = asyncio.get_event_loop().run_until_complete(self.get_datasets())
+ return JsonResponse({"datasets": serial_dataset_map}, status=200)
+
+ def _get_dataset_json(self, dataset_name: str) -> JsonResponse:
+ serial_dataset = asyncio.get_event_loop().run_until_complete(self.get_dataset(dataset_name=dataset_name))
+ return JsonResponse({"dataset": serial_dataset[dataset_name]}, status=200)
+
+ def _get_download(self, request, *args, **kwargs):
+ dataset_name = request.GET.get("dataset_name", None)
+ item_name = request.GET.get("item_name", None)
+ chunk_size = 8192
+
+ custom_filelike = DatasetFileWebsocketFilelike(self.dataset_client, dataset_name, item_name)
+
+ response = StreamingHttpResponse(
+ FileWrapper(custom_filelike, chunk_size),
+ content_type="application/octet-stream"
+ )
+ response['Content-Length'] = asyncio.get_event_loop().run_until_complete(self.dataset_client.get_item_size(dataset_name, item_name))
+ response['Content-Disposition'] = "attachment; filename=%s" % item_name
+ return response
+
+ def get(self, request, *args, **kwargs):
+ request_type = request.GET.get("request_type", None)
+ if request_type == 'download_file':
+ return self._get_download(request)
+ elif request_type == 'datasets':
+ return self._get_datasets_json()
+ elif request_type == 'dataset':
+ return self._get_dataset_json(dataset_name=request.GET.get("name", None))
+ elif request_type == 'contents':
+ return self._get_dataset_content_details(dataset_name=request.GET.get("name", None))
+        elif request_type == 'delete':
+ return self._delete_dataset(dataset_name=request.GET.get("name", None))
+
+ # TODO: finish
+ return JsonResponse({}, status=400)
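+
+    # A minimal sketch of the query interface this handler dispatches on (parameter values are
+    # hypothetical; the URL is the one named 'dataset-api' in MaaS/urls.py):
+    #
+    #     GET <dataset-api>?request_type=datasets
+    #     GET <dataset-api>?request_type=dataset&name=my-dataset
+    #     GET <dataset-api>?request_type=contents&name=my-dataset
+    #     GET <dataset-api>?request_type=delete&name=my-dataset
+    #     GET <dataset-api>?request_type=download_file&dataset_name=my-dataset&item_name=data.csv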
diff --git a/python/gui/MaaS/cbv/DatasetFileWebsocketFilelike.py b/python/gui/MaaS/cbv/DatasetFileWebsocketFilelike.py
new file mode 100644
index 000000000..9e69409ad
--- /dev/null
+++ b/python/gui/MaaS/cbv/DatasetFileWebsocketFilelike.py
@@ -0,0 +1,20 @@
+import asyncio
+from typing import AnyStr
+from dmod.client.request_clients import DatasetExternalClient
+
+
+class DatasetFileWebsocketFilelike:
+    """
+    Minimal file-like wrapper that reads a dataset file over the websocket client in sequential blocks.
+    """
+
+ def __init__(self, client: DatasetExternalClient, dataset_name: str, file_name: str):
+ self._client = client
+ self._dataset_name = dataset_name
+ self._file_name = file_name
+ self._read_index: int = 0
+
+    def read(self, blksize: int) -> AnyStr:
+        # Read the next block, advancing the index by the amount actually returned
+        # (the final block may be smaller than requested)
+        result = asyncio.get_event_loop().run_until_complete(
+            self._client.download_item_block(dataset_name=self._dataset_name, item_name=self._file_name,
+                                             blk_start=self._read_index, blk_size=blksize))
+        self._read_index += len(result)
+        return result
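+
+
+# A minimal usage sketch (mirrors DatasetApiView._get_download; the client instance and the
+# dataset/file names here are hypothetical):
+#
+#     from wsgiref.util import FileWrapper
+#     filelike = DatasetFileWebsocketFilelike(client, 'my-dataset', 'data.csv')
+#     wrapped = FileWrapper(filelike, 8192)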
diff --git a/python/gui/MaaS/cbv/DatasetManagementForms.py b/python/gui/MaaS/cbv/DatasetManagementForms.py
new file mode 100644
index 000000000..f1441afca
--- /dev/null
+++ b/python/gui/MaaS/cbv/DatasetManagementForms.py
@@ -0,0 +1,240 @@
+from django import forms
+from enum import Enum
+from functools import partial
+
+from dmod.core.meta_data import DataCategory, DataFormat
+
+from .js_utils import start_end_time_validation
+
+# typing imports
+from typing import Optional, Type
+
+# form field type aliases (defined below); these correspond to `dmod.core.meta_data.StandardDatasetIndex`
+def _time(start_time_id: str, end_time_id: str):
+ return partial(
+ forms.DateTimeField,
+ widget=forms.DateTimeInput(
+ attrs={
+ "type": "datetime-local",
+ "onchange": start_end_time_validation(start_time_id, end_time_id),
+ }
+ ),
+ # TODO: this should be removed once we upgrade django versions >= 3.1 (tracked by #209)
+ input_formats=["%Y-%m-%dT%H:%M"],
+ )
+
+
+_Unknown = forms.CharField
+_CatchmentId = forms.CharField
+_DataId = forms.CharField
+_HydrofabricId = forms.CharField
+_Length = forms.IntegerField
+_GlobalChecksum = forms.CharField
+_ElementId = forms.CharField
+_File = partial(
+ forms.FileField,
+ widget=forms.ClearableFileInput(
+ attrs={
+ # filename cannot contain underscore (_)
+ "oninput": """((el) => {
+ const files = el.files;
+
+ for (let {name} of files){
+ // filenames cannot include _'s.
+ if (name.includes('_')){
+
+ // see constraint validation API for more detail (https://developer.mozilla.org/en-US/docs/Web/API/Constraint_validation)
+ el.setCustomValidity('Filename cannot contain underscores \"_\"');
+ return;
+ }
+
+ // valid input
+ el.setCustomValidity('');
+ }
+
+ })(this)"""
+ }
+ ),
+)
+
+
+class FormNameMixIn:
+ def form_name(self) -> str:
+ """returns class name of form"""
+ return type(self).__name__
+
+
+class DynamicFormMixIn:
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ for visible in self.visible_fields():
+            # input fields have ids of the form: `id_{{field instance var name}}_{{form name}}`
+ visible.field.widget.attrs["id"] = f"{visible.auto_id}_{self.form_name()}"
+ visible.field.widget.attrs["class"] = self.form_name()
+ visible.field.widget.attrs["style"] = "display: none;"
+ visible.field.widget.attrs["disabled"] = "true"
+
+
+class DatasetForm(FormNameMixIn, forms.Form):
+ name = forms.CharField(max_length=100, label="Dataset Name")
+ category = forms.ChoiceField(
+ choices=[(f.name, f.name.title()) for f in DataCategory],
+ label="Dataset Category",
+ )
+ data_format = forms.ChoiceField(
+ choices=[("---", "---")] + [(f.name, f.name) for f in DataFormat],
+ label="Data Format",
+ widget=forms.Select(
+ attrs={
+                # when the selection changes, unhide and enable the form fields and labels for the
+                # corresponding DataFormat; form fields and labels have an html class name matching
+                # their DataFormat
+ "onchange": """((name) => {
+ // remove previously active fields, if any
+ const active_fields = document.querySelectorAll('.active_field')
+ active_fields.forEach(el => {
+
+ // disable field, hide it, and remove flag class, 'active_field'
+ el.setAttribute('disabled', true)
+ el.style.display = 'none'
+ el.classList.remove('active_field')
+ })
+
+ const els_with_class = document.querySelectorAll(`.${name}`)
+ els_with_class.forEach(el => {
+
+                    // enable field, show it, and add flag class, 'active_field'
+ el.removeAttribute('disabled')
+ el.style.display = 'block'
+ el.classList.add('active_field')
+ })
+ })(this.value)"""
+ }
+ ),
+ )
+
+
+class AORC_CSV(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ start_time = _time("id_start_time_AORC_CSV", "id_end_time_AORC_CSV")(
+ label="Start Datetime"
+ )
+ end_time = _time("id_start_time_AORC_CSV", "id_end_time_AORC_CSV")(
+ label="End Datetime"
+ )
+    # TODO: note if end times are inclusive.
+    # TODO: note that all datetimes are naive UTC time.
+ file = _File()
+
+
+class NETCDF_FORCING_CANONICAL(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ start_time = _time(
+ "id_start_time_NETCDF_FORCING_CANONICAL", "id_end_time_NETCDF_FORCING_CANONICAL"
+ )(label="Start Datetime")
+ end_time = _time(
+ "id_start_time_NETCDF_FORCING_CANONICAL", "id_end_time_NETCDF_FORCING_CANONICAL"
+ )(label="End Datetime")
+ file = _File()
+
+
+class NETCDF_AORC_DEFAULT(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ start_time = _time(
+ "id_start_time_NETCDF_AORC_DEFAULT", "id_end_time_NETCDF_AORC_DEFAULT"
+ )(label="Start Datetime")
+ end_time = _time(
+ "id_start_time_NETCDF_AORC_DEFAULT", "id_end_time_NETCDF_AORC_DEFAULT"
+ )(label="End Datetime")
+ file = _File()
+
+
+class NGEN_OUTPUT(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ start_time = _time("id_start_time_NGEN_OUTPUT", "id_end_time_NGEN_OUTPUT")(
+ label="Start Datetime"
+ )
+ end_time = _time("id_start_time_NGEN_OUTPUT", "id_end_time_NGEN_OUTPUT")(
+ label="End Datetime"
+ )
+ data_id = _DataId()
+ file = _File()
+
+
+class NGEN_REALIZATION_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ start_time = _time(
+ "id_start_time_NGEN_REALIZATION_CONFIG", "id_end_time_NGEN_REALIZATION_CONFIG"
+ )(label="Start Datetime")
+ end_time = _time(
+ "id_start_time_NGEN_REALIZATION_CONFIG", "id_end_time_NGEN_REALIZATION_CONFIG"
+ )(label="End Datetime")
+ data_id = _DataId()
+ file = _File()
+
+
+class NGEN_GEOJSON_HYDROFABRIC(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ hydrofabric_id = _HydrofabricId()
+ data_id = _DataId()
+ file = _File()
+
+
+class NGEN_PARTITION_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ data_id = _DataId()
+    hydrofabric_id = _HydrofabricId()
+ length = _Length()
+ file = _File()
+
+
+class BMI_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ global_checksum = _GlobalChecksum()
+ data_id = _DataId()
+ file = _File()
+
+
+class NWM_OUTPUT(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ catchment_id = _CatchmentId()
+ start_time = _time("id_start_time_NWM_OUTPUT", "id_end_time_NWM_OUTPUT")(
+ label="Start Datetime"
+ )
+ end_time = _time("id_start_time_NWM_OUTPUT", "id_end_time_NWM_OUTPUT")(
+ label="End Datetime"
+ )
+ data_id = _DataId()
+ file = _File()
+
+
+class NWM_CONFIG(DynamicFormMixIn, FormNameMixIn, forms.Form):
+ element_id = _ElementId()
+ start_time = _time("id_start_time_NWM_CONFIG", "id_end_time_NWM_CONFIG")(
+ label="Start Datetime"
+ )
+ end_time = _time("id_start_time_NWM_CONFIG", "id_end_time_NWM_CONFIG")(
+ label="End Datetime"
+ )
+ data_id = _DataId()
+ file = _File()
+
+
+class DatasetFormatForm(Enum):
+ AORC_CSV = AORC_CSV
+ NETCDF_FORCING_CANONICAL = NETCDF_FORCING_CANONICAL
+ NETCDF_AORC_DEFAULT = NETCDF_AORC_DEFAULT
+ NGEN_OUTPUT = NGEN_OUTPUT
+ NGEN_REALIZATION_CONFIG = NGEN_REALIZATION_CONFIG
+ NGEN_GEOJSON_HYDROFABRIC = NGEN_GEOJSON_HYDROFABRIC
+ NGEN_PARTITION_CONFIG = NGEN_PARTITION_CONFIG
+ BMI_CONFIG = BMI_CONFIG
+ NWM_OUTPUT = NWM_OUTPUT
+ NWM_CONFIG = NWM_CONFIG
+
+ @staticmethod
+    def get_form_from_name(name: str) -> Optional[Type[forms.Form]]:
+ try:
+ return DatasetFormatForm[name].value
+ except KeyError:
+ return None
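+
+
+# A minimal usage sketch (hypothetical request handling; not part of this module): look up the
+# form class for a posted DataFormat name and instantiate it to validate the submission.
+#
+#     form_cls = DatasetFormatForm.get_form_from_name('AORC_CSV')
+#     if form_cls is not None:
+#         bound = form_cls(data=request.POST, files=request.FILES)
+#         is_valid = bound.is_valid()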
diff --git a/python/gui/MaaS/cbv/DatasetManagementView.py b/python/gui/MaaS/cbv/DatasetManagementView.py
new file mode 100644
index 000000000..b2275897c
--- /dev/null
+++ b/python/gui/MaaS/cbv/DatasetManagementView.py
@@ -0,0 +1,119 @@
+"""
+Defines a view that may be used to configure a MaaS request
+"""
+import asyncio
+from django.http import HttpRequest, HttpResponse
+from django.shortcuts import render
+
+import dmod.communication as communication
+from dmod.core.meta_data import DataCategory, DataFormat
+
+import logging
+logger = logging.getLogger("gui_log")
+
+from .utils import extract_log_data
+from .AbstractDatasetView import AbstractDatasetView
+from .DatasetManagementForms import DatasetForm, DatasetFormatForm
+
+
+class DatasetManagementView(AbstractDatasetView):
+
+ """
+ A view used to configure a dataset management request or requests for transmitting dataset data.
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+
+ def _process_event_type(self, http_request: HttpRequest) -> communication.MessageEventType:
+ """
+        Determine whether this request is for a ``DATASET_MANAGEMENT`` or ``DATA_TRANSMISSION`` event, and return the matching type.
+
+ Parameters
+ ----------
+ http_request : HttpRequest
+ The raw HTTP request in question.
+
+ Returns
+ -------
+ communication.MessageEventType
+ Either ``communication.MessageEventType.DATASET_MANAGEMENT`` or
+ ``communication.MessageEventType.DATA_TRANSMISSION``.
+ """
+ # TODO:
+ raise NotImplementedError("{}._process_event_type not implemented".format(self.__class__.__name__))
+
+ def get(self, http_request: HttpRequest, *args, **kwargs) -> HttpResponse:
+ """
+ The handler for 'get' requests.
+
+ This will render the 'maas/dataset_management.html' template after retrieving necessary information to initially
+ populate the forms it displays.
+
+ Parameters
+ ----------
+ http_request : HttpRequest
+ The request asking to render this page.
+ args
+ kwargs
+
+ Returns
+ -------
+ A rendered page.
+ """
+ errors, warnings, info = extract_log_data(kwargs)
+
+ # Gather map of serialized datasets, keyed by dataset name
+ serial_dataset_map = asyncio.get_event_loop().run_until_complete(self.get_datasets())
+        serial_dataset_list = list(serial_dataset_map.values())
+
+ dataset_categories = [c.name.title() for c in DataCategory]
+ dataset_formats = [f.name for f in DataFormat]
+
+ form = DatasetForm()
+
+ payload = {
+ 'form': form,
+ 'dynamic_forms': [f.value() for f in DatasetFormatForm],
+ 'datasets': serial_dataset_list,
+ 'dataset_categories': dataset_categories,
+ 'dataset_formats': dataset_formats,
+ 'errors': errors,
+ 'info': info,
+ 'warnings': warnings
+ }
+
+ return render(http_request, 'maas/dataset_management.html', payload)
+
+ def post(self, http_request: HttpRequest, *args, **kwargs) -> HttpResponse:
+ """
+ The handler for 'post' requests.
+
+        This will attempt to submit the request and then re-render the page as for a 'get' request.
+
+ Parameters
+ ----------
+ http_request : HttpRequest
+ The request asking to render this page.
+ args
+ kwargs
+
+ Returns
+ -------
+ A rendered page.
+ """
+ # TODO: implement this to figure out whether DATASET_MANAGEMENT or DATA_TRANSMISSION
+ event_type = self._process_event_type(http_request)
+ client, session_data, dmod_response = self.forward_request(http_request, event_type)
+
+ # TODO: this probably isn't exactly correct, so review once closer to completion
+ if dmod_response is not None and 'dataset_id' in dmod_response.data:
+ session_data['new_dataset_id'] = dmod_response.data['dataset_id']
+
+ http_response = self.get(http_request=http_request, errors=client.errors, warnings=client.warnings,
+ info=client.info, *args, **kwargs)
+
+ for k, v in session_data.items():
+ http_response.set_cookie(k, v)
+
+ return http_response
diff --git a/python/gui/MaaS/cbv/js_utils.py b/python/gui/MaaS/cbv/js_utils.py
new file mode 100644
index 000000000..27ece4f7b
--- /dev/null
+++ b/python/gui/MaaS/cbv/js_utils.py
@@ -0,0 +1,36 @@
+def start_end_time_validation(start_time_id: str, end_time_id: str) -> str:
+    """Returns a JS snippet that applies validity testing to start and end time input DOM elements
+    of type `datetime-local`. If the start time is after the end time, a custom input validity
+    message is set on the `start_time_id` element.
+ """
+ return f"""((start_time_id, end_time_id) => {{
+
+ let start_time_el = document.getElementById(start_time_id);
+ let end_time_el = document.getElementById(end_time_id);
+
+ if (start_time_el == null){{
+ console.error(`invalid start_time_id: ${{start_time_id}}`)
+ return;
+ }}
+
+ if (end_time_el == null){{
+ console.error(`invalid end_time_id: ${{end_time_id}}`)
+ return;
+ }}
+
+ if (start_time_el.value === '' || end_time_el.value === ''){{
+ // missing time value
+ return;
+ }}
+
+ const start_time = new Date(start_time_el.value);
+ const end_time = new Date(end_time_el.value);
+
+ if (start_time.getTime() > end_time.getTime()){{
+ start_time_el.setCustomValidity('Start time after end time');
+ return;
+ }}
+
+ // reset
+ start_time_el.setCustomValidity('');
+ }})('{start_time_id}', '{end_time_id}')"""
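+
+
+# A minimal wiring sketch (this mirrors how ``_time`` in DatasetManagementForms uses this helper;
+# the element ids here are hypothetical):
+#
+#     from django import forms
+#
+#     widget = forms.DateTimeInput(attrs={
+#         "type": "datetime-local",
+#         "onchange": start_end_time_validation("id_start_time_X", "id_end_time_X"),
+#     })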
diff --git a/python/gui/MaaS/static/maas/js/components/confirmDialog.js b/python/gui/MaaS/static/maas/js/components/confirmDialog.js
new file mode 100644
index 000000000..a09710327
--- /dev/null
+++ b/python/gui/MaaS/static/maas/js/components/confirmDialog.js
@@ -0,0 +1,108 @@
+class ConfirmDialog {
+ constructor(parentDivName, id, styleClass, onConfirmFunc) {
+ this.parentDivName = parentDivName;
+ this.id = id;
+ this.styleClass = styleClass;
+ this.onConfirmFunc = onConfirmFunc;
+
+ this.outer_div = null;
+ this.content_div = null;
+ }
+
+ get parentDiv() {
+ return document.getElementById(this.parentDivName);
+ }
+}
+
+class ConfirmDeleteDatasetDialog extends ConfirmDialog {
+ constructor(dataset_name, parentDivName, id, styleClass, onConfirmFunc) {
+ super(parentDivName, id, styleClass, onConfirmFunc);
+ this.dataset_name = dataset_name;
+ this.buttons_div = null;
+ }
+
+ _style_outer_div() {
+ this.outer_div.style.position = 'fixed';
+ this.outer_div.style.zIndex = '1';
+ this.outer_div.style.left = '35%';
+ this.outer_div.style.top = '5%';
+ this.outer_div.style.width = '25%';
+ this.outer_div.style.height = '25%';
+ this.outer_div.style.overflow = 'clip';
+ this.outer_div.style.backgroundColor = '#B7B5B5FF';
+ this.outer_div.style.border = '1px solid #888';
+ this.outer_div.style.padding = '15px';
+ //this.outer_div.style.paddingTop = '0px';
+ this.outer_div.style.margin = '15% auto';
+ }
+
+ _init_outer_div() {
+ if (this.outer_div == null) {
+ this.outer_div = document.createElement('div');
+ this.outer_div.id = this.id;
+            this.outer_div.className = this.styleClass;
+ this._style_outer_div();
+ this.parentDiv.appendChild(this.outer_div);
+ }
+ }
+
+ _init_content() {
+ if (this.content_div == null) {
+ this.content_div = document.createElement('div');
+ this.content_div.style.height = '70%';
+ //this.content_div.style.overflow = 'fixed';
+ this.content_div.style.padding = '10px';
+ this.content_div.appendChild(document.createTextNode("This will permanently delete dataset: "));
+ this.content_div.appendChild(document.createElement('br'));
+ this.content_div.appendChild(document.createElement('br'));
+ this.content_div.appendChild(document.createTextNode(this.dataset_name));
+ this.content_div.appendChild(document.createElement('br'));
+ this.content_div.appendChild(document.createElement('br'));
+ this.content_div.appendChild(document.createTextNode("Proceed?"));
+
+ if (this.outer_div == null) {
+ this._init_outer_div();
+ }
+ this.outer_div.appendChild(this.content_div);
+ }
+ }
+
+ _init_buttons() {
+ if (this.buttons_div == null) {
+ this.buttons_div = document.createElement('div');
+ this.outer_div.appendChild(this.buttons_div);
+ this.buttons_div.style.padding = '10px';
+
+ let cancel_button = document.createElement('button');
+ cancel_button.onclick = () => {
+ this.remove();
+ };
+ cancel_button.textContent = "Cancel";
+ cancel_button.style.marginRight = '10px';
+ this.buttons_div.appendChild(cancel_button);
+
+ let confirm_button = document.createElement('button');
+ confirm_button.onclick = this.onConfirmFunc;
+ confirm_button.textContent = "Confirm";
+ this.buttons_div.appendChild(confirm_button);
+ }
+ }
+
+ append() {
+ this._init_outer_div();
+ this._init_content();
+ this._init_buttons();
+ }
+
+ remove() {
+ this.buttons_div.remove();
+ this.buttons_div = null;
+
+ this.content_div.remove();
+ this.content_div = null;
+
+ this.outer_div.remove();
+ this.outer_div = null;
+ }
+
+}
\ No newline at end of file
diff --git a/python/gui/MaaS/static/maas/js/components/createDatasetForm.js b/python/gui/MaaS/static/maas/js/components/createDatasetForm.js
new file mode 100644
index 000000000..ce9fffb7d
--- /dev/null
+++ b/python/gui/MaaS/static/maas/js/components/createDatasetForm.js
@@ -0,0 +1,43 @@
+class CreateDatasetForm {
+ constructor(parentDivId) {
+ this.parentDivId = parentDivId;
+ this.formElementId = this.parentDivId + "-form";
+ this.formContentDivId = this.formElementId + "-div-universal-inputs";
+ this.dynamicVarsDivId = this.formElementId + "-div-dynamic-inputs";
+
+ }
+
+ updateFormatChange(selection) {
+ let dy_div = document.getElementById(this.dynamicVarsDivId);
+        while (dy_div.firstChild){
+            dy_div.removeChild(dy_div.firstChild);
+        }
+
+ let addUploadSelection = false;
+ if (selection == "NETCDF_FORCING_CANONICAL") {
+ addUploadSelection = true;
+ }
+
+ if (addUploadSelection) {
+ let upload_select_label = document.createElement('label');
+ let selectId = this.parentDivId + '-inputs-upload';
+ upload_select_label.appendChild(document.createTextNode('Data Files:'));
+            upload_select_label.htmlFor = selectId;
+ dy_div.appendChild(upload_select_label);
+
+ let upload_select = document.createElement('input');
+ upload_select.type = 'file';
+ upload_select.name = 'create-dataset-upload';
+            upload_select.id = selectId;
+ upload_select.style.float = 'right';
+ upload_select.style.textAlign = 'right';
+ dy_div.appendChild(upload_select);
+ }
+ }
+
+ dynamicInputUpdate(formInput, selection) {
+ if (formInput.id == this.parentDivId + '-form-input-format') {
+ this.updateFormatChange(selection);
+ }
+ }
+}
\ No newline at end of file
diff --git a/python/gui/MaaS/static/maas/js/components/datasetOverview.js b/python/gui/MaaS/static/maas/js/components/datasetOverview.js
new file mode 100644
index 000000000..e6b404cba
--- /dev/null
+++ b/python/gui/MaaS/static/maas/js/components/datasetOverview.js
@@ -0,0 +1,166 @@
+class DatasetOverviewTableRow {
+ constructor(parentTableId, serializedDataset, detailsOnClickFunc, filesOnClickFunc, downloadOnClickFunc,
+ uploadOnClickFunc, deleteOnClickFunc) {
+ this.parentTableId = parentTableId;
+ this.serializedDataset = serializedDataset;
+
+ this.rowClassName = "mgr-tbl-content";
+
+ this.detailsOnClickFunc = detailsOnClickFunc;
+ this.filesOnClickFunc = filesOnClickFunc;
+ this.downloadOnClickFunc = downloadOnClickFunc;
+ this.uploadOnClickFunc = uploadOnClickFunc;
+ this.deleteOnClickFunc = deleteOnClickFunc;
+
+ this.row = document.getElementById(this.rowId);
+ }
+
+ get datasetName() {
+ return this.serializedDataset["name"];
+ }
+
+ get category() {
+ return this.serializedDataset["data_category"];
+ }
+
+ get rowId() {
+ return this.parentTableId + "-row-" + this.datasetName;
+ }
+
+ get parentTable() {
+ return document.getElementById(this.parentTableId);
+ }
+
+ _createLinks(is_anchor, text, onClickFunc) {
+ let cell = document.createElement('th');
+ let content;
+ if (is_anchor) {
+ content = document.createElement('a');
+ content.href = "javascript:void(0);";
+ }
+ else {
+ content = document.createElement('button');
+ }
+
+        // invoke the handler provided for this action, passing the dataset name
+        const ds_name = this.datasetName;
+        content.onclick = function() { onClickFunc(ds_name); };
+ content.appendChild(document.createTextNode(text));
+ cell.appendChild(content);
+ this.row.appendChild(cell);
+ }
+
+ build() {
+ if (this.row != null) {
+ this.row.remove();
+ }
+ this.row = document.createElement('tr');
+ this.row.id = this.rowId;
+ this.row.className = this.rowClassName;
+
+ let colCell = document.createElement('th');
+ colCell.appendChild(document.createTextNode(this.datasetName));
+ this.row.appendChild(colCell);
+
+ colCell = document.createElement('th');
+ colCell.appendChild(document.createTextNode(this.category));
+ this.row.appendChild(colCell);
+
+        this._createLinks(true, "Details", this.detailsOnClickFunc);
+        this._createLinks(true, "Files", this.filesOnClickFunc);
+        this._createLinks(true, "Download", this.downloadOnClickFunc);
+        this._createLinks(true, "Upload Files", this.uploadOnClickFunc);
+        this._createLinks(true, "Delete", this.deleteOnClickFunc);
+ }
+}
+
+class DatasetOverviewTable {
+ constructor(parentDivId, tableClass, detailsOnClickFunc, filesOnClickFunc, downloadOnClickFunc,
+ uploadOnClickFunc, deleteOnClickFunc) {
+ this.parentDivId = parentDivId;
+ this.tableClass = tableClass;
+ this.tableId = this.parentDivId + "-overview-table";
+
+ this.detailsOnClickFunc = detailsOnClickFunc;
+ this.filesOnClickFunc = filesOnClickFunc;
+ this.downloadOnClickFunc = downloadOnClickFunc;
+ this.uploadOnClickFunc = uploadOnClickFunc;
+ this.deleteOnClickFunc = deleteOnClickFunc;
+
+ this.table = document.getElementById(this.tableId);
+ }
+
+ get parentDiv() {
+ return document.getElementById(this.parentDivId);
+ }
+
+ get tableHeader() {
+ let thead = document.createElement('thead');
+ let header = document.createElement('tr');
+ thead.appendChild(header);
+
+ let colCell = document.createElement('th');
+ colCell.className = "mgr-tbl-dataset-header";
+ colCell.appendChild(document.createTextNode('Dataset Name'));
+ header.appendChild(colCell);
+
+ colCell = document.createElement('th');
+ colCell.className = "mgr-tbl-category-header";
+ colCell.appendChild(document.createTextNode('Category'));
+ header.appendChild(colCell);
+
+ header.appendChild(document.createElement('th'));
+
+ colCell = document.createElement('th');
+ colCell.appendChild(document.createTextNode('Actions'));
+ header.appendChild(colCell);
+
+ header.appendChild(document.createElement('th'));
+ header.appendChild(document.createElement('th'));
+
+ return thead;
+ }
+
+ buildAndAddRow(serializedDataset) {
+ let row = new DatasetOverviewTableRow(this.tableId, serializedDataset, this.detailsOnClickFunc,
+ this.filesOnClickFunc, this.downloadOnClickFunc, this.uploadOnClickFunc, this.deleteOnClickFunc);
+ row.build();
+ this.table.appendChild(row.row);
+ }
+
+ buildTable(contentResponse) {
+ if (this.table != null) {
+ this.table.remove();
+ }
+ this.table = document.createElement('table');
+ this.table.id = this.tableId;
+ this.table.className = this.tableClass;
+
+ this.table.appendChild(this.tableHeader);
+
+ for (const ds_name in contentResponse["datasets"]) {
+ this.buildAndAddRow(contentResponse["datasets"][ds_name]);
+ }
+
+ this.parentDiv.appendChild(this.table);
+ }
+}
\ No newline at end of file
diff --git a/python/gui/MaaS/templates/maas/dataset_management.html b/python/gui/MaaS/templates/maas/dataset_management.html
new file mode 100644
index 000000000..2390fa7dd
--- /dev/null
+++ b/python/gui/MaaS/templates/maas/dataset_management.html
@@ -0,0 +1,828 @@
+<!DOCTYPE html>
+<html>
+<head>
+    <title>OWP MaaS</title>
+    {% load static %}
+</head>
+<body>
+    {% if errors %}
+        <ul class="errors">
+            {% for error in errors %}
+                <li>{{ error }}</li>
+            {% endfor %}
+        </ul>
+    {% endif %}
+
+    {% if warnings %}
+        <ul class="warnings">
+            {% for warning in warnings %}
+                <li>{{ warning }}</li>
+            {% endfor %}
+        </ul>
+    {% endif %}
+
+    {% if info %}
+        <ul class="info">
+            {% for message in info %}
+                <li>{{ message }}</li>
+            {% endfor %}
+        </ul>
+    {% endif %}
+
+    {# Cache jQuery scripts for UI scripting and styling #}
+
+    <h1>Dataset Management</h1>
+</body>
+</html>
diff --git a/python/gui/MaaS/urls.py b/python/gui/MaaS/urls.py
index 704af44b3..8771c1419 100644
--- a/python/gui/MaaS/urls.py
+++ b/python/gui/MaaS/urls.py
@@ -1,5 +1,7 @@
from django.conf.urls import url
from .cbv.EditView import EditView
+from .cbv.DatasetManagementView import DatasetManagementView
+from .cbv.DatasetApiView import DatasetApiView
from .cbv.MapView import MapView, Fabrics, FabricNames, FabricTypes, ConnectedFeatures
from .cbv.configuration import CreateConfiguration
@@ -10,6 +12,10 @@
urlpatterns = [
url(r'^$', EditView.as_view()),
+ # TODO: add this later
+ #url(r'ngen$', NgenWorkflowView.as_view(), name="ngen-workflow"),
+    url(r'datasets$', DatasetManagementView.as_view(), name="dataset-management"),
+    url(r'dataset-api$', DatasetApiView.as_view(), name="dataset-api"),
url(r'map$', MapView.as_view(), name="map"),
url(r'map/connections$', ConnectedFeatures.as_view(), name="connections"),
url(r'fabric/names$', FabricNames.as_view(), name='fabric-names'),
diff --git a/python/lib/client/dmod/client/_version.py b/python/lib/client/dmod/client/_version.py
index b794fd409..7fd229a32 100644
--- a/python/lib/client/dmod/client/_version.py
+++ b/python/lib/client/dmod/client/_version.py
@@ -1 +1 @@
-__version__ = '0.1.0'
+__version__ = '0.2.0'
diff --git a/python/lib/client/dmod/client/request_clients.py b/python/lib/client/dmod/client/request_clients.py
index 09251c01b..bd89d40e7 100644
--- a/python/lib/client/dmod/client/request_clients.py
+++ b/python/lib/client/dmod/client/request_clients.py
@@ -9,13 +9,13 @@
from dmod.communication.data_transmit_message import DataTransmitMessage, DataTransmitResponse
from dmod.core.meta_data import DataCategory, DataDomain, TimeRange
from pathlib import Path
-from typing import List, Optional, Tuple, Type, Union
+from typing import AnyStr, Dict, List, Optional, Tuple, Type, Union
import json
import websockets
-#import logging
-#logger = logging.getLogger("gui_log")
+import logging
+logger = logging.getLogger("client_log")
class NgenRequestClient(ModelExecRequestClient[NGENRequest, NGENRequestResponse]):
@@ -89,6 +89,25 @@ async def create_dataset(self, name: str, category: DataCategory, domain: DataDo
async def delete_dataset(self, name: str, **kwargs) -> bool:
pass
+ @abstractmethod
+ async def download_item_block(self, dataset_name: str, item_name: str, blk_start: int, blk_size: int) -> AnyStr:
+ """
+ Download a block/chunk of a given size and start point from a specified dataset file.
+
+ Parameters
+ ----------
+        dataset_name : str
+            The name of the dataset containing the item of interest.
+        item_name : str
+            The name of the dataset file/item from which to download.
+        blk_start : int
+            The index of the first byte of the block.
+        blk_size : int
+            The size of the block in bytes.
+
+ Returns
+ -------
+ AnyStr
+ The downloaded block/chunk.
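+
+        Examples
+        --------
+        A minimal usage sketch; ``client`` here stands for an instance of some concrete subtype,
+        and the dataset/item names are hypothetical::
+
+            raw = await client.download_item_block(dataset_name='my-dataset', item_name='data.csv',
+                                                   blk_start=0, blk_size=8192)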
+ """
+ pass
+
@abstractmethod
async def download_dataset(self, dataset_name: str, dest_dir: Path) -> bool:
"""
@@ -130,6 +149,14 @@ async def download_from_dataset(self, dataset_name: str, item_name: str, dest: P
"""
pass
+ @abstractmethod
+    async def get_dataset_content_details(self, name: str, **kwargs) -> Union[dict, list]:
+ pass
+
+ @abstractmethod
+ async def get_item_size(self, dataset_name: str, item_name: str) -> int:
+ pass
+
@abstractmethod
async def list_datasets(self, category: Optional[DataCategory] = None) -> List[str]:
pass
@@ -175,6 +202,50 @@ async def delete_dataset(self, name: str, **kwargs) -> bool:
self.last_response = await self.async_make_request(request)
return self.last_response is not None and self.last_response.success
+ async def get_dataset_content_details(self, name: str, **kwargs) -> dict:
+        # TODO: later, perhaps add things like created and last updated times
+ query = DatasetQuery(query_type=QueryType.GET_DATASET_ITEMS)
+ request = DatasetManagementMessage(action=ManagementAction.QUERY, query=query, dataset_name=name)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ return self.last_response.data
+ else:
+ return {}
+
+ async def get_item_size(self, dataset_name: str, item_name: str) -> int:
+ query = DatasetQuery(query_type=QueryType.GET_ITEM_SIZE, item_name=item_name)
+ request = DatasetManagementMessage(action=ManagementAction.QUERY, query=query, dataset_name=dataset_name,
+ data_location=item_name)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ return self.last_response.data
+ else:
+ return -1
+
+ async def download_item_block(self, dataset_name: str, item_name: str, blk_start: int, blk_size: int) -> AnyStr:
+ """
+ Download a block/chunk of a given size and start point from a specified dataset file.
+
+ Parameters
+ ----------
+ dataset_name
+ item_name
+ blk_start
+ blk_size
+
+ Returns
+ -------
+ AnyStr
+ The downloaded block/chunk.
+ """
+ request = DatasetManagementMessage(action=ManagementAction.REQUEST_DATA, dataset_name=dataset_name,
+ data_location=item_name, blk_start=blk_start, blk_size=blk_size)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ return self.last_response.data
+ else:
+ return ''
+
async def download_dataset(self, dataset_name: str, dest_dir: Path) -> bool:
"""
Download an entire dataset to a local directory.
@@ -460,6 +531,55 @@ async def delete_dataset(self, name: str, **kwargs) -> bool:
self.last_response = await self.async_make_request(request)
return self.last_response is not None and self.last_response.success
+ async def download_item_block(self, dataset_name: str, item_name: str, blk_start: int, blk_size: int) -> AnyStr:
+ """
+ Download a block/chunk of a given size and start point from a specified dataset file.
+
+ Parameters
+ ----------
+ dataset_name
+ item_name
+ blk_start
+ blk_size
+
+ Returns
+ -------
+ AnyStr
+ The downloaded block/chunk.
+ """
+ await self._async_acquire_session_info()
+ request = MaaSDatasetManagementMessage(action=ManagementAction.REQUEST_DATA, dataset_name=dataset_name,
+ session_secret=self.session_secret, data_location=item_name,
+ blk_start=blk_start, blk_size=blk_size)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ return self.last_response.data
+ else:
+ return ''
+
+ async def get_item_size(self, dataset_name: str, item_name: str) -> int:
+ await self._async_acquire_session_info()
+ query = DatasetQuery(query_type=QueryType.GET_ITEM_SIZE, item_name=item_name)
+ request = MaaSDatasetManagementMessage(action=ManagementAction.QUERY, query=query, dataset_name=dataset_name,
+                                               session_secret=self.session_secret, data_location=item_name)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ return self.last_response.data
+ else:
+ return -1
+
+ async def get_dataset_content_details(self, name: str, **kwargs) -> List:
+        # TODO: later, perhaps add things like created and last updated times
+ await self._async_acquire_session_info()
+ query = DatasetQuery(query_type=QueryType.GET_DATASET_ITEMS)
+ request = MaaSDatasetManagementMessage(session_secret=self.session_secret, action=ManagementAction.QUERY,
+ query=query, dataset_name=name)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ return self.last_response.data[DatasetManagementResponse._DATA_KEY_QUERY_RESULTS]
+ else:
+ return []
+
async def download_dataset(self, dataset_name: str, dest_dir: Path) -> bool:
await self._async_acquire_session_info()
try:
@@ -505,6 +625,43 @@ async def download_from_dataset(self, dataset_name: str, item_name: str, dest: P
if not has_data:
return message_object
+ async def get_serialized_datasets(self, dataset_name: Optional[str] = None) -> Dict[str, dict]:
+ """
+ Get dataset objects in serialized form, either for all datasets or for the one with the provided name.
+
+ Parameters
+ ----------
+ dataset_name : Optional[str]
+ The name of a specific dataset to get serialized details of, if only one should be obtained.
+
+ Returns
+ -------
+ Dict[str, dict]
+ A dictionary, keyed by dataset name, of serialized dataset objects.
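+
+        Examples
+        --------
+        A minimal usage sketch; ``client`` here stands for a connected instance of this type::
+
+            serialized = await client.get_serialized_datasets()
+            dataset_names = list(serialized.keys())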
+ """
+ # TODO: may need to generalize this and add to super class
+ if dataset_name is None:
+ datasets = await self.list_datasets()
+ else:
+            # TODO: improve how this is used so that it can be safely, efficiently put everywhere it **may** be needed
+ await self._async_acquire_session_info()
+ datasets = [dataset_name]
+ serialized = dict()
+ action = ManagementAction.QUERY
+ query = DatasetQuery(query_type=QueryType.GET_SERIALIZED_FORM)
+ try:
+ for d in datasets:
+ request = MaaSDatasetManagementMessage(action=action, query=query, dataset_name=d,
+ session_secret=self.session_secret)
+ self.last_response: DatasetManagementResponse = await self.async_make_request(request)
+ if self.last_response.success:
+ serialized[d] = self.last_response.data[DatasetManagementResponse._DATA_KEY_QUERY_RESULTS]
+                # TODO: decide how to handle any responses that are not successful
+            return serialized
+        except Exception as e:
+            logger.error(e)
+            raise
+
async def list_datasets(self, category: Optional[DataCategory] = None) -> List[str]:
await self._async_acquire_session_info()
action = ManagementAction.LIST_ALL if category is None else ManagementAction.SEARCH
diff --git a/python/lib/client/dmod/test/test_dataset_client.py b/python/lib/client/dmod/test/test_dataset_client.py
index b266658c7..37ff6e386 100644
--- a/python/lib/client/dmod/test/test_dataset_client.py
+++ b/python/lib/client/dmod/test/test_dataset_client.py
@@ -1,7 +1,7 @@
import unittest
from ..client.request_clients import DataCategory, DatasetClient, DatasetManagementResponse, MaaSDatasetManagementResponse
from pathlib import Path
-from typing import List, Optional
+from typing import List, Optional, AnyStr
class SimpleMockDatasetClient(DatasetClient):
@@ -28,6 +28,24 @@ async def download_from_dataset(self, dataset_name: str, item_name: str, dest: P
""" Mock implementation, always returning ``False``. """
return False
+ async def download_item_block(self, dataset_name: str, item_name: str, blk_start: int, blk_size: int) -> AnyStr:
+ """
+ Mock implementation, always returning empty string.
+ """
+ return ''
+
+    async def get_dataset_content_details(self, name: str, **kwargs) -> dict:
+        """
+        Mock implementation, always returning an empty dictionary.
+        """
+        return {}
+
+ async def get_item_size(self, dataset_name: str, item_name: str) -> int:
+ """
+        Mock implementation, always returning ``1``.
+ """
+ return 1
+
async def list_datasets(self, category: Optional[DataCategory] = None) -> List[str]:
""" Mock implementation, always returning an empty list. """
return []
diff --git a/python/lib/communication/dmod/communication/_version.py b/python/lib/communication/dmod/communication/_version.py
index e4e49b3bb..8969d4966 100644
--- a/python/lib/communication/dmod/communication/_version.py
+++ b/python/lib/communication/dmod/communication/_version.py
@@ -1 +1 @@
-__version__ = '0.9.0'
+__version__ = '0.9.1'
diff --git a/python/lib/communication/dmod/communication/client.py b/python/lib/communication/dmod/communication/client.py
index aa81b56f3..44fb880fd 100644
--- a/python/lib/communication/dmod/communication/client.py
+++ b/python/lib/communication/dmod/communication/client.py
@@ -787,10 +787,8 @@ def _update_after_valid_response(self, response: EXTERN_REQ_R):
# TODO: this can probably be taken out, as the superclass implementation should suffice
async def async_make_request(self, request: EXTERN_REQ_M) -> EXTERN_REQ_R:
- async with websockets.connect(self.endpoint_uri, ssl=self.client_ssl_context) as websocket:
- await websocket.send(request.to_json())
- response = await websocket.recv()
- return request.__class__.factory_init_correct_response_subtype(json_obj=json.loads(response))
+ response = await self.async_send(request.to_json(), await_response=True)
+ return request.__class__.factory_init_correct_response_subtype(json_obj=json.loads(response))
@property
def errors(self):
diff --git a/python/lib/communication/dmod/communication/dataset_management_message.py b/python/lib/communication/dmod/communication/dataset_management_message.py
index d148b6d0e..d21192906 100644
--- a/python/lib/communication/dmod/communication/dataset_management_message.py
+++ b/python/lib/communication/dmod/communication/dataset_management_message.py
@@ -16,6 +16,11 @@ class QueryType(Enum):
GET_VALUES = 6
GET_MIN_VALUE = 7
GET_MAX_VALUE = 8
+ GET_SERIALIZED_FORM = 9
+ GET_LAST_UPDATED = 10
+ GET_SIZE = 11
+ GET_ITEM_SIZE = 12
+ GET_DATASET_ITEMS = 13
@classmethod
def get_for_name(cls, name_str: str) -> 'QueryType':
@@ -42,26 +47,32 @@ def get_for_name(cls, name_str: str) -> 'QueryType':
class DatasetQuery(Serializable):
_KEY_QUERY_TYPE = 'query_type'
+ _KEY_ITEM_NAME = 'item_name'
@classmethod
def factory_init_from_deserialized_json(cls, json_obj: dict) -> Optional['DatasetQuery']:
try:
- return cls(query_type=QueryType.get_for_name(json_obj[cls._KEY_QUERY_TYPE]))
+ return cls(query_type=QueryType.get_for_name(json_obj[cls._KEY_QUERY_TYPE]),
+ item_name=json_obj.get(cls._KEY_ITEM_NAME))
except Exception as e:
return None
def __hash__(self):
- return hash(self.query_type)
+ return hash('{}{}'.format(self.query_type.name, self.item_name if self.item_name is not None else ''))
def __eq__(self, other):
- return isinstance(other, DatasetQuery) and self.query_type == other.query_type
+ return isinstance(other, DatasetQuery) and self.query_type == other.query_type \
+ and self.item_name == other.item_name
- def __init__(self, query_type: QueryType):
+ def __init__(self, query_type: QueryType, item_name: Optional[str] = None):
self.query_type = query_type
+ self.item_name = item_name
def to_dict(self) -> Dict[str, Union[str, Number, dict, list]]:
serial = dict()
serial[self._KEY_QUERY_TYPE] = self.query_type.name
+ if self.item_name is not None:
+ serial[self._KEY_ITEM_NAME] = self.item_name
return serial
@@ -181,6 +192,8 @@ class DatasetManagementMessage(AbstractInitRequest):
_SERIAL_KEY_CATEGORY = 'category'
_SERIAL_KEY_DATA_DOMAIN = 'data_domain'
_SERIAL_KEY_DATA_LOCATION = 'data_location'
+ _SERIAL_KEY_DATA_BLK_START = 'data_blk_start'
+ _SERIAL_KEY_DATA_BLK_SIZE = 'data_blk_size'
_SERIAL_KEY_DATASET_NAME = 'dataset_name'
_SERIAL_KEY_IS_PENDING_DATA = 'pending_data'
_SERIAL_KEY_QUERY = 'query'
@@ -217,6 +230,8 @@ def factory_init_from_deserialized_json(cls, json_obj: dict) -> Optional['Datase
category_str = json_obj.get(cls._SERIAL_KEY_CATEGORY)
category = None if category_str is None else DataCategory.get_for_name(category_str)
data_loc = json_obj.get(cls._SERIAL_KEY_DATA_LOCATION)
+ data_blk_start = json_obj.get(cls._SERIAL_KEY_DATA_BLK_START)
+ data_blk_size = json_obj.get(cls._SERIAL_KEY_DATA_BLK_SIZE)
#page = json_obj[cls._SERIAL_KEY_PAGE] if cls._SERIAL_KEY_PAGE in json_obj else None
if cls._SERIAL_KEY_QUERY in json_obj:
query = DatasetQuery.factory_init_from_deserialized_json(json_obj[cls._SERIAL_KEY_QUERY])
@@ -229,7 +244,7 @@ def factory_init_from_deserialized_json(cls, json_obj: dict) -> Optional['Datase
return deserialized_class(action=action, dataset_name=dataset_name, category=category,
is_read_only_dataset=json_obj[cls._SERIAL_KEY_IS_READ_ONLY], domain=domain,
- data_location=data_loc,
+ data_location=data_loc, blk_start=data_blk_start, blk_size=data_blk_size,
is_pending_data=json_obj.get(cls._SERIAL_KEY_IS_PENDING_DATA), #page=page,
query=query, **deserialized_class_kwargs)
except Exception as e:
@@ -261,8 +276,8 @@ def __hash__(self):
def __init__(self, action: ManagementAction, dataset_name: Optional[str] = None, is_read_only_dataset: bool = False,
category: Optional[DataCategory] = None, domain: Optional[DataDomain] = None,
- data_location: Optional[str] = None, is_pending_data: bool = False,
- query: Optional[DatasetQuery] = None, *args, **kwargs):
+ data_location: Optional[str] = None, blk_start: Optional[int] = None, blk_size: Optional[int] = None,
+ is_pending_data: bool = False, query: Optional[DatasetQuery] = None, *args, **kwargs):
"""
Initialize this instance.
@@ -278,6 +293,10 @@ def __init__(self, action: ManagementAction, dataset_name: Optional[str] = None,
The optional category of the involved dataset or datasets, when applicable; defaults to ``None``.
data_location : Optional[str]
Optional location/file/object/etc. for acted-upon data.
+ blk_start : Optional[int]
+ Optional starting point for when acting upon a block/chunk of data.
+ blk_size : Optional[int]
+ Optional block size for when acting upon a block/chunk of data.
is_pending_data : bool
Whether the sender has data pending transmission after this message (default: ``False``).
query : Optional[DatasetQuery]
@@ -302,9 +321,19 @@ def __init__(self, action: ManagementAction, dataset_name: Optional[str] = None,
self._category = category
self._domain = domain
self._data_location = data_location
+ self._blk_start = blk_start
+ self._blk_size = blk_size
self._query = query
self._is_pending_data = is_pending_data
+ @property
+ def blk_size(self) -> Optional[int]:
+ return self._blk_size
+
+ @property
+ def blk_start(self) -> Optional[int]:
+ return self._blk_start
+
@property
def data_location(self) -> Optional[str]:
"""
@@ -406,6 +435,10 @@ def to_dict(self) -> Dict[str, Union[str, Number, dict, list]]:
serial[self._SERIAL_KEY_CATEGORY] = self.data_category.name
if self.data_location is not None:
serial[self._SERIAL_KEY_DATA_LOCATION] = self.data_location
+ if self._blk_start is not None:
+ serial[self._SERIAL_KEY_DATA_BLK_START] = self._blk_start
+ if self._blk_size is not None:
+ serial[self._SERIAL_KEY_DATA_BLK_SIZE] = self._blk_size
if self.data_domain is not None:
serial[self._SERIAL_KEY_DATA_DOMAIN] = self.data_domain.to_dict()
if self.query is not None:
@@ -602,6 +635,10 @@ def __init__(self, session_secret: str, *args, **kwargs):
is_read_only_dataset : bool
category : Optional[DataCategory]
data_location : Optional[str]
+ blk_start : Optional[int]
+ Optional starting point for when acting upon a block/chunk of data.
+ blk_size : Optional[int]
+ Optional block size for when acting upon a block/chunk of data.
is_pending_data : bool
query : Optional[DataQuery]
"""
diff --git a/python/lib/core/dmod/test/test_allocationparadigm.py b/python/lib/core/dmod/test/test_allocationparadigm.py
index 060e45f09..d37e36445 100644
--- a/python/lib/core/dmod/test/test_allocationparadigm.py
+++ b/python/lib/core/dmod/test/test_allocationparadigm.py
@@ -1,6 +1,6 @@
import unittest
-from dmod.scheduler.job.job import AllocationParadigm
from dmod.communication import SchedulerRequestMessage
+from ..core.execution import AllocationParadigm
class TestJobAllocationParadigm(unittest.TestCase):
diff --git a/python/lib/externalrequests/dmod/externalrequests/maas_request_handlers.py b/python/lib/externalrequests/dmod/externalrequests/maas_request_handlers.py
index b56f1d10e..fc2b48e09 100644
--- a/python/lib/externalrequests/dmod/externalrequests/maas_request_handlers.py
+++ b/python/lib/externalrequests/dmod/externalrequests/maas_request_handlers.py
@@ -288,25 +288,28 @@ async def handle_request(self, request: MaaSDatasetManagementMessage, **kwargs)
session, is_authorized, reason, msg = await self.get_authorized_session(request)
if not is_authorized:
return MaaSDatasetManagementResponse(success=False, reason=reason.name, message=msg)
- # In this case, we actually can pass the request as-is straight through (i.e., after confirming authorization)
- async with self.service_client as client:
- # Have to handle these two slightly differently, since multiple message will be going over the websocket
- if request.management_action == ManagementAction.REQUEST_DATA:
- await client.connection.send(str(request))
- mgmt_response = await self._handle_data_download(client_websocket=kwargs['upstream_websocket'],
- service_websocket=client.connection)
- elif request.management_action == ManagementAction.ADD_DATA:
- await client.connection.send(str(request))
- mgmt_response = await self._handle_data_upload(client_websocket=kwargs['upstream_websocket'],
- service_websocket=client.connection)
- else:
- mgmt_response = await client.async_make_request(request)
- logging.debug("************* {} received response:\n{}".format(self.__class__.__name__, str(mgmt_response)))
- # Likewise, can just send back the response from the internal service client
- return MaaSDatasetManagementResponse.factory_create(mgmt_response)
+ try:
+ # In this case, we actually can pass the request as-is straight through (i.e., after confirming authorization)
+ async with self.service_client as client:
+ # Have to handle these two slightly differently, since multiple message will be going over the websocket
+ if request.management_action == ManagementAction.REQUEST_DATA:
+ await client.connection.send(str(request))
+ mgmt_response = await self._handle_data_download(client_websocket=kwargs['upstream_websocket'],
+ service_websocket=client.connection)
+ elif request.management_action == ManagementAction.ADD_DATA:
+ await client.connection.send(str(request))
+ mgmt_response = await self._handle_data_upload(client_websocket=kwargs['upstream_websocket'],
+ service_websocket=client.connection)
+ else:
+ mgmt_response = await client.async_make_request(request)
+ logging.debug("************* {} received response:\n{}".format(self.__class__.__name__, str(mgmt_response)))
+ # Likewise, can just send back the response from the internal service client
+ return MaaSDatasetManagementResponse.factory_create(mgmt_response)
+        except Exception as e:
+            logging.error("{} encountered {} handling request: {}".format(self.__class__.__name__,
+                                                                          e.__class__.__name__, str(e)))
+            raise
@property
def service_client(self) -> DataServiceClient:
if self._service_client is None:
- self._service_client = DataServiceClient(self.service_url, self.service_ssl_dir)
+ self._service_client = DataServiceClient(endpoint_uri=self.service_url, ssl_directory=self.service_ssl_dir)
return self._service_client
diff --git a/python/lib/modeldata/dmod/modeldata/_version.py b/python/lib/modeldata/dmod/modeldata/_version.py
index ccf9e6286..1658609d0 100644
--- a/python/lib/modeldata/dmod/modeldata/_version.py
+++ b/python/lib/modeldata/dmod/modeldata/_version.py
@@ -1 +1 @@
-__version__ = '0.8.0'
\ No newline at end of file
+__version__ = '0.9.0'
\ No newline at end of file
diff --git a/python/lib/modeldata/dmod/modeldata/data/dataset.py b/python/lib/modeldata/dmod/modeldata/data/dataset.py
index aac52024b..fe9044d52 100644
--- a/python/lib/modeldata/dmod/modeldata/data/dataset.py
+++ b/python/lib/modeldata/dmod/modeldata/data/dataset.py
@@ -799,7 +799,8 @@ def filter(self, base_dataset: Dataset, restrictions: List[Union[ContinuousRestr
pass
@abstractmethod
- def get_data(self, dataset_name: str, item_name: str, **kwargs) -> Union[bytes, Any]:
+ def get_data(self, dataset_name: str, item_name: str, offset: Optional[int] = None, length: Optional[int] = None,
+ **kwargs) -> Union[bytes, Any]:
"""
Get data from this dataset.
@@ -812,6 +813,10 @@ def get_data(self, dataset_name: str, item_name: str, **kwargs) -> Union[bytes,
The dataset from which to get data.
item_name : str
The name of the object from which to get data.
+ offset : Optional[int]
+ Optional start byte position of object data.
+ length : Optional[int]
+ Optional number of bytes of object data from offset.
kwargs
Implementation-specific params for representing what data to get and how to get and deliver it.
@@ -861,6 +866,26 @@ def link_user(self, user: DatasetUser, dataset: Dataset) -> bool:
self._dataset_users[dataset.name].add(user.uuid)
return True
+ @abstractmethod
+    def get_file_stat(self, dataset_name: str, file_name: str, **kwargs) -> Dict[str, Any]:
+ """
+ Get the meta information about the given file.
+
+ Parameters
+ ----------
+ dataset_name : str
+ The name of the dataset containing the file of interest.
+ file_name : str
+ The name of the file of interest.
+ kwargs
+
+ Returns
+ -------
+ dict
+ Meta information about the given file, in dictionary form.
+ """
+ pass
+
@abstractmethod
def list_files(self, dataset_name: str, **kwargs) -> List[str]:
"""
diff --git a/python/lib/modeldata/dmod/modeldata/data/object_store_manager.py b/python/lib/modeldata/dmod/modeldata/data/object_store_manager.py
index 084492805..a33e5fc29 100644
--- a/python/lib/modeldata/dmod/modeldata/data/object_store_manager.py
+++ b/python/lib/modeldata/dmod/modeldata/data/object_store_manager.py
@@ -10,7 +10,7 @@
from minio.api import ObjectWriteResult
from minio.deleteobjects import DeleteObject
from pathlib import Path
-from typing import Dict, List, Optional, Set, Tuple
+from typing import Any, Dict, List, Optional, Set, Tuple
from uuid import UUID
@@ -61,8 +61,13 @@ def __init__(self, obj_store_host_str: str, access_key: Optional[str] = None, se
# For any buckets that have the standard serialized object (i.e., were for datasets previously), reload them
for bucket_name in self.list_buckets():
serialized_item = self._gen_dataset_serial_obj_name(bucket_name)
- if serialized_item in [o.object_name for o in self._client.list_objects(bucket_name)]:
+ try:
self.reload(reload_from=bucket_name, serialized_item=serialized_item)
+ except minio.error.S3Error as e:
+                # A "NoSuchKey" error just means this bucket has no serialized dataset object, so continue
+                # looping through and initializing the remaining buckets; re-raise any other S3 error
+ if e.code != "NoSuchKey":
+ raise e
except Exception as e:
self._errors.append(e)
# TODO: consider if we should not re-throw this (which would likely force us to ensure users checked this)
@@ -420,7 +425,32 @@ def delete_data(self, dataset_name: str, **kwargs) -> bool:
self._errors.extend(error_list)
return False
- def get_data(self, dataset_name: str, item_name: str, **kwargs) -> bytes:
+    def get_file_stat(self, dataset_name: str, file_name: str, **kwargs) -> Dict[str, Any]:
+ """
+ Get the meta information about the given file.
+
+ Parameters
+ ----------
+ dataset_name : str
+ The name of the dataset containing the file of interest.
+ file_name : str
+ The name of the file of interest.
+        kwargs
+            Implementation-specific keyword args.
+
+ Returns
+ -------
+ dict
+ Meta information about the given file, in dictionary form.
+ """
+ obj_stat = self._client.stat_object(dataset_name, file_name)
+ as_dict = dict()
+ as_dict["name"] = obj_stat.object_name
+ as_dict["size"] = obj_stat.size
+        # TODO: include more of the available object stat metadata (e.g., etag, last_modified) if useful
+ return as_dict
+
+ def get_data(self, dataset_name: str, item_name: str, offset: Optional[int] = None, length: Optional[int] = None,
+ **kwargs) -> bytes:
"""
Get data from this dataset.
@@ -434,15 +464,12 @@ def get_data(self, dataset_name: str, item_name: str, **kwargs) -> bytes:
The name of the dataset (i.e., bucket) from which to get data.
item_name : str
The name of the object from which to get data.
- kwargs
- Implementation-specific params for representing what data to get and how to get and deliver it.
-
- Keyword Args
- -------
- offset : int
+ offset : Optional[int]
Optional start byte position of object data.
- length : int
+ length : Optional[int]
Optional number of bytes of object data from offset.
+ kwargs
+ Implementation-specific params for representing what data to get and how to get and deliver it.
Returns
-------
@@ -452,8 +479,10 @@ def get_data(self, dataset_name: str, item_name: str, **kwargs) -> bytes:
if item_name not in self.list_files(dataset_name):
raise RuntimeError('Cannot get data for non-existing {} file in {} dataset'.format(item_name, dataset_name))
optional_params = dict()
- for key in [k for k in self.data_chunking_params if k in kwargs]:
- optional_params[key] = kwargs[key]
+ if offset is not None:
+ optional_params['offset'] = offset
+ if length is not None:
+ optional_params['length'] = length
response_object = self._client.get_object(bucket_name=dataset_name, object_name=item_name, **optional_params)
return response_object.data
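With offset and length now first-class parameters, callers can page through large objects instead of pulling them whole; under the hood these map directly to the offset/length arguments of minio's get_object. A hedged usage sketch, where the manager instance, dataset/item names, and chunk size are assumptions:

CHUNK_SIZE = 1024 * 1024  # 1 MiB per request


def read_in_chunks(manager, dataset_name: str, item_name: str) -> bytes:
    # Use the new get_file_stat to learn the total object size first
    total_size = manager.get_file_stat(dataset_name, item_name)["size"]
    chunks = []
    for offset in range(0, total_size, CHUNK_SIZE):
        length = min(CHUNK_SIZE, total_size - offset)
        chunks.append(manager.get_data(dataset_name=dataset_name, item_name=item_name,
                                       offset=offset, length=length))
    return b"".join(chunks)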
@@ -554,12 +583,14 @@ def reload(self, reload_from: str, serialized_item: Optional[str] = None) -> Dat
if serialized_item is None:
serialized_item = self._gen_dataset_serial_obj_name(reload_from)
+ response_obj = None
try:
response_obj = self._client.get_object(bucket_name=reload_from, object_name=serialized_item)
response_data = json.loads(response_obj.data.decode())
finally:
- response_obj.close()
- response_obj.release_conn()
+ if response_obj is not None:
+ response_obj.close()
+ response_obj.release_conn()
# If we can safely infer it, make sure the "type" key is set in cases when it is missing
if len(self.supported_dataset_types) == 1 and Dataset._KEY_TYPE not in response_data:
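The reload fix above guards against get_object itself raising, in which case response_obj was never bound and the finally block previously failed during cleanup. A minimal sketch of the same safe-cleanup pattern, assuming a minio-style client and response object:

import json


def load_serialized_dataset(client, bucket_name: str, object_name: str) -> dict:
    response = None
    try:
        response = client.get_object(bucket_name=bucket_name, object_name=object_name)
        return json.loads(response.data.decode())
    finally:
        # Only release the connection if get_object actually succeeded
        if response is not None:
            response.close()
            response.release_conn()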
diff --git a/python/lib/modeldata/setup.py b/python/lib/modeldata/setup.py
index c536a6b03..69bf05c84 100644
--- a/python/lib/modeldata/setup.py
+++ b/python/lib/modeldata/setup.py
@@ -14,7 +14,7 @@
author_email='',
url='',
license='',
- install_requires=['numpy>=1.20.1', 'pandas', 'geopandas', 'dmod-communication>=0.4.2', 'dmod-core>=0.1.0', 'minio',
+ install_requires=['numpy>=1.20.1', 'pandas', 'geopandas', 'dmod-communication>=0.9.1', 'dmod-core>=0.2.0', 'minio',
'aiohttp<=3.7.4', 'hypy@git+https://github.com/NOAA-OWP/hypy@master#egg=hypy&subdirectory=python'],
packages=find_namespace_packages(exclude=('tests', 'schemas', 'ssl', 'src'))
)
diff --git a/python/services/dataservice/dmod/dataservice/_version.py b/python/services/dataservice/dmod/dataservice/_version.py
index 290d7c60d..222c11cfd 100644
--- a/python/services/dataservice/dmod/dataservice/_version.py
+++ b/python/services/dataservice/dmod/dataservice/_version.py
@@ -1 +1 @@
-__version__ = '0.3.0'
\ No newline at end of file
+__version__ = '0.4.0'
\ No newline at end of file
diff --git a/python/services/dataservice/dmod/dataservice/service.py b/python/services/dataservice/dmod/dataservice/service.py
index 04e1fa0af..1d1c7c37d 100644
--- a/python/services/dataservice/dmod/dataservice/service.py
+++ b/python/services/dataservice/dmod/dataservice/service.py
@@ -684,9 +684,22 @@ def _process_query(self, message: DatasetManagementMessage) -> DatasetManagement
dataset_name = message.dataset_name
list_of_files = self.get_known_datasets()[dataset_name].manager.list_files(dataset_name)
return DatasetManagementResponse(action=message.management_action, success=True, dataset_name=dataset_name,
- reason='Obtained {} Items List',
+ reason='Obtained {} Items List'.format(dataset_name),
data={DatasetManagementResponse._DATA_KEY_QUERY_RESULTS: list_of_files})
- # TODO: (later) add support for messages with other query types also
+ elif query_type == QueryType.GET_SERIALIZED_FORM:
+ dataset_name = message.dataset_name
+ serialized_form = self.get_known_datasets()[dataset_name].to_dict()
+ return DatasetManagementResponse(action=message.management_action, success=True, dataset_name=dataset_name,
+ reason='Obtained serialized {} dataset'.format(dataset_name),
+ data={DatasetManagementResponse._DATA_KEY_QUERY_RESULTS: serialized_form})
+        elif query_type == QueryType.GET_DATASET_ITEMS:
+ dataset = self.get_known_datasets()[message.dataset_name]
+ mgr = dataset.manager
+ item_details: List[dict] = [mgr.get_file_stat(dataset.name, f) for f in mgr.list_files(dataset.name)]
+ return DatasetManagementResponse(action=message.management_action, success=True, dataset_name=dataset.name,
+ reason='Obtained file details for {} dataset'.format(dataset.name),
+ data={DatasetManagementResponse._DATA_KEY_QUERY_RESULTS: item_details})
+ # TODO: (later) add support for messages with other query types also
else:
            reason = 'Unsupported {} Query Type - {}'.format(DatasetQuery.__name__, query_type.name)
return DatasetManagementResponse(action=message.management_action, success=False, reason=reason)
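For GET_DATASET_ITEMS, the service assembles one stat dict per file via the manager's new get_file_stat and returns the list under the response's query-results key. An illustrative (not captured-from-service) example of the resulting payload shape; the literal "query_results" key name is an assumption standing in for DatasetManagementResponse._DATA_KEY_QUERY_RESULTS:

# Hypothetical example of the data carried by a successful GET_DATASET_ITEMS
# response: one dict per file, with the keys produced by get_file_stat above.
example_response_data = {
    "query_results": [
        {"name": "forcing_1.csv", "size": 1048576},
        {"name": "config.yml", "size": 2048},
    ]
}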
@@ -890,6 +903,14 @@ async def listener(self, websocket: WebSocketServerProtocol, path):
partial_indx = 0
elif inbound_message.management_action == ManagementAction.CREATE:
response = await self._async_process_dataset_create(message=inbound_message)
+ elif inbound_message.management_action == ManagementAction.REQUEST_DATA and inbound_message.blk_start is not None:
+ manager = self.get_known_datasets()[inbound_message.dataset_name].manager
+ raw_data = manager.get_data(dataset_name=inbound_message.dataset_name,
+ item_name=inbound_message.data_location,
+ offset=inbound_message.blk_start, length=inbound_message.blk_size)
+ response = DatasetManagementResponse(success=raw_data is not None,
+ action=inbound_message.management_action,
+ data=raw_data, reason="Data Block Retrieve Complete")
elif inbound_message.management_action == ManagementAction.REQUEST_DATA:
response = await self._async_process_data_request(message=inbound_message, websocket=websocket)
elif inbound_message.management_action == ManagementAction.ADD_DATA:
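The new branch above serves block-wise REQUEST_DATA messages directly in the listener, slicing the object with the offset/length support added to get_data. A hedged sketch of a client driving that path over a websocket; apart from blk_start, blk_size, and data_location, which the listener reads, the message serialization here is an assumption:

import asyncio
import json
import websockets


async def request_block(uri: str, dataset: str, item: str, start: int, size: int) -> dict:
    # Request a single byte range of one item within a dataset
    async with websockets.connect(uri) as ws:
        message = {
            "action": "REQUEST_DATA",
            "dataset_name": dataset,
            "data_location": item,  # the file/object within the dataset
            "blk_start": start,     # byte offset where the block begins
            "blk_size": size,       # number of bytes to return
        }
        await ws.send(json.dumps(message))
        return json.loads(await ws.recv())


# Example: fetch the first 1 MiB of a hypothetical forcing file
# asyncio.run(request_block("wss://localhost:3012", "forcings-ds", "forcing_1.csv", 0, 1024 * 1024))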
diff --git a/python/services/dataservice/setup.py b/python/services/dataservice/setup.py
index 32dc10aab..1d87d90f7 100644
--- a/python/services/dataservice/setup.py
+++ b/python/services/dataservice/setup.py
@@ -14,7 +14,7 @@
author_email='',
url='',
license='',
- install_requires=['dmod-core>=0.1.1', 'dmod-communication>=0.7.1', 'dmod-scheduler>=0.7.0', 'dmod-modeldata>=0.8.0',
+ install_requires=['dmod-core>=0.2.0', 'dmod-communication>=0.9.1', 'dmod-scheduler>=0.7.0', 'dmod-modeldata>=0.9.0',
'redis'],
packages=find_namespace_packages(exclude=('tests', 'test', 'deprecated', 'conf', 'schemas', 'ssl', 'src'))
)
diff --git a/python/services/requestservice/dmod/requestservice/_version.py b/python/services/requestservice/dmod/requestservice/_version.py
index 9bdd4d277..83e147c62 100644
--- a/python/services/requestservice/dmod/requestservice/_version.py
+++ b/python/services/requestservice/dmod/requestservice/_version.py
@@ -1 +1 @@
-__version__ = '0.5.0'
\ No newline at end of file
+__version__ = '0.6.0'
\ No newline at end of file
diff --git a/python/services/requestservice/setup.py b/python/services/requestservice/setup.py
index 06618889d..af7ed1e0a 100644
--- a/python/services/requestservice/setup.py
+++ b/python/services/requestservice/setup.py
@@ -14,7 +14,7 @@
author_email='',
url='',
license='',
- install_requires=['websockets', 'dmod-core>=0.1.0', 'dmod-communication>=0.7.0', 'dmod-access>=0.2.0',
+ install_requires=['websockets', 'dmod-core>=0.2.0', 'dmod-communication>=0.8.0', 'dmod-access>=0.2.0',
'dmod-externalrequests>=0.3.0'],
packages=find_namespace_packages(exclude=('tests', 'schemas', 'ssl', 'src'))
)