diff --git a/back-end/pyworkflow/pyworkflow/node.py b/back-end/pyworkflow/pyworkflow/node.py
index 009bf31..a541956 100644
--- a/back-end/pyworkflow/pyworkflow/node.py
+++ b/back-end/pyworkflow/pyworkflow/node.py
@@ -48,8 +48,6 @@ def get_execution_options(self, workflow, flow_nodes):
"""
execution_options = dict()
- # TODO: Can we iterate through flow_vars instead?
- # If none are included, we can just return `self.options`.
for key, option in self.options.items():
if key in flow_nodes:
diff --git a/back-end/pyworkflow/pyworkflow/node_factory.py b/back-end/pyworkflow/pyworkflow/node_factory.py
index cd1976e..a257963 100644
--- a/back-end/pyworkflow/pyworkflow/node_factory.py
+++ b/back-end/pyworkflow/pyworkflow/node_factory.py
@@ -3,8 +3,7 @@
def node_factory(node_info):
- # Create a new Node with info
- # TODO: should perform error-checking or add default values if missing
+ """Create a new Node with info."""
node_type = node_info.get('node_type')
node_key = node_info.get('node_key')
diff --git a/back-end/pyworkflow/pyworkflow/nodes/flow_control/integer_input.py b/back-end/pyworkflow/pyworkflow/nodes/flow_control/integer_input.py
index 0ecb0cf..7b10bab 100644
--- a/back-end/pyworkflow/pyworkflow/nodes/flow_control/integer_input.py
+++ b/back-end/pyworkflow/pyworkflow/nodes/flow_control/integer_input.py
@@ -1,11 +1,11 @@
-from pyworkflow.node import FlowNode, NodeException
+from pyworkflow.node import FlowNode
from pyworkflow.parameters import *
class IntegerNode(FlowNode):
- """StringNode object
+ """IntegerNode object
- Allows for Strings to replace 'string' fields in Nodes
+    Allows for Integers to replace numeric fields in Nodes
"""
name = "Integer Input"
num_in = 0
diff --git a/back-end/pyworkflow/pyworkflow/nodes/io/read_csv.py b/back-end/pyworkflow/pyworkflow/nodes/io/read_csv.py
index f829d61..b905807 100644
--- a/back-end/pyworkflow/pyworkflow/nodes/io/read_csv.py
+++ b/back-end/pyworkflow/pyworkflow/nodes/io/read_csv.py
@@ -5,13 +5,10 @@
class ReadCsvNode(IONode):
- """ReadCsvNode
-
- Reads a CSV file into a pandas DataFrame.
+ """Reads a CSV file into a pandas DataFrame.
Raises:
- NodeException: any error reading CSV file, converting
- to DataFrame.
+        NodeException: any error reading the CSV file or converting it to a DataFrame.
"""
name = "Read CSV"
num_in = 0
diff --git a/back-end/pyworkflow/pyworkflow/nodes/io/write_csv.py b/back-end/pyworkflow/pyworkflow/nodes/io/write_csv.py
index 0ca54cf..02adc39 100644
--- a/back-end/pyworkflow/pyworkflow/nodes/io/write_csv.py
+++ b/back-end/pyworkflow/pyworkflow/nodes/io/write_csv.py
@@ -5,13 +5,10 @@
class WriteCsvNode(IONode):
- """WriteCsvNode
-
- Writes the current DataFrame to a CSV file.
+ """Writes the current DataFrame to a CSV file.
Raises:
- NodeException: any error writing CSV file, converting
- from DataFrame.
+        NodeException: any error writing the CSV file or converting from the DataFrame.
"""
name = "Write CSV"
num_in = 1
diff --git a/back-end/pyworkflow/pyworkflow/nodes/manipulation/filter.py b/back-end/pyworkflow/pyworkflow/nodes/manipulation/filter.py
index 0ee1daa..6f8b66d 100644
--- a/back-end/pyworkflow/pyworkflow/nodes/manipulation/filter.py
+++ b/back-end/pyworkflow/pyworkflow/nodes/manipulation/filter.py
@@ -5,6 +5,14 @@
class FilterNode(ManipulationNode):
+ """Subset the DataFrame rows or columns according to the specified index labels.
+
+ pandas API reference:
+ https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.DataFrame.filter.html
+
+ Raises:
+        NodeException: any error filtering the DataFrame.
+ """
name = "Filter"
num_in = 1
num_out = 1
@@ -31,7 +39,13 @@ class FilterNode(ManipulationNode):
def execute(self, predecessor_data, flow_vars):
try:
input_df = pd.DataFrame.from_dict(predecessor_data[0])
- output_df = pd.DataFrame.filter(input_df, **self.options)
+ output_df = pd.DataFrame.filter(
+ input_df,
+ items=flow_vars['items'].get_value(),
+ like=flow_vars['like'].get_value(),
+ regex=flow_vars['regex'].get_value(),
+ axis=flow_vars['axis'].get_value(),
+ )
return output_df.to_json()
except Exception as e:
raise NodeException('filter', str(e))
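For reference, the keyword arguments now passed explicitly map directly onto pandas.DataFrame.filter; a minimal standalone sketch of that call (the sample frame is illustrative only):

import pandas as pd

df = pd.DataFrame({'one': [1, 2], 'two': [3, 4], 'three': [5, 6]})

# Keep only the named columns (axis=1 filters columns, axis=0 filters row labels).
print(df.filter(items=['one', 'three'], axis=1))

# Keep columns whose names match a regular expression.
print(df.filter(regex='^t', axis=1))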
diff --git a/back-end/pyworkflow/pyworkflow/nodes/manipulation/join.py b/back-end/pyworkflow/pyworkflow/nodes/manipulation/join.py
index f0ae745..e5507a7 100644
--- a/back-end/pyworkflow/pyworkflow/nodes/manipulation/join.py
+++ b/back-end/pyworkflow/pyworkflow/nodes/manipulation/join.py
@@ -5,12 +5,23 @@
class JoinNode(ManipulationNode):
+ """Merge DataFrame or named Series objects with a database-style join.
+
+ pandas API reference:
+ https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.merge.html
+
+ Raises:
+        NodeException: any error merging the DataFrames.
+ """
name = "Joiner"
num_in = 2
num_out = 1
OPTIONS = {
- "on": StringParameter("Join Column", docstring="Name of column to join on")
+ "on": StringParameter(
+ "Join Column",
+ docstring="Name of column to join on"
+ )
}
def execute(self, predecessor_data, flow_vars):
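The "on" option names the column handed to pandas.merge, per the API reference in the docstring above; a minimal sketch of that join (the sample frames are illustrative only):

import pandas as pd

left = pd.DataFrame({'key': ['a', 'b'], 'x': [1, 2]})
right = pd.DataFrame({'key': ['a', 'c'], 'y': [3, 4]})

# Database-style inner join on the shared 'key' column; only 'a' appears on both sides.
print(pd.merge(left, right, on='key'))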
diff --git a/back-end/pyworkflow/pyworkflow/nodes/manipulation/pivot.py b/back-end/pyworkflow/pyworkflow/nodes/manipulation/pivot.py
index b7fe43f..cc8b71f 100644
--- a/back-end/pyworkflow/pyworkflow/nodes/manipulation/pivot.py
+++ b/back-end/pyworkflow/pyworkflow/nodes/manipulation/pivot.py
@@ -5,6 +5,14 @@
class PivotNode(ManipulationNode):
+ """Create a spreadsheet-style pivot table as a DataFrame.
+
+    pandas API reference:
+    https://pandas.pydata.org/pandas-docs/stable/reference/api/pandas.pivot_table.html
+
+    Raises:
+        NodeException: any error pivoting the DataFrame.
+ """
name = "Pivoting"
num_in = 1
num_out = 3
@@ -56,7 +64,18 @@ class PivotNode(ManipulationNode):
def execute(self, predecessor_data, flow_vars):
try:
input_df = pd.DataFrame.from_dict(predecessor_data[0])
- output_df = pd.DataFrame.pivot_table(input_df, **self.options)
+ output_df = pd.DataFrame.pivot_table(
+ input_df,
+ index=flow_vars['index'].get_value(),
+ values=flow_vars['values'].get_value(),
+ columns=flow_vars['columns'].get_value(),
+ aggfunc=flow_vars['aggfunc'].get_value(),
+ fill_value=flow_vars['fill_value'].get_value(),
+ margins=flow_vars['margins'].get_value(),
+ dropna=flow_vars['dropna'].get_value(),
+ margins_name=flow_vars['margins_name'].get_value(),
+ observed=flow_vars['observed'].get_value(),
+ )
return output_df.to_json()
except Exception as e:
raise NodeException('pivot', str(e))
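As with the filter node, every option is now forwarded explicitly to pandas.pivot_table; a minimal standalone sketch (the sample data is illustrative only):

import pandas as pd

df = pd.DataFrame({
    'city': ['NY', 'NY', 'SF', 'SF'],
    'year': [2019, 2020, 2019, 2020],
    'sales': [10, 20, 30, 40],
})

# Aggregate sales per city and year; missing combinations are filled with 0.
print(pd.pivot_table(df, index='city', columns='year', values='sales',
                     aggfunc='sum', fill_value=0))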
diff --git a/back-end/pyworkflow/pyworkflow/tests/test_workflow.py b/back-end/pyworkflow/pyworkflow/tests/test_workflow.py
index 8ecffa7..2b2574e 100644
--- a/back-end/pyworkflow/pyworkflow/tests/test_workflow.py
+++ b/back-end/pyworkflow/pyworkflow/tests/test_workflow.py
@@ -53,7 +53,7 @@ def test_workflow_filename(self):
def test_workflow_from_json(self):
new_workflow = Workflow("Untitled", root_dir="/tmp")
- workflow_copy = Workflow.from_json(self.workflow.to_session_dict())
+ workflow_copy = Workflow.from_json(self.workflow.to_json())
self.assertEqual(new_workflow.name, workflow_copy.name)
@@ -62,16 +62,17 @@ def test_workflow_from_json_key_error(self):
new_workflow = Workflow.from_json(dict())
def test_empty_workflow_to_session(self):
- new_workflow = Workflow("Untitled", root_dir="/tmp")
- saved_workflow = new_workflow.to_session_dict()
+ new_workflow = Workflow("Untitled", root_dir="/tmp", node_dir=os.path.join(os.getcwd(), 'nodes'))
+ saved_workflow = new_workflow.to_json()
workflow_to_compare = {
'name': 'Untitled',
'root_dir': '/tmp',
+ 'node_dir': os.path.join(os.getcwd(), 'nodes'),
'graph': Workflow.to_graph_json(new_workflow.graph),
'flow_vars': Workflow.to_graph_json(new_workflow.flow_vars),
}
- self.assertDictEqual(new_workflow.to_session_dict(), workflow_to_compare)
+ self.assertDictEqual(new_workflow.to_json(), workflow_to_compare)
##########################
# Node lists
diff --git a/back-end/pyworkflow/pyworkflow/workflow.py b/back-end/pyworkflow/pyworkflow/workflow.py
index 91100e9..350850f 100644
--- a/back-end/pyworkflow/pyworkflow/workflow.py
+++ b/back-end/pyworkflow/pyworkflow/workflow.py
@@ -40,6 +40,21 @@ def __init__(self, name="Untitled", root_dir=DEFAULT_ROOT_PATH,
except OSError as e:
raise WorkflowException('init workflow', str(e))
+ ##################
+ # GETTERS/SETTERS
+ ##################
+ @property
+ def name(self):
+ return self._name
+
+ @name.setter
+ def name(self, name: str):
+ self._name = name
+
+ @property
+ def root_dir(self):
+ return self._root_dir
+
@property
def node_dir(self):
return self._node_dir
@@ -48,19 +63,139 @@ def node_dir(self):
def graph(self):
return self._graph
+ @property
+ def flow_vars(self):
+ return self._flow_vars
+
+ @property
+ def filename(self):
+ return self.name + '.json'
+
+ @staticmethod
+ def generate_file_name(workflow, node_id):
+ """Generates a file name for saving intermediate execution data.
+
+        Current format is 'workflow_name-node_id'
+
+ Args:
+ workflow: the workflow
+            node_id: the id of the Node whose data is being saved
+ """
+ return f"{workflow.name}-{node_id}"
+
def path(self, file_name):
return os.path.join(self.root_dir, file_name)
def node_path(self, node_type, file_name):
return os.path.join(self.node_dir, node_type, file_name)
- @property
- def root_dir(self):
- return self._root_dir
+ ##################
+ # EDGE OPERATIONS
+ ##################
+ def add_edge(self, node_from: Node, node_to: Node):
+ """ Add a Node object to the graph.
- @property
- def flow_vars(self):
- return self._flow_vars
+ Args:
+ node_from - The Node the edge originates from
+ node_to - The Node the edge ends at
+
+ Returns:
+ Tuple representing the new Edge (from, to)
+ """
+ # Prevent duplicate edges between the same two nodes
+ # TODO: This may be incorrect usage for a `node_to` that has multi-in
+ from_id = node_from.node_id
+ to_id = node_to.node_id
+
+ if self.graph.has_edge(from_id, to_id):
+            raise WorkflowException('add_edge', 'Edge between nodes already exists.')
+
+ self.graph.add_edge(from_id, to_id)
+
+ return (from_id, to_id)
+
+ def remove_edge(self, node_from: Node, node_to: Node):
+ """ Remove a node from the graph.
+
+ Returns:
+ Tuple representing the removed Edge (from, to)
+
+ Raises:
+            WorkflowException: on issue with removing edge from graph
+ """
+ from_id = node_from.node_id
+ to_id = node_to.node_id
+
+ try:
+ self.graph.remove_edge(from_id, to_id)
+ except nx.NetworkXError:
+ raise WorkflowException('remove_edge', 'Edge from %s to %s does not exist in graph.' % (from_id, to_id))
+
+ return (from_id, to_id)
+
+ ##################
+ # NODE OPERATIONS
+ ##################
+ def get_all_flow_var_options(self, node_id):
+ """Retrieve all FlowNode options for a specified Node.
+
+ A Node can use all global FlowNodes, and any connected local FlowNodes
+ for variable substitution.
+
+ Args:
+ node_id: The Node to GET
+
+ Returns:
+ list of all FlowNode objects, converted to JSON
+ """
+ # Add global FlowNodes
+ graph_data = Workflow.to_graph_json(self.flow_vars)
+ flow_variables = graph_data['nodes']
+
+ # Append local FlowNodes
+ for predecessor_id in self.get_node_predecessors(node_id):
+ node = self.get_node(predecessor_id)
+
+ if node.node_type == 'flow_control':
+ flow_variables.append(node.to_json())
+
+ return flow_variables
+
+ def get_flow_var(self, node_id):
+ """Retrieves a global flow variable from workflow, if exists
+
+ Return:
+ FlowNode object, if one exists. Otherwise, None.
+ """
+ if self.flow_vars.has_node(node_id) is not True:
+ return None
+
+ node_info = self.flow_vars.nodes[node_id]
+ return node_factory(node_info)
+
+ def get_node(self, node_id):
+ """Retrieves Node from workflow, if exists
+
+ Return:
+ Node object, if one exists. Otherwise, None.
+ """
+ if self.graph.has_node(node_id) is not True:
+ return None
+
+ node_info = self.graph.nodes[node_id]
+ return node_factory(node_info)
+
+ def get_node_predecessors(self, node_id):
+ try:
+ return list(self.graph.predecessors(node_id))
+ except nx.NetworkXError as e:
+ raise WorkflowException('get node predecessors', str(e))
+
+ def get_node_successors(self, node_id):
+ try:
+ return list(self.graph.successors(node_id))
+ except nx.NetworkXError as e:
+ raise WorkflowException('get node successors', str(e))
def get_packaged_nodes(self, root_path=None, node_type=None):
"""Retrieve list of Nodes available to the Workflow.
@@ -128,54 +263,20 @@ def get_packaged_nodes(self, root_path=None, node_type=None):
# Otherwise, return list containing all Nodes of a `node_type`
return nodes
- def get_node(self, node_id):
- """Retrieves Node from workflow, if exists
-
- Return:
- Node object, if one exists. Otherwise, None.
- """
- if self._graph.has_node(node_id) is not True:
- return None
-
- node_info = self.graph.nodes[node_id]
- return node_factory(node_info)
-
- def get_flow_var(self, node_id):
- """Retrieves a global flow variable from workflow, if exists
-
- Return:
- FlowNode object, if one exists. Otherwise, None.
- """
- if self.flow_vars.has_node(node_id) is not True:
- return None
-
- node_info = self.flow_vars.nodes[node_id]
- return node_factory(node_info)
-
- def get_all_flow_var_options(self, node_id):
- """Retrieve all FlowNode options for a specified Node.
-
- A Node can use all global FlowNodes, and any connected local FlowNodes
- for variable substitution.
-
- Args:
- node_id: The Node to GET
+ def remove_node(self, node):
+ """ Remove a node from the graph.
- Returns:
- list of all FlowNode objects, converted to JSON
+ Raises:
+ WorkflowException: on issue with removing node from graph
"""
- # Add global FlowNodes
- graph_data = Workflow.to_graph_json(self.flow_vars)
- flow_variables = graph_data['nodes']
-
- # Append local FlowNodes
- for predecessor_id in self.get_node_predecessors(node_id):
- node = self.get_node(predecessor_id)
-
- if node.node_type == 'flow_control':
- flow_variables.append(node.to_json())
+ try:
+ # Select the correct graph to modify
+ graph = self.flow_vars if node.is_global else self.graph
- return flow_variables
+ graph.remove_node(node.node_id)
+ return node
+ except (AttributeError, nx.NetworkXError):
+ raise WorkflowException('remove_node', 'Node does not exist in graph.')
def update_or_add_node(self, node: Node):
""" Update or add a Node object to the graph.
@@ -200,86 +301,9 @@ def update_or_add_node(self, node: Node):
return node
- @property
- def name(self):
- return self._name
-
- @name.setter
- def name(self, name: str):
- self._name = name
-
- @property
- def filename(self):
- return self.name + '.json'
-
- def add_edge(self, node_from: Node, node_to: Node):
- """ Add a Node object to the graph.
-
- Args:
- node_from - The Node the edge originates from
- node_to - The Node the edge ends at
-
- Returns:
- Tuple representing the new Edge (from, to)
- """
- # Prevent duplicate edges between the same two nodes
- # TODO: This may be incorrect usage for a `node_to` that has multi-in
- from_id = node_from.node_id
- to_id = node_to.node_id
-
- if self.graph.has_edge(from_id, to_id):
- raise WorkflowException('add_node', 'Edge between nodes already exists.')
-
- self.graph.add_edge(from_id, to_id)
-
- return (from_id, to_id)
-
- def remove_edge(self, node_from: Node, node_to: Node):
- """ Remove a node from the graph.
-
- Returns:
- Tuple representing the removed Edge (from, to)
-
- Raises:
- WorkflowException: on issue with removing node from graph
- """
- from_id = node_from.node_id
- to_id = node_to.node_id
-
- try:
- self.graph.remove_edge(from_id, to_id)
- except nx.NetworkXError:
- raise WorkflowException('remove_edge', 'Edge from %s to %s does not exist in graph.' % (from_id, to_id))
-
- return (from_id, to_id)
-
- def remove_node(self, node):
- """ Remove a node from the graph.
-
- Raises:
- WorkflowException: on issue with removing node from graph
- """
- try:
- # Select the correct graph to modify
- graph = self.flow_vars if node.is_global else self.graph
-
- graph.remove_node(node.node_id)
- return node
- except (AttributeError, nx.NetworkXError):
- raise WorkflowException('remove_node', 'Node does not exist in graph.')
-
- def get_node_successors(self, node_id):
- try:
- return list(self.graph.successors(node_id))
- except nx.NetworkXError as e:
- raise WorkflowException('get node successors', str(e))
-
- def get_node_predecessors(self, node_id):
- try:
- return list(self.graph.predecessors(node_id))
- except nx.NetworkXError as e:
- raise WorkflowException('get node predecessors', str(e))
-
+ ####################
+ # EXECUTION METHODS
+ ####################
def execute(self, node_id):
"""Execute a single Node in the graph.
@@ -319,6 +343,14 @@ def execute(self, node_id):
return node_to_execute
+ def execution_order(self):
+ try:
+ return list(nx.topological_sort(self.graph))
+ except (nx.NetworkXError, nx.NetworkXUnfeasible) as e:
+ raise WorkflowException('execution order', str(e))
+ except RuntimeError as e:
+ raise WorkflowException('execution order', 'The graph was changed while generating the execution order')
+
def load_flow_nodes(self, option_replace):
"""Construct dict of FlowNodes indexed by option name.
@@ -393,26 +425,9 @@ def load_input_data(self, node_id):
return input_data
- def execution_order(self):
- try:
- return list(nx.topological_sort(self.graph))
- except (nx.NetworkXError, nx.NetworkXUnfeasible) as e:
- raise WorkflowException('execution order', str(e))
- except RuntimeError as e:
- raise WorkflowException('execution order', 'The graph was changed while generating the execution order')
-
- @staticmethod
- def upload_file(uploaded_file, to_open):
- try:
- # TODO: Change to a stream/other method for large files?
- with open(to_open, 'wb') as f:
- f.write(uploaded_file.read())
-
- uploaded_file.close()
- return to_open
- except OSError as e:
- raise WorkflowException('upload_file', str(e))
-
+ ##################
+ # FILE I/O
+ ##################
def download_file(self, node_id):
node = self.get_node(node_id)
if node is None:
@@ -434,30 +449,6 @@ def download_file(self, node_id):
except OSError as e:
raise WorkflowException('download_file', str(e))
- @staticmethod
- def store_node_data(workflow, node_id, data):
- """Store Node data
-
- Writes the current DataFrame to disk in JSON format.
-
- Args:
- workflow: The Workflow that stores the graph.
- node_id: The Node which contains a DataFrame to save.
- data: A pandas DataFrame converted to JSON.
-
- Returns:
-
- """
- file_name = Workflow.generate_file_name(workflow, node_id)
- file_path = workflow.path(file_name)
-
- try:
- with open(file_path, 'w') as f:
- f.write(data)
- return file_name
- except Exception as e:
- return None
-
def retrieve_node_data(self, node_to_retrieve):
"""Retrieve Node data
@@ -487,32 +478,44 @@ def retrieve_node_data(self, node_to_retrieve):
raise WorkflowException('retrieve node data', str(e))
@staticmethod
- def read_graph_json(json_data):
- """Deserialize JSON NetworkX graph
+ def store_node_data(workflow, node_id, data):
+ """Store Node data
+
+ Writes the current DataFrame to disk in JSON format.
Args:
- json_data: JSON data from which to read JSON-serialized graph
+ workflow: The Workflow that stores the graph.
+ node_id: The Node which contains a DataFrame to save.
+ data: A pandas DataFrame converted to JSON.
Returns:
- NetworkX DiGraph object
- Raises:
- NetworkXError: on issue with loading JSON graph data
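+            The name of the file written, or None if the write failed.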
"""
- return nx.readwrite.json_graph.node_link_graph(json_data)
+ file_name = Workflow.generate_file_name(workflow, node_id)
+ file_path = workflow.path(file_name)
- @staticmethod
- def generate_file_name(workflow, node_id):
- """Generates a file name for saving intermediate execution data.
+ try:
+ with open(file_path, 'w') as f:
+ f.write(data)
+ return file_name
+ except Exception as e:
+ return None
- Current format is 'workflow_name - node_id'
+ @staticmethod
+ def upload_file(uploaded_file, to_open):
+ try:
+ # TODO: Change to a stream/other method for large files?
+ with open(to_open, 'wb') as f:
+ f.write(uploaded_file.read())
- Args:
- workflow: the workflow
- node_id: the id of the workflow
- """
- return f"{workflow.name}-{node_id}"
+ uploaded_file.close()
+ return to_open
+ except OSError as e:
+ raise WorkflowException('upload_file', str(e))
+ ############################
+ # WORKFLOW (DE)SERIALIZATION
+ ############################
@classmethod
def from_json(cls, json_data):
"""Load Workflow from JSON data.
@@ -528,33 +531,63 @@ def from_json(cls, json_data):
malformed NetworkX graph data (NetworkXError)
"""
try:
- name = json_data['name']
- root_dir = json_data['root_dir']
- graph = Workflow.read_graph_json(json_data['graph'])
- flow_vars = Workflow.read_graph_json(json_data['flow_vars'])
-
- return cls(name=name, root_dir=root_dir, graph=graph, flow_vars=flow_vars)
+ return cls(
+ name=json_data['name'],
+ root_dir=json_data['root_dir'],
+ node_dir=json_data['node_dir'],
+ graph=Workflow.read_graph_json(json_data['graph']),
+ flow_vars=Workflow.read_graph_json(json_data['flow_vars']),
+ )
except KeyError as e:
raise WorkflowException('from_json', str(e))
except nx.NetworkXError as e:
raise WorkflowException('from_json', str(e))
+ def to_json(self):
+ """Save Workflow information in JSON format."""
+ try:
+ return {
+ 'name': self.name,
+ 'root_dir': self.root_dir,
+ 'node_dir': self.node_dir,
+ 'graph': Workflow.to_graph_json(self.graph),
+ 'flow_vars': Workflow.to_graph_json(self.flow_vars),
+ }
+ except nx.NetworkXError as e:
+ raise WorkflowException('to_json', str(e))
+
+ ####################
+ # NetworkX GRAPH I/O
+ ####################
+ @staticmethod
+ def read_graph_json(json_data):
+ """Deserialize JSON NetworkX graph
+
+ Args:
+ json_data: JSON data from which to read JSON-serialized graph
+
+ Returns:
+ NetworkX DiGraph object
+
+ Raises:
+ NetworkXError: on issue with loading JSON graph data
+ """
+ return nx.readwrite.json_graph.node_link_graph(json_data)
+
@staticmethod
def to_graph_json(graph):
- return nx.readwrite.json_graph.node_link_data(graph)
+ """Serialize JSON NetworkX graph
+
+ Args:
+ graph: NetworkX graph to save to JSON
- def to_session_dict(self):
- """Store Workflow information in the Django session.
+ Returns:
+ JSON representation of NetworkX graph
+
+ Raises:
+ NetworkXError: on issue with saving graph to JSON
"""
- try:
- out = dict()
- out['name'] = self.name
- out['root_dir'] = self.root_dir
- out['graph'] = Workflow.to_graph_json(self.graph)
- out['flow_vars'] = Workflow.to_graph_json(self.flow_vars)
- return out
- except nx.NetworkXError as e:
- raise WorkflowException('to_session_dict', str(e))
+ return nx.readwrite.json_graph.node_link_data(graph)
class WorkflowUtils:
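With to_session_dict renamed to to_json and node_dir now carried on both sides, serialization round-trips symmetrically; a minimal sketch, assuming the pyworkflow package is importable and the default node_dir resolves on the host:

from pyworkflow import Workflow

wf = Workflow(name='example', root_dir='/tmp')

payload = wf.to_json()                  # name, root_dir, node_dir, graph, flow_vars
restored = Workflow.from_json(payload)

assert restored.name == wf.name
assert restored.root_dir == wf.root_dir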
diff --git a/back-end/vp/node/views.py b/back-end/vp/node/views.py
index be5302a..03f60d0 100644
--- a/back-end/vp/node/views.py
+++ b/back-end/vp/node/views.py
@@ -2,7 +2,7 @@
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
-from pyworkflow import Workflow, WorkflowException, Node, NodeException, node_factory, ParameterValidationError
+from pyworkflow import WorkflowException, NodeException, ParameterValidationError, node_factory
from rest_framework.decorators import api_view
from drf_yasg.utils import swagger_auto_schema
@@ -86,9 +86,9 @@ def node(request):
})
@api_view(['POST', 'DELETE'])
def handle_edge(request, node_from_id, node_to_id):
- """ Add new edge to the graph
+ """Add new edge to the graph
- Creates a new edge from node_from_id to node_to_id.
+ Creates a new edge from node_from_id to node_to_id.
"""
# Check if the graph contains the requested Node
node_from = request.pyworkflow.get_node(node_from_id)
@@ -126,7 +126,7 @@ def handle_edge(request, node_from_id, node_to_id):
400: 'No file specified',
404: 'Node/graph not found'
})
-@swagger_auto_schema(method='post',
+@swagger_auto_schema(method='patch',
operation_summary='Update a node from the graph',
operation_description='Updates a node from the graph.',
responses={
@@ -144,10 +144,10 @@ def handle_edge(request, node_from_id, node_to_id):
405: 'Method not allowed',
500: 'Error processing Node change'
})
-@api_view(['GET', 'POST', 'DELETE'])
+@api_view(['GET', 'PATCH', 'DELETE'])
@csrf_exempt
def handle_node(request, node_id):
- """ Retrieve, update, or delete a Node from the graph
+ """Retrieve, update, or delete a Node from the graph
Returns:
200 - Node was found; data in JSON format
@@ -173,7 +173,7 @@ def handle_node(request, node_id):
"retrieved_node": retrieved_node.to_json(),
"flow_variables": request.pyworkflow.get_all_flow_var_options(node_id),
}, safe=False)
- elif request.method == 'POST':
+ elif request.method == 'PATCH':
updated_node = create_node(request)
# Nodes need to be the same type to update
diff --git a/back-end/vp/vp/views.py b/back-end/vp/vp/views.py
index ebe950d..2d6428d 100644
--- a/back-end/vp/vp/views.py
+++ b/back-end/vp/vp/views.py
@@ -1,12 +1,9 @@
from django.http import JsonResponse
from rest_framework.decorators import api_view
from drf_yasg.utils import swagger_auto_schema
-from pyworkflow import Node
-from modulefinder import ModuleFinder
-
-@swagger_auto_schema(method='get', responses={200:'JSON response with data'})
+@swagger_auto_schema(method='get', responses={200: 'JSON response with data'})
@api_view(['GET'])
def info(request):
"""Retrieve app info.
diff --git a/back-end/vp/workflow/middleware.py b/back-end/vp/workflow/middleware.py
index f6b0893..24f8fd7 100644
--- a/back-end/vp/workflow/middleware.py
+++ b/back-end/vp/workflow/middleware.py
@@ -43,6 +43,6 @@ def __call__(self, request):
# Request should have 'pyworkflow' attribute, but do not crash if not
if hasattr(request, 'pyworkflow'):
# Save Workflow back to session
- request.session.update(request.pyworkflow.to_session_dict())
+ request.session.update(request.pyworkflow.to_json())
return response
diff --git a/back-end/vp/workflow/views.py b/back-end/vp/workflow/views.py
index 98a2b04..42e4785 100644
--- a/back-end/vp/workflow/views.py
+++ b/back-end/vp/workflow/views.py
@@ -1,6 +1,5 @@
import os
import json
-import sys
from django.http import JsonResponse, HttpResponse
from django.conf import settings
@@ -8,8 +7,6 @@
from pyworkflow import Workflow, WorkflowException
from drf_yasg.utils import swagger_auto_schema
-from modulefinder import ModuleFinder
-
@swagger_auto_schema(method='post',
operation_summary='Create a new workflow.',
@@ -30,8 +27,11 @@ def new_workflow(request):
workflow_id = json.loads(request.body)
# Create new Workflow
- request.pyworkflow = Workflow(name=workflow_id['id'], root_dir=settings.MEDIA_ROOT)
- request.session.update(request.pyworkflow.to_session_dict())
+ request.pyworkflow = Workflow(
+ name=workflow_id['id'],
+ root_dir=settings.MEDIA_ROOT
+ )
+ request.session.update(request.pyworkflow.to_json())
return JsonResponse(Workflow.to_graph_json(request.pyworkflow.graph))
except (json.JSONDecodeError, KeyError) as e:
@@ -40,7 +40,9 @@ def new_workflow(request):
@swagger_auto_schema(method='post',
operation_summary='Open workflow from file.',
- operation_description='Loads a JSON file from disk and translates into Workflow object and JSON object of front-end',
+    operation_description='Loads a JSON file from disk and '
+                          'translates it into a Workflow object '
+                          'and the front-end graph JSON',
responses={
200: 'Workflow representation in JSON',
400: 'No file specified',
@@ -56,7 +58,7 @@ def open_workflow(request):
Args:
request: Django request Object, should follow the pattern:
{
- react: {react-diagrams JSON},
+ ui-graph: {JSON representation of visual graph},
pyworkflow: {
name: Workflow name,
root_dir: File storage,
@@ -67,7 +69,7 @@ def open_workflow(request):
Raises:
JSONDecodeError: invalid JSON data
- KeyError: request missing either 'react' or 'pyworkflow' data
+ KeyError: request missing either 'ui-graph' or 'pyworkflow' data
WorkflowException: error loading JSON into NetworkX DiGraph
Returns:
@@ -83,14 +85,14 @@ def open_workflow(request):
combined_json = json.load(uploaded_file)
request.pyworkflow = Workflow.from_json(combined_json['pyworkflow'])
- request.session.update(request.pyworkflow.to_session_dict())
+ request.session.update(request.pyworkflow.to_json())
# Send back front-end workflow
- return JsonResponse(combined_json['react'])
+ return JsonResponse(combined_json['ui-graph'])
except KeyError as e:
return JsonResponse({'open_workflow': 'Missing data for ' + str(e)}, status=500)
except json.JSONDecodeError as e:
- return JsonResponse({'No React JSON provided': str(e)}, status=500)
+ return JsonResponse({'No JSON provided for UI graph': str(e)}, status=500)
except WorkflowException as e:
return JsonResponse({e.action: e.reason}, status=404)
@@ -134,15 +136,16 @@ def save_workflow(request):
Returns:
Downloads JSON file representing graph.
"""
- # Load session data into Workflow object. If successful, return
- # serialized graph
+ # Load session data into Workflow object.
+ # If successful, return serialized graph
try:
combined_json = json.dumps({
'filename': request.pyworkflow.filename,
- 'react': json.loads(request.body),
+ 'ui-graph': json.loads(request.body),
'pyworkflow': {
'name': request.pyworkflow.name,
'root_dir': request.pyworkflow.root_dir,
+ 'node_dir': request.pyworkflow.node_dir,
'graph': Workflow.to_graph_json(request.pyworkflow.graph),
'flow_vars': Workflow.to_graph_json(request.pyworkflow.flow_vars),
}
diff --git a/docs/media/pyworkflow_coverage.svg b/docs/media/pyworkflow_coverage.svg
index 8e21255..6963b3e 100644
--- a/docs/media/pyworkflow_coverage.svg
+++ b/docs/media/pyworkflow_coverage.svg
@@ -15,7 +15,7 @@
coverage
coverage
- 84%
- 84%
+ 87%
+ 87%
diff --git a/docs/media/ui_coverage.svg b/docs/media/ui_coverage.svg
index 3f77961..bab3ea3 100644
--- a/docs/media/ui_coverage.svg
+++ b/docs/media/ui_coverage.svg
@@ -15,7 +15,7 @@
coverage
coverage
- 64%
- 64%
+ 81%
+ 81%
diff --git a/front-end/src/API.js b/front-end/src/API.js
index a184885..9d09d72 100644
--- a/front-end/src/API.js
+++ b/front-end/src/API.js
@@ -78,7 +78,7 @@ export async function updateNode(node, config, flowConfig) {
node.options.option_replace = flowConfig;
const payload = {...node.options, options: node.config};
const options = {
- method: "POST",
+ method: "PATCH",
body: JSON.stringify(payload)
};
const endpoint = node.options.is_global ? "node/global" : "node";
@@ -211,7 +211,6 @@ export async function uploadDataFile(formData) {
* @returns {Promise}
*/
export async function downloadDataFile(node) {
- // TODO: make this not a giant security problem
let contentType;
const payload = {...node.options, options: node.config};
diff --git a/front-end/src/components/VPLink/VPLinkModel.js b/front-end/src/components/VPLink/VPLinkModel.js
index 832c6fd..e5517df 100644
--- a/front-end/src/components/VPLink/VPLinkModel.js
+++ b/front-end/src/components/VPLink/VPLinkModel.js
@@ -27,9 +27,6 @@ export default class VPLinkModel extends DefaultLinkModel {
return this.getLastPoint().getX() === 0 && this.getLastPoint().getY() === 0;
}
- /**
- * TODO: Notify backend the link has been removed
- */
remove() {
super.remove();
API.deleteEdge(this)