From 682e459b613f3b26d7526cc9244e30f92ebb4c44 Mon Sep 17 00:00:00 2001 From: mirceatlx Date: Sat, 18 Mar 2023 23:31:21 +0100 Subject: [PATCH 1/6] setup nutrient test + upload data to cloud --- .../modules/index/runnables/nutrient.py | 2 +- .../pipeline/test/unit/test_indices.py | 31 ++++++++++++++----- 2 files changed, 25 insertions(+), 8 deletions(-) diff --git a/src/backend/pipeline/modules/index/runnables/nutrient.py b/src/backend/pipeline/modules/index/runnables/nutrient.py index 078d01a..f058d84 100644 --- a/src/backend/pipeline/modules/index/runnables/nutrient.py +++ b/src/backend/pipeline/modules/index/runnables/nutrient.py @@ -24,7 +24,7 @@ def run(self, data: Data) -> bool: data.modules[Modules.INDEX.value]["runnables"][self.type.value]["masks"] = nutrient_masks masks = [np.where(mask == 1, 255, 0) for mask in nutrient_masks] patches = data.modules[Modules.MOSAIC.value]["patches"] - hsize, _ = data.modules[Modules.MOSAIC.value]["patches_dims"] + hsize = 512 result = self.calculate(masks, patches, hsize) data.modules[Modules.INDEX.value]["runnables"][self.type.value]["index"] = result return True diff --git a/src/backend/pipeline/test/unit/test_indices.py b/src/backend/pipeline/test/unit/test_indices.py index 17ee8ea..c86155c 100644 --- a/src/backend/pipeline/test/unit/test_indices.py +++ b/src/backend/pipeline/test/unit/test_indices.py @@ -7,9 +7,14 @@ from ...modules.modules import Modules from ...config import Config, CloudConfig from ...modules.index.indicies import Indicies +from ...modules.index.runnables.nutrient import Nutrient +from ...auth import get_credentials +from google.cloud import storage +import os import glob import numpy as np import pytest +import asyncio class TestNutrientRunnable: """ @@ -17,20 +22,32 @@ class TestNutrientRunnable: """ @pytest.mark.asyncio - @pytest.mark.skip(reason="Need the .npy file on Cloud Storage") async def test_nutrient(self): """ Test the method run. 
""" paths = {3:"../../ml/deepv3_seg_3/", 4:"../../ml/deepv3_seg_4/"} + # initialize config for the pipeline with the necessary modules cfg = Config(modules={Mosaicing: None, AgricultureVisionPreprocess: None, - SemanticSegmentation: paths, Index: None}, + SemanticSegmentation: paths, Index: {"config": None, "runnables": [Nutrient]}}, cloud=CloudConfig()) pipeline = Pipeline(cfg) - imgs = [Mat.read(file) for file in glob.glob("../data/mosaicing/farm/D*.JPG")] - result = pipeline.run(imgs) - assert result.modules[Modules.INDEX]["runnables"][Indicies.NUTRIENT]["index"] is not None - assert result.modules[Modules.INDEX]["runnables"][Indicies.NUTRIENT]["masks"] is not None + # get image paths + imgs = [Mat.read(file) for file in sorted(glob.glob("../data/mosaicing/farm/D*.JPG"))] + # run the pipeline + result = await pipeline.run(imgs) + assert result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["index"] is not None + assert result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] is not None + + # if the necessary data is not already stored locally, download it from the cloud + if not os.path.exists("nutrient_masks.npy"): + # connect to Cloud Storage + storage_client = storage.Client(credentials=get_credentials()) + bucket = storage_client.bucket("terrafarm-test") + blob = bucket.blob("nutrient_masks.npy") + blob.download_to_filename("nutrient_masks.npy") + + # load data and assert expected = np.load("nutrient_masks.npy", allow_pickle=True) - out = result.modules[Modules.INDEX]["runnables"][Indicies.NUTRIENT]["masks"] + out = result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] assert np.array_equal(out, expected) From 149e1bd3957e3d08136ba5275eebddfa7048b2e5 Mon Sep 17 00:00:00 2001 From: mirceatlx Date: Sat, 18 Mar 2023 23:50:05 +0100 Subject: [PATCH 2/6] debug --- src/backend/pipeline/test/unit/test_indices.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/backend/pipeline/test/unit/test_indices.py b/src/backend/pipeline/test/unit/test_indices.py index c86155c..b176347 100644 --- a/src/backend/pipeline/test/unit/test_indices.py +++ b/src/backend/pipeline/test/unit/test_indices.py @@ -36,6 +36,7 @@ async def test_nutrient(self): imgs = [Mat.read(file) for file in sorted(glob.glob("../data/mosaicing/farm/D*.JPG"))] # run the pipeline result = await pipeline.run(imgs) + print(result.modules[Modules.INDEX.value]) assert result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["index"] is not None assert result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] is not None @@ -49,5 +50,6 @@ async def test_nutrient(self): # load data and assert expected = np.load("nutrient_masks.npy", allow_pickle=True) + print(result.modules[Modules.INDEX.value]["runnables"]) out = result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] assert np.array_equal(out, expected) From c20f5d7fea7505192e6eebd79f67c676597ca394 Mon Sep 17 00:00:00 2001 From: paulmis Date: Sun, 19 Mar 2023 03:40:31 +0100 Subject: [PATCH 3/6] fix to_persist --- src/backend/pipeline/modules/parallel_module.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/backend/pipeline/modules/parallel_module.py b/src/backend/pipeline/modules/parallel_module.py index 83fb0af..bd66d5d 100644 --- a/src/backend/pipeline/modules/parallel_module.py +++ b/src/backend/pipeline/modules/parallel_module.py @@ -85,6 +85,10 @@ def prepare(self, data: Data) -> None: super().prepare(data) 
data.modules[self.type.value]["runnables"] = {} + # Override super initialization + data.persistable[self.type.value] = {} + data.persistable[self.type.value]["runnables"] = {} + # Initialize runnables' data for runnable in self.runnables: runnable.prepare(data) From d5a8fadd8c92e2586798fdeda818670a798f308e Mon Sep 17 00:00:00 2001 From: paulmis Date: Sun, 19 Mar 2023 16:50:52 +0100 Subject: [PATCH 4/6] fix super runs, print index stack trace --- src/backend/README.md | 2 +- src/backend/pipeline/modules/index/index.py | 3 --- .../pipeline/modules/index/runnables/ndvi.py | 7 ++--- .../modules/index/runnables/nutrient.py | 11 +++++--- src/backend/pipeline/modules/module.py | 20 ++++---------- src/backend/pipeline/modules/mosaicing.py | 8 +----- .../pipeline/modules/parallel_module.py | 13 ++++----- src/backend/pipeline/modules/preprocess.py | 9 +++---- src/backend/pipeline/modules/segmentation.py | 6 ++--- src/backend/pipeline/pipeline.py | 27 ++++++++++++++----- .../pipeline/test/unit/test_indices.py | 11 ++++---- .../pipeline/test/unit/test_preprocessing.py | 6 +---- 12 files changed, 56 insertions(+), 67 deletions(-) diff --git a/src/backend/README.md b/src/backend/README.md index 1084590..2b8f9b9 100644 --- a/src/backend/README.md +++ b/src/backend/README.md @@ -118,7 +118,7 @@ To add functionality, implement one of these abstract classes. The runnable is the simplest form of a pipeline element. While it is the building block of modules, the user can implement Runnables that are run by modules (such as `NDVI`). #### Module -The module is a logical part of the image processing pipeline, chained sequentially with other pipelines. A module will perform its functionality when being `run()` and save the relevant data in the pipeline data object that will be passed to the following module. Note that your implementation should invoke `super().run(data)` after your module logic. +The module is a logical part of the image processing pipeline, chained sequentially with other pipelines. A module will perform its functionality when being `run()` and save the relevant data in the pipeline data object that will be passed to the following module by the pipeline. #### Parallel module The parallel module is a module that can run multiple threads of execution at the same time, essentially allowing parallel module invocations. Parallel modules implement logical groups of functionalities, such as the calculation of all indicies (e.g. `NDVI` and `Mositure`) that do not rely on each other. 
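[Editorial note: the paragraph above describes the parallel module's behaviour, which boils down to a thread pool that fans out over independent runnables and blocks until all of them finish. The minimal standalone sketch below shows that ThreadPoolExecutor/wait pattern, which the run() method in parallel_module.py (later in this patch) keeps; the run_runnables helper and its return value are illustrative and not part of the codebase.]

    from concurrent.futures import ThreadPoolExecutor, wait

    def run_runnables(runnables, data, max_workers=4):
        """Run independent runnables concurrently and block until all of them finish."""
        with ThreadPoolExecutor(max_workers=max_workers) as executor:
            # Map each future back to its runnable so results can be attributed.
            futures = {executor.submit(runnable.run, data): runnable
                       for runnable in runnables}
            wait(futures)
            # Runnables such as NDVI and Nutrient return a bool success flag.
            return {futures[future].name: future.result() for future in futures}

[The real ParallelModule additionally merges the runnables' channel requirements and hard-codes max_workers=4, which the patch flags with a TODO.]
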
diff --git a/src/backend/pipeline/modules/index/index.py b/src/backend/pipeline/modules/index/index.py index c2a87bd..1af9f46 100644 --- a/src/backend/pipeline/modules/index/index.py +++ b/src/backend/pipeline/modules/index/index.py @@ -1,8 +1,5 @@ -# type: ignore from __future__ import annotations from ..parallel_module import ParallelModule -from .runnables.ndvi import NDVI -from .runnables.nutrient import Nutrient from ..runnable import Runnable from ..data import Data, Modules from typing import Any diff --git a/src/backend/pipeline/modules/index/runnables/ndvi.py b/src/backend/pipeline/modules/index/runnables/ndvi.py index 57eb6ef..f4f2d3c 100644 --- a/src/backend/pipeline/modules/index/runnables/ndvi.py +++ b/src/backend/pipeline/modules/index/runnables/ndvi.py @@ -3,8 +3,9 @@ from ..indicies import Indicies from ....mat import Channels import numpy as np -import cv2 +import traceback import matplotlib.pyplot as plt +import sys class NDVI(Runnable): """ @@ -44,8 +45,8 @@ def run(self, data: Data) -> bool: # Catch exception except Exception as exception: - print("NDVI calculation failed!") - print(exception) + print("Running NDVI failed: " + str(exception)) + print(traceback.format_exc()) return False def calculate(self, nir, red) -> np.ndarray: diff --git a/src/backend/pipeline/modules/index/runnables/nutrient.py b/src/backend/pipeline/modules/index/runnables/nutrient.py index f058d84..26ba006 100644 --- a/src/backend/pipeline/modules/index/runnables/nutrient.py +++ b/src/backend/pipeline/modules/index/runnables/nutrient.py @@ -1,10 +1,11 @@ from ...runnable import Runnable from ...data import Data, Modules from ..indicies import Indicies -from ....mat import Channels +import traceback import numpy as np import matplotlib.pyplot as plt import cv2 +import sys class Nutrient(Runnable): """ @@ -12,7 +13,7 @@ class Nutrient(Runnable): """ def __init__(self, data: Data) -> None: - super().__init__(data, name="NUTRIENT") + super().__init__(data, name="Nutrient") self.type = Indicies.NUTRIENT def run(self, data: Data) -> bool: @@ -28,9 +29,11 @@ def run(self, data: Data) -> bool: result = self.calculate(masks, patches, hsize) data.modules[Modules.INDEX.value]["runnables"][self.type.value]["index"] = result return True + + # Catch exception except Exception as exception: - print("Nutrient calculation failed!") - print(exception) + print("Running Nutrient failed: " + str(exception)) + print(traceback.format_exc()) return False def calculate(self, masks, patches, hsize: int) -> np.ndarray: diff --git a/src/backend/pipeline/modules/module.py b/src/backend/pipeline/modules/module.py index da17df8..5703a78 100644 --- a/src/backend/pipeline/modules/module.py +++ b/src/backend/pipeline/modules/module.py @@ -8,8 +8,8 @@ from typing import Any import pickle import pydash -from google.cloud import storage -from firebase_admin import firestore +import json +from abc import abstractmethod class Module(Runnable): """ @@ -35,25 +35,15 @@ def __init__(self, data: Data, channels: list[Channels] = [], input_data: Any = self.next: Module | None = None self.type: Modules = module_type - def run(self, data: Data) -> Any: + @abstractmethod + def run(self, data: Data) -> None: """ Processes the image. 
Args: - data: the job data - persist: whether to save the images to the field database + data: the pipeline data object """ - # If there is a next module, then run it - if self.next is not None: - print(f"Preparing <{self.name}>") - data.current = self.next.type - self.next.prepare(data) - print(f"Running <{self.name}>") - - # Otherwise, return the data - return data - def display(self, img: Mat) -> None: """ Downscales and displays an image to fit your monitor. diff --git a/src/backend/pipeline/modules/mosaicing.py b/src/backend/pipeline/modules/mosaicing.py index 39d58ca..e50aa85 100644 --- a/src/backend/pipeline/modules/mosaicing.py +++ b/src/backend/pipeline/modules/mosaicing.py @@ -16,7 +16,7 @@ class Mosaicing(Module): def __init__(self, data: Data, input_data: Any = None) -> None: super().__init__(data, name="Mosaicing", module_type=Modules.MOSAIC) - def run(self, data: Data) -> Data: + def run(self, data: Data) -> None: """ Stitches the images to create an orthomosaic image of the farm. @@ -26,9 +26,6 @@ def run(self, data: Data) -> Data: Raises: Exception: when the sticher fails to stich the images - - Returns: - The stiched image. """ self.prepare(data) @@ -62,9 +59,6 @@ def run(self, data: Data) -> Data: data.modules[self.type.value]["alpha_img"] = alpha_stitched patches = self.create_patches(stitched, data.input[0].channels) data.modules[self.type.value]["patches"] = patches - - # Run the next module - return super().run(data) def to_persist(self, data: Data): data.persistable[self.type.value] = frozenset([self.type.value + "." + "stitched"]) diff --git a/src/backend/pipeline/modules/parallel_module.py b/src/backend/pipeline/modules/parallel_module.py index bd66d5d..6372102 100644 --- a/src/backend/pipeline/modules/parallel_module.py +++ b/src/backend/pipeline/modules/parallel_module.py @@ -9,6 +9,8 @@ from .modules import Modules from concurrent.futures import ThreadPoolExecutor, wait from typing import Any, Type + + class ParallelModule(Module): """ Represents an arbitrary image processing pipeline module that can @@ -33,29 +35,24 @@ def __init__(self, data: Data, runnables: list[Type[Runnable]], input_data: Any self.runnables: list[Runnable] = [runnable(data) for runnable in runnables] self.channels: list[Channels] = list(set(sum([runnable.channels for runnable in self.runnables], []))) - def run(self, data: Data) -> Data: + def run(self, data: Data) -> None: """ Processes the image using the runnables. 
Args: - img: the input image(s) - data: the job data + data: the pipeline data object """ - # Spawn the executor # TODO: don't hardcode max_workers with ThreadPoolExecutor(max_workers=4) as executor: # Run the runnables - print("Parallel module running " +\ + print("Running " +\ ', '.join(["<" + runnable.name + ">" for runnable in self.runnables])) futures = {executor.submit(runnable.run, data): runnable for runnable in self.runnables} # Wait for completion # TODO: add validation wait(futures) - - # Run the module functionality - return super().run(data) def verify(self, channels: list[Channels]) -> bool: """ diff --git a/src/backend/pipeline/modules/preprocess.py b/src/backend/pipeline/modules/preprocess.py index 65e1e9d..c7a056d 100644 --- a/src/backend/pipeline/modules/preprocess.py +++ b/src/backend/pipeline/modules/preprocess.py @@ -10,8 +10,8 @@ class Preprocess(Module): """Perform data preprocessing on raw images.""" - def __init__(self, data: Data, input_data: Any): - super().__init__(data, name="Preprocesisng", module_type=Modules.PREPROCESS) + def __init__(self, data: Data, input_data: Any, name="Preprocess"): + super().__init__(data, name=name, module_type=Modules.PREPROCESS) self.masks: list[Mat] = input_data def run(self, data: Data): @@ -32,8 +32,6 @@ def run(self, data: Data): for (x, mask) in zip(data.input, self.masks)] data.modules[self.type.value]["masked"] = masked - return super().run(data) - class AgricultureVisionPreprocess(Preprocess): """ Perform data preprocessing on Agriculture-Vision: A Large Aerial Image Database for @@ -44,7 +42,7 @@ class AgricultureVisionPreprocess(Preprocess): """ def __init__(self, data: Data, input_data: Any): - super().__init__(data, input_data=input_data) + super().__init__(data, input_data=input_data, name="AgricultureVisionPreprocess") def run(self, data: Data): """ @@ -74,4 +72,3 @@ def run(self, data: Data): data.modules[self.type.value]["clipping"] =\ [Mat(np.clip(x.get(), v_lower, v_upper).astype(np.uint8), data.input[0].channels) for (x, (v_lower, v_upper)) in zip(data.modules[Modules.MOSAIC.value]["patches"], bounds)] - return super().run(data) diff --git a/src/backend/pipeline/modules/segmentation.py b/src/backend/pipeline/modules/segmentation.py index 2369777..ace4203 100644 --- a/src/backend/pipeline/modules/segmentation.py +++ b/src/backend/pipeline/modules/segmentation.py @@ -9,10 +9,11 @@ class SemanticSegmentation(Module): """Perform semantic segmentation using the DeepLabV3+ model.""" def __init__(self, data: Data, input_data: Any) -> None: - super().__init__(data, name="Semantic Segmentation DeepLabv3+", module_type=Modules.SEGMENTATION) + super().__init__(data, name="Semantic Segmentation DeepLabv3+", + module_type=Modules.SEGMENTATION) self.paths = input_data # paths to model atrifacts - def run(self, data: Data) -> Data: + def run(self, data: Data) -> None: """ Perform inference using the images given. 
Each image should adhere to specific dimensions in order to be @@ -30,5 +31,4 @@ def run(self, data: Data) -> Data: predictions.append(model.predict(np.expand_dims((image.get()), axis=0))) data.modules[self.type.value]["masks"] = predictions - return super().run(data) diff --git a/src/backend/pipeline/pipeline.py b/src/backend/pipeline/pipeline.py index 709ab8c..073c247 100644 --- a/src/backend/pipeline/pipeline.py +++ b/src/backend/pipeline/pipeline.py @@ -11,6 +11,7 @@ from firebase_admin import firestore from google.cloud import storage import time +import traceback # temporary input bucket for manual triggers temp_input_bucket = "terrafarm-inputs" @@ -48,6 +49,12 @@ def __init__(self, config: Config): tail.next = self.build_module(module, input_data) tail = tail.next + # Show the build pipeline + print('------------------') + print('Modules in the pipeline:') + print(' -> '.join(module.__name__ for module in config.modules.keys())) + print('------------------') + def build_module(self, module: Type[Module], input_data: Any) -> Module: """ Builds a module and returns it. @@ -114,23 +121,31 @@ async def run(self, imgs: Mat | list[Mat]) -> Data: raise RuntimeError("Pipeline input integrity violated") # Run the chain - iterator: Module | None = self.head + module: Module | None = self.head failed: bool = False - while iterator is not None: - # Run the module + while module is not None: try: - data = iterator.run(data) + # Prepare the module + print(f"Preparing <{module.name}>") + data.current = module.type + module.prepare(data) + + # Run the module + print(f"Running <{module.name}>") + module.run(data) except Exception as exception: + print("Running " + module.name + " failed: " + str(exception)) + print(traceback.format_exc()) failed = True # Upload data to the cloud asynchronously if self.config.cloud.use_cloud: asyncio.create_task( asyncio.to_thread( - iterator.upload(data, self.collection, self.bucket, self.base_url))) + module.upload(data, self.collection, self.bucket, self.base_url))) # Go to the next module - iterator = iterator.next + module = module.next # Log end time of the pipeline if self.config.cloud.use_cloud: diff --git a/src/backend/pipeline/test/unit/test_indices.py b/src/backend/pipeline/test/unit/test_indices.py index b176347..e3945a2 100644 --- a/src/backend/pipeline/test/unit/test_indices.py +++ b/src/backend/pipeline/test/unit/test_indices.py @@ -17,9 +17,7 @@ import asyncio class TestNutrientRunnable: - """ - Unit testing for the nutrient deficiency module. 
- """ + """Unit testing for the nutrient deficiency module.""" @pytest.mark.asyncio async def test_nutrient(self): @@ -28,15 +26,16 @@ async def test_nutrient(self): """ paths = {3:"../../ml/deepv3_seg_3/", 4:"../../ml/deepv3_seg_4/"} # initialize config for the pipeline with the necessary modules - cfg = Config(modules={Mosaicing: None, AgricultureVisionPreprocess: None, - SemanticSegmentation: paths, Index: {"config": None, "runnables": [Nutrient]}}, + cfg = Config(modules={Mosaicing: None, + AgricultureVisionPreprocess: None, + SemanticSegmentation: paths, + Index: {"config": None, "runnables": [Nutrient]}}, cloud=CloudConfig()) pipeline = Pipeline(cfg) # get image paths imgs = [Mat.read(file) for file in sorted(glob.glob("../data/mosaicing/farm/D*.JPG"))] # run the pipeline result = await pipeline.run(imgs) - print(result.modules[Modules.INDEX.value]) assert result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["index"] is not None assert result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] is not None diff --git a/src/backend/pipeline/test/unit/test_preprocessing.py b/src/backend/pipeline/test/unit/test_preprocessing.py index 8b77e3c..f70198f 100644 --- a/src/backend/pipeline/test/unit/test_preprocessing.py +++ b/src/backend/pipeline/test/unit/test_preprocessing.py @@ -11,11 +11,7 @@ from ...auth import get_credentials class TestPreprocessingModule: - """ - Unit testing for the preprocessing module. - """ - - + """Unit testing for the preprocessing module.""" def test_preprocessing(self): """ From bf659b29ec428259c3f8ee2e4251c4bec79408ee Mon Sep 17 00:00:00 2001 From: mirceatlx Date: Mon, 20 Mar 2023 09:22:01 +0100 Subject: [PATCH 5/6] change version --- src/backend/requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/backend/requirements.txt b/src/backend/requirements.txt index 4903de8..8c96148 100644 --- a/src/backend/requirements.txt +++ b/src/backend/requirements.txt @@ -4,7 +4,7 @@ numpy==1.24.1 opencv-python==4.7.0.68 matplotlib==3.6.2 screeninfo==0.8.1 -tensorflow==2.10.0 +tensorflow==2.11.0 matplotlib==3.6.2 mypy==1.0.1 pylint==2.16.2 From 83c4e1cfe8cc24ef3cca40c48aef73b57988f00d Mon Sep 17 00:00:00 2001 From: mirceatlx Date: Mon, 20 Mar 2023 11:34:37 +0100 Subject: [PATCH 6/6] argmax --- src/backend/main.py | 2 +- src/backend/pipeline/modules/index/runnables/nutrient.py | 4 +++- src/backend/pipeline/templates.py | 2 +- src/backend/pipeline/test/unit/test_indices.py | 6 +++--- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/src/backend/main.py b/src/backend/main.py index c9cd1d5..25a64c4 100755 --- a/src/backend/main.py +++ b/src/backend/main.py @@ -60,7 +60,7 @@ def main(args: Any): imgs = imgs[:4] # Run the pipeline - pipeline = default_pipeline(cloud=cloud_config) + pipeline = nutrient_pipeline() pipeline.show() # Authenticate to firebase diff --git a/src/backend/pipeline/modules/index/runnables/nutrient.py b/src/backend/pipeline/modules/index/runnables/nutrient.py index 26ba006..3c31e06 100644 --- a/src/backend/pipeline/modules/index/runnables/nutrient.py +++ b/src/backend/pipeline/modules/index/runnables/nutrient.py @@ -1,6 +1,7 @@ from ...runnable import Runnable from ...data import Data, Modules from ..indicies import Indicies +import tensorflow as tf import traceback import numpy as np import matplotlib.pyplot as plt @@ -20,8 +21,9 @@ def run(self, data: Data) -> bool: try: # take the calculated masks from the segmentation module masks = 
data.modules[Modules.SEGMENTATION.value]["masks"] + arg_masks = [tf.one_hot(tf.math.argmax(mask, axis=3), depth=9) for mask in masks] # assume index 0 is for the nutrient deficiency mask - nutrient_masks = [mask[0][:, :, 0] for mask in masks] + nutrient_masks = [mask[0][:, :, 0] for mask in arg_masks] data.modules[Modules.INDEX.value]["runnables"][self.type.value]["masks"] = nutrient_masks masks = [np.where(mask == 1, 255, 0) for mask in nutrient_masks] patches = data.modules[Modules.MOSAIC.value]["patches"] diff --git a/src/backend/pipeline/templates.py b/src/backend/pipeline/templates.py index 7e082ba..1fae884 100644 --- a/src/backend/pipeline/templates.py +++ b/src/backend/pipeline/templates.py @@ -47,5 +47,5 @@ def nutrient_pipeline() -> Pipeline: AgricultureVisionPreprocess: None, SemanticSegmentation: paths, Index: {"config": None, "runnables": [Nutrient]}}, - cloud=CloudConfig(True, "terrafarm-example")) + cloud=CloudConfig(False, "terrafarm-example")) return Pipeline(cfg) diff --git a/src/backend/pipeline/test/unit/test_indices.py b/src/backend/pipeline/test/unit/test_indices.py index e3945a2..41fcfd4 100644 --- a/src/backend/pipeline/test/unit/test_indices.py +++ b/src/backend/pipeline/test/unit/test_indices.py @@ -15,6 +15,7 @@ import numpy as np import pytest import asyncio +import tensorflow as tf class TestNutrientRunnable: """Unit testing for the nutrient deficiency module.""" @@ -48,7 +49,6 @@ async def test_nutrient(self): blob.download_to_filename("nutrient_masks.npy") # load data and assert - expected = np.load("nutrient_masks.npy", allow_pickle=True) - print(result.modules[Modules.INDEX.value]["runnables"]) - out = result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] + expected = np.load("nutrient_masks.npy", allow_pickle=True) + out = result.modules[Modules.INDEX.value]["runnables"][Indicies.NUTRIENT.value]["masks"] assert np.array_equal(out, expected)
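
[Editorial note: the behavioural change in this final commit is the argmax/one-hot post-processing of the segmentation output before the nutrient channel is sliced out. Below is a minimal sketch of that step on a dummy prediction tensor; the 9-class depth and the assumption that channel 0 holds the nutrient-deficiency class follow the patch, while the 1x4x4 tensor shape is purely illustrative.]

    import numpy as np
    import tensorflow as tf

    # Dummy model output: one patch, 4x4 pixels, 9 class scores per pixel.
    pred = tf.random.uniform((1, 4, 4, 9))

    # Pick the winning class per pixel, then re-expand to one binary channel per class.
    one_hot = tf.one_hot(tf.math.argmax(pred, axis=3), depth=9)  # shape (1, 4, 4, 9)

    # Channel 0 is assumed to be the nutrient-deficiency class.
    nutrient_mask = one_hot[0][:, :, 0]  # shape (4, 4), values 0.0 / 1.0

    # Same 0/255 scaling the Nutrient runnable applies before computing the index.
    vis = np.where(nutrient_mask.numpy() == 1, 255, 0)
    print(vis.shape, vis.min(), vis.max())

[Before this commit the raw prediction scores were sliced directly via mask[0][:, :, 0], so the stored masks were continuous scores rather than hard 0/1 masks.]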