Initial unit testing for scaffolding file Part 0 #174

Open
wants to merge 10 commits into base: main
36 changes: 36 additions & 0 deletions .github/workflows/test_with_docker.yml
@@ -0,0 +1,36 @@
# This is a basic workflow to help you get started with Actions

name: test-with-docker

# Controls when the action will run. Triggers the workflow on push or pull request
# events for the main branch, and on a weekly schedule
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
schedule:
# * is a special character in YAML so you have to quote this string
- cron: '5 4 * * 0'

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "build"
build:
# The type of runner that the job will run on
runs-on: ubuntu-latest

# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- name: Checkout
uses: actions/checkout@v2

- name: Make sure that the workflow works
run: echo Smoke test

- name: Run the tests using docker-compose
working-directory: .github/workflows
run: |
docker compose -f ../../docker-compose.tests.yml build
docker compose -f ../../docker-compose.tests.yml up --exit-code-from notebook-server
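
The same job can be reproduced locally from the repository root. This is a sketch that assumes Docker Compose v2 and that SERVER_IMAGE_TAG names an existing e-mission-server tag suffix (the value below is a placeholder; docker-compose.tests.yml passes it through as a build arg):

# Sketch: reproduce the CI test job locally from the repository root.
export SERVER_IMAGE_TAG=<server-image-tag-suffix>   # placeholder; Dockerfile.test prepends "master_"
docker compose -f docker-compose.tests.yml build
docker compose -f docker-compose.tests.yml up --exit-code-from notebook-server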
51 changes: 51 additions & 0 deletions docker-compose.tests.yml
@@ -0,0 +1,51 @@
version: "3"
services:
dashboard:
image: em-pub-dash-dev/frontend
build:
context: frontend
dockerfile: docker/Dockerfile.dev
depends_on:
- db
ports:
# DASH in numbers
- "3274:6060"
volumes:
- ./frontend:/public
- ./plots:/public/plots
networks:
- emission
notebook-server:
image: em-pub-dash-dev/viz-scripts
build:
context: viz_scripts
dockerfile: docker/Dockerfile.test
args:
SERVER_IMAGE_TAG: ${SERVER_IMAGE_TAG}
depends_on:
- db
environment:
- DB_HOST=db
- WEB_SERVER_HOST=0.0.0.0
- CRON_MODE=
- STUDY_CONFIG=stage-program
ports:
# ipynb in numbers
- "47962:47962"
networks:
- emission
volumes:
- ./viz_scripts:/usr/src/app/saved-notebooks
- ./plots:/plots
db:
image: mongo:4.4.0
volumes:
- mongo-data:/data/db
networks:
- emission

networks:
emission:

volumes:
mongo-data:
19 changes: 19 additions & 0 deletions viz_scripts/docker/Dockerfile.test
@@ -0,0 +1,19 @@
# python 3
ARG SERVER_IMAGE_TAG
FROM shankari/e-mission-server:master_${SERVER_IMAGE_TAG}

VOLUME /plots

ADD docker/environment36.dashboard.additions.yml /

WORKDIR /usr/src/app

RUN /bin/bash -c "source setup/activate.sh && conda env update --name emission --file setup/environment36.notebook.additions.yml"
RUN /bin/bash -c "source setup/activate.sh && conda env update --name emission --file /environment36.dashboard.additions.yml"

ADD docker/start_tests.sh /usr/src/app/.docker/start_tests.sh
RUN chmod u+x /usr/src/app/.docker/start_tests.sh

EXPOSE 8888

CMD ["/bin/bash", "/usr/src/app/.docker/start_tests.sh"]
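
For debugging the image in isolation, the same Dockerfile can be built standalone with the context and build arg that docker-compose.tests.yml supplies; a sketch (the image name and tag value here are illustrative, not part of this PR):

# Sketch: standalone build of the test image from the repository root.
docker build -f viz_scripts/docker/Dockerfile.test \
  --build-arg SERVER_IMAGE_TAG=<server-image-tag-suffix> \
  -t em-pub-dash-dev/viz-scripts-test viz_scripts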
2 changes: 2 additions & 0 deletions viz_scripts/docker/environment36.dashboard.additions.yml
@@ -4,6 +4,8 @@ channels:
- defaults
dependencies:
- seaborn=0.11.1
- pytest
- coverage
- pip:
- nbparameterise==0.6
- devcron==0.4
13 changes: 13 additions & 0 deletions viz_scripts/docker/start_tests.sh
@@ -0,0 +1,13 @@
#!/bin/bash
set -e # Exit on error

# Activate the emission Python environment
pwd
source setup/activate.sh || exit 1
conda env list
cd saved-notebooks/tests || exit 1

echo "Starting unit tests..."
PYTHONPATH=../.. coverage run -m pytest . -v

coverage report
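
For quicker iteration, a single test file can also be run inside the notebook-server container instead of the full start_tests.sh run; a sketch, assuming the stack from docker-compose.tests.yml has already been built:

# Sketch: run only the plots tests inside the notebook-server container.
docker compose -f docker-compose.tests.yml run --rm notebook-server \
  /bin/bash -c "source setup/activate.sh && cd saved-notebooks/tests && PYTHONPATH=../.. python -m pytest test_plots.py -v"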
12 changes: 11 additions & 1 deletion viz_scripts/plots.py
@@ -9,7 +9,17 @@

sns.set_style("whitegrid")
sns.set()
get_ipython().run_line_magic('matplotlib', 'inline')

try:
# Import get_ipython so we can detect whether an IPython kernel is active
from IPython import get_ipython
# Check if running in an IPython environment (like Jupyter Notebook)
if get_ipython() is not None:
get_ipython().run_line_magic('matplotlib', 'inline')
except ImportError:
# Handle the case where IPython is not installed
# We are running in regular Python (likely pytest), not Jupyter/IPython
pass

# Module for pretty-printing outputs (e.g. head) to help users
# understand what is going on
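
A quick sanity check for this guard is to import the module from a plain (non-Jupyter) interpreter; a sketch, run from /usr/src/app inside the notebook-server container:

# Sketch: plots.py should now import without a Jupyter kernel.
source setup/activate.sh
python -c "import importlib; importlib.import_module('saved-notebooks.plots'); print('import OK')"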
58 changes: 58 additions & 0 deletions viz_scripts/tests/test_plots.py
@@ -0,0 +1,58 @@
import pytest
import pandas as pd
import numpy as np
# Use importlib.import_module because the code is mounted under 'saved-notebooks';
# the dash in the directory name rules out a plain import statement
import importlib
plots = importlib.import_module('saved-notebooks.plots')

# Test Data Fixtures
@pytest.fixture
def sample_labels():
return ['Car', 'Bus', 'Train', 'Walk']

@pytest.fixture
def sample_values():
return [100, 50, 3, 1]

@pytest.fixture
def sample_labels_no_small():
return ['Car', 'Bus']


@pytest.fixture
def sample_values_no_small():
return [100, 100]

class TestCalculatePct:
def test_calculate_pct_basic(self, sample_labels, sample_values):
labels, values, pcts = plots.calculate_pct(sample_labels, sample_values)
assert len(labels) == len(sample_labels)
assert len(values) == len(sample_values)
assert sum(pcts) == pytest.approx(100.0, abs=0.1)

def test_calculate_pct_empty(self):
labels, values, pcts = plots.calculate_pct([],[])
assert len(labels) == 0
assert len(values) == 0
assert len(pcts) == 0

def test_calculate_pct_single(self):
labels, values, pcts = plots.calculate_pct(['Car'], [100])
assert pcts == [100.0]

class TestMergeSmallEntries:
def test_merge_small_entries_basic(self, sample_labels, sample_values):
labels, values, pcts = plots.merge_small_entries(sample_labels, sample_values)
assert all(pct > 2.0 for pct in pcts)

def test_merge_small_entries_no_small(self, sample_labels_no_small, sample_values_no_small):
result_labels, result_values, result_pcts = plots.merge_small_entries(sample_labels_no_small, sample_values_no_small)
assert len(result_labels) == 2
assert 'other' not in result_labels
assert 'OTHER' not in result_labels

def test_merge_small_entries_some_small(self, sample_labels, sample_values):
result_labels, result_values, result_pcts = plots.merge_small_entries(sample_labels, sample_values)
print(result_labels)
assert len(result_labels) == 3
assert result_labels[0] in ['Car', 'Bus','other', 'OTHER']
74 changes: 74 additions & 0 deletions viz_scripts/tests/test_scaffolding.py
@@ -0,0 +1,74 @@
import unittest.mock as mock
import emission.core.wrapper.localdate as ecwl
import emission.storage.timeseries.tcquery as esttc
import importlib
import pandas as pd
import numpy as np
import collections as colls
import pytest

# Dynamically import saved-notebooks.scaffolding (directory name contains a dash)
scaffolding = importlib.import_module('saved-notebooks.scaffolding')

def test_get_time_query():
# Test with both year and month
result = scaffolding.get_time_query(2022, 6)
assert result is not None
assert isinstance(result, esttc.TimeComponentQuery)

# Test with year and no month
result = scaffolding.get_time_query(2023, None)
assert result is not None
assert isinstance(result, esttc.TimeComponentQuery)

# Test with month and no year
with pytest.raises(Exception) as e_info:
result = scaffolding.get_time_query(None, 12)

# Test with no year or month
result = scaffolding.get_time_query(None, None)
assert result is None

def test_mapping_labels():
dynamic_labels = {
"MODE": [
{"value":"gas_car", "base_mode": "CAR",
"baseMode":"CAR", "met_equivalent":"IN_VEHICLE", "kgCo2PerKm": 0.22031},
{"value":"motorcycle", "base_mode": "MOPED", "footprint": { "gasoline": { "wh_per_km": 473.17 }},
"baseMode":"MOPED", "met_equivalent":"IN_VEHICLE", "kgCo2PerKm": 0.113143309},
{"value":"walk", "base_mode": "WALKING",
"baseMode":"WALKING", "met_equivalent":"WALKING", "kgCo2PerKm": 0},
{"value":"e_car", "base_mode": "E_CAR",
"baseMode":"E_CAR", "met_equivalent":"IN_VEHICLE", "kgCo2PerKm": 0.08216},
{"value":"taxi", "base_mode": "TAXI",
"baseMode":"TAXI", "met_equivalent":"IN_VEHICLE", "kgCo2PerKm": 0.30741},
{"value":"bike", "base_mode": "BICYCLING",
"baseMode":"BICYCLING", "met_equivalent":"BICYCLING", "kgCo2PerKm": 0},
{"value":"air", "base_mode": "AIR",
"baseMode":"AIR", "met_equivalent":"IN_VEHICLE", "kgCo2PerKm": 0.09975}
],
"translations": {
"en": {
"walk": "Walk",
"motorcycle":"Motorcycle",
"bike": "Bicycle",
"gas_car": "Car",
"e_car": "Electric Car",
"taxi": "Taxi",
"air": "Airplane"
}
}
}

result_mode = scaffolding.mapping_labels(dynamic_labels, "MODE")

expected_result_mode = colls.defaultdict(lambda: 'Other', {
"gas_car": "Car",
"motorcycle": "Motorcycle",
"walk": "Walk",
"e_car": "Electric Car",
"taxi": "Taxi",
"bike": "Bicycle",
"air": "Airplane"
})
assert result_mode == expected_result_mode
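
Note that test_scaffolding.py imports emission.* modules, so it depends on the e-mission-server environment baked into Dockerfile.test and should be run inside the notebook-server container. While iterating, a single test can be selected with pytest's -k flag; a sketch, assuming the working directory is saved-notebooks/tests inside the container:

# Sketch: run one scaffolding test during development.
PYTHONPATH=../.. python -m pytest test_scaffolding.py -k test_mapping_labels -v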