Skip to content

Commit

Permalink
add tests
Browse files Browse the repository at this point in the history
  • Loading branch information
carderne committed Feb 17, 2024
1 parent 57729b2 commit ff338ef
Show file tree
Hide file tree
Showing 47 changed files with 404 additions and 210 deletions.
18 changes: 17 additions & 1 deletion .github/workflows/deploy.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,25 @@ on:
- 'v*'

jobs:
publish:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4

- name: Setup python
uses: actions/setup-python@v5
with:
python-version: '3.12'

- name: Install
run: pip install '.[dev]'

- name: Test
run: pytest

publish:
runs-on: ubuntu-latest
steps:
- name: Checkout repo
uses: actions/checkout@v4
Expand Down
6 changes: 2 additions & 4 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
# Custom
server/
test_output/
quickrun.py
filter.ipynb
examples/*.tif
examples/*.gpkg

# Byte-compiled / optimized / DLL files
__pycache__/
Expand Down
2 changes: 1 addition & 1 deletion LICENSE
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
MIT License

Copyright (c) 2019 Chris Arderne
Copyright (c) 2018 Chris Arderne

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
Expand Down
4 changes: 4 additions & 0 deletions MANIFEST.in
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
prune tests
prune docs
prune examples
exclude gridfinder-animated.gif
8 changes: 8 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
.PHONY: lint
lint:
ruff format .
ruff check .

.PHONY: test
test:
pytest
38 changes: 24 additions & 14 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,8 @@ The algorithm looks as follows in process, guessing the grid network for Uganda:

## Input requirements
gridfinder requires the following data sources:
- VIIRS data, monthly and annual composites available [here](https://ngdc.noaa.gov/eog/viirs/download_dnb_composites.html).
- OSM highway data, most easily available using the [HOT Export Tool](https://export.hotosm.org/en/v3/), otherwise [BBBike](https://extract.bbbike.org/) or [geofabrik](https://download.geofabrik.de/), depending on your needs.
- VIIRS data, monthly and annual composites available [here](https://eogdata.mines.edu/products/vnl/).
- OSM highway data, most easily available using the [HOT Export Tool](https://export.hotosm.org/en/v3/), otherwise [geofabrik](https://download.geofabrik.de/).

## Model usage

Expand All @@ -18,19 +18,29 @@ This repository includes the input data needed to do a test run for Burundi, so

## Installation
### Install with pip

pip install gridfinder
```bash
pip install gridfinder
```

**Note:** On some operating systems (Ubuntu 18.04), you may get an error about `libspatialindex`. To overcome this on Ubuntu, run:
```bash
sudo apt install libspatialindex-dev
```

sudo apt install libspatialindex-dev

### Install from GitHub
## Development
Download or clone the repository and install the required packages (preferably in a virtual environment):

git clone https://github.com/carderne/gridfinder.git
cd gridfinder
pip install -e '.[dev]'

You can run ```./test.sh``` in the directory, which will do an entire run through using the test data and confirm whether everything is set up properly.
(It will fail if jupyter isn't installed!)
```bash
git clone https://github.com/carderne/gridfinder.git
cd gridfinder
pip install -e '.[dev]'
```

### Linting
```bash
make lint
```

### Testing
```bash
make test
```
158 changes: 42 additions & 116 deletions example.ipynb → examples/example.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -18,17 +18,16 @@
"import os\n",
"from pathlib import Path\n",
"\n",
"import matplotlib.pyplot as plt\n",
"from matplotlib import cm\n",
"from mpl_toolkits.mplot3d import Axes3D\n",
"import folium\n",
"import geopandas as gpd\n",
"import matplotlib.animation as animation\n",
"import seaborn as sns\n",
"from IPython.display import display, Markdown\n",
"\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"import rasterio\n",
"import geopandas as gpd\n",
"import folium\n",
"import seaborn as sns\n",
"from IPython.display import Markdown, display\n",
"from matplotlib import cm\n",
"from mpl_toolkits.mplot3d import Axes3D\n",
"\n",
"import gridfinder as gf\n",
"from gridfinder import save_raster"
Expand All @@ -47,89 +46,22 @@
"metadata": {},
"outputs": [],
"source": [
"folder_inputs = Path('test_data')\n",
"folder_ntl_in = folder_inputs / 'ntl'\n",
"aoi_in = folder_inputs / 'gadm.gpkg'\n",
"roads_in = folder_inputs / 'roads.gpkg'\n",
"pop_in = folder_inputs / 'pop.tif'\n",
"grid_truth = folder_inputs / 'grid.gpkg'\n",
"\n",
"folder_out = Path('test_output')\n",
"folder_ntl_out = folder_out / 'ntl_clipped'\n",
"raster_merged_out = folder_out / 'ntl_merged.tif'\n",
"targets_out = folder_out / 'targets.tif'\n",
"targets_clean_out = folder_out / 'targets_clean.tif'\n",
"roads_out = folder_out / 'roads.tif'\n",
"\n",
"dist_out = folder_out / 'dist.tif'\n",
"guess_out = folder_out / 'guess.tif'\n",
"guess_skeletonized_out = folder_out / 'guess_skel.tif'\n",
"guess_nulled = folder_out / 'guess_nulled.tif'\n",
"guess_vec_out = folder_out / 'guess.gpkg'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"percentile = 70 # percentile value to use when merging monthly NTL rasters\n",
"ntl_threshold = 0.1 # threshold when converting filtered NTL to binary (probably shouldn't change)\n",
"upsample_by = 2 # factor by which to upsample before processing roads (both dimensions are scaled by this)\n",
"cutoff = 0.0 # cutoff to apply to output dist raster, values below this are considered grid"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Clip and merge monthly rasters"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"gf.clip_rasters(folder_ntl_in, folder_ntl_out, aoi_in)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"raster_merged, affine = gf.merge_rasters(folder_ntl_out, percentile=percentile)\n",
"save_raster(raster_merged_out, raster_merged, affine)\n",
"print('Merged')\n",
"plt.imshow(raster_merged, vmin=0, vmax=1)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"## Create filter"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"ntl_filter = gf.create_filter()\n",
"data_in = Path(\"../tests/data\")\n",
"aoi_in = data_in / \"aoi.geojson\"\n",
"roads_in = data_in / \"roads.geojson\"\n",
"pop_in = data_in / \"pop.tif\"\n",
"ntl_in = data_in / \"ntl.tif\"\n",
"grid_truth_in = data_in / \"grid.geojson\"\n",
"\n",
"X = np.fromfunction(lambda i, j: i, ntl_filter.shape)\n",
"Y = np.fromfunction(lambda i, j: j, ntl_filter.shape)\n",
"data_out = Path(\".\")\n",
"targets_out = data_out / \"targets.tif\"\n",
"targets_clean_out = data_out / \"targets_clean.tif\"\n",
"costs_out = data_out / \"costs.tif\"\n",
"\n",
"fig = plt.figure()\n",
"sns.set()\n",
"ax = fig.add_subplot(projection='3d')\n",
"ax.plot_surface(X, Y, ntl_filter, cmap=cm.coolwarm, linewidth=0, antialiased=False)"
"dist_out = data_out / \"dist.tif\"\n",
"guess_out = data_out / \"guess.tif\"\n",
"guess_thin_out = data_out / \"guess_thin.tif\"\n",
"guess_vec_out = data_out / \"guess.gpkg\""
]
},
{
Expand All @@ -145,14 +77,10 @@
"metadata": {},
"outputs": [],
"source": [
"ntl_thresh, affine = gf.prepare_ntl(raster_merged_out,\n",
" aoi_in,\n",
" ntl_filter=ntl_filter,\n",
" threshold=ntl_threshold,\n",
" upsample_by=upsample_by)\n",
"ntl_thresh, affine = gf.prepare_ntl(ntl_in, aoi_in)\n",
"save_raster(targets_out, ntl_thresh, affine)\n",
"print('Targets prepared')\n",
"plt.imshow(ntl_thresh, cmap='viridis')"
"print(\"Targets prepared\")\n",
"plt.imshow(ntl_thresh, cmap=\"viridis\")"
]
},
{
Expand All @@ -170,8 +98,8 @@
"source": [
"targets_clean = gf.drop_zero_pop(targets_out, pop_in, aoi_in)\n",
"save_raster(targets_clean_out, targets_clean, affine)\n",
"print('Removed zero pop')\n",
"plt.imshow(ntl_thresh, cmap='viridis')"
"print(\"Removed zero pop\")\n",
"plt.imshow(ntl_thresh, cmap=\"viridis\")"
]
},
{
Expand All @@ -187,12 +115,10 @@
"metadata": {},
"outputs": [],
"source": [
"roads_raster, affine = gf.prepare_roads(roads_in,\n",
" aoi_in,\n",
" targets_out)\n",
"save_raster(roads_out, roads_raster, affine, nodata=-1)\n",
"print('Costs prepared')\n",
"plt.imshow(roads_raster, cmap='viridis', vmin=0, vmax=1)"
"roads_raster, affine = gf.prepare_roads(roads_in, aoi_in, targets_out)\n",
"save_raster(costs_out, roads_raster, affine, nodata=-1)\n",
"print(\"Costs prepared\")\n",
"plt.imshow(roads_raster, cmap=\"viridis\", vmin=0, vmax=1)"
]
},
{
Expand All @@ -208,9 +134,9 @@
"metadata": {},
"outputs": [],
"source": [
"targets, costs, start, affine = gf.get_targets_costs(targets_clean_out, roads_out)\n",
"targets, costs, start, affine = gf.get_targets_costs(targets_clean_out, costs_out)\n",
"est_mem = gf.estimate_mem_use(targets, costs)\n",
"print(f'Estimated memory usage: {est_mem:.2f} GB')"
"print(f\"Estimated memory usage: {est_mem:.2f} GB\")"
]
},
{
Expand All @@ -237,10 +163,10 @@
"metadata": {},
"outputs": [],
"source": [
"guess, affine = gf.threshold(dist_out, cutoff=cutoff)\n",
"guess, affine = gf.threshold(dist_out, cutoff=0.0)\n",
"save_raster(guess_out, guess, affine)\n",
"print('Got guess')\n",
"plt.imshow(guess, cmap='viridis')"
"print(\"Got guess\")\n",
"plt.imshow(guess, cmap=\"viridis\")"
]
},
{
Expand All @@ -257,8 +183,8 @@
"outputs": [],
"source": [
"true_pos, false_neg = gf.accuracy(grid_truth, guess_out, aoi_in)\n",
"print(f'Points identified as grid that are grid: {100*true_pos:.0f}%')\n",
"print(f'Actual grid that was missed: {100*false_neg:.0f}%')"
"print(f\"Points identified as grid that are grid: {100*true_pos:.0f}%\")\n",
"print(f\"Actual grid that was missed: {100*false_neg:.0f}%\")"
]
},
{
Expand All @@ -275,8 +201,8 @@
"outputs": [],
"source": [
"guess_skel, affine = gf.thin(guess_out)\n",
"save_raster(guess_skeletonized_out, guess_skel, affine)\n",
"print('Skeletonized')\n",
"save_raster(guess_thin_out, guess_skel, affine)\n",
"print(\"Skeletonized\")\n",
"plt.imshow(guess_skel)"
]
},
Expand All @@ -293,9 +219,9 @@
"metadata": {},
"outputs": [],
"source": [
"guess_gdf = gf.raster_to_lines(guess_skeletonized_out)\n",
"guess_gdf.to_file(guess_vec_out, driver='GPKG')\n",
"print('Converted to geom')"
"guess_gdf = gf.raster_to_lines(guess_thin_out)\n",
"guess_gdf.to_file(guess_vec_out, driver=\"GPKG\")\n",
"print(\"Converted to geom\")"
]
},
{
Expand Down
Loading

0 comments on commit ff338ef

Please sign in to comment.