diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 8607de368..07bab4b9b 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -1,7 +1,7 @@
-files: "libpysal\/"
+files: "libpysal\/|docs\/"
 repos:
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: "v0.5.0"
+    rev: "v0.6.9"
     hooks:
       - id: ruff
       - id: ruff-format
diff --git a/docs/conf.py b/docs/conf.py
index 24c4648f0..973e917ed 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -1,5 +1,3 @@
-# -*- coding: utf-8 -*-
-#
 # libpysal documentation build configuration file, created by
 # sphinx-quickstart on Wed Jun 6 15:54:22 2018.
 #
@@ -17,8 +15,8 @@
 # documentation root, use os.path.abspath to make it absolute, like shown here.
 #
 import sphinx_bootstrap_theme
-import libpysal
+import libpysal
 
 # -- General configuration ------------------------------------------------
diff --git a/docs/user-guide/data/examples.ipynb b/docs/user-guide/data/examples.ipynb
index 051fef910..fe32a7931 100644
--- a/docs/user-guide/data/examples.ipynb
+++ b/docs/user-guide/data/examples.ipynb
@@ -35,7 +35,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "from libpysal.examples import get_path \n"
+ "from libpysal.examples import get_path"
 ]
 },
 {
@@ -72,6 +72,7 @@
 "outputs": [],
 "source": [
 "import libpysal\n",
+ "\n",
 "dbf = libpysal.io.open(get_path(\"mexicojoin.dbf\"))"
 ]
 },
 {
@@ -357,7 +358,7 @@
 }
 ],
 "source": [
- "libpysal.examples.load_example('dataset42')"
+ "libpysal.examples.load_example(\"dataset42\")"
 ]
 },
 {
@@ -387,7 +388,7 @@
 }
 ],
 "source": [
- "balt_url = libpysal.examples.get_url('Baltimore')\n",
+ "balt_url = libpysal.examples.get_url(\"Baltimore\")\n",
 "balt_url"
 ]
 },
 {
@@ -421,7 +422,7 @@
 }
 ],
 "source": [
- "libpysal.examples.explain('taz')"
+ "libpysal.examples.explain(\"taz\")"
 ]
 },
 {
@@ -430,7 +431,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "taz = libpysal.examples.load_example('taz')"
+ "taz = libpysal.examples.load_example(\"taz\")"
 ]
 },
 {
@@ -485,7 +486,7 @@
 }
 ],
 "source": [
- "libpysal.examples.explain('Baltimore')"
+ "libpysal.examples.explain(\"Baltimore\")"
 ]
 },
 {
@@ -502,7 +503,7 @@
 }
 ],
 "source": [
- "balt = libpysal.examples.load_example('Baltimore')"
+ "balt = libpysal.examples.load_example(\"Baltimore\")"
 ]
 },
 {
@@ -673,7 +674,8 @@
 ],
 "source": [
 "from libpysal.examples import explain\n",
- "explain('Tampa1')"
+ "\n",
+ "explain(\"Tampa1\")"
 ]
 },
 {
@@ -691,7 +693,8 @@
 ],
 "source": [
 "from libpysal.examples import load_example\n",
- "tampa1 = load_example('Tampa1')"
+ "\n",
+ "tampa1 = load_example(\"Tampa1\")"
 ]
 },
 {
@@ -820,7 +823,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "tampa_counties_shp = tampa1.load('tampa_counties.shp')"
+ "tampa_counties_shp = tampa1.load(\"tampa_counties.shp\")"
 ]
 },
 {
@@ -858,7 +861,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "tampa_df = geopandas.read_file(tampa1.get_path('tampa_counties.shp'))"
+ "tampa_df = geopandas.read_file(tampa1.get_path(\"tampa_counties.shp\"))"
 ]
 },
 {
@@ -936,7 +939,7 @@
 }
 ],
 "source": [
- "libpysal.examples.explain('Rio Grande do Sul')"
+ "libpysal.examples.explain(\"Rio Grande do Sul\")"
 ]
 },
 {
@@ -960,7 +963,7 @@
 }
 ],
 "source": [
- "rio = libpysal.examples.load_example('Rio Grande do Sul')"
+ "rio = libpysal.examples.load_example(\"Rio Grande do Sul\")"
 ]
 },
 {
@@ -1051,7 +1054,7 @@
 }
 ],
 "source": [
- "libpysal.examples.remote_datasets.datasets # a listing of all remotes"
+ "libpysal.examples.remote_datasets.datasets  # a listing of all remotes"
 ]
 },
 {
diff --git a/docs/user-guide/graph/matching-graph.ipynb b/docs/user-guide/graph/matching-graph.ipynb
index 0c0394bb2..b4f2e5faf 100644
--- a/docs/user-guide/graph/matching-graph.ipynb
+++ b/docs/user-guide/graph/matching-graph.ipynb
@@ -22,13 +22,13 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "import sys\n",
 "import os\n",
- "sys.path.append(os.path.abspath('..'))\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
+ "import sys\n",
+ "\n",
+ "sys.path.append(os.path.abspath(\"..\"))\n",
 "import geopandas\n",
- "import pandas"
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np"
 ]
 },
 {
@@ -46,7 +46,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "points = np.row_stack([(10.2, 5.1), (4.7, 2.2), (5.3, 5.7), (2.7, 5.3), (7,4)])\n",
+ "points = np.row_stack([(10.2, 5.1), (4.7, 2.2), (5.3, 5.7), (2.7, 5.3), (7, 4)])\n",
 "gdf = geopandas.GeoDataFrame(geometry=geopandas.points_from_xy(*points.T))"
 ]
 },
 {
@@ -108,8 +108,8 @@
 }
 ],
 "source": [
- "f, ax =plt.subplots(1,3)\n",
- "for i,g in enumerate((g1, g2, g3)):\n",
+ "f, ax = plt.subplots(1, 3)\n",
+ "for i, g in enumerate((g1, g2, g3)):\n",
 " g.plot(gdf, ax=ax[i])\n",
 " ax[i].set_title(f\"k = {i+1}\")"
 ]
 },
 {
@@ -128,6 +128,7 @@
 "outputs": [],
 "source": [
 "import geodatasets\n",
+ "\n",
 "stores = geopandas.read_file(geodatasets.get_path(\"geoda liquor_stores\")).explode(\n",
 " index_parts=False\n",
 ")"
 ]
 },
 {
@@ -261,8 +262,8 @@
 }
 ],
 "source": [
- "f, ax =plt.subplots(1,3)\n",
- "for i,g in enumerate((g1, g5, g10)):\n",
+ "f, ax = plt.subplots(1, 3)\n",
+ "for i, g in enumerate((g1, g5, g10)):\n",
 " g.plot(stores, ax=ax[i], nodes=False)\n",
 " ax[i].set_title(f\"k = {(1, 5, 10)[i]}\")"
 ]
 },
 {
@@ -293,8 +294,8 @@
 "source": [
 "sources = stores.sample(100)\n",
 "sinks = stores[~stores.index.isin(sources.index)].sample(100)\n",
- "ax = sources.plot(color='red')\n",
- "sinks.plot(color='blue', ax=ax)\n",
+ "ax = sources.plot(color=\"red\")\n",
+ "sinks.plot(color=\"blue\", ax=ax)\n",
 "plt.show()"
 ]
 },
 {
@@ -304,8 +305,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "from libpysal.graph._matching import _spatial_matching\n",
- "import shapely"
+ "import shapely\n",
+ "\n",
+ "from libpysal.graph._matching import _spatial_matching"
 ]
 },
 {
@@ -334,7 +336,9 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "crosspattern_heads, crosspattern_tails, weights, mip = _spatial_matching(x=sink_coordinates, y = source_coordinates, n_matches=1, return_mip=True)"
+ "crosspattern_heads, crosspattern_tails, weights, mip = _spatial_matching(\n",
+ " x=sink_coordinates, y=source_coordinates, n_matches=1, return_mip=True\n",
+ ")"
 ]
 },
 {
@@ -364,12 +368,13 @@
 "outputs": [],
 "source": [
 "lines = shapely.linestrings(\n",
- " list( \n",
- " zip(\n",
- " map(list, source_coordinates[crosspattern_heads]),\n",
- " map(list, sink_coordinates[crosspattern_tails])\n",
+ " list(\n",
+ " zip(\n",
+ " map(list, source_coordinates[crosspattern_heads]),\n",
+ " map(list, sink_coordinates[crosspattern_tails]),\n",
+ " strict=False,\n",
+ " )\n",
 " )\n",
- ")\n",
 ")"
 ]
 },
 {
@@ -390,9 +395,9 @@
 }
 ],
 "source": [
- "ax = sources.plot(color='red')\n",
- "sinks.plot(color='blue', ax=ax)\n",
- "geopandas.GeoSeries(lines).plot(linewidth=1, color='k', ax=ax)\n",
+ "ax = sources.plot(color=\"red\")\n",
+ "sinks.plot(color=\"blue\", ax=ax)\n",
+ "geopandas.GeoSeries(lines).plot(linewidth=1, color=\"k\", ax=ax)\n",
 "plt.show()"
 ]
 }
diff --git a/docs/user-guide/graph/w_g_migration.ipynb b/docs/user-guide/graph/w_g_migration.ipynb
index 6111ff91f..a8ef58763 100644
--- a/docs/user-guide/graph/w_g_migration.ipynb
+++ b/docs/user-guide/graph/w_g_migration.ipynb
@@ -47,8 +47,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "from libpysal import weights\n",
- "from libpysal import graph"
+ "from libpysal import graph, weights"
 ]
 },
 {
@@ -85,11 +84,10 @@
 "source": [
 "%matplotlib inline\n",
 "\n",
- "import seaborn as sns\n",
- "import pandas as pd\n",
 "import geopandas as gpd\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt\n",
+ "import pandas as pd\n",
+ "import seaborn as sns\n",
+ "\n",
 "from libpysal import examples\n",
 "\n",
 "%load_ext watermark\n",
@@ -2710,7 +2708,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "w_queen.transform = 'r'\n",
+ "w_queen.transform = \"r\"\n",
 "wlag = lag_spatial(w_queen, y)"
 ]
 },
 {
@@ -3111,7 +3109,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "ngdf = gdf.set_index('NAME')"
+ "ngdf = gdf.set_index(\"NAME\")"
 ]
 },
 {
@@ -3162,7 +3160,7 @@
 }
 ],
 "source": [
- "g['Ashe']"
+ "g[\"Ashe\"]"
 ]
 },
 {
@@ -3742,9 +3740,9 @@
 }
 ],
 "source": [
- "m = ngdf.loc[g['Ashe'].index].explore(color=\"#25b497\")\n",
- "ngdf.loc[['Ashe']].explore(m=m, color=\"#fa94a5\")\n",
- "g.explore(ngdf, m=m, focal='Ashe')"
+ "m = ngdf.loc[g[\"Ashe\"].index].explore(color=\"#25b497\")\n",
+ "ngdf.loc[[\"Ashe\"]].explore(m=m, color=\"#fa94a5\")\n",
+ "g.explore(ngdf, m=m, focal=\"Ashe\")"
 ]
 },
 {
diff --git a/docs/user-guide/weights/Raster_awareness_API.ipynb b/docs/user-guide/weights/Raster_awareness_API.ipynb
index 2e09dacae..af971e738 100644
--- a/docs/user-guide/weights/Raster_awareness_API.ipynb
+++ b/docs/user-guide/weights/Raster_awareness_API.ipynb
@@ -25,13 +25,13 @@
 "source": [
 "%matplotlib inline\n",
 "\n",
- "from libpysal.weights import Rook, Queen, raster\n",
 "import matplotlib.pyplot as plt\n",
- "from splot import libpysal as splot\n",
 "import numpy as np\n",
 "import xarray as xr\n",
- "import pandas as pd\n",
- "from esda import Moran_Local"
+ "from esda import Moran_Local\n",
+ "from splot import libpysal as splot\n",
+ "\n",
+ "from libpysal.weights import Queen, Rook, raster"
 ]
 },
 {
@@ -82,7 +82,8 @@
 }
 ],
 "source": [
- "ds = xr.tutorial.open_dataset(\"air_temperature.nc\") # -> returns a xarray.Dataset object\n",
+ "# -> returns a xarray.Dataset object\n",
+ "ds = xr.tutorial.open_dataset(\"air_temperature.nc\")\n",
 "da = ds[\"air\"] # we'll use the \"air\" data variable for further analysis\n",
 "print(da)"
 ]
 },
 {
@@ -116,8 +117,8 @@
 }
 ],
 "source": [
- "da = da.groupby('time.month').mean()\n",
- "print(da.coords) # as a result time dim is replaced by month "
+ "da = da.groupby(\"time.month\").mean()\n",
+ "print(da.coords) # as a result time dim is replaced by month"
 ]
 },
 {
@@ -149,8 +150,12 @@
 }
 ],
 "source": [
- "# let's plot over month, each facet will represent the mean air temperature in a given month.\n",
- "da.plot(col=\"month\", col_wrap=4,) "
+ "# let's plot over month, each facet will represent\n",
+ "# the mean air temperature in a given month.\n",
+ "da.plot(\n",
+ " col=\"month\",\n",
+ " col_wrap=4,\n",
+ ")"
 ]
 },
 {
@@ -191,9 +196,12 @@
 ],
 "source": [
 "coords_labels = {}\n",
- "coords_labels[\"z_label\"] = \"month\" # since month does not belong to the default list we need to pass it using a dictionary\n",
+ "# since month does not belong to the default list we\n",
+ "# need to pass it using a dictionary\n",
+ "coords_labels[\"z_label\"] = \"month\"\n",
 "w_queen = Queen.from_xarray(\n",
- " da, z_value=12, coords_labels=coords_labels, sparse=False) # We'll use data from 12th layer (in our case layer=month)"
+ " da, z_value=12, coords_labels=coords_labels, sparse=False\n",
+ ") # We'll use data from 12th layer (in our case layer=month)"
 ]
 },
 {
@@ -297,7 +305,9 @@
 ],
 "source": [
 "# Converting obtained data back to DataArray\n",
- "moran_da = raster.w2da(lisa.p_sim, w_queen) # w2da accepts list/1d array/pd.Series and a weight object aligned to passed data\n",
+ "moran_da = raster.w2da(\n",
+ " lisa.p_sim, w_queen\n",
+ ") # w2da accepts list/1d array/pd.Series and a weight object aligned to passed data\n",
 "print(moran_da)"
 ]
 },
 {
@@ -387,8 +397,10 @@
 ],
 "source": [
 "# Lets load a netCDF Surface dataset\n",
- "ds = xr.open_dataset('ECMWF_ERA-40_subset.nc') # After loading netCDF dataset we obtained a xarray.Dataset object\n",
+ "ds = xr.open_dataset(\n",
+ " \"ECMWF_ERA-40_subset.nc\"\n",
+ ") # After loading netCDF dataset we obtained a xarray.Dataset object\n",
 "print(ds) # This Dataset object containes several data variables"
 ]
 },
 {
@@ -414,8 +426,10 @@
 }
 ],
 "source": [
- "da = ds[\"p2t\"] # this will give us the required DataArray with p2t (2 metre temperature) data variable\n",
- "da = da.groupby('time.day').mean()\n",
+ "# this will give us the required DataArray with p2t\n",
+ "# (2 metre temperature) data variable\n",
+ "da = ds[\"p2t\"]\n",
+ "da = da.groupby(\"time.day\").mean()\n",
 "print(da.dims)"
 ]
 },
 {
@@ -447,7 +461,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "data = da.to_series()[w_rook.index] # we derived the data from DataArray similar to our last example "
+ "data = da.to_series()[\n",
+ " w_rook.index\n",
+ "] # we derived the data from DataArray similar to our last example"
 ]
 },
 {
@@ -479,7 +495,9 @@
 ],
 "source": [
 "da1 = raster.wsp2da(data, w_rook, attrs=da.attrs, coords=da[12:13].coords)\n",
- "xr.DataArray.equals(da[12:13], da1) # method to compare 2 DataArray, if true then w2da was successfull"
+ "xr.DataArray.equals(\n",
+ " da[12:13], da1\n",
+ ") # method to compare 2 DataArray, if true then w2da was successfull"
 ]
 },
 {
@@ -524,7 +542,7 @@
 ],
 "source": [
 "# Loading raster data with missing values\n",
- "da = xr.open_rasterio('/data/Downloads/lux_ppp_2019.tif')\n",
+ "da = xr.open_rasterio(\"/data/Downloads/lux_ppp_2019.tif\")\n",
 "print(da)"
 ]
 },
 {
@@ -557,7 +575,9 @@
 }
 ],
 "source": [
- "da.where(da.values>da.attrs[\"nodatavals\"][0]).plot() # we can see that the DataArray contains missing values."
+ "da.where(\n",
+ " da.values > da.attrs[\"nodatavals\"][0]\n",
+ ").plot() # we can see that the DataArray contains missing values."
 ]
 },
 {
@@ -607,8 +627,8 @@
 }
 ],
 "source": [
- "f,ax = plt.subplots(1,3,figsize=(4*4,4), subplot_kw=dict(aspect='equal'))\n",
- "da_s.where(da_s.values>da_s.attrs[\"nodatavals\"][0]).plot(ax=ax[0])\n",
+ "f, ax = plt.subplots(1, 3, figsize=(4 * 4, 4), subplot_kw=dict(aspect=\"equal\"))\n",
+ "da_s.where(da_s.values > da_s.attrs[\"nodatavals\"][0]).plot(ax=ax[0])\n",
 "ax[0].set_title(\"Sliced raster\")\n",
 "splot.plot_spatial_weights(w_rook, data=da_s, ax=ax[1])\n",
 "ax[1].set_title(\"Rook contiguity\")\n",
@@ -634,8 +654,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "# Building a test DataArray \n",
- "da_s = raster.testDataArray((1,5,10), rand=True)"
+ "# Building a test DataArray\n",
+ "da_s = raster.testDataArray((1, 5, 10), rand=True)"
 ]
 },
 {
@@ -765,7 +785,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
- "version": "3.12.2"
+ "version": "3.12.7"
 }
 },
 "nbformat": 4,
diff --git a/docs/user-guide/weights/categorical_lag.ipynb b/docs/user-guide/weights/categorical_lag.ipynb
index 37cb9af5e..6a5b87950 100644
--- a/docs/user-guide/weights/categorical_lag.ipynb
+++ b/docs/user-guide/weights/categorical_lag.ipynb
@@ -15,11 +15,13 @@
 "metadata": {},
 "outputs": [],
 "source": [
- ">>> from libpysal.graph._spatial_lag import _lag_spatial\n",
- ">>> import numpy as np\n",
- ">>> from libpysal.weights.util import lat2W\n",
- ">>> from libpysal.graph import Graph\n",
- ">>> graph = Graph.from_W(lat2W(3,3))\n"
+ "import numpy as np\n",
+ "\n",
+ "from libpysal.graph import Graph\n",
+ "from libpysal.graph._spatial_lag import _lag_spatial\n",
+ "from libpysal.weights.util import lat2W\n",
+ "\n",
+ "graph = Graph.from_W(lat2W(3, 3))"
 ]
 },
 {
@@ -40,9 +42,8 @@
 }
 ],
 "source": [
- ">>> y = np.arange(9)\n",
- ">>> _lag_spatial(graph, y)\n",
- "\n"
+ "y = np.arange(9)\n",
+ "_lag_spatial(graph, y)"
 ]
 },
 {
@@ -63,26 +64,10 @@
 }
 ],
 "source": [
- ">>> y = np.array([*'ababcbcbc'])\n",
- ">>> _lag_spatial(graph, y, categorical=True)"
+ "y = np.array([*\"ababcbcbc\"])\n",
+ "_lag_spatial(graph, y, categorical=True)"
 ]
 },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "84523f3b-3909-4e33-a7b9-11a70b263308",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "689efe78-b402-4bbc-b8f1-28c24faf9777",
- "metadata": {},
- "outputs": [],
- "source": []
- },
 {
 "cell_type": "code",
 "execution_count": 28,
@@ -101,8 +86,8 @@
 }
 ],
 "source": [
- ">>> y[3] = 'a'\n",
- ">>> y"
+ "y[3] = \"a\"\n",
+ "y"
 ]
 },
 {
@@ -123,8 +108,8 @@
 }
 ],
 "source": [
- ">>> np.random.seed(12345)\n",
- ">>> _lag_spatial(graph, y, categorical=True, ties='random')"
+ "np.random.seed(12345)\n",
+ "_lag_spatial(graph, y, categorical=True, ties=\"random\")"
 ]
 },
 {
@@ -145,7 +130,7 @@
 }
 ],
 "source": [
- ">>> _lag_spatial(graph, y, categorical=True, ties='random')"
+ "_lag_spatial(graph, y, categorical=True, ties=\"random\")"
 ]
 },
 {
@@ -166,7 +151,7 @@
 }
 ],
 "source": [
- ">>> _lag_spatial(graph, y, categorical=True, ties='tryself')"
+ "_lag_spatial(graph, y, categorical=True, ties=\"tryself\")"
 ]
 },
 {
@@ -192,22 +177,6 @@
 "_lag_spatial(graph, y, categorical=True)"
 ]
 },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b77d9c3b-e923-428b-8b61-f383a1af42fe",
- "metadata": {},
- "outputs": [],
- "source": []
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "b120ef88-dd80-4b9d-8c7c-8b413d476e08",
- "metadata": {},
- "outputs": [],
- "source": []
- },
 {
 "cell_type": "code",
 "execution_count": 1,
@@ -226,14 +195,15 @@
 }
 ],
 "source": [
- ">>> import libpysal\n",
- ">>> import numpy as np\n",
- ">>> np.random.seed(12345)\n",
- ">>> w = libpysal.weights.lat2W(3, 3)\n",
- ">>> y = np.array([*'ababcbcbc'])\n",
- ">>> y_l = libpysal.weights.lag_categorical(w, y)\n",
- ">>> np.array_equal(y_l, np.array(['b', 'a', 'b', 'c', 'b', 'c', 'b', 'c', 'b']))\n",
- "\n"
+ "import numpy as np\n",
+ "\n",
+ "import libpysal\n",
+ "\n",
+ "np.random.seed(12345)\n",
+ "w = libpysal.weights.lat2W(3, 3)\n",
+ "y = np.array([*\"ababcbcbc\"])\n",
+ "y_l = libpysal.weights.lag_categorical(w, y)\n",
+ "np.array_equal(y_l, np.array([\"b\", \"a\", \"b\", \"c\", \"b\", \"c\", \"b\", \"c\", \"b\"]))"
 ]
 },
 {
@@ -331,6 +301,7 @@
 ],
 "source": [
 "import numpy\n",
+ "\n",
 "_lag_spatial(g, numpy.array(y), categorical=True)"
 ]
 },
 {
@@ -360,7 +331,7 @@
 }
 ],
 "source": [
- "_lag_spatial(g, numpy.array(y), categorical=True, ties='tryself')"
+ "_lag_spatial(g, numpy.array(y), categorical=True, ties=\"tryself\")"
 ]
 },
 {
@@ -381,7 +352,7 @@
 }
 ],
 "source": [
- "np.array_equal(y_l, _lag_spatial(g, numpy.array(y), categorical=True, ties='random'))"
+ "np.array_equal(y_l, _lag_spatial(g, numpy.array(y), categorical=True, ties=\"random\"))"
 ]
 },
 {
@@ -402,7 +373,7 @@
 }
 ],
 "source": [
- "np.array_equal(y_l, _lag_spatial(g, numpy.array(y), categorical=True, ties='raise'))"
+ "np.array_equal(y_l, _lag_spatial(g, numpy.array(y), categorical=True, ties=\"raise\"))"
 ]
 },
 {
@@ -431,7 +402,7 @@
 }
 ],
 "source": [
- "y[3] = 'a'\n",
+ "y[3] = \"a\"\n",
 "y"
 ]
 },
 {
@@ -463,7 +434,7 @@
 }
 ],
 "source": [
- "_lag_spatial(g, numpy.array(y), categorical=True, ties='random')"
+ "_lag_spatial(g, numpy.array(y), categorical=True, ties=\"random\")"
 ]
 },
 {
@@ -484,7 +455,7 @@
 }
 ],
 "source": [
- "_lag_spatial(g, numpy.array(y), categorical=True, ties='tryself')"
+ "_lag_spatial(g, numpy.array(y), categorical=True, ties=\"tryself\")"
 ]
 },
 {
@@ -508,7 +479,7 @@
 ],
 "source": [
 "y_l = libpysal.weights.lag_categorical(w, y)\n",
- "np.array_equal(y_l, _lag_spatial(g, numpy.array(y), categorical=True, ties='raise'))"
+ "np.array_equal(y_l, _lag_spatial(g, numpy.array(y), categorical=True, ties=\"raise\"))"
 ]
 },
 {
@@ -520,14 +491,6 @@
 "source": [
 "y_l"
 ]
- },
- {
- "cell_type": "code",
- "execution_count": null,
- "id": "2a9f175a-4108-408e-b17b-7147861d1375",
- "metadata": {},
- "outputs": [],
- "source": []
- }
 ],
 "metadata": {
@@ -546,7 +509,7 @@
 "name": "python",
 "nbconvert_exporter": "python",
 "pygments_lexer": "ipython3",
- "version": "3.12.2"
+ "version": "3.12.7"
 }
 },
 "nbformat": 4,
diff --git a/docs/user-guide/weights/voronoi.ipynb b/docs/user-guide/weights/voronoi.ipynb
index f91fd9639..ff1a18928 100644
--- a/docs/user-guide/weights/voronoi.ipynb
+++ b/docs/user-guide/weights/voronoi.ipynb
@@ -22,10 +22,10 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "import sys\n",
 "import os\n",
- "sys.path.append(os.path.abspath('..'))\n",
- "import libpysal"
+ "import sys\n",
+ "\n",
+ "sys.path.append(os.path.abspath(\"..\"))"
 ]
 },
 {
@@ -34,7 +34,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "from libpysal.cg.voronoi import voronoi, voronoi_frames"
+ "from libpysal.cg.voronoi import voronoi, voronoi_frames"
 ]
 },
 {
@@ -120,9 +120,8 @@
 "outputs": [],
 "source": [
 "%matplotlib inline\n",
- "import matplotlib\n",
- "import numpy as np\n",
- "import matplotlib.pyplot as plt"
+ "import matplotlib.pyplot as plt\n",
+ "import numpy as np"
 ]
 },
 {
@@ -155,8 +154,8 @@
 ],
 "source": [
 "fig, ax = plt.subplots()\n",
- "region_df.plot(ax=ax, color='blue',edgecolor='black', alpha=0.3)\n",
- "point_df.plot(ax=ax, color='red')\n"
+ "region_df.plot(ax=ax, color=\"blue\", edgecolor=\"black\", alpha=0.3)\n",
+ "point_df.plot(ax=ax, color=\"red\")"
 ]
 },
 {
@@ -174,7 +173,7 @@
 "source": [
 "n_points = 200\n",
 "np.random.seed(12345)\n",
- "points = np.random.random((n_points,2))*10 + 10\n",
+ "points = np.random.random((n_points, 2)) * 10 + 10\n",
 "results = voronoi(points)\n",
 "mins = points.min(axis=0)\n",
 "maxs = points.max(axis=0)"
@@ -228,7 +227,7 @@
 ],
 "source": [
 "fig, ax = plt.subplots()\n",
- "points_df.plot(ax=ax, color='red')"
+ "points_df.plot(ax=ax, color=\"red\")"
 ]
 },
 {
@@ -261,8 +260,8 @@
 ],
 "source": [
 "fig, ax = plt.subplots()\n",
- "regions_df.plot(ax=ax, color='blue',edgecolor='black', alpha=0.3)\n",
- "points_df.plot(ax=ax, color='red')"
+ "regions_df.plot(ax=ax, color=\"blue\", edgecolor=\"black\", alpha=0.3)\n",
+ "points_df.plot(ax=ax, color=\"red\")"
 ]
 },
 {
@@ -313,12 +312,12 @@
 ],
 "source": [
 "fig, ax = plt.subplots()\n",
- "regions_df.plot(ax=ax, edgecolor='black', facecolor='blue', alpha=0.2 )\n",
- "points_df.plot(ax=ax, color='red')\n",
+ "regions_df.plot(ax=ax, edgecolor=\"black\", facecolor=\"blue\", alpha=0.2)\n",
+ "points_df.plot(ax=ax, color=\"red\")\n",
 "plt.xlim(minx, maxx)\n",
 "plt.ylim(miny, maxy)\n",
- "plt.title(\"buffer: %f, n: %d\"%(r,n_points))\n",
- "plt.show()\n"
+ "plt.title(\"buffer: %f, n: %d\" % (r, n_points))\n",
+ "plt.show()"
 ]
 },
 {
@@ -421,7 +420,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "idx = [i for i in range(w.n) if w.cardinalities[i]==12]"
+ "idx = [i for i in range(w.n) if w.cardinalities[i] == 12]"
 ]
 },
 {
diff --git a/docs/user-guide/weights/weights.ipynb b/docs/user-guide/weights/weights.ipynb
index b06937a51..621f666c7 100644
--- a/docs/user-guide/weights/weights.ipynb
+++ b/docs/user-guide/weights/weights.ipynb
@@ -13,8 +13,8 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "import sys\n",
- "import os"
+ "import os\n",
+ "import sys"
 ]
 },
 {
@@ -23,7 +23,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "sys.path.append(os.path.abspath('..'))\n",
+ "sys.path.append(os.path.abspath(\"..\"))\n",
 "import libpysal"
 ]
 },
 {
@@ -108,7 +108,7 @@
 }
 ],
 "source": [
- "libpysal.examples.explain('mexico')"
+ "libpysal.examples.explain(\"mexico\")"
 ]
 },
 {
@@ -125,10 +125,11 @@
 "outputs": [],
 "source": [
 "import geopandas\n",
+ "\n",
 "pth = libpysal.examples.get_path(\"mexicojoin.shp\")\n",
 "gdf = geopandas.read_file(pth)\n",
 "\n",
- "from libpysal.weights import Queen, Rook, KNN"
+ "from libpysal.weights import KNN, Queen, Rook"
 ]
 },
 {
@@ -138,7 +139,7 @@
 "outputs": [],
 "source": [
 "%matplotlib inline\n",
- "import matplotlib.pyplot as plt\n"
+ "import matplotlib.pyplot as plt"
 ]
 },
 {
@@ -241,10 +242,13 @@
 }
 ],
 "source": [
- "ax = gdf.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = w_rook.plot(gdf, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
+ "ax = gdf.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = w_rook.plot(\n",
+ " gdf,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
 "ax.set_axis_off()"
 ]
 },
 {
@@ -488,7 +492,7 @@
 }
 ],
 "source": [
- "w_rook.neighbors[0] # the first location has two neighbors at locations 1 and 22"
+ "w_rook.neighbors[0]  # the first location has two neighbors at locations 1 and 22"
 ]
 },
 {
@@ -511,7 +515,7 @@
 }
 ],
 "source": [
- "gdf['NAME'][[0, 1,22]]"
+ "gdf[\"NAME\"][[0, 1, 22]]"
 ]
 },
 {
@@ -596,10 +600,13 @@
 }
 ],
 "source": [
- "ax = gdf.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = w_queen.plot(gdf, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
+ "ax = gdf.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = w_queen.plot(\n",
+ " gdf,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
 "ax.set_axis_off()"
 ]
 },
 {
@@ -649,7 +656,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "c9 = [idx for idx,c in w_queen.cardinalities.items() if c==9]"
+ "c9 = [idx for idx, c in w_queen.cardinalities.items() if c == 9]"
 ]
 },
 {
@@ -670,7 +677,7 @@
 }
 ],
 "source": [
- "gdf['NAME'][c9]"
+ "gdf[\"NAME\"][c9]"
 ]
 },
 {
@@ -743,21 +750,28 @@
 ],
 "source": [
 "import numpy as np\n",
- "f,ax = plt.subplots(1,2,figsize=(10, 6), subplot_kw=dict(aspect='equal'))\n",
- "gdf.plot(edgecolor='grey', facecolor='w', ax=ax[0])\n",
- "w_rook.plot(gdf, ax=ax[0], \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
- "ax[0].set_title('Rook')\n",
+ "\n",
+ "f, ax = plt.subplots(1, 2, figsize=(10, 6), subplot_kw=dict(aspect=\"equal\"))\n",
+ "gdf.plot(edgecolor=\"grey\", facecolor=\"w\", ax=ax[0])\n",
+ "w_rook.plot(\n",
+ " gdf,\n",
+ " ax=ax[0],\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
+ "ax[0].set_title(\"Rook\")\n",
 "ax[0].axis(np.asarray([-105.0, -95.0, 21, 26]))\n",
 "\n",
- "ax[0].axis('off')\n",
- "gdf.plot(edgecolor='grey', facecolor='w', ax=ax[1])\n",
- "w_queen.plot(gdf, ax=ax[1], \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
- "ax[1].set_title('Queen')\n",
- "ax[1].axis('off')\n",
+ "ax[0].axis(\"off\")\n",
+ "gdf.plot(edgecolor=\"grey\", facecolor=\"w\", ax=ax[1])\n",
+ "w_queen.plot(\n",
+ " gdf,\n",
+ " ax=ax[1],\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
+ "ax[1].set_title(\"Queen\")\n",
+ "ax[1].axis(\"off\")\n",
 "ax[1].axis(np.asarray([-105.0, -95.0, 21, 26]))"
 ]
 },
 {
@@ -809,10 +823,13 @@
 }
 ],
 "source": [
- "ax = gdf.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = w_knn.plot(gdf, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
+ "ax = gdf.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = w_knn.plot(\n",
+ " gdf,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
 "ax.set_axis_off()"
 ]
 },
 {
@@ -830,7 +847,7 @@
 "outputs": [],
 "source": [
 "pth = libpysal.examples.get_path(\"mexicojoin.shp\")\n",
- "from libpysal.weights import Queen, Rook, KNN"
+ "from libpysal.weights import KNN, Queen, Rook"
 ]
 },
 {
@@ -895,10 +912,13 @@
 }
 ],
 "source": [
- "ax = gdf.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = w_knn1.plot(gdf, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
+ "ax = gdf.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = w_knn1.plot(\n",
+ " gdf,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
 "ax.set_axis_off()"
 ]
 },
 {
@@ -915,7 +935,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "w_knn3 = KNN.from_shapefile(pth,k=3)"
+ "w_knn3 = KNN.from_shapefile(pth, k=3)"
 ]
 },
 {
@@ -937,10 +957,13 @@
 }
 ],
 "source": [
- "ax = gdf.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = w_knn3.plot(gdf, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
+ "ax = gdf.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = w_knn3.plot(\n",
+ " gdf,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
 "ax.set_axis_off()"
 ]
 },
 {
@@ -966,7 +989,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "w = lat2W(4,3)"
+ "w = lat2W(4, 3)"
 ]
 },
 {
@@ -1053,7 +1076,7 @@
 "metadata": {},
 "outputs": [],
 "source": [
- "rs = libpysal.examples.get_path('map_RS_BR.shp')"
+ "rs = libpysal.examples.get_path(\"map_RS_BR.shp\")"
 ]
 },
 {
@@ -1193,11 +1216,14 @@
 }
 ],
 "source": [
- "plt.rcParams[\"figure.figsize\"] = (20,15)\n",
- "ax = rs_df.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = wq.plot(rs_df, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
+ "plt.rcParams[\"figure.figsize\"] = (20, 15)\n",
+ "ax = rs_df.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = wq.plot(\n",
+ " rs_df,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
 "\n",
 "ax.set_axis_off()"
 ]
 },
 {
@@ -1221,14 +1247,16 @@
 }
 ],
 "source": [
- "\n",
- "ax = rs_df.plot(edgecolor='grey', facecolor='w')\n",
- "f,ax = wf.plot(rs_df, ax=ax, \n",
- " edge_kws=dict(color='r', linestyle=':', linewidth=1),\n",
- " node_kws=dict(marker=''))\n",
- "ax.set_title('Rio Grande do Sul: Nonplanar Weights')\n",
+ "ax = rs_df.plot(edgecolor=\"grey\", facecolor=\"w\")\n",
+ "f, ax = wf.plot(\n",
+ " rs_df,\n",
+ " ax=ax,\n",
+ " edge_kws=dict(color=\"r\", linestyle=\":\", linewidth=1),\n",
+ " node_kws=dict(marker=\"\"),\n",
+ ")\n",
+ "ax.set_title(\"Rio Grande do Sul: Nonplanar Weights\")\n",
 "ax.set_axis_off()\n",
- "plt.savefig('rioGrandeDoSul.png')\n"
+ "plt.savefig(\"rioGrandeDoSul.png\")"
 ]
 },
 {
diff --git a/libpysal/examples/tests/test_available.py b/libpysal/examples/tests/test_available.py
index 549d2b422..59166a07a 100644
--- a/libpysal/examples/tests/test_available.py
+++ b/libpysal/examples/tests/test_available.py
@@ -20,7 +20,7 @@ class TestExamples:
     def test_available(self):
         examples = available()
-        assert type(examples) == pandas.core.frame.DataFrame
+        assert isinstance(examples, pandas.DataFrame)
         assert examples.shape == (99, 3)
 
     def test_data_home(self):
diff --git a/pyproject.toml b/pyproject.toml
index 80dba4d4d..8f7740beb 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -82,12 +82,27 @@ include = ["libpysal", "libpysal.*"]
 line-length = 88
 lint.select = ["E", "F", "W", "I", "UP", "N", "B", "A", "C4", "SIM", "ARG"]
 target-version = "py310"
-exclude = ["libpysal/tests/*", "docs/*"]
+exclude = [
+    "libpysal/tests/*",
+    "libpysal/cg/tests/fast_point_in_polygon_algorithm.ipynb"
+]
+include = ["*.py", "*.ipynb"]
+
 [tool.ruff.lint.per-file-ignores]
 "*__init__.py" = [
     "F401", # imported but unused
     "F403", # star import; unable to detect undefined names
 ]
+"*.ipynb" = [
+    "C408", # Unnecessary `dict` call
+    "E402", # Module level import not at top of cell
+]
+"docs/conf.py" = [
+    "A001", # Variable `copyright` is shadowing a Python builtin
+    "E501", # Line too long
+    "UP031", # Use format specifiers instead of percent format
+]
+
 [tool.coverage.run]