diff --git a/.gitignore b/.gitignore
index 624a551..c00e7ff 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,5 +1,14 @@
 *.py[cod]
+*.nc
+
+__pycache__
+pip-wheel-metadata
+_version.py
+.pytest_cache
+.DS_Store
+.vscode
+
 # C extensions
 *.so
diff --git a/ldndc2nc/config_handler.py b/ldndc2nc/config_handler.py
new file mode 100644
index 0000000..9b22994
--- /dev/null
+++ b/ldndc2nc/config_handler.py
@@ -0,0 +1,143 @@
+# -*- coding: utf-8 -*-
+"""ldndc2nc.config_handler: read the configuration settings for this ldndc2nc run."""
+
+import logging
+import os
+from pathlib import Path
+
+import yaml
+
+from .variable import Variable
+
+log = logging.getLogger(__name__)
+
+
+def find_config(local_file=None):
+    """ look for config file in the default locations """
+    env_var = os.environ.get("LDNDC2NC_CONF", "__NOTSET__")
+    locations = [
+        x / "ldndc2nc.conf" for x in [Path("."), Path.home(), Path("/etc/ldndc2nc")]
+    ]
+    locations.append(Path(env_var))
+
+    if local_file:
+        locations.insert(0, Path(local_file))
+
+    for cfg_file in locations:
+        if cfg_file.is_file():
+            return cfg_file
+    return None
+
+
+def read_config(file_path) -> dict:
+    """ read yaml config file and modify special properties"""
+
+    print(f"read_config: {file_path}")
+    with open(file_path, "r") as ymlfile:
+        cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
+
+    return cfg
+
+
+def write_config(self, dest=Path.home()):
+    """ write cfg file to user dir """
+
+    if self.cfg:
+        fname = Path(dest) / "ldndc2nc.conf"
+        with open(fname, "w") as f:
+            f.write(yaml.dump(self._decode(self.cfg), default_flow_style=False))
+
+
+def get_section(self, section):
+    """ parse config data structure, return data of required section """
+
+    if self.cfg:
+        self.cfg = self._encode(self.cfg)
+
+    section_data = None
+
+    def is_valid_section(s):
+        valid_sections = ["info", "project", "variables", "refdata"]
+        return s in valid_sections
+
+    if is_valid_section(section.lower()):
+        try:
+            section_data = self.cfg.get(section, self.cfg.get(section.lower()))
+        except KeyError:
+            log.critical(
+                f"Section <{section.lower()}> not found in cfg file {self.file_path}"
+            )
+            log.critical(
+                f"The following sections are present: {list(self.cfg.keys())}."
+            )
+    else:
+        log.critical(f"Section {section.lower()} is not a valid section")
+        raise RuntimeError
+    return section_data
+
+
+class ConfigHandler:
+
+    _write_config = write_config
+    _get_section = get_section
+
+    def __init__(self, local_file=None):
+        self.cfg_file = find_config(local_file=local_file)
+        raw = read_config(self.cfg_file)
+        self.cfg = self._encode(raw)
+
+    @staticmethod
+    def _decode(cfg):
+        if "variables" in cfg:
+            for file, variables in cfg["variables"].items():
+                cfg["variables"][file] = [v.text_full for v in variables]
+        return cfg
+
+    @staticmethod
+    def _encode(cfg):
+        if "variables" in cfg:
+            for file, variables in cfg["variables"].items():
+                cfg["variables"][file] = [
+                    Variable(v) if isinstance(v, str) else v for v in variables
+                ]
+        return cfg
+
+    @property
+    def file_path(self):
+        if not self.cfg_file:
+            self.cfg_file = find_config()
+        return self.cfg_file
+
+    @property
+    def variables(self):
+        vars = []
+        if "variables" in self.cfg:
+            for file, variables in self.cfg["variables"].items():
+                vars += [Variable(v) if isinstance(v, str) else v for v in variables]
+        return vars
+
+    @property
+    def text(self):
+        clean = self._decode(self.cfg)
+        return yaml.dump(clean, default_flow_style=False) if self.cfg else None
+
+    @property
+    def global_info(self):
+        global_info = {}
+        for section_name in ["info", "project"]:
+            section = self.section(section_name)
+            if section:
+                global_info.update(section)
+            else:
+                log.warn(f"No <{section_name}> data found in config")
+        return global_info
+
+    def section(self, section):
+        return self._get_section(section)
+
+    def write(self, *args, **kwargs):
+        self._write_config(*args, **kwargs)
+
+    def __repr__(self):
+        return f""
diff --git a/ldndc2nc/data/ldndc2nc.conf b/ldndc2nc/data/ldndc2nc.conf
index 2871584..2ee4d7e 100644
--- a/ldndc2nc/data/ldndc2nc.conf
+++ b/ldndc2nc/data/ldndc2nc.conf
@@ -24,7 +24,7 @@ project:
 # variables:
   soilchemistry-daily.txt:
-    - dC_co2_emis[kgCha-1];dC_co2_emis_auto[kgCha-1];dC_co2_emis_hetero[kgCha-1]
+    - dC_co2_emis[kgCha-1]=dC_co2_emis_auto[kgCha-1]+dC_co2_emis_hetero[kgCha-1]
     - dN_n2o_emis[kgNha-1]
     - dN_no_emis[kgNha-1]
     - dN_n2_emis[kgNha-1]
@@ -44,7 +44,7 @@ variables:
   report-fertilize.txt:
     - dN_fertilizer[kgNha-1]
   report-harvest.txt:
-    - dC_habove[kgCha-1];dC_bud[kgCha-1];dC_straw[kgCha-1]
+    - dC_habove[kgCha-1]=dC_bud[kgCha-1]+dC_straw[kgCha-1]
     - dC_bud[kgCha-1]
     - dC_stubble[kgCha-1]
diff --git a/ldndc2nc/extra.py b/ldndc2nc/extra.py
index 6ac4cf6..8e7fcec 100644
--- a/ldndc2nc/extra.py
+++ b/ldndc2nc/extra.py
@@ -28,22 +28,19 @@ def _copy_default_config():
     shutil.copyfile(fname, Path.home() / "ldndc2nc.conf")
 
 
-def _find_config():
+def _find_config() -> Path:
     """ look for cfgFile in the default locations """
     cfgFile = None
+    env_var = os.environ.get("LDNDC2NC_CONF", "__NOTSET__")
+
     locations = [
-        Path("."),
-        Path.home(),
-        Path("/etc/ldndc2nc"),
-        os.environ.get("LDNDC2NC_CONF"),
+        x / "ldndc2nc.conf" for x in [Path("."), Path.home(), Path("/etc/ldndc2nc")]
     ]
-    locations = [x for x in locations if x is not None]
+    locations.append(Path(env_var))
 
     for loc in locations:
-        f = loc / "ldndc2nc.conf"
-        if f.is_file(f):
-            cfgFile = str(f)
-            break
+        if loc.is_file():
+            return loc
 
     return cfgFile
 
 
@@ -72,7 +69,7 @@ def is_multipart_item(x):
     return cfg
 
 
-def parse_config(cfg, section=None):
+def parse_config(cfg, section):
     """ parse config data structure, return data of required section """
 
     def is_valid_section(s):
@@ -101,10 +98,9 @@ def get_config(cfgFile=None):
 
     def cfgfile_exists(cfgFile):
        return cfgFile is not None
 
-    if cfgfile_exists(cfgFile):
-        if not os.path.isfile(cfgFile):
-            log.critical("Specified configuration file not found.")
-            exit(1)
+    if not Path(cfgFile).is_file():
+        log.critical(f"Specified config file not found: {cfgFile}")
+        exit(1)
     else:
         cfgFile = _find_config()
diff --git a/ldndc2nc/ldndc2nc.py b/ldndc2nc/ldndc2nc.py
index bee052a..3e547c5 100644
--- a/ldndc2nc/ldndc2nc.py
+++ b/ldndc2nc/ldndc2nc.py
@@ -13,11 +13,10 @@
 import numpy as np
 import pandas as pd
-
 import xarray as xr
 
 from .cli import cli
-from .extra import get_config, parse_config, set_config
+from .config_handler import ConfigHandler
 
 log = logging.getLogger(__name__)
@@ -88,6 +87,16 @@ def _all_items_identical(x):
     return x.count(x[0]) == len(x)
 
 
+def create_id_mapper(cell_ids: xr.DataArray):
+    dm = {}
+    for ila, la in enumerate(cell_ids.lat.values):
+        for ilo, lo in enumerate(cell_ids.lon.values):
+            the_id = cell_ids.loc[{"lat": la, "lon": lo}].values
+            if not np.isnan(the_id):
+                dm[int(the_id)] = (la, lo)
+    return dm
+
+
 def _extract_fileno(fname):
     """ extract file iterator
@@ -166,23 +175,6 @@ def _limit_df_years(years, df, yearcol="year"):
     return df
 
 
-def _read_global_info(cfg):
-    info = parse_config(cfg, section="info")
-    project = parse_config(cfg, section="project")
-    all_info = {}
-    if info:
-        for k in info.keys():
-            all_info[k] = info[k]
-    else:
-        log.warn("No data found in config")
-    if project:
-        for k in project.keys():
-            all_info[k] = project[k]
-    else:
-        log.warn("No data found in config")
-    return all_info
-
-
 def read_ldndc_txt(inpath, varData, years, limiter=""):
     """ parse ldndc txt output files and return dataframe """
@@ -201,13 +193,9 @@ def read_ldndc_txt(inpath, varData, years, limiter=""):
         infiles = _select_files(inpath, ldndc_file_type, limiter=limiter)
 
         # special treatment for tuple entries in varData
-        for v in varData[ldndc_file_type]:
-            if _is_composite_var(v):
-                varnames.append(v[0])
-                datacols += v[1]
-            else:
-                varnames.append(v)
-                datacols.append(v)
+        for var in varData[ldndc_file_type]:
+            varnames.append(var.name)
+            datacols.extend(var.sources)
 
         # iterate over all files of one ldndc file type
         for fcnt, fname in enumerate(infiles):
@@ -247,22 +235,24 @@ def read_ldndc_txt(inpath, varData, years, limiter=""):
         df = df.sort_values(by=["id", "time"])
         df = df.set_index(["id", "time"])
 
-        # sum columns if this was requested in the conf file
-        for v in varData[ldndc_file_type]:
-            if _is_composite_var(v):
-                new_colname, src_colnames = v
-                drop_colnames = []
+        cols_to_drop, cols_to_keep = [], []
 
-                df[new_colname] = df[src_colnames].sum(axis=1)
+        # sum columns if this was requested in the conf file
+        for var in varData[ldndc_file_type]:
+            df[var.name] = df[var.sources].sum(axis=1)
+            if var.name not in cols_to_keep:
+                cols_to_keep.append(var.name)
+            else:
+                raise ValueError(
+                    "Variable requested multiple times. Check your conf file."
+                )
 
-                # drop original columns if they are not explicitly requested
-                for v2 in varData[ldndc_file_type]:
-                    if not _is_composite_var(v2):
-                        if v2 in src_colnames:
-                            drop_colnames.append(v2)
+            cols_to_drop.extend(var.sources)
+            cols_to_drop.append(var.text)
 
-                df.drop(drop_colnames, axis=1)
+        cols_to_drop = list(set(cols_to_drop).difference(set(cols_to_keep)))
+        df = df.drop(cols_to_drop, axis=1)
 
         df_all.append(df)
 
     # check if all tables have the same number of rows
@@ -283,12 +273,10 @@ def main():
     # parse args
     args = cli()
 
-    # read config
-    cfg = get_config(args.config)
+    config = ConfigHandler(args.config)
 
-    # write config
     if args.storeconfig:
-        set_config(cfg)
+        config.write()
 
     # read or build refdata array
     def use_cli_refdata():
@@ -302,7 +290,7 @@ def use_cli_refdata():
             if refvar not in refnc.data_vars:
                 log.critical("Var <%s> not in %s" % (refvar, reffile))
                 exit(1)
-            cell_ids = refnc[refvar]
+            cell_ids = refnc[refvar].where(refnc[refvar] > 0)
             lats, lons = refnc.lat.values, refnc.lon.values
         else:
             log.critical("Specified reffile %s not found" % reffile)
@@ -311,34 +299,19 @@
         log.error("You need to specify a reffile")
         exit(1)
 
-    dm = {}
-    for ila, la in enumerate(lats):
-        for ilo, lo in enumerate(lons):
-            the_id = cell_ids.loc[{"lat": la, "lon": lo}].values
-            if np.isnan(the_id) is False:
-                dm[int(the_id)] = (la, lo)
-
-    # get general info
-    global_info = _read_global_info(cfg)
-
     # read source output from ldndc
-    log.info(cfg["variables"])
+    log.debug(config.variables)
 
     varinfos, df = read_ldndc_txt(
-        args.indir, cfg["variables"], args.years, limiter=args.limiter
+        args.indir, config.section("variables"), args.years, limiter=args.limiter
     )
-    log.info(df.columns)
-
-    df["lat"], df["lon"] = zip(*df.id.map(dm))
+    id_mapper = create_id_mapper(cell_ids)
+    df["lat"], df["lon"] = zip(*df.id.map(id_mapper))
 
     df = df.set_index(["time", "lat", "lon"])
+    df = df.drop("id", axis=1)
     df.sort_index(inplace=True)
 
-    # process data in yearly chunks
-    UNITS = {k: v for k, v in [_split_colname(colname) for colname in df.columns]}
-
-    df.columns = UNITS.keys()
-
     # iterate over cellids and variables
     ENCODING = {
         "complevel": 5,
@@ -376,11 +349,15 @@ def get_datavar_encodings(ds):
         ENCODINGS = get_datavar_encodings(ds)
 
         for v in ds.data_vars:
-            ds[v].attrs["units"] = UNITS[v]
+            units = next(
+                (var.unit for var in config.variables if var.name == v), None
+            )
+            if units:
+                ds[v].attrs["units"] = units
 
         if args.split:
             outfilename = f"{args.outfile[:-3]}_{yr}.nc"
-            ds.attrs = global_info
+            ds.attrs = config.global_info
             ds.to_netcdf(
                 Path(args.outdir) / outfilename,
                 format="NETCDF4_CLASSIC",
@@ -392,7 +369,7 @@ def get_datavar_encodings(ds):
     if not args.split:
         with xr.concat(ds_all, dim="time") as ds:
             ENCODINGS = get_datavar_encodings(ds)
-            ds.attrs = global_info
+            ds.attrs = config.global_info
             ds.to_netcdf(
                 Path(args.outdir) / args.outfile,
                 format="NETCDF4_CLASSIC",
diff --git a/tests/test_conf_processing.py b/ldndc2nc/tests/test_conf_processing.py
similarity index 91%
rename from tests/test_conf_processing.py
rename to ldndc2nc/tests/test_conf_processing.py
index a8d105a..0bc4398 100644
--- a/tests/test_conf_processing.py
+++ b/ldndc2nc/tests/test_conf_processing.py
@@ -1,4 +1,5 @@
 import pytest
+
 from ldndc2nc.ldndc2nc import _all_items_identical, _is_composite_var, _split_colname
 
 
@@ -20,6 +21,6 @@ def test_all_items_identical(input, result):
     assert _all_items_identical(input) == result
 
 
-@pytest.mark.xfail(raises=IndexError)
 def test_all_items_identical_empty():
-    _all_items_identical([])
+    with pytest.raises(IndexError):
+        _all_items_identical([])
diff --git a/ldndc2nc/tests/test_confighandler.py b/ldndc2nc/tests/test_confighandler.py
new file mode 100644
index 0000000..72f0f33
--- /dev/null
+++ b/ldndc2nc/tests/test_confighandler.py
@@ -0,0 +1,87 @@
+from pathlib import Path
+
+import pkg_resources
+import pytest
+
+from ldndc2nc.config_handler import ConfigHandler, find_config
+
+
+@pytest.fixture
+def mock_env_ldndc2nc(monkeypatch):
+    monkeypatch.setenv("LDNDC2NC_CONF", "/tmp/ldndc2nc.conf")
+
+
+@pytest.fixture
+def mock_env_ldndc2nc_missing(monkeypatch):
+    monkeypatch.setenv("LDNDC2NC_CONF", "__NOTSET__")
+
+
+@pytest.fixture
+def fs_with_config_file(fs):
+    fs.add_real_file(
+        pkg_resources.resource_filename("ldndc2nc", "data/ldndc2nc.conf"),
+        target_path=Path.home(),
+    )
+    yield fs
+
+
+# NOTE: fs is a fixture provided by pyfakefs which patches itself into
+# pytest when installed
+
+
+@pytest.mark.parametrize("path", [Path("."), Path.home(), Path("/etc/ldndc2nc")])
+def test_find_config_fs(fs, path):
+    fs.create_file(path / "ldndc2nc.conf")
+    assert find_config() == Path(path / "ldndc2nc.conf")
+
+
+def test_find_config_custom(fs):
+    custom = "/tmp/custom.conf"
+    fs.create_file(custom)
+    assert find_config(custom) == Path(custom)
+
+
+def test_find_config_environ(fs, mock_env_ldndc2nc):
+    custom_location = Path("/tmp/ldndc2nc.conf")
+    fs.create_file(custom_location)
+    assert find_config() == custom_location
+
+
+def test_find_config_environ_missing(fs, mock_env_ldndc2nc_missing):
+    assert find_config() is None
+
+
+@pytest.fixture(scope="class")
+def handler():
+    yield ConfigHandler()
+
+
+# class TestConfigHandler:
+#     def test_read_config(self, handler, fs_with_config_file):
+#         assert handler.cfg is not None
+
+#     @pytest.mark.parametrize(
+#         "path,expected",
+#         [(Path.home() / "ldndc2nc.conf", True), (Path("bad/ldndc2nc.conf"), False)],
+#     )
+#     def test_find_path(self, handler, fs_with_config_file, path, expected):
+#         assert (handler.file_path == path) == expected
+
+#     def test_variables(self, handler, fs_with_config_file):
+#         assert len(handler.variables) > 0
+
+#     @pytest.mark.parametrize(
+#         "section,expected",
+#         [("variables", False), ("VARIABLES", False), ("refdata", True)],
+#     )
+#     def test_section(self, handler, fs_with_config_file, section, expected):
+#         assert (handler.section(section) is None) == expected
+
+#     def test_section_unknown(self, handler, fs_with_config_file):
+#         with pytest.raises(RuntimeError):
+#             handler.section("_Variables")
+
+#     def test_global_info(self, handler, fs_with_config_file):
+#         # global info data present in default ldndc2nc.conf file
+#         global_info_entries = {"author", "email", "institution", "name", "version"}
+#         assert set(handler.global_info.keys()) == global_info_entries
diff --git a/ldndc2nc/tests/test_extra.py b/ldndc2nc/tests/test_extra.py
new file mode 100644
index 0000000..892bd0c
--- /dev/null
+++ b/ldndc2nc/tests/test_extra.py
@@ -0,0 +1,36 @@
+from pathlib import Path
+
+import pytest
+
+from ldndc2nc.extra import _find_config
+
+
+@pytest.fixture
+def mock_env_ldndc2nc(monkeypatch):
+    monkeypatch.setenv("LDNDC2NC_CONF", "/tmp/ldndc2nc.conf")
+
+
+@pytest.fixture
+def mock_env_ldndc2nc_missing(monkeypatch):
+    monkeypatch.setenv("LDNDC2NC_CONF", "__NOTSET__")
+
+
+# NOTE: fs is a fixture provided by pyfakefs which patches itself into
+# pytest when installed
+
+
+@pytest.mark.parametrize("path", [Path("."), Path.home(), Path("/etc/ldndc2nc")])
+def test_find_config_fs(fs, path):
+    fs.create_file(path / "ldndc2nc.conf")
+    assert _find_config() == Path(path / "ldndc2nc.conf")
+
+
+def test_find_config_environ(fs, mock_env_ldndc2nc):
+    custom_location = Path("/tmp/ldndc2nc.conf")
+    fs.create_file(custom_location)
+    assert _find_config() == custom_location
+
+
+def test_find_config_environ_missing(fs, mock_env_ldndc2nc_missing):
+    print(_find_config())
+    assert _find_config() is None
diff --git a/ldndc2nc/tests/test_variable.py b/ldndc2nc/tests/test_variable.py
new file mode 100644
index 0000000..d1ee5af
--- /dev/null
+++ b/ldndc2nc/tests/test_variable.py
@@ -0,0 +1,103 @@
+import pytest
+
+from ldndc2nc.variable import Variable, identical, valid_brackets, variables_compatible
+
+
+def test_identical():
+    assert identical(["A", "A", "A"]) is True
+    assert identical(["A", "A", "B"]) is False
+    assert identical([]) is True
+
+
+test_data = [
+    ("a", True),
+    ("a[b]", True),
+    ("a[]", True),
+    ("a[b[c]]", False),
+    ("a[[b]]", False),
+    ("a[b][c]", False),
+    ("a]b", False),
+    ("a]b[", False),
+    ("a[[b]", False),
+]
+
+
+@pytest.mark.parametrize("s,expected", test_data)
+def test_valid_brackets(s, expected):
+    assert valid_brackets(s) == expected
+
+
+# test a bunch of variable combinations
+test_data = [
+    ("n_emis", ["dN_n2o_emis[kgNha-1]", "dN_no_emis[kgNha-1]"], True),
+    ("dN_emis", ["dN_n2o_emis[kgNha-1]", "dN_no_emis[kgNha-1]"], True),
+    ("dC_emis", ["dN_n2o_emis[kgNha-1]", "dN_no_emis[kgNha-1]"], False),
+    ("emission", ["dC_co2_emis[kgCha-1]", "dN_n2o_emis[kgNha-1]"], False),
+    ("dN_n2o_emis[kgNha-1]", ["dN_n2o_emis[kgNha-1]"], True),
+    ("aN_n2o_emis[kgNha-1]", ["dN_n2o_emis[kgNha-1]"], False),
+    ("aN_emis", ["aN_n2o_emis[kgNha-1]", "aN_n2o_emis[kgNha-1]"], True),
+    ("aN_emis", ["aN_n2o_emis[kgNha-1]", "aN_n2o_emis[kgNha-1]"], True),
+    ("aC_emis", ["dC_co2_emis[kgCha-1]"], False),
+    ("aN_n2o_emis[kgNha-1]", ["dN_n2o_emis[kgNha-1]", "dN_n2o_emis[kgNha-1]"], False),
+]
+
+
+@pytest.mark.parametrize("s,src,expected", test_data)
+def test_variables_compatible(s, src, expected):
+    assert variables_compatible(s, src) == expected
+
+
+test_data = [
+    ("dN_n2o_emis[kgNha-1]", ("dN_n2o_emis", "kgNha-1")),
+    ("dN_n2o_emis", ("dN_n2o_emis", None)),
+]
+
+
+@pytest.mark.parametrize("s,expected", test_data)
+def test_variable_decode(s, expected):
+    assert Variable._decode(s) == expected
+
+
+def test_variable_bad_decode():
+    with pytest.raises(ValueError):
+        Variable._decode("dN_n2o_emis[[kgNha-1]")
+
+
+@pytest.mark.parametrize("expected,s", test_data)
+def test_variable_encode(s, expected):
+    assert Variable._encode(*s) == expected
+
+
+test_data = [
+    ("dN_n2o_emis[kgNha-1]", None, ["dN_n2o_emis[kgNha-1]"]),
+    ("n_emis", "n2o_emis+no_emis", ["n2o_emis", "no_emis"]),
+]
+
+
+@pytest.mark.parametrize("s,sources,expected", test_data)
+def test_variable_sources(s, sources, expected):
+    v = Variable(s, sources=sources)
+    assert v.sources == expected
+
+
+test_data = [
+    ("dN_n2o_emis[kgNha-1]", None, False),
+    ("n2o", "dN_n2o_emis[kgNha-1]", False),
+    ("n_emis", "n2o_emis+no_emis", True),
+]
+
+
+@pytest.mark.parametrize("s,sources,expected", test_data)
+def test_variable_iscomposite(s, sources, expected):
+    v = Variable(s, sources=sources)
+    assert v.is_composite == expected
+
+
+def test_variable_text():
+    v = Variable("dN_n_emis[kgNha-1]=dN_n2o_emis[kgNha-1]+dN_no_emis[kgNha-1]")
+    assert v.text == "dN_n_emis[kgNha-1]"
+
+
+def test_variable_text_full():
+    v = Variable("dN_n_emis[kgNha-1]=dN_n2o_emis[kgNha-1]+dN_no_emis[kgNha-1]")
+    assert v.text_full == "dN_n_emis[kgNha-1]=dN_n2o_emis[kgNha-1]+dN_no_emis[kgNha-1]"
diff --git a/ldndc2nc/variable.py b/ldndc2nc/variable.py
new file mode 100644
index 0000000..465401c
--- /dev/null
+++ b/ldndc2nc/variable.py
@@ -0,0 +1,97 @@
+# -*- coding: utf-8 -*-
+"""ldndc2nc.variable: variable handling for the ldndc2nc package."""
+
+import logging
+from typing import List, Optional, Tuple
+
+log = logging.getLogger(__name__)
+
+
+def identical(elements: List) -> bool:
+    return all([e == elements[0] for e in elements])
+
+
+def valid_brackets(s: str) -> bool:
+    """only up to one open/ close square bracket allowed"""
+    cnt, cnt_closed = 0, 0
+    for l in s:
+        if l == "[":
+            cnt += 1
+            if cnt > 1:
+                return False
+        if l == "]":
+            cnt -= 1
+            if cnt == 0:
+                cnt_closed += 1
+            elif cnt < 0:
+                return False
+        if cnt_closed > 1:
+            return False
+    return True
+
+
+def variables_compatible(s: str, src: List[str]) -> bool:
+    prefixes = [f"{v}_" for v in ["dC", "dN", "aC", "aN"]]
+
+    ps = [p for p in prefixes for s in src if s.startswith(p)]
+
+    # include target variable if it also has a known prefix
+    for t_p in prefixes:
+        if s.startswith(t_p):
+            ps.append(t_p)
+
+    if identical(ps):
+        return True
+
+    return False
+
+
+class Variable:
+    def __init__(self, s: str, sources: Optional[str] = None):
+
+        if s.count("=") == 0:
+            pass
+        elif s.count("=") == 1:
+            s, sources = s.split("=")
+        else:
+            raise ValueError(f"Variable line is invalid:\n{s}")
+
+        self._sources = [s]
+        self.name, self.unit = self._decode(s)
+
+        if sources:
+            self._sources = sources.split("+") if "+" in sources else [sources]
+
+        if not variables_compatible(s, self._sources):
+            raise ValueError("Trying to add incompatible columns")
+
+    def __repr__(self):
+        return f""
+
+    @property
+    def text(self) -> str:
+        return f"{self._encode(self.name, self.unit)}"
+
+    @property
+    def text_full(self) -> str:
+        part1 = self.text
+        part2 = "=" + "+".join(self._sources) if self.is_composite else ""
+        return part1 + part2
+
+    @property
+    def sources(self) -> List[str]:
+        return self._sources
+
+    @property
+    def is_composite(self) -> bool:
+        return False if len(self._sources) == 1 else True
+
+    @staticmethod
+    def _decode(s: str) -> Tuple[str, Optional[str]]:
+        if not valid_brackets(s):
+            raise ValueError(f"Variable {s} does not follow formatting convention")
+        return tuple(s.replace("]", "").split("[")) if "[" in s else (s, None)
+
+    @staticmethod
+    def _encode(name: str, unit: Optional[str] = None) -> str:
+        return f"{name}[{unit}]" if unit else name
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 3e98c23..63af031 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,6 @@
 black
 pre-commit
+pyfakefs
 pytest
 pytest-cov
 pytest-xdist
diff --git a/setup.cfg b/setup.cfg
index daded7c..1749c62 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -55,15 +55,17 @@ ignore =
     E731
     # line break before binary operator
     W503
+    # ambiguous variable name 'l'
+    E741
 exclude=
     .eggs
     doc
 
 [isort]
 default_section = THIRDPARTY
-known_first_party = xarray
+known_first_party = ldndc2nc
 multi_line_output = 3
 include_trailing_comma = True
 force_grid_wrap = 0
 use_parentheses = True
-line_length = 88
\ No newline at end of file
+line_length = 88
diff --git a/update_docs.sh b/update_docs.sh
deleted file mode 100644
index 2f47297..0000000
--- a/update_docs.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-
-# build the docs
-cd docs
-make clean
-make html
-cd ..
-
-# commit and push
-#git add -A
-#git commit -m "building and pushing docs"
-#git push origin master
-
-# switch branches and pull the data we want
-#git checkout gh-pages
-#rm -rf .
-touch .nojekyll
-git checkout master docs/build/html
-mv ./docs/build/html/* ./
-rm -rf ./docs
-git add -A
-git commit -m "publishing updated docs..."
-git push origin gh-pages
-
-# switch back
-#git checkout master