
Commit 33a8fea

subset from interface and reformat (#57)
* Update environment
* Update docs env
* Update env
* update build
* update build
* update build
* update build
* Update docs
* Update changelog
* Add option to create subset from interface
* Reformat and update docs
* Update changelog
* fix np.int
1 parent eaca29e commit 33a8fea

16 files changed: +615 −727 lines

.readthedocs.yml

Lines changed: 5 additions & 0 deletions

@@ -5,6 +5,11 @@ build:
   tools:
     python: mambaforge-4.10
 
+python:
+  install:
+    - method: pip
+      path: .
+
 sphinx:
   configuration: docs/conf.py

CHANGELOG.rst

Lines changed: 1 addition & 0 deletions

@@ -13,6 +13,7 @@ Version 1.3.0
 - Add module to assign custom metadata readers to ISMN_Interface
 - Notebook added that describes using a custom metadata reader
 - RTD build uses a separate, smaller environment.yml now (and mamba)
+- ISMN_Interface now has a method to create an instance of itself for a selection of ids (`ISMN_Interface.subset_from_ids`)
 
 
 Version 1.2.0
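
The new method mentioned in the changelog entry can be used roughly as follows. This is a minimal sketch only: the data path is a placeholder and the exact id format accepted by `subset_from_ids` is an assumption, not taken from this diff.

    from ismn.interface import ISMN_Interface

    # Build the full interface from a local ISMN data archive (placeholder path).
    ds = ISMN_Interface("/path/to/ISMN_data")

    # Create a new, smaller ISMN_Interface that contains only the selected ids
    # (method name from the changelog; the ids used here are illustrative).
    subset = ds.subset_from_ids([0, 1, 2])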

README.rst

Lines changed: 10 additions & 2 deletions

@@ -26,9 +26,9 @@ https://ismn.earth
 
 The following tutorials are available in ``docs/examples``:
 
-`1) ISMN reader basic functionality <docs/examples/interface.ipynb>`_
+`1) ISMN reader basic functionality <https://ismn.readthedocs.io/en/latest/examples/interface.html>`_
 
-`2) Adding custom metadata readers <docs/examples/custom_meta.ipynb>`_
+`2) Adding custom metadata readers <https://ismn.readthedocs.io/en/latest/examples/custom_meta.html>`_
 
 Citation
 ========

@@ -238,6 +238,14 @@ If you want to contribute please follow these steps:
   We use pytest so a simple function called test_my_feature is enough
 - submit a pull request to our master branch
 
+Code Formatting
+---------------
+To apply pep8 conform styling to any changed files [we use `yapf`](https://github.com/google/yapf). The correct
+settings are already set in `setup.cfg`. Therefore the following command
+should be enough:
+
+yapf file.py --in-place
+
 Release new version
 -------------------

docs/examples/interface.ipynb

Lines changed: 86 additions & 307 deletions
Large diffs are not rendered by default.

environment.yml

Lines changed: 1 addition & 0 deletions

@@ -17,5 +17,6 @@ dependencies:
   - sphinx
   - nbsphinx
   - sphinx_rtd_theme
+  - yapf
   - pytest
   - pytest-cov

setup.cfg

Lines changed: 5 additions & 0 deletions

@@ -121,3 +121,8 @@ version = 4.0.2
 package = ismn
 extensions =
     no_skeleton
+
+[yapf]
+based_on_style = yapf
+indent_width = 4
+column_limit = 79

src/ismn/base.py

Lines changed: 16 additions & 10 deletions

@@ -33,6 +33,7 @@
 
 
 def zip(func):
+
     def wrapper(cls, *args, **kwargs):
         if not cls.zip:
             raise IOError("Zip archive expected, use @dir functions instead.")

@@ -42,9 +43,11 @@ def wrapper(cls, *args, **kwargs):
 
 
 def dir(func):
+
     def wrapper(cls, *args, **kwargs):
         if cls.zip:
-            raise IOError("Unzipped archive expected, use @zip functions instead.")
+            raise IOError(
+                "Unzipped archive expected, use @zip functions instead.")
         return func(cls, *args, **kwargs)
 
     return wrapper

@@ -124,8 +127,8 @@ def clean_subpath(self, subpath) -> Union[Path, PurePosixPath]:
             subpath = PurePosixPath(subpath)
         else:
             assert (
-                self.path / Path(subpath)
-            ).exists(), "Subpath does not exist in archive"
+                self.path /
+                Path(subpath)).exists(), "Subpath does not exist in archive"
 
         return subpath

@@ -185,7 +188,8 @@ def __scan_dir(self, station_subdirs: bool = True) -> OrderedDict:
                 if net not in cont.keys():
                     cont[net] = np.array([])
                 if station_subdirs:
-                    cont[net] = np.append(cont[net], Path(net, stat.name))
+                    cont[net] = np.append(cont[net],
+                                          Path(net, stat.name))
                 else:
                     cont[net] = np.append(cont[net], stat.name)

@@ -194,7 +198,9 @@ def __scan_dir(self, station_subdirs: bool = True) -> OrderedDict:
         return self.cont
 
     @dir
-    def __find_files_dir(self, subpath: str = None, fn_templ: str = "*.csv") -> list:
+    def __find_files_dir(self,
+                         subpath: str = None,
+                         fn_templ: str = "*.csv") -> list:
         """
         Find files in the archive or a subdirectory of the archive
         that match to the passed filename template.

@@ -209,7 +215,9 @@ def __find_files_dir(self, subpath: str = None, fn_templ: str = "*.csv") -> list
         return filenames
 
     @zip
-    def __find_files_zip(self, subpath: str = None, fn_templ: str = "*.csv") -> list:
+    def __find_files_zip(self,
+                         subpath: str = None,
+                         fn_templ: str = "*.csv") -> list:
         """
         Find files in zip archive that match the passed template and subdir.
         """

@@ -224,8 +232,7 @@ def __find_files_zip(self, subpath: str = None, fn_templ: str = "*.csv") -> list
             filter(
                 lambda f: fnmatch.fnmatch(f, f"{subpath}/{fn_templ}"),
                 all_files,
-            )
-        ).copy()
+            )).copy()
 
         return filterlist

@@ -330,8 +337,7 @@ def extract_dir(self, subdir_in_archive, out_path):
         ls = np.array(self.zip.namelist())
 
         filterlist = list(
-            filter(lambda x: x.startswith(str(subdir_in_archive)), ls)
-        ).copy()
+            filter(lambda x: x.startswith(str(subdir_in_archive)), ls)).copy()
 
         self.zip.extractall(members=filterlist, path=out_path)
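
For context, the `@zip` and `@dir` decorators reformatted in the hunks above implement a simple guard: a method decorated with `@zip` may only run when the archive was opened from a zip file, otherwise it raises. A self-contained sketch of the pattern (illustrative only, not the module's exact code; the `Archive` class and `namelist` method below are made up for the example):

    def zip(func):
        # Guard: the wrapped method may only be called on zip-based archives.
        def wrapper(cls, *args, **kwargs):
            if not cls.zip:
                raise IOError("Zip archive expected, use @dir functions instead.")
            return func(cls, *args, **kwargs)

        return wrapper


    class Archive:
        def __init__(self, is_zip: bool):
            # In ismn this flag is derived from the archive path; here it is set directly.
            self.zip = is_zip

        @zip
        def namelist(self):
            return ["Network/Station/sensor.csv"]


    print(Archive(is_zip=True).namelist())   # works, archive is a zip

    try:
        Archive(is_zip=False).namelist()
    except IOError as err:
        print(err)  # Zip archive expected, use @dir functions instead.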

src/ismn/citations.txt

Lines changed: 2 additions & 1 deletion

@@ -101,7 +101,8 @@ TAHMO;We acknowledge the work of Nicolaas Cornelis van de Giesen and Frank Annor
 TERENO;Zacharias, S., H.R. Bogena, L. Samaniego, M. Mauder, R. Fuß, T. Puetz, M. Frenzel, M. Schwank, C. Baessler, K. Butterbach-Bahl, O. Bens, E. Borg, A. Brauer, P. Dietrich, I. Hajnsek, G. Helle, R. Kiese, H. Kunstmann, S. Klotz, J.C. Munch, H. Papen, E. Priesack, H. P. Schmid, R. Steinbrecher, U. Rosenbaum, G. Teutsch, H. Vereecken. 2011. A Network of Terrestrial Environmental Observatories in Germany. Vadose Zone J. 10. 955–973. doi:10.2136/vzj2010.0139
 TERENO;Bogena, H., Kunkel, R., Puetz, T., Vereecken, H., Kruger, E., Zacharias, S., Dietrich, P., Wollschlaeger, U., Kunstmann, H., Papen, H., Schmid, H., Munch, J., Priesack, E., Schwank, M., Bens, O., Brauer, A., Borg, E. & Hajnsek, I. (2012), ‘Tereno - long-term monitoring network for terrestrial environmental research’, Hydrologie und Wasserbewirtschaftung 56, 138–143.
 TERENO;Bogena, H. R. (2016), ‘Tereno: German network of terrestrial environmental observatories’, Journal of large-scale research facilities JLSRF 2, 52.
-UDC_SMOS;Schlenz, F., Dall'Amico, J., Loew, A., Mauser, W. (2012): Uncertainty Assessment of the SMOS Validation in the Upper Danube Catchment. IEEE Transactions on Geoscience and Remote Sensing, 50(5), pp.1517–1529. doi: 10.1109/TGRS.2011.2171694.
+TxSON;Caldwell, T. G., T. Bongiovanni, M. H. Cosh, T. J. Jackson, A. Colliander, C. J. Abolt, R. Casteel, T. Larson, B. R. Scanlon, and M. H. Young (2019), The Texas Soil Observation Network: A comprehensive soil moisture dataset for remote sensing and land surface model validation, Vadose Zone Journal, 18:100034, doi:10.2136/vzj2019.04.0034
+UDC_SMOS;Schlenz, F., Dall'Amico, J., Loew, A., Mauser, W. (2012): Uncertainty Assessment of the SMOS Validation in the Upper Danube Catchment. IEEE Transactions on Geoscience and Remote Sensing, 50(5), pp.1517–1529. doi: 10.1109/TGRS.2011.2171694.
 UDC_SMOS;A. Loew, J. T. Dall'Amico, F. Schlenz, W. Mauser (2009): The Upper Danube soil moisture validation site: measurements and activities, paper presented at Earth Observation and Water Cycle conference, Frascati (Rome), 18 - 20 November 2009, to be published in ESA Special dataation SP-674.
 UMBRIA;Brocca, L., Hasenauer, S., Lacava, T., Melone, F., Moramarco, T., Wagner, W., Dorigo, W., Matgen, P., Martínez-Fernández, J., Llorens, P., Latron, J., Martin, C., Bittelli, M. (2011). Soil moisture estimation through ASCAT and AMSR-E sensors: an intercomparison and validation study across Europe. Remote Sensing of Environment, 115, 3390-3408, doi:10.1016/j.rse.2011.08.003.
 UMBRIA;Brocca, L., Melone, F., Moramarco, T. (2008). On the estimation of antecedent wetness condition in rainfall-runoff modelling. Hydrological Processes, 22 (5), 629-642.

src/ismn/components.py

Lines changed: 42 additions & 36 deletions

@@ -106,15 +106,14 @@ def __init__(
         self.name = name if name is not None else self.__repr__()
 
     def __repr__(self):
-        return (
-            f"{self.instrument}_{self.variable}_"
-            f"{self.depth.start:1.6f}_{self.depth.end:1.6f}"
-        )
+        return (f"{self.instrument}_{self.variable}_"
+                f"{self.depth.start:1.6f}_{self.depth.end:1.6f}")
 
     @property
     def metadata(self) -> MetaData:
-        return MetaData() if self.filehandler is None else self.filehandler.metadata
-
+        return MetaData(
+        ) if self.filehandler is None else self.filehandler.metadata
+
     @property
     def data(self):
         return self.read_data()

@@ -261,7 +260,7 @@ def __init__(self, name, lon, lat, elev):
 
     def __repr__(self):
         # Provide basic station information.
-        return f"Sensors at '{self.name}': {[s.name for s in self.sensors.values()]}"
+        return f"Station '{self.name}' with Sensors: {[s.name for s in self.sensors.values()]}"
 
     @property
     def metadata(self) -> MetaData:

@@ -317,9 +316,10 @@ def get_depths(self, variable=None):
 
         return depths
 
-    def get_min_max_obs_timestamp(
-        self, variable="soil moisture", min_depth=None, max_depth=None
-    ):
+    def get_min_max_obs_timestamp(self,
+                                  variable="soil moisture",
+                                  min_depth=None,
+                                  max_depth=None):
         """
         Goes through the sensors associated with this station
         and checks the metadata to get and approximate time coverage of the station.

@@ -467,14 +467,10 @@ def get_sensors(self, variable, depth_from, depth_to):
         sensors : numpy.ndarray
             array of sensors found for the given combination of variable and depths
         """
-        return np.array(
-            [
-                s
-                for s in self.iter_sensors(
-                    variable=variable, depth=Depth(depth_from, depth_to)
-                )
-            ]
-        )
+        return np.array([
+            s for s in self.iter_sensors(
+                variable=variable, depth=Depth(depth_from, depth_to))
+        ])
 
 
 class Network(IsmnComponent):

@@ -513,7 +509,7 @@ def __init__(self, name, stations=None):
 
     def __repr__(self):
         # Provide basic Network information.
-        return f"Stations in '{self.name}': {list(self.stations.keys())}"
+        return f"Network '{self.name}' with Stations: {list(self.stations.keys())}"
 
     def __getitem__(self, item: Union[int, str]):
         # shortcut to access networks directly

@@ -662,7 +658,6 @@ class NetworkCollection(IsmnComponent):
     """
 
     def __init__(self, networks):
-
         """
         Create network collection from previously created Networks.

@@ -682,21 +677,30 @@ def __init__(self, networks):
             lons += net_lons
             lats += net_lats
 
-        self.grid = BasicGrid(lons, lats) if (len(lons) > 0 and len(lats) > 0) else None
+        self.grid = BasicGrid(lons, lats) if (len(lons) > 0 and
+                                              len(lats) > 0) else None
 
     def __repr__(self, indent: str = ""):
-        return ",\n".join(
-            [
-                f"{indent}{net.name}: {list(net.stations.keys())}"
-                for net in self.networks.values()
-            ]
-        )
+        return ",\n".join([
+            f"{indent}{net.name}: {list(net.stations.keys())}"
+            for net in self.networks.values()
+        ])
 
-    def __getitem__(self, item: Union[int, str]):
+    def __getitem__(self, item: Union[int, str, list]) -> \
+            Union["NetworkCollection", Network]:
         # shortcut to access networks directly
-        if isinstance(item, int):
-            item = list(self.networks.keys())[item]
-        return self.networks[item]
+        if isinstance(item, (int, str)):
+            if isinstance(item, int):
+                item = list(self.networks.keys())[item]
+            net: Network = self.networks[item]
+            return net
+        else:
+            keys = list(self.networks.keys())
+            sub: NetworkCollection = NetworkCollection(networks=[
+                self.networks[n] if isinstance(n, str) else self
+                .networks[keys[n]] for n in item
+            ])
+            return sub
 
     def iter_networks(self) -> Network:
         """

@@ -741,7 +745,8 @@ def station4gpi(self, gpi):
         in_grid = np.isin(idxs, self.grid.activegpis)
 
         if not all(in_grid):
-            raise ValueError(f"Index not found in loaded grid: {idxs[~in_grid]}")
+            raise ValueError(
+                f"Index not found in loaded grid: {idxs[~in_grid]}")
 
         lon, lat = self.grid.gpi2lonlat(idxs)

@@ -798,15 +803,16 @@ def export_citations(self, out_file=None):
         references: OrderedDict
             Network names as keys and network references as values
         """
-        refs = OrderedDict(
-            [(net.name, net.get_citations()) for net in self.iter_networks()]
-        )
+        refs = OrderedDict([
+            (net.name, net.get_citations()) for net in self.iter_networks()
+        ])
 
         if out_file is not None:
             with open(out_file, mode="w") as out_file:
                 for name, reflist in refs.items():
                     out_file.write(f"References for Network {name}:\n")
-                    out_file.write("-----------------------------------------\n")
+                    out_file.write(
+                        "-----------------------------------------\n")
                     for ref in reflist:
                         out_file.write(f"{ref}\n")
                     out_file.write("\n")