Commit 9db74c0e authored by DEVOS's avatar DEVOS
Browse files

Merger

parent 1f47362a
......@@ -9,6 +9,8 @@ import importer # allow to solve relative imports
import sonar_netcdf.utils.nc_reader as reader
import netCDF4 as nc
from sonar_netcdf.utils.nc_reader_presenter import NcReaderPresenter
class SingleDimensionSelector:
"""Single dimension widget, made of a slider and a check box"""
......@@ -72,7 +74,8 @@ class DimensionsSelector:
class AppLayout2:
"""Composite handling layout of all widgets"""
def clear_output(self,event=None):
def clear_output(self, event=None):
self.content_plt.clear_output(wait=False)
def clear_all(self, event=None):
......@@ -123,7 +126,7 @@ class NCExplorer:
starting_path = os.path.expanduser(starting_path)
starting_path = os.path.expandvars(starting_path)
if not os.path.exists(starting_path):
new_path= os.path.expanduser("~")
new_path = os.path.expanduser("~")
print(f"Input file {starting_path} does not exist switching to {new_path}")
starting_path = new_path
self.fc = self._createFileChooser(starting_path, self.on_file_selected)
......@@ -295,7 +298,8 @@ class NCExplorer:
# build a slice index given all dimension selector
slice_index = self.widget.plt_dimension_selector.build_slice_index()
self.current_reader._display_variable(
presenter = NcReaderPresenter(self.current_reader)
presenter._display_variable(
variable_name=name, variable_path=path, slice_index=slice_index
)
......
......@@ -43,7 +43,8 @@
"outputs": [],
"source": [
"#import some netcdf helper\n",
"from sonar_netcdf.utils.nc_reader import NcReader"
"from sonar_netcdf.utils.nc_reader import NcReader\n",
"from sonar_netcdf.utils.nc_reader_presenter import NcReaderPresenter"
]
},
{
......@@ -52,7 +53,7 @@
"metadata": {},
"outputs": [],
"source": [
"#help(NcReader.dump_content)"
"#help(NcReaderPresenter.dump_content)"
]
},
{
......@@ -61,7 +62,7 @@
"metadata": {},
"outputs": [],
"source": [
"#help(NcReader.dump_groups)"
"#help(NcReaderPresenter.dump_groups)"
]
},
{
......@@ -87,7 +88,8 @@
"outputs": [],
"source": [
"#open the file \n",
"reader = NcReader(file_path)"
"reader = NcReader(file_path)\n",
"presenter = NcReaderPresenter(reader)"
]
},
{
......@@ -161,7 +163,7 @@
],
"source": [
"\n",
"values=reader.dump_content(root=\"Sonar/Beam_group1/ADCP/current_velocity_geographical_north\",cmap=\"inferno\",vmin=-0.5,vmax=0.5)\n"
"values=presenter.dump_content(root=\"Sonar/Beam_group1/ADCP/current_velocity_geographical_north\",cmap=\"inferno\",vmin=-0.5,vmax=0.5)\n"
]
},
{
......@@ -223,7 +225,7 @@
"source": [
"\n",
"\n",
"values=reader.dump_content(root=\"Sonar/Beam_group1/ADCP/Mean_current/current_velocity_geographical_north\",cmap=\"inferno\",vmin=-0.5,vmax=0.5)\n"
"values=presenter.dump_content(root=\"Sonar/Beam_group1/ADCP/Mean_current/current_velocity_geographical_north\",cmap=\"inferno\",vmin=-0.5,vmax=0.5)\n"
]
},
{
......@@ -385,12 +387,12 @@
"#psi\n",
"psi=reader._get_variable_data(\"/Sonar/Beam_group1/equivalent_beam_angle\",slice_index={'beam':0})\n",
"psi=np.asarray(psi)\n",
"psi=psi[0,:]\n",
"psi=psi[:]\n",
"\n",
"#Gain\n",
"G= reader._get_variable_data(\"/Sonar/Beam_group1/transducer_gain\",slice_index={'beam':0})\n",
"G=np.asarray(G)\n",
"G=G[0,:]\n",
"G=G[:]\n",
"#use default value\n",
"G=G+33.1\n",
"\n",
......@@ -43,7 +43,8 @@
"outputs": [],
"source": [
"#import some netcdf helper\n",
"from sonar_netcdf.utils.nc_reader import NcReader"
"from sonar_netcdf.utils.nc_reader import NcReader\n",
"from sonar_netcdf.utils.nc_reader_presenter import NcReaderPresenter"
]
},
{
......@@ -69,7 +70,8 @@
"outputs": [],
"source": [
"#open the file \n",
"reader = NcReader(file_path)"
"reader = NcReader(file_path)\n",
"presenter = NcReaderPresenter(reader)"
]
},
{
......@@ -144,7 +146,7 @@
],
"source": [
"\n",
"values=reader.dump_content(root=\"Sonar/Grid_group_1/frequency\",cmap=\"jet\",vmin=-0.5,vmax=0.5)\n"
"values=presenter.dump_content(root=\"Sonar/Grid_group_1/frequency\",cmap=\"jet\",vmin=-0.5,vmax=0.5)\n"
]
},
{
......@@ -203,7 +205,7 @@
],
"source": [
"\n",
"values=reader.dump_content(root=\"Sonar/Grid_group_1/integrated_backscatter\",slice_index={'frequency':10},cmap=\"jet\",vmin=-80,vmax=-30)\n"
"values=presenter.dump_content(root=\"Sonar/Grid_group_1/integrated_backscatter\",slice_index={'frequency':10},cmap=\"jet\",vmin=-80,vmax=-30)\n"
]
},
{
......
......@@ -43,7 +43,8 @@
"outputs": [],
"source": [
"#import some netcdf helper\n",
"from sonar_netcdf.utils.nc_reader import NcReader"
"from sonar_netcdf.utils.nc_reader import NcReader\n",
"from sonar_netcdf.utils.nc_reader_presenter import NcReaderPresenter"
]
},
{
......@@ -52,7 +53,7 @@
"metadata": {},
"outputs": [],
"source": [
"#help(NcReader.dump_content)"
"#help(NcReaderPresenter.dump_content)"
]
},
{
......@@ -61,7 +62,7 @@
"metadata": {},
"outputs": [],
"source": [
"#help(NcReader.dump_groups)"
"#help(NcReaderPresenter.dump_groups)"
]
},
{
......@@ -89,7 +90,8 @@
"outputs": [],
"source": [
"#open the file \n",
"reader = NcReader(file_path)"
"reader = NcReader(file_path)\n",
"presenter = NcReaderPresenter(reader)"
]
},
{
......@@ -165,7 +167,7 @@
],
"source": [
"\n",
"values=reader.dump_content(root=\"Sonar/Beam_group2/backscatter_r\",cmap=\"viridis\",vmin=-90,vmax=-30)\n"
"values=presenter.dump_content(root=\"Sonar/Beam_group2/backscatter_r\",cmap=\"viridis\",vmin=-90,vmax=-30)\n"
]
},
{
......@@ -228,7 +230,7 @@
}
],
"source": [
"values=reader.dump_content(root=\"Sonar/Beam_group2/echoangle_major\",cmap=\"coolwarm\",vmin=-10,vmax=10)"
"values=presenter.dump_content(root=\"Sonar/Beam_group2/echoangle_major\",cmap=\"coolwarm\",vmin=-10,vmax=10)"
]
},
{
......@@ -284,7 +286,7 @@
}
],
"source": [
"values=reader.dump_content(root=\"Sonar/Beam_group2/echoangle_minor\",cmap=\"coolwarm\",vmin=-10,vmax=10)"
"values=presenter.dump_content(root=\"Sonar/Beam_group2/echoangle_minor\",cmap=\"coolwarm\",vmin=-10,vmax=10)"
]
},
{
This diff is collapsed.
"""
Utility class to manage Sonar-netcdf dimensions
"""
from utils.nc_reader import NcReader, open_nc_reader
def find_time_coordinate_variables(reader: NcReader) -> None:
    """
    Walk the whole group hierarchy of *reader* and print every variable found.

    NOTE(review): despite the name, no filtering on time units happens here --
    everything yielded by ``walk_tree_variables`` is printed; confirm intended
    behavior against callers before relying on the name.
    """
    flattened = (item for batch in reader.walk_tree_variables() for item in batch)
    for candidate in flattened:
        print(candidate)
# Manual debug entry point: run as a module (python -m ...) so the relative
# import of utils.nc_reader resolves.
if __name__ == "__main__":
    # NOTE(review): hard-coded local Windows path -- only works on the author's machine.
    with open_nc_reader("E:/ifremer/data/sonar-netcdf/ADCP/HYDROMOMAR-D20200904-T093759.nc") as reader:
        find_time_coordinate_variables(reader)
"""
Tests of module sonar_netcdf.sonar_variables
"""
import tempfile as tmp
import netCDF4 as nc
import numpy as np
import sonar_netcdf.sonar_groups as sg
import sonar_netcdf.utils.nc_reader as nc_r
import sonar_netcdf.utils.nc_variables as nc_v
def test_find_time_coordinate_variables():
    """check find_time_coordinate_variables function

    Builds a synthetic sonar-netcdf file containing exactly 5 time
    coordinate variables spread over several groups, then checks that
    find_time_coordinate_variables reports all 5 (by long_name).
    """
    with tmp.TemporaryDirectory() as o_dir:
        filename = tmp.mktemp(dir=o_dir, suffix=".nc")
        # Generates a Sonar files with 5 time coordinate variables
        with nc.Dataset(filename, mode="w") as file:
            root_structure = sg.RootGrp()
            root = root_structure.create_group(file)
            root_structure.create_crs(root)
            # create /annotation (time variable #1: ANO_TIME)
            ano_structure = sg.AnnotationGrp()
            ano = ano_structure.create_group(root)
            ano_structure.create_dimension(ano, {sg.AnnotationGrp.TIME_DIM_NAME: 2})
            ano_structure.create_time(ano, long_name="ANO_TIME")
            ano_structure.create_annotation_text(ano)
            # create /platform (time variable #2: PLA_TIME, built by hand
            # with a non-standard dimension name "time1")
            pla_structure = sg.PlatformGrp()
            pla = pla_structure.create_group(root)
            pla_structure.create_dimension(pla, {"time1": 2})
            # NOTE(review): dimensions=("time1") is a plain string, not a
            # 1-tuple -- presumably netCDF4 accepts a single dimension name;
            # confirm, or add the trailing comma.
            var = pla.createVariable(varname="time1", datatype=float, dimensions=("time1"), fill_value=np.nan)
            var.long_name = "PLA_TIME"
            var.units = "nanoseconds since 1970-01-01 00:00:00Z"
            # create /platform/NMEA (time variable #3: NMEA_TIME)
            nmea_structure = sg.NmeaGrp()
            nmea = nmea_structure.create_group(pla, ident="NMEA")
            nmea_structure.create_dimension(nmea, {sg.NmeaGrp.TIME_DIM_NAME: 2})
            nmea_structure.create_time(nmea, long_name="NMEA_TIME")
            nmea_structure.create_nmea_datagram(nmea)
            # create /Sonar with two beam groups (time variables #4 and #5)
            sonar_structure = sg.SonarGrp()
            sonar = sonar_structure.create_group(root)
            beam_structure = sg.BeamGroup1Grp()
            for i in range(2):
                beam = beam_structure.create_group(sonar, ident=f"Beam_group{i}")
                beam_structure.create_beam_type(beam)
                beam_structure.create_dimension(beam, {sg.BeamGroup1Grp.PING_TIME_DIM_NAME: 2})
                beam_structure.create_ping_time(beam, long_name=f"BEAM{i}_TIME")
                beam_structure.create_platform_heading(beam)
        # Re-open the generated file and check every time variable is found
        with nc_r.open_nc_reader(filename) as reader:
            all_long_names = [variable.long_name for variable in nc_v.find_time_coordinate_variables(reader)]
            assert len(all_long_names) == 5
            for long_name in all_long_names:
                assert long_name in [
                    "ANO_TIME",
                    "PLA_TIME",
                    "NMEA_TIME",
                    "BEAM0_TIME",
                    "BEAM1_TIME",
                ]
import sys
import traceback
from contextlib import contextmanager
from typing import Optional
from typing import Generator, List, Optional
import matplotlib.pyplot as plt
import netCDF4 as nc
import numpy as np
from .print_color import error, header, pprint, warning
from .print_color import pprint
# ensure minimum size for figures
plt.rcParams["figure.dpi"] = 270
......@@ -21,24 +19,28 @@ class NcReader:
It has a set of methods allowing to parse and dump the file content on a jupyter netbook
"""
def __init__(self, filename: str, quiet: bool = False):
    """
    Open the netCDF file *filename* for reading.

    :param filename: the file path to read
    :param quiet: True to suppress informational printing, False to enter verbose mode
    :raises OSError: when netCDF4 cannot open the file
    """
    # Fix: the span contained merge residue (two def lines and a duplicated,
    # contradictory "self.quiet" assignment); keep the single coherent version.
    self.file_name = filename
    # Set plain attributes first so a failed open still leaves a consistent
    # object (close()/__del__ check for the "dataset" attribute).
    self.quiet = quiet
    # open the file. May raise an OSError
    self.dataset = nc.Dataset(self.file_name)
def __del__(self):
def close(self):
# close the file
self.dataset.close()
# "dataset" not in self.__dict__ when Netcdf was unable to open the file
if "dataset" in self.__dict__ and self.dataset is not None:
self.dataset.close()
self.dataset = None
def _is_string_variable(self, variable_path):
variable = self.dataset[variable_path]
return variable.dtype == type("str")
def __del__(self):
    # Best-effort cleanup when the reader is garbage-collected; close() is
    # idempotent and tolerates a partially-constructed instance, so an
    # earlier explicit close() is harmless.
    self.close()
def _get_variable_data(self, variable_path, slice_index=None):
"""
......@@ -59,6 +61,12 @@ class NcReader:
def _get_variable(self, variable_path):
return self.dataset[variable_path]
def find_variable(self, variable_path) -> Optional[nc.Variable]:
    """
    Look up a variable by path, returning None instead of raising when absent.

    :param variable_path: path of the variable inside the dataset
    :return: the variable, or None when the path cannot be resolved
    """
    try:
        return self.dataset[variable_path]
    except IndexError:
        # an unknown path surfaces as IndexError from the dataset lookup
        return None
@staticmethod
def is_variable_vlen(variable):
"""
......@@ -66,12 +74,6 @@ class NcReader:
"""
return variable._isvlen
def _is_ignored(self, variable_name, ignore_variable):
    """
    Tell whether a variable is a masked variable (an ancillary variable for
    example) that should be skipped when displaying.

    :param variable_name: name of the variable to test
    :param ignore_variable: collection of variable names to ignore
    :return: True when the name is in the ignore list
    """
    return variable_name in ignore_variable
def _squeeze_shape(self, shape: tuple, dimensions: tuple):
"""
Remove dimension equals to 1 in a tuple list, if everything is equal to 1 the initia
......@@ -210,16 +212,6 @@ class NcReader:
f"Not supported display of vlen variable {variable_path}, reduced dimensions are too high {reduced_dimensions} ({reduced_shape})"
)
def _display_variable_header(self, variable_name, variable_path):
    """
    Print a heading for a variable followed by its netCDF description.

    Honors self.quiet: nothing is printed in quiet mode.
    :param variable_name: display name of the variable
    :param variable_path: full path of the variable in the dataset
    :return: None
    """
    if not self.quiet:
        header("\n\n")
        header(f"Variable {variable_name} : {variable_path}")
        pprint(f"{self.dataset[variable_path]}")
@staticmethod
def get_variable_path_and_name(dataset: nc.Dataset):
"""
......@@ -234,219 +226,38 @@ class NcReader:
return (f"{vpath}/{dataset.name}", dataset.name)
return None, None
def _display_variable(
    self,
    variable_name,
    variable_path,
    slice_index: dict,
    ignore_variable=None,
    cmap="viridis",
    vmin=None,
    vmax=None,
) -> np.ndarray:
    """
    Display one variable: a matplotlib line plot for 1D data, an image with a
    colorbar for 2D data, plain text for strings or higher dimensions.

    :param variable_name: display name of the variable
    :param variable_path: full path of the variable in the dataset
    :param slice_index: dict of dimension name -> index used upstream to reduce dimensions
    :param ignore_variable: list of variable names to skip (None means empty list)
    :param cmap: matplotlib colormap for 2D display (None falls back to "viridis")
    :param vmin: lower color bound for 2D display (None -> data minimum)
    :param vmax: upper color bound for 2D display (None -> data maximum)
    :return: the displayed values; None for ignored variables (and, despite
        the np.ndarray annotation, None for string variables which fall
        through without an explicit return)
    """
    if ignore_variable is None:
        ignore_variable = []
    if cmap is None:
        cmap = "viridis"
    # this is the only way I found to clear the figure : clear it and recreate it
    if not self._is_ignored(variable_name, ignore_variable):
        if self._is_string_variable(variable_path):
            # String variable, will only pprint a few data
            pprint(f"Variable {variable_name} is of type string")
            variable = self.dataset[variable_path]
            values = variable[:]
            pprint(f"Variable {variable_name} {len(values)} values (['{values[0]}',...,'{values[-1]}']")
        else:
            v = self._get_variable_data(variable_path, slice_index)
            if not self.quiet:
                pprint("Variable : " + variable_path + " size =" + str(v.shape))
            if len(v.shape) == 1:
                # 1D data: simple line plot, only when non-empty
                if v.shape[0] > 0:
                    if not self.quiet:
                        pprint("Statistics min:" + str(np.nanmin(v)) + " max:" + str(np.nanmax(v)))
                    plt.plot(v)
                    plt.show()
                else:
                    warning("1D variable with a null size" + variable_path + " size =" + str(v.shape))
            elif len(v.shape) == 2:
                # 2D data: image with colorbar; bounds default to the data range
                if v.shape[0] > 0 and v.shape[1] > 0:
                    if not self.quiet:
                        pprint("statistics min:" + str(np.nanmin(v)) + " max:" + str(np.nanmax(v)))
                    if vmin is None:
                        vmin = np.nanmin(v)
                    if vmax is None:
                        vmax = np.nanmax(v)
                    fig, ax = plt.subplots()
                    im = ax.imshow(v, aspect="auto", cmap=cmap, vmin=vmin, vmax=vmax)
                    fig.colorbar(im)
                    plt.show()
                else:
                    warning(f"Empty values dimensions = {v.shape}")
            else:
                # more than 2 dimensions remaining: cannot plot, dump text
                error("Cannot display variable : " + variable_path + " size =" + str(v.shape))
                pprint("Values =" + str(v))
            return v
    else:
        warning("ignored variable :" + variable_name)
        return None
def dump_content(
    self,
    root="/",
    recurse_subgroup=True,
    slice_index=None,
    ignored_variable_list=None,
    cmap=None,
    vmax=None,
    vmin=None,
):
    """
    Display all group content (name, types, attributes, variables)
    Variable content is displayed if numeric as 1D or 2D plots.
    When a variable has more than 2 dimensions, a reduction is done on dimensions equal to 1 and, if not enough, the slice_index parameter is used to reduce dimensions
    Vlen data is filled up with invalid values along the variable length dimension in order to be displayed properly
    :param root: the root path used as a starting point
    :param recurse_subgroup: boolean indicating if recursion into subgroup is done
    :param slice_index: dictionary of dimension name -> index used to reduce dimensions if needed, for example slice_index={'ping_time':3}
    :param ignored_variable_list: list of variables that should be ignored
    :param cmap: matplotlib colormap name used for 2D plots
    :param vmin: lower color bound for 2D plots
    :param vmax: upper color bound for 2D plots
    :return: the data of the last variable displayed (not None, despite what
        older documentation said)
    """
    if slice_index is None:
        slice_index = {}
    if ignored_variable_list is None:
        ignored_variable_list = []
    # "/" (or None) starts at the whole dataset, otherwise resolve the sub-group
    if root == "/" or root is None:
        root_group = self.dataset
    else:
        root_group = self.dataset[root]
    return self._recurse_and_display(
        root_group,
        slice_index=slice_index,
        recurse_subgroup=recurse_subgroup,
        ignored_variable_list=ignored_variable_list,
        cmap=cmap,
        vmin=vmin,
        vmax=vmax,
    )
def dump_groups(self, starting_path="/"):
    """
    Parse recursively all groups and print their names as an ASCII tree.

    :param starting_path: the group path used as a starting point; "/" or
        None starts at the root of the file
    :return: None
    """
    # Fix: merge residue made the root case recurse twice -- once with
    # group=None and once passing the raw Dataset, which the recursion
    # helper would treat as a named group. The root group is not strictly
    # a group but a nc.Dataset (it has no name), so the helper's None
    # convention is the only correct entry for it.
    if starting_path == "/" or starting_path is None:
        self._dump_groups_and_recurse(group=None)
    else:
        self._dump_groups_and_recurse(self.dataset[starting_path])
def _dump_groups_and_recurse(self, group=None, level=""):
    """
    Print the name of *group* then recurse depth-first into its subgroups.

    A None group stands for the root dataset and is printed as "-+Root";
    other groups are prefixed with "|+" when they contain subgroups and
    "|-" when they are leaves, indented one extra space per nesting level.
    """
    if group is None:
        dataset = self.dataset
        pprint("-+Root")
    else:
        dataset = group
        marker = "|+" if len(group.groups) > 0 else "|-"
        pprint(f"{level}{marker}{group.name}")
    child_level = f"{level} "
    for subgroup_name in dataset.groups:
        self._dump_groups_and_recurse(dataset[subgroup_name], level=child_level)
def _print_variable(self, variable, slice_index, ignored_variable_list, cmap=None, vmin=None, vmax=None):
    """
    Print a variable header then display its content, trapping any display
    error so a failing variable does not abort the whole dump.

    :param variable: the variable object to display
    :param slice_index: dict of dimension name -> index used to reduce dimensions
    :param ignored_variable_list: list of variable names that should be skipped
    :param cmap: matplotlib colormap forwarded to the display
    :param vmin: lower color bound forwarded to the display
    :param vmax: upper color bound forwarded to the display
    :return: the displayed values, or None when an error occurred
    """
    v = None
    # Fix: initialize before the try so the error path can never raise
    # NameError when building the path itself fails.
    variable_path = getattr(variable, "name", str(variable))
    try:
        variable_path = variable._grp.path + "/" + variable.name
        self._display_variable_header(variable, variable_path)
        v = self._display_variable(
            variable,
            variable_path,
            slice_index,
            ignore_variable=ignored_variable_list,
            cmap=cmap,
            vmin=vmin,
            vmax=vmax,
        )
    # Fix: narrowed the bare except so KeyboardInterrupt/SystemExit still propagate
    except Exception:  # pylint: disable=W0703
        error("Error for variable :" + variable_path)
        error(f"Unexpected error: {sys.exc_info()}")
        traceback.print_exc()
    return v
def _recurse_and_display(
    self, dataset, slice_index, recurse_subgroup=True, ignored_variable_list=None, cmap=None, vmin=None, vmax=None
):
    """
    Display *dataset* (a group or a single variable) and, optionally, recurse
    into its subgroups.

    :param dataset: a nc group/Dataset, or a nc.Variable to display directly
    :param slice_index: dict of dimension name -> index used to reduce dimensions
    :param recurse_subgroup: whether to descend into subgroups
    :param ignored_variable_list: list of variable names to skip (None -> empty)
    :param cmap: matplotlib colormap forwarded to the display
    :param vmin: lower color bound forwarded to the display
    :param vmax: upper color bound forwarded to the display
    :return: the data of the last variable displayed
    """
    last_variable = None
    if ignored_variable_list is None:
        ignored_variable_list = []
    if isinstance(dataset, nc.Variable):
        return self._print_variable(dataset, slice_index, ignored_variable_list, cmap=cmap, vmin=vmin, vmax=vmax)
    if dataset.parent is not None:
        # Fix: closed the parenthesis in the printed heading
        header(f"Group {dataset.name} ({dataset.path})")
    else:
        # root dataset has no name
        header(f"Root Group {dataset.path}")
    if not self.quiet:
        pprint(f"{dataset}")
    for variable in sorted(dataset.variables):
        last_variable = self._print_variable(
            dataset[variable], slice_index, ignored_variable_list, cmap=cmap, vmin=vmin, vmax=vmax
        )
    if recurse_subgroup:
        for subgroup_name in dataset.groups:
            # Fix: forward recurse_subgroup and ignored_variable_list to the
            # recursion -- the original silently dropped the ignore list so
            # ignored variables were still displayed inside subgroups.
            last_variable = self._recurse_and_display(
                dataset.groups[subgroup_name],
                slice_index,
                recurse_subgroup=recurse_subgroup,
                ignored_variable_list=ignored_variable_list,
                cmap=cmap,
                vmin=vmin,
                vmax=vmax,
            )
    return last_variable
def walk_tree_group(self, dataset: Optional[nc.Dataset] = None):
def walk_tree_group(self, dataset: Optional[nc.Dataset] = None) -> Generator[nc.Group, None, None]:
if dataset is None:
dataset = self.dataset
yield dataset.groups.values()
yield dataset
for value in dataset.groups.values():
yield from self.walk_tree_group(value)
def walk_tree_variables(self, dataset: "Optional[nc.Dataset]" = None) -> "Generator[nc.Variable, None, None]":
    """
    Generator over every variable of every group at or below *dataset*.

    Fix: the span held merge residue (old and new implementations back to
    back); this keeps the single coherent version, and fixes the "valiable"
    typo. Annotations are strings so they are not evaluated at def time.

    :param dataset: group to start from; None starts at the root dataset
    :yield: each variable encountered, group by group, depth-first
    """
    for group in self.walk_tree_group(dataset):
        yield from group.variables.values()
def walk_tree_vltypes(self, dataset: Optional[nc.Dataset] = None) -> Generator[nc.VLType, None, None]:
    """Yield every variable-length type declared in any group at or below *dataset*."""
    for current_group in self.walk_tree_group(dataset):
        yield from current_group.vltypes.values()
def walk_tree_dimensions(self, dataset: Optional[nc.Dataset] = None) -> Generator[nc.Dimension, None, None]:
    """Yield every dimension declared in any group at or below *dataset*."""
    for current_group in self.walk_tree_group(dataset):
        yield from current_group.dimensions.values()
@contextmanager
def open_nc_reader(sonar_path: str):
    """
    Define a With Statement Context Manager for a NcReader.
    Allow opening a NcReader in a With Statement; the file is closed on exit.

    :param sonar_path: path of the netCDF file to open
    :raises OSError: when netCDF4 cannot open the file
    :yield: a quiet NcReader on the opened file
    """
    # Fix: the span held merge residue (duplicated docstring/constructor
    # lines). Also close the reader explicitly: the original `del` only
    # unbound the local name, leaving cleanup to garbage collection.
    sonar_reader = NcReader(sonar_path, quiet=True)
    try:
        yield sonar_reader
    finally:
        sonar_reader.close()
# in case we are started in standalone app, for debug only
if __name__ == "__main__":
# due to relative import should be run as a module (python -m ...)
file_path = "D:/data/file/XSF/Movies/Sardine_schools_1.xsf.nc"
file_path = "D:/XSF/0006_20200504_111056_FG_EM122.xsf.nc"
file_path = "D:/data/file/XSF/ExampleSonarData/test90-D20171107-T195133.nc"