Commit 88578ece authored by PIOLLE

code refactoring

parent f398a7b8
@@ -25,19 +25,12 @@ import shapely.geometry
from naiad.queries.server import Server
from naiad.queries.index import Index
from naiad.processing.tile import Tile, TileEncoder
from naiad.queries.tile import Tile, TileEncoder
from naiad.queries.search import SpatioTemporalSearch
from naiad.utils.filelocator import FileLocator
from naiad.utils.executor import Executor
from naiad.processing.geoshape import GeoShape
logger = logging.getLogger()
logger.setLevel(logging.INFO)
tracer = logging.getLogger('elasticsearch.trace')
tracer.setLevel(logging.WARNING)
tracer.addHandler(logging.StreamHandler())
def get_options():
parser = argparse.ArgumentParser(
description='Search the granules matching a region and period of '
@@ -151,32 +144,28 @@ def get_options():
return args
if '__main__' == __name__:
logger = logging.getLogger()
logger.setLevel(logging.INFO)
tracer = logging.getLogger('elasticsearch.trace')
tracer.setLevel(logging.WARNING)
tracer.addHandler(logging.StreamHandler())
args = get_options()
if args.debug:
logger.setLevel(logging.DEBUG)
if args.show or args.show_all:
try:
import matplotlib.pyplot as plt
except ImportError:
raise ImportError("matplotlib is missing")
name = args.name.lower()
if args.area:
lonmin, latmin, lonmax, latmax = literal_eval(args.area)
else:
# global selection by default
lonmin, latmin, lonmax, latmax = -180, -90., 180., 90
area = shapely.geometry.asPolygon([
(lonmin, latmax),
(lonmax, latmax),
(lonmax, latmin),
(lonmin, latmin),
(lonmin, latmax),
])
area = shapely.geometry.box(lonmin, latmin, lonmax, latmax)
if args.start:
start = dateutil.parser.parse(args.start)
else:
@@ -267,15 +256,13 @@ if '__main__' == __name__:
print(granule)
elif args.output_format == "json":
print (json.dumps(res['data'], cls=TileEncoder, sort_keys=True,
print(json.dumps(res['data'], cls=TileEncoder, sort_keys=True,
indent=4, separators=(',', ': ')))
# display
if args.show_all:
for granule in res['data']:
GeoShape.draw(granule.shape, colour="green")
GeoShape.draw(area, colour="red")
plt.show()
Tile.show_all(res['data'])
elif args.show:
for granule in res['data']:
granule.show(clip=area)
"""
helper functions
"""
import os
from pathlib import Path
import sys
from typing import List
import yaml
from elasticsearch.exceptions import ConnectionError, NotFoundError
from shapely.geometry import box
from .queries.search import SpatioTemporalSearch
from .queries.server import Server
from .utils.filelocator import FileLocator
def config_file(config: str = None):
"""Get the local configuration file with the pre-configured accesses
Args:
config (str, optional): configuration file containing the preconfigured
Naiad accesses. If not provided, the function will look for the
NAIAD_HOME env variable or fall back to the default
``.naiad/search.yaml`` file in the user home directory.
"""
if config is None:
if 'NAIAD_HOME' in os.environ:
config = Path(os.environ['NAIAD_HOME']) / 'search.yaml'
else:
# look in user home
config = Path.home() / '.naiad/search.yaml'
config = Path(config)
if not config.exists():
raise IOError(
'Index configuration file {} not found'.format(config)
)
with open(config) as f:
config = yaml.load(f, Loader=yaml.FullLoader)
return config
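# For illustration, a minimal sketch of the ``search.yaml`` layout that
# config_file() and get_config_from_file() appear to expect: shared defaults
# under ``globals`` and one entry per index under ``indexes``. The index
# names, host and credentials below are made up and the real schema may hold
# more keys; this is a hedged example, not the reference format.
_EXAMPLE_SEARCH_YAML = """
globals:
    es_server: http://localhost:9200
    username: naiad
    password: secret
indexes:
    sst_l3:
        # per-index values override the globals
        es_server: http://es.example.org:9200
    swot_l2: {}
"""
# parsing it with yaml.load(_EXAMPLE_SEARCH_YAML, Loader=yaml.FullLoader)
# yields the dictionary that config_file() returns.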
def get_config_from_file(indices: List[str], config: str = None):
"""Get the configuration details of a list of index
Args:
config (str, optional): configuration file containing the preconfigured
Naiad accesses. If not provided, the will search for
NAIAD_HOME env variable or by default in the default
``.naiad/search.yaml`` file in the user home directory.
"""
all_configs = config_file(config)
idxconfig = []
for index in indices:
if index not in all_configs['indexes']:
raise ValueError(
'index {} missing in configuration file'.format(index)
)
for param in ['es_server', 'username', 'password']:
if param not in all_configs['indexes'][index]:
all_configs['indexes'][index][param] = \
all_configs['globals'][param]
idxconfig.append(all_configs['indexes'][index])
return idxconfig
def search(
indices,
start,
end,
area=box(-180, -90, 180., 90.),
precise=False,
constraints=None,
configfile=None,
fullpath=False):
"""Search granules wrt to some user criteria"""
config = get_config_from_file(indices, configfile)
# all sought indexes must be hosted on the same Elasticsearch server
servers = set(_['es_server'] for _ in config)
if len(servers) != 1:
raise NotImplementedError(
'searching indexes hosted on different Elasticsearch servers is not '
'yet supported')
granule_constraints = None
if constraints is not None:
fields = constraints.split(';')
granule_constraints = []
for item in fields:
prop, oper, val = item.split(' ')
if oper not in ['eq', 'lt', 'le', 'gt', 'ge']:
raise Exception("Invalid constraint operator : %s", oper)
granule_constraints.append((prop, oper, val))
# create search query
searchq = SpatioTemporalSearch(
indices, area, start, end,
granule_constraints=granule_constraints,
precise=precise
)
# connect to Naiad server and run the query
try:
# create Naiad server object to query
es = Server(config[0]['es_server'],
login=config[0]['username'],
password=config[0]['password'])
res = searchq.run(es, precise=precise)
except ConnectionError:
print("Cannot perform the search (the Elasticsearch server cannot be "
"reached)", file=sys.stderr)
sys.exit(1)
except NotFoundError:
print("The requested index does not exist", file=sys.stderr)
sys.exit(1)
# complete with full path name if required
if fullpath:
datastore = FileLocator(configfile)
for granule in res['data']:
# note: assumes a single sought index; the index name selects the
# storage layout in the datastore configuration
granule.granule = datastore.get_full_path(granule.granule, indices[0])
return res['data']
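# A possible use of the search() helper above, assuming a single
# pre-configured index: the index name, period and constraint are purely
# illustrative and a reachable Elasticsearch server is required.
def _example_search():
    from datetime import datetime

    granules = search(
        indices=['sst_l3'],                 # hypothetical index name
        start=datetime(2019, 1, 1),
        end=datetime(2019, 2, 1),
        area=box(-10., 40., 5., 55.),       # lonmin, latmin, lonmax, latmax
        constraints='day_or_night eq day',  # '<field> <op> <value>' items, ';' separated
        precise=True,
        fullpath=False)
    for granule in granules:
        print(granule)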
@@ -14,9 +14,9 @@ import shapely.geometry
import elasticsearch
import elasticsearch.helpers
from naiad.processing.tile import Tile
from naiad.processing.geoshape import GeoShape
from naiad.queries.query import Query
from .tile import Tile
from .geoshape import GeoShape
from .query import Query
# available index levels
@@ -723,7 +723,7 @@ class MultiSpatioTemporalSearch(SpatialQuery):
s0 = iter * max_number_of_requests
s1 = min((iter + 1) * max_number_of_requests, len(self.searches))
for search in self.searches[s0:s1] :
for search in self.searches[s0:s1]:
query_list += json.dumps({})
query_list += "\n"
query_list += search.json()
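# The loop above assembles the body of an Elasticsearch multi-search
# (_msearch) request: newline-delimited JSON alternating a header line (empty
# here, i.e. default index and options) with one query line per search. A
# minimal standalone sketch of that format, using placeholder match_all
# queries rather than the actual Naiad query DSL:
def _msearch_body_sketch():
    import json

    queries = [{"query": {"match_all": {}}}] * 2
    body = ""
    for query in queries:
        body += json.dumps({}) + "\n"     # header line for this search
        body += json.dumps(query) + "\n"  # the search request itself
    # 'body' can then be sent with elasticsearch.Elasticsearch.msearch(body=body)
    return body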
@@ -11,7 +11,7 @@ Module to manage the execution of an external command line tool with a granule p
.. codeauthor:: Jeff Piolle <jfpiolle@ifremer.fr>
"""
import os
import ConfigParser
import configparser
from os.path import expanduser
import shutil
import inspect
@@ -53,7 +53,6 @@ class Executor(object):
shutil.copyfile(default, toolstore)
self.toolstore = toolstore
# put datastore in cache
print "TOOOL", self.toolstore
self.load_toolstore()
def load_toolstore(self):
@@ -10,75 +10,39 @@ Class to locate granules indexed in Naiad
.. sectionauthor:: Jeff Piolle <jfpiolle@ifremer.fr>
.. codeauthor:: Jeff Piolle <jfpiolle@ifremer.fr>
"""
import os
import ConfigParser
from os.path import expanduser
import shutil
import inspect
import datetime
from pathlib import Path
from dateutil import parser
from naiad import config_file
class FileLocator(object):
"""Class to locate granule files indexed in Naiad.
Args:
datastore (str, optional): configuration file containing the path and
config (str, optional): configuration file containing the path and
structure of the file organization for each dataset indexed in
Naiad. If not provided, the FileLocator object will search for
NAIAD_DATASTORE env variable or by default in the default
``.naiad/datastore.cfg`` file in the user home directory.
the NAIAD_HOME env variable or fall back to the default
``.naiad/search.yaml`` file in the user home directory.
"""
def __init__(self, datastore=None):
self.datastore = datastore
if datastore is None:
if 'NAIAD_DATASTORE' in os.environ:
# look NAIAD_DATASTORE
self.datastore = os.environ['NAIAD_DATASTORE']
def __init__(self, config: str = None):
self.config = config_file(config)
else:
# look in user home
naiadhome = os.path.join(expanduser("~"), '.naiad')
datastore = os.path.join(expanduser("~"),
'.naiad',
'mydatastore.cfg')
if not os.path.exists(datastore):
if not os.path.exists(naiadhome):
os.makedirs(naiadhome)
if not os.path.exists(datastore):
default = os.path.join(
os.path.dirname(inspect.getfile(self.__class__)),
'datastore.cfg'
)
shutil.copyfile(default, datastore)
self.datastore = datastore
# put datastore in cache
self.load_datastore()
def load_datastore(self):
"""load datastore configuration file in cache"""
configfile = self.datastore
config = ConfigParser.RawConfigParser()
config.optionxform = str
if not os.path.exists(configfile):
print("Datastore file {} is not existing".format(configfile))
exit(-1)
config.read(configfile)
self.cache = config
def get_full_path(self, granule, dataset_id):
try:
path = self.cache.get("storage_path", dataset_id)
except ConfigParser.NoOptionError:
path = self.config[dataset_id]["storage_path"]
except KeyError:
raise ValueError(
"No storage location is defined for dataset {} in {}"
.format(dataset_id, self.datastore)
"No storage location is defined for index {}"
.format(dataset_id)
)
try:
extractor = (self.cache.get("time_extractor", dataset_id)
.replace('$FILE', granule))
except ConfigParser.NoOptionError:
raise ValueError("No way specified for granule reference time "
"extraction")
extractor = self.config[dataset_id]["time_extractor"]\
.replace('$FILE', granule)
except KeyError:
raise ValueError(
"No specified mean of extracting reference time for granule ")
date = eval(extractor)
return os.path.join(date.strftime(path), granule)
return Path(date.strftime(path)) / granule
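# A worked example of how get_full_path() composes the full path; the dataset
# entry below is hypothetical (storage_path is a strftime pattern describing
# the directory layout, time_extractor a python expression where $FILE is
# replaced by the granule name before being evaluated).
def _full_path_sketch():
    granule = '20190115120000-IFR-SSTfnd.nc'             # made-up file name
    storage_path = '/data/sst_l3/%Y/%j'                  # made-up layout
    time_extractor = "parser.parse('$FILE'.split('-')[0])"

    date = eval(time_extractor.replace('$FILE', granule))
    # -> /data/sst_l3/2019/015/20190115120000-IFR-SSTfnd.nc
    return Path(date.strftime(storage_path)) / granule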
@@ -38,13 +38,9 @@ setup(
),
long_description=open('README.rst').read(),
install_requires=[
'numpy>=1.7.1',
'scipy',
'descartes>=1.0.1',
'colorama',
'pyproj',
'python-dateutil',
'elasticsearch'
'elasticsearch<7'
],
package_data={'naiad/utils': ['datastore.cfg', 'toolstore.cfg']},
)