Commit 484d75d4 authored by BODERE's avatar BODERE
Browse files

fix: endpoint + PODAAC

parent 470bcc33
Pipeline #6523 passed with stage
in 1 minute and 37 seconds
...@@ -16,39 +16,38 @@ from starlette.requests import Request ...@@ -16,39 +16,38 @@ from starlette.requests import Request
from starlette.responses import JSONResponse, Response from starlette.responses import JSONResponse, Response
from opensearx_ws.config import j2_templates, opensearch_engines, settings from opensearx_ws.config import j2_templates, opensearch_engines, settings
from opensearx_ws.opensearch import merge_opensearch_responses from opensearx_ws.opensearch import merge_opensearch_responses, OpensearchResponse
from opensearx_ws.opensearch.model import (OpensearchQueryParameters, from opensearx_ws.opensearch.model import (OpensearchQueryParameters,
OpensearchResponseFormat) OpensearchResponseFormat)
app = FastAPI(root_path=settings.context_path) app = FastAPI(
root_path=settings.context_path,
title="OpensearX",
description="Execute requests on opensearch engines and merge the results.",
version="0.1.6"
)
@app.get("/engines")
def engines() -> JSONResponse:
    """Expose the configured opensearch engines as JSON (debug helper).

    Will be removed for production.
    """
    engines_conf = {name: engine.to_dict() for name, engine in opensearch_engines.items()}
    return JSONResponse(engines_conf)
@app.get("/granules") @app.get("/granules", response_model=OpensearchResponse)
@app.get("/granules{response_fmt}") @app.get("/granules.{response_fmt}", response_model=OpensearchResponse)
async def granules( async def granules(
request: Request, request: Request,
response_fmt: OpensearchResponseFormat = OpensearchResponseFormat.atom, response_fmt: OpensearchResponseFormat = OpensearchResponseFormat.atom,
params: OpensearchQueryParameters = Depends()) -> Response: params: OpensearchQueryParameters = Depends()) -> Response:
"""granules endpoint """Execute a request on each opensearch engine if the dataset is available.
Execute a request on each opensearch engine if the dataset is available. If the output format is `raw`, then return a list for the raw responses.
If the output format is `.raw`, then return a list for the raw responses. Otherwise, the responses are merged into a single one and then is formatted
Otherwise, the responses are merged into a single response and the response id formatted to the specified format (`atom`, `json`)
with the specified format (`.atom`, `.json`)
:param request:
:param response_fmt: output format (atom, json, raw)
:param params:
:return:
""" """
# prepare task (i.e. coroutines) # prepare task (i.e. coroutines)
...@@ -80,7 +79,7 @@ async def granules( ...@@ -80,7 +79,7 @@ async def granules(
) )
@app.get("/") @app.get("/", include_in_schema=False)
def home(request: Request): def home(request: Request):
"""Home endpoint """Home endpoint
""" """
......
#!/usr/bin/env python #!/usr/bin/env python
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
"""Opensearch engines""" """Opensearch engines"""
import json
import logging import logging
from typing import Dict, List, Optional, Tuple from typing import Dict, List, Optional, Tuple
from urllib.parse import parse_qs, quote, urlencode, urlsplit, urlunsplit from urllib.parse import parse_qs, quote, urlencode, urlsplit, urlunsplit
...@@ -23,7 +24,7 @@ class OpensearchEngine: ...@@ -23,7 +24,7 @@ class OpensearchEngine:
def __init__(self, root_path: str, timeout: float = 10.0):
    """Initialise the engine.

    :param root_path: base URL of the remote opensearch endpoint
    :param timeout: HTTP timeout in seconds (default 10.0)
    """
    self.timeout = timeout
    self.root_path = root_path
    # Module-qualified logger rather than the anonymous root logger.
    self._log = logging.getLogger(__name__)
async def request(self, params: OpensearchQueryParameters) -> Optional[OpensearchResponse]: async def request(self, params: OpensearchQueryParameters) -> Optional[OpensearchResponse]:
async def request_hook(request): async def request_hook(request):
...@@ -134,6 +135,12 @@ class OpensearchAtomEngine(OpensearchEngine): ...@@ -134,6 +135,12 @@ class OpensearchAtomEngine(OpensearchEngine):
def get_text(self, elt: Element, name: str) -> Optional[str]:
    """Return the text of the first child of ``elt`` matching ``name``.

    The lookup is namespace-aware (uses ``self.namespaces``).

    :param elt: parent XML element
    :param name: tag to look up, possibly namespace-prefixed (e.g. "dc:date")
    :return: the matching child's text, or None when no such child exists
        (or when the child carries no text)
    """
    # NOTE: removed leftover debug prints for "dc:date"; they also called the
    # non-existent Element.tostring (tostring is a module-level function of
    # xml.etree.ElementTree), which would raise AttributeError at runtime.
    sub_elt = elt.find(name, namespaces=self.namespaces)
    if sub_elt is None:
        return None
    return sub_elt.text
...@@ -141,6 +148,18 @@ class OpensearchAtomEngine(OpensearchEngine): ...@@ -141,6 +148,18 @@ class OpensearchAtomEngine(OpensearchEngine):
def get_children(self, elt: Element, name: str) -> List:
    """Return every child of ``elt`` matching ``name`` (namespace-aware)."""
    matches = elt.findall(name, namespaces=self.namespaces)
    return matches
def to_dict(self):
    """Serializable view of the engine configuration (root path + timeout)."""
    return {"root_path": self.root_path, "timeout": self.timeout}
def __repr__(self):
    """JSON dump of the engine configuration (see ``to_dict``)."""
    config = self.to_dict()
    return json.dumps(config)
def __str__(self):
    """Same JSON representation as ``__repr__``."""
    return self.__repr__()
class IfremerOpensearchEngine(OpensearchAtomEngine): class IfremerOpensearchEngine(OpensearchAtomEngine):
"""Ifremer opensearch engine""" """Ifremer opensearch engine"""
...@@ -155,6 +174,19 @@ class IfremerOpensearchEngine(OpensearchAtomEngine): ...@@ -155,6 +174,19 @@ class IfremerOpensearchEngine(OpensearchAtomEngine):
class JPLOpensearchEngine(OpensearchAtomEngine): class JPLOpensearchEngine(OpensearchAtomEngine):
"""JPL opensearch engine""" """JPL opensearch engine"""
# XML namespace prefix -> URI map used for namespace-aware ElementTree
# lookups (find/findall) when parsing the JPL Atom responses; prefixes
# match the ones used in tag queries such as "dc:date".
# NOTE(review): URIs presumably mirror the feed's declared namespaces
# (CMR/Earthdata Atom) — confirm against an actual response document.
namespaces = {
    "feed": "http://www.w3.org/2005/Atom",
    "opensearch": "http://a9.com/-/spec/opensearch/1.1/",
    "geo": "http://a9.com/-/opensearch/extensions/geo/1.0/",
    "time": "http://a9.com/-/opensearch/extensions/time/1.0/",
    "georss": "http://www.georss.org/georss",
    "dc": "http://purl.org/dc/elements/1.1/",
    "gml": "http://www.opengis.net/gml",
    "cwic": "http://cwic.wgiss.ceos.org/opensearch/extensions/1.0/",
    "echo": "https://cmr.earthdata.nasa.gov/search/site/docs/search/api.html#atom",
    "esipdiscovery": "http://commons.esipfed.org/ns/discovery/1.2/",
    "eo": "http://a9.com/-/opensearch/extensions/eo/1.0/"
}
def _prepare_query_parameters(self, params: OpensearchQueryParameters) -> Dict: def _prepare_query_parameters(self, params: OpensearchQueryParameters) -> Dict:
return { return {
......
...@@ -13,14 +13,11 @@ from httpx import Response ...@@ -13,14 +13,11 @@ from httpx import Response
from pydantic.main import BaseModel from pydantic.main import BaseModel
from starlette.datastructures import URL from starlette.datastructures import URL
# --------------------------------------------------------------------------
# MODEL
# --------------------------------------------------------------------------
class OpensearchResponseFormat(str, Enum):
    """Output formats accepted by the granules endpoint."""

    # The str mixin makes members compare equal to their plain string value.
    atom = 'atom'
    json = 'json'
    raw = 'raw'
class OpensearchQueryParameters(BaseModel): class OpensearchQueryParameters(BaseModel):
......
...@@ -28,8 +28,8 @@ exclude = ["tests"] ...@@ -28,8 +28,8 @@ exclude = ["tests"]
[tool.poetry-dynamic-versioning] [tool.poetry-dynamic-versioning]
enable = true enable = true
vcs = "git" vcs = "git"
#style = "semver" style = "semver"
#pattern = "^v?(?P<base>\\d+\\.\\d+\\.\\d+)(-?((?P<stage>[a-zA-Z]+)\\.?(?P<revision>\\d+)?))?$" pattern = "^v?(?P<base>\\d+\\.\\d+\\.\\d+)(-?((?P<stage>[a-zA-Z]+)\\.?(?P<revision>\\d+)?))?$"
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = "^3.7" python = "^3.7"
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment