Commit fa50f41a authored by CEVAER

Extracting cyclone wind speed from file and writing to netcdf

parent be4c5a61
@@ -27,7 +27,7 @@ def extract_date_from_filename(filename):
def get_track_points_from_database(session, smap_Date):
    req_tracks = session.query(SimpleTrack, func.ST_X(cast(SimpleTrack.geom, Geometry)).label("lon"),
                               func.ST_Y(cast(SimpleTrack.geom, Geometry)).label("lat")) \
        .filter(and_(SimpleTrack.date >= smap_Date,
                     SimpleTrack.date < smap_Date + datetime.timedelta(days=1))).group_by(SimpleTrack).all()
    return req_tracks
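A minimal sketch of how this query might be driven, assuming a standard SQLAlchemy session factory and that SimpleTrack is the cyclone-track model already mapped elsewhere in the repository (the connection URL is a placeholder copied from the --dbd help text further down):

import datetime
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

# Placeholder URL; in the script it comes from the --dbd argument.
engine = create_engine("postgresql://user:pass@host:5432/base_name")
Session = sessionmaker(bind=engine)

# All track points recorded on the same day as the SMAP file.
smap_date = datetime.datetime(2020, 7, 29)
for track, lon, lat in get_track_points_from_database(Session(), smap_date):
    print(track.sid, track.date, lon, lat)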
@@ -39,7 +39,6 @@ def get_colocated_track_points(dataset, track_points, file_date, deg_delta=3, ti
        logger.debug(f"Track point : {track_point}")
        sel = dataset.sel(lon=track_point["lon"], lat=track_point["lat"], method="nearest")
        logger.debug(f"Point find : lon: {sel['lon'].values}, lat: {sel['lat'].values}, minute: {sel['minute'].values}")
        lonIndex = np.where(dataset["lon"].data == sel["lon"].data)[0][0]
        latIndex = np.where(dataset["lat"].data == sel["lat"].data)[0][0]
        logger.debug(f"Point find : lon: {sel['lon'].values}, lat: {sel['lat'].values}, minute: {sel['minute'].values},"
@@ -59,9 +58,8 @@ def get_colocated_track_points(dataset, track_points, file_date, deg_delta=3, ti
            if node == 0 or not "time_offset" in track_point or track_point["time_offset"] > time_offset:
                track_point["time_offset"] = time_offset
                track_point["node"] = node
-               track_point["lonIndex"] = lonIndex
-               track_point["latIndex"] = latIndex
-               logger.debug(f"ISNOTNA")
+               track_point["lon_index"] = lonIndex
+               track_point["lat_index"] = latIndex
        if "time_offset" in track_point:
            track_point_offsets.append(track_point)
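The lon_index/lat_index values stored above are integer grid positions of the nearest SMAP cell, obtained by matching the coordinates returned by sel(..., method="nearest") back against the full coordinate arrays. A self-contained sketch of that lookup pattern on a synthetic grid (grid spacing and sizes are illustrative only):

import numpy as np
import xarray as xr

# Synthetic 0.25-degree global grid used as a stand-in for the SMAP wind grid.
ds = xr.Dataset(coords={"lon": np.arange(0.125, 360, 0.25),
                        "lat": np.arange(-89.875, 90, 0.25)})

sel = ds.sel(lon=200.3, lat=15.7, method="nearest")
lon_index = np.where(ds["lon"].data == sel["lon"].data)[0][0]
lat_index = np.where(ds["lat"].data == sel["lat"].data)[0][0]
print(lon_index, lat_index)  # integer positions later usable with isel/slice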
@@ -78,9 +76,16 @@ def get_colocated_track_points(dataset, track_points, file_date, deg_delta=3, ti
    return kept_track_points


-#def extract
-def process_smap_file(session, file):
+def extract_write_cyclone_data(dataset, kept_track_points, filename, output_path, extract_size=30):
+    for sid, track_point in kept_track_points.items():
+        sel = dataset.isel(lon=slice(track_point["lon_index"] - extract_size, track_point["lon_index"] + extract_size),
+                           lat=slice(track_point["lat_index"] - extract_size, track_point["lat_index"] + extract_size),
+                           node=track_point["node"])
+        sel.to_netcdf(os.path.join(output_path, filename))
+
+
+def process_smap_file(session, file, output_path):
    logger.debug(f"Processing {file}...")
    filename = os.path.basename(file)
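extract_write_cyclone_data slices a fixed-size box of grid cells around each kept track point (60x60 cells with the default extract_size=30) and writes it to NetCDF. A self-contained sketch of the same isel/slice pattern on a synthetic dataset (variable names, grid size and output paths below are placeholders, not the real SMAP layout):

import os
import numpy as np
import xarray as xr

# Synthetic stand-in for a SMAP daily wind file.
ds = xr.Dataset(
    {"wind": (("node", "lat", "lon"), np.random.rand(2, 720, 1440))},
    coords={"lon": np.arange(0.125, 360, 0.25),
            "lat": np.arange(-89.875, 90, 0.25),
            "node": [0, 1]},
)

extract_size = 30
lon_index, lat_index, node = 800, 400, 0  # indices a track point might carry

# 60x60-cell box centered on the track point, restricted to one node (pass).
box = ds.isel(lon=slice(lon_index - extract_size, lon_index + extract_size),
              lat=slice(lat_index - extract_size, lat_index + extract_size),
              node=node)

os.makedirs("./output_test", exist_ok=True)
box.to_netcdf(os.path.join("./output_test", "example_extract.nc"))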
@@ -90,13 +95,15 @@ def process_smap_file(session, file):
    logger.debug(f"Number of track point found : {len(track_points)}")
    track_points = [{"sid": track_point[0].sid, "lon": shape360(track_point.lon, 0)[0],
                     "lat": track_point.lat,
                     "date": track_point[0].date} for track_point in track_points]

    dataset = xarray.open_dataset(file)
    kept_track_points = get_colocated_track_points(dataset, track_points, file_date)
    logger.info(f"For file {filename} Kept track that will be used to extract SMAP data: {kept_track_points}")
+    extract_write_cyclone_data(dataset, kept_track_points, filename, output_path)


if __name__ == "__main__":
    description = """
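The shape360(track_point.lon, 0) call above wraps track longitudes into the 0-360 range used by the SMAP grid. A hypothetical stand-in showing the intended conversion (this is not the actual shape360 implementation):

import numpy as np

def wrap_to_360(lon):
    # Map longitudes from [-180, 180] to [0, 360) so they can be matched
    # against a 0-360 indexed grid.
    lon = np.atleast_1d(lon).astype(float)
    return np.where(lon < 0, lon + 360.0, lon)

print(wrap_to_360(-45.5)[0])  # 314.5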
@@ -107,6 +114,8 @@ if __name__ == "__main__":
    parser.add_argument("--dbd", action="store", type=str, required=True,
                        help='database (postgresql://user:pass@host:5432/base_name)')
    parser.add_argument("--debug", action="store_true", default=False, help="Run in debug mode (verbose output)")
+    parser.add_argument("-o", "--output", action="store", type=str, default="./output_test",
+                        help="Output path where files will be written")
    args = parser.parse_args()

    if sys.gettrace():
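With the new option the extracted files can be routed to any writable directory. An example invocation (the script filename here is assumed, and the database URL is a placeholder matching the --dbd help text):

python extract_smap_cyclones.py --dbd postgresql://user:pass@host:5432/base_name -o /tmp/smap_extracts --debug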
@@ -127,4 +136,4 @@ if __name__ == "__main__":
        "/home/datawork-cersat-public/provider/remss/satellite/l3/smap/smap/wind/v1.0/daily/2020/211/RSS_smap_wind_daily_2020_07_29_v01.0.nc",
    ]
    for f in smap_tests:
-        process_smap_file(Session(), f)
+        process_smap_file(Session(), f, args.output)