Commit 1e3a0431 authored by CEVAER's avatar CEVAER
Browse files

Added Netcdf product polar

parent f215d3d0
......@@ -43,7 +43,9 @@ if __name__ == "__main__":
fix_path = os.path.join(args.path, "fix")
os.makedirs(fix_path, exist_ok=True)
status = find_center_plot_save(args.path, args.sat_file, args.resolution, fix_path, file_logging=True,
netcdf_path = os.path.join(args.path, "product")
os.makedirs(netcdf_path, exist_ok=True)
status = find_center_plot_save(args.path, args.sat_file, args.resolution, fix_path, netcdf_path, file_logging=True,
debug=args.debug)
if status is True:
......
......@@ -16,7 +16,7 @@ setup(name='tc_center',
'numpy', 'netCDF4', "matplotlib", "owi", "numba",
"scipy", "shapely", "python-polylabel", "colormap", "bokeh<2.1.0",
"dask-jobqueue", "pandas", "holoviews<1.13.3", "xarray",
"owi>=0.0.dev0", "geoviews<1.9.0", "easydev", "geopandas",
"owi>=0.0.dev0", "geoviews<1.9.0", "easydev", "geopandas", "python-opencv",
"selenium", "rasterio", 'colormap-ext>=0.0.dev0', 'atcf>=0.0.dev0',
"pathurl>=0.0.dev0"
]
......
import sys
import numpy as np
from shapely import wkt
from shapely.geometry import LineString
from tc_center.functions import open_SAR_image, mask_outlier_points, mask_hetero, track_eye_sar, center_eye, make_cmap,\
center_status, proportion_valid_data_around_center, land_mask
import shapely.geometry as shp
from tc_center.analysis_product import save_netcdf
from tc_center.plotting import create_control_plot, save_plot
from tc_center.fix import save_fix_analysis
import matplotlib.pyplot as plt
import os
import os.path
......@@ -13,28 +15,12 @@ import pandas as pd
import geopandas as gpd
import shapely.wkt
import owi
import xarray as xr
import holoviews as hv
import geoviews as gv
import geoviews.tile_sources as gts
import rasterio.features
from bokeh.resources import INLINE
import colormap_ext
import matplotlib.cm as cm
import cartopy.crs as ccrs
import datetime
import atcf
import copy
import logging
logger = logging.getLogger(__name__)
# Backend toggle for holoviews rendering: static matplotlib output when True,
# interactive bokeh output (the default used here) when False.
matplot = False
if matplot:
    hv.extension('matplotlib')
else:
    hv.extension('bokeh')
def api_req(sat_file):
"""
......@@ -195,271 +181,7 @@ def compute_centers(sat_file, resolution, api_data=None):
# lat_eye_old, lon_eye_fg, lat_eye_fg, is_ok
def save_fix_analysis(path, sat_file, atcf_filename, lon_eye, lat_eye, rmw, vmax, cyclone_name, sid, sat_mission,
                      sub_basin, acq_time):
    """Write the detected cyclone center as an ATCF "fix" record.

    Two files are produced in ``path``: ``atcf_filename``, a cumulative fix
    deck merged with any pre-existing one (deduplicated on "start time"),
    and ``<sat_file basename>.dat`` holding only this acquisition's fix.

    ``rmw``, ``vmax`` and ``cyclone_name`` are currently unused but kept so
    the signature matches the callers.
    """
    atcf_outfile = os.path.join(path, atcf_filename)
    sar_outfile = os.path.join(path, f"{os.path.basename(sat_file).replace('.nc', '')}.dat")
    fix = pd.DataFrame()
    # Pre-create every column of the ATCF fix format so atcf.write gets a
    # complete record even for fields deliberately left empty.
    for emptyCol in ["BASIN", "CY", "date", "fix format", "fix type", "center/intensity", "flagged indicator", "lat", "lon",
                     "height of ob", "posit confidence", "wind speed", "confidence", "pressure", "pressure confidence",
                     "pressure derivation", "rad", "windcode", "rad1", "rad2", "rad3", "rad4",
                     "radmod1", "radmod2", "radmod3", "radmod4", "radii confidence", "MRD", "eye", "subregion",
                     "fix site", "initials", "analyst initials", "start time", "end time", "distance to nearest data", "SST",
                     "observation sources", "comments"]:
        fix.loc[0, emptyCol] = None
    stormid = sid[2:4]  # e.g. "09" from "al092020"
    basin = sid[0:2]  # e.g. "al"
    mis_to_type = {"SENTINEL-1 A": "SEN1", "SENTINEL-1 B": "SEN1", "RADARSAT-2": "RS2"}
    # NI ok, SI ok, SP ok, NWP ok, NEP ok, NA ok, WA ok, EA ok, AS ok, BB ok, CS ok, GM ok
    # It is ok to set North Indian ocean to A (=arabian sea) because the north indian ocean is fully covered by sub basin
    # so "NI" should never be output by the API in the sub_basin field.
    sub_basin_subregion = {"AS": "A", "BB": "B", "CP": "C", "NEP": "E", "NA": "L", "SP": "P", "SI": "S", "NWP": "W",
                           "GM": "L", "CS": "L", "NI": "A", "WA": "S", "EA": "P"}
    fix.loc[0, 'BASIN'] = basin.upper()
    fix.loc[0, 'CY'] = str(stormid).zfill(2)
    fix.loc[0, 'date'] = owi.getDateFromFname(os.path.basename(sat_file)).strftime("%Y%m%d%H%M")
    fix.loc[0, "fix format"] = 70
    fix.loc[0, "fix type"] = mis_to_type[sat_mission]
    fix.loc[0, "center/intensity"] = "C"
    fix.loc[0, 'lat'] = lat_eye
    fix.loc[0, 'lon'] = lon_eye
    fix.loc[0, "height of ob"] = "10"
    fix.loc[0, "subregion"] = sub_basin_subregion[sub_basin]
    fix.loc[0, "fix site"] = "IFR"
    fix.loc[0, "initials"] = "SAR"
    fix.loc[0, "start time"] = acq_time.strftime("%Y%m%d%H%M")
    fix.loc[0, "end time"] = acq_time.strftime("%Y%m%d%H%M")
    fix.loc[0, "distance to nearest data"] = ""
    fix.loc[0, "SST"] = ""
    fix.loc[0, "observation sources"] = sat_mission
    fix.loc[0, "comments"] = "Synthetic Aperture Radar 3KM Wind Speed Analysis"
    if os.path.isfile(atcf_outfile):
        logger.info(f"ATCF output file exists ({atcf_outfile}). Reading and merging.")
        existing_fix = atcf.read(atcf_outfile, model="fix")
        # Normalize the datetime columns back to strings so merged rows
        # compare equal in drop_duplicates below.
        for col in ("date", "start time", "end time"):
            existing_fix[col] = existing_fix[col].apply(lambda x: x.strftime("%Y%m%d%H%M"))
        # Bug fix: DataFrame.append was removed in pandas 2.0; pd.concat is
        # the supported equivalent.
        to_save = pd.concat([existing_fix, fix.iloc[[0]]], ignore_index=True)
        to_save = to_save.drop_duplicates(subset=["start time"], keep="last")
        to_save["CY"] = to_save["CY"].apply(lambda x: str(x).zfill(2))
    else:
        to_save = fix
    df_result = to_save
    # Use context managers so the output handles are closed deterministically
    # (the original leaked the file objects returned by open()).
    with open(atcf_outfile, "w") as f:
        atcf.write(df_result, f, model="fix")
    logger.info("wrote %s" % atcf_outfile)
    with open(sar_outfile, "w") as f:
        atcf.write(fix, f, model="fix")
    logger.info("wrote %s" % sar_outfile)
def save_fix(path, sat_file, atcf_filename, lon_eye, lat_eye, rmw, vmax, cyclone_name, sid):
    """Write the detected cyclone center as an ATCF deck record.

    Produces ``atcf_filename`` in ``path`` (merged with any existing deck,
    deduplicated on "userdata1", i.e. the SAR filename) plus a
    per-acquisition ``<sat_file basename>.dat``.

    ``rmw`` and ``vmax`` are currently unused (the corresponding columns are
    deliberately left empty) but kept for signature compatibility.
    """
    atcf_outfile = os.path.join(path, atcf_filename)
    sar_outfile = os.path.join(path, f"{os.path.basename(sat_file).replace('.nc', '')}.dat")
    fix = pd.DataFrame()
    # Pre-create every deck column so atcf.write gets a complete record.
    for emptyCol in ['VMAX', 'MSLP', 'RAD_34_NEQ', 'RAD_34_SEQ', 'RAD_34_SWQ',
                     'RAD_34_NWQ', 'RAD_50_NEQ', 'RAD_50_SEQ', 'RAD_50_SWQ', 'RAD_50_NWQ',
                     'RAD_64_NEQ', 'RAD_64_SEQ', 'RAD_64_SWQ', 'RAD_64_NWQ', 'POUTER',
                     'ROUTER', 'RMW', 'GUSTS', 'EYE', 'DIR', 'SPEED', 'DEPTH', 'SEAS_12_NEQ', 'SEAS_12_SEQ',
                     'SEAS_12_SWQ', 'SEAS_12_NWQ', 'STORMNAME', 'USERDEFINED1', 'userdata1',
                     'USERDEFINED2', 'userdata2', 'USERDEFINED3', 'userdata3',
                     'USERDEFINED4', 'userdata4', 'USERDEFINED5', 'userdata5']:
        fix.loc[0, emptyCol] = None
    stormid = sid[2:4]  # e.g. "09" from "al092020"
    basin = sid[0:2]  # e.g. "al"
    fix.loc[0, 'date'] = owi.getDateFromFname(os.path.basename(sat_file))
    fix.loc[0, 'TAU'] = 0
    fix.loc[0, 'TECH'] = 'SAR'
    fix.loc[0, 'VMAX'] = None  # vmax
    fix.loc[0, 'RMW'] = None  # props['rmw'][eye_position_full[0], eye_position_full[1]] / 1.852
    fix.loc[0, 'EYE'] = None  # props['eye'][eye_position_full[0], eye_position_full[1]] / 1.852 * 2
    fix.loc[0, 'lat'] = lat_eye
    fix.loc[0, 'lon'] = lon_eye
    fix.loc[0, 'INITIALS'] = 'IFR'
    fix.loc[0, 'STORMNAME'] = cyclone_name.upper()
    fix.loc[0, 'CY'] = str(stormid).zfill(2)
    fix.loc[0, 'BASIN'] = basin.upper()
    fix.loc[0, 'USERDEFINED1'] = "SAR file"
    fix.loc[0, 'userdata1'] = os.path.basename(sat_file)
    if os.path.isfile(atcf_outfile):
        logger.info(f"ATCF output file exists ({atcf_outfile}). Reading and merging.")
        existing_fix = atcf.read(atcf_outfile)
        existing_fix = atcf.linearize(existing_fix)
        # Bug fix: DataFrame.append was removed in pandas 2.0; pd.concat is
        # the supported equivalent.
        to_save = pd.concat([existing_fix, fix.iloc[[0]]], ignore_index=True)
        to_save = to_save.drop_duplicates(subset=["userdata1"], keep="last")
        to_save["CY"] = to_save["CY"].apply(lambda x: str(x).zfill(2))
    else:
        to_save = fix
    df_result = atcf.delinearize(to_save)
    # Close output handles deterministically (the original leaked them).
    with open(atcf_outfile, "w") as f:
        atcf.write(df_result, f)
    logger.info("wrote %s" % atcf_outfile)
    fix = atcf.delinearize(fix)
    with open(sar_outfile, "w") as f:
        atcf.write(fix, f)
    # Bug fix: the original logged atcf_outfile here although sar_outfile
    # was the file just written.
    logger.info("wrote %s" % sar_outfile)
#df_result = atcf.linearize(pd.DataFrame([acq_atcf]))
#for emptyCol in ['VMAX', 'MSLP', 'RAD_34_NEQ', 'RAD_34_SEQ', 'RAD_34_SWQ',
# 'RAD_34_NWQ', 'RAD_50_NEQ', 'RAD_50_SEQ', 'RAD_50_SWQ', 'RAD_50_NWQ',
# 'RAD_64_NEQ', 'RAD_64_SEQ', 'RAD_64_SWQ', 'RAD_64_NWQ', 'POUTER',
# 'ROUTER', 'RMW', 'GUSTS', 'EYE', 'DIR', 'SPEED', 'DEPTH', 'SEAS_12_NEQ', 'SEAS_12_SEQ',
# 'SEAS_12_SWQ', 'SEAS_12_NWQ', 'USERDEFINED1', 'userdata1',
# 'USERDEFINED2', 'userdata2', 'USERDEFINED3', 'userdata3',
# 'USERDEFINED4', 'userdata4', 'USERDEFINED5', 'userdata5']:
# df_result.loc[0, emptyCol] = None
#df_result.loc[0, 'date'] = owi.getDateFromFname(filename)
#df_result.loc[0, 'TAU'] = 0
#df_result.loc[0, 'TECH'] = 'SAR'
#df_result.loc[0, 'RMW'] = props['rmw'][eye_position_full[0], eye_position_full[1]] / 1.852
#df_result.loc[0, 'EYE'] = props['eye'][eye_position_full[0], eye_position_full[1]] / 1.852 * 2
#df_result.loc[0, 'lat'] = eye_position_ll.y
#df_result.loc[0, 'lon'] = eye_position_ll.x
#df_result.loc[0, 'INITIALS'] = 'IFR'
#df_result = df_result.astype({"CY": int, "TAU": int}) # tmp, while types are bad in atcf table
#df_result = atcf.delinearize(df_result)
#atcf_outfile = os.path.join(outdir, "%s.dat" % basename)
#atcf.write(df_result, open(atcf_outfile, "w"))
#logger.info("wrote %s" % atcf_outfile)
def save_center_plot(path, sat_file, lon, lat, windspd_eye_masked, cm_cyc, lons_eye_shape, lats_eye_shape, lon_eye,
                     lat_eye, lon_eye_old, lat_eye_old, lon_eye_fg, lat_eye_fg, lon_eye_hwnd, lat_eye_hwnd):
    """Save a static matplotlib control plot of the candidate centers.

    Plots the masked wind-speed field, the eye shape and the four candidate
    centers (green = final center, red = 'old' center, yellow = high-wind
    barycenter, blue = first guess -- presumably; all four share the label
    'center', TODO confirm against create_control_plot), zoomed to +/- 2
    degrees around the final center, then writes '<sat_file>.png' in ``path``.
    """
    fig, ax = plt.subplots()
    ax.pcolormesh(lon, lat, windspd_eye_masked, cmap=cm_cyc)
    ax.plot(lons_eye_shape, lats_eye_shape, 'k', label='eye shape')
    ax.plot(lon_eye, lat_eye, 'og', label='center')
    ax.plot(lon_eye_old, lat_eye_old, 'or', label='center')
    ax.plot(lon_eye_hwnd, lat_eye_hwnd, 'oy', label='center')
    ax.plot(lon_eye_fg, lat_eye_fg, 'ob', label='center')
    ax.set_xlim((lon_eye - 2., lon_eye + 2.))
    ax.set_ylim((lat_eye - 2., lat_eye + 2.))
    plt.savefig(os.path.join(path, f'{os.path.basename(sat_file)}.png'))
    # Bug fix: release the figure; repeated calls otherwise accumulate open
    # figures and leak memory.
    plt.close(fig)
def contour_indices(arr, lon):
    """Return (row, col) index arrays tracing the contour of a masked area.

    NOTE(review): ``arr`` is modified in place (its masked cells are set to
    0); callers must not rely on the masked values afterwards.

    ``lon`` is only used for its shape, to clip indices that fall outside
    the grid.
    """
    arr[arr.mask] = 0
    # rasterio.features.shapes yields (geometry, value) pairs; only the
    # first shape is consumed here.
    shapes = next(rasterio.features.shapes(arr))
    coords = shapes[0]['coordinates'][0]
    # Polygon coordinates come back as (x=column, y=row) pairs.
    idx = np.array(coords, dtype='i4')
    # Deleting indices that are outside of lon/lat length
    sup = np.where(idx[:, 1] >= lon.shape[0])[0]
    idx0 = np.delete(idx[:, 1], sup, 0)
    idx1 = np.delete(idx[:, 0], sup, 0)
    sup = np.where(idx1 >= lon.shape[1])[0]
    idx0 = np.delete(idx0, sup, 0)
    idx1 = np.delete(idx1, sup, 0)
    # Drop points on row 0 -- presumably border artifacts from the zeroed
    # mask; TODO confirm.
    zeros = np.where(idx0 == 0)[0]
    idx0 = np.delete(idx0, zeros, 0)
    idx1 = np.delete(idx1, zeros, 0)
    return idx0, idx1
def create_control_plot(sat_file, lon, lat, lons_eye_shape, lats_eye_shape, lon_eye_final, lat_eye_final,
                        lon_eye_old, lat_eye_old, lon_eye_fg, lat_eye_fg, lon_eye_hwnd, lat_eye_hwnd,
                        i_hwnd, j_hwnd, lwind_mask, hwind_mask, cyclone_name, atcf_vmax, track_gdf):
    """Build the interactive control plot of the center-finding analysis.

    Overlays, on Esri imagery: the SAR wind-speed field, the interpolated
    ATCF track (points + path), the high/low wind research-area contours,
    the high-wind area samples, the eye shape and the four candidate
    centers (first guess, high-wind barycenter, low-wind barycenter, final
    eye-shape center).  Returns a holoviews/geoviews overlay.
    """
    sat_date = owi.getDateFromFname(os.path.basename(sat_file))
    # NOTE(review): cm.get_cmap is deprecated since matplotlib 3.7; revisit
    # when the pinned matplotlib version moves forward.
    high_cmap = cm.get_cmap("high_wind_speed")
    sat_file = owi.L2CtoLight(sat_file, mode="ll_gd")
    ds = xr.open_dataset(sat_file)
    min_max_lon = (float(ds["lon"].min()) - 1, float(ds["lon"].max()) + 1)
    min_max_lat = (float(ds["lat"].min()) - 1, float(ds["lat"].max()) + 1)
    vdim = hv.Dimension("wind_speed", range=(0, 80))
    gvds = gv.Dataset(ds["wind_speed"].squeeze(), vdims=[vdim])
    img = gvds.to(gv.Image, ['lon', 'lat']).opts(cmap=high_cmap, tools=["hover"], colorbar=True)
    fg_p = gv.Points([(lon_eye_fg, lat_eye_fg)], label="First guess (from interpolated ATCF track)")\
        .opts(size=10, color="green", line_color="black", line_width=2, tools=["hover"])
    hwnd_p = gv.Points([(lon_eye_hwnd, lat_eye_hwnd)], label="High wind speed barycenter")\
        .opts(size=10, color="#EAE8FF", line_color="black", line_width=2, tools=["hover"])
    found_p = gv.Points([(lon_eye_old, lat_eye_old)], label="Low wind speed barycenter").opts(size=10, color="#724CF9",
                                                                                              line_color="black",
                                                                                              line_width=2,
                                                                                              tools=["hover"])
    final_p = gv.Points([(lon_eye_final, lat_eye_final)], label="Center of eye shape")\
        .opts(size=10, color="black", marker="s", line_color="grey", line_width=2, tools=["hover"])
    hwnd_area = gv.Points(list(zip(lon[i_hwnd, j_hwnd], lat[i_hwnd, j_hwnd])), label="High wind speed area")\
        .opts(color="grey", alpha=0.2)
    eye = gv.Path((lons_eye_shape, lats_eye_shape), label="Eye shape")\
        .opts(line_width=4, line_color="black", show_legend=True)
    # contour_indices zeroes masked cells in place; do the same for the
    # low-wind mask before extracting its contour.
    lwind_mask[lwind_mask.mask] = 0
    idx0, idx1 = contour_indices(hwind_mask, lon)
    # Getting lon/lat for hwind contour
    lon_hwind = lon[idx0, idx1]
    lat_hwind = lat[idx0, idx1]
    high_contour = gv.Path((lon_hwind, lat_hwind), label="High wind speed research area")\
        .opts(line_width=3, show_legend=True, line_color="grey")
    idx0, idx1 = contour_indices(lwind_mask, lon)
    lon_lwind = lon[idx0, idx1]
    lat_lwind = lat[idx0, idx1]
    low_contour = gv.Path((lon_lwind, lat_lwind), label="Low wind speed research area").opts(line_width=3,
                                                                                            show_legend=True,
                                                                                            line_color="#EAE8FF")
    track_plot = gv.Points(track_gdf, vdims=["wind_speed (m/s)", "date"], label="Cyclone ATCF interpolated track").opts(
        color="wind_speed (m/s)", cmap=high_cmap, size=10, tools=["hover"], clim=(0, 80), line_color="black",
        line_width=1.5)
    track_path = copy.copy(track_gdf)
    # Converting geom Points to LineString segments joining consecutive track
    # points; the last point gets a degenerate segment and is dropped below.
    # Bug fix: Series.iteritems() was removed in pandas 2.0; .items() is the
    # supported equivalent.
    track_path["geometry"] = [
        LineString([val, track_gdf.loc[idx + 1, "geometry"]]) if idx < len(track_gdf["geometry"]) - 1 else LineString([val, val])
        for idx, val in track_gdf["geometry"].items()]
    # Removing last line because its geom is useless
    track_path = track_path.iloc[0:-1]
    track_path_plot = gv.Path(track_path, vdims=["wind_speed (m/s)", "date"],
                              label="Cyclone ATCF interpolated track").opts(
        color="black", tools=["hover"], line_width=3, show_legend=True)
    t_plot = hv.Overlay(track_path_plot * track_plot)
    p = (gts.EsriImagery() * img * t_plot * hwnd_area * high_contour * low_contour *
         eye * fg_p * hwnd_p * found_p * final_p).opts(tools=["hover", "save"], legend_position='right',
                                                       title=f"{cyclone_name}, ATCF VMAX: {atcf_vmax} m/s. "
                                                             f"Acquisition at {sat_date.strftime('%Y-%m-%d %H:%M:%S')}",
                                                       show_grid=True, active_tools=['wheel_zoom'])
    p = p.redim.range(Longitude=min_max_lon, Latitude=min_max_lat)
    if not matplot:
        p.opts(width=900, height=500)
    else:
        p.opts(aspect=1, fig_inches=10, fig_bounds=(0, 0, 1, 1))
    return p
def save_plot(sat_file, path, plot, prepend=""):
    """Persist a holoviews/geoviews plot as a standalone HTML file.

    The file is written to ``path`` and named
    ``<prepend>_<basename of sat_file>.html``; bokeh resources are inlined
    so the file works offline.
    """
    base_name = os.path.basename(sat_file)
    out_html = os.path.join(path, f"{prepend}_{base_name}.html")
    hv.save(plot, out_html, resources=INLINE)
def find_center_plot_save(path, sat_file, resolution, path_fix, api_data=None, track_gdf=None, debug=False,
def find_center_plot_save(path, sat_file, resolution, path_fix, netcdf_path, api_data=None, track_gdf=None, debug=False,
file_logging=True):
# Remove previously defined loggers
for handler in logging.root.handlers[:]:
......@@ -522,6 +244,13 @@ def find_center_plot_save(path, sat_file, resolution, path_fix, api_data=None, t
sat_mission=api_data.iloc[0]["mission"],
acq_time=pd.to_datetime(api_data.iloc[0]["acquisition_start_time"]))
save_netcdf(sat_file, netcdf_path, eye_dict["lon_eye"], eye_dict["lat_eye"],
eye_dict["lons_eye_shape"], eye_dict["lats_eye_shape"],
eye_dict["lon_eye_old"], eye_dict["lat_eye_old"],
eye_dict["lon_eye_fg"], eye_dict["lat_eye_fg"], eye_dict["lon_eye_hwnd"],
eye_dict["lat_eye_hwnd"], eye_dict["i_hwind"], eye_dict["j_hwind"], eye_dict["msk_lwind"],
eye_dict["msk_hwind"], api_data.iloc[0]["cyclone_name"],
api_data.iloc[0]["vmax (m/s)"], eye_dict["lon"], eye_dict["lat"], resolution)
#save_center_plot(path, sat_file, eye_dict["lon"], eye_dict["lat"], eye_dict["windspd_eye_masked"],
# eye_dict["cm_cyc"], eye_dict["lons_eye_shape"], eye_dict["lats_eye_shape"],
# eye_dict["lon_eye"], eye_dict["lat_eye"],
......
import xarray as xr
from tc_center.polar import to_polar, to_north_ref
from tc_center.plotting import contour_indices
import owi
from shapely.geometry import LineString, Point
import os
import logging
logger = logging.getLogger(__name__)
def research_contour(mask, lon, lat):
    """Return the (lon, lat) coordinate arrays of the contour of ``mask``.

    Thin wrapper around contour_indices: converts the (row, col) index
    arrays it returns into geographic coordinates on the lon/lat grids.
    """
    rows, cols = contour_indices(mask, lon)
    return lon[rows, cols], lat[rows, cols]
def save_netcdf(sat_file, path, lon_center, lat_center, lons_eye_shape, lats_eye_shape,
                lon_eye_lwind, lat_eye_lwind, lon_eye_fg, lat_eye_fg, lon_eye_hwind,
                lat_eye_hwind, i_hwind, j_hwind, msk_lwind, msk_hwind, storm_name,
                atcf_vmax, lon, lat, resolution):
    """Write the polar-projected analysis product to a netCDF file.

    The swath dataset is re-projected to polar coordinates centered on
    (lon_center, lat_center), rotated to a north reference, annotated with
    the analysis geometries as WKT attributes, and written to
    ``path/<sat_file basename with 'cm' -> 'ca'>.nc``.

    ``i_hwind``/``j_hwind`` are currently unused but kept for signature
    compatibility with the caller.
    """
    sat_file_sw = owi.L2CtoLight(sat_file, resolution=resolution, mode="sw")
    ds = xr.open_dataset(sat_file_sw)
    ds_s = ds.squeeze()
    ds_polar = to_polar(ds_s, lon_center, lat_center)
    ds_polar_north = to_north_ref(ds_polar)
    ds_polar_north.attrs["Eye shape"] = LineString(list(zip(lons_eye_shape, lats_eye_shape))).wkt
    ds_polar_north.attrs["Low wind area barycenter"] = Point(lon_eye_lwind, lat_eye_lwind).wkt
    ds_polar_north.attrs["High wind area barycenter"] = Point(lon_eye_hwind, lat_eye_hwind).wkt
    ds_polar_north.attrs["Interpolated ATCF track point"] = Point(lon_eye_fg, lat_eye_fg).wkt
    ds_polar_north.attrs["Storm name"] = storm_name
    ds_polar_north.attrs["ATCF VMAX"] = float(atcf_vmax)
    lon_lwind, lat_lwind = research_contour(msk_lwind, lon, lat)
    ds_polar_north.attrs["Low wind research area"] = LineString(list(zip(lon_lwind, lat_lwind))).wkt
    lon_hwind, lat_hwind = research_contour(msk_hwind, lon, lat)
    # Bug fix: the original loop iterated range(len(lon_lwind)) here, so the
    # high-wind contour was truncated or raised IndexError whenever the two
    # contours had different lengths.
    ds_polar_north.attrs["High wind research area"] = LineString(list(zip(lon_hwind, lat_hwind))).wkt
    outfile = os.path.join(path, f"{os.path.basename(sat_file).replace('.nc', '').replace('cm', 'ca')}.nc")
    ds_polar_north.to_netcdf(path=outfile)
    logger.info(f"Wrote netCDF product to {outfile}")
import os
import atcf
import pandas as pd
import owi
import logging
logger = logging.getLogger(__name__)
def save_fix_analysis(path, sat_file, atcf_filename, lon_eye, lat_eye, rmw, vmax, cyclone_name, sid, sat_mission,
                      sub_basin, acq_time):
    """Write the detected cyclone center as an ATCF "fix" record.

    Two files are produced in ``path``: ``atcf_filename``, a cumulative fix
    deck merged with any pre-existing one (deduplicated on "start time"),
    and ``<sat_file basename>.dat`` holding only this acquisition's fix.

    ``rmw``, ``vmax`` and ``cyclone_name`` are currently unused but kept so
    the signature matches the callers.
    """
    atcf_outfile = os.path.join(path, atcf_filename)
    sar_outfile = os.path.join(path, f"{os.path.basename(sat_file).replace('.nc', '')}.dat")
    fix = pd.DataFrame()
    # Pre-create every column of the ATCF fix format so atcf.write gets a
    # complete record even for fields deliberately left empty.
    for emptyCol in ["BASIN", "CY", "date", "fix format", "fix type", "center/intensity", "flagged indicator", "lat", "lon",
                     "height of ob", "posit confidence", "wind speed", "confidence", "pressure", "pressure confidence",
                     "pressure derivation", "rad", "windcode", "rad1", "rad2", "rad3", "rad4",
                     "radmod1", "radmod2", "radmod3", "radmod4", "radii confidence", "MRD", "eye", "subregion",
                     "fix site", "initials", "analyst initials", "start time", "end time", "distance to nearest data", "SST",
                     "observation sources", "comments"]:
        fix.loc[0, emptyCol] = None
    stormid = sid[2:4]  # e.g. "09" from "al092020"
    basin = sid[0:2]  # e.g. "al"
    mis_to_type = {"SENTINEL-1 A": "SEN1", "SENTINEL-1 B": "SEN1", "RADARSAT-2": "RS2"}
    # NI ok, SI ok, SP ok, NWP ok, NEP ok, NA ok, WA ok, EA ok, AS ok, BB ok, CS ok, GM ok
    # It is ok to set North Indian ocean to A (=arabian sea) because the north indian ocean is fully covered by sub basin
    # so "NI" should never be output by the API in the sub_basin field.
    sub_basin_subregion = {"AS": "A", "BB": "B", "CP": "C", "NEP": "E", "NA": "L", "SP": "P", "SI": "S", "NWP": "W",
                           "GM": "L", "CS": "L", "NI": "A", "WA": "S", "EA": "P"}
    fix.loc[0, 'BASIN'] = basin.upper()
    fix.loc[0, 'CY'] = str(stormid).zfill(2)
    fix.loc[0, 'date'] = owi.getDateFromFname(os.path.basename(sat_file)).strftime("%Y%m%d%H%M")
    fix.loc[0, "fix format"] = 70
    fix.loc[0, "fix type"] = mis_to_type[sat_mission]
    fix.loc[0, "center/intensity"] = "C"
    fix.loc[0, 'lat'] = lat_eye
    fix.loc[0, 'lon'] = lon_eye
    fix.loc[0, "height of ob"] = "10"
    fix.loc[0, "subregion"] = sub_basin_subregion[sub_basin]
    fix.loc[0, "fix site"] = "IFR"
    fix.loc[0, "initials"] = "SAR"
    fix.loc[0, "start time"] = acq_time.strftime("%Y%m%d%H%M")
    fix.loc[0, "end time"] = acq_time.strftime("%Y%m%d%H%M")
    fix.loc[0, "distance to nearest data"] = ""
    fix.loc[0, "SST"] = ""
    fix.loc[0, "observation sources"] = sat_mission
    fix.loc[0, "comments"] = "Synthetic Aperture Radar 3KM Wind Speed Analysis"
    if os.path.isfile(atcf_outfile):
        logger.info(f"ATCF output file exists ({atcf_outfile}). Reading and merging.")
        existing_fix = atcf.read(atcf_outfile, model="fix")
        # Normalize the datetime columns back to strings so merged rows
        # compare equal in drop_duplicates below.
        for col in ("date", "start time", "end time"):
            existing_fix[col] = existing_fix[col].apply(lambda x: x.strftime("%Y%m%d%H%M"))
        # Bug fix: DataFrame.append was removed in pandas 2.0; pd.concat is
        # the supported equivalent.
        to_save = pd.concat([existing_fix, fix.iloc[[0]]], ignore_index=True)
        to_save = to_save.drop_duplicates(subset=["start time"], keep="last")
        to_save["CY"] = to_save["CY"].apply(lambda x: str(x).zfill(2))
    else:
        to_save = fix
    df_result = to_save
    # Use context managers so the output handles are closed deterministically
    # (the original leaked the file objects returned by open()).
    with open(atcf_outfile, "w") as f:
        atcf.write(df_result, f, model="fix")
    logger.info("wrote %s" % atcf_outfile)
    with open(sar_outfile, "w") as f:
        atcf.write(fix, f, model="fix")
    logger.info("wrote %s" % sar_outfile)
def save_fix(path, sat_file, atcf_filename, lon_eye, lat_eye, rmw, vmax, cyclone_name, sid):
    """Write the detected cyclone center as an ATCF deck record.

    Produces ``atcf_filename`` in ``path`` (merged with any existing deck,
    deduplicated on "userdata1", i.e. the SAR filename) plus a
    per-acquisition ``<sat_file basename>.dat``.

    ``rmw`` and ``vmax`` are currently unused (the corresponding columns are
    deliberately left empty) but kept for signature compatibility.
    """
    atcf_outfile = os.path.join(path, atcf_filename)
    sar_outfile = os.path.join(path, f"{os.path.basename(sat_file).replace('.nc', '')}.dat")
    fix = pd.DataFrame()
    # Pre-create every deck column so atcf.write gets a complete record.
    for emptyCol in ['VMAX', 'MSLP', 'RAD_34_NEQ', 'RAD_34_SEQ', 'RAD_34_SWQ',
                     'RAD_34_NWQ', 'RAD_50_NEQ', 'RAD_50_SEQ', 'RAD_50_SWQ', 'RAD_50_NWQ',
                     'RAD_64_NEQ', 'RAD_64_SEQ', 'RAD_64_SWQ', 'RAD_64_NWQ', 'POUTER',
                     'ROUTER', 'RMW', 'GUSTS', 'EYE', 'DIR', 'SPEED', 'DEPTH', 'SEAS_12_NEQ', 'SEAS_12_SEQ',
                     'SEAS_12_SWQ', 'SEAS_12_NWQ', 'STORMNAME', 'USERDEFINED1', 'userdata1',
                     'USERDEFINED2', 'userdata2', 'USERDEFINED3', 'userdata3',
                     'USERDEFINED4', 'userdata4', 'USERDEFINED5', 'userdata5']:
        fix.loc[0, emptyCol] = None
    stormid = sid[2:4]  # e.g. "09" from "al092020"
    basin = sid[0:2]  # e.g. "al"
    fix.loc[0, 'date'] = owi.getDateFromFname(os.path.basename(sat_file))
    fix.loc[0, 'TAU'] = 0
    fix.loc[0, 'TECH'] = 'SAR'
    fix.loc[0, 'VMAX'] = None  # vmax
    fix.loc[0, 'RMW'] = None  # props['rmw'][eye_position_full[0], eye_position_full[1]] / 1.852
    fix.loc[0, 'EYE'] = None  # props['eye'][eye_position_full[0], eye_position_full[1]] / 1.852 * 2
    fix.loc[0, 'lat'] = lat_eye
    fix.loc[0, 'lon'] = lon_eye
    fix.loc[0, 'INITIALS'] = 'IFR'
    fix.loc[0, 'STORMNAME'] = cyclone_name.upper()
    fix.loc[0, 'CY'] = str(stormid).zfill(2)
    fix.loc[0, 'BASIN'] = basin.upper()
    fix.loc[0, 'USERDEFINED1'] = "SAR file"
    fix.loc[0, 'userdata1'] = os.path.basename(sat_file)
    if os.path.isfile(atcf_outfile):
        logger.info(f"ATCF output file exists ({atcf_outfile}). Reading and merging.")
        existing_fix = atcf.read(atcf_outfile)
        existing_fix = atcf.linearize(existing_fix)
        # Bug fix: DataFrame.append was removed in pandas 2.0; pd.concat is
        # the supported equivalent.
        to_save = pd.concat([existing_fix, fix.iloc[[0]]], ignore_index=True)
        to_save = to_save.drop_duplicates(subset=["userdata1"], keep="last")
        to_save["CY"] = to_save["CY"].apply(lambda x: str(x).zfill(2))
    else:
        to_save = fix
    df_result = atcf.delinearize(to_save)
    # Close output handles deterministically (the original leaked them).
    with open(atcf_outfile, "w") as f:
        atcf.write(df_result, f)
    logger.info("wrote %s" % atcf_outfile)
    fix = atcf.delinearize(fix)
    with open(sar_outfile, "w") as f:
        atcf.write(fix, f)
    # Bug fix: the original logged atcf_outfile here although sar_outfile
    # was the file just written.
    logger.info("wrote %s" % sar_outfile)
import numpy as np
import rasterio
import owi
import os
import xarray as xr
import holoviews as hv
import geoviews as gv
import copy
from matplotlib import pyplot as plt
from shapely.geometry import LineString
import geoviews.tile_sources as gts
from bokeh.resources import INLINE
import matplotlib.cm as cm
import colormap_ext
# Backend toggle for holoviews rendering: static matplotlib output when True,
# interactive bokeh output (the default used here) when False.
matplot = False
if matplot:
    hv.extension('matplotlib')
else:
    hv.extension('bokeh')
def contour_indices(arr, lon):
    """Return (row, col) index arrays tracing the contour of a masked area.

    NOTE(review): ``arr`` is modified in place (its masked cells are set to
    0); callers must not rely on the masked values afterwards.

    ``lon`` is only used for its shape, to clip indices that fall outside
    the grid.
    """
    arr[arr.mask] = 0
    # rasterio.features.shapes yields (geometry, value) pairs; only the
    # first shape is consumed here.
    shapes = next(rasterio.features.shapes(arr))
    coords = shapes[0]['coordinates'][0]
    # Polygon coordinates come back as (x=column, y=row) pairs.
    idx = np.array(coords, dtype='i4')
    # Deleting indices that are outside of lon/lat length
    sup = np.where(idx[:, 1] >= lon.shape[0])[0]
    idx0 = np.delete(idx[:, 1], sup, 0)
    idx1 = np.delete(idx[:, 0], sup, 0)
    sup = np.where(idx1 >= lon.shape[1])[0]
    idx0 = np.delete(idx0, sup, 0)
    idx1 = np.delete(idx1, sup, 0)
    # Drop points on row 0 -- presumably border artifacts from the zeroed
    # mask; TODO confirm.
    zeros = np.where(idx0 == 0)[0]
    idx0 = np.delete(idx0, zeros, 0)
    idx1 = np.delete(idx1, zeros, 0)
    return idx0, idx1
def create_control_plot(sat_file, lon, lat, lons_eye_shape, lats_eye_shape, lon_eye_final, lat_eye_final,