Commit be4c5a61 authored by CEVAER's avatar CEVAER

Refactored variable names

parent 9f605ed4
......@@ -21,23 +21,23 @@ logger.setLevel(logging.INFO)
def extract_date_from_filename(filename):
    """Extract the acquisition date encoded in a SMAP filename.

    The date is taken from the three consecutive underscore-separated
    fields at positions 4-6 of the name (year, month, day) — presumably
    the RSS SMAP daily naming scheme, e.g.
    "RSS_smap_wind_daily_2020_05_01_v01.0.nc" — TODO confirm against the
    actual input files.

    Parameters
    ----------
    filename : str
        Base name of the SMAP file (underscore-separated fields).

    Returns
    -------
    datetime.datetime
        Naive datetime at midnight of the encoded day.

    Raises
    ------
    ValueError
        If fields 4-6 do not parse as "%Y-%m-%d".
    """
    date_part = "-".join(filename.split("_")[4:7])
    return datetime.datetime.strptime(date_part, "%Y-%m-%d")
def get_track_points_from_database(session, smap_date):
    """Query all cyclone track points falling on the day of ``smap_date``.

    Parameters
    ----------
    session : sqlalchemy.orm.Session
        Open database session.
    smap_date : datetime.datetime
        Start of the day (midnight) covered by the SMAP file.

    Returns
    -------
    list
        Rows of ``(SimpleTrack, lon, lat)`` where lon/lat are extracted
        from the track geometry with PostGIS ``ST_X``/``ST_Y``.
    """
    # NOTE(review): the scraped diff lost the column expression in the
    # filter (it read "and_( >= smap_Date, ...)"). SimpleTrack.date is the
    # most plausible column given the one-day window — confirm against
    # repository history before relying on this reconstruction.
    req_tracks = session.query(SimpleTrack,
                               func.ST_X(cast(SimpleTrack.geom, Geometry)).label("lon"),
                               func.ST_Y(cast(SimpleTrack.geom, Geometry)).label("lat")) \
        .filter(and_(SimpleTrack.date >= smap_date,
                     SimpleTrack.date < smap_date + datetime.timedelta(days=1))) \
        .group_by(SimpleTrack).all()
    return req_tracks
# NOTE(review): this region is scraped GitLab diff residue — pre-refactor
# (camelCase) and post-refactor (snake_case) lines are interleaved, the
# indentation was stripped, and the "@@ -50,50 +50,52" hunk header below
# elides ~40 lines of the body (including the latIndex computation and the
# construction of `df`). Code lines are preserved byte-for-byte here; only
# comments were added. Recover the real function from repository history.
def get_colocated_track_points(dataset, trackPoints, fileDate, deg_delta=3, time_delta=60):
trackPointOffsets = []
for trackPoint in trackPoints:
logger.debug(f"Track point : {trackPoint}")
sel = dataset.sel(lon=trackPoint["lon"], lat=trackPoint["lat"], method="nearest")
# Post-refactor version of the signature and loop head above: for each
# track point, select the nearest SMAP grid cell by lon/lat.
def get_colocated_track_points(dataset, track_points, file_date, deg_delta=3, time_delta=60):
track_point_offsets = []
for track_point in track_points:
logger.debug(f"Track point : {track_point}")
sel = dataset.sel(lon=track_point["lon"], lat=track_point["lat"], method="nearest")
logger.debug(f"Point find : lon: {sel['lon'].values}, lat: {sel['lat'].values}, minute: {sel['minute'].values}")
# Index of the selected longitude in the dataset's lon coordinate.
lonIndex = np.where(dataset["lon"].data == sel["lon"].data)[0][0]
......@@ -50,50 +50,52 @@ def get_colocated_track_points(dataset, trackPoints, fileDate, deg_delta=3, time
# The time offset is stored with all the track point data to ease later processing
for node in df.index:
if not pd.isna(df["minute"][node]):
pointDate = fileDate + df["minute"][node]
timeOffset = abs(trackPoint["date"] - pointDate)
logger.debug(f" Data point date: {pointDate}, Track point date: {trackPoint['date']},"
f" Time offset: {timeOffset}")
# Post-refactor: absolute time offset between the SMAP data point
# (file date + per-cell "minute" offset) and the track point date.
point_date = file_date + df["minute"][node]
time_offset = abs(track_point["date"] - point_date)
logger.debug(f" Data point date: {point_date}, Track point date: {track_point['date']},"
f" Time offset: {time_offset}")
# If this is the first node processed OR that the previous nodes had NAN times OR that previous nodes
# offset times are greater than this one
if node == 0 or not "timeOffset" in trackPoint or trackPoint["timeOffset"] > timeOffset:
trackPoint["timeOffset"] = timeOffset
trackPoint["node"] = node
trackPoint["lonIndex"] = lonIndex
trackPoint["latIndex"] = latIndex
# Post-refactor equivalent of the best-node selection above: remember
# the node with the smallest time offset (latIndex comes from the
# elided section of the diff).
if node == 0 or not "time_offset" in track_point or track_point["time_offset"] > time_offset:
track_point["time_offset"] = time_offset
track_point["node"] = node
track_point["lonIndex"] = lonIndex
track_point["latIndex"] = latIndex
if "timeOffset" in trackPoint:
if "time_offset" in track_point:
keptTrackPoints = {}
kept_track_points = {}
# Getting the best track point for each sid (storm id).
# One SMAP file can contain several cyclones acquisitions so that is why we are doing this filtering per sid
for trackPoint in trackPointOffsets:
for track_point in track_point_offsets:
# TODO replace timedelta with database track sampling time
if trackPoint["timeOffset"] <= datetime.timedelta(minutes=7, seconds=30):
if trackPoint["sid"] not in keptTrackPoints:
keptTrackPoints[trackPoint["sid"]] = trackPoint
elif keptTrackPoints[trackPoint["sid"]]["timeOffset"] > trackPoint["timeOffset"]:
keptTrackPoints[trackPoint["sid"]] = trackPoint
# Post-refactor: keep a track point only when its offset is within
# 7 min 30 s, and keep the smallest offset per storm id (sid).
if track_point["time_offset"] <= datetime.timedelta(minutes=7, seconds=30):
if track_point["sid"] not in kept_track_points:
kept_track_points[track_point["sid"]] = track_point
elif kept_track_points[track_point["sid"]]["time_offset"] > track_point["time_offset"]:
kept_track_points[track_point["sid"]] = track_point
return keptTrackPoints
return kept_track_points
#def extract
def process_smap_file(session, file):
    """Colocate database cyclone track points with one SMAP data file.

    Loads the track points recorded on the file's acquisition day, keeps
    only the fields needed downstream, opens the SMAP dataset and selects
    the best colocated track point per storm.

    Parameters
    ----------
    session : sqlalchemy.orm.Session
        Open database session (consumed by get_track_points_from_database).
    file : str
        Path to the SMAP NetCDF file; its basename encodes the date.
    """
    logger.debug(f"Processing {file}...")
    filename = os.path.basename(file)
    file_date = extract_date_from_filename(filename)
    logger.debug(f"File date {file_date}")
    track_points = get_track_points_from_database(session, file_date)
    logger.debug(f"Number of track point found : {len(track_points)}")
    # Reduce each DB row to the fields used for colocation. shape360()
    # presumably wraps longitudes to the dataset's [0, 360) convention —
    # verify against its definition.
    track_points = [{"sid": track_point[0].sid, "lon": shape360(track_point.lon, 0)[0],
                     "date": track_point[0].date} for track_point in track_points]
    dataset = xarray.open_dataset(file)
    kept_track_points = get_colocated_track_points(dataset, track_points, file_date)
    # NOTE(review): in the scraped diff this log line was fused onto the
    # previous statement and its beginning (and the "{...}" placeholder
    # after "For file") was lost; reconstructed as a debug log with the
    # filename — confirm the exact original against repository history.
    logger.debug(f"For file {filename} Kept track that will be used to extract SMAP data: {kept_track_points}")
# NOTE(review): scraped diff residue — the two "@@" hunk headers below elide
# the argument parsing (args.dbd) and the contents of the test-file list;
# old (smapTests) and new (smap_tests) lines are interleaved. Code lines are
# preserved byte-for-byte; only comments were added.
if __name__ == "__main__":
......@@ -115,7 +117,7 @@ if __name__ == "__main__":
# Connect to the database given on the command line and build a session
# factory, then run the colocation over each listed SMAP test file.
engine = create_engine(args.dbd)
Session = sessionmaker(bind=engine)
smapTests = [
smap_tests = [
......@@ -124,5 +126,5 @@ if __name__ == "__main__":
for f in smapTests:
for f in smap_tests:
process_smap_file(Session(), f)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment