-
Notifications
You must be signed in to change notification settings - Fork 3
/
Copy pathNMSIM_DENA_Flight_Tracks.py
888 lines (623 loc) · 35.7 KB
/
NMSIM_DENA_Flight_Tracks.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
#-----------------------------------------------------------------------------#
# NMSIM_DENA_Flight_Tracks.py
#
# NPS Natural Sounds Program
#
# This module is used to initialize a workspace for analyzing
# the acoustic properties of overflights over Denali. These functions
# rely on GPS points from the DENA database as an input. Their intention
# is to produce a model using NMSIM. Given temporal correspondence, functions
# are also provided to compare modelled events with the acoustic record.
#
# In the future, we'd like to start using one (or both) of the acoustic
# descriptions to actually summarize and/or compare impacts.
#
# History:
# D. Halyn Betchkal -- Created
#
#-----------------------------------------------------------------------------#
# ================ Import Libraries =======================
# some very standard libraries
import sys
import datetime as dt
import os
import re
import glob
import subprocess
import numpy as np
import warnings
warnings.simplefilter(action='ignore', category=FutureWarning) # pandas has some verbose deprecation warnings
import pandas as pd
from functools import partial
pd.options.display.float_format = '{:.5f}'.format
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
from itertools import islice
# geoprocessing libraries
import fiona
from fiona.crs import from_epsg
import pyproj
import geopandas as gpd
from shapely.ops import transform
from shapely.geometry import mapping, Point, Polygon
# We also need two specialized NPS libraries: `iyore` and `soundDB` (which relies on `iyore` so is imported second)
# we expect them in the same directory as this repository
try:
sys.path.append(os.path.join(os.path.dirname(os.getcwd()), "iyore"))
import iyore
except ModuleNotFoundError:
print("Can't find library `iyore`, please clone the repository from https://github.com/nationalparkservice/iyore to",
os.path.dirname(os.getcwd()), "or install using pip")
try:
sys.path.append(os.path.join(os.path.dirname(os.getcwd()), "soundDB"))
from soundDB import *
except ModuleNotFoundError:
print("Can't find library `soundDB`, please clone the repository from https://github.com/gjoseph92/soundDB to",
os.path.dirname(os.getcwd()))
# ============ Set up a few paths and connections ===============
# DENA RDS computer
RDS = r"\\inpdenards\overflights"
try:
sys.path.append(os.path.join(RDS, "scripts"))
from query_tracks import query_tracks, get_mask_wkt
except:
print("While importing `query_tracks` encountered an error.")
# DENA Render computer (and an iyore dataset)
Render = r"\\inpdenarender\E\Sound Data"
archive = iyore.Dataset(Render)
# =========================== Define functions =======================================
def get_utm_zone(project_dir):
    """
    Glean the UTM zone that was saved as a suffix to the elevation file's name.
    This assumes that the elevation file was clipped using the NSNSD ArcToolbox
    packaged with this tool.
    """
    # the ArcPro tool will figure out the UTM zone associated with the western extent of the file
    # (this is the native UTM zone of the NMSIM project)
    elevation_pattern = project_dir + os.sep + r"Input_Data\01_ELEVATION\*.flt"
    elevation_name = os.path.basename(glob.glob(elevation_pattern)[0])
    # the filename ends "..._UTM<zone>.flt"; strip the "UTM" prefix and ".flt" suffix
    zone_token = elevation_name.split("_")[-1]
    return int(zone_token[3:-4])
def climb_angle(v):
    """
    Compute the 'climb angle' of a vector
    A = 𝑛•𝑏=|𝑛||𝑏|𝑠𝑖𝑛(𝜃)
    """
    # unit normal perpendicular to the xy plane
    vertical = np.array([0, 0, 1])
    # sin(angle) between the vector and the horizontal plane
    sine_of_angle = np.dot(vertical, v) / (np.linalg.norm(vertical) * np.linalg.norm(v))
    return np.degrees(np.arcsin(sine_of_angle))
def point_buffer(lon, lat, km):
    """
    Given a radius in kilometers, create a circular buffer around a point.

    Inputs
    ------
    lon (float): longitude of the point (expected input as D.d, WGS84)
    lat (float): latitude of the point (expected input as D.d, WGS84)
    km (float): radius of the buffer in kilometers

    Returns
    -------
    buf (geopandas GeoDataFrame): a circular buffer around the point (in WGS84)
    """
    # Azimuthal equidistant projection centered on the point: distances from the
    # center are true, so a planar buffer there is a geodesic circle.
    # (An unused `wgs84 = pyproj.Proj('epsg:4326')` local was removed; the
    # transformers below reference the CRS by its EPSG string directly.)
    aeqd_formatter = '+proj=aeqd +lat_0={lat} +lon_0={lon} +x_0=0 +y_0=0'
    aeqd = pyproj.Proj(aeqd_formatter.format(lat=lat, lon=lon))
    # set up two transformation objects (WGS84 -> aeqd, and aeqd -> WGS84)
    projector = pyproj.Transformer.from_proj('EPSG:4326', aeqd)
    reprojector = pyproj.Transformer.from_proj(aeqd, 'EPSG:4326', always_xy=True)
    # project the site coordinate into aeqd
    # NOTE: without always_xy, EPSG:4326 axis order is (lat, lon) - hence the argument order
    long_m, lat_m = projector.transform(lat, lon)
    # buffer using a radius in meters
    buf_m = Point(long_m, lat_m).buffer(km * 1000)  # distance in metres
    # convert the polygon from aeqd back into wgs84
    # note that this uses both `shapely` and `pyproj`
    buf_poly = transform(reprojector.transform, buf_m)  # apply projection
    # we'll return a geopandas GeoDataFrame (with crs set) instead of the Shapely Polygon
    buf = gpd.GeoDataFrame(geometry=gpd.GeoSeries(buf_poly))
    buf.crs = "EPSG:4326"
    return buf
def interpolate_heading(start_heading, end_heading, num_points):
    """
    Because heading is periodic, in many cases we cannot use
    simple linear interpolation to correctly space out values.

    Inputs
    ------
    start_heading (float): the heading of the first point
    end_heading (float): the heading of the last point
    num_points (int): the number of interpolated points desired between the first and last

    Returns
    -------
    headings (numpy array): an inclusive list of interpolated headings
    """
    # does the shortest angular path cross the 0/360 discontinuity?
    if np.abs(end_heading - start_heading) > 180:
        hi = np.maximum(start_heading, end_heading)
        lo = np.minimum(start_heading, end_heading)
        # lift the smaller value by a full turn so the span becomes linear,
        # then wrap back into [0, 360) -- this can reverse the sequence
        headings = np.linspace(hi, lo + 360, num_points) % 360
        # restore the original direction if the wrap flipped it
        if headings[0] != start_heading:
            headings = headings[::-1]
        return headings
    # no wrap needed: this is standard linear interpolation
    return np.linspace(start_heading, end_heading, num_points)
def create_NMSIM_site_file(project_dir, unit, site, long_utm, lat_utm, height):
    '''
    Create an NMSIM site file for a given NPS monitoring deployment.

    Inputs
    ------
    project_dir (str): a canonical NMSIM project directory
    unit (str): 4-character NPS Alpha Code, e.g. "BITH", "YUCH"
    site (str): alpha-numeric acoustic monitoring site code, e.g., "002", "TRLA"
    long_utm (float): longitude in meters for the NMSIM project's UTM zone
    lat_utm (float): latitude in meters for the NMSIM project's UTM zone
    height (float): microphone height in meters

    Returns
    -------
    None
    '''
    # the full path to the eventual NMSIM site file
    sit_path = project_dir + os.sep + r"Input_Data\05_SITES" + os.sep + unit + site + ".sit"
    # open a file and write the NMSIM-formatted records to it
    with open(sit_path, 'w') as sit:
        sit.write(" 0\n")
        sit.write(" 1\n")
        # fixed-width record: easting, northing, height, site label
        sit.write("{0:19.0f}.{1:9.0f}.{2:10.5f} {3:20}\n".format(long_utm, lat_utm, height, unit + site))
        # final line points at the project's elevation raster
        sit.write(glob.glob(project_dir + os.sep + r"Input_Data\01_ELEVATION\*.flt")[0] + "\n")
def tracks_within(ds, site, year, search_within_km = 25, climb_ang_max = 20, aircraft_specs=False, NMSIM_proj_dir=None, decouple=False):
    '''
    Given a microphone location, load proximal GPS data from the Denali Overflights Database.
    Save each flight as an NMSIM trajectory file. Optionally, scrape aircraft statistics from the FAA website.

    Inputs
    ------
    ds (iyore Dataset): acoustic archive used to look up NVSPL records for the deployment
    site (str): alpha-numeric acoustic monitoring site code, e.g., "002", "TRLA"
    year (int): year of the acoustic monitoring deployment
    search_within_km (float): [default of 25 km]
    climb_ang_max (float): the maximum climb angle one expects of aircraft [default of 20°]
    aircraft_specs (bool): should additional aircraft make/model/powerplant data be scraped from the FAA website? [default False]
    NMSIM_proj_dir (str, path): a user-specified output for trajectories - if None, trajectories are saved to a site's "Computational Outputs" folder.
        NOTE(review): the UTM zone lookup and the .sit file write both dereference
        NMSIM_proj_dir on the non-None path only; passing None leaves `zone`
        unassigned and hands None to `create_NMSIM_site_file` - confirm before
        relying on the None branch.
    decouple (bool): should all nearby tracks be loaded, not just those that correspond in time
                     with the microphone deployment? [default False]

    Returns
    -------
    tracks (geopandas GeoDataFrame): a spatial table of GPS points corresponding to the site and year specified.
    '''
    unit = "DENA" # it always will be for this tool

    # set up the output folders
    if(NMSIM_proj_dir is not None):
        # check that the folder for NMSIM .trj outputs exists, if not make it
        trj_out = NMSIM_proj_dir + os.sep + r"Input_Data\03_TRAJECTORY"
        if not os.path.exists(trj_out):
            os.makedirs(trj_out)
        # check that the folder for NMSIM .sit outputs exists, if not make it
        sit_out = NMSIM_proj_dir + os.sep + r"Input_Data\05_SITES"
        if not os.path.exists(sit_out):
            os.makedirs(sit_out)
        # lookup the UTM zone using the elevation file (since this assumes we've set up the project already)
        zone = get_utm_zone(NMSIM_proj_dir)
    else:
        # a path to the site's computational outputs folder
        cOut = [e.path for e in ds.dataDir(unit=unit, site=site, year=year)][0] + \
               os.sep + r"02 ANALYSIS\Computational Outputs"
        # make a folder for NMSIM .trj outputs
        trj_out = cOut + os.sep + "NMSIM_trj"
        if not os.path.exists(trj_out):
            os.makedirs(trj_out)
        # make a folder for NMSIM .sit outputs
        # BUG FIX: this branch previously re-checked and re-created `trj_out`,
        # so the NMSIM_sit folder was never actually made
        sit_out = cOut + os.sep + "NMSIM_sit"
        if not os.path.exists(sit_out):
            os.makedirs(sit_out)

    # ===== first part; site coordinate wrangling =====================
    # load the metadata sheet
    metadata = pd.read_csv(r"\\inpdenafiles\sound\Complete_Metadata_AKR_2001-2021.txt",
                           delimiter="\t", encoding = "ISO-8859-1")
    # look up the site's coordinates in WGS84
    lat_in, long_in = metadata.loc[(metadata["code"] == site)&(metadata["year"] == year), "lat":"long"].values[0]
    # epsg codes for Alaskan UTM zones
    epsg_lookup = {1:'epsg:26901', 2:'epsg:26902', 3:'epsg:26903', 4:'epsg:26904', 5:'epsg:26905',
                   6:'epsg:26906', 7:'epsg:26907', 8:'epsg:26908', 9:'epsg:26909', 10:'epsg:26910'}
    # convert from D.d (WGS84) to meters (NAD83)
    projector = pyproj.Transformer.from_crs('epsg:4326', epsg_lookup[zone])
    # convert into NMSIM's coordinate system
    long, lat = projector.transform(lat_in, long_in)

    # ===== second part; mic height to feet, write NMSIM .sit file =====================
    # look up the site's microphone height (meters)
    height = metadata.loc[(metadata["code"] == site)&(metadata["year"] == year), "microphone_height"].values[0]
    print(unit+site+str(year)+":", "{0:.0f},".format(long), "{0:.0f}".format(lat), "- UTM zone", zone)
    print("\tmicrophone height", "{0:.2f} feet.".format(height*3.28084))
    # now write the microphone's position to an NMSIM .sit file
    create_NMSIM_site_file(NMSIM_proj_dir, unit, site, long, lat, height)

    # ===== third part; save mask file using the buffer radius of choice ===============
    # create the buffer polygon
    buf = point_buffer(long_in, lat_in, search_within_km)
    # plot the buffer within the park boundary
    base = buf.plot(color='white', edgecolor='black', zorder=-5)
    gpd.GeoSeries(Point(long_in, lat_in)).plot(ax=base, color="green")
    base.set_aspect(1.9)
    plt.show()

    # ===== fourth part; determine the date range of NVSPL files ===============
    # load the datetime of every NVSPL file
    NVSPL_dts = pd.Series([dt.datetime(year=int(e.year),
                                       month=int(e.month),
                                       day=int(e.day),
                                       hour=int(e.hour),
                                       minute=0,
                                       second=0) for e in ds.nvspl(unit=unit, site=site, year=year)])
    # everything should be in chronological order, but just in case...
    NVSPL_dts.sort_values(inplace=True, ascending=True)
    # retrieve the start/end bounds and convert back to YYYY-MM-DD strings
    start, end = (dt.datetime.strftime(d, "%Y-%m-%d") for d in [NVSPL_dts.iloc[0], NVSPL_dts.iloc[-1]])
    # this decouples model development from acoustic measurements
    # (but as an obvious consequence, invalidates the pairing functions)
    if(decouple):
        start = "2012-05-01"
        end = dt.datetime.strftime(dt.datetime.now(), "%Y-%m-%d")
    print("\n\tSearching from", start, "and ends", end, "\n")

    # load tracks from the database over a certain daterange, using the buffered site
    tracks = query_tracks(connection_txt=os.path.join(RDS, "config\connection_info.txt"),
                          start_date=start, end_date=end, mask=buf,
                          aircraft_info=aircraft_specs)

    # make a dataframe to hold distances and times
    # NOTE(review): indexed by np.unique(tracks["id"]) but written to below with
    # `flight_id` labels - confirm these columns hold the same identifiers
    closest_approaches = pd.DataFrame([], index=np.unique(tracks["id"]), columns=["closest_distance", "closest_time"])

    # process each unique flight track in sequence
    for f_id, data in tracks.groupby("flight_id"):
        if(len(data) > 1): # "one point does not a trajectory make"
            # double check that the data are sorted by time
            data = data.sort_values("ak_datetime")
            # convert the GPS points from wgs84 to the correct UTM zone for the NMSIM elevation file
            long_utm, lat_utm = projector.transform(data["latitude"].values, data["longitude"].values)
            # assign back into the dataframe for subsequent use
            data.loc[:, "long_UTM"] = long_utm
            data.loc[:, "lat_UTM"] = lat_utm
            # coordinates of each point (altitude converted from feet to meters)
            coords = np.array([[lo, la, e] for lo, la, e in zip(data["long_UTM"],
                                                                data["lat_UTM"],
                                                                0.3048*data["altitude_ft"])])
            # convert the coordinates to vectors
            V = np.diff(coords, axis=0)
            # compute the climb angle for each point:
            climb_angs = np.array([climb_angle(V[n]) for n in np.arange(len(V))])
            # if the climb angle exceeds the plausible maximum something strange is going on: reset to zero
            climb_angs[np.abs(climb_angs) > climb_ang_max] = 0
            # we don't know what the last point's angle should be, but NMSIM requires a value
            # just repeat the penultimate point's value
            climb_angs = np.append(climb_angs, climb_angs[-1])
            # assign the climb angles to the array
            data["ClimbAngle"] = climb_angs
            # NMSIM probably won't like nan... replace those with zero as well
            data["ClimbAngle"].fillna(0, inplace=True)

            # this is the start time of the track
            start = data["ak_datetime"].iloc[0]
            # truncate starting time to nearest hour to check against the NVSPL list
            check = start.replace(minute=0, second=0)
            # 1 if there is a match, 0 if not
            match_bool = NVSPL_dts.apply(lambda date: date in [check]).sum()

            if((match_bool == 0)&(not decouple)):
                # no acoustic record overlaps this flight; drop it
                tracks = tracks[tracks["flight_id"] != f_id]
                print("\t\t", "Flight starting", start, "has no matching acoustic record")
            elif((match_bool == 1)|(decouple)):
                if(decouple):
                    print("\t\t", "Flight starting", start, "has no matching acoustic record. Proceeding by user override.")
                # find the time at which the flight passes closest to the station
                site_coords = np.array([long, lat])
                GPSpoints_xy = np.array([data["long_UTM"], data["lat_UTM"]])
                # which point made the closest approach to the site? (km)
                min_distance = np.min(np.linalg.norm(site_coords - GPSpoints_xy.T, axis=1))/1000
                # keep track of the closest approach by id
                closest_approaches.loc[data["flight_id"].iloc[0], "closest_distance"] = min_distance
                # at what time did the closest approach occur?
                closest_time = data.iloc[np.argmin(np.linalg.norm(site_coords - GPSpoints_xy.T, axis=1))]['ak_datetime']
                # likewise keep track of the closest time
                closest_approaches.loc[data["flight_id"].iloc[0], "closest_time"] = closest_time
                print("\t\t", "#"+str(f_id), "expected closest at", closest_time, "{0:0.1f}km".format(min_distance))
                # create a time-elapsed column
                data["time_elapsed"] = (data["ak_datetime"] - data["ak_datetime"].min()).apply(lambda t: t.total_seconds())

                # we'll only save the trajectory if it's within the specified search radius!
                if(min_distance <= search_within_km):
                    # ======= densify the GPS points for NMSIM ========
                    print("\n\t\t\t", "Flight is within search distance. Attempting to densify",
                          data.shape[0], "points...")
                    new_points = gpd.GeoDataFrame([])
                    for row in data.itertuples():
                        try:
                            next_ind = data.index[np.argwhere(data.index == row.Index)+1][0][0]
                            # this represents time steps < 1 second
                            interpSteps = int(1.1*(data.loc[next_ind, "ak_datetime"] - row.ak_datetime).total_seconds())
                            print("trying for", interpSteps, "steps between points")
                            # interpolate the indices, longitudes, latitudes, and altitudes
                            # we don't need the first or last values - they're already in the dataframe
                            indi = np.linspace(row.Index, next_ind, interpSteps)[1:-1]
                            ti = np.linspace(row.time_elapsed, data.loc[next_ind, "time_elapsed"], interpSteps)[1:-1]
                            xi = np.linspace(row.longitude, data.loc[next_ind, "longitude"], interpSteps)[1:-1]
                            yi = np.linspace(row.latitude, data.loc[next_ind, "latitude"], interpSteps)[1:-1]
                            zi = np.linspace(row.altitude_ft, data.loc[next_ind, "altitude_ft"], interpSteps)[1:-1]
                            utm_xi = np.linspace(row.long_UTM, data.loc[next_ind, "long_UTM"], interpSteps)[1:-1]
                            utm_yi = np.linspace(row.lat_UTM, data.loc[next_ind, "lat_UTM"], interpSteps)[1:-1]
                            cai = np.linspace(row.ClimbAngle, data.loc[next_ind, "ClimbAngle"], interpSteps)[1:-1]
                            # heading is periodic, so it gets its own interpolator
                            hi = interpolate_heading(row.heading, data.loc[next_ind, "heading"], interpSteps)[1:-1]
                            vi = np.linspace(row.knots, data.loc[next_ind, "knots"], interpSteps)[1:-1]
                            # generate geometry objects for each new interpolated point
                            gi = [Point(xyz) for xyz in zip(xi, yi, zi)]
                            # create a dictionary of the interpolated values to their column
                            d = {'time_elapsed': ti,
                                 'longitude': xi,
                                 'latitude': yi,
                                 'altitude_ft': zi,
                                 'long_UTM': utm_xi,
                                 'lat_UTM': utm_yi,
                                 'ClimbAngle': cai,
                                 'heading': hi,
                                 'knots': vi,
                                 'geom': gi}
                            # turn the newly interpolated values into a GeoDataFrame
                            rowsi = gpd.GeoDataFrame(d, index=indi, crs="EPSG:4326")
                            # append to the track's overall new points
                            new_points = new_points.append(rowsi)
                        # there is no next index on the last row... pass through
                        except IndexError:
                            pass
                    # append the new points and sort by index (which is also by time)
                    data = data.append(new_points)
                    data = data.sort_index()
                    print("\t\t\t", "...trajectory now has",
                          data.shape[0], "points!\n")

                    # ======= write the trajectory file! ==============
                    print("\t\t\t", "Densification complete, writing trajectory file...")
                    # add N-number and begin time
                    start_time = dt.datetime.strftime(data["utc_datetime"].min(skipna=True), "%Y-%m-%d %H:%M:%S")
                    file_name_dt = dt.datetime.strftime(data["utc_datetime"].min(skipna=True), "_%Y%m%d_%H%M%S")
                    N_number = data["registration"].iloc[0]
                    # path to the specific .trj file to be written
                    trj_path = trj_out + os.sep + str(N_number) + str(file_name_dt) + ".trj"
                    with open(trj_path, 'w') as trajectory:
                        # write the header information
                        trajectory.write("Flight track trajectory variable description:\n")
                        trajectory.write(" time - time in seconds from the reference time\n")
                        trajectory.write(" Xpos - x coordinate (UTM)\n")
                        trajectory.write(" Ypos - y coordinate (UTM)\n")
                        trajectory.write(" UTM Zone "+str(zone)+"\n")
                        trajectory.write(" Zpos - z coordinate in meters MSL\n")
                        trajectory.write(" heading - aircraft compass bearing in degrees\n")
                        trajectory.write(" climbANG - aircraft climb angle in degrees\n")
                        trajectory.write(" vel - aircraft velocity in knots\n")
                        trajectory.write(" power - % engine power\n")
                        trajectory.write(" roll - bank angle (right wing down), degrees\n")
                        trajectory.write("FLIGHT " + str(N_number) + " beginning " + start_time +" UTC\n")
                        trajectory.write("TEMP. 59.0\n")
                        trajectory.write("Humid. 70.0\n")
                        trajectory.write("\n")
                        trajectory.write(" time(s) Xpos Ypos Zpos heading climbANG Vel power rol\n")
                        # now write the data section row by row
                        for ind, point in data.iterrows():
                            # fixed-width record; power and roll are constants (95%, 0°)
                            trajectory.write("{0:15.3f}".format(point["time_elapsed"]) + \
                                             "{0:15.3f}".format(point["long_UTM"]) + \
                                             "{0:15.3f}".format(point["lat_UTM"]) + \
                                             "{0:15.3f}".format(0.3048*point["altitude_ft"]) + \
                                             "{0:15.3f}".format(point["heading"]) + \
                                             "{0:15.3f}".format(point["ClimbAngle"]) + \
                                             "{0:15.3f}".format(point["knots"]) + \
                                             "{0:15.3f}".format(95) + \
                                             "{0:15.3f}".format(0) + "\n")
                    print("\t\t\t...finished writing .trj", "\n")
                    print("-----------------------------------------------------------------------------------------")
                # (closes the search-radius `if` above) the flight was not within the search radius...
                else:
                    tracks = tracks[tracks["flight_id"] != f_id] # ...drop the flight ID from the table
                    print("\t\t", "Flight starting", start, "was not within the search radius.")

    print(tracks.size)
    if(tracks.shape[0] <= 1):
        print("\nSorry, no tracks in the database coincide with this deployment.")
        tracks = gpd.GeoDataFrame([], columns=tracks.columns) # return an empty geodataframe with the original columns
        return tracks
    elif(tracks.shape[0] > 1):
        u = np.unique(tracks["id"])
        print("\nThere are", len(u), "tracks in the database which coincide with this deployment.")
        print("Identification numbers:", u)
        # iterate through each id number and add the closest approach information
        for track_id, flight in closest_approaches.iterrows():
            tracks.loc[tracks["flight_id"] == track_id, "closest_time"] = flight["closest_time"]
            tracks.loc[tracks["flight_id"] == track_id, "closest_distance"] = flight["closest_distance"]
        return tracks
def NMSIM_create_tis(project_dir, source_path, Nnumber=None, NMSIMpath=None):
    '''
    Create a site-based model run (.tis) using the NMSIM batch processor.

    Inputs
    ------
    project_dir (str, path): the location of a canonical NMSIM project directory, created with "Create_Base_Layers.py"
    source_path (str, path): the location of the relevant NMSIM noise source file (.src)
    Nnumber (str): [optional] restrict processing to trajectories matching one FAA registration number
    NMSIMpath (str, path): [optional] an alternate location of the program `Nord2000batch.exe`

    Returns
    -------
    None
    '''
    # ======= (1) define obvious, one-to-one project files ================
    elev_file = glob.glob(project_dir + os.sep + r"Input_Data\01_ELEVATION\*.flt")[0]
    # imped_file = project_dir + os.sep + "Input_Data\01_IMPEDANCE" + os.sep + "landcover.flt"
    imped_file = None
    trj_files = glob.glob(project_dir + os.sep + r"Input_Data\03_TRAJECTORY\*.trj")
    # eventually the batch file is going to want this
    tis_out_dir = project_dir + os.sep + r"Output_Data\TIG_TIS"

    # ======= (2) define less obvious project files - these still need thought! ================
    # site files need some thinking through... there COULD be more than one per study area
    # (it's quite project dependant)
    site_file = glob.glob(project_dir + os.sep + r"Input_Data\05_SITES\*.sit")[0]
    # strip out the FAA registration number
    # NOTE(review): assumes the .trj filename layout written by `tracks_within` - confirm
    registrations = [t.split("_")[-3][11:] for t in trj_files]
    # the .tis name preserves: reciever + source + time (roughly 'source : path : reciever')
    site_prefix = os.path.basename(site_file)[:-4]
    tis_files = [tis_out_dir + os.sep + site_prefix + "_" + os.path.basename(t)[:-4] for t in trj_files]
    trajectories = pd.DataFrame([registrations, trj_files, tis_files], index=["N_Number","TRJ_Path","TIS_Path"]).T

    # ======= (3) write the control + batch files for command line control of NMSIM ================
    # set up the two files we want to write
    control_file = project_dir + os.sep + "control.nms"
    batch_file = project_dir + os.sep + "batch.txt"
    # select the trajectories to process
    if(Nnumber is None):
        trj_to_process = trajectories
    else:
        trj_to_process = trajectories.loc[trajectories["N_Number"] == Nnumber, :]
    if(NMSIMpath is None):
        # NMSIM is packaged right along with these scripts
        # so by default we look for `Nord2000batch.exe` relative to this file
        this_file = os.path.abspath(__file__)
        one_dir_up = os.path.dirname(this_file)
        Nord = one_dir_up + os.sep + r"NMSIM\Nord2000batch.exe"
    else:
        Nord = NMSIMpath

    for meta, flight in trj_to_process.iterrows():
        # write the control file for this situation
        with open(control_file, 'w') as nms:
            nms.write(elev_file+"\n") # elevation path
            if(imped_file is not None):
                nms.write(imped_file+"\n") # impedance path
            else:
                nms.write("-\n")
            nms.write(site_file+"\n") # site path
            nms.write(flight["TRJ_Path"]+"\n")
            nms.write("-\n")
            nms.write("-\n")
            nms.write(source_path+"\n")
            nms.write("{0:11.4f} \n".format(500.0000))
            nms.write("-\n")
            nms.write("-")
        # write the batch file to create a site-based analysis
        with open(batch_file, 'w') as batch:
            batch.write("open\n")
            batch.write(control_file+"\n")
            batch.write("site\n")
            batch.write(flight["TIS_Path"]+"\n")
            batch.write("dbf: no\n")
            batch.write("hrs: 0\n")
            batch.write("min: 0\n")
            batch.write("sec: 0.0")
        # ======= (4) compute the theoretically observed trace on the site's microphone ================
        print(flight["TRJ_Path"]+"\n")
        process = subprocess.Popen([Nord, batch_file], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
        stdout, stderr = process.communicate()
        output_messages = stdout.decode("utf-8").split("\r\n")
        output_messages = [ out for out in output_messages if out.strip() != '' ]
        print("\tthe following lines are directly from NMSIM:")
        for s in output_messages+["\n"]:
            print("\t"+s)
        # slightly messier printing for error messages
        # BUG FIX: was `sterr` (a NameError). Note stderr is merged into stdout
        # above, so `stderr` is None here in practice; this branch is a safeguard.
        if(stderr is not None):
            for s in stderr.decode("utf-8").split("\r\n"):
                print(s.strip())
def pair_trj_to_tis_results(project_dir):
    '''
    Join a directory of .tis results created by NMSIM
    to the .trj files that created them.

    Inputs
    ------
    project_dir (str): the path to a canonical NPS-style NMSIM project directory

    Returns
    -------
    iterator (zip object): an iterator containing the paired .tis and .trj file paths
    '''
    # every successful NMSIM run leaves a '.tis' file behind
    successful_tis = glob.glob(project_dir + os.sep + "Output_Data\TIG_TIS\*.tis")
    trj_dir = project_dir + os.sep + "Input_Data\\03_TRAJECTORY"
    # the .tis basename embeds the originating .trj name after the site prefix
    # (assumed to be 9 characters - TODO confirm against `NMSIM_create_tis` output)
    trajectories = []
    for tis_path in successful_tis:
        trj_stem = os.path.basename(tis_path)[9:-4]
        trajectories.append(trj_dir + os.sep + trj_stem + ".trj")
    return zip(trajectories, successful_tis)
def tis_resampler(tis_path, dt_start, utc_offset=-8):
    '''
    Parse an NMSIM .tis results file into a 1-second, local-time spectrogram.

    Inputs
    ------
    tis_path (str): path to a .tis file written by the NMSIM batch processor
    dt_start (datetime): the start time of the modelled flight; assumed UTC,
        since `utc_offset` is added to localize it - TODO confirm against caller
    utc_offset (int): hours to shift to local time [default -8, i.e. Alaska]

    Returns
    -------
    clean_tis (pandas DataFrame): median 1/3-octave levels (dB) at 1-second
        resolution, indexed by localized datetime
    '''
    # read the data line-by-line
    # (islice caps the read at the header plus one full day of records)
    with open(tis_path) as f:
        content = list(islice(f, 18 + (3600*24)))
    # find the line index where the header ends
    splitBegin = content.index('---End File Header---\n')
    # take out the whitespace and two empty columns at either end
    # NOTE(review): the +10 skips sub-header lines between the sentinel and the
    # first data row - confirm this offset holds for all .tis files
    spectral_data = [re.split(r'\s+',c) for c in content[splitBegin+10:]]
    spectral_data = [d[1:-2] for d in spectral_data]
    # initalize a pandas dataframe using the raw spectral data and the expected column headers
    tis = pd.DataFrame(spectral_data, columns=["SP#","TIME","F","A","10", "12.5","15.8","20","25","31.5","40","50","63",
                                               "80","100","125","160","200","250","315","400","500","630","800","1000",
                                               "1250","1600","2000","2500","3150","4000","5000","6300","8000","10000",
                                               "12500"], dtype='float') #,"20000"
    # there's a weird text line at the end of the file (is this true for all .tis files?)
    tis.drop(tis.tail(1).index,inplace=True) # drop last n rows
    # these columns are stubborn: coerce them explicitly after the bulk cast
    tis["TIME"] = tis["TIME"].astype('float')
    tis["SP#"] = tis["SP#"].astype('float').apply(lambda f: int(f))
    tis["F"] = tis["F"].astype('int')
    # convert relevant columns to decibels (dB) from centibels (cB)
    tis.loc[:,'A':'12500'] *= 0.1
    # timedelta to adjust to local time
    utc_offset = dt.timedelta(hours=utc_offset)
    # reindex the dataframe to AKT: elapsed seconds -> absolute local datetimes
    tis.index = tis["TIME"].astype('float').apply(lambda t: dt_start + dt.timedelta(seconds=t) + utc_offset)
    # resample to match NVSPL time resolution (1 s), taking the median of each bin
    clean_tis = tis.sort_index().resample('1S').quantile(0.5)
    return clean_tis
def NVSPL_to_match_tis(ds, project_dir, startdate, clean_tis, trj, unit, site, year, utc_offset=-8, pad_length=5):
    '''
    Plot an NMSIM-modelled spectrogram against the measured NVSPL record for the
    same interval, save the comparison image, and return the measured slice.

    Inputs
    ------
    ds (iyore Dataset): the acoustic archive containing NVSPL records
    project_dir (str): canonical NMSIM project directory (comparison .png is saved to its Output_Data\IMAGES folder)
    startdate (datetime): model start time; assumed UTC - TODO confirm against caller
    clean_tis (pandas DataFrame): resampled model output from `tis_resampler`
    trj (str): path of the source .trj file (used for the plot title)
    unit (str), site (str), year (int): identifiers of the acoustic deployment
    utc_offset (int): hours to shift to local (Alaska) time [default -8]
    pad_length (int): minutes of measured data to include either side of the model [default 5]

    Returns
    -------
    event_SPL (pandas DataFrame): the measured SPL records spanning the modelled event (plus padding)
    '''
    # timedelta to adjust to local time
    utc_offset = dt.timedelta(hours=utc_offset)
    # convert startdate to Alaska Time
    ak_start = startdate + utc_offset
    print("Alaska start time", ak_start)
    # tidy up the TIS spectrogram by converting np.nan to -99.9
    # BUG FIX: previously `clean_tis.fillna(-99.9).values.T` discarded its result,
    # leaving NaNs in the data despite the comment; assign the filled frame
    clean_tis = clean_tis.fillna(-99.9)
    # we can only compare 1/3rd octave bands down to 12.5 Hz... drop the rest
    clean_tis = clean_tis.loc[:, ~clean_tis.columns.isin(["SP#", "TIME", "F", "A", "10"])]
    # load NVSPL for the day of the event
    nv = nvspl(ds,
               unit=unit,
               site=site,
               year=ak_start.year,
               month=str(ak_start.month).zfill(2),
               day=str(ak_start.day).zfill(2),
               hour=[str(h).zfill(2) for h in np.unique(clean_tis.index.hour.values)],
               columns=["H"+s.replace(".", "p") for s in clean_tis.columns]).combine()
    # if multiple hours, drop the heirarchical index
    if isinstance(nv.index, pd.MultiIndex):
        nv.index = nv.index.droplevel(0)
    # select the SPL data that corresponds
    pad = dt.timedelta(minutes=pad_length)
    # find the NVSPL data the specifically corresponds to the timing of the model
    event_SPL = nv.loc[clean_tis.index[0]-pad : clean_tis.index[-1]+pad,:]
    print("NVSPL shape:", event_SPL.shape)
    # pad the theoretical data as well (with the same -99.9 "silence" value)
    spect_pad = np.full((int(pad.total_seconds()), clean_tis.shape[1]), -99.9)
    theoretical = np.vstack((spect_pad, clean_tis))
    print("NMSIM shape:", theoretical.shape)

    fig, ax = plt.subplots(nrows=2, ncols=1, figsize=(18,5), sharex=True)
    # convert the NVSPL's nice datetime axis to numbers
    x_lims = mdates.date2num(event_SPL.index)
    ax[0].set_title("NMISIM results", loc="left")
    # NOTE(review): interpolation=None means "use the rcParam default", not "no
    # interpolation" - 'none' may have been intended; left as-is to preserve output
    ax[0].imshow(theoretical.T, aspect='auto', origin='lower',
                 extent=[x_lims[0], x_lims[-1], 0, event_SPL.shape[1]],
                 cmap='plasma', interpolation=None, vmin=-10, vmax=80, zorder=-5)
    ax[0].set_yticks(np.arange(event_SPL.shape[1])[::4])
    ax[0].set_yticklabels(event_SPL.columns.astype('float')[::4])
    # tell matplotlib that the numeric axis should be formatted as dates
    ax[0].xaxis_date()
    ax[0].xaxis.set_major_formatter(mdates.DateFormatter("%b-%d\n%H:%M")) # tidy them!
    ax[1].set_title("microphone measurement at "+unit+site, loc="left")
    im = ax[1].imshow(event_SPL.T, aspect='auto', origin='lower',
                      extent=[x_lims[0], x_lims[-1], 0, event_SPL.shape[1]],
                      cmap='plasma', interpolation=None, vmin=-10, vmax=80)
    # the same as for the first plot
    ax[1].set_yticks(np.arange(event_SPL.shape[1])[::4])
    ax[1].set_yticklabels(event_SPL.columns.astype('float')[::4])
    ax[1].xaxis_date()
    ax[1].xaxis.set_major_formatter(mdates.DateFormatter("%b-%d\n%H:%M")) # tidy them!
    fig.colorbar(im, ax=ax.ravel().tolist(), anchor=(2.2, 0.0))
    fig.text(1.06, 0.5, "Sound Level (Leq, 1s)", va='center', rotation='vertical', fontsize=10)
    fig.text(-0.02, 0.55, "Frequency Band (Hz)", va='center', rotation='vertical', fontsize=13)
    title = os.path.basename(trj)[:-4]
    plt.suptitle(title, y=1.05, fontsize=17, ha="center")
    fig.tight_layout()
    plt.savefig(project_dir + os.sep + r"Output_Data\IMAGES" + os.sep + title + "_comparison.png", dpi=300, bbox_inches="tight")
    plt.show()
    return event_SPL