#!/usr/bin/perl
#
# FetchData
#
# Find the most current version at http://service.iris.edu/clients/
#
# Fetch data and related metadata from web services. The default web
# services are those of the IRIS DMC; other FDSN web services may be
# specified by setting the following environment variables:
#
# SERVICEBASE = the base URI of the service(s) to use (http://service.iris.edu/)
# TIMESERIESWS = complete URI of service (http://service.iris.edu/fdsnws/dataselect/1)
# METADATAWS = complete URI of service (http://service.iris.edu/fdsnws/station/1)
# SACPZWS = complete URI of service (http://service.iris.edu/irisws/sacpz/1)
# RESPWS = complete URI of service (http://service.iris.edu/irisws/resp/1)
# FEDCATWS = complete URI of service (http://service.iris.edu/irisws/fedcatalog/1)
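#
# For example, the service base may be overridden from the shell when
# running this script (the host below is illustrative, not a real center):
#   SERVICEBASE=http://service.example.org FetchData ...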
#
# This program is primarily written to select and fetch waveform data
# but can also fetch metadata and response information if those
# services exist at the specified data center. The fdsnws-dataselect
# service is a minimum requirement for use of this script. The
# fdsnws-station service is required if metadata is to be retrieved or
# if geographic selection options are used.
#
# Dependencies: This script should run without problems on Perl
# release 5.10 or newer, older versions of Perl might require the
# installation of the following modules (and their dependencies):
# Bundle::LWP (libwww-perl)
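#
# For example, on older systems the bundle can usually be installed
# from CPAN with:
#   perl -MCPAN -e 'install Bundle::LWP'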
#
## Data selection
#
# Data is generally selected by specifying network, station, location,
# channel, quality, start time and end time. The name parameters may
# contain wildcard characters. All input options are optional but
# waveform requests should include a time window. Data may be
# selected in one of three ways:
#
# 1) Command line arguments: -N, -S, -L, -C, -Q, -s, -e
#
# 2) A BREQ_FAST formatted file, http://ds.iris.edu/manuals/breq_fast.htm
#
# 3) A selection file containing a list of:
# Net Sta Loc Chan Start End
#
# Example selection file contents:
# II BFO 00 BHZ 2011-01-01T00:00:00 2011-01-01T01:00:00
# IU ANMO 00 BHZ 2011-01-01T00:00:00 2011-01-01T01:00:00
# IU COLA 00 BHZ 2011-01-01T00:00:00 2011-01-01T01:00:00
#
# For the command line arguments and the selection file the network,
# station, location and channel fields may contain the common * and ?
# wildcards, meaning zero-to-many and a single character respectively.
# These fields may also be comma-separated lists, for example, the
# network may be specified as II,IU,TA to select three networks.
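#
# For example, these illustrative invocations are equivalent, one using
# command line arguments and one using a selection file containing the
# corresponding line:
#   FetchData -N IU -S ANMO -L 00 -C BHZ -s 2011-01-01T00:00:00 -e 2011-01-01T01:00:00 -o ANMO.mseed
#   FetchData -l myselection.txt -o ANMO.mseed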
#
## Data output
#
# miniSEED: If the -o option is used to specify an output file,
# waveform data will be requested based on the selection and all of
# it written to that single file.
#
# metadata: If the -m option is used to specify a metadata file, a
# line will be written to the file for each channel epoch and will
# contain:
# "net|sta|loc|chan|lat|lon|elev|depth|azimuth|dip|instrument|scale|scalefreq|scaleunits|samplerate|start|end"
#
# This metadata file can be used directly with mseed2sac or tracedsp
# to create SAC files including basic metadata.
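#
# For example, a single metadata line might look like this (values
# illustrative, not authoritative):
# "IU|ANMO|00|BHZ|34.94|-106.45|1850.0|100.0|0.0|-90.0|Geotech KS-54000|3.2e+09|0.02|M/S|20.0|2010-07-10T00:00:00|2011-01-01T00:00:00"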
#
# SAC P&Zs: If the -sd option is given SAC Poles and Zeros will be
# fetched and a file for each channel will be written to the specified
# directory with the name 'SACPZ.Net.Sta.Loc.Chan'. If this option is
# used while fetching waveform data, only channels which returned
# waveforms will be requested.
#
# RESP: If the -rd option is given SEED RESP (as used by evalresp)
# will be fetched and a file for each channel will be written to the
# specified directory with the name 'RESP.Net.Sta.Loc.Chan'. If this
# option is used while fetching waveform data, only channels which
# returned waveforms will be requested.
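#
# For example, an illustrative run that writes waveforms, basic metadata
# and SAC P&Zs in one pass (the SACPZ directory must already exist):
#   FetchData -l myselection.txt -o data.mseed -m metadata.txt -sd SACPZ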
#
#
# ## Change history ##
#
# 2013.042:
# - Rename to FetchData (from FetchBulkData), truncate change log.
# - Use the LWP::UserAgent method env_proxy() to check for and use connection
# proxy information from environment variables (e.g. http_proxy).
# - Add checking of environment variables that will override the web
# service base path (i.e. host name).
# - Change to allow data requests without metadata fetching.
#
# 2013.067:
# - Changed metadata parsing to understand FDSN StationXML schema.
# - Create override service URLs for ws-sacpz and ws-resp until they
# are migrated to service.iris.edu.
#
# 2013.074:
# - Add a workaround for a bug in Perl's Digest Authorization headers
# that conflicts with pedantic behavior of Apache Tomcat; eventually
# Tomcat will be more lenient and this workaround will be removed.
#
# 2013.077:
# - Convert metadata output line to be bar (|) separated instead of
# comma separated and leave dip in SEED convention.
# - Do not translate commas to semicolons in instrument name in metadata.
#
# 2013.086:
# - Remove code to filter Authorization headers, Apache Tomcat has been fixed
# to accept Digest Authentication credentials as submitted by libwww/LWP.
#
# 2013.118:
# - Fix parsing of start and end times from metadata that are used when no
# start and/or end is specified by the caller.
#
# 2013.150:
# - Allow dash characters in breqfast formatted requests for the network
# fields to support virtual networks that use dashes.
#
# 2013.186:
# - Change service URL override command line options to match
# environment variables.
#
# 2013.197:
# - Fix parsing of element values of "0".
#
# 2013.198:
# - Add test for minimum version of LWP (libwww) module of 5.806.
#
# 2013.212:
# - Fetch metadata for requests by default; this allows grouping of time series
# requests and ultimately more efficient recovery in the case of connection
# breaks. Also added an option of --nometadata or -nm to suppress the
# fetching of metadata when it is not strictly needed.
# - Remove lingering overrides to deprecated service locations.
#
# 2014.056:
# - Allow gzip'ed HTTP encoding for metadata, SACPZ and RESP requests if
# support exists on the local system.
# - Add the -noretry option, when used the script will exit on time series
# request timeouts/errors with no retries.
#
# 2014.084:
# - Add -q option to make the script quiet except for errors.
# - Exit value will be 1 if any service requests failed.
#
# 2014.107:
# - Instantiate new UserAgent client for each group when fetching time series
# instead of reusing the same client object. This is to make sure no
# authentication details are shared between requests.
#
# 2014.129:
# - Convert metadata fetching to use POST capability of fdsnws-station.
# This allows making a single metadata request when the request is a list of
# many selections (selection list file or BREQ_FAST), instead of generating a
# request for each selection line. More efficient.
# - Code simplification: separately manage request list for secondary metadata
# such as SACPZ or RESP, track and request a range from earliest to latest
# metadata epochs for each channel. This fixes a bug where only the last epoch
# is represented in a metadata file when the request crosses many epochs.
#
# 2014.134:
# - Optimize the metadata and request window matching by using compiled regexes.
#
# 2014.135:
# - Fix matching requests and metadata for open time windows.
# - Optimize request and metadata matching with a nested hash of compiled regexes.
# - Avoid selecting too much secondary metadata (SACPZ and RESP) by shrinking
# the request window by one second on each end.
#
# 2014.136:
# - Fetch metadata for virtual networks separately from all other metadata in
# order to properly match data requests with metadata.
# - Properly match lists in network, station, location and channel fields.
#
# 2014.142:
# - Fetch metadata using extents for each unique NSLC group, this can be a much
# smaller (and faster) query for requests with a large number of repeated NSLCs.
#
# 2014.168:
# - Explicitly match metadata epochs to time series requests to avoid
# matching substrings, e.g. MONP matching MONP2. Only new code affected.
# - Accept empty location strings in metadata as "--" identifiers.
# - Follow redirects for POST method in addition to default GET and HEAD.
# - A small bit of special output for 429 (Too Many Requests) results to
# help the user understand what is going on if a server were to return this.
# - Fix handling of metadata with no end times (open interval).
#
# 2014.253:
# - Add detection of stream truncation by checking for "#STREAMERROR"
# at the end of the content buffer. As there is no way to communicate
# an error in HTTP after the transfer has started (and a full byte
# count is not known) the DMC's servers will include an error message
# in the stream when an error occurs. This should occur rarely.
# - Optimize downloading, in particular for time series, by a) avoiding
# checks for gzip-encoded HTTP streams when not needed and b) avoiding
# copying of the data buffer.
#
# 2014.322:
# - Add -F federation option, this will cause the request to be sent
# to a federator catalog service. The response of the catalog service
# is parsed and requests are sent to each identified data center.
# - Restructure internal flow for multiple data center handling.
# - Add FederateRequest() to handle federation catalog servicing.
#
# 2014.323:
# - Add -O and -M options to write all federated output to the same files.
# By default a data center prefix is added to the output from each DC.
# - Only include a quality specification in time series requests if
# supplied by the user.
#
# 2014.325:
# - Add error message and more graceful failure when service interfaces
# have not been identified for requested data.
#
# 2014.342:
# - Federator: Add parsing of values for SACPZSERVICE and RESPSERVICE in
# addition to the already parsed STATIONSERVICE and DATASELECTSERVICE.
# - Federator: Gracefully skip unrecognized SERVICE declarations and
# key=value parameters.
# - Fix creation of SACPZ and RESP directories.
# - Include output file names in diagnostic output.
# - Add data center identifier, when present, to header of metadata files.
#
# 2014.351:
# - Avoid undefined reference by checking for metadata before trying to access it.
#
# 2015.014:
# - Change validation of channel codes in breq_fast parsing to accept
# values less than 3 characters, this will allow single '*' wildcards.
# - Add support for matching metadata using exclusions as supported by
# the DMC's fdsnws-station service.
#
# 2015.135:
# - Fix SAC PZ and RESP output directory designation and creation when
# Federation is being performed. Data center specific directories are
# now created in the directory specified for the output.
#
# 2015.246:
# - Restore capability to write output miniSEED to stdout by specifying
# the output file as a single dash.
# - Support non-persistent session cookies for HTTP requests.
# - On authentication errors, retry the request a single time.
#
# 2015.341:
# - Change retry count for failed time series requests from 60 to 10.
# This somewhat mitigates long delays when data centers take a very
# long time to fail.
#
# 2016.007:
# - Trim trailing slashes from service endpoint URLs as a convenience.
#
# 2016.062:
# - Do not retry on 413 response and print server response on all non-auth errors.
# - Trim trailing slash from SERVICEBASE values.
#
# 2016.089:
# - Optimize the matching of metadata to requests by avoiding Tie'd hashes and
# combining regular expressions for fewer match executions.
#
# 2016.260:
# - When writing separate output files per data center, append data center ID
# to the file name portion of the specified path instead of just the beginning.
# - Only do 2 retries for time series requests when Federating.
# - Add warnings when using minimum segment length and longest segment only
# options when Federating; many data centers do not support these, leading
# to sometimes cryptic errors.
#
# 2016.299:
# - Fix appending of data center ID to output file names when
# directories are not specified.
#
# 2017.017:
# - Add a minimum request interval and use it to throttle web service request
# loops to avoid generating requests too fast. At 50 milliseconds the throttling
# will not engage for the vast majority of users and uses.
#
# 2017.164:
# - Set $ENV{PERL_LWP_SSL_VERIFY_HOSTNAME} = 0 to allow usage with
# HTTPS endpoints without performing certificate verification.
# - Allow -e end time specification to be an offset relative to the
# start time in the pattern #.#[SMHD], e.g. 30m, 1h, 2D, etc.
# - Allow -F option to accept a list of data centers that is passed
# to the Federator to limit the response to specific centers.
# Centers: http://service.iris.edu/irisws/fedcatalog/1/datacenters
# - Add more details to help message if -h is specified more than once.
# Relegate lesser used options to the extended help and add more details
# of federation usage and specification of alternate WS endpoints.
#
# 2018.337:
# - Add -X option to download metadata as StationXML at response level.
#
# 2020.314:
# - Add explicit test for no requests after discovery and skip data center if needed.
#
# Author: Chad Trabant, IRIS Data Management Center
use strict;
use File::Basename;
use File::Spec;
use Getopt::Long;
use LWP 5.806; # Require minimum version
use LWP::UserAgent;
use HTTP::Status qw(status_message);
use HTTP::Date;
use Time::HiRes qw(nanosleep);
my $version = "2020.314";
my $scriptname = basename($0);
# Default web service base
my $servicebase = 'http://service.iris.edu';
# Check for environment variable overrides for servicebase
$servicebase = $ENV{'SERVICEBASE'} if ( exists $ENV{'SERVICEBASE'} );
$servicebase =~ s/\/$//; # Trim trailing slash
# Web service for time series data
my $timeseriesservice = "$servicebase/fdsnws/dataselect/1";
# Check for environment variable override for timeseriesservice
$timeseriesservice = $ENV{'TIMESERIESWS'} if ( exists $ENV{'TIMESERIESWS'} );
# Default web service for metadata
my $metadataservice = "$servicebase/fdsnws/station/1";
# Check for environment variable override for metadataservice
$metadataservice = $ENV{'METADATAWS'} if ( exists $ENV{'METADATAWS'} );
# Web service for SAC P&Z
my $sacpzservice = "$servicebase/irisws/sacpz/1";
# Check for environment variable override for sacpzservice
$sacpzservice = $ENV{'SACPZWS'} if ( exists $ENV{'SACPZWS'} );
# Web service for RESP
my $respservice = "$servicebase/irisws/resp/1";
# Check for environment variable override for respservice
$respservice = $ENV{'RESPWS'} if ( exists $ENV{'RESPWS'} );
# Web service for federation catalog
my $fedcatservice = "$servicebase/irisws/fedcatalog/1";
# Check for environment variable override for fedcatservice
$fedcatservice = $ENV{'FEDCATWS'} if ( exists $ENV{'FEDCATWS'} );
# HTTP UserAgent reported to web services
my $useragent = "$scriptname/$version Perl/$] " . new LWP::UserAgent->_agent;
# Waveform data request group size in terms of station-days
my $groupstadays = 30;
# A minimum time interval (seconds) for web service requests
# This is used to avoid sending requests too quickly
my $minimumrequestinterval = 0.05;
# Allow encrypted connections without checking for valid certificate matching
# the expected hostname.
$ENV{PERL_LWP_SSL_VERIFY_HOSTNAME} = 0;
my $usage = undef;
my $verbose = 0;
my $nobsprint = undef;
my $net = undef;
my $sta = undef;
my $loc = undef;
my $chan = undef;
my $qual = undef;
my $starttime = undef;
my $endtime = undef;
my @latrange = (); # (minlat:maxlat)
my @lonrange = (); # (minlon:maxlon)
my @degrange = (); # (lat:lon:maxradius[:minradius])
my $selectfile = undef;
my $bfastfile = undef;
my $mslopt = undef;
my $lsoopt = undef;
my $appname = undef;
my $auth = undef;
my $outfile = undef;
my $outfileapp = undef;
my $sacpzdir = undef;
my $respdir = undef;
my $metafile = undef;
my $metafileapp = undef;
my $nometadata = undef;
my $sxmlfile = undef;
my $noretry = undef;
my $retries = 10;
my $federate = undef;
my $exitvalue = 0;
my $inflater = undef;
# If Compress::Raw::Zlib is available configure inflater for RFC 1952 (gzip)
if ( eval("use Compress::Raw::Zlib; 1") ) {
use Compress::Raw::Zlib;
$inflater = new Compress::Raw::Zlib::Inflate( -WindowBits => WANT_GZIP,
-ConsumeInput => 0 );
}
# Parse command line arguments
Getopt::Long::Configure ("bundling_override");
my $getoptsret = GetOptions(
'help|usage|h+' => \$usage,
'verbose|v+' => \$verbose,
'quiet|q' => sub { $verbose = -1; },
'nobs' => \$nobsprint,
'nometadata|nm' => \$nometadata,
'noretry|nr' => \$noretry,
'federate|F:s' => \$federate,
'net|N=s' => \$net,
'sta|S=s' => \$sta,
'loc|L=s' => \$loc,
'chan|C=s' => \$chan,
'qual|Q=s' => \$qual,
'starttime|s=s' => \$starttime,
'endtime|e=s' => \$endtime,
'lat=s' => \@latrange,
'lon=s' => \@lonrange,
'radius=s' => \@degrange,
'selectfile|l=s' => \$selectfile,
'bfastfile|b=s' => \$bfastfile,
'msl=s' => \$mslopt,
'lso' => \$lsoopt,
'appname|A=s' => \$appname,
'auth|a=s' => \$auth,
'outfile|o=s' => \$outfile,
'outfileapp|O=s' => \$outfileapp,
'sacpzdir|sd=s' => \$sacpzdir,
'respdir|rd=s' => \$respdir,
'metafile|m=s' => \$metafile,
'metafileapp|M=s' => \$metafileapp,
'sxmlfile|X=s' => \$sxmlfile,
'timeseriesws=s' => \$timeseriesservice,
'metadataws=s' => \$metadataservice,
'sacpzws=s' => \$sacpzservice,
'respws=s' => \$respservice,
);
my $required = (defined $net || defined $sta ||
defined $loc || defined $chan ||
scalar @latrange || scalar @lonrange || scalar @degrange ||
defined $starttime || defined $endtime ||
defined $selectfile || defined $bfastfile );
if ( ! $getoptsret || $usage || ! $required ) {
print "$scriptname: collect time series and related metadata (version $version)\n";
print "http://service.iris.edu/clients/\n\n";
print "Usage: $scriptname [options]\n\n";
print " Options:\n";
print " -v Increase verbosity, may be specified multiple times\n";
print " -h Print this help message, if multiple print more help\n";
print " -q Be quiet, do not print anything but errors\n";
print " -N,--net Network code, list and wildcards (* and ?) accepted\n";
print " -S,--sta Station code, list and wildcards (* and ?) accepted\n";
print " -L,--loc Location ID, list and wildcards (* and ?) accepted\n";
print " -C,--chan Channel codes, list and wildcards (* and ?) accepted\n";
print " -Q,--qual Quality indicator, by default no quality is specified\n";
print " -s starttime Specify start time (YYYY-MM-DD,HH:MM:SS.ssssss)\n";
print " -e endtime Specify end time (YYYY-MM-DD,HH:MM:SS.ssssss or #[SMHD])\n";
print " --lat min:max Specify a minimum and/or maximum latitude range\n";
print " --lon min:max Specify a minimum and/or maximum longitude range\n";
print " --radius lat:lon:maxradius[:minradius]\n";
print " Specify circular region with optional minimum radius\n";
print " -l listfile Read list of selections from file\n";
print " -b bfastfile Read list of selections from BREQ_FAST file\n";
print " -a user:pass User and password for access to restricted data\n";
print "\n";
print " -F [DC1[,DC2]] Federate the request to multiple data centers if needed\n";
print " Federation may be limited to an optional list of DCs\n";
print " Output files are prefixed by data center identifiers\n";
print "\n";
print " -o outfile Fetch time series data and write to output file\n";
print " -sd sacpzdir Fetch SAC P&Zs and write files to sacpzdir\n";
print " -rd respdir Fetch RESP and write files to respdir\n";
print " -m metafile Write basic metadata to specified file\n";
print " -X SXMLfile Write response-level StationXML to specified file\n";
print "\n";
if ( $usage >= 2 ) {
print " More options and help:\n";
print " -nm Do not request metadata unless output file requested\n";
print " -nr No retry, exit immediately on time series request errors\n";
print " -msl length Limit returned data to a minimum segment length\n";
print " -lso Limit returned data to the longest segment only\n";
print " -A appname Application/version string for identification\n";
print " -O outfile Write all timeseries to a single file, useful with -F\n";
print " -M metafile Write all metadata to a single file, useful with -F\n";
print "\n";
print "== Specifying data centers for federation\n";
print " List of data center identifiers:\n";
print " http://service.iris.edu/irisws/fedcatalog/1/datacenters\n";
print " To avoid a center, negate it by adding a 'not' prefix, e.g. 'notIRISDMC'\n";
print "\n";
print "== Specifying alternate web service endpoints (when not Federating)\n";
print " Alternate service endpoints may be specified using the following options:\n";
print " -timeseriesws URL (e.g. http://service.iris.edu/fdsnws/dataselect/1)\n";
print " -metadataws URL (e.g. http://service.iris.edu/fdsnws/station/1)\n";
print " -respws URL (e.g. http://service.iris.edu/irisws/resp/1)\n";
print " -sacpzws URL (e.g. http://service.iris.edu/irisws/sacpz/1/)\n";
print "\n";
}
exit 1;
}
# Truncate any existing appending output file and assign to outfile
if ( $outfileapp ) {
die "Cannot specify both -o and -O\n" if ( $outfile );
if ( -f "$outfileapp" ) {
truncate ($outfileapp, 0) || die "Cannot truncate existing file $outfileapp\n";
}
$outfile = $outfileapp;
}
# Truncate any existing appending metadata file and assign to metafile
if ( $metafileapp ) {
die "Cannot specify both -m and -M\n" if ( $metafile );
if ( -f "$metafileapp" ) {
truncate ($metafileapp, 0) || die "Cannot truncate existing file $metafileapp\n";
}
$metafile = $metafileapp;
}
if ( ! $outfile && ! $metafile && ! $sxmlfile && ! $sacpzdir && ! $respdir ) {
die "No output options specified, try -h for usage information\n";
}
# Print script name and local time string
if ( $verbose >= 1 ) {
my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime(time);
printf STDERR "$scriptname ($version) at %4d-%02d-%02d %02d:%02d:%02d\n", $year+1900, $mon+1, $mday, $hour, $min, $sec;
}
# Check for existence of output directories
if ( $sacpzdir && ! -d "$sacpzdir" ) {
die "Cannot find SAC P&Zs output directory: $sacpzdir\n";
}
if ( $respdir && ! -d "$respdir" ) {
die "Cannot find RESP output directory: $respdir\n";
}
# Check for time window if requesting time series data
if ( $outfile && ( ! defined $selectfile && ! defined $bfastfile &&
( ! defined $starttime || ! defined $endtime ) ) ) {
die "Cannot request time series data without start and end times\n";
}
# Normalize time strings given on the command line
if ( $starttime ) {
my ($year,$month,$mday,$hour,$min,$sec,$subsec) = split (/[-:,.\s\/T]/, $starttime);
$starttime = sprintf ("%04d-%02d-%02dT%02d:%02d:%02d", $year, $month, $mday, $hour, $min, $sec);
$starttime .= ".$subsec" if ( $subsec );
}
if ( $endtime ) {
# Check for and parse time in duration pattern: #.#[SMHD]
if ( $endtime =~ /^[\d\.]+[SsMmHhDd]?$/ ) {
if ( $starttime ) {
my ($offset, $timeunit) = $endtime =~ /^([\d\.]+)([SsMmHhDd]?)$/i;
$timeunit = 'S' if (! $timeunit);
# Convert offset value to seconds if specified as days, hours or minutes
if ($timeunit =~ /[Dd]/) {
$offset *= 86400;
}
elsif ($timeunit =~ /[Hh]/) {
$offset *= 3600;
}
elsif ($timeunit =~ /[Mm]/) {
$offset *= 60;
}
# Calculate end time from start + offset and generate string
my $rstartepoch = str2time ($starttime, "UTC");
if ( defined $rstartepoch ) {
$endtime = &mktimestring($rstartepoch + $offset, 1);
}
else {
die "Unable to parse start time: '$starttime'\n"
}
}
else {
die "Cannot specify end time as duration without specifying start time\n";
}
}
else {
my ($year,$month,$mday,$hour,$min,$sec,$subsec) = split (/[-:,.\s\/T]/, $endtime);
$endtime = sprintf ("%04d-%02d-%02dT%02d:%02d:%02d", $year, $month, $mday, $hour, $min, $sec);
$endtime .= ".$subsec" if ( $subsec );
}
}
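# For example, "-s 2011-01-01T00:00:00 -e 30m" converts the offset to
# 30 * 60 = 1800 seconds and produces an end time of 2011-01-01T00:30:00.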
# Validate and prepare lat, lon and radius input
if ( scalar @latrange ) {
@latrange = split (/:/, $latrange[0]);
if ( defined $latrange[0] && ($latrange[0] < -90.0 || $latrange[0] > 90.0) ) {
die "Minimum latitude out of range: $latrange[0]\n";
}
if ( defined $latrange[1] && ($latrange[1] < -90.0 || $latrange[1] > 90.0) ) {
die "Maximum latitude out of range: $latrange[1]\n";
}
}
if ( scalar @lonrange ) {
@lonrange = split (/\:/, $lonrange[0]);
if ( defined $lonrange[0] && ($lonrange[0] < -180.0 || $lonrange[0] > 180.0) ) {
die "Minimum longitude out of range: $lonrange[0]\n";
}
if ( defined $lonrange[1] && ($lonrange[1] < -180.0 || $lonrange[1] > 180.0) ) {
die "Maximum longitude out of range: $lonrange[1]\n";
}
}
if ( scalar @degrange ) {
@degrange = split (/\:/, $degrange[0]);
if ( scalar @degrange < 3 || scalar @degrange > 4 ) {
die "Unrecognized radius specification: @degrange\n";
}
if ( defined $degrange[0] && ($degrange[0] < -90.0 || $degrange[0] > 90.0) ) {
die "Radius latitude out of range: $degrange[0]\n";
}
if ( defined $degrange[1] && ($degrange[1] < -180.0 || $degrange[1] > 180.0) ) {
die "Radius longitude out of range: $degrange[1]\n";
}
}
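# For example, "--radius 35:-106:5:1" selects channels between 1 and 5
# degrees of latitude 35, longitude -106; the minimum radius is optional.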
# An array to hold data selections
my @selections = ();
# Add command line selection to list
if ( defined $net || defined $sta || defined $loc || defined $chan ||
defined $starttime || defined $endtime ) {
push (@selections,"$net|$sta|$loc|$chan|$starttime|$endtime");
}
# Read selection list file
if ( $selectfile ) {
print STDERR "Reading data selection from list file '$selectfile'\n";
&ReadSelectFile ($selectfile);
}
# Read BREQ_FAST file
if ( $bfastfile ) {
print STDERR "Reading data selection from BREQ_FAST file '$bfastfile'\n";
&ReadBFastFile ($bfastfile);
}
# Report complete data selections
if ( $verbose > 2 ) {
print STDERR "== Data selections ==\n";
foreach my $select ( @selections ) {
print STDERR " $select\n";
}
print STDERR "Latitude range: $latrange[0] : $latrange[1]\n" if ( scalar @latrange );
print STDERR "Longitude range: $lonrange[0] : $lonrange[1]\n" if ( scalar @lonrange );
print STDERR "Radius range: $degrange[0] : $degrange[1] : $degrange[2] : $degrange[3]\n" if ( scalar @degrange );
}
# A mega hash for data center details, requests and some results
#
# datacenter{DATACENTER}{website} = URL
# datacenter{DATACENTER}{timeseriesws} = URL
# datacenter{DATACENTER}{metadataws} = URL
# datacenter{DATACENTER}{sacpzws} = URL
# datacenter{DATACENTER}{respws} = URL
#
# datacenter{DATACENTER}{selection} = ref to ARRAY of selections
# datacenter{DATACENTER}{request} = ref to HASH of requests
# datacenter{DATACENTER}{metarequest} = ref to HASH of metadata requests (time extents)
# datacenter{DATACENTER}{metadata} = ref to ARRAY of metadata
my %datacenter = ();
# A buffer for metadata service responses
my $metadataresponse;
# Track bytes downloaded in callback handlers
my $datasize = 0;
# Fetch metadata from the station web service by default. The nometadata
# option is overridden (metadata is always fetched) if any metadata output
# has been requested or if geographic range selection is used.
$nometadata = undef if ( $metafile || $sxmlfile || $sacpzdir || $respdir
|| scalar @latrange || scalar @lonrange || scalar @degrange );
# Resolve federated requests
if ( defined $federate ) {
# Translate negation specified as 'not' with the '-' needed by the service
$federate =~ s/not/\-/g;
# Set number of time series request retries to 2
$retries = 2;
# Print warnings for options not commonly supported
if ( $mslopt ) {
print STDERR "WARNING: Minimum segment length (-msl) option is not broadly supported by data centers\n"
}
if ( $lsoopt ) {
print STDERR "WARNING: Longest segment only (-lso) option is not broadly supported by data centers\n"
}
&FederateRequest( $fedcatservice, \@selections );
if ( $verbose >= 1 ) {
printf STDERR "Federation catalog results from %d data center(s):\n", scalar keys %datacenter;
foreach my $dckey ( sort keys %datacenter ) {
printf STDERR "Data center: $dckey, %d selections\n", scalar @{$datacenter{$dckey}{selection}};
print STDERR " MetadataWS: $datacenter{$dckey}{metadataws}\n" if ( $datacenter{$dckey}{metadataws} );
print STDERR " TimeSeriesWS: $datacenter{$dckey}{timeseriesws}\n" if ( $datacenter{$dckey}{timeseriesws} );
print STDERR " SACPZWS: $datacenter{$dckey}{sacpzws}\n" if ( $datacenter{$dckey}{sacpzws} );
print STDERR " RESPWS: $datacenter{$dckey}{respws}\n" if ( $datacenter{$dckey}{respws} );
}
}
}
# Otherwise set up default (empty) data center
else {
# Trim trailing slashes from service endpoints
$timeseriesservice =~ s/\/$//;
$metadataservice =~ s/\/$//;
$sacpzservice =~ s/\/$//;
$respservice =~ s/\/$//;
# Add default/environmental entries to datacenter hash
$datacenter{""}{timeseriesws} = $timeseriesservice;
$datacenter{""}{metadataws} = $metadataservice;
$datacenter{""}{sacpzws} = $sacpzservice;
$datacenter{""}{respws} = $respservice;
# User selections used directly
$datacenter{""}{selection} = \@selections;
}
# Process each data center
foreach my $dckey ( sort keys %datacenter ) {
if ( $dckey ) {
printf STDERR "Fetching data from $dckey (%s)\n", $datacenter{$dckey}{website};
}
# Fetch metadata unless requested not to
if ( ! defined $nometadata ) {
if ( ! exists $datacenter{$dckey}{metadataws} ) {
print STDERR "Cannot fetch metadata, no fdsnws-station service available for data center $dckey\n";
}
else {
&FetchMetaData( $dckey );
}
}
# Build request hash directly from selections if not fetching metadata and not already populated
elsif ( ! exists $datacenter{$dckey}{request} ) {
foreach my $selection ( @{$datacenter{$dckey}{selection}} ) {
my ($snet,$ssta,$sloc,$schan,$sstart,$send) = split (/\|/,$selection);
# Substitute non-specified fields with wildcards
$snet = "*" if ( ! $snet );
$ssta = "*" if ( ! $ssta );
$sloc = "*" if ( ! $sloc );
$schan = "*" if ( ! $schan );
$datacenter{$dckey}{request}->{"$snet|$ssta|$sloc|$schan|$sstart|$send"} = "$sstart|$send";
}
}
# Report complete data request and metadata request
if ( $verbose > 2 ) {
printf STDERR "== Request list (%d) ==\n", scalar keys %{$datacenter{$dckey}{request}};
foreach my $req ( sort keys %{$datacenter{$dckey}{request}} ) {
print STDERR " $req (metadata: $datacenter{$dckey}{request}->{$req})\n";
}
print STDERR "== End of request list ==\n";
printf STDERR "== Metadata request list (%d) ==\n", scalar keys %{$datacenter{$dckey}{metarequest}};
foreach my $req ( sort keys %{$datacenter{$dckey}{metarequest}} ) {
print STDERR " $req (metadata: $datacenter{$dckey}{metarequest}->{$req})\n";
}
print STDERR "== End of metadata request list ==\n";
}
# Done with this data center if no requests
if (scalar keys %{$datacenter{$dckey}{request}} <= 0 &&
scalar keys %{$datacenter{$dckey}{metarequest}} <= 0) {
if ( $verbose ) {
print STDERR "No requests for data center $dckey\n";
}
next;
}
# Fetch time series data if output file specified
if ( $outfile ) {
if ( ! exists $datacenter{$dckey}{timeseriesws} ) {
print STDERR "Cannot fetch time series, no fdsnws-dataselect service available for data center $dckey\n";
}
else {
# Determine output file mode (overwrite or append) and add data center prefix if needed
my $outfilemode = ( defined $outfileapp ) ? ">>" : ">";
my $outfilename = $outfile;
if ( ! defined $outfileapp && $dckey ) {
# Add data center identifier, $dckey, to the beginning of the file name
my ($volume,$directories,$file) = File::Spec->splitpath ($outfilename);
if ( $directories ) {
$outfilename = File::Spec->catfile ($directories, "$dckey-$file");
}
else {
$outfilename = "$dckey-$file";
}
}
&FetchTimeSeriesData( $dckey, $outfilename, $outfilemode ) if ( $outfile );
}
}
# Collect SAC P&Zs if output directory specified
if ( $sacpzdir ) {
if ( ! exists $datacenter{$dckey}{sacpzws} ) {
print STDERR "Cannot fetch SAC PZs, no SACPZ service available for data center $dckey\n";
}
else {
my $dcsacpzdir = ( $dckey ) ? File::Spec->catdir ($sacpzdir,$dckey) : $sacpzdir;
if ( ! -d "$dcsacpzdir" ) {
mkdir ($dcsacpzdir, 0755) || die "Cannot create directory $dcsacpzdir: $!\n";
}
&FetchSACPZ( $dckey, $dcsacpzdir );
}
}
# Collect RESP if output directory specified
if ( $respdir ) {
if ( ! exists $datacenter{$dckey}{respws} ) {
print STDERR "Cannot fetch RESP, no RESP service available for data center $dckey\n";
}
else {
my $dcrespdir = ( $dckey ) ? File::Spec->catdir ($respdir,$dckey) : $respdir;
if ( ! -d "$dcrespdir" ) {
mkdir ($dcrespdir, 0755) || die "Cannot create directory $dcrespdir: $!\n";
}
&FetchRESP( $dckey, $dcrespdir );
}
}
# Collect StationXML
if ( $sxmlfile ) {
if ( ! exists $datacenter{$dckey}{metadataws} ) {
print STDERR "Cannot fetch StationXML, no metadata service available for data center $dckey\n";
}
else {
my $dcsxmlfile = $sxmlfile;
if ( $dckey ) {
# Add data center identifier, $dckey, to the beginning of the file name
my ($volume,$directories,$file) = File::Spec->splitpath ($sxmlfile);
if ( $directories ) {
$dcsxmlfile = File::Spec->catfile ($directories, "$dckey-$file");
}
else {
$dcsxmlfile = "$dckey-$file";
}
}
&FetchStationXML( $dckey, $dcsxmlfile );
}
}
# Write metadata to file
if ( $metafile && exists $datacenter{$dckey}{metadata} ) {
if ( scalar @{$datacenter{$dckey}{metadata}} <= 0 ) {
printf STDERR "No metdata available\n";
}
else {
# Open metadata file, appending if requested, adding data center prefix if needed
my $mode = ( defined $metafileapp ) ? ">>" : ">";
my $metafilename = $metafile;
if ( ! defined $metafileapp && $dckey ) {
# Add data center identifier, $dckey, to the beginning of the file name
my ($volume,$directories,$file) = File::Spec->splitpath ($metafilename);
if ( $directories ) {
$metafilename = File::Spec->catfile ($directories, "$dckey-$file");
}
else {
$metafilename = "$dckey-$file";
}
}
open (META, $mode, $metafilename) || die "Cannot open metadata file '$metafilename': $!\n";
printf STDERR "Writing metadata (%d channel epochs) to file: %s\n",
scalar @{$datacenter{$dckey}{metadata}}, $metafilename if ( $verbose >= 0 );
# Print data center identifier
printf META "#$dckey: %s\n", $datacenter{$dckey}{website} if ( $dckey );
# Print header line
print META "#net|sta|loc|chan|lat|lon|elev|depth|azimuth|dip|instrument|scale|scalefreq|scaleunits|samplerate|start|end\n";
foreach my $channel ( sort @{$datacenter{$dckey}{metadata}} ) {
my ($net,$sta,$loc,$chan,$start,$end,$lat,$lon,$elev,$depth,$azimuth,$dip,$instrument,$samplerate,$sens,$sensfreq,$sensunit) =
split (/\|/, $channel);
$sensfreq = sprintf ("%0g", $sensfreq);
$samplerate = sprintf ("%0g", $samplerate);
print META "$net|$sta|$loc|$chan|$lat|$lon|$elev|$depth|$azimuth|$dip|$instrument|$sens|$sensfreq|$sensunit|$samplerate|$start|$end\n";
}
close META;
}
}
} # Done looping through data centers
my ($sec,$min,$hour,$mday,$mon,$year,$wday,$yday,$isdst) = localtime(time);
printf (STDERR "DONE at %4d-%02d-%02d %02d:%02d:%02d\n",
$year+1900, $mon+1, $mday, $hour, $min, $sec) if ( $verbose >= 0 );
exit $exitvalue;
## End of main
######################################################################
# ReadSelectFile:
#
# Read selection list file and add entries to the @selections array.
#
# Selection lines are expected to be in the following form:
#
# "Net Sta Loc Chan Start End"
#
# The Net, Sta, Loc and Channel fields are required and can be
# specified as wildcards.
######################################################################
sub ReadSelectFile {
my $selectfile = shift;
open (SF, "<$selectfile") || die "Cannot open '$selectfile': $!\n";
foreach my $line ( <SF> ) {
chomp $line;
next if ( $line =~ /^\#/ ); # Skip comment lines
my ($net,$sta,$loc,$chan,$start,$end) = split (' ', $line);
next if ( ! defined $chan );
# Normalize time strings
if ( $start ) {
my ($year,$month,$mday,$hour,$min,$sec,$subsec) = split (/[-:,.\s\/T]/, $start);
$start = sprintf ("%04d-%02d-%02dT%02d:%02d:%02d", $year, $month, $mday, $hour, $min, $sec);
$start .= ".$subsec" if ( $subsec );
}
if ( $end ) {
my ($year,$month,$mday,$hour,$min,$sec,$subsec) = split (/[-:,.\s\/T]/, $end);
$end = sprintf ("%04d-%02d-%02dT%02d:%02d:%02d", $year, $month, $mday, $hour, $min, $sec);
$end .= ".$subsec" if ( $subsec );
}
# Add selection to global list
push (@selections,"$net|$sta|$loc|$chan|$start|$end");
}
close SF;
} # End of ReadSelectFile()
######################################################################
# ReadBFastFile:
#
# Read BREQ_FAST file and add entries to the @selections array.
#
######################################################################
sub ReadBFastFile {
my $bfastfile = shift;
open (BF, "<$bfastfile") || die "Cannot open '$bfastfile': $!\n";
my $linecount = 0;
BFLINE: foreach my $line ( <BF> ) {
chomp $line;
$linecount++;
next if ( ! $line ); # Skip empty lines
# Capture .QUALITY header
if ( $line =~ /^\.QUALITY .*$/ ) {
($qual) = $line =~ /^\.QUALITY ([DRQMBE])/;
next;