Showing 4 changed files with 265 additions and 34 deletions.
@@ -0,0 +1,83 @@
# %%
# from args import parse_args
##
import argparse
import json
import os

import numpy as np
import pandas as pd


def parse_args():
    parser = argparse.ArgumentParser(description="Run HypoDD (cross-correlation) on NCEDC/SCEDC data")
    parser.add_argument("--num_nodes", type=int, default=1)
    parser.add_argument("--node_rank", type=int, default=0)
    parser.add_argument("--year", type=int, default=2023)
    parser.add_argument("--root_path", type=str, default="local")
    parser.add_argument("--region", type=str, default="Cal")
    parser.add_argument("--bucket", type=str, default="quakeflow_catalog")
    return parser.parse_args()


# %%
args = parse_args()
root_path = args.root_path
region = args.region

with open(f"{root_path}/{region}/config.json", "r") as fp:
    config = json.load(fp)

# %%
data_path = f"{region}/cctorch"
result_path = f"{region}/hypodd"
os.makedirs(f"{root_path}/{result_path}", exist_ok=True)

# %%
stations = pd.read_csv(f"{root_path}/{data_path}/cctorch_stations.csv")

# HypoDD station file: one line per station, "STA LAT LON".
# Keyed by station code so duplicate channels collapse to a single entry.
station_lines = {}
for i, row in stations.iterrows():
    station_id = row["station_id"]
    network_code, station_code, comp_code, channel_code = station_id.split(".")
    # tmp_code = f"{station_code}{channel_code}"
    tmp_code = f"{station_code}"
    station_lines[tmp_code] = f"{tmp_code:<8s} {row['latitude']:.3f} {row['longitude']:.3f}\n"


with open(f"{root_path}/{result_path}/stations.dat", "w") as f:
    for line in sorted(station_lines.values()):
        f.write(line)

# %%
events = pd.read_csv(f"{root_path}/{data_path}/cctorch_events.csv")
events["time"] = pd.to_datetime(events["event_time"], format="mixed")

# HypoDD event file (events.dat): DATE TIME LAT LON DEPTH MAG EH EZ RMS ID
event_lines = []

for i, row in events.iterrows():
    event_index = row["event_index"]
    origin = row["time"]
    magnitude = row["magnitude"]
    x_err = 0.0
    z_err = 0.0
    time_err = 0.0
    dx, dy, dz = 0.0, 0.0, 0.0
    # dx = np.random.uniform(-0.01, 0.01)
    # dy = np.random.uniform(-0.01, 0.01)
    # dz = np.random.uniform(0, 10)
    # dz = 0
    # Clamp centiseconds to 99 so microseconds >= 995000 do not round up
    # to a three-digit value and corrupt the two-digit time field.
    centisecond = min(round(origin.microsecond / 1e4), 99)
    event_lines.append(
        f"{origin.year:4d}{origin.month:02d}{origin.day:02d} "
        f"{origin.hour:2d}{origin.minute:02d}{origin.second:02d}{centisecond:02d} "
        # f"{row['latitude']:8.4f} {row['longitude']:9.4f} {row['depth_km']:8.4f} "
        f"{row['latitude'] + dy:8.4f} {row['longitude'] + dx:9.4f} {row['depth_km'] + dz:8.4f} "
        f"{magnitude:5.2f} {x_err:5.2f} {z_err:5.2f} {time_err:5.2f} {event_index:9d}\n"
    )

with open(f"{root_path}/{result_path}/events.dat", "w") as f:
    f.writelines(event_lines)

# %%
os.system(f"bash run_hypodd_cc.sh {root_path} {region}")
@@ -0,0 +1,107 @@
#!/bin/bash
set -x
WORKING_DIR=$PWD
if [ $# -eq 2 ]; then
  root_path=$1
  region=$2
else
  root_path="local"
  region="demo"
fi

if [ ! -d "$root_path/$region/hypodd" ]; then
  mkdir -p "$root_path/$region/hypodd"
fi

cp "$root_path/$region/cctorch/dt.cc" "$root_path/$region/hypodd/dt.cc"
cd "$root_path/$region/hypodd"

# Build HypoDD from source on the first run.
if [ ! -d "HypoDD" ]; then
  git clone git@github.com:zhuwq0/HypoDD.git
  export PATH=$PATH:$PWD/HypoDD
  make -C HypoDD/src/
fi

cat <<EOF > cc.inp
* RELOC.INP:
*--- input file selection
* cross correlation diff times:
dt.cc
*
*catalog P diff times:
*
* event file:
events.dat
*
* station file:
stations.dat
*
*--- output file selection
* original locations:
hypodd_cc.loc
* relocations:
hypodd_cc.reloc
* station information:
hypodd.sta
* residual information:
hypodd.res
* source parameter information:
hypodd.src
*
*--- data type selection:
* IDAT: 0 = synthetics; 1 = cross corr; 2 = catalog; 3 = cross & cat
* IPHA: 1 = P; 2 = S; 3 = P & S
* DIST: max dist [km] between cluster centroid and station
* IDAT IPHA DIST
1 3 120
*
*--- event clustering:
* OBSCC: min # of obs/pair for crosstime data (0 = no clustering)
* OBSCT: min # of obs/pair for network data (0 = no clustering)
* OBSCC OBSCT
0 0
*
*--- solution control:
* ISTART: 1 = from single source; 2 = from network sources
* ISOLV: 1 = SVD; 2 = LSQR
* NSET: number of sets of iteration with specifications following
* ISTART ISOLV NSET
2 2 4
*
*--- data weighting and re-weighting:
* NITER: last iteration to use the following weights
* WTCCP, WTCCS: weight cross P, S
* WTCTP, WTCTS: weight catalog P, S
* WRCC, WRCT: residual threshold in sec for cross, catalog data
* WDCC, WDCT: max dist [km] between cross, catalog linked pairs
* DAMP: damping (for LSQR only)
* --- CROSS DATA ----- ---- CATALOG DATA ----
* NITER WTCCP WTCCS WRCC WDCC WTCTP WTCTS WRCT WDCT DAMP
4 1 1 -9 -9 -9 -9 -9 -9 70
4 1 1 6 -9 -9 -9 -9 -9 70
4 1 0.8 3 4 -9 -9 -9 -9 70
4 1 0.8 2 2 -9 -9 -9 -9 70
*
*--- 1D model:
* NLAY: number of model layers
* RATIO: vp/vs ratio
* TOP: depths of top of layer (km)
* VEL: layer velocities (km/s)
* NLAY RATIO
12 1.73
* TOP
0.0 1.0 3.0 5.0 7.0 9.0 11.0 13.0 17.0 21.0 31.00 31.10
* VEL
5.30 5.65 5.93 6.20 6.20 6.20 6.20 6.20 6.20 6.20 7.50 8.11
*
*--- event selection:
* CID: cluster to be relocated (0 = all)
* ID: cuspids of event to be relocated (8 per line)
* CID
0
* ID
EOF

./HypoDD/src/hypoDD/hypoDD cc.inp
cd "$WORKING_DIR"
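After a successful run, HypoDD writes the relocated catalog to hypodd_cc.reloc (per the output section of cc.inp above). A minimal loading sketch, assuming the standard hypoDD .reloc layout, in which the first four whitespace-separated columns are event ID, latitude, longitude, and depth in km:

import pandas as pd

# Sketch: load the relocated catalog. The path assumes the script's
# default arguments (root_path="local", region="Cal"); adjust as needed.
reloc = pd.read_csv("local/Cal/hypodd/hypodd_cc.reloc", sep=r"\s+", header=None)
reloc = reloc[[0, 1, 2, 3]].rename(
    columns={0: "event_index", 1: "latitude", 2: "longitude", 3: "depth_km"}
)
print(reloc.head())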