recomp-verify.py
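
"""Entry point for recomp-verify: model check a TLA+ spec against its config.

usage: recomp-verify.py <file.tla> <file.cfg> [--cust|--naive|--parallel] [--verbose]

The script drives bin/recomp-verify.jar. By default it copies the current
directory's files into an out/ working directory and runs the jar there,
forwarding --cust custom_recomp.csv, --naive, and --verbose when requested; if
the jar exits with code 99 (treated here as a property violation), the
ErrTrace.tla it produced is replayed with TLC (bin/tla2tools.jar) to print the
error trace. With --parallel, the script first asks the jar to decompose the
spec into components and then races several strategies in separate worker
directories (a monolithic TLC run, two custom recomposition maps, and the
naive map), printing whichever output finishes first.
"""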
import concurrent.futures
import glob
import os
import shutil
import subprocess
import sys
from functools import partial

# Paths to the bundled jars: recomp-verify.jar (the verification engine) and
# tla2tools.jar (TLC), both expected under bin/ next to this script.
root_dir = os.path.dirname(os.path.abspath(__file__))
tool = root_dir + "/bin/recomp-verify.jar"
tlc = root_dir + "/bin/tla2tools.jar"

def write(name, contents):
    with open(name, "w") as f:
        f.write(contents)

def decomp(spec, cfg):
    # Ask the jar for the spec's decomposition; it prints the component names
    # as a single comma-separated line on stdout.
    cmd_args = ["java", "-jar", tool, spec, cfg, "--decomp"]
    ret = subprocess.run(cmd_args, capture_output=True, text=True)
    return ret.stdout.rstrip().split(",")

def create_err_trace(txt):
    # Extract the counterexample from TLC's output: keep everything from the
    # first "Error:" line up to (but not including) the "distinct states found"
    # statistics line, dropping internal lines that mention errCounter.
    keep = []
    capture = False
    for line in txt.split("\n"):
        if "Error:" in line:
            capture = True
        if "distinct states found" in line:
            capture = False
        if capture and "errCounter" not in line:
            keep.append(line)
    return "\n".join(keep)
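
# For illustration, with hypothetical TLC output of the form
#   Error: Invariant Inv is violated.
#   ...trace states...
#   1234 states generated, 987 distinct states found, ...
# create_err_trace keeps the lines from the first "Error:" through the trace
# and drops the statistics line and any errCounter bookkeeping lines.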

def verify(spec, cfg, cust, naive, verbose):
    # Run the model checker; subprocess.call streams its output straight to stdout.
    cmd_args = ["java", "-Xmx25g", "-jar", tool, spec, cfg]
    if cust:
        cmd_args.append("--cust")
        cmd_args.append("custom_recomp.csv")
    if naive:
        cmd_args.append("--naive")
    if verbose:
        cmd_args.append("--verbose")
    retcode = subprocess.call(cmd_args)
    # An exit code of 99 indicates the jar found a violation and wrote the
    # counterexample to ErrTrace.tla; replay it with TLC to print the trace.
    if retcode == 99:
        replay_args = ["java", "-jar", tlc, "-deadlock", "ErrTrace.tla"]
        replay = subprocess.run(replay_args, capture_output=True, text=True)
        replay_out = replay.stdout
        if "Error:" in replay_out:
            err_trace = create_err_trace(replay_out)
            print("Here is the error trace:\n")
            print(err_trace)
        else:
            print("Could not confirm the violating trace in the TLA+ spec; this is a bug in the tool.")

def verify_single_process(spec, cfg, cust, naive, verbose):
    # Copy every file with an extension (the spec, cfg, and any auxiliary
    # modules) into an out/ working directory and run the checker there.
    orig_dir = os.getcwd()
    os.makedirs("out", exist_ok=True)
    dest_dir = orig_dir + "/out"
    for filename in glob.glob(os.path.join(orig_dir, '*.*')):
        shutil.copy(filename, dest_dir)
    os.chdir("out")
    verify(spec, cfg, cust, naive, verbose)
    os.chdir(orig_dir)

def verify_capture_output(spec, cfg, pdir, *args):
    # Run one verification attempt inside the worker directory pdir and return
    # its stdout. Extra args (e.g. "--naive" or "--cust custom_recomp.csv") are
    # forwarded to the jar.
    shutil.copy(spec, pdir + "/")
    shutil.copy(cfg, pdir + "/")
    cmd_args = ["java", "-Xmx7g", "-jar", tool, spec, cfg]
    for arg in args:
        cmd_args.append(arg)
    # the monolithic case is checked directly with TLC rather than the jar
    if pdir == "mono":
        cmd_args = ["java", "-Xmx7g", "-jar", tlc, "-deadlock", "-workers", "1", "-config", cfg, spec]
    ret = subprocess.run(cmd_args, capture_output=True, text=True, cwd=pdir)
    return ret.stdout.rstrip()

def run_multi_verif(dest_dir, spec, cfg):
    # Race the candidate strategies and return the output of whichever run
    # finishes first. The slower runs are not cancelled here; the script simply
    # force-exits once the winner is printed (see os._exit at the bottom).
    # (dest_dir is currently unused.)
    executor = concurrent.futures.ThreadPoolExecutor(max_workers=8)
    futures = []
    # spawn a process for the naive map
    f = partial(verify_capture_output, spec, cfg, "naive", "--naive")
    futures.append(executor.submit(f))
    # spawn a process for each recomp map
    for pdir in ["mono", "cust_1", "cust_2"]:
        f = partial(verify_capture_output, spec, cfg, pdir, "--cust", "custom_recomp.csv")
        futures.append(executor.submit(f))
    done, not_done = concurrent.futures.wait(futures, return_when=concurrent.futures.FIRST_COMPLETED)
    winner = done.pop()
    return winner.result()

def verify_multi_process(spec, cfg, verbose):
    orig_dir = os.getcwd()
    os.makedirs("out", exist_ok=True)
    dest_dir = orig_dir + "/out"
    shutil.copy(spec, dest_dir)
    shutil.copy(cfg, dest_dir)
    os.chdir("out")
    # compute the spec's components in a scratch decomp/ directory
    os.makedirs("decomp", exist_ok=True)
    shutil.copy(spec, "decomp/")
    shutil.copy(cfg, "decomp/")
    os.chdir("decomp")
    components = decomp(spec, cfg)
    os.chdir(dest_dir)
    # if there's only one component, there is no need to run multiple
    # processes; just check the spec monolithically with TLC
    if len(components) <= 1:
        os.makedirs("mono", exist_ok=True)
        output = verify_capture_output(spec, cfg, "mono")
        print(output)
        print(".")
        return
    # otherwise, build three recomp maps to race against each other
    os.makedirs("mono", exist_ok=True)
    os.makedirs("cust_1", exist_ok=True)
    os.makedirs("cust_2", exist_ok=True)
    mono = ",".join(components) + "\n"
    recomp_1 = components[0] + "\n" + ",".join(components[1:]) + "\n"
    recomp_2 = ",".join(components[0:-1]) + "\n" + components[-1] + "\n"
    write("mono/custom_recomp.csv", mono)
    write("cust_1/custom_recomp.csv", recomp_1)
    write("cust_2/custom_recomp.csv", recomp_2)
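    # Sketch of the assumed custom_recomp.csv format, inferred from how the
    # strings above are built: one recomposition group per line, each line a
    # comma-separated list of component names. With hypothetical components
    # A, B, C:
    #   mono/custom_recomp.csv:    A,B,C
    #   cust_1/custom_recomp.csv:  A
    #                              B,C
    #   cust_2/custom_recomp.csv:  A,B
    #                              C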
    # also prepare a directory for the naive mapping, then race all four attempts
    os.makedirs("naive", exist_ok=True)
    output = run_multi_verif(dest_dir, spec, cfg)
    print(output)
    print(".")

usage = "usage: recomp-verify.py <file.tla> <file.cfg> [--cust|--naive|--parallel] [--verbose]"

def run():
    if len(sys.argv) < 3 or len(sys.argv) > 5:
        print(usage)
        return
    flags = sys.argv[3:]
    for flag in flags:
        if flag not in ("--cust", "--naive", "--parallel", "--verbose"):
            print(usage)
            return
    spec = sys.argv[1]
    cfg = sys.argv[2]
    multi_process = "--parallel" in flags
    cust = "--cust" in flags
    naive = "--naive" in flags
    verbose = "--verbose" in flags
    if multi_process:
        verify_multi_process(spec, cfg, verbose)
    else:
        verify_single_process(spec, cfg, cust, naive, verbose)


if __name__ == "__main__":
    run()
    # Force exit. This may leave the slower verification subprocesses running
    # for a short period of time, which is not ideal.
    os._exit(0)