diff --git a/tutorials/autotvm/tune_conv2d_cuda.py b/tutorials/autotvm/tune_conv2d_cuda.py
index 09b56045edaf..ca2a93c34f18 100644
--- a/tutorials/autotvm/tune_conv2d_cuda.py
+++ b/tutorials/autotvm/tune_conv2d_cuda.py
@@ -45,6 +45,7 @@
 # Now return to python code. Import packages.
 
 import logging
+import os
 import sys
 
 import numpy as np
@@ -195,23 +196,32 @@ def conv2d_no_batching(N, H, W, CO, CI, KH, KW, stride, padding):
 # Begin tuning, log records to file `conv2d.log`
 # During tuning we will also try many invalid configs, so you are expected to
 # see many error reports. As long as you can see non-zero GFLOPS, it is okay.
+try:
+    curdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
+except NameError:
+    # in sphinx
+    curdir = os.path.join(os.getcwd(), "../../")
+logdir = os.path.join(curdir, "docs/tutorials/autotvm")
+if not os.path.isdir(logdir):
+    os.makedirs(logdir)
+logfile = os.path.join(logdir, "conv2d.log")
 tuner = autotvm.tuner.XGBTuner(task)
 tuner.tune(n_trial=20,
            measure_option=measure_option,
-           callbacks=[autotvm.callback.log_to_file('conv2d.log')])
+           callbacks=[autotvm.callback.log_to_file(logfile)])
 
 #########################################################################
 # Finally we can inspect the best config from log file, check correctness,
 # and measure running time.
 
 # inspect the best config
-dispatch_context = autotvm.apply_history_best("conv2d.log")
+dispatch_context = autotvm.apply_history_best(logfile)
 best_config = dispatch_context.query(task.target, task.workload)
 print("\nBest config:")
 print(best_config)
 
 # apply history best from log file
-with autotvm.apply_history_best('conv2d.log'):
+with autotvm.apply_history_best(logfile):
     with tvm.target.create("cuda"):
         s, arg_bufs = conv2d_no_batching(N, H, W, CO, CI, KH, KW, strides, padding)
         func = tvm.build(s, arg_bufs)
diff --git a/tutorials/autotvm/tune_simple_template.py b/tutorials/autotvm/tune_simple_template.py
index b6ad7e94f883..ccc5d6ff0575 100644
--- a/tutorials/autotvm/tune_simple_template.py
+++ b/tutorials/autotvm/tune_simple_template.py
@@ -52,6 +52,7 @@
 
 import logging
 import sys
+import os
 
 import numpy as np
 import tvm
@@ -297,10 +298,19 @@ def matmul(N, L, M, dtype):
 
 # Begin tuning with RandomTuner, log records to file `matmul.log`
 # You can use alternatives like XGBTuner.
+try:
+    curdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
+except NameError:
+    # in sphinx
+    curdir = os.path.join(os.getcwd(), "../../")
+logdir = os.path.join(curdir, "docs/tutorials/autotvm")
+if not os.path.isdir(logdir):
+    os.makedirs(logdir)
+logfile = os.path.join(logdir, "matmul.log")
 tuner = autotvm.tuner.RandomTuner(task)
 tuner.tune(n_trial=10,
            measure_option=measure_option,
-           callbacks=[autotvm.callback.log_to_file('matmul.log')])
+           callbacks=[autotvm.callback.log_to_file(logfile)])
 
 #########################################################################
 # Finally we apply history best from the cache file and check its correctness.
@@ -310,7 +320,7 @@ def matmul(N, L, M, dtype):
 # with the same argument.
 
 # apply history best from log file
-with autotvm.apply_history_best('matmul.log'):
+with autotvm.apply_history_best(logfile):
     with tvm.target.create("llvm"):
         s, arg_bufs = matmul(N, L, M, 'float32')
         func = tvm.build(s, arg_bufs)