From b0498663e78d7eac24cafbf6f67eb0b108c3dc06 Mon Sep 17 00:00:00 2001
From: Haichen Shen
Date: Sun, 16 Feb 2020 05:52:21 +0000
Subject: [PATCH 1/3] fix autotvm tutorial

---
 tutorials/autotvm/tune_conv2d_cuda.py     | 10 +++++++---
 tutorials/autotvm/tune_simple_template.py |  9 +++++++--
 2 files changed, 14 insertions(+), 5 deletions(-)

diff --git a/tutorials/autotvm/tune_conv2d_cuda.py b/tutorials/autotvm/tune_conv2d_cuda.py
index 09b56045edaf..94ca92e4bc20 100644
--- a/tutorials/autotvm/tune_conv2d_cuda.py
+++ b/tutorials/autotvm/tune_conv2d_cuda.py
@@ -195,23 +195,27 @@ def conv2d_no_batching(N, H, W, CO, CI, KH, KW, stride, padding):
 # Begin tuning, log records to file `conv2d.log`
 # During tuning we will also try many invalid configs, so you are expected to
 # see many error reports. As long as you can see non-zero GFLOPS, it is okay.
+logdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../docs/tutorials/autotvm")
+if not os.path.isdir(logdir):
+    os.makedirs(logdir)
+logfile = os.path.join(logdir, "conv2d.log")
 tuner = autotvm.tuner.XGBTuner(task)
 tuner.tune(n_trial=20,
            measure_option=measure_option,
-           callbacks=[autotvm.callback.log_to_file('conv2d.log')])
+           callbacks=[autotvm.callback.log_to_file(logfile)])
 
 #########################################################################
 # Finally we can inspect the best config from log file, check correctness,
 # and measure running time.
 
 # inspect the best config
-dispatch_context = autotvm.apply_history_best("conv2d.log")
+dispatch_context = autotvm.apply_history_best(logfile)
 best_config = dispatch_context.query(task.target, task.workload)
 print("\nBest config:")
 print(best_config)
 
 # apply history best from log file
-with autotvm.apply_history_best('conv2d.log'):
+with autotvm.apply_history_best(logfile):
     with tvm.target.create("cuda"):
         s, arg_bufs = conv2d_no_batching(N, H, W, CO, CI, KH, KW, strides, padding)
         func = tvm.build(s, arg_bufs)
diff --git a/tutorials/autotvm/tune_simple_template.py b/tutorials/autotvm/tune_simple_template.py
index b6ad7e94f883..b6e5d5b00277 100644
--- a/tutorials/autotvm/tune_simple_template.py
+++ b/tutorials/autotvm/tune_simple_template.py
@@ -52,6 +52,7 @@
 
 import logging
 import sys
+import os
 
 import numpy as np
 import tvm
@@ -297,10 +298,14 @@ def matmul(N, L, M, dtype):
 
 # Begin tuning with RandomTuner, log records to file `matmul.log`
 # You can use alternatives like XGBTuner.
+logdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../docs/tutorials/autotvm")
+if not os.path.isdir(logdir):
+    os.makedirs(logdir)
+logfile = os.path.join(logdir, "matmul.log")
 tuner = autotvm.tuner.RandomTuner(task)
 tuner.tune(n_trial=10,
            measure_option=measure_option,
-           callbacks=[autotvm.callback.log_to_file('matmul.log')])
+           callbacks=[autotvm.callback.log_to_file(logfile)])
 
 #########################################################################
 # Finally we apply history best from the cache file and check its correctness.
@@ -310,7 +315,7 @@ def matmul(N, L, M, dtype):
 # with the same argument.
 
 # apply history best from log file
-with autotvm.apply_history_best('matmul.log'):
+with autotvm.apply_history_best(logfile):
     with tvm.target.create("llvm"):
         s, arg_bufs = matmul(N, L, M, 'float32')
         func = tvm.build(s, arg_bufs)

From 0b8c78aba97332a132b59143e976248f49089a3c Mon Sep 17 00:00:00 2001
From: Haichen Shen
Date: Sun, 16 Feb 2020 05:53:56 +0000
Subject: [PATCH 2/3] x

---
 tutorials/autotvm/tune_conv2d_cuda.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/tutorials/autotvm/tune_conv2d_cuda.py b/tutorials/autotvm/tune_conv2d_cuda.py
index 94ca92e4bc20..b1f7222d4b3a 100644
--- a/tutorials/autotvm/tune_conv2d_cuda.py
+++ b/tutorials/autotvm/tune_conv2d_cuda.py
@@ -45,6 +45,7 @@
 # Now return to python code. Import packages.
 
 import logging
+import os
 import sys
 
 import numpy as np

From 5e6e3fddd4b096c39f610cf75d59f4bd6dc824ad Mon Sep 17 00:00:00 2001
From: Haichen Shen
Date: Sun, 16 Feb 2020 18:33:46 +0000
Subject: [PATCH 3/3] fix for sphinx

---
 tutorials/autotvm/tune_conv2d_cuda.py     | 7 ++++++-
 tutorials/autotvm/tune_simple_template.py | 7 ++++++-
 2 files changed, 12 insertions(+), 2 deletions(-)

diff --git a/tutorials/autotvm/tune_conv2d_cuda.py b/tutorials/autotvm/tune_conv2d_cuda.py
index b1f7222d4b3a..ca2a93c34f18 100644
--- a/tutorials/autotvm/tune_conv2d_cuda.py
+++ b/tutorials/autotvm/tune_conv2d_cuda.py
@@ -196,7 +196,12 @@ def conv2d_no_batching(N, H, W, CO, CI, KH, KW, stride, padding):
 # Begin tuning, log records to file `conv2d.log`
 # During tuning we will also try many invalid configs, so you are expected to
 # see many error reports. As long as you can see non-zero GFLOPS, it is okay.
-logdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../docs/tutorials/autotvm")
+try:
+    curdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
+except NameError:
+    # in sphinx
+    curdir = os.path.join(os.getcwd(), "../../")
+logdir = os.path.join(curdir, "docs/tutorials/autotvm")
 if not os.path.isdir(logdir):
     os.makedirs(logdir)
 logfile = os.path.join(logdir, "conv2d.log")
diff --git a/tutorials/autotvm/tune_simple_template.py b/tutorials/autotvm/tune_simple_template.py
index b6e5d5b00277..ccc5d6ff0575 100644
--- a/tutorials/autotvm/tune_simple_template.py
+++ b/tutorials/autotvm/tune_simple_template.py
@@ -298,7 +298,12 @@ def matmul(N, L, M, dtype):
 
 # Begin tuning with RandomTuner, log records to file `matmul.log`
 # You can use alternatives like XGBTuner.
-logdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../docs/tutorials/autotvm")
+try:
+    curdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
+except NameError:
+    # in sphinx
+    curdir = os.path.join(os.getcwd(), "../../")
+logdir = os.path.join(curdir, "docs/tutorials/autotvm")
 if not os.path.isdir(logdir):
     os.makedirs(logdir)
 logfile = os.path.join(logdir, "matmul.log")
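
For reference, a minimal sketch of how the tuning-log setup reads once all three patches are applied, assembled from the added lines above and shown with the conv2d.log name (tune_simple_template.py uses matmul.log). Per the "# in sphinx" comment in PATCH 3/3, __file__ is apparently undefined when Sphinx-Gallery executes the tutorial, so the NameError handler falls back to the current working directory.

import os

# Locate the repository root. Under normal execution the tutorial resolves
# paths relative to its own file; under sphinx __file__ raises NameError,
# so fall back to the current working directory instead.
try:
    curdir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "../../")
except NameError:
    # in sphinx: __file__ is not defined
    curdir = os.path.join(os.getcwd(), "../../")

# Store tuning records under docs/tutorials/autotvm, creating it if needed.
logdir = os.path.join(curdir, "docs/tutorials/autotvm")
if not os.path.isdir(logdir):
    os.makedirs(logdir)
logfile = os.path.join(logdir, "conv2d.log")  # "matmul.log" in tune_simple_template.py

# The tuner then logs to and replays from this absolute path, e.g.:
# tuner.tune(n_trial=20, measure_option=measure_option,
#            callbacks=[autotvm.callback.log_to_file(logfile)])
# with autotvm.apply_history_best(logfile): ...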