Skip to content

Commit

Permalink
[Ansor][AutoTVM v2.0] Part 1: Rename namespace from auto_schedule to a…
Browse files Browse the repository at this point in the history
…uto_scheduler (apache#6059)

* Rename namespace auto_schedule to auto_scheduler

* Update

* Lint fix
  • Loading branch information
jcf94 authored and Trevor Morris committed Aug 26, 2020
1 parent 0ef4a35 commit 3176a71
Show file tree
Hide file tree
Showing 35 changed files with 269 additions and 265 deletions.
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -200,7 +200,7 @@ assign_source_group("Include" ${GROUP_INCLUDE})

# Source file lists
file(GLOB_RECURSE COMPILER_SRCS
src/auto_schedule/*.cc
src/auto_scheduler/*.cc
src/node/*.cc
src/ir/*.cc
src/arith/*.cc
Expand Down
File renamed without changes.
Original file line number Diff line number Diff line change
Expand Up @@ -15,8 +15,8 @@
# specific language governing permissions and limitations
# under the License.

""" Register FFI APIs from C++ for the namespace tvm.auto_schedule. """
""" Register FFI APIs from C++ for the namespace tvm.auto_scheduler. """
import tvm._ffi


tvm._ffi._init_api("auto_schedule", __name__)
tvm._ffi._init_api("auto_scheduler", __name__)
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@
from . import _ffi_api


@tvm._ffi.register_object("auto_schedule.HardwareParams")
@tvm._ffi.register_object("auto_scheduler.HardwareParams")
class HardwareParams(Object):
""" The parameters of target hardware used to guide the search policy
Expand All @@ -55,7 +55,7 @@ def __init__(self, num_cores, vector_unit_bytes, cache_line_bytes):
vector_unit_bytes, cache_line_bytes)


@tvm._ffi.register_object("auto_schedule.SearchTask")
@tvm._ffi.register_object("auto_scheduler.SearchTask")
class SearchTask(Object):
""" The computation information and hardware parameters for a specific schedule search task.
Expand All @@ -79,12 +79,12 @@ def __init__(self, dag, workload_key, target, target_host=None,
hardware_params)


@tvm._ffi.register_object("auto_schedule.SearchPolicy")
@tvm._ffi.register_object("auto_scheduler.SearchPolicy")
class SearchPolicy(Object):
""" The base class of search policies. """


@tvm._ffi.register_object("auto_schedule.EmptyPolicy")
@tvm._ffi.register_object("auto_scheduler.EmptyPolicy")
class EmptyPolicy(SearchPolicy):
""" This is an example empty search policy which will always generate
the init state of ComputeDAG.
Expand All @@ -93,7 +93,7 @@ def __init__(self):
self.__init_handle_by_constructor__(_ffi_api.EmptyPolicy)


@tvm._ffi.register_object("auto_schedule.TuningOptions")
@tvm._ffi.register_object("auto_scheduler.TuningOptions")
class TuningOptions(Object):
""" This controls the options of performance tuning.
Expand All @@ -120,12 +120,12 @@ class TuningOptions(Object):
measure_callbacks: Optional[List[MeasureCallback]]
Callback functions called after each measurement.
Candidates:
- auto_schedule.RecordToFile
- auto_scheduler.RecordToFile
pre_search_callbacks: Optional[List[SearchCallback]]
Callback functions called before the search process.
Candidates:
- auto_schedule.PreloadMeasuredStates
- auto_schedule.PreloadCustomSketchRule
- auto_scheduler.PreloadMeasuredStates
- auto_scheduler.PreloadCustomSketchRule
TODO(jcf94): Add these implementation in later PRs.
"""
def __init__(self, num_measure_trials=0, early_stopping=None, num_measures_per_round=64,
Expand All @@ -136,7 +136,7 @@ def __init__(self, num_measure_trials=0, early_stopping=None, num_measures_per_r
builder = LocalBuilder()
else:
raise ValueError("Invalid builder: " + builder)
elif not isinstance(builder, tvm.auto_schedule.measure.ProgramBuilder):
elif not isinstance(builder, tvm.auto_scheduler.measure.ProgramBuilder):
raise ValueError("Invalid builder: " + builder +
" . TuningOptions expects a ProgramBuilder or string.")

Expand All @@ -145,7 +145,7 @@ def __init__(self, num_measure_trials=0, early_stopping=None, num_measures_per_r
runner = LocalRunner()
else:
raise ValueError("Invalid runner: " + runner)
elif not isinstance(runner, tvm.auto_schedule.measure.ProgramRunner):
elif not isinstance(runner, tvm.auto_scheduler.measure.ProgramRunner):
raise ValueError("Invalid runner: " + runner +
" . TuningOptions expects a ProgramRunner or string.")

Expand Down Expand Up @@ -176,7 +176,7 @@ def auto_schedule(task, search_policy='default', tuning_options=None):
"""
if not isinstance(task, SearchTask):
raise ValueError("Invalid task: " + task +
" . `auto_schedule.auto_schedule` expects a SearchTask.")
" . `auto_scheduler.auto_schedule` expects a SearchTask.")

if isinstance(search_policy, str):
if search_policy == 'default':
Expand All @@ -187,7 +187,7 @@ def auto_schedule(task, search_policy='default', tuning_options=None):
raise ValueError("Invalid search policy: " + search_policy)
elif not isinstance(search_policy, SearchPolicy):
raise ValueError("Invalid search policy: " + search_policy +
" . `auto_schedule.auto_schedule` expects a SearchPolicy or a string.")
" . `auto_scheduler.auto_schedule` expects a SearchPolicy or a string.")

sch, tensors = _ffi_api.AutoSchedule(task, search_policy,
tuning_options if tuning_options else TuningOptions())
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@
from . import _ffi_api


@tvm._ffi.register_object("auto_schedule.ComputeDAG")
@tvm._ffi.register_object("auto_scheduler.ComputeDAG")
class ComputeDAG(Object):
"""
The TVM Auto-scheduler computational graph and related program analyses.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -48,17 +48,17 @@
from . import _ffi_api


@tvm._ffi.register_object("auto_schedule.Iterator")
@tvm._ffi.register_object("auto_scheduler.Iterator")
class Iterator(Object):
""" A loop iterator structure. """


@tvm._ffi.register_object("auto_schedule.Stage")
@tvm._ffi.register_object("auto_scheduler.Stage")
class Stage(Object):
""" A stage in the compute declaration. Similar to tvm.te.schedule.Stage. """


@tvm._ffi.register_object("auto_schedule.State")
@tvm._ffi.register_object("auto_scheduler.State")
class StateObject(Object):
""" The internal State object """
def __eq__(self, other):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,12 +54,12 @@
# This can avoid expensive serialization of TVM IR when using multiprocessing.Pool
GLOBAL_BUILD_ARGUMENTS = None

@tvm._ffi.register_object("auto_schedule.MeasureCallback")
@tvm._ffi.register_object("auto_scheduler.MeasureCallback")
class MeasureCallback(Object):
""" The base class of measurement callback functions. """


@tvm._ffi.register_object("auto_schedule.MeasureInput")
@tvm._ffi.register_object("auto_scheduler.MeasureInput")
class MeasureInput(Object):
""" Store the input of a measurement.
Expand All @@ -74,7 +74,7 @@ def __init__(self, task, state):
self.__init_handle_by_constructor__(_ffi_api.MeasureInput, task, state.state_object)


@tvm._ffi.register_object("auto_schedule.BuildResult")
@tvm._ffi.register_object("auto_scheduler.BuildResult")
class BuildResult(Object):
""" Store the result of a build.
Expand All @@ -99,7 +99,7 @@ def __init__(self, filename, args, error_no, error_msg, time_cost):
_ffi_api.BuildResult, filename, args, error_no, error_msg, time_cost)


@tvm._ffi.register_object("auto_schedule.MeasureResult")
@tvm._ffi.register_object("auto_scheduler.MeasureResult")
class MeasureResult(Object):
""" Store the results of a measurement.
Expand All @@ -124,7 +124,7 @@ def __init__(self, costs, error_no, error_msg, all_cost, timestamp):
error_msg, all_cost, timestamp)


@tvm._ffi.register_object("auto_schedule.ProgramBuilder")
@tvm._ffi.register_object("auto_scheduler.ProgramBuilder")
class ProgramBuilder(Object):
""" The base class of ProgramBuilders. """

Expand All @@ -145,7 +145,7 @@ def build(self, measure_inputs, verbose=1):
return _ffi_api.ProgramBuilderBuild(self, measure_inputs, verbose)


@tvm._ffi.register_object("auto_schedule.ProgramRunner")
@tvm._ffi.register_object("auto_scheduler.ProgramRunner")
class ProgramRunner(Object):
""" The base class of ProgramRunners. """

Expand All @@ -168,7 +168,7 @@ def run(self, measure_inputs, build_results, verbose=1):
return _ffi_api.ProgramRunnerRun(self, measure_inputs, build_results, verbose)


@tvm._ffi.register_object("auto_schedule.LocalBuilder")
@tvm._ffi.register_object("auto_scheduler.LocalBuilder")
class LocalBuilder(ProgramBuilder):
""" LocalBuilder use local CPU cores to build programs in parallel.
Expand All @@ -191,7 +191,7 @@ def __init__(self,
_ffi_api.LocalBuilder, timeout, n_parallel, build_func)


@tvm._ffi.register_object("auto_schedule.LocalRunner")
@tvm._ffi.register_object("auto_scheduler.LocalRunner")
class LocalRunner(ProgramRunner):
""" LocalRunner that uses local CPU/GPU to measures the time cost of programs.
Expand Down Expand Up @@ -334,7 +334,7 @@ def timed_func():
return res


@tvm._ffi.register_func("auto_schedule.local_builder.build")
@tvm._ffi.register_func("auto_scheduler.local_builder.build")
def local_builder_build(inputs, timeout, n_parallel, build_func='default', verbose=1):
"""
Build function of LocalBuilder to build the MeasureInputs to runnable modules.
Expand Down Expand Up @@ -376,7 +376,7 @@ def local_builder_build(inputs, timeout, n_parallel, build_func='default', verbo

return results

@tvm._ffi.register_func("auto_schedule.local_runner.run")
@tvm._ffi.register_func("auto_scheduler.local_runner.run")
def local_run(inputs, build_results, timeout, number, repeat, min_repeat_ms, cooldown_interval,
verbose=1):
"""
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
from . import _ffi_api


@tvm._ffi.register_object("auto_schedule.RecordToFile")
@tvm._ffi.register_object("auto_scheduler.RecordToFile")
class RecordToFile(MeasureCallback):
"""
A measurement callback that writes measurement records into a file.
Expand All @@ -35,21 +35,21 @@ class RecordToFile(MeasureCallback):
filename : str
File name for this callback to write log to.
"""
def __init__(self, filename="auto_schedule_tuning.json"):
def __init__(self, filename="auto_scheduler_tuning.json"):
self.__init_handle_by_constructor__(_ffi_api.RecordToFile, filename)


@tvm._ffi.register_object("auto_schedule.RecordReader")
@tvm._ffi.register_object("auto_scheduler.RecordReader")
class RecordReader(Object):
"""
Reader of the json log file.
Parameters
----------
filename : str = "auto_schedule_tuning.json"
filename : str = "auto_scheduler_tuning.json"
File name for this reader to load log from.
"""
def __init__(self, filename="auto_schedule_tuning.json"):
def __init__(self, filename="auto_scheduler_tuning.json"):
self.__init_handle_by_constructor__(_ffi_api.RecordReader, filename)

def read_lines(self, max_lines=None, skip_lines=0):
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
# specific language governing permissions and limitations
# under the License.

""" Common utilities for auto_schedule. """
""" Common utilities for auto_scheduler. """

from typing import Hashable
import multiprocessing
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,7 @@ def register_workload(func_name, f=None, override=False):
Examples
--------
@auto_schedule.register_workload
@auto_scheduler.register_workload
def matmul(N, M, K):
A = te.placeholder((N, K), name='A')
B = te.placeholder((K, M), name='B')
Expand Down Expand Up @@ -110,7 +110,7 @@ def make_workload_key(func, args):

if not func_name in WORKLOAD_FUNC_REGISTRY:
raise ValueError("%s is not registered. " % func,
"Please register it with @auto_schedule.register_workload")
"Please register it with @auto_scheduler.register_workload")

args = serialize_args(args)

Expand All @@ -137,11 +137,11 @@ def decode_workload_key_to_func_args(workload_key):
workload = json.loads(workload_key)
if not workload[0] in WORKLOAD_FUNC_REGISTRY:
raise ValueError("%s is not registered. " % workload[0] +
"Please register it with @auto_schedule.register_workload")
"Please register it with @auto_scheduler.register_workload")
return workload[0], deserialize_args(workload[1:])


@tvm._ffi.register_func("auto_schedule.workload_key_to_tensors")
@tvm._ffi.register_func("auto_scheduler.workload_key_to_tensors")
def workload_key_to_tensors(workload_key):
""" Get the input/output tensors from the workload key.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,16 +18,18 @@
*/

/*!
* \file auto_schedule/auto_schedule.cc
* \brief The user interface of the TVM Auto-scheduler.
* \file auto_scheduler/auto_schedule.cc
* \brief The user interface of the TVM Auto-scheduler. This is the entry structure to get
* schedule search requirements from upper level (Python API), and returns a high performance
* schedule after search process.
*/

#include "auto_schedule.h"

#include <tvm/runtime/registry.h>

namespace tvm {
namespace auto_schedule {
namespace auto_scheduler {

TVM_REGISTER_NODE_TYPE(TuningOptionsNode);

Expand Down Expand Up @@ -61,7 +63,7 @@ std::pair<te::Schedule, Array<te::Tensor>> AutoSchedule(SearchTask task, SearchP
return task->compute_dag.ApplySteps(state->transform_steps);
}

TVM_REGISTER_GLOBAL("auto_schedule.TuningOptions")
TVM_REGISTER_GLOBAL("auto_scheduler.TuningOptions")
.set_body_typed([](int num_measure_trials, int early_stopping, int num_measures_per_round,
int verbose, ProgramBuilder builder, ProgramRunner runner,
Optional<Array<MeasureCallback>> measure_callbacks,
Expand All @@ -70,12 +72,12 @@ TVM_REGISTER_GLOBAL("auto_schedule.TuningOptions")
builder, runner, measure_callbacks, pre_search_callbacks);
});

TVM_REGISTER_GLOBAL("auto_schedule.AutoSchedule")
TVM_REGISTER_GLOBAL("auto_scheduler.AutoSchedule")
.set_body_typed([](SearchTask task, SearchPolicy search_policy, TuningOptions tuning_options) {
te::Schedule sch;
Array<te::Tensor> return_tensors;
std::tie(sch, return_tensors) = AutoSchedule(task, search_policy, tuning_options);
return Array<ObjectRef>{sch, return_tensors};
});
} // namespace auto_schedule
} // namespace auto_scheduler
} // namespace tvm
Original file line number Diff line number Diff line change
Expand Up @@ -18,22 +18,22 @@
*/

/*!
* \file auto_schedule/auto_schedule.h
* \file auto_scheduler/auto_schedule.h
* \brief The user interface of the TVM Auto-scheduler. This is the entry structure to get
* schedule search requirements from upper level (Python API), and returns a high performance
* schedule after search process.
*/

#ifndef TVM_AUTO_SCHEDULE_AUTO_SCHEDULE_H_
#define TVM_AUTO_SCHEDULE_AUTO_SCHEDULE_H_
#ifndef TVM_AUTO_SCHEDULER_AUTO_SCHEDULE_H_
#define TVM_AUTO_SCHEDULER_AUTO_SCHEDULE_H_

#include <utility>

#include "measure.h"
#include "search_policy/search_policy.h"

namespace tvm {
namespace auto_schedule {
namespace auto_scheduler {

/*! \brief Tuning and measurement options. */
class TuningOptionsNode : public Object {
Expand Down Expand Up @@ -69,7 +69,7 @@ class TuningOptionsNode : public Object {
v->Visit("pre_search_callbacks", &pre_search_callbacks);
}

static constexpr const char* _type_key = "auto_schedule.TuningOptions";
static constexpr const char* _type_key = "auto_scheduler.TuningOptions";
TVM_DECLARE_FINAL_OBJECT_INFO(TuningOptionsNode, Object);
};

Expand Down Expand Up @@ -110,7 +110,7 @@ class TuningOptions : public ObjectRef {
TVM_DLL std::pair<te::Schedule, Array<te::Tensor>> AutoSchedule(SearchTask task,
SearchPolicy search_policy,
TuningOptions tuning_options);
} // namespace auto_schedule
} // namespace auto_scheduler
} // namespace tvm

#endif // TVM_AUTO_SCHEDULE_AUTO_SCHEDULE_H_
#endif // TVM_AUTO_SCHEDULER_AUTO_SCHEDULE_H_
Loading

0 comments on commit 3176a71

Please sign in to comment.