Skip to content

Commit

Permalink
disable codecarbon as it's very unstable (#152)
Browse files Browse the repository at this point in the history
  • Loading branch information
stas00 authored Oct 22, 2021
1 parent 8dc8af5 commit 829cefd
Show file tree
Hide file tree
Showing 3 changed files with 13 additions and 4 deletions.
13 changes: 13 additions & 0 deletions megatron/global_vars.py
Original file line number Diff line number Diff line change
Expand Up @@ -152,7 +152,11 @@ def _set_tensorboard_writer(args):
'no TensorBoard logs will be written.', flush=True)


# Important: codecarbon is very unstable, and its latest incarnation uses the python scheduler, which interferes with the asyncio library we use in the test suite and breaks everything — so we are making this a no-op for now.
def _set_codecarbon_tracker(args):

return # turned off

global _GLOBAL_CODECARBON_TRACKER
if not hasattr(args, 'codecarbon_dir') or args.codecarbon_dir is None:
return
Expand Down Expand Up @@ -187,6 +191,9 @@ def _set_codecarbon_tracker(args):


def codecarbon_tracker_start():

return # turned off, see the notes above

global _GLOBAL_CODECARBON_TRACKER
if _GLOBAL_CODECARBON_TRACKER is None:
return
Expand All @@ -196,6 +203,9 @@ def codecarbon_tracker_start():


def codecarbon_tracker_stop():

return # turned off, see the notes above

global _GLOBAL_CODECARBON_TRACKER
if _GLOBAL_CODECARBON_TRACKER is None:
return
Expand All @@ -205,6 +215,9 @@ def codecarbon_tracker_stop():


def codecarbon_tracker_flush():

return # turned off, see the notes above

global _GLOBAL_CODECARBON_TRACKER
if _GLOBAL_CODECARBON_TRACKER is None:
return
Expand Down
2 changes: 0 additions & 2 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -9,8 +9,6 @@ tensorboard
torch
transformers
DeepSpeed @ git+https://github.com/microsoft/DeepSpeed.git
# at some point when it starts working, freeze with either a min version or a sha using the syntax codecarbon.git@deadbeaf
codecarbon @ git+https://github.com/mlco2/codecarbon.git
# versions from HF transformers
black==21.4b0
isort>=5.5.4
2 changes: 0 additions & 2 deletions tests/test_training.py
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,6 @@ def get_variation_config(self, variation, output_dir):
--save {output_dir}/checkpoints
--load {output_dir}/checkpoints
--data-path {data_dir}/meg-gpt2-openwebtext_text_document
--codecarbon-dir {output_dir}/codecarbon
--tensorboard-dir {output_dir}/tensorboard
--tensorboard-queue-size 5
--log-timers-to-tensorboard
Expand Down Expand Up @@ -314,7 +313,6 @@ def test_training_prefix_lm_all(self):
--save {output_dir}/checkpoints
--load {output_dir}/checkpoints
--data-path {data_dir}/meg-gpt2-openwebtext_text_document
--codecarbon-dir {output_dir}/codecarbon
--tensorboard-dir {output_dir}/tensorboard
--tensorboard-queue-size 5
--log-timers-to-tensorboard
Expand Down

0 comments on commit 829cefd

Please sign in to comment.