skip put channel when datasize is zero (PaddlePaddle#52)
AUP0 authored Aug 17, 2022
1 parent 0b424ce commit 8d486a6
Showing 1 changed file with 15 additions and 13 deletions.
28 changes: 15 additions & 13 deletions paddle/fluid/framework/fleet/ps_gpu_wrapper.cc
@@ -800,20 +800,22 @@ void PSGPUWrapper::LoadIntoMemory(bool is_shuffle) {
   timer.Pause();
   VLOG(0) << "LoadIntoMemory cost: " << timer.ElapsedSec() << "s";
 
-  // local shuffle
-  if (is_shuffle) {
-    dataset_->LocalShuffle();
-  }
-  InitSlotInfo();
-  std::shared_ptr<HeterContext> gpu_task = gpu_task_pool_.Get();
-  gpu_task->Reset();
-  gpu_task->pass_id_ = (uint16_t)(dataset_->GetPassID());
+  if (dataset_->GetMemoryDataSize() > 0) {
+    // local shuffle
+    if (is_shuffle) {
+      dataset_->LocalShuffle();
+    }
+    InitSlotInfo();
+    std::shared_ptr<HeterContext> gpu_task = gpu_task_pool_.Get();
+    gpu_task->Reset();
+    gpu_task->pass_id_ = (uint16_t)(dataset_->GetPassID());
 
-  dataset_mutex_.lock();
-  dataset_pipe_.push(dataset_);
-  dataset_mutex_.unlock();
-
-  data_ready_channel_->Put(gpu_task);
+    dataset_mutex_.lock();
+    dataset_pipe_.push(dataset_);
+    dataset_mutex_.unlock();
+
+    data_ready_channel_->Put(gpu_task);
+  }
 
   VLOG(3) << "End LoadIntoMemory(), dataset[" << dataset_ << "]";
 }
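For context, the commit wraps the entire producer path in a GetMemoryDataSize() > 0 guard so that an empty pass never pushes a HeterContext into data_ready_channel_. Below is a minimal, self-contained sketch of that guard-before-put pattern; the Channel, Dataset, and HeterContext types here are simplified stand-ins for illustration, not Paddle's real classes.

// Minimal sketch of the pattern introduced by this commit (simplified,
// hypothetical stand-ins for Paddle's Dataset / HeterContext / channel types).
#include <condition_variable>
#include <cstdio>
#include <memory>
#include <mutex>
#include <queue>

struct HeterContext {};  // placeholder for the real per-pass task object

// Tiny blocking channel standing in for data_ready_channel_.
class Channel {
 public:
  void Put(std::shared_ptr<HeterContext> task) {
    std::lock_guard<std::mutex> lk(mu_);
    q_.push(std::move(task));
    cv_.notify_one();
  }
  std::shared_ptr<HeterContext> Get() {
    std::unique_lock<std::mutex> lk(mu_);
    cv_.wait(lk, [this] { return !q_.empty(); });
    auto t = q_.front();
    q_.pop();
    return t;
  }

 private:
  std::mutex mu_;
  std::condition_variable cv_;
  std::queue<std::shared_ptr<HeterContext>> q_;
};

struct Dataset {
  size_t memory_data_size = 0;
  size_t GetMemoryDataSize() const { return memory_data_size; }
};

// Producer side: only build and publish a task when the dataset is non-empty,
// mirroring the `if (dataset_->GetMemoryDataSize() > 0)` guard in the diff.
void LoadIntoMemory(const Dataset& dataset, Channel* data_ready_channel) {
  if (dataset.GetMemoryDataSize() > 0) {
    auto task = std::make_shared<HeterContext>();
    data_ready_channel->Put(task);  // consumer is woken only for real work
  }
  // With zero data the Put is skipped, so the downstream consumer never
  // receives an empty pass to process.
}

int main() {
  Channel ch;
  Dataset empty, full;
  full.memory_data_size = 128;
  LoadIntoMemory(empty, &ch);  // no task published
  LoadIntoMemory(full, &ch);   // one task published
  std::printf("got task: %p\n", static_cast<void*>(ch.Get().get()));
  return 0;
}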
