From 7db647d4aaf99445aab32b374f3e9d48a63ddc3e Mon Sep 17 00:00:00 2001 From: Rohan Julka Date: Thu, 20 Jun 2024 16:32:51 +0100 Subject: [PATCH] Use connected lists for tape data structure Instead of a single list, use multiple connected lists to store elements. This allows the size of the tape to be increased dynamically without relocating elements. --- benchmark/AlgorithmicComplexity.cpp | 2 +- demos/CustomTypeNumDiff.cpp | 2 +- include/clad/Differentiator/Differentiator.h | 18 +-- include/clad/Differentiator/NewTape.h | 117 +++++++++++++++++++ test/NumericalDiff/PureCentralDiffCalls.C | 2 +- 5 files changed, 130 insertions(+), 11 deletions(-) create mode 100644 include/clad/Differentiator/NewTape.h diff --git a/benchmark/AlgorithmicComplexity.cpp b/benchmark/AlgorithmicComplexity.cpp index 9717cb0cd..15089a24e 100644 --- a/benchmark/AlgorithmicComplexity.cpp +++ b/benchmark/AlgorithmicComplexity.cpp @@ -19,7 +19,7 @@ static void BM_NumericGausP(benchmark::State& state) { double p[] = {1, 2, 3, 4, 5}; double dx[5] = {0, 0, 0, 0, 0}; double dp[5] = {0, 0, 0, 0, 0}; - clad::tape> results = {}; + clad::old_tape> results = {}; int dim = 5; results.emplace_back(dx, dim); results.emplace_back(dp, dim); diff --git a/demos/CustomTypeNumDiff.cpp b/demos/CustomTypeNumDiff.cpp index 0f92c22f9..33bc6adb1 100644 --- a/demos/CustomTypeNumDiff.cpp +++ b/demos/CustomTypeNumDiff.cpp @@ -134,7 +134,7 @@ int main() { // This is how we return the derivative with respect to all arguments. // The order of being placed in this tape should be the same as the order of // the arguments being passed to the function. - clad::tape> grad = {}; // Place the l-value reference of the variables in the tape. 
diff --git a/include/clad/Differentiator/Differentiator.h b/include/clad/Differentiator/Differentiator.h index 4a089c095..d35114fe1 100644 --- a/include/clad/Differentiator/Differentiator.h +++ b/include/clad/Differentiator/Differentiator.h @@ -14,6 +14,7 @@ #include "DynamicGraph.h" #include "FunctionTraits.h" #include "Matrix.h" +#include "NewTape.h" #include "NumericalDiff.h" #include "Tape.h" @@ -43,15 +44,16 @@ inline CUDA_HOST_DEVICE unsigned int GetLength(const char* code) { } /// Tape type used for storing values in reverse-mode AD inside loops. - template - using tape = tape_impl; +template using tape = new_tape_impl; - /// Add value to the end of the tape, return the same value. - template - CUDA_HOST_DEVICE T push(tape& to, ArgsT... val) { - to.emplace_back(std::forward(val)...); - return to.back(); - } +template using old_tape = tape_impl; + +/// Add value to the end of the tape, return the same value. +template +CUDA_HOST_DEVICE T push(tape& to, ArgsT... val) { + to.emplace_back(std::forward(val)...); + return to.back(); +} /// Add value to the end of the tape, return the same value. /// A specialization for clad::array_ref types to use in reverse mode. 
// NOTE(review): this region is a corrupted `git format-patch` hunk creating
// include/clad/Differentiator/NewTape.h. The transport encoding stripped
// every `<...>` sequence (template lists and #include targets), so the
// header below is a reconstruction. Concrete fixes applied on top of it:
//   * new_tape_impl had no destructor -> all blocks and elements leaked;
//     pop_back also called `temp->~Block()` on a `new`-ed object (destroy
//     without deallocate). Both paths now `delete` the block.
//   * ~Block ran destructors over the whole capacity range, destroying
//     never-constructed slots of a partially filled block (UB for
//     non-trivial T). The owning tape now destroys exactly the live range.
//   * Popping the last element of the first block left m_size==m_capacity,
//     so an empty tape reported size() == 32.
//   * size() reported only the current block's fill; it now reports the
//     total element count, matching tape_impl::size().
//   * destroy()'s pseudo-destructor used a reference type and its reverse
//     loop stepped an iterator before begin; allocateRawStorage had two
//     identical #ifdef branches and ignored a null nothrow-new result.
#ifndef CLAD_DIFFERENTIATOR_NEWTAPE_H
#define CLAD_DIFFERENTIATOR_NEWTAPE_H

#include <cassert>
#include <cstddef>
#include <new>
#include <type_traits>
#include <utility>

#if __has_include("clad/Differentiator/CladConfig.h")
#include "clad/Differentiator/CladConfig.h"
#else
// Standalone compilation (e.g. isolated unit tests): no CUDA annotations.
#define CUDA_HOST_DEVICE
#endif

namespace clad {

/// One fixed-capacity slab of tape storage. Blocks are chained into a
/// doubly linked list so the tape can grow without relocating elements
/// that were already pushed (their addresses stay stable).
template <typename T> class Block {
public:
  T* data;              ///< Raw, uninitialized storage for `capacity` slots.
  Block* next;          ///< Younger neighbour (filled after this block).
  Block* prev;          ///< Older neighbour (filled before this block).
  std::size_t capacity; ///< Number of slots in `data`.

  using pointer = T*;
  using iterator = pointer;

  CUDA_HOST_DEVICE explicit Block(std::size_t cap)
      : data(allocateRawStorage(cap)), next(nullptr), prev(nullptr),
        capacity(cap) {}

  /// Frees the raw storage only. Live elements must already have been
  /// destroyed by the owning tape, which knows how many were constructed.
  CUDA_HOST_DEVICE ~Block() { ::operator delete(static_cast<void*>(data)); }

  Block(const Block&) = delete;
  Block& operator=(const Block&) = delete;
  Block(Block&&) = delete;
  Block& operator=(Block&&) = delete;

  CUDA_HOST_DEVICE iterator block_begin() { return data; }
  CUDA_HOST_DEVICE iterator block_end() { return data + capacity; }

  /// Allocates uninitialized storage; elements are placement-new'ed in
  /// later. nothrow keeps the allocation usable from device code paths.
  CUDA_HOST_DEVICE static T* allocateRawStorage(std::size_t cap) {
    T* mem = static_cast<T*>(::operator new(cap * sizeof(T), std::nothrow));
    assert(mem && "tape block allocation failed");
    return mem;
  }

  /// Destroys the elements in [B, E) in reverse construction order.
  CUDA_HOST_DEVICE static void destroy(T* B, T* E) {
    destroy_impl(B, E, std::is_trivially_destructible<T>());
  }

private:
  CUDA_HOST_DEVICE static void destroy_impl(T* B, T* E, std::false_type) {
    while (E != B)
      (--E)->~T();
  }
  // Trivially destructible types need no per-element teardown.
  CUDA_HOST_DEVICE static void destroy_impl(T*, T*, std::true_type) {}
};

/// LIFO tape built from linked fixed-size blocks. Unlike the vector-like
/// tape_impl, growing never relocates stored elements, so a reference
/// obtained from back() stays valid until that element is popped.
template <typename T> class new_tape_impl {
  Block<T>* m_cur_block = nullptr; ///< Youngest block; older ones via prev.
  std::size_t m_capacity = 32;     ///< Slots per block.
  std::size_t m_size = 0;          ///< Constructed elements in m_cur_block.
  std::size_t m_total = 0;         ///< Constructed elements in the tape.

  using pointer = T*;
  using reference = T&;
  using iterator = pointer;

public:
  new_tape_impl() = default;

  // The tape owns raw blocks; copying would double-free.
  new_tape_impl(const new_tape_impl&) = delete;
  new_tape_impl& operator=(const new_tape_impl&) = delete;

  CUDA_HOST_DEVICE ~new_tape_impl() {
    // Invariant: every block older than m_cur_block is completely full.
    while (m_cur_block) {
      Block<T>* older = m_cur_block->prev;
      Block<T>::destroy(m_cur_block->data, m_cur_block->data + m_size);
      delete m_cur_block;
      m_cur_block = older;
      m_size = m_capacity;
    }
  }

  /// Constructs a new element at the end of the tape, chaining a fresh
  /// block when the current one is full (or on first use).
  template <typename... ArgsT>
  CUDA_HOST_DEVICE void emplace_back(ArgsT&&... args) {
    if (!m_cur_block || m_size >= m_capacity) {
      Block<T>* full = m_cur_block;
      m_cur_block = new Block<T>(m_capacity);
      if (full) {
        full->next = m_cur_block;
        m_cur_block->prev = full;
      }
      m_size = 0;
    }
    ::new (static_cast<void*>(m_cur_block->data + m_size))
        T(std::forward<ArgsT>(args)...);
    ++m_size;
    ++m_total;
  }

  /// Total number of elements currently stored across all blocks.
  [[nodiscard]] CUDA_HOST_DEVICE std::size_t size() const { return m_total; }

  /// Pointer to the last element. Requires a non-empty tape.
  CUDA_HOST_DEVICE iterator end() {
    assert(m_cur_block && m_size && "end() called on an empty tape");
    return m_cur_block->data + (m_size - 1);
  }

  /// Reference to the most recently pushed element.
  CUDA_HOST_DEVICE reference back() { return *end(); }

  /// Destroys the last element; releases a block once it drains.
  CUDA_HOST_DEVICE void pop_back() {
    assert(m_cur_block && m_size && "pop_back() called on an empty tape");
    (m_cur_block->data + (m_size - 1))->~T();
    --m_size;
    --m_total;
    if (m_size == 0) {
      Block<T>* drained = m_cur_block;
      m_cur_block = m_cur_block->prev;
      if (m_cur_block)
        m_cur_block->next = nullptr;
      delete drained; // was `drained->~Block()`: destroy w/o deallocation
      // Older blocks are always full; an empty tape keeps m_size == 0.
      m_size = m_cur_block ? m_capacity : 0;
    }
  }
};

} // namespace clad

#endif // CLAD_DIFFERENTIATOR_NEWTAPE_H

// NOTE(review): the corrupted patch ended with a hunk for
// test/NumericalDiff/PureCentralDiffCalls.C switching the grad storage from
//   clad::tape<clad::array_ref<double>>  to  clad::old_tape<...>
// (emplace_back(dx, 3) / (&dy) / (&dz) calls unchanged); that hunk is not
// altered by this review.