Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[pull] master from phate:master #207

Merged
merged 2 commits into master from phate:master
Dec 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions .clang-tidy
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
---
Checks: '-*,
modernize-deprecated-headers,
'

WarningsAsErrors: '
# modernize-deprecated-headers,
'
13 changes: 12 additions & 1 deletion .github/actions/InstallPackages/action.yml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,11 @@ inputs:
default: "false"
required: false

install-clang-tidy:
description: "Install clang-tidy package. Default is 'false'."
default: "false"
required: false

install-ninja:
description: "Install ninja package. Default is 'false'."
default: "false"
Expand Down Expand Up @@ -54,7 +59,8 @@ runs:
if: ${{inputs.install-llvm == 'true'
|| inputs.install-clang == 'true'
|| inputs.install-mlir == 'true'
|| inputs.install-clang-format == 'true'}}
|| inputs.install-clang-format == 'true'
|| inputs.install-clang-tidy == 'true'}}
run: |
export HAS_LLVM_REPOSITORY=$(find /etc/apt/ -name *.list | xargs cat | grep llvm-toolchain-jammy-${{inputs.llvm-version}})
if [[ -z $HAS_LLVM_REPOSITORY ]]; then
Expand Down Expand Up @@ -95,6 +101,11 @@ runs:
run: sudo apt-get install clang-format-${{inputs.llvm-version}}
shell: bash

- name: "Install clang-tidy package"
if: ${{inputs.install-clang-tidy == 'true'}}
run: sudo apt-get install clang-tidy-${{inputs.llvm-version}}
shell: bash

- name: "Install ninja package"
if: ${{inputs.install-ninja == 'true'}}
run: sudo apt-get install ninja-build
Expand Down
32 changes: 32 additions & 0 deletions .github/workflows/ClangTidy.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
name: ClangTidy

on:
pull_request:
branches: [ master ]

concurrency:
group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
cancel-in-progress: true

jobs:
CheckTidy:
runs-on: ubuntu-22.04
steps:
- uses: actions/checkout@v4

- name: "Install clang tidy"
uses: ./.github/actions/InstallPackages
with:
install-clang-tidy: true

- name: "Install HLS dialect dependencies"
uses: ./.github/actions/BuildCirct

- name: "Install MLIR dialect dependencies"
uses: ./.github/actions/BuildMlirDialect

- name: "Configure jlm with HLS and MLIR enabled"
run: ./configure.sh --enable-mlir=${{ github.workspace }}/lib/mlir-rvsdg --enable-hls=${{ github.workspace }}/build-circt/circt

- name: "Run clang tidy"
run: make tidy
6 changes: 6 additions & 0 deletions Makefile.rules
Original file line number Diff line number Diff line change
Expand Up @@ -183,3 +183,9 @@ format:

format-dry-run:
clang-format-$(LLVM_VERSION) --dry-run --Werror --style="file:.clang-format" --verbose -i $(SOURCES) $(HEADERS)

#################################################################################
# Clang tidy rules

tidy: $(COMMANDPATHSFILE)
clang-tidy-$(LLVM_VERSION) --config-file=.clang-tidy $(HEADERS) $(SOURCES) -- $(CXXFLAGS) $(CPPFLAGS) -I$(BUILD_OUT_PREFIX)
3 changes: 3 additions & 0 deletions jlm/llvm/backend/jlm2llvm/instruction.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
#ifndef JLM_LLVM_BACKEND_JLM2LLVM_INSTRUCTION_HPP
#define JLM_LLVM_BACKEND_JLM2LLVM_INSTRUCTION_HPP

#include <jlm/llvm/ir/tac.hpp>

namespace llvm
{

Expand All @@ -16,6 +18,7 @@ class Constant;
namespace jlm::llvm
{

class cfg_node;
class tac;

namespace jlm2llvm
Expand Down
2 changes: 2 additions & 0 deletions jlm/llvm/frontend/LlvmInstructionConversion.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@
#ifndef JLM_LLVM_FRONTEND_LLVMINSTRUCTIONCONVERSION_HPP
#define JLM_LLVM_FRONTEND_LLVMINSTRUCTIONCONVERSION_HPP

#include <jlm/llvm/ir/tac.hpp>

namespace llvm
{
class Constant;
Expand Down
68 changes: 37 additions & 31 deletions jlm/llvm/ir/operators/Store.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
#include <jlm/llvm/ir/operators/alloca.hpp>
#include <jlm/llvm/ir/operators/MemoryStateOperations.hpp>
#include <jlm/llvm/ir/operators/Store.hpp>
#include <jlm/util/HashSet.hpp>

namespace jlm::llvm
{
Expand Down Expand Up @@ -241,10 +242,8 @@ is_store_alloca_reducible(const std::vector<jlm::rvsdg::output *> & operands)
static bool
is_multiple_origin_reducible(const std::vector<jlm::rvsdg::output *> & operands)
{
std::unordered_set<jlm::rvsdg::output *> states(
std::next(std::next(operands.begin())),
operands.end());
return states.size() != operands.size() - 2;
const util::HashSet<rvsdg::output *> states(std::next(operands.begin(), 2), operands.end());
return states.Size() != operands.size() - 2;
}

static std::vector<jlm::rvsdg::output *>
Expand Down Expand Up @@ -296,17 +295,40 @@ perform_store_alloca_reduction(

static std::vector<jlm::rvsdg::output *>
perform_multiple_origin_reduction(
const StoreNonVolatileOperation & op,
const StoreNonVolatileOperation & operation,
const std::vector<jlm::rvsdg::output *> & operands)
{
std::unordered_set<jlm::rvsdg::output *> states(
std::next(std::next(operands.begin())),
operands.end());
return StoreNonVolatileNode::Create(
operands[0],
operands[1],
{ states.begin(), states.end() },
op.GetAlignment());
// FIXME: Unify with the duplicate state removal reduction of the LoadNonVolatile operation

JLM_ASSERT(operands.size() > 2);
const auto address = operands[0];
const auto value = operands[1];

std::vector<rvsdg::output *> newInputStates;
std::unordered_map<rvsdg::output *, size_t> stateIndexMap;
for (size_t n = 2; n < operands.size(); n++)
{
auto state = operands[n];
if (stateIndexMap.find(state) == stateIndexMap.end())
{
const size_t resultIndex = newInputStates.size();
newInputStates.push_back(state);
stateIndexMap[state] = resultIndex;
}
}

const auto storeResults =
StoreNonVolatileNode::Create(address, value, newInputStates, operation.GetAlignment());

std::vector<rvsdg::output *> results(operation.nresults(), nullptr);
for (size_t n = 2; n < operands.size(); n++)
{
auto state = operands[n];
JLM_ASSERT(stateIndexMap.find(state) != stateIndexMap.end());
results[n - 2] = storeResults[stateIndexMap[state]];
}

return results;
}

store_normal_form::~store_normal_form()
Expand Down Expand Up @@ -363,24 +385,8 @@ store_normal_form::normalize_node(rvsdg::Node * node) const

if (get_multiple_origin_reducible() && is_multiple_origin_reducible(operands))
{
auto outputs = perform_multiple_origin_reduction(*op, operands);
auto new_node = jlm::rvsdg::output::GetNode(*outputs[0]);

std::unordered_map<jlm::rvsdg::output *, jlm::rvsdg::output *> origin2output;
for (size_t n = 0; n < outputs.size(); n++)
{
auto origin = new_node->input(n + 2)->origin();
JLM_ASSERT(origin2output.find(origin) == origin2output.end());
origin2output[origin] = outputs[n];
}

for (size_t n = 2; n < node->ninputs(); n++)
{
auto origin = node->input(n)->origin();
JLM_ASSERT(origin2output.find(origin) != origin2output.end());
node->output(n - 2)->divert_users(origin2output[origin]);
}
remove(node);
divert_users(node, perform_multiple_origin_reduction(*op, operands));
node->region()->remove_node(node);
return false;
}

Expand Down
4 changes: 4 additions & 0 deletions jlm/llvm/opt/InvariantValueRedirection.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -11,12 +11,16 @@
namespace jlm::rvsdg
{
class GammaNode;
class Graph;
class Region;
class StructuralNode;
class ThetaNode;
}

namespace jlm::llvm
{

class CallNode;
class RvsdgModule;

/** \brief Invariant Value Redirection Optimization
Expand Down
12 changes: 12 additions & 0 deletions jlm/llvm/opt/alias-analyses/MemoryStateEncoder.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,18 @@
#define JLM_LLVM_OPT_ALIAS_ANALYSES_MEMORYSTATEENCODER_HPP

#include <memory>
#include <vector>

namespace rvsdg
{
class GammaNode;
class output;
class Region;
class simple_node;
class StructuralNode;
class ThetaNode;
class ThetaOutput;
}

namespace jlm::util
{
Expand Down
30 changes: 29 additions & 1 deletion jlm/llvm/opt/alias-analyses/Steensgaard.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -7,17 +7,44 @@
#define JLM_LLVM_OPT_ALIAS_ANALYSES_STEENSGAARD_HPP

#include <jlm/llvm/opt/alias-analyses/AliasAnalysis.hpp>
#include <jlm/llvm/opt/alias-analyses/PointsToGraph.hpp>
#include <jlm/util/disjointset.hpp>

namespace jlm::rvsdg
{
class GammaNode;
class Graph;
class output;
class Region;
class simple_node;
class StructuralNode;
class ThetaNode;
}

namespace jlm::llvm::aa
namespace jlm::llvm
{

namespace delta
{
class node;
}

namespace lambda
{
class node;
}

namespace phi
{
class node;
}

class CallNode;
class LoadNode;
class StoreNode;

namespace aa
{
class Location;
class RegisterLocation;

Expand Down Expand Up @@ -244,6 +271,7 @@ class Steensgaard final : public AliasAnalysis
std::unique_ptr<Context> Context_;
};

}
}

#endif
3 changes: 1 addition & 2 deletions jlm/llvm/opt/unroll.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,7 @@ class unrollinfo final
theta() const noexcept
{
auto node = idv()->region()->node();
JLM_ASSERT(is<rvsdg::ThetaOperation>(node));
return static_cast<rvsdg::ThetaNode *>(node);
return util::AssertedCast<rvsdg::ThetaNode>(node);
}

inline bool
Expand Down
1 change: 1 addition & 0 deletions jlm/util/AnnotationMap.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@
#include <string_view>
#include <unordered_map>
#include <variant>
#include <vector>

namespace jlm::util
{
Expand Down
1 change: 1 addition & 0 deletions jlm/util/intrusive-hash.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@

#include <functional>
#include <memory>
#include <string>
#include <utility>
#include <vector>

Expand Down
Loading
Loading