Skip to content

Commit

Permalink
Merge branch 'main' into uaziz/broadcast-op-refactor
Browse files Browse the repository at this point in the history
  • Loading branch information
uazizTT authored Nov 22, 2024
2 parents c142254 + e2c3fe4 commit 59538b5
Show file tree
Hide file tree
Showing 24 changed files with 728 additions and 72 deletions.
99 changes: 88 additions & 11 deletions .github/workflows/issue-last-updated.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@ jobs:
echo "project_id=PVT_kwDOA9MHEM4AjeTl" >> $GITHUB_ENV
echo "field_id=PVTF_lADOA9MHEM4AjeTlzgiiU18" >> $GITHUB_ENV
- name: Get Issue ID
id: get_issue_id
run: |
Expand All @@ -31,18 +32,94 @@ jobs:
- name: Get Item ID for Issue
id: get_item_by_issue_id
id: get_item_id_by_issue_id
run: |
ITEM_ID=$(curl -X POST -H "Authorization: Bearer $GITHUB_TOKEN" \
-H "Content-Type: application/json" \
-d '{
"query": "query($projectId: ID!) { node(id: $projectId) { ... on ProjectV2 { items(first: 100) { nodes { id content { ... on Issue { id } } } } } } }",
"variables": {
"projectId": "'"${{ env.project_id }}"'"
}
}' \
https://api.github.com/graphql | jq -r '.data.node.items.nodes[] | select(.content.id=="'"${{ env.issue_id }}"'") | .id')
echo "ITEM_ID=$ITEM_ID" >> $GITHUB_ENV
# Initialize variables
CURSOR=null
ITEM_ID=""
# Define the GraphQL query as a string
QUERY='query($projectId: ID!, $cursor: String) {
node(id: $projectId) {
... on ProjectV2 {
items(first: 100, after: $cursor) {
nodes {
id
content {
... on Issue {
id
}
}
}
pageInfo {
hasNextPage
endCursor
}
}
}
}
}'
while : ; do
# Construct JSON payload using jq for proper formatting
JSON_PAYLOAD=$(jq -n \
--arg query "$QUERY" \
--arg projectId "$PROJECT_ID" \
--arg cursor "$CURSOR" \
'{ query: $query, variables: { projectId: $projectId, cursor: $cursor }}')
# Make the GraphQL request
RESPONSE=$(curl -s -X POST -H "Authorization: Bearer $GITHUB_TOKEN" \
-H "Content-Type: application/json" \
-d "$JSON_PAYLOAD" \
https://api.github.com/graphql)
# Debug: print entire response
echo "RESPONSE: $RESPONSE"
# Check if the response contains `items` data
ITEMS_DATA=$(echo "$RESPONSE" | jq -r '.data.node.items.nodes' 2>/dev/null)
if [[ "$ITEMS_DATA" == "null" ]]; then
echo "Error: Items data not found. Please check your PROJECT_ID and GITHUB_TOKEN permissions."
break
fi
# Parse the item ID if it matches the ISSUE_NODE_ID
ITEM_ID=$(echo "$RESPONSE" | jq -r --arg ISSUE_NODE_ID "$ISSUE_NODE_ID" \
'.data.node.items.nodes[] | select(.content.id==$ISSUE_NODE_ID) | .id')
# If ITEM_ID is found, output it and stop the loop
if [[ -n "$ITEM_ID" && "$ITEM_ID" != "null" ]]; then
echo "Found ITEM_ID: $ITEM_ID"
echo "ITEM_ID=$ITEM_ID" >> $GITHUB_ENV # Save ITEM_ID to environment for future steps
break
fi
# Extract pagination information
HAS_NEXT_PAGE=$(echo "$RESPONSE" | jq -r '.data.node.items.pageInfo.hasNextPage')
CURSOR=$(echo "$RESPONSE" | jq -r '.data.node.items.pageInfo.endCursor')
# If no more pages, exit loop
if [[ "$HAS_NEXT_PAGE" != "true" ]]; then
echo "Issue not found in project items."
break
fi
done
- name: Use Found ITEM_ID
if: env.ITEM_ID # Only runs if ITEM_ID was set
run: echo "The ITEM_ID is ${{ env.ITEM_ID }}"


- name: Update Project Field
run: |
Expand Down
31 changes: 18 additions & 13 deletions .github/workflows/nightly-uplift.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,33 +5,38 @@ name: Nightly Uplift

on:
schedule:
- cron: '0 8 * * *' # Runs at 08:00 UTC every day
- cron: '0 6 * * *' # Runs at 06:00 UTC every day
workflow_dispatch: # Manual trigger

jobs:
uplift-pr:
runs-on: ubuntu-latest

env:
SUBMODULE_PATH: third_party/tt-metal
TT_METAL_VERSION: origin/main
TT_METAL_SUBMODULE_PATH: third_party/tt-metal

steps:

- uses: actions/checkout@v4
with:
submodules: recursive
fetch-depth: 0
ref: main

- name: Set env variable
- name: Set env variable for today's date
run: |
echo "TODAY=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- name: Update tt-metal reference
- name: Fetch latest SHA of tt-metal submodule
env:
GH_TOKEN: ${{ github.token }}
run: |
# Fetch the latest SHA using GitHub CLI
LATEST_SHA=$(gh api repos/tenstorrent/tt-metal/commits/main --jq '.sha')
# Update the third_party/CMakeLists.txt file with the new SHA
sed -i "s/set(TT_METAL_VERSION \".*\")/set(TT_METAL_VERSION \"${LATEST_SHA}\")/" third_party/CMakeLists.txt
LATEST_TT_METAL_VERSION=$(gh api repos/tenstorrent/tt-mlir/commits/main --jq '.sha')
echo "LATEST_TT_METAL_VERSION=$LATEST_TT_METAL_VERSION" >> $GITHUB_ENV
- name: Update tt-metal reference in third_party/CMakeLists.txt
run: |
echo "Updating tt-mlir to SHA: ${{ env.LATEST_TT_METAL_VERSION }}"
sed -i "s/set(TT_METAL_VERSION \".*\")/set(TT_METAL_VERSION \"${{ env.LATEST_TT_METAL_VERSION }}\")/" third_party/CMakeLists.txt
- name: Create Pull Request
uses: peter-evans/create-pull-request@v7
Expand All @@ -41,9 +46,9 @@ jobs:
committer: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com>
author: ${{ github.actor }} <${{ github.actor_id }}+${{ github.actor }}@users.noreply.github.com>
base: main
commit-message: "Uplift ${{ env.SUBMODULE_PATH }} to ${{ env.SUBMODULE_VERSION }} ${{ env.TODAY }}"
title: "Uplift ${{ env.SUBMODULE_PATH }} to ${{ env.SUBMODULE_VERSION }} ${{ env.TODAY }}"
body: "This PR uplifts the ${{ env.SUBMODULE_PATH }} to the ${{ env.SUBMODULE_VERSION }}"
commit-message: "Uplift ${{ env.TT_METAL_SUBMODULE_PATH }} to ${{ env.LATEST_TT_METAL_VERSION }} ${{ env.TODAY }}"
title: "Uplift ${{ env.TT_METAL_SUBMODULE_PATH }} to ${{ env.LATEST_TT_METAL_VERSION }} ${{ env.TODAY }}"
body: "This PR uplifts the ${{ env.TT_METAL_SUBMODULE_PATH }} to the ${{ env.LATEST_TT_METAL_VERSION }}"
labels: uplift
delete-branch: true
token: ${{ secrets.GH_TOKEN }}
Expand Down
6 changes: 3 additions & 3 deletions docs/src/ttmlir-translate.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,15 +5,15 @@ The `ttmlir-translate` translation utility. Unlike `ttmlir-opt` tool which is us

```bash
# First, let's run `ttmlir-opt` to convert to proper dialect
./build/bin/ttmlir-opt --ttir-load-system-desc --ttir-layout --convert-ttir-to-ttnn --convert-ttnn-to-emitc test/ttmlir/Dialect/TTNN/simple_multiply.mlir -o c.mlir
./build/bin/ttmlir-opt --ttir-to-emitc-pipeline test/ttmlir/Dialect/TTNN/simple_multiply.mlir -o c.mlir

# Now run `ttmlir-translate` to produce C++ code
./build/bin/ttmlir-translate -mlir-to-cpp c.mlir -allow-unregistered-dialect
./build/bin/ttmlir-translate --mlir-to-cpp c.mlir
```

Bonus: These two commands can be piped, to avoid writing a `mlir` file to disk, like so:
```bash
./build/bin/ttmlir-opt --ttir-load-system-desc --ttir-layout --convert-ttir-to-ttnn --convert-ttnn-to-emitc test/ttmlir/Dialect/TTNN/simple_multiply.mlir | ./build/bin/ttmlir-translate -mlir-to-cpp -allow-unregistered-dialect
./build/bin/ttmlir-opt --ttir-to-emitc-pipeline test/ttmlir/Dialect/TTNN/simple_multiply.mlir | ./build/bin/ttmlir-translate -mlir-to-cpp
```

## Generate flatbuffer file from MLIR
Expand Down
27 changes: 27 additions & 0 deletions include/ttmlir/Dialect/TTIR/IR/TTIROps.td
Original file line number Diff line number Diff line change
Expand Up @@ -925,6 +925,33 @@ def TTIR_SliceOp: TTIR_DPSOp<"slice"> {
let hasVerifier = 1;
}

// TTIR select: extracts a strided sub-tensor along a single dimension,
// in destination-passing style (result is written into the `output` operand).
def TTIR_SelectOp: TTIR_DPSOp<"select"> {
let summary = "Select op.";
let description = [{
Extracts a sub-tensor (slice) from the input tensor along a specified dimension in a few steps defined by the
`begin`, `length`, and `stride` attributes.
The `begin` attribute specifies the start index for the selected dimension of the tensor.
The `length` attribute specifies the number of elements to extract from the input tensor along the selected dimension.
The `stride` attribute specifies the step size for the start index. The default value is 0; 0 means no stride.
}];

// `input` is the tensor to slice; `output` is the DPS init operand holding
// the destination buffer. `dim`/`begin`/`length`/`stride` select the slice.
let arguments = (ins AnyRankedTensor:$input,
AnyRankedTensor:$output,
SI32Attr:$dim,
SI32Attr:$begin,
SI32Attr:$length,
DefaultValuedOptionalAttr<SI32Attr, "0">:$stride,
TT_OperandConstraintArrayAttr:$operand_constraints);

let results = (outs AnyRankedTensor:$result);

// DPS interface hook: the `output` operand is the op's init/destination.
let extraClassDeclaration = [{
MutableOperandRange getDpsInitsMutable() { return getOutputMutable(); }
}];

// A custom C++ verifier checks the attribute/shape invariants.
let hasVerifier = 1;
}

// ANCHOR: decomposing_an_op_index_ttir
def TTIR_IndexOp: TTIR_DPSOp<"index"> {
let summary = "Index op.";
Expand Down
10 changes: 5 additions & 5 deletions lib/Conversion/StableHLOToTTIR/StableHLOToTTIRPatterns.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -850,7 +850,7 @@ class StableHLOToTTIRBroadcastInDimOpConversionPattern

llvm::SmallVector<int64_t, 4> broadcastedShape;
auto srcType =
getTypeConverter()->convertType(srcOp.getOperand().getType());
getTypeConverter()->convertType(adaptor.getOperand().getType());
auto inputShape = mlir::cast<mlir::RankedTensorType>(srcType).getShape();
auto outputShape = mlir::cast<mlir::RankedTensorType>(srcType).getShape();

Expand Down Expand Up @@ -996,8 +996,8 @@ class StableHLOToTTIRConcatOpConversionPattern
"ConcatOp dimension is too large.");
}

auto rankedTensorType =
mlir::dyn_cast<mlir::RankedTensorType>(srcOp.getOperand(0).getType());
auto rankedTensorType = mlir::dyn_cast<mlir::RankedTensorType>(
adaptor.getOperands()[0].getType());
if (static_cast<int64_t>(adaptor.getDimension()) >=
rankedTensorType.getRank()) {
return rewriter.notifyMatchFailure(srcOp,
Expand Down Expand Up @@ -1185,8 +1185,8 @@ class StableHLOToTTIRGatherOpConversionPattern
auto dimensionNumbers = srcOp.getDimensionNumbers();

rewriter.replaceOpWithNewOp<mlir::tt::ttir::GatherOp>(
srcOp, outputType, srcOp.getOperands()[0],
srcOp.getOperands()[1], // Start indices
srcOp, outputType, adaptor.getOperands()[0],
adaptor.getOperands()[1], // Start indices
Value(outputTensor), dimensionNumbers.getOffsetDims(),
dimensionNumbers.getCollapsedSliceDims(),
dimensionNumbers.getOperandBatchingDims(),
Expand Down
Loading

0 comments on commit 59538b5

Please sign in to comment.