Fix build and tests
subhankarshah committed Nov 8, 2022
1 parent 47cb7fc commit 265b933
Showing 4 changed files with 41 additions and 260 deletions.
28 changes: 5 additions & 23 deletions stablehlo/dialect/StablehloAttrs.td
@@ -61,24 +61,6 @@ def StableHLO_DotDimensionNumbers : AttrDef<StableHLO_Dialect, "DotDimensionNumb
  let hasCustomAssemblyFormat = 1;
}

def StableHLO_ConvDimensionNumbers : AttrDef<StableHLO_Dialect, "ConvDimensionNumbers"> {
  let cppNamespace = "::mlir::stablehlo";
  let mnemonic = "conv";
  let summary = "Structure of dimension information for conv op";
  let parameters = (ins
    "int64_t":$inputBatchDimension,
    "int64_t":$inputFeatureDimension,
    StableHLO_Dim:$inputSpatialDimensions,
    "int64_t":$kernelInputFeatureDimension,
    "int64_t":$kernelOutputFeatureDimension,
    StableHLO_Dim:$kernelSpatialDimensions,
    "int64_t":$outputBatchDimension,
    "int64_t":$outputFeatureDimension,
    StableHLO_Dim:$outputSpatialDimensions
  );
  let hasCustomAssemblyFormat = 1;
}

def OutputOperandAlias : AttrDef<StableHLO_Dialect, "OutputOperandAlias"> {
  let cppNamespace = "::mlir::stablehlo";
  let mnemonic = "output_operand_alias";
@@ -116,11 +98,11 @@ def OutputOperandAlias : AttrDef<StableHLO_Dialect, "OutputOperandAlias"> {
"int64_t":$operandIndex,
StableHLO_Dim:$operandTupleIndices
);
let assemblyFormat = [{
`<` `output_tuple_indices` `=` $outputTupleIndices `,`
`operand_index` `=` $operandIndex `,`
`operand_tuple_indices` `=` $operandTupleIndices `>`
}];
let assemblyFormat = "`<` "
"`output_tuple_indices` `=` `[` $outputTupleIndices `]` `,`"
"`operand_index` `=` $operandIndex `,`"
"`operand_tuple_indices` `=` `[` $operandTupleIndices `]`"
"`>`";
}
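
Note: the updated assembly format wraps both index lists in explicit `[` `]` brackets, so the attribute now prints as, for example:

#stablehlo.output_operand_alias<output_tuple_indices = [0], operand_index = 0, operand_tuple_indices = [1]>

which is exactly the string asserted in the relocated Python test further down in this commit.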

def StableHLO_ArgResultAlias : AttrDef<StableHLO_Dialect, "ArgResultAlias"> {
13 changes: 13 additions & 0 deletions stablehlo/dialect/StablehloOps.td
@@ -2056,6 +2056,19 @@ def StableHLO_CustomCallOp: StableHLO_Op<"custom_call",
    custom<CustomCallTarget>($call_target_name) `(` $inputs `)`
    attr-dict `:` functional-type(operands, results)
  }];

  // TODO(b/244367323): Need to update all usages by adding the arg
  // `output_operand_aliases`, and remove this builder once the bug is fixed.
  let builders = [
    OpBuilder<(ins
      "::mlir::TypeRange":$result_type, "::mlir::ValueRange":$operands,
      "::mlir::StringAttr":$call_target_name,
      "::mlir::BoolAttr":$has_side_effect,
      "::mlir::StringAttr":$backend_config,
      "::mlir::stablehlo::CustomCallApiVersionAttr":$api_version,
      "::mlir::ArrayAttr":$called_computations,
      "::mlir::ArrayAttr":$operand_layouts,
      "::mlir::ArrayAttr":$result_layouts)>];
}
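
Note: a hypothetical caller-side sketch, not part of this commit, of how the compatibility builder declared above might be invoked until call sites are migrated to pass `output_operand_aliases` explicitly. The surrounding variables (`builder`, `loc`, `resultTypes`, `callOperands`), the target name, and all attribute values are illustrative assumptions, not code from the repository:

// Hypothetical sketch only: assumes an OpBuilder `builder`, a Location `loc`,
// and TypeRange/ValueRange values `resultTypes`/`callOperands` in the caller.
auto op = builder.create<mlir::stablehlo::CustomCallOp>(
    loc, /*result_type=*/resultTypes, /*operands=*/callOperands,
    /*call_target_name=*/builder.getStringAttr("my_target"),
    /*has_side_effect=*/builder.getBoolAttr(false),
    /*backend_config=*/builder.getStringAttr(""),
    /*api_version=*/mlir::stablehlo::CustomCallApiVersionAttr::get(
        builder.getContext(),
        mlir::stablehlo::CustomCallApiVersion::API_VERSION_ORIGINAL),
    /*called_computations=*/builder.getArrayAttr({}),
    // Null ArrayAttr leaves the optional layout attributes unspecified.
    /*operand_layouts=*/mlir::ArrayAttr(),
    /*result_layouts=*/mlir::ArrayAttr());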

def StableHLO_DotOp: StableHLO_Op<"dot",
45 changes: 23 additions & 22 deletions stablehlo/integrations/python/tests/stablehlo.py
@@ -35,20 +35,6 @@ def test_channel_handle():
  assert attr.handle == 1
  assert attr.channel_type == 2

@run
def test_output_operand_alias():
  attr = stablehlo.OutputOperandAlias.get(
      output_tuple_indices=[0],
      operand_index=0,
      operand_tuple_indices=[1])
  assert attr is not None
  assert str(attr) == ("#stablehlo.output_operand_alias<output_tuple_indices = [0], "
                       "operand_index = 0, "
                       "operand_tuple_indices = [1]>")
  assert attr.output_tuple_indices == [0]
  assert attr.operand_index == 0
  assert attr.operand_tuple_indices == [1]

@run
def test_comparison_direction_attr():
  attr = stablehlo.ComparisonDirectionAttr.get("EQ")
@@ -135,6 +121,29 @@ def test_gather_dimension_numbers():
  assert attr.index_vector_dim == 7


@run
def test_output_operand_alias():
  attr = stablehlo.OutputOperandAlias.get(
      output_tuple_indices=[0],
      operand_index=0,
      operand_tuple_indices=[1])
  assert attr is not None
  assert str(attr) == ("#stablehlo.output_operand_alias<output_tuple_indices = [0], "
                       "operand_index = 0, "
                       "operand_tuple_indices = [1]>")
  assert attr.output_tuple_indices == [0]
  assert attr.operand_index == 0
  assert attr.operand_tuple_indices == [1]


@run
def test_precision_attr():
  attr = stablehlo.PrecisionAttr.get("DEFAULT")
  assert attr is not None
  assert str(attr) == ("#stablehlo<precision DEFAULT>")
  assert attr.value == "DEFAULT"


@run
def test_rng_algorithm_attr():
  attr = stablehlo.RngAlgorithmAttr.get("DEFAULT")
@@ -151,14 +160,6 @@ def test_rng_distribution_attr():
  assert attr.value == "UNIFORM"


@run
def test_precision_attr():
  attr = stablehlo.PrecisionAttr.get("DEFAULT")
  assert attr is not None
  assert str(attr) == ("#stablehlo<precision DEFAULT>")
  assert attr.value == "DEFAULT"


@run
def test_scatter_dimension_numbers():
  attr = stablehlo.ScatterDimensionNumbers.get(
215 changes: 0 additions & 215 deletions stablehlo/tests/ops_stablehlo.mlir
@@ -3802,221 +3802,6 @@ func.func @custom_call_output_operand_alias(%arg0: tuple<tensor<1x1xf32>, tensor
func.return
}

// -----
// CHECK: func @conv2d_generic
// CHECK: stablehlo.convolution
// CHECK-SAME: dim_numbers = [b, 0, 1, ?, f]x[0, 1, ?, i, o]->[?, b, 0, 1, f]
// CHECK-SAME{LITERAL}: window = {stride = [1, 1], pad = [[1, 1], [1, 1]], lhs_dilate = [1, 1], rhs_dilate = [1, 1]}
func.func @conv2d_generic(%arg0: tensor<1x8x8x32x207xf32>, %arg1: tensor<3x3x32x207x16xf32>) -> tensor<32x1x8x8x16xf32> {
%0 = "stablehlo.convolution"(%arg0, %arg1) {batch_group_count = 1 : i64,
dimension_numbers = #stablehlo.conv<raw
input_batch_dimension = 0,
input_feature_dimension = 4,
input_spatial_dimensions = [1, 2],
kernel_input_feature_dimension = 3,
kernel_output_feature_dimension = 4,
kernel_spatial_dimensions = [0, 1],
output_batch_dimension = 1,
output_feature_dimension = 4,
output_spatial_dimensions = [2, 3]
>, feature_group_count = 1 : i64, lhs_dilation = dense<1> : tensor<2xi64>, padding = dense<1> : tensor<2x2xi64>, precision_config = [#stablehlo<precision DEFAULT>, #stablehlo<precision DEFAULT>], rhs_dilation = dense<1> : tensor<2xi64>, window_strides = dense<1> : tensor<2xi64>} :
(tensor<1x8x8x32x207xf32>, tensor<3x3x32x207x16xf32>) -> tensor<32x1x8x8x16xf32>
func.return %0 : tensor<32x1x8x8x16xf32>
}

// CHECK: func @conv2d
// CHECK: stablehlo.convolution
// CHECK-SAME: dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f]
// CHECK-SAME{LITERAL}: window = {stride = [1, 1], pad = [[1, 1], [1, 1]], lhs_dilate = [1, 1], rhs_dilate = [1, 1]}
func.func @conv2d(%arg0: tensor<1x8x8x207xf32>, %arg1: tensor<3x3x207x16xf32>) -> tensor<1x8x8x16xf32> {
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stride = [1, 1], pad = [[1, 1], [1, 1]], lhs_dilate = [1, 1], rhs_dilate = [1, 1]}
{batch_group_count = 1 : i64, feature_group_count = 1 : i64, precision_config = [#stablehlo<precision DEFAULT>, #stablehlo<precision DEFAULT>]} :
(tensor<1x8x8x207xf32>, tensor<3x3x207x16xf32>) -> tensor<1x8x8x16xf32>
func.return %0 : tensor<1x8x8x16xf32>
}

// CHECK: func @conv_empty_spatial_dimensions
// CHECK: stablehlo.convolution
// CHECK-SAME: dim_numbers = [b, f]x[i, o]->[b, f]
// CHECK-SAME{LITERAL}: window = {stride = [], pad = [], lhs_dilate = [], rhs_dilate = [], reverse = []}
func.func @conv_empty_spatial_dimensions(%arg0: tensor<3x2xf16>, %arg1: tensor<2x2xf16>) -> tuple<tensor<3x2xf16>> {
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, f]x[i, o]->[b, f],
window = {stride = [], pad = [], lhs_dilate = [], rhs_dilate = [], reverse = []}
{batch_group_count = 1 : i64, feature_group_count = 1 : i64, precision_config = [#stablehlo<precision DEFAULT>, #stablehlo<precision DEFAULT>]}
: (tensor<3x2xf16>, tensor<2x2xf16>) -> tensor<3x2xf16>
%1 = "stablehlo.tuple"(%0) : (tensor<3x2xf16>) -> tuple<tensor<3x2xf16>>
func.return %1 : tuple<tensor<3x2xf16>>
}
// -----

func.func @conv2d(%arg0: tensor<1x8x8x207xf32>, %arg1: tensor<3x3x207x16xf32>) -> tensor<1x8x8x16xf32> {
// expected-error @+3 {{'stablehlo.convolution' Expected array with 2 elements, got 3 elements instead}}
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stride = [1, 1], pad = [[1, 1, 1], [1, 1, 1]], lhs_dilate = [1, 1], rhs_dilate = [1, 1]}
{batch_group_count = 1 : i64, feature_group_count = 1 : i64, precision_config = [#stablehlo<precision DEFAULT>, #stablehlo<precision DEFAULT>]} :
(tensor<1x8x8x207xf32>, tensor<3x3x207x16xf32>) -> tensor<1x8x8x16xf32>
func.return %0 : tensor<1x8x8x16xf32>
}

// -----

// CHECK: module
// CHECK-SAME: stablehlo.conv = #stablehlo.conv<[b, 0, 1, f]x[0, 1, i, o]->[b, 1, 0, f]>
module attributes { stablehlo.conv = #stablehlo.conv<raw
input_batch_dimension = 0,
input_feature_dimension = 3,
input_spatial_dimensions = [1, 2],
kernel_input_feature_dimension = 2,
kernel_output_feature_dimension = 3,
kernel_spatial_dimensions = [0, 1],
output_batch_dimension = 0,
output_feature_dimension = 3,
output_spatial_dimensions = [2, 1]>} {}

// -----

// CHECK-LABEL: func @convolution
// CHECK: stablehlo.convolution
// CHECK-SAME: dim_numbers = [b, 1, 0, f]x[0, 1, i, o]->[b, 0, 1, f]
// CHECK-SAME{LITERAL}: window = {stride = [2, 1], pad = [[0, 1], [0, 1]], rhs_dilate = [1, 2]}
func.func @convolution(%arg0: tensor<2x2x3x4xf32>, %arg1: tensor<3x2x4x3xf32>) -> tensor<2x1x1x3xf32> {
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 1, 0, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stride = [2, 1], pad = [[0, 1], [0, 1]], rhs_dilate = [1, 2]}
{ batch_group_count = 1 : i64, feature_group_count = 1 : i64}
: (tensor<2x2x3x4xf32>, tensor<3x2x4x3xf32>) -> tensor<2x1x1x3xf32>
func.return %0 : tensor<2x1x1x3xf32>
}

// -----

// CHECK: module
// CHECK: stablehlo.conv = #stablehlo.conv<[b, 1, 0, f]x[0, 1, i, o]->[b, 0, 1, f]>
module attributes {
stablehlo.conv = #stablehlo.conv<[b, 1, 0, f]x[0, 1, i, o]->[b, 0, 1, f]>
} {}

// -----

// CHECK: module
// CHECK: stablehlo.conv = #stablehlo.conv<[b, 1, 0, ?, f]x[?, 0, 1, i, o]->[b, ?, 0, 1, f]>
module attributes {
stablehlo.conv = #stablehlo.conv<[b, 1, 0, ?, f]x[?, 0, 1, i, o]->[b, ?, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{Unexpected dimension c, expecting b, f}}
stablehlo.conv = #stablehlo.conv<[c, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{Unexpected dimension b, expecting i, o}}
stablehlo.conv = #stablehlo.conv<[b, 0, 1, f]x[0, 1, b, o]->[b, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{Unexpected dimension i, expecting o}}
stablehlo.conv = #stablehlo.conv<[b, 0, 1, f]x[0, 1, i, i]->[b, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{Expected dimensions f not specified}}
stablehlo.conv = #stablehlo.conv<[b, 0, 1]x[0, 1, i, o]->[b, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{Unexpected keyword b}}
stablehlo.conv = #stablehlo.conv<[b, 0, 1, f]x[0, 1, i, o, b]->[b, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{expected '['}}
stablehlo.conv = #stablehlo.conv<{b, 0, 1, f}x[0, 1, i, o]->[b, 0, 1, f]>
} {}

// -----

module attributes {
// expected-error@+1{{Expected spatial dimensions 0 not specified}}
stablehlo.conv = #stablehlo.conv<[b, f, 1]x[o, 0, 1, i]->[f, b, 0, 1]>
} {}

// -----

module attributes {
// expected-error@+1{{Duplicate entries for spatial dimension 1}}
stablehlo.conv = #stablehlo.conv<[b, f, 1, 0, 1]x[o, 0, 1, i]->[f, b, 0, 1]>
} {}

// -----

module attributes {
// expected-error@+1{{Unexpected dimension -2}}
stablehlo.conv = #stablehlo.conv<[b, f, 1, -2]x[o, 0, 1, i]->[f, b, 0, 1]>
} {}

// -----

func.func @convolution(%arg0: tensor<2x2x3x4xf32>, %arg1: tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32> {
// expected-error@+3{{Expected array with 2 elements, got 3 elements instead}}
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stride = [2, 1], pad = [[0, 1, 2], [0, 1]], rhs_dilate = [1, 2]}
{ batch_group_count = 1 : i64, feature_group_count = 1 : i64}
: (tensor<2x2x3x4xf32>, tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32>
func.return %0 : tensor<3x5x5x4xf32>
}

// -----

func.func @convolution(%arg0: tensor<2x2x3x4xf32>, %arg1: tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32> {
// expected-error@+3{{Unexpected keyword stide}}
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stide = [2, 1], pad = [[0, 1], [0, 1]], rhs_dilate = [1, 2]}
{ batch_group_count = 1 : i64, feature_group_count = 1 : i64}
: (tensor<2x2x3x4xf32>, tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32>
func.return %0 : tensor<3x5x5x4xf32>
}
// -----

func.func @convolution(%arg0: tensor<2x2x3x4xf32>, %arg1: tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32> {
// expected-error@+3{{expected integer value}}
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stride = [2, b], pad = [[0, 1], [0, 1]], rhs_dilate = [1, 2]}
{ batch_group_count = 1 : i64, feature_group_count = 1 : i64}
: (tensor<2x2x3x4xf32>, tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32>
func.return %0 : tensor<3x5x5x4xf32>
}
// -----

func.func @convolution(%arg0: tensor<2x2x3x4xf32>, %arg1: tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32> {
// expected-error@+3{{Unexpected keyword stride}}
%0 = stablehlo.convolution(%arg0, %arg1)
dim_numbers = [b, 0, 1, f]x[0, 1, i, o]->[b, 0, 1, f],
window = {stride = [2, 1], pad = [[0, 1], [0, 1]], rhs_dilate = [1, 2], stride=[2,1]}
{ batch_group_count = 1 : i64, feature_group_count = 1 : i64}
: (tensor<2x2x3x4xf32>, tensor<3x5x5x3xf32>) -> tensor<3x5x5x4xf32>
func.return %0 : tensor<3x5x5x4xf32>
}

// -----

// Test custom attribute printing/parsing.