Commit 46e4eff

C# coding style regarding the use of var keyword. (#417)

imback82 authored Feb 6, 2020
1 parent 5f53809 · commit 46e4eff

Showing 38 changed files with 171 additions and 149 deletions.
24 changes: 24 additions & 0 deletions docs/coding-guidelines/csharp-coding-style.md

@@ -3,6 +3,30 @@ C# Coding Style

 We use the same [coding style](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md) and [EditorConfig](https://editorconfig.org "EditorConfig homepage") file (`.editorconfig`) used by [dotnet/corefx](https://github.com/dotnet/corefx) with the following differences:

+* **`var` must be used when the `new`, `as`, or cast operator is used (and it can be used only with these operators).**
+
+  ```C#
+  var foo = new Foo();                     // OK
+  Foo foo = new Foo();                     // NOT OK
+  var bar = foo as Bar;                    // OK
+  Bar bar = foo as Bar;                    // NOT OK
+  var bar = (Bar)foo;                      // OK
+  Bar bar = (Bar)foo;                      // NOT OK
+  string str = "hello";                    // OK
+  var str = "hello";                       // NOT OK
+  int i = 0;                               // OK
+  var i = 0;                               // NOT OK
+  var arr = new string[] { "abc", "def" }; // OK
+  string[] arr = new[] { "abc", "def" };   // NOT OK
+  var arr = new[] { "abc", "def" };        // NOT OK
+  string str = foo.GetString();            // OK (function name shouldn't matter)
+  var str = foo.GetString();               // NOT OK
+  ```

+* **A single-line statement block must go with braces.**
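The example block for this second rule is cut off in this view. As a hedged reconstruction only, in the style of the rule above and not the repository's own snippet (`ProcessArgs` is a placeholder call):

```C#
// OK: braces are used even though the block is a single statement.
if (args.Length == 0)
{
    ProcessArgs();
}

// NOT OK: single-line statement block without braces.
if (args.Length == 0)
    ProcessArgs();
```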
(file name not shown in this view)

@@ -90,7 +90,7 @@ public void Run(string[] args)

     // UDF return type as array.
     Func<Column, Column> udfArray =
-        Udf<string, string[]>((str) => new string[] { str, str + str });
+        Udf<string, string[]>((str) => new[] { str, str + str });
     df.Select(Explode(udfArray(df["name"]))).Show();

     // UDF return type as map.
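A note on reading this hunk against the new rule (our gloss, not part of the commit): the "NOT OK" case `var arr = new[] { ... }` concerns variable declarations, where an implicitly typed array would leave the type unstated on both sides. Here no variable is declared; the `Udf<string, string[]>` type arguments already fix the lambda's return type, so the shorter `new[]` loses no information. A minimal sketch of the same idea with a plain delegate:

```C#
using System;

class ArrayInferenceSketch
{
    static void Main()
    {
        // The Func<string, string[]> type pins the lambda's return type,
        // so the element type of `new[]` is implied by context.
        Func<string, string[]> duplicate = (str) => new[] { str, str + str };

        // In a local declaration, by contrast, the guideline wants the type
        // visible on one side of the assignment.
        var arr = new string[] { "abc", "def" }; // OK per the guideline

        Console.WriteLine(string.Join(",", duplicate("ab"))); // ab,abab
        Console.WriteLine(string.Join(",", arr));             // abc,def
    }
}
```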
(file name not shown in this view)

@@ -39,7 +39,7 @@ public void Run(string[] args)
     .GetOrCreate();

 // Read input log file and display it
-var df = spark.Read().Text(args[0]);
+DataFrame df = spark.Read().Text(args[0]);
 df.Show();

 // Step 1: UDF to determine if each line is a valid log entry
(file name not shown in this view)

@@ -22,7 +22,7 @@ public void Run(string[] args)
 {
     // Default to running on localhost:9999
     string hostname = "localhost";
-    var port = 9999;
+    int port = 9999;

     // User designated their own host and port
     if (args.Length == 2)
(file name not shown in this view)

@@ -28,7 +28,7 @@ public void Run(string[] args)
 }

 string hostname = args[0];
-var port = int.Parse(args[1]);
+int port = int.Parse(args[1]);

 SparkSession spark = SparkSession
     .Builder()
(file name not shown in this view)

@@ -30,17 +30,17 @@ public void Run(string[] args)
 }

 string hostname = args[0];
-var port = int.Parse(args[1]);
-var windowSize = int.Parse(args[2]);
-var slideSize = (args.Length == 3) ? windowSize : int.Parse(args[3]);
+int port = int.Parse(args[1]);
+int windowSize = int.Parse(args[2]);
+int slideSize = (args.Length == 3) ? windowSize : int.Parse(args[3]);
 if (slideSize > windowSize)
 {
     Console.Error.WriteLine(
         "<slide duration> must be less than or equal " +
         "to <window duration>");
 }
-var windowDuration = $"{windowSize} seconds";
-var slideDuration = $"{slideSize} seconds";
+string windowDuration = $"{windowSize} seconds";
+string slideDuration = $"{slideSize} seconds";

 SparkSession spark = SparkSession
     .Builder()
(file name not shown in this view)

@@ -47,7 +47,7 @@ public void TestTutorialScenario()
 data.Write().Format("delta").Mode("overwrite").Save(path);

 // Load the data into a DeltaTable object.
-var deltaTable = DeltaTable.ForPath(path);
+DeltaTable deltaTable = DeltaTable.ForPath(path);

 // Validate that deltaTable contains the sequence [5 ... 9].
 ValidateRangeDataFrame(Enumerable.Range(5, 5), deltaTable.ToDF());
(file name not shown in this view)

@@ -2,9 +2,7 @@
 // The .NET Foundation licenses this file to you under the MIT license.
 // See the LICENSE file in the project root for more information.

-using System;
 using System.Collections.Generic;
-using System.Security.Cryptography;
 using Microsoft.Spark.ML.Feature;
 using Microsoft.Spark.Sql;
 using Xunit;

@@ -24,15 +22,15 @@ public BucketizerTests(SparkFixture fixture)
 [Fact]
 public void TestBucketizer()
 {
-    var expectedSplits = new double[] {Double.MinValue, 0.0, 10.0, 50.0, Double.MaxValue};
-
-    var expectedHandle = "skip";
-    var expectedUid = "uid";
-    var expectedInputCol = "input_col";
-    var expectedOutputCol = "output_col";
-    var bucketizer = new Bucketizer(expectedUid)
-        .SetInputCol(expectedInputCol)
+    var expectedSplits = new double[] { double.MinValue, 0.0, 10.0, 50.0, double.MaxValue };
+
+    string expectedHandle = "skip";
+    string expectedUid = "uid";
+    string expectedInputCol = "input_col";
+    string expectedOutputCol = "output_col";
+
+    var bucketizer = new Bucketizer(expectedUid);
+    bucketizer.SetInputCol(expectedInputCol)
         .SetOutputCol(expectedOutputCol)
         .SetHandleInvalid(expectedHandle)
         .SetSplits(expectedSplits);

@@ -54,19 +52,19 @@ public void TestBucketizer()
 [Fact]
 public void TestBucketizer_MultipleColumns()
 {
-    double[][] expectedSplitsArray = new[]
+    var expectedSplitsArray = new double[][]
     {
-        new[] {Double.MinValue, 0.0, 10.0, 50.0, Double.MaxValue},
-        new[] {Double.MinValue, 0.0, 10000.0, Double.MaxValue}
+        new[] { double.MinValue, 0.0, 10.0, 50.0, double.MaxValue },
+        new[] { double.MinValue, 0.0, 10000.0, double.MaxValue }
     };

-    var expectedHandle = "keep";
+    string expectedHandle = "keep";

-    var expectedInputCols = new List<string>() {"input_col_a", "input_col_b"};
-    var expectedOutputCols = new List<string>() {"output_col_a", "output_col_b"};
-    var bucketizer = new Bucketizer()
-        .SetInputCols(expectedInputCols)
+    var expectedInputCols = new List<string>() { "input_col_a", "input_col_b" };
+    var expectedOutputCols = new List<string>() { "output_col_a", "output_col_b" };
+
+    var bucketizer = new Bucketizer();
+    bucketizer.SetInputCols(expectedInputCols)
         .SetOutputCols(expectedOutputCols)
         .SetHandleInvalid(expectedHandle)
         .SetSplitsArray(expectedSplitsArray);

@@ -79,7 +77,7 @@ public void TestBucketizer_MultipleColumns()
 DataFrame output = bucketizer.Transform(input);
 Assert.Contains(output.Schema().Fields, (f => f.Name == "output_col_a"));
 Assert.Contains(output.Schema().Fields, (f => f.Name == "output_col_b"));
-
 Assert.Equal(expectedInputCols, bucketizer.GetInputCols());
 Assert.Equal(expectedOutputCols, bucketizer.GetOutputCols());
 Assert.Equal(expectedSplitsArray, bucketizer.GetSplitsArray());
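Why these hunks split the fluent chains (our reading; the commit message does not spell it out): in `var bucketizer = new Bucketizer(expectedUid).SetInputCol(...)`, the declaration's value comes from the last setter call rather than directly from `new`, so the new rule would demand an explicit type. Binding `var` to the bare `new` expression and chaining afterwards keeps the declaration compliant. A sketch with a hypothetical fluent class:

```C#
// Hypothetical fluent class, for illustration only.
class Settings
{
    public Settings SetHost(string host) { return this; }
    public Settings SetPort(int port) { return this; }
}

class FluentSketch
{
    static void Main()
    {
        // NOT OK under the rule: the initializer's value comes from the
        // trailing method call, not directly from `new`:
        // var s = new Settings().SetHost("localhost").SetPort(9999);

        // OK: `var` is bound directly to `new`; the fluent chain follows.
        var s = new Settings();
        s.SetHost("localhost").SetPort(9999);
    }
}
```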
6 changes: 3 additions & 3 deletions src/csharp/Microsoft.Spark.E2ETest/IpcTests/RDDTests.cs

@@ -25,7 +25,7 @@ public void TestParallelize()
     Assert.Equal(new[] { 0, 1, 2, 3, 4 }, rdd.Collect());
 }
 {
-    var strs = new[] { "hello", "spark", "for", "dotnet" };
+    var strs = new string[] { "hello", "spark", "for", "dotnet" };
     RDD<string> rdd = _sc.Parallelize(strs);
     Assert.Equal(strs, rdd.Collect());
 }

@@ -35,7 +35,7 @@ public void TestTextFile()
 public void TestTextFile()
 {
     RDD<string> rdd = _sc.TextFile($"{TestEnvironment.ResourceDirectory}people.txt");
-    var strs = new[] { "Michael, 29", "Andy, 30", "Justin, 19" };
+    var strs = new string[] { "Michael, 29", "Andy, 30", "Justin, 19" };
     Assert.Equal(strs, rdd.Collect());

     // Test a transformation so that SerializedMode is correctly propagated.

@@ -107,7 +107,7 @@ public void TestSample()
 RDD<int> rdd = _sc.Parallelize(Enumerable.Range(0, 10))
     .Sample(true, 0.9, 0);

-var count = rdd.Collect().Count();
+int count = rdd.Collect().Count();
 Assert.True(count > 0);
 Assert.True(count <= 10);
 }
(file name not shown in this view)

@@ -85,7 +85,7 @@ public void TestDataFrameStatFunctionSignatures()
 double corr = stat.Corr("age", "age", "pearson");
 corr = stat.Corr("age", "age");

-var columnNames = new[] { "age", "name" };
+var columnNames = new string[] { "age", "name" };
 DataFrame df = stat.FreqItems(columnNames, 0.2);
 df = stat.FreqItems(columnNames);
(file name not shown in this view)

@@ -93,7 +93,7 @@ public void TestCreateDataFrame()
 // Calling CreateDataFrame(IEnumerable<string> _) without schema
 {
     var data = new List<string>(new string[] { "Alice", "Bob" });
-    var schema = SchemaWithSingleColumn(new StringType());
+    StructType schema = SchemaWithSingleColumn(new StringType());

     DataFrame df = _spark.CreateDataFrame(data);
     ValidateDataFrame(df, data.Select(a => new object[] { a }), schema);

@@ -102,7 +102,7 @@ public void TestCreateDataFrame()
 // Calling CreateDataFrame(IEnumerable<int> _) without schema
 {
     var data = new List<int>(new int[] { 1, 2 });
-    var schema = SchemaWithSingleColumn(new IntegerType());
+    StructType schema = SchemaWithSingleColumn(new IntegerType());

     DataFrame df = _spark.CreateDataFrame(data);
     ValidateDataFrame(df, data.Select(a => new object[] { a }), schema);

@@ -111,7 +111,7 @@ public void TestCreateDataFrame()
 // Calling CreateDataFrame(IEnumerable<double> _) without schema
 {
     var data = new List<double>(new double[] { 1.2, 2.3 });
-    var schema = SchemaWithSingleColumn(new DoubleType());
+    StructType schema = SchemaWithSingleColumn(new DoubleType());

     DataFrame df = _spark.CreateDataFrame(data);
     ValidateDataFrame(df, data.Select(a => new object[] { a }), schema);

@@ -120,7 +120,7 @@ public void TestCreateDataFrame()
 // Calling CreateDataFrame(IEnumerable<bool> _) without schema
 {
     var data = new List<bool>(new bool[] { true, false });
-    var schema = SchemaWithSingleColumn(new BooleanType());
+    StructType schema = SchemaWithSingleColumn(new BooleanType());

     DataFrame df = _spark.CreateDataFrame(data);
     ValidateDataFrame(df, data.Select(a => new object[] { a }), schema);
(file name not shown in this view)

@@ -49,7 +49,7 @@ public void TestUdfWithArrayType()
 Row[] rows = _df.Select(workingUdf(_df["ids"])).Collect().ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] { "1", "3,5", "2,4" };
+var expected = new string[] { "1", "3,5", "2,4" };
 string[] rowsToArray = rows.Select(x => x[0].ToString()).ToArray();
 Assert.Equal(expected, rowsToArray);
 }

@@ -101,7 +101,7 @@ public void TestUdfWithMapType()
 Row[] rows = df.Select(workingUdf(df["NameIdsMap"])).Collect().ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] { "1", "1", "1" };
+var expected = new string[] { "1", "1", "1" };
 string[] actual = rows.Select(x => x[0].ToString()).ToArray();
 Assert.Equal(expected, actual);
 }

@@ -144,7 +144,7 @@ public void TestUdfWithRowType()
 Row[] rows = _df.Select(udf(_df["info1"])).Collect().ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] { "Burdwan", "Los Angeles", "Seattle" };
+var expected = new string[] { "Burdwan", "Los Angeles", "Seattle" };
 string[] actual = rows.Select(x => x[0].ToString()).ToArray();
 Assert.Equal(expected, actual);
 }

@@ -165,7 +165,7 @@ public void TestUdfWithRowType()
     .ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] {
+var expected = new string[] {
     "Michael:Burdwan,Paschimbanga",
     "Andy:Los Angeles,California",
     "Justin:Seattle,Washington" };

@@ -185,7 +185,7 @@ public void TestUdfWithRowType()
 Row[] rows = _df.Select(udf(_df["info3"])).Collect().ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] { "Developer", "Developer", "Developer" };
+var expected = new string[] { "Developer", "Developer", "Developer" };
 string[] actual = rows.Select(x => x[0].ToString()).ToArray();
 Assert.Equal(expected, actual);
 }
4 changes: 2 additions & 2 deletions src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfSerDeTests.cs

@@ -39,7 +39,7 @@ public void TestUdfClosure()
 Row[] rows = _df.Select(udf(_df["name"])).Collect().ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] { "HelloMichael", "HelloAndy", "HelloJustin" };
+var expected = new string[] { "HelloMichael", "HelloAndy", "HelloJustin" };
 for (int i = 0; i < rows.Length; ++i)
 {
     Row row = rows[i];

@@ -81,7 +81,7 @@ public void TestInitExternalClassInUdf()
 Row[] rows = _df.Select(udf(_df["name"])).Collect().ToArray();
 Assert.Equal(3, rows.Length);

-var expected = new[] { "HelloMichael", "HelloAndy", "HelloJustin" };
+var expected = new string[] { "HelloMichael", "HelloAndy", "HelloJustin" };
 for (int i = 0; i < rows.Length; ++i)
 {
     Row row = rows[i];
28 changes: 14 additions & 14 deletions src/csharp/Microsoft.Spark.UnitTest/CommandSerDeTests.cs

@@ -18,18 +18,18 @@ public class CommandSerDeTests
 [Fact]
 public void TestCommandSerDeForSqlPickling()
 {
-    var udfWrapper = new Sql.PicklingUdfWrapper<string, string>((str) => $"hello {str}");
-    var workerFunction = new Sql.PicklingWorkerFunction(udfWrapper.Execute);
+    var udfWrapper = new PicklingUdfWrapper<string, string>((str) => $"hello {str}");
+    var workerFunction = new PicklingWorkerFunction(udfWrapper.Execute);

-    var serializedCommand = Utils.CommandSerDe.Serialize(
+    byte[] serializedCommand = Utils.CommandSerDe.Serialize(
         workerFunction.Func,
         Utils.CommandSerDe.SerializedMode.Row,
         Utils.CommandSerDe.SerializedMode.Row);

     using (var ms = new MemoryStream(serializedCommand))
     {
-        var deserializedWorkerFunction = new Sql.PicklingWorkerFunction(
-            Utils.CommandSerDe.Deserialize<Sql.PicklingWorkerFunction.ExecuteDelegate>(
+        var deserializedWorkerFunction = new PicklingWorkerFunction(
+            Utils.CommandSerDe.Deserialize<PicklingWorkerFunction.ExecuteDelegate>(
                 ms,
                 out Utils.CommandSerDe.SerializedMode serializerMode,
                 out Utils.CommandSerDe.SerializedMode deserializerMode,

@@ -39,23 +39,23 @@ public void TestCommandSerDeForSqlPickling()
         Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
         Assert.Equal("N", runMode);

-        var result = deserializedWorkerFunction.Func(0, new[] { "spark" }, new[] { 0 });
+        object result = deserializedWorkerFunction.Func(0, new[] { "spark" }, new[] { 0 });
         Assert.Equal("hello spark", result);
     }
 }

 [Fact]
 public void TestCommandSerDeForSqlArrow()
 {
-    var udfWrapper = new Sql.ArrowUdfWrapper<StringArray, StringArray>(
+    var udfWrapper = new ArrowUdfWrapper<StringArray, StringArray>(
         (strings) => (StringArray)ToArrowArray(
             Enumerable.Range(0, strings.Length)
                 .Select(i => $"hello {strings.GetString(i)}")
                 .ToArray()));

     var workerFunction = new ArrowWorkerFunction(udfWrapper.Execute);

-    var serializedCommand = Utils.CommandSerDe.Serialize(
+    byte[] serializedCommand = Utils.CommandSerDe.Serialize(
         workerFunction.Func,
         Utils.CommandSerDe.SerializedMode.Row,
         Utils.CommandSerDe.SerializedMode.Row);

@@ -73,10 +73,10 @@ public void TestCommandSerDeForSqlArrow()
         Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode);
         Assert.Equal("N", runMode);

-        Apache.Arrow.IArrowArray input = ToArrowArray(new[] { "spark" });
-        Apache.Arrow.IArrowArray result =
+        IArrowArray input = ToArrowArray(new[] { "spark" });
+        IArrowArray result =
             deserializedWorkerFunction.Func(new[] { input }, new[] { 0 });
-        ArrowTestUtils.AssertEquals("hello spark", result);
+        AssertEquals("hello spark", result);
     }
 }

@@ -94,10 +94,10 @@ public void TestCommandSerDeForRDD()
     var func3 = new RDD.WorkerFunction(
         new RDD<int>.MapUdfWrapper<int, int>((a) => a + 5).Execute);

-    var chainedFunc1 = RDD.WorkerFunction.Chain(func1, func2);
-    var chainedFunc2 = RDD.WorkerFunction.Chain(chainedFunc1, func3);
+    RDD.WorkerFunction chainedFunc1 = RDD.WorkerFunction.Chain(func1, func2);
+    RDD.WorkerFunction chainedFunc2 = RDD.WorkerFunction.Chain(chainedFunc1, func3);

-    var serializedCommand = Utils.CommandSerDe.Serialize(
+    byte[] serializedCommand = Utils.CommandSerDe.Serialize(
         chainedFunc2.Func,
         Utils.CommandSerDe.SerializedMode.Byte,
         Utils.CommandSerDe.SerializedMode.Byte);
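A side note on this file, hedged because the using directives sit outside the visible hunks: dropping the `Sql.` prefixes (`Sql.PicklingWorkerFunction` → `PicklingWorkerFunction`), the `Apache.Arrow.` prefix on `IArrowArray`, and the `ArrowTestUtils.` qualifier on `AssertEquals` all point to ordinary and `using static` directives added or already present at the top of the file; note that `ToArrowArray` was already unqualified before this change.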
[… remaining changed files not loaded in this view …]
