From 46e4effc06c8dd52d9c7cb99f6c9a0377a3b047b Mon Sep 17 00:00:00 2001
From: Terry Kim
Date: Thu, 6 Feb 2020 11:51:35 -0800
Subject: [PATCH] C# coding style regarding the use of the var keyword. (#417)

---
 docs/coding-guidelines/csharp-coding-style.md | 24 +++++++++++
 .../Sql/Batch/Basic.cs                        |  2 +-
 .../Sql/Batch/Logging.cs                      |  2 +-
 .../StructuredNetworkCharacterCount.cs        |  2 +-
 .../Streaming/StructuredNetworkWordCount.cs   |  2 +-
 .../StructuredNetworkWordCountWindowed.cs     | 10 ++---
 .../DeltaTableTests.cs                        |  2 +-
 .../IpcTests/ML/Feature/BucketizerTests.cs    | 40 +++++++++----------
 .../IpcTests/RDDTests.cs                      |  6 +--
 .../IpcTests/Sql/DataFrameFunctionsTests.cs   |  2 +-
 .../IpcTests/Sql/SparkSessionTests.cs         |  8 ++--
 .../UdfTests/UdfComplexTypesTests.cs          | 10 ++---
 .../UdfTests/UdfSerDeTests.cs                 |  4 +-
 .../CommandSerDeTests.cs                      | 28 ++++++-------
 .../Sql/ColumnTests.cs                        | 12 +++---
 .../Microsoft.Spark.UnitTest/Sql/RowTests.cs  |  8 ++--
 .../Sql/TypesTests.cs                         |  2 +-
 .../TestUtils/ArrowTestUtils.cs               |  2 +-
 .../WorkerFunctionTests.cs                    | 20 +++++-----
 .../CommandExecutorTests.cs                   |  6 +--
 .../TaskRunnerTests.cs                        | 26 ++++++------
 .../Command/RDDCommandExecutor.cs             |  2 +-
 .../Microsoft.Spark.Worker/DaemonWorker.cs    |  6 +--
 .../Processor/BroadcastVariableProcessor.cs   |  2 +-
 .../Microsoft.Spark.Worker/TaskRunner.cs      |  2 +-
 .../Interop/Ipc/PayloadHelper.cs              |  4 +-
 .../Microsoft.Spark/Interop/Ipc/SerDe.cs      | 30 +++++++-------
 .../Network/DefaultSocketWrapper.cs           |  2 +-
 src/csharp/Microsoft.Spark/RDD.cs             |  8 ++--
 src/csharp/Microsoft.Spark/SparkConf.cs       |  2 +-
 .../Sql/RelationalGroupedDataset.cs           |  2 +-
 src/csharp/Microsoft.Spark/Sql/Row.cs         |  2 +-
 .../Microsoft.Spark/Sql/UDFRegistration.cs    |  2 +-
 .../Microsoft.Spark/Utils/AssemblyLoader.cs   |  4 +-
 .../Microsoft.Spark/Utils/Authenticator.cs    |  2 +-
 .../Microsoft.Spark/Utils/CommandSerDe.cs     | 24 +++++------
 .../Microsoft.Spark/Utils/EnvironmentUtils.cs |  2 +-
 src/csharp/Microsoft.Spark/Utils/UdfSerDe.cs  |  6 +--
 38 files changed, 171 insertions(+), 149 deletions(-)

diff --git a/docs/coding-guidelines/csharp-coding-style.md b/docs/coding-guidelines/csharp-coding-style.md
index 614370b7a..16ed69cdc 100644
--- a/docs/coding-guidelines/csharp-coding-style.md
+++ b/docs/coding-guidelines/csharp-coding-style.md
@@ -3,6 +3,30 @@ C# Coding Style
 
 We use the same [coding style](https://github.com/dotnet/corefx/blob/master/Documentation/coding-guidelines/coding-style.md) and [EditorConfig](https://editorconfig.org "EditorConfig homepage") file (`.editorconfig`) used by [dotnet/corefx](https://github.com/dotnet/corefx) with the following differences:
 
+* **`var` must be used when the `new`, `as`, or cast operator is used (and it can be used only with these operators).**
+  ```C#
+  var foo = new Foo(); // OK
+  Foo foo = new Foo(); // NOT OK
+
+  var bar = foo as Bar; // OK
+  Bar bar = foo as Bar; // NOT OK
+
+  var bar = (Bar)foo; // OK
+  Bar bar = (Bar)foo; // NOT OK
+
+  string str = "hello"; // OK
+  var str = "hello"; // NOT OK
+  int i = 0; // OK
+  var i = 0; // NOT OK
+
+  var arr = new string[] { "abc", "def" }; // OK
+  string[] arr = new[] { "abc", "def" }; // NOT OK
+  var arr = new[] { "abc", "def" }; // NOT OK
+
+  string str = foo.GetString(); // Function name shouldn't matter.
+  var str = foo.GetString(); // NOT OK
+  ```
+
 * **A single line statement block must go with braces.**
   ```C#
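To make the rule easy to scan before the mechanical diffs that follow, here is a one-screen illustration of it (editor's sketch, not part of the patch; `Reader`, `Load`, and `Fetch` are hypothetical names — only the shape matters):

```C#
var reader = new Reader();               // OK: 'new' names the type on the right-hand side.
var frame = reader.Load() as DataFrame;  // OK: 'as' names the type.
var rows = (Row[])reader.Fetch();        // OK: the cast names the type.
DataFrame df = reader.Load();            // OK: the RHS doesn't name the type, so spell it out.
var df2 = reader.Load();                 // NOT OK: the declared type is invisible at the call site.
```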
diff --git a/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Basic.cs b/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Basic.cs
index 4af38a528..6ef95eefa 100644
--- a/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Basic.cs
+++ b/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Basic.cs
@@ -90,7 +90,7 @@ public void Run(string[] args)
 
             // UDF return type as array.
             Func<Column, Column> udfArray =
-                Udf<string, string[]>((str) => new string[] { str, str + str });
+                Udf<string, string[]>((str) => new[] { str, str + str });
             df.Select(Explode(udfArray(df["name"]))).Show();
 
             // UDF return type as map.
diff --git a/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Logging.cs b/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Logging.cs
index 2aa707569..e154d6c38 100644
--- a/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Logging.cs
+++ b/examples/Microsoft.Spark.CSharp.Examples/Sql/Batch/Logging.cs
@@ -39,7 +39,7 @@ public void Run(string[] args)
                 .GetOrCreate();
 
             // Read input log file and display it
-            var df = spark.Read().Text(args[0]);
+            DataFrame df = spark.Read().Text(args[0]);
             df.Show();
 
             // Step 1: UDF to determine if each line is a valid log entry
diff --git a/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkCharacterCount.cs b/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkCharacterCount.cs
index 090696787..e8442af8b 100644
--- a/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkCharacterCount.cs
+++ b/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkCharacterCount.cs
@@ -22,7 +22,7 @@ public void Run(string[] args)
         {
             // Default to running on localhost:9999
             string hostname = "localhost";
-            var port = 9999;
+            int port = 9999;
 
             // User designated their own host and port
             if (args.Length == 2)
diff --git a/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCount.cs b/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCount.cs
index 5392ce8cc..95b715ff5 100644
--- a/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCount.cs
+++ b/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCount.cs
@@ -28,7 +28,7 @@ public void Run(string[] args)
             }
 
             string hostname = args[0];
-            var port = int.Parse(args[1]);
+            int port = int.Parse(args[1]);
 
             SparkSession spark = SparkSession
                 .Builder()
diff --git a/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCountWindowed.cs b/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCountWindowed.cs
index c500fbbc1..4873fac6e 100644
--- a/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCountWindowed.cs
+++ b/examples/Microsoft.Spark.CSharp.Examples/Sql/Streaming/StructuredNetworkWordCountWindowed.cs
@@ -30,17 +30,17 @@ public void Run(string[] args)
             }
 
             string hostname = args[0];
-            var port = int.Parse(args[1]);
-            var windowSize = int.Parse(args[2]);
-            var slideSize = (args.Length == 3) ? windowSize : int.Parse(args[3]);
+            int port = int.Parse(args[1]);
+            int windowSize = int.Parse(args[2]);
+            int slideSize = (args.Length == 3) ? windowSize : int.Parse(args[3]);
             if (slideSize > windowSize)
             {
                 Console.Error.WriteLine(
                     "<slide duration> must be less than or equal " +
                     "to <window duration>");
             }
-            var windowDuration = $"{windowSize} seconds";
-            var slideDuration = $"{slideSize} seconds";
+            string windowDuration = $"{windowSize} seconds";
+            string slideDuration = $"{slideSize} seconds";
 
             SparkSession spark = SparkSession
                 .Builder()
diff --git a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaTableTests.cs b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaTableTests.cs
index a3445c43d..69249d8c5 100644
--- a/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaTableTests.cs
+++ b/src/csharp/Extensions/Microsoft.Spark.Extensions.Delta.E2ETest/DeltaTableTests.cs
@@ -47,7 +47,7 @@ public void TestTutorialScenario()
                 data.Write().Format("delta").Mode("overwrite").Save(path);
 
                 // Load the data into a DeltaTable object.
-                var deltaTable = DeltaTable.ForPath(path);
+                DeltaTable deltaTable = DeltaTable.ForPath(path);
 
                 // Validate that deltaTable contains the sequence [5 ... 9].
                 ValidateRangeDataFrame(Enumerable.Range(5, 5), deltaTable.ToDF());
diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/BucketizerTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/BucketizerTests.cs
index bee106dab..48c7fdf89 100644
--- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/BucketizerTests.cs
+++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/ML/Feature/BucketizerTests.cs
@@ -2,9 +2,7 @@
 // The .NET Foundation licenses this file to you under the MIT license.
 // See the LICENSE file in the project root for more information.
 
-using System;
 using System.Collections.Generic;
-using System.Security.Cryptography;
 using Microsoft.Spark.ML.Feature;
 using Microsoft.Spark.Sql;
 using Xunit;
@@ -24,15 +22,15 @@ public BucketizerTests(SparkFixture fixture)
         [Fact]
         public void TestBucketizer()
         {
-            var expectedSplits = new double[] {Double.MinValue, 0.0, 10.0, 50.0, Double.MaxValue};
-
-            var expectedHandle = "skip";
-            var expectedUid = "uid";
-            var expectedInputCol = "input_col";
-            var expectedOutputCol = "output_col";
-
-            var bucketizer = new Bucketizer(expectedUid)
-                .SetInputCol(expectedInputCol)
+            var expectedSplits = new double[] { double.MinValue, 0.0, 10.0, 50.0, double.MaxValue };
+
+            string expectedHandle = "skip";
+            string expectedUid = "uid";
+            string expectedInputCol = "input_col";
+            string expectedOutputCol = "output_col";
+
+            var bucketizer = new Bucketizer(expectedUid);
+            bucketizer.SetInputCol(expectedInputCol)
                 .SetOutputCol(expectedOutputCol)
                 .SetHandleInvalid(expectedHandle)
                 .SetSplits(expectedSplits);
@@ -54,19 +52,19 @@ public void TestBucketizer()
         [Fact]
         public void TestBucketizer_MultipleColumns()
         {
-            double[][] expectedSplitsArray = new[]
+            var expectedSplitsArray = new double[][]
             {
-                new[] {Double.MinValue, 0.0, 10.0, 50.0, Double.MaxValue},
-                new[] {Double.MinValue, 0.0, 10000.0, Double.MaxValue}
+                new[] { double.MinValue, 0.0, 10.0, 50.0, double.MaxValue},
+                new[] { double.MinValue, 0.0, 10000.0, double.MaxValue}
             };
 
-            var expectedHandle = "keep";
+            string expectedHandle = "keep";
 
-            var expectedInputCols = new List<string>() {"input_col_a", "input_col_b"};
-            var expectedOutputCols = new List<string>() {"output_col_a", "output_col_b"};
-
-            var bucketizer = new Bucketizer()
-                .SetInputCols(expectedInputCols)
+            var expectedInputCols = new List<string>() { "input_col_a", "input_col_b" };
+            var expectedOutputCols = new List<string>() { "output_col_a", "output_col_b" };
+
+            var bucketizer = new Bucketizer();
+
bucketizer.SetInputCols(expectedInputCols) .SetOutputCols(expectedOutputCols) .SetHandleInvalid(expectedHandle) .SetSplitsArray(expectedSplitsArray); @@ -79,7 +77,7 @@ public void TestBucketizer_MultipleColumns() DataFrame output = bucketizer.Transform(input); Assert.Contains(output.Schema().Fields, (f => f.Name == "output_col_a")); Assert.Contains(output.Schema().Fields, (f => f.Name == "output_col_b")); - + Assert.Equal(expectedInputCols, bucketizer.GetInputCols()); Assert.Equal(expectedOutputCols, bucketizer.GetOutputCols()); Assert.Equal(expectedSplitsArray, bucketizer.GetSplitsArray()); diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/RDDTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/RDDTests.cs index ca0ba7ee5..560db6815 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/RDDTests.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/RDDTests.cs @@ -25,7 +25,7 @@ public void TestParallelize() Assert.Equal(new[] { 0, 1, 2, 3, 4 }, rdd.Collect()); } { - var strs = new[] { "hello", "spark", "for", "dotnet" }; + var strs = new string[] { "hello", "spark", "for", "dotnet" }; RDD rdd = _sc.Parallelize(strs); Assert.Equal(strs, rdd.Collect()); } @@ -35,7 +35,7 @@ public void TestParallelize() public void TestTextFile() { RDD rdd = _sc.TextFile($"{TestEnvironment.ResourceDirectory}people.txt"); - var strs = new[] { "Michael, 29", "Andy, 30", "Justin, 19" }; + var strs = new string[] { "Michael, 29", "Andy, 30", "Justin, 19" }; Assert.Equal(strs, rdd.Collect()); // Test a transformation so that SerializedMode is correctly propagated. @@ -107,7 +107,7 @@ public void TestSample() RDD rdd = _sc.Parallelize(Enumerable.Range(0, 10)) .Sample(true, 0.9, 0); - var count = rdd.Collect().Count(); + int count = rdd.Collect().Count(); Assert.True(count > 0); Assert.True(count <= 10); } diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameFunctionsTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameFunctionsTests.cs index 0f7ba150c..aa7c83887 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameFunctionsTests.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/DataFrameFunctionsTests.cs @@ -85,7 +85,7 @@ public void TestDataFrameStatFunctionSignatures() double corr = stat.Corr("age", "age", "pearson"); corr = stat.Corr("age", "age"); - var columnNames = new[] { "age", "name" }; + var columnNames = new string[] { "age", "name" }; DataFrame df = stat.FreqItems(columnNames, 0.2); df = stat.FreqItems(columnNames); diff --git a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/SparkSessionTests.cs b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/SparkSessionTests.cs index d9fab084c..5f11266c1 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/SparkSessionTests.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/IpcTests/Sql/SparkSessionTests.cs @@ -93,7 +93,7 @@ public void TestCreateDataFrame() // Calling CreateDataFrame(IEnumerable _) without schema { var data = new List(new string[] { "Alice", "Bob" }); - var schema = SchemaWithSingleColumn(new StringType()); + StructType schema = SchemaWithSingleColumn(new StringType()); DataFrame df = _spark.CreateDataFrame(data); ValidateDataFrame(df, data.Select(a => new object[] { a }), schema); @@ -102,7 +102,7 @@ public void TestCreateDataFrame() // Calling CreateDataFrame(IEnumerable _) without schema { var data = new List(new int[] { 1, 2 }); - var schema = SchemaWithSingleColumn(new IntegerType()); + StructType schema = SchemaWithSingleColumn(new IntegerType()); DataFrame df = 
_spark.CreateDataFrame(data); ValidateDataFrame(df, data.Select(a => new object[] { a }), schema); @@ -111,7 +111,7 @@ public void TestCreateDataFrame() // Calling CreateDataFrame(IEnumerable _) without schema { var data = new List(new double[] { 1.2, 2.3 }); - var schema = SchemaWithSingleColumn(new DoubleType()); + StructType schema = SchemaWithSingleColumn(new DoubleType()); DataFrame df = _spark.CreateDataFrame(data); ValidateDataFrame(df, data.Select(a => new object[] { a }), schema); @@ -120,7 +120,7 @@ public void TestCreateDataFrame() // Calling CreateDataFrame(IEnumerable _) without schema { var data = new List(new bool[] { true, false }); - var schema = SchemaWithSingleColumn(new BooleanType()); + StructType schema = SchemaWithSingleColumn(new BooleanType()); DataFrame df = _spark.CreateDataFrame(data); ValidateDataFrame(df, data.Select(a => new object[] { a }), schema); diff --git a/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfComplexTypesTests.cs b/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfComplexTypesTests.cs index 4d8153829..883e6580a 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfComplexTypesTests.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfComplexTypesTests.cs @@ -49,7 +49,7 @@ public void TestUdfWithArrayType() Row[] rows = _df.Select(workingUdf(_df["ids"])).Collect().ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { "1", "3,5", "2,4" }; + var expected = new string[] { "1", "3,5", "2,4" }; string[] rowsToArray = rows.Select(x => x[0].ToString()).ToArray(); Assert.Equal(expected, rowsToArray); } @@ -101,7 +101,7 @@ public void TestUdfWithMapType() Row[] rows = df.Select(workingUdf(df["NameIdsMap"])).Collect().ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { "1", "1", "1" }; + var expected = new string[] { "1", "1", "1" }; string[] actual = rows.Select(x => x[0].ToString()).ToArray(); Assert.Equal(expected, actual); } @@ -144,7 +144,7 @@ public void TestUdfWithRowType() Row[] rows = _df.Select(udf(_df["info1"])).Collect().ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { "Burdwan", "Los Angeles", "Seattle" }; + var expected = new string[] { "Burdwan", "Los Angeles", "Seattle" }; string[] actual = rows.Select(x => x[0].ToString()).ToArray(); Assert.Equal(expected, actual); } @@ -165,7 +165,7 @@ public void TestUdfWithRowType() .ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { + var expected = new string[] { "Michael:Burdwan,Paschimbanga", "Andy:Los Angeles,California", "Justin:Seattle,Washington" }; @@ -185,7 +185,7 @@ public void TestUdfWithRowType() Row[] rows = _df.Select(udf(_df["info3"])).Collect().ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { "Developer", "Developer", "Developer" }; + var expected = new string[] { "Developer", "Developer", "Developer" }; string[] actual = rows.Select(x => x[0].ToString()).ToArray(); Assert.Equal(expected, actual); } diff --git a/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfSerDeTests.cs b/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfSerDeTests.cs index 7b0417a1c..ea36a83f0 100644 --- a/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfSerDeTests.cs +++ b/src/csharp/Microsoft.Spark.E2ETest/UdfTests/UdfSerDeTests.cs @@ -39,7 +39,7 @@ public void TestUdfClosure() Row[] rows = _df.Select(udf(_df["name"])).Collect().ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { "HelloMichael", "HelloAndy", "HelloJustin" }; + var expected = new string[] { "HelloMichael", "HelloAndy", "HelloJustin" }; for (int i = 0; i < 
rows.Length; ++i) { Row row = rows[i]; @@ -81,7 +81,7 @@ public void TestInitExternalClassInUdf() Row[] rows = _df.Select(udf(_df["name"])).Collect().ToArray(); Assert.Equal(3, rows.Length); - var expected = new[] { "HelloMichael", "HelloAndy", "HelloJustin" }; + var expected = new string[] { "HelloMichael", "HelloAndy", "HelloJustin" }; for (int i = 0; i < rows.Length; ++i) { Row row = rows[i]; diff --git a/src/csharp/Microsoft.Spark.UnitTest/CommandSerDeTests.cs b/src/csharp/Microsoft.Spark.UnitTest/CommandSerDeTests.cs index 611e85932..c05fba31f 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/CommandSerDeTests.cs +++ b/src/csharp/Microsoft.Spark.UnitTest/CommandSerDeTests.cs @@ -18,18 +18,18 @@ public class CommandSerDeTests [Fact] public void TestCommandSerDeForSqlPickling() { - var udfWrapper = new Sql.PicklingUdfWrapper((str) => $"hello {str}"); - var workerFunction = new Sql.PicklingWorkerFunction(udfWrapper.Execute); + var udfWrapper = new PicklingUdfWrapper((str) => $"hello {str}"); + var workerFunction = new PicklingWorkerFunction(udfWrapper.Execute); - var serializedCommand = Utils.CommandSerDe.Serialize( + byte[] serializedCommand = Utils.CommandSerDe.Serialize( workerFunction.Func, Utils.CommandSerDe.SerializedMode.Row, Utils.CommandSerDe.SerializedMode.Row); using (var ms = new MemoryStream(serializedCommand)) { - var deserializedWorkerFunction = new Sql.PicklingWorkerFunction( - Utils.CommandSerDe.Deserialize( + var deserializedWorkerFunction = new PicklingWorkerFunction( + Utils.CommandSerDe.Deserialize( ms, out Utils.CommandSerDe.SerializedMode serializerMode, out Utils.CommandSerDe.SerializedMode deserializerMode, @@ -39,7 +39,7 @@ public void TestCommandSerDeForSqlPickling() Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode); Assert.Equal("N", runMode); - var result = deserializedWorkerFunction.Func(0, new[] { "spark" }, new[] { 0 }); + object result = deserializedWorkerFunction.Func(0, new[] { "spark" }, new[] { 0 }); Assert.Equal("hello spark", result); } } @@ -47,7 +47,7 @@ public void TestCommandSerDeForSqlPickling() [Fact] public void TestCommandSerDeForSqlArrow() { - var udfWrapper = new Sql.ArrowUdfWrapper( + var udfWrapper = new ArrowUdfWrapper( (strings) => (StringArray)ToArrowArray( Enumerable.Range(0, strings.Length) .Select(i => $"hello {strings.GetString(i)}") @@ -55,7 +55,7 @@ public void TestCommandSerDeForSqlArrow() var workerFunction = new ArrowWorkerFunction(udfWrapper.Execute); - var serializedCommand = Utils.CommandSerDe.Serialize( + byte[] serializedCommand = Utils.CommandSerDe.Serialize( workerFunction.Func, Utils.CommandSerDe.SerializedMode.Row, Utils.CommandSerDe.SerializedMode.Row); @@ -73,10 +73,10 @@ public void TestCommandSerDeForSqlArrow() Assert.Equal(Utils.CommandSerDe.SerializedMode.Row, deserializerMode); Assert.Equal("N", runMode); - Apache.Arrow.IArrowArray input = ToArrowArray(new[] { "spark" }); - Apache.Arrow.IArrowArray result = + IArrowArray input = ToArrowArray(new[] { "spark" }); + IArrowArray result = deserializedWorkerFunction.Func(new[] { input }, new[] { 0 }); - ArrowTestUtils.AssertEquals("hello spark", result); + AssertEquals("hello spark", result); } } @@ -94,10 +94,10 @@ public void TestCommandSerDeForRDD() var func3 = new RDD.WorkerFunction( new RDD.MapUdfWrapper((a) => a + 5).Execute); - var chainedFunc1 = RDD.WorkerFunction.Chain(func1, func2); - var chainedFunc2 = RDD.WorkerFunction.Chain(chainedFunc1, func3); + RDD.WorkerFunction chainedFunc1 = RDD.WorkerFunction.Chain(func1, func2); + 
RDD.WorkerFunction chainedFunc2 = RDD.WorkerFunction.Chain(chainedFunc1, func3); - var serializedCommand = Utils.CommandSerDe.Serialize( + byte[] serializedCommand = Utils.CommandSerDe.Serialize( chainedFunc2.Func, Utils.CommandSerDe.SerializedMode.Byte, Utils.CommandSerDe.SerializedMode.Byte); diff --git a/src/csharp/Microsoft.Spark.UnitTest/Sql/ColumnTests.cs b/src/csharp/Microsoft.Spark.UnitTest/Sql/ColumnTests.cs index 58f9c3189..f88d53800 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/Sql/ColumnTests.cs +++ b/src/csharp/Microsoft.Spark.UnitTest/Sql/ColumnTests.cs @@ -311,7 +311,7 @@ public void TestWhenCondition() { Column column1 = CreateColumn("col1"); Column column2 = CreateColumn("col2"); - var value = 0; + int value = 0; column1.When(column2, value); VerifyNonStaticCall(column1, "when", column2, value); } @@ -320,8 +320,8 @@ public void TestWhenCondition() public void TestBetweenCondition() { Column column1 = CreateColumn("col1"); - var val1 = 1; - var val2 = 2; + int val1 = 1; + int val2 = 2; column1.Between(val1, val2); VerifyNonStaticCall(column1, "between", val1, val2); } @@ -331,8 +331,8 @@ public void TestSubStr() { { Column column1 = CreateColumn("col1"); - var pos = 1; - var len = 2; + int pos = 1; + int len = 2; column1.SubStr(pos, len); VerifyNonStaticCall(column1, "substr", pos, len); } @@ -512,7 +512,7 @@ public void TestNamedOperatorsWithString(string funcName, string opName) { // These operators take string as the operand. Column column = CreateColumn("col"); - var literal = "hello"; + string literal = "hello"; System.Reflection.MethodInfo func = column.GetType().GetMethod( funcName, new Type[] { typeof(string) }); diff --git a/src/csharp/Microsoft.Spark.UnitTest/Sql/RowTests.cs b/src/csharp/Microsoft.Spark.UnitTest/Sql/RowTests.cs index 843a3620e..5a9ee419f 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/Sql/RowTests.cs +++ b/src/csharp/Microsoft.Spark.UnitTest/Sql/RowTests.cs @@ -83,10 +83,10 @@ public void RowConstructorTest() var schema = (StructType)DataType.ParseDataType(_testJsonSchema); var row1 = new Row(new object[] { 10, "name1" }, schema); var row2 = new Row(new object[] { 15, "name2" }, schema); - var pickledBytes = pickler.dumps(new[] { row1, row2 }); + byte[] pickledBytes = pickler.dumps(new[] { row1, row2 }); // Note that the following will invoke RowConstructor.construct(). - var unpickledData = PythonSerDe.GetUnpickledObjects( + object[] unpickledData = PythonSerDe.GetUnpickledObjects( new MemoryStream(pickledBytes), pickledBytes.Length); @@ -106,13 +106,13 @@ public void RowCollectorTest() // Pickle two rows in one batch. var row1 = new Row(new object[] { 10, "name1" }, schema); var row2 = new Row(new object[] { 15, "name2" }, schema); - var batch1 = pickler.dumps(new[] { row1, row2 }); + byte[] batch1 = pickler.dumps(new[] { row1, row2 }); SerDe.Write(stream, batch1.Length); SerDe.Write(stream, batch1); // Pickle one row in one batch. 
var row3 = new Row(new object[] { 20, "name3" }, schema); - var batch2 = pickler.dumps(new[] { row3 }); + byte[] batch2 = pickler.dumps(new[] { row3 }); SerDe.Write(stream, batch2.Length); SerDe.Write(stream, batch2); diff --git a/src/csharp/Microsoft.Spark.UnitTest/Sql/TypesTests.cs b/src/csharp/Microsoft.Spark.UnitTest/Sql/TypesTests.cs index b3891689c..56030b601 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/Sql/TypesTests.cs +++ b/src/csharp/Microsoft.Spark.UnitTest/Sql/TypesTests.cs @@ -25,7 +25,7 @@ public class TypesTests [InlineData("short")] public void TestSimpleTypes(string typeName) { - var atomicType = DataType.ParseDataType($@"""{typeName}"""); + DataType atomicType = DataType.ParseDataType($@"""{typeName}"""); Assert.Equal(typeName, atomicType.TypeName); Assert.Equal(typeName, atomicType.SimpleString); } diff --git a/src/csharp/Microsoft.Spark.UnitTest/TestUtils/ArrowTestUtils.cs b/src/csharp/Microsoft.Spark.UnitTest/TestUtils/ArrowTestUtils.cs index dccb62732..ca8346dde 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/TestUtils/ArrowTestUtils.cs +++ b/src/csharp/Microsoft.Spark.UnitTest/TestUtils/ArrowTestUtils.cs @@ -247,7 +247,7 @@ private static IArrowArray ToStringArrowArray(string[] array) // TODO: Use array pool and encode directly into the array. foreach (string str in array) { - var bytes = Encoding.UTF8.GetBytes(str); + byte[] bytes = Encoding.UTF8.GetBytes(str); valueOffsets.Append(offset); // TODO: Anyway to use the span-based GetBytes to write directly to // the value buffer? diff --git a/src/csharp/Microsoft.Spark.UnitTest/WorkerFunctionTests.cs b/src/csharp/Microsoft.Spark.UnitTest/WorkerFunctionTests.cs index 7a98ea8b7..222a61ed3 100644 --- a/src/csharp/Microsoft.Spark.UnitTest/WorkerFunctionTests.cs +++ b/src/csharp/Microsoft.Spark.UnitTest/WorkerFunctionTests.cs @@ -43,11 +43,11 @@ public void TestChainingPicklingWorkerFunction() object[] input = { 100, "name" }; // Validate one-level chaining. - var chainedFunc1 = PicklingWorkerFunction.Chain(func1, func2); + PicklingWorkerFunction chainedFunc1 = PicklingWorkerFunction.Chain(func1, func2); Assert.Equal("outer1:name:100", chainedFunc1.Func(0, input, new[] { 0, 1 })); // Validate two-level chaining. - var chainedFunc2 = PicklingWorkerFunction.Chain(chainedFunc1, func3); + PicklingWorkerFunction chainedFunc2 = PicklingWorkerFunction.Chain(chainedFunc1, func3); Assert.Equal("outer2:outer1:name:100", chainedFunc2.Func(0, input, new[] { 0, 1 })); } @@ -65,7 +65,7 @@ public void TestInvalidChainingPickling() object[] input = { 100, "name" }; // The order does not align since workerFunction2 is executed first. - var chainedFunc1 = PicklingWorkerFunction.Chain(func2, func1); + PicklingWorkerFunction chainedFunc1 = PicklingWorkerFunction.Chain(func2, func1); Assert.ThrowsAny(() => chainedFunc1.Func(0, input, new[] { 0, 1 })); } @@ -133,21 +133,21 @@ public void TestChainingArrowWorkerFunction() .Select(i => $"outer2:{strings.GetString(i)}") .ToArray())).Execute); - Apache.Arrow.IArrowArray[] input = new[] + var input = new IArrowArray[] { ToArrowArray(new[] { 100 }), ToArrowArray(new[] { "name" }) }; // Validate one-level chaining. - var chainedFunc1 = ArrowWorkerFunction.Chain(func1, func2); - ArrowTestUtils.AssertEquals( + ArrowWorkerFunction chainedFunc1 = ArrowWorkerFunction.Chain(func1, func2); + AssertEquals( "outer1:name:100", chainedFunc1.Func(input, new[] { 0, 1 })); // Validate two-level chaining. 
- var chainedFunc2 = ArrowWorkerFunction.Chain(chainedFunc1, func3); - ArrowTestUtils.AssertEquals( + ArrowWorkerFunction chainedFunc2 = ArrowWorkerFunction.Chain(chainedFunc1, func3); + AssertEquals( "outer2:outer1:name:100", chainedFunc2.Func(input, new[] { 0, 1 })); } @@ -169,14 +169,14 @@ public void TestInvalidChainingArrow() .Select(i => $"outer1:{strings.GetString(i)}") .ToArray())).Execute); - Apache.Arrow.IArrowArray[] input = new[] + IArrowArray[] input = new[] { ToArrowArray(new[] { 100 }), ToArrowArray(new[] { "name" }) }; // The order does not align since workerFunction2 is executed first. - var chainedFunc1 = ArrowWorkerFunction.Chain(func2, func1); + ArrowWorkerFunction chainedFunc1 = ArrowWorkerFunction.Chain(func2, func1); Assert.ThrowsAny(() => chainedFunc1.Func(input, new[] { 0, 1 })); } } diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs b/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs index 886a453de..377d12be9 100644 --- a/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs +++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/CommandExecutorTests.cs @@ -52,7 +52,7 @@ public void TestPicklingSqlCommandExecutorWithSingleCommand() var pickler = new Pickler(); for (int i = 0; i < numRows; ++i) { - var pickled = pickler.dumps( + byte[] pickled = pickler.dumps( new[] { new object[] { (i % 2 == 0) ? null : i.ToString() } }); SerDe.Write(inputStream, pickled.Length); SerDe.Write(inputStream, pickled); @@ -378,7 +378,7 @@ await arrowWriter.WriteRecordBatchAsync( // Validate the output stream. outputStream.Seek(0, SeekOrigin.Begin); - var arrowLength = SerDe.ReadInt32(outputStream); + int arrowLength = SerDe.ReadInt32(outputStream); Assert.Equal((int)SpecialLengths.START_ARROW_STREAM, arrowLength); var arrowReader = new ArrowStreamReader(outputStream); RecordBatch outputBatch = await arrowReader.ReadNextRecordBatchAsync(); @@ -625,7 +625,7 @@ public void TestRDDCommandExecutor() var formatter = new BinaryFormatter(); var memoryStream = new MemoryStream(); - var inputs = new[] { 0, 1, 2, 3, 4 }; + var inputs = new int[] { 0, 1, 2, 3, 4 }; var values = new List(); foreach (int input in inputs) diff --git a/src/csharp/Microsoft.Spark.Worker.UnitTest/TaskRunnerTests.cs b/src/csharp/Microsoft.Spark.Worker.UnitTest/TaskRunnerTests.cs index d9712777a..1066d078a 100644 --- a/src/csharp/Microsoft.Spark.Worker.UnitTest/TaskRunnerTests.cs +++ b/src/csharp/Microsoft.Spark.Worker.UnitTest/TaskRunnerTests.cs @@ -27,7 +27,7 @@ public void TestTaskRunner() PayloadWriter payloadWriter = new PayloadWriterFactory().Create(); var taskRunner = new TaskRunner(0, clientSocket, false, payloadWriter.Version); - var clientTask = Task.Run(() => taskRunner.Run()); + Task clientTask = Task.Run(() => taskRunner.Run()); using (ISocketWrapper serverSocket = serverListener.Accept()) { @@ -43,7 +43,7 @@ public void TestTaskRunner() var pickler = new Pickler(); for (int i = 0; i < 10; ++i) { - var pickled = pickler.dumps( + byte[] pickled = pickler.dumps( new[] { new object[] { i.ToString(), i, i } }); SerDe.Write(outputStream, pickled.Length); SerDe.Write(outputStream, pickled); @@ -55,16 +55,16 @@ public void TestTaskRunner() outputStream.Flush(); // Now process the bytes flowing in from the client. 
- var timingDataReceived = false; - var exceptionThrown = false; + bool timingDataReceived = false; + bool exceptionThrown = false; var rowsReceived = new List(); while (true) { - var length = SerDe.ReadInt32(inputStream); + int length = SerDe.ReadInt32(inputStream); if (length > 0) { - var pickledBytes = SerDe.ReadBytes(inputStream, length); + byte[] pickledBytes = SerDe.ReadBytes(inputStream, length); using var unpickler = new Unpickler(); var rows = unpickler.loads(pickledBytes) as ArrayList; foreach (object row in rows) @@ -74,11 +74,11 @@ public void TestTaskRunner() } else if (length == (int)SpecialLengths.TIMING_DATA) { - var bootTime = SerDe.ReadInt64(inputStream); - var initTime = SerDe.ReadInt64(inputStream); - var finishTime = SerDe.ReadInt64(inputStream); - var memoryBytesSpilled = SerDe.ReadInt64(inputStream); - var diskBytesSpilled = SerDe.ReadInt64(inputStream); + long bootTime = SerDe.ReadInt64(inputStream); + long initTime = SerDe.ReadInt64(inputStream); + long finishTime = SerDe.ReadInt64(inputStream); + long memoryBytesSpilled = SerDe.ReadInt64(inputStream); + long diskBytesSpilled = SerDe.ReadInt64(inputStream); timingDataReceived = true; } else if (length == (int)SpecialLengths.PYTHON_EXCEPTION_THROWN) @@ -89,7 +89,7 @@ public void TestTaskRunner() } else if (length == (int)SpecialLengths.END_OF_DATA_SECTION) { - var numAccumulatorUpdates = SerDe.ReadInt32(inputStream); + int numAccumulatorUpdates = SerDe.ReadInt32(inputStream); SerDe.ReadInt32(inputStream); break; } @@ -104,7 +104,7 @@ public void TestTaskRunner() { // Two UDFs registered, thus expecting two columns. // Refer to TestData.GetDefaultCommandPayload(). - var row = rowsReceived[i]; + object[] row = rowsReceived[i]; Assert.Equal(2, rowsReceived[i].Length); Assert.Equal($"udf2 udf1 {i}", row[0]); Assert.Equal(i + i, row[1]); diff --git a/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs b/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs index 5733b400a..224eba57c 100644 --- a/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs +++ b/src/csharp/Microsoft.Spark.Worker/Command/RDDCommandExecutor.cs @@ -68,7 +68,7 @@ private IEnumerable GetInputIterator( RDD.Collector.IDeserializer deserializer = RDD.Collector.GetDeserializer(deserializerMode); - var messageLength = 0; + int messageLength = 0; while ((messageLength = SerDe.ReadInt32(inputStream)) != (int)SpecialLengths.END_OF_DATA_SECTION) { diff --git a/src/csharp/Microsoft.Spark.Worker/DaemonWorker.cs b/src/csharp/Microsoft.Spark.Worker/DaemonWorker.cs index bd6b010c5..bb30afe88 100644 --- a/src/csharp/Microsoft.Spark.Worker/DaemonWorker.cs +++ b/src/csharp/Microsoft.Spark.Worker/DaemonWorker.cs @@ -146,7 +146,7 @@ private void StartServer(ISocketWrapper listener) // When reuseWorker is set to true, numTaskRunners will be always one // greater than numWorkerThreads since TaskRunner.Run() does not return // so that the task runner object is not removed from _taskRunners. 
- var numTaskRunners = _taskRunners.Count(); + int numTaskRunners = _taskRunners.Count(); while (numWorkerThreads < numTaskRunners) { // Note that in the current implementation of RunWorkerThread() does @@ -221,7 +221,7 @@ private void WaitForSignal() while (true) { var bytes = new byte[sizeof(int)]; - var readBytes = inputStream.Read(bytes, 0, bytes.Length); + int readBytes = inputStream.Read(bytes, 0, bytes.Length); if (readBytes != bytes.Length) { @@ -229,7 +229,7 @@ private void WaitForSignal() Environment.Exit(-1); } - var taskRunnerId = BinaryPrimitives.ReadInt32BigEndian(bytes); + int taskRunnerId = BinaryPrimitives.ReadInt32BigEndian(bytes); if (taskRunnerId < 0) { s_logger.LogInfo($"Received negative TaskRunnerId: {taskRunnerId}, will exit"); diff --git a/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs b/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs index 4dd06fbc4..6b821895c 100644 --- a/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs +++ b/src/csharp/Microsoft.Spark.Worker/Processor/BroadcastVariableProcessor.cs @@ -54,7 +54,7 @@ internal BroadcastVariables Process(Stream stream) } else { - var path = SerDe.ReadString(stream); + string path = SerDe.ReadString(stream); // TODO: Register new broadcast variable. } } diff --git a/src/csharp/Microsoft.Spark.Worker/TaskRunner.cs b/src/csharp/Microsoft.Spark.Worker/TaskRunner.cs index b282faed4..2c15c2bf4 100644 --- a/src/csharp/Microsoft.Spark.Worker/TaskRunner.cs +++ b/src/csharp/Microsoft.Spark.Worker/TaskRunner.cs @@ -167,7 +167,7 @@ private Payload ProcessStream( SerDe.Write(outputStream, 0); // Check the end of stream. - var endOfStream = SerDe.ReadInt32(inputStream); + int endOfStream = SerDe.ReadInt32(inputStream); if (endOfStream == (int)SpecialLengths.END_OF_STREAM) { s_logger.LogDebug($"[{TaskId}] Received END_OF_STREAM signal."); diff --git a/src/csharp/Microsoft.Spark/Interop/Ipc/PayloadHelper.cs b/src/csharp/Microsoft.Spark/Interop/Ipc/PayloadHelper.cs index e1771405d..9875078a7 100644 --- a/src/csharp/Microsoft.Spark/Interop/Ipc/PayloadHelper.cs +++ b/src/csharp/Microsoft.Spark/Interop/Ipc/PayloadHelper.cs @@ -40,7 +40,7 @@ internal static void BuildPayload( object[] args) { // Reserve space for total length. - var originalPosition = destination.Position; + long originalPosition = destination.Position; destination.Position += sizeof(int); SerDe.Write(destination, isStaticMethod); @@ -50,7 +50,7 @@ internal static void BuildPayload( ConvertArgsToBytes(destination, args); // Write the length now that we've written out everything else. 
- var afterPosition = destination.Position; + long afterPosition = destination.Position; destination.Position = originalPosition; SerDe.Write(destination, (int)afterPosition - sizeof(int)); destination.Position = afterPosition; diff --git a/src/csharp/Microsoft.Spark/Interop/Ipc/SerDe.cs b/src/csharp/Microsoft.Spark/Interop/Ipc/SerDe.cs index d0f6937ad..c2c742e87 100644 --- a/src/csharp/Microsoft.Spark/Interop/Ipc/SerDe.cs +++ b/src/csharp/Microsoft.Spark/Interop/Ipc/SerDe.cs @@ -72,7 +72,7 @@ public static bool ReadBool(Stream s) => /// The integer read from stream public static int ReadInt32(Stream s) { - var buffer = GetThreadLocalBuffer(sizeof(int)); + byte[] buffer = GetThreadLocalBuffer(sizeof(int)); TryReadBytes(s, buffer, sizeof(int)); return BinaryPrimitives.ReadInt32BigEndian(buffer); } @@ -84,7 +84,7 @@ public static int ReadInt32(Stream s) /// The long integer read from stream public static long ReadInt64(Stream s) { - var buffer = GetThreadLocalBuffer(sizeof(long)); + byte[] buffer = GetThreadLocalBuffer(sizeof(long)); TryReadBytes(s, buffer, sizeof(long)); return BinaryPrimitives.ReadInt64BigEndian(buffer); } @@ -96,7 +96,7 @@ public static long ReadInt64(Stream s) /// The double read from stream public static double ReadDouble(Stream s) { - var buffer = GetThreadLocalBuffer(sizeof(long)); + byte[] buffer = GetThreadLocalBuffer(sizeof(long)); TryReadBytes(s, buffer, sizeof(long)); return BitConverter.Int64BitsToDouble(BinaryPrimitives.ReadInt64BigEndian(buffer)); } @@ -108,7 +108,7 @@ public static double ReadDouble(Stream s) /// The string read from stream public static string ReadString(Stream s) { - var buffer = GetThreadLocalBuffer(sizeof(int)); + byte[] buffer = GetThreadLocalBuffer(sizeof(int)); if (!TryReadBytes(s, buffer, sizeof(int))) { return null; @@ -130,7 +130,7 @@ public static string ReadString(Stream s, int length) return null; } - var buffer = GetThreadLocalBuffer(length); + byte[] buffer = GetThreadLocalBuffer(length); TryReadBytes(s, buffer, length); return Encoding.UTF8.GetString(buffer, 0, length); } @@ -158,7 +158,7 @@ public static byte[] ReadBytes(Stream s, int length) if (length > 0) { int bytesRead; - var totalBytesRead = 0; + int totalBytesRead = 0; do { bytesRead = s.Read(buffer, totalBytesRead, length - totalBytesRead); @@ -187,7 +187,7 @@ public static bool TryReadBytes(Stream s, byte[] buffer, int length) if (length > 0) { int bytesRead; - var totalBytesRead = 0; + int totalBytesRead = 0; do { bytesRead = s.Read(buffer, totalBytesRead, length - totalBytesRead); @@ -213,13 +213,13 @@ public static bool TryReadBytes(Stream s, byte[] buffer, int length) public static int? ReadBytesLength(Stream s) { - var lengthBuffer = ReadBytes(s, sizeof(int)); + byte[] lengthBuffer = ReadBytes(s, sizeof(int)); if (lengthBuffer == null) { return null; } - var length = BinaryPrimitives.ReadInt32BigEndian(lengthBuffer); + int length = BinaryPrimitives.ReadInt32BigEndian(lengthBuffer); if (length == (int)SpecialLengths.NULL) { return null; @@ -235,7 +235,7 @@ public static bool TryReadBytes(Stream s, byte[] buffer, int length) /// The byte array read from stream public static byte[] ReadBytes(Stream s) { - var length = ReadBytesLength(s); + int? 
length = ReadBytesLength(s); if (length == null) { return null; @@ -248,7 +248,7 @@ private static byte[] GetThreadLocalBuffer(int minSize) { const int DefaultMinSize = 256; - var buffer = s_threadLocalBuffer; + byte[] buffer = s_threadLocalBuffer; if (buffer == null || buffer.Length < minSize) { s_threadLocalBuffer = buffer = new byte[Math.Max(DefaultMinSize, minSize)]; @@ -297,7 +297,7 @@ public static void Write(Stream s, bool value) => /// The integer to write public static void Write(Stream s, int value) { - var buffer = GetThreadLocalBuffer(sizeof(int)); + byte[] buffer = GetThreadLocalBuffer(sizeof(int)); BinaryPrimitives.WriteInt32BigEndian(buffer, value); Write(s, buffer, sizeof(int)); } @@ -309,7 +309,7 @@ public static void Write(Stream s, int value) /// The long integer to write public static void Write(Stream s, long value) { - var buffer = GetThreadLocalBuffer(sizeof(long)); + byte[] buffer = GetThreadLocalBuffer(sizeof(long)); BinaryPrimitives.WriteInt64BigEndian(buffer, value); Write(s, buffer, sizeof(long)); } @@ -329,9 +329,9 @@ public static void Write(Stream s, double value) => /// The string to write public static void Write(Stream s, string value) { - var buffer = GetThreadLocalBuffer( + byte[] buffer = GetThreadLocalBuffer( sizeof(int) + Encoding.UTF8.GetMaxByteCount(value.Length)); - var len = Encoding.UTF8.GetBytes(value, 0, value.Length, buffer, sizeof(int)); + int len = Encoding.UTF8.GetBytes(value, 0, value.Length, buffer, sizeof(int)); BinaryPrimitives.WriteInt32BigEndian(buffer, len); Write(s, buffer, sizeof(int) + len); } diff --git a/src/csharp/Microsoft.Spark/Network/DefaultSocketWrapper.cs b/src/csharp/Microsoft.Spark/Network/DefaultSocketWrapper.cs index aa39363c7..8647a14cb 100644 --- a/src/csharp/Microsoft.Spark/Network/DefaultSocketWrapper.cs +++ b/src/csharp/Microsoft.Spark/Network/DefaultSocketWrapper.cs @@ -115,7 +115,7 @@ public void Connect(IPAddress remoteaddr, int port, string secret) private Stream CreateStream(string bufferSizeEnvVarName) { - var envVar = Environment.GetEnvironmentVariable(bufferSizeEnvVarName); + string envVar = Environment.GetEnvironmentVariable(bufferSizeEnvVarName); if (string.IsNullOrEmpty(envVar) || !int.TryParse(envVar, out var writeBufferSize)) { diff --git a/src/csharp/Microsoft.Spark/RDD.cs b/src/csharp/Microsoft.Spark/RDD.cs index baa4855ac..17133a057 100644 --- a/src/csharp/Microsoft.Spark/RDD.cs +++ b/src/csharp/Microsoft.Spark/RDD.cs @@ -298,7 +298,7 @@ internal virtual RDD MapPartitionsWithIndexInternal( private (int, string) CollectAndServe() { JvmObjectReference rddRef = GetJvmRef(); - var result = rddRef.Jvm.CallStaticJavaMethod( + object result = rddRef.Jvm.CallStaticJavaMethod( "org.apache.spark.api.python.PythonRDD", "collectAndServe", rddRef.Invoke("rdd")); @@ -479,7 +479,7 @@ internal override RDD MapPartitionsWithIndexInternal( { if (IsPipelinable()) { - var newWorkerFunc = RDD.WorkerFunction.Chain( + RDD.WorkerFunction newWorkerFunc = RDD.WorkerFunction.Chain( new RDD.WorkerFunction(_func.Func), new RDD.WorkerFunction(newFunc)); @@ -508,8 +508,8 @@ JvmObjectReference IJvmObjectReferenceProvider.Reference { IJvmBridge jvm = _prevRddJvmObjRef.Jvm; - var rdd = _prevRddJvmObjRef.Invoke("rdd"); - var command = Serialize(_func.Func, _prevSerializedMode, _serializedMode); + object rdd = _prevRddJvmObjRef.Invoke("rdd"); + byte[] command = Serialize(_func.Func, _prevSerializedMode, _serializedMode); JvmObjectReference pythonFunction = UdfUtils.CreatePythonFunction(jvm, command); diff --git 
a/src/csharp/Microsoft.Spark/SparkConf.cs b/src/csharp/Microsoft.Spark/SparkConf.cs index fd5326d58..6608341b8 100644 --- a/src/csharp/Microsoft.Spark/SparkConf.cs +++ b/src/csharp/Microsoft.Spark/SparkConf.cs @@ -134,7 +134,7 @@ public IReadOnlyList> GetAll() { if (!string.IsNullOrEmpty(kvpString)) { - var kvpItems = kvpString.Split('='); + string[] kvpItems = kvpString.Split('='); if ((kvpItems.Length == 2) && !string.IsNullOrEmpty(kvpItems[0]) && !string.IsNullOrEmpty(kvpItems[1])) diff --git a/src/csharp/Microsoft.Spark/Sql/RelationalGroupedDataset.cs b/src/csharp/Microsoft.Spark/Sql/RelationalGroupedDataset.cs index 381db6402..f7b189562 100644 --- a/src/csharp/Microsoft.Spark/Sql/RelationalGroupedDataset.cs +++ b/src/csharp/Microsoft.Spark/Sql/RelationalGroupedDataset.cs @@ -89,7 +89,7 @@ internal DataFrame Apply(StructType returnType, Func f ArrowGroupedMapWorkerFunction.ExecuteDelegate wrapper = new ArrowGroupedMapUdfWrapper(func).Execute; - var udf = UserDefinedFunction.Create( + UserDefinedFunction udf = UserDefinedFunction.Create( _jvmObject.Jvm, func.Method.ToString(), CommandSerDe.Serialize( diff --git a/src/csharp/Microsoft.Spark/Sql/Row.cs b/src/csharp/Microsoft.Spark/Sql/Row.cs index ac4f386fc..7aaff8776 100644 --- a/src/csharp/Microsoft.Spark/Sql/Row.cs +++ b/src/csharp/Microsoft.Spark/Sql/Row.cs @@ -26,7 +26,7 @@ internal Row(object[] values, StructType schema) _genericRow = new GenericRow(values); Schema = schema; - var schemaColumnCount = Schema.Fields.Count; + int schemaColumnCount = Schema.Fields.Count; if (Size() != schemaColumnCount) { throw new Exception( diff --git a/src/csharp/Microsoft.Spark/Sql/UDFRegistration.cs b/src/csharp/Microsoft.Spark/Sql/UDFRegistration.cs index c2f5a694f..9ce5f25cc 100644 --- a/src/csharp/Microsoft.Spark/Sql/UDFRegistration.cs +++ b/src/csharp/Microsoft.Spark/Sql/UDFRegistration.cs @@ -258,7 +258,7 @@ internal void Register(string name, Delegate func, UdfUtils.PythonEvalT CommandSerDe.SerializedMode.Row, CommandSerDe.SerializedMode.Row); - var udf = UserDefinedFunction.Create( + UserDefinedFunction udf = UserDefinedFunction.Create( _jvmObject.Jvm, name, command, diff --git a/src/csharp/Microsoft.Spark/Utils/AssemblyLoader.cs b/src/csharp/Microsoft.Spark/Utils/AssemblyLoader.cs index ab3ed00ec..621a81881 100644 --- a/src/csharp/Microsoft.Spark/Utils/AssemblyLoader.cs +++ b/src/csharp/Microsoft.Spark/Utils/AssemblyLoader.cs @@ -75,8 +75,8 @@ internal static class AssemblyLoader private static readonly string[] s_extensions = RuntimeInformation.IsOSPlatform(OSPlatform.Windows) ? - new[] { ".dll", ".exe", ".ni.dll", ".ni.exe" } : - new[] { ".dll", ".ni.dll" }; + new[] { ".dll", ".exe", ".ni.dll", ".ni.exe" } : + new[] { ".dll", ".ni.dll" }; private static readonly object s_cacheLock = new object(); diff --git a/src/csharp/Microsoft.Spark/Utils/Authenticator.cs b/src/csharp/Microsoft.Spark/Utils/Authenticator.cs index f48c6a5b8..5e4b41a2c 100644 --- a/src/csharp/Microsoft.Spark/Utils/Authenticator.cs +++ b/src/csharp/Microsoft.Spark/Utils/Authenticator.cs @@ -40,7 +40,7 @@ public static bool AuthenticateAsClient(Stream stream, string secret) /// True if authentication succeeds. 
public static bool AuthenticateAsServer(ISocketWrapper socket, string secret) { - var clientSecret = SerDe.ReadString(socket.InputStream); + string clientSecret = SerDe.ReadString(socket.InputStream); bool result; if (clientSecret == secret) diff --git a/src/csharp/Microsoft.Spark/Utils/CommandSerDe.cs b/src/csharp/Microsoft.Spark/Utils/CommandSerDe.cs index 53c87c527..b5cf38b89 100644 --- a/src/csharp/Microsoft.Spark/Utils/CommandSerDe.cs +++ b/src/csharp/Microsoft.Spark/Utils/CommandSerDe.cs @@ -110,9 +110,9 @@ internal static byte[] Serialize( var commandPayloadBytesList = new List(); // Add serializer mode. - var modeBytes = Encoding.UTF8.GetBytes(serializerMode.ToString()); - var length = modeBytes.Length; - var lengthAsBytes = BitConverter.GetBytes(length); + byte[] modeBytes = Encoding.UTF8.GetBytes(serializerMode.ToString()); + int length = modeBytes.Length; + byte[] lengthAsBytes = BitConverter.GetBytes(length); Array.Reverse(lengthAsBytes); commandPayloadBytesList.Add(lengthAsBytes); commandPayloadBytesList.Add(modeBytes); @@ -128,8 +128,8 @@ internal static byte[] Serialize( // Add run mode: // N - normal // R - repl - var runMode = Environment.GetEnvironmentVariable("SPARK_NET_RUN_MODE") ?? "N"; - var runModeBytes = Encoding.UTF8.GetBytes(runMode); + string runMode = Environment.GetEnvironmentVariable("SPARK_NET_RUN_MODE") ?? "N"; + byte[] runModeBytes = Encoding.UTF8.GetBytes(runMode); lengthAsBytes = BitConverter.GetBytes(runModeBytes.Length); Array.Reverse(lengthAsBytes); commandPayloadBytesList.Add(lengthAsBytes); @@ -138,7 +138,7 @@ internal static byte[] Serialize( if ("R".Equals(runMode, StringComparison.InvariantCultureIgnoreCase)) { // add compilation dump directory - var compilationDumpDirBytes = Encoding.UTF8.GetBytes( + byte[] compilationDumpDirBytes = Encoding.UTF8.GetBytes( Environment.GetEnvironmentVariable("SPARK_NET_SCRIPT_COMPILATION_DIR") ?? "."); lengthAsBytes = BitConverter.GetBytes(compilationDumpDirBytes.Length); Array.Reverse(lengthAsBytes); @@ -164,8 +164,8 @@ internal static byte[] Serialize( { formatter.Serialize(stream, udfWrapperData); - var udfBytes = stream.ToArray(); - var udfBytesLengthAsBytes = BitConverter.GetBytes(udfBytes.Length); + byte[] udfBytes = stream.ToArray(); + byte[] udfBytesLengthAsBytes = BitConverter.GetBytes(udfBytes.Length); Array.Reverse(udfBytesLengthAsBytes); commandPayloadBytesList.Add(udfBytesLengthAsBytes); commandPayloadBytesList.Add(udfBytes); @@ -235,15 +235,15 @@ internal static T Deserialize( runMode = SerDe.ReadString(stream); - var serializedCommand = SerDe.ReadBytes(stream); + byte[] serializedCommand = SerDe.ReadBytes(stream); var bf = new BinaryFormatter(); var ms = new MemoryStream(serializedCommand, false); var udfWrapperData = (UdfWrapperData)bf.Deserialize(ms); - var nodeIndex = 0; - var udfIndex = 0; + int nodeIndex = 0; + int udfIndex = 0; var udf = (T)DeserializeUdfs(udfWrapperData, ref nodeIndex, ref udfIndex); // Check all the data is consumed. 
@@ -259,7 +259,7 @@ private static Delegate DeserializeUdfs( ref int udfIndex) { UdfWrapperNode node = data.UdfWrapperNodes[nodeIndex++]; - var nodeType = Type.GetType(node.TypeName); + Type nodeType = Type.GetType(node.TypeName); if (node.HasUdf) { diff --git a/src/csharp/Microsoft.Spark/Utils/EnvironmentUtils.cs b/src/csharp/Microsoft.Spark/Utils/EnvironmentUtils.cs index 034c2b8aa..6deeb2770 100644 --- a/src/csharp/Microsoft.Spark/Utils/EnvironmentUtils.cs +++ b/src/csharp/Microsoft.Spark/Utils/EnvironmentUtils.cs @@ -13,7 +13,7 @@ internal static class EnvironmentUtils { internal static bool GetEnvironmentVariableAsBool(string name) { - var str = Environment.GetEnvironmentVariable(name); + string str = Environment.GetEnvironmentVariable(name); if (string.IsNullOrEmpty(str)) { return false; diff --git a/src/csharp/Microsoft.Spark/Utils/UdfSerDe.cs b/src/csharp/Microsoft.Spark/Utils/UdfSerDe.cs index e3b36483c..638838b9f 100644 --- a/src/csharp/Microsoft.Spark/Utils/UdfSerDe.cs +++ b/src/csharp/Microsoft.Spark/Utils/UdfSerDe.cs @@ -143,7 +143,7 @@ public bool Equals(FieldData other) internal static UdfData Serialize(Delegate udf) { MethodInfo method = udf.Method; - var target = udf.Target; + object target = udf.Target; var udfData = new UdfData() { @@ -214,7 +214,7 @@ private static TargetData SerializeTarget(object target) // For now, one way to distinguish is to check if any of the field's type // is same as the target type. If so, fields will be emptied out. // TODO: Follow up with the dotnet team. - var doesUdfHaveClosure = fields. + bool doesUdfHaveClosure = fields. Where((field) => field.TypeData.Name.Equals(targetTypeData.Name)). Count() == 0; @@ -230,7 +230,7 @@ private static TargetData SerializeTarget(object target) private static object DeserializeTargetData(TargetData targetData) { Type targetType = DeserializeType(targetData.TypeData); - var target = FormatterServices.GetUninitializedObject(targetType); + object target = FormatterServices.GetUninitializedObject(targetType); foreach (FieldData field in targetData.Fields ?? Enumerable.Empty()) {
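The mechanical changes above all apply the guideline introduced at the top of the patch. As a condensed reference, the array-initializer pattern the tests converge on looks like this (editor's sketch, not part of the patch; the values are made up):

```C#
var strs = new string[] { "hello", "spark" }; // OK: 'new string[]' names the element type.
var bad = new[] { "hello", "spark" };         // NOT OK: the element type is inferred.
string[] mixed = new[] { "hello", "spark" };  // NOT OK: explicit type mixed with inference.
```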