From 69b4e27bafc2d89ab43d9bebad2b9f82d5340a58 Mon Sep 17 00:00:00 2001 From: Yunchi Wang <54880216+wyunchi-ms@users.noreply.github.com> Date: Tue, 26 Oct 2021 15:51:30 +0800 Subject: [PATCH 1/6] Update ci config to add tools/GeneratedModuleList.txt to skip list (#16216) Co-authored-by: wyunchi-ms --- .ci-config.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.ci-config.json b/.ci-config.json index 31ab6953cd26..5a8b8cd9c958 100644 --- a/.ci-config.json +++ b/.ci-config.json @@ -65,7 +65,8 @@ "src/**/document/*", ".ci-config.json", "tools/PrepareAutorestModule.ps1", - "tools/SyncFromMainBranch.ps1" + "tools/SyncFromMainBranch.ps1", + "tools/GeneratedModuleList.txt" ], "phases": [] }, From 68872e74efea33b3e545e17ec820cd350336ee36 Mon Sep 17 00:00:00 2001 From: kceiw Date: Tue, 26 Oct 2021 20:04:21 -0700 Subject: [PATCH 2/6] Fix an issue in checking if we need to prompt survey. (#16232) --- .../Az.Tools.Predictor/AzPredictorSurveyHelper.cs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/Az.Tools.Predictor/Az.Tools.Predictor/AzPredictorSurveyHelper.cs b/tools/Az.Tools.Predictor/Az.Tools.Predictor/AzPredictorSurveyHelper.cs index 75b1eb526042..1b3b29241f65 100644 --- a/tools/Az.Tools.Predictor/Az.Tools.Predictor/AzPredictorSurveyHelper.cs +++ b/tools/Az.Tools.Predictor/Az.Tools.Predictor/AzPredictorSurveyHelper.cs @@ -91,7 +91,7 @@ public bool ShouldPromptSurvey() return _sharedSurveyHelper.ShouldPropmtSurvey(_ModuleName, _ModuleVersion); } - return true; + return false; } /// From c3a6d6a721612ab4b1b07637ebf3096f3be56a29 Mon Sep 17 00:00:00 2001 From: Reggie Gibson <31147354+regedit32@users.noreply.github.com> Date: Thu, 28 Oct 2021 03:39:32 -0400 Subject: [PATCH 3/6] Fix FirewallRuleName wildcard filtering in Get-AzSqlServerFirewallRule (#16237) * Add Wildcard helper * Fix wildcard filtering in Get-AzSqlServerFirewallRule * Update src/Sql/Sql/ChangeLog.md Co-authored-by: Beisi Zhou --- 
.../ResourceWildcardFilterHelperTests.cs | 60 +++++++++++++++++++ src/Sql/Sql/ChangeLog.md | 1 + .../Common/ResourceWildcardFilterHelper.cs | 42 +++++++++++++ .../Cmdlet/GetAzureSqlServerFirewallRule.cs | 4 +- 4 files changed, 106 insertions(+), 1 deletion(-) create mode 100644 src/Sql/Sql.Test/UnitTests/ResourceWildcardFilterHelperTests.cs create mode 100644 src/Sql/Sql/Common/ResourceWildcardFilterHelper.cs diff --git a/src/Sql/Sql.Test/UnitTests/ResourceWildcardFilterHelperTests.cs b/src/Sql/Sql.Test/UnitTests/ResourceWildcardFilterHelperTests.cs new file mode 100644 index 000000000000..6e2e5a23bfa0 --- /dev/null +++ b/src/Sql/Sql.Test/UnitTests/ResourceWildcardFilterHelperTests.cs @@ -0,0 +1,60 @@ +using Microsoft.Azure.Commands.Sql.Common; +using Microsoft.WindowsAzure.Commands.ScenarioTest; +using System.Collections.Generic; +using Xunit; + +namespace Microsoft.Azure.Commands.Sql.Test.UnitTests +{ + public class ResourceWildcardFilterHelperTests + { + [Fact] + [Trait(Category.AcceptanceType, Category.CheckIn)] + public void SqlSubResourceWildcardFilterTest() + { + ResourceWildcardFilterHelper wildcardFilterHelper = new ResourceWildcardFilterHelper(); + + // should match test01 + Assert.Single(wildcardFilterHelper.SqlSubResourceWildcardFilter("test01", ReturnedResources, "PropertyName1")); + // should match none + Assert.Empty(wildcardFilterHelper.SqlSubResourceWildcardFilter("test", ReturnedResources, "PropertyName1")); + // should match all + Assert.Equal(11, wildcardFilterHelper.SqlSubResourceWildcardFilter("t*t*", ReturnedResources, "PropertyName1").Count); + // should match none + Assert.Empty(wildcardFilterHelper.SqlSubResourceWildcardFilter("t*t", ReturnedResources, "PropertyName1")); + // should match test01 and test11 + Assert.Equal(2, wildcardFilterHelper.SqlSubResourceWildcardFilter("t*1", ReturnedResources, "PropertyName1").Count); + // should match all because empty value + Assert.Equal(11, 
wildcardFilterHelper.SqlSubResourceWildcardFilter(string.Empty, ReturnedResources, "PropertyName1").Count); + // should match all because null property name + Assert.Equal(11, wildcardFilterHelper.SqlSubResourceWildcardFilter("anything", ReturnedResources, null).Count); + } + + private readonly List ReturnedResources = new List() + { + new TestResource("test01", "case01"), + new TestResource("test02", "case02"), + new TestResource("test03", "case03"), + new TestResource("test04", "case04"), + new TestResource("test05", "case05"), + new TestResource("test06", "case06"), + new TestResource("test07", "case07"), + new TestResource("test08", "case08"), + new TestResource("test09", "case09"), + new TestResource("test10", "case10"), + new TestResource("test11", "case11"), + }; + } + + internal class TestResource + { + public TestResource(string PropertyName1, string PropertyName2) + { + this.PropertyName1 = PropertyName1; + this.PropertyName2 = PropertyName2; + } + + public string PropertyName1 { get; set; } + + public string PropertyName2 { get; set; } + } +} \ No newline at end of file diff --git a/src/Sql/Sql/ChangeLog.md b/src/Sql/Sql/ChangeLog.md index ee2c47db514e..38e30b3c8e9b 100644 --- a/src/Sql/Sql/ChangeLog.md +++ b/src/Sql/Sql/ChangeLog.md @@ -18,6 +18,7 @@ - Additional information about change #1 --> ## Upcoming Release +* Fixed FirewallRuleName wildcard filtering in `Get-AzSqlServerFirewallRule` [#16199] ## Version 3.5.1 diff --git a/src/Sql/Sql/Common/ResourceWildcardFilterHelper.cs b/src/Sql/Sql/Common/ResourceWildcardFilterHelper.cs new file mode 100644 index 000000000000..e8cbd34b0dba --- /dev/null +++ b/src/Sql/Sql/Common/ResourceWildcardFilterHelper.cs @@ -0,0 +1,42 @@ +using System.Collections.Generic; +using System.Linq; +using System.Management.Automation; + +namespace Microsoft.Azure.Commands.Sql.Common +{ + public class ResourceWildcardFilterHelper + { + public List SqlSubResourceWildcardFilter(string value, IEnumerable resources, string 
propertyName) + { + if (!string.IsNullOrEmpty(value) && !string.IsNullOrEmpty(propertyName)) + { + IEnumerable output = resources; + WildcardPattern pattern = new WildcardPattern(value, WildcardOptions.IgnoreCase); + output = output.Where(t => IsMatch(t, propertyName, pattern)); + + return output.ToList(); + } + else + { + return resources.ToList(); + } + } + + private bool IsMatch(T resource, string property, WildcardPattern pattern) + { + var value = (string)GetPropertyValue(resource, property); + return !string.IsNullOrEmpty(value) && pattern.IsMatch(value); + } + + private object GetPropertyValue(T resource, string property) + { + System.Reflection.PropertyInfo pi = typeof(T).GetProperty(property); + if (pi != null) + { + return pi.GetValue(resource, null); + } + + return null; + } + } +} \ No newline at end of file diff --git a/src/Sql/Sql/FirewallRule/Cmdlet/GetAzureSqlServerFirewallRule.cs b/src/Sql/Sql/FirewallRule/Cmdlet/GetAzureSqlServerFirewallRule.cs index f71514ba6c6a..e26d90534c61 100644 --- a/src/Sql/Sql/FirewallRule/Cmdlet/GetAzureSqlServerFirewallRule.cs +++ b/src/Sql/Sql/FirewallRule/Cmdlet/GetAzureSqlServerFirewallRule.cs @@ -12,6 +12,7 @@ // limitations under the License. 
// ---------------------------------------------------------------------------------- +using Microsoft.Azure.Commands.Sql.Common; using Microsoft.Azure.Commands.Sql.FirewallRule.Model; using System.Collections.Generic; using System.Management.Automation; @@ -44,6 +45,7 @@ public class GetAzureSqlServerFirewallRule : AzureSqlServerFirewallRuleCmdletBas protected override IEnumerable GetEntity() { ICollection results = null; + ResourceWildcardFilterHelper filterHelper = new ResourceWildcardFilterHelper(); if (this.MyInvocation.BoundParameters.ContainsKey("FirewallRuleName") && !WildcardPattern.ContainsWildcardCharacters(FirewallRuleName)) { @@ -55,7 +57,7 @@ protected override IEnumerable GetEntity() results = ModelAdapter.ListFirewallRules(this.ResourceGroupName, this.ServerName); } - return SubResourceWildcardFilter(FirewallRuleName, results); + return filterHelper.SqlSubResourceWildcardFilter(FirewallRuleName, results, nameof(AzureSqlServerFirewallRuleModel.FirewallRuleName)); } /// From 09b21e84026e3d62d3868c07b493763aa679c516 Mon Sep 17 00:00:00 2001 From: Wei Wei Date: Thu, 28 Oct 2021 17:29:45 +0800 Subject: [PATCH 4/6] [Storage] Add example for move datalakegen2 item with Sas (#16256) --- .../help/Move-AzDataLakeGen2Item.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/src/Storage/Storage.Management/help/Move-AzDataLakeGen2Item.md b/src/Storage/Storage.Management/help/Move-AzDataLakeGen2Item.md index 5fb08851b465..a3df6cd91b49 100644 --- a/src/Storage/Storage.Management/help/Move-AzDataLakeGen2Item.md +++ b/src/Storage/Storage.Management/help/Move-AzDataLakeGen2Item.md @@ -58,6 +58,24 @@ dir2/file2 False 1024 2020-03-23 09:57:33Z rwxrw-rw- This command move file 'dir1/file1' in 'filesystem1' to file 'dir2/file2' in 'filesystem2' in the same Storage account without prompt. 
+### Example 3: Move an item with Sas token +``` +PS C:\> $sas = New-AzStorageContainerSASToken -Name $filesystemName -Permission rdw -Context $ctx + +PS C:\> $sasctx = New-AzStorageContext -StorageAccountName $ctx.StorageAccountName -SasToken $sas + +PS C:\> Move-AzDataLakeGen2Item -FileSystem $filesystemName -Path $itempath1 -DestFileSystem $filesystemName -DestPath "$($itempath2)$($sas)" -Context $sasctx + + FileSystem Name: filesystem1 + +Path IsDirectory Length LastModified Permissions Owner Group +---- ----------- ------ ------------ ----------- ----- ----- +dir2/file1 False 1024 2021-03-23 09:57:33Z rwxrw-rw- $superuser $superuser +``` + +The first command creates a SAS token with rdw permission, the second command creates a Storage context from the SAS token, and the third command moves an item with the SAS token. +This example uses the same SAS token with rdw permission on both source and destination; if you use 2 separate SAS tokens for source and destination, the source needs permission rd and the destination needs permission w.
+ ## PARAMETERS ### -Context From de3deaa9e0d3861ccbf2729bd33bf02341d95e12 Mon Sep 17 00:00:00 2001 From: Micah McKittrick <32313503+mimckitt@users.noreply.github.com> Date: Thu, 28 Oct 2021 15:51:35 -0700 Subject: [PATCH 5/6] Update Invoke-AzVMRunCommand.md (#16000) * Update Invoke-AzVMRunCommand.md https://github.com/Azure/azure-powershell/issues/15704 * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md * Update Invoke-AzVMRunCommand.md Co-authored-by: Yunchi Wang <54880216+wyunchi-ms@users.noreply.github.com> Co-authored-by: Beisi Zhou --- .../Compute/help/Invoke-AzVMRunCommand.md | 24 +++++++++++++++---- 1 file changed, 19 insertions(+), 5 deletions(-) diff --git a/src/Compute/Compute/help/Invoke-AzVMRunCommand.md b/src/Compute/Compute/help/Invoke-AzVMRunCommand.md index 21b51848d7b8..578005ab7ab2 100644 --- a/src/Compute/Compute/help/Invoke-AzVMRunCommand.md +++ b/src/Compute/Compute/help/Invoke-AzVMRunCommand.md @@ -38,12 +38,25 @@ Invoke a run command on the VM. ## EXAMPLES -### Example 1 -``` +### Example 1: Invoke a command on Windows +```powershell PS C:\> Invoke-AzVMRunCommand -ResourceGroupName 'rgname' -VMName 'vmname' -CommandId 'RunPowerShellScript' -ScriptPath 'sample.ps1' -Parameter @{param1 = "var1"; param2 = "var2"} ``` -Invoke a run command of RunPowerShellScript with overriding the script 'sample.ps1' and the parameters on the VM of 'vmname' in resource group 'rgname'. +Invoke a run command 'RunPowerShellScript' with overriding the script 'sample.ps1' on a Windows VM named 'vmname' in resource group 'rgname'. Var1 and var2 are defined as parameters in the sample.ps1. Parameter value can be string type only and script is responsible for converting them to other types if needed. 
+ +### Example 2: Invoke a command on Linux +```powershell +export param1=var1 param2=var2 +set -- var1 var2 var3 + +Invoke-AzVMRunCommand -ResourceGroupName 'rgname' -Name 'vmname' -CommandId 'RunShellScript' -ScriptPath 'sample.bash' -Parameter @{"param1" = "var1";"param2" = "var2"} +echo This is a sample bash script +echo Usage 1: Ordered parameters: $0 $1 +echo Usage 2: Named exports: $var1 $var2 +``` + +This command invokes a run command 'RunShellScript' with overriding the script 'sample.bash' on a Linux VM named 'vmname'. Var1 and var2 are defined as parameters in the sample.bash. ## PARAMETERS @@ -93,7 +106,7 @@ Accept wildcard characters: False ``` ### -Parameter -The run command parameters. +The run command parameters. Specify parameters as key/value pairs to be passed at script execution. ```yaml Type: System.Collections.Hashtable @@ -138,7 +151,8 @@ Accept wildcard characters: False ``` ### -ScriptPath -Path of the script to be executed. When this value is given, the given script will override the default script of the command. +Path of the script to be executed. When this value is given, the given script will override the default script of the command. +Path should point to a file from a local file system. The command will load it and send it for execution. 
```yaml Type: System.String From 9715db5cc2a0f49001444a4e502f1a2e7435d1b4 Mon Sep 17 00:00:00 2001 From: YanjunGao <85206987+yanjungao718@users.noreply.github.com> Date: Fri, 29 Oct 2021 14:14:40 +0800 Subject: [PATCH 6/6] Update notebook and spark job definition cmdlets to support folder path (#16269) -update change log -add example for specifying multi-level folder path -update FolderName to FolderPath as PowerShell Team suggested -update foldername description in help doc -resolve some comments in code review -update change log -add new parameter foldername --- src/Synapse/Synapse/ChangeLog.md | 1 + .../Notebooks/SetAzureSynapseNotebook.cs | 11 ++++ .../SetAzureSynapseSparkJobDefinition.cs | 15 +++++- src/Synapse/Synapse/Common/HelpMessages.cs | 4 ++ .../Artifact/Notebooks/PSNotebook.cs | 4 ++ .../Artifact/Notebooks/PSNotebookFolder.cs | 29 +++++++++++ .../PSSparkJobDefinition.cs | 4 ++ .../PSSparkJobDefinitionFolder.cs | 28 +++++++++++ .../SynapseAnalyticsArtifactsClient.cs | 6 +-- .../Synapse/help/Set-AzSynapseNotebook.md | 50 +++++++++++++++---- .../help/Set-AzSynapseSparkJobDefinition.md | 39 +++++++++++++-- 11 files changed, 174 insertions(+), 17 deletions(-) create mode 100644 src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebookFolder.cs create mode 100644 src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinitionFolder.cs diff --git a/src/Synapse/Synapse/ChangeLog.md b/src/Synapse/Synapse/ChangeLog.md index c7922a4e8354..97010b2b8d02 100644 --- a/src/Synapse/Synapse/ChangeLog.md +++ b/src/Synapse/Synapse/ChangeLog.md @@ -19,6 +19,7 @@ --> ## Upcoming Release +* Updated `Set-AzSynapseNoteBook` and `Set-AzSynapseSparkJobDefinition` to support new parameter [-FolderPath] * Added support for Synapse data flow debug session - Added `Start-AzSynapseDataFlowDebugSession` cmdlet to start a Synapse Analytics data flow debug session. 
- Added `Add-AzSynapseDataFlowDebugSessionPackage` cmdlet diff --git a/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/Notebooks/SetAzureSynapseNotebook.cs b/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/Notebooks/SetAzureSynapseNotebook.cs index 0db8eed3916e..572c1acc3be7 100644 --- a/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/Notebooks/SetAzureSynapseNotebook.cs +++ b/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/Notebooks/SetAzureSynapseNotebook.cs @@ -57,6 +57,10 @@ public class SetAzureSynapseNotebook : SynapseArtifactsCmdletBase [Alias("NotebookName")] public string Name { get; set; } + [Parameter(ValueFromPipelineByPropertyName = false, Mandatory = false, HelpMessage = HelpMessages.NoteBookFolderPath)] + [ValidateNotNullOrEmpty] + public string FolderPath { get; set; } + [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByNameAndSparkPool, Mandatory = true, HelpMessage = HelpMessages.SparkPoolName)] [Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByObjectAndSparkPool, @@ -134,6 +138,13 @@ public override void ExecuteCmdlet() notebookResource.Properties.SessionProperties = new NotebookSessionProperties(options["memory"] + "g", (int)options["cores"], options["memory"] + "g", (int)options["cores"], (int)options["nodeCount"]); } + if (this.IsParameterBound(c => c.FolderPath)) + { + NotebookFolder folder = new NotebookFolder(); + folder.Name = this.FolderPath; + notebookResource.Properties.Folder = folder; + } + WriteObject(new PSNotebookResource(SynapseAnalyticsClient.CreateOrUpdateNotebook(this.Name, notebookResource), this.WorkspaceName)); } } diff --git a/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/SparkJobDefinitions/SetAzureSynapseSparkJobDefinition.cs b/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/SparkJobDefinitions/SetAzureSynapseSparkJobDefinition.cs index bb2a58eaf6e0..09a3379db006 100644 --- 
a/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/SparkJobDefinitions/SetAzureSynapseSparkJobDefinition.cs +++ b/src/Synapse/Synapse/Commands/DataPlaneCommands/Artifact/SparkJobDefinitions/SetAzureSynapseSparkJobDefinition.cs @@ -12,6 +12,7 @@ // limitations under the License. // ---------------------------------------------------------------------------------- +using Azure.Analytics.Synapse.Artifacts.Models; using Microsoft.Azure.Commands.Common.Exceptions; using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters; using Microsoft.Azure.Commands.Synapse.Common; @@ -54,6 +55,10 @@ public class SetAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase [Alias("File")] public string DefinitionFile { get; set; } + [Parameter(ValueFromPipelineByPropertyName = false, Mandatory = false, HelpMessage = HelpMessages.SparkConfigurationFolderPath)] + [ValidateNotNullOrEmpty] + public string FolderPath { get; set; } + [Parameter(Mandatory = false, HelpMessage = HelpMessages.AsJob)] public SwitchParameter AsJob { get; set; } @@ -67,7 +72,15 @@ public override void ExecuteCmdlet() if (this.ShouldProcess(this.WorkspaceName, String.Format(Resources.SettingSynapseSparkJobDefinition, this.Name, this.WorkspaceName))) { string rawJsonContent = SynapseAnalyticsClient.ReadJsonFileContent(this.TryResolvePath(DefinitionFile)); - WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, rawJsonContent))); + SparkJobDefinition sparkJobDefinition = JsonConvert.DeserializeObject(rawJsonContent); + SparkJobDefinitionResource sparkJobDefinitionResource = new SparkJobDefinitionResource(sparkJobDefinition); + if (this.IsParameterBound(c => c.FolderPath)) + { + SparkJobDefinitionFolder folder = new SparkJobDefinitionFolder(); + folder.Name = FolderPath; + sparkJobDefinitionResource.Properties.Folder = folder; + } + WriteObject(new 
PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, sparkJobDefinitionResource))); } } } diff --git a/src/Synapse/Synapse/Common/HelpMessages.cs b/src/Synapse/Synapse/Common/HelpMessages.cs index d2eaa834f071..9a69be0e5e73 100644 --- a/src/Synapse/Synapse/Common/HelpMessages.cs +++ b/src/Synapse/Synapse/Common/HelpMessages.cs @@ -540,5 +540,9 @@ SELECT on dbo.myTable by public public const string DefaultResultLimit = "The default resultLimit is 5000"; public const string SqlFilePath = "The SQL file path."; + + public const string SparkConfigurationFolderPath = "The folder that this Spark job definition is in. If specify a multi-level path such as [rootFolder/subFolder], the Spark job definition will appear at the bottom level. If not specified, this Spark job definition will appear at the root level."; + + public const string NoteBookFolderPath = "The folder that this notebook is in. If specify a multi-level path such as [rootFolder/subFolder], the notebook will appear at the bottom level. 
If not specified, this notebook will appear at the root level."; } } diff --git a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebook.cs b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebook.cs index 255eba8bd621..4feaa197b978 100644 --- a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebook.cs +++ b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebook.cs @@ -28,6 +28,7 @@ public PSNotebook(Notebook notebook) this.BigDataPool = new PSBigDataPoolReference(notebook?.BigDataPool); this.SessionProperties = new PSNotebookSessionProperties(notebook?.SessionProperties); this.Metadata = new PSNotebookMetadata(notebook?.Metadata); + this.Folder = new PSNotebookFolder(notebook?.Folder); this.NotebookFormat = notebook?.Nbformat; this.NotebookFormatMinor = notebook?.NbformatMinor; this.Cells = notebook?.Cells?.Select(element => new PSNotebookCell(element)).ToList(); @@ -46,6 +47,9 @@ public PSNotebook(Notebook notebook) [JsonProperty(PropertyName = "metadata")] public PSNotebookMetadata Metadata { get; set; } + [JsonIgnore] + public PSNotebookFolder Folder { get; set; } + [DefaultValue(4)] [JsonProperty(DefaultValueHandling = DefaultValueHandling.Populate, PropertyName = "nbformat")] public int? NotebookFormat { get; set; } diff --git a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebookFolder.cs b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebookFolder.cs new file mode 100644 index 000000000000..948951eaf5a7 --- /dev/null +++ b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/Notebooks/PSNotebookFolder.cs @@ -0,0 +1,29 @@ +// ---------------------------------------------------------------------------------- +// +// Copyright Microsoft Corporation +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// ---------------------------------------------------------------------------------- + +using Azure.Analytics.Synapse.Artifacts.Models; +using Newtonsoft.Json; + +namespace Microsoft.Azure.Commands.Synapse.Models +{ + public class PSNotebookFolder + { + public PSNotebookFolder(NotebookFolder folder) + { + this.Name = folder?.Name; + } + + public string Name { get; set; } + } +} diff --git a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinition.cs b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinition.cs index 3438bd6380dd..1ccf8ecc4c67 100644 --- a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinition.cs +++ b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinition.cs @@ -24,6 +24,7 @@ public PSSparkJobDefinition(SparkJobDefinition properties) TargetBigDataPool = properties?.TargetBigDataPool != null ? new PSBigDataPoolReference(properties.TargetBigDataPool) : null; RequiredSparkVersion = properties?.RequiredSparkVersion; JobProperties = properties?.JobProperties != null ? new PSSparkJobProperties(properties.JobProperties) : null; + Folder = properties?.Folder != null ? new PSSparkJobDefinitionFolder(properties.Folder) : null; } /// The description of the Spark job definition. @@ -40,5 +41,8 @@ public PSSparkJobDefinition(SparkJobDefinition properties) /// The properties of the Spark job. public PSSparkJobProperties JobProperties { get; set; } + + /// The folder that this Spark job definition is in. 
If not specified, this Spark job definition will appear at the root level. + public PSSparkJobDefinitionFolder Folder { get; set; } } } \ No newline at end of file diff --git a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinitionFolder.cs b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinitionFolder.cs new file mode 100644 index 000000000000..684456bfcf5a --- /dev/null +++ b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SparkJobDefinitions/PSSparkJobDefinitionFolder.cs @@ -0,0 +1,28 @@ +// ---------------------------------------------------------------------------------- +// +// Copyright Microsoft Corporation +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+// ---------------------------------------------------------------------------------- + +using Azure.Analytics.Synapse.Artifacts.Models; + +namespace Microsoft.Azure.Commands.Synapse.Models +{ + public class PSSparkJobDefinitionFolder + { + public PSSparkJobDefinitionFolder(SparkJobDefinitionFolder folder) + { + this.Name = folder?.Name; + } + + public string Name { get; set; } + } +} \ No newline at end of file diff --git a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SynapseAnalyticsArtifactsClient.cs b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SynapseAnalyticsArtifactsClient.cs index 893c2f1ba66b..1be17fa7b57c 100644 --- a/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SynapseAnalyticsArtifactsClient.cs +++ b/src/Synapse/Synapse/Models/DataPlaneModels/Artifact/SynapseAnalyticsArtifactsClient.cs @@ -339,10 +339,10 @@ public BigDataPoolResourceInfo GetBigDataPool(string bigDataPoolName) #region Spark Job Definition - public SparkJobDefinitionResource CreateOrUpdateSparkJobDefinition(string SparkJobDefinitionName, string rawJsonContent) + public SparkJobDefinitionResource CreateOrUpdateSparkJobDefinition(string SparkJobDefinitionName, SparkJobDefinitionResource SparkJobDefinition) { - SparkJobDefinitionResource SparkJobDefinition = new SparkJobDefinitionResource(JsonConvert.DeserializeObject(rawJsonContent)); - return _sparkJobDefinitionClient.StartCreateOrUpdateSparkJobDefinition(SparkJobDefinitionName, SparkJobDefinition).Poll().Value; + var operation = _sparkJobDefinitionClient.StartCreateOrUpdateSparkJobDefinition(SparkJobDefinitionName, SparkJobDefinition); + return operation.Poll().Value; } public SparkJobDefinitionResource GetSparkJobDefinition(string SparkJobDefinitionName) diff --git a/src/Synapse/Synapse/help/Set-AzSynapseNotebook.md b/src/Synapse/Synapse/help/Set-AzSynapseNotebook.md index d1fc78df3bc1..35ad0b9e0b5a 100644 --- a/src/Synapse/Synapse/help/Set-AzSynapseNotebook.md +++ 
b/src/Synapse/Synapse/help/Set-AzSynapseNotebook.md @@ -14,27 +14,28 @@ Creates or updates a notebook in a workspace. ### SetByName (Default) ``` -Set-AzSynapseNotebook -WorkspaceName [-Name ] -DefinitionFile [-AsJob] - [-DefaultProfile ] [-WhatIf] [-Confirm] [] +Set-AzSynapseNotebook -WorkspaceName [-Name ] [-FolderPath ] -DefinitionFile + [-AsJob] [-DefaultProfile ] [-WhatIf] [-Confirm] [] ``` ### SetByNameAndSparkPool ``` -Set-AzSynapseNotebook -WorkspaceName [-Name ] -SparkPoolName [-ExecutorSize ] - -ExecutorCount -DefinitionFile [-AsJob] [-DefaultProfile ] [-WhatIf] - [-Confirm] [] +Set-AzSynapseNotebook -WorkspaceName [-Name ] [-FolderPath ] -SparkPoolName + [-ExecutorSize ] -ExecutorCount -DefinitionFile [-AsJob] + [-DefaultProfile ] [-WhatIf] [-Confirm] [] ``` ### SetByObject ``` -Set-AzSynapseNotebook -WorkspaceObject [-Name ] -DefinitionFile [-AsJob] - [-DefaultProfile ] [-WhatIf] [-Confirm] [] +Set-AzSynapseNotebook -WorkspaceObject [-Name ] [-FolderPath ] + -DefinitionFile [-AsJob] [-DefaultProfile ] [-WhatIf] [-Confirm] + [] ``` ### SetByObjectAndSparkPool ``` -Set-AzSynapseNotebook -WorkspaceObject [-Name ] -SparkPoolName - [-ExecutorSize ] -ExecutorCount -DefinitionFile [-AsJob] +Set-AzSynapseNotebook -WorkspaceObject [-Name ] [-FolderPath ] + -SparkPoolName [-ExecutorSize ] -ExecutorCount -DefinitionFile [-AsJob] [-DefaultProfile ] [-WhatIf] [-Confirm] [] ``` @@ -56,13 +57,27 @@ This command creates or updates a notebook from notebook file notebook.ipynb in ### Example 2 ```powershell +PS C:\> Set-AzSynapseNotebook -WorkspaceName ContosoWorkspace -DefinitionFile "C:\\samples\\notebook.ipynb" -FolderPath ContosoFolder +``` + +This command creates or updates a notebook from notebook file notebook.ipynb and specify a folder path ContosoFolder where the notebook will be placed in the workspace named ContosoWorkspace. 
+ +### Example 3 +```powershell +PS C:\> Set-AzSynapseNotebook -WorkspaceName ContosoWorkspace -DefinitionFile "C:\\samples\\notebook.ipynb" -FolderPath ContosoFolder/SubFolder +``` + +This command creates or updates a notebook from notebook file notebook.ipynb and specify a multi-level folder path ContosoFolder/SubFolder where the notebook will be placed in the workspace named ContosoWorkspace. + +### Example 4 +```powershell PS C:\> $ws = Get-AzSynapseWorkspace -Name ContosoWorkspace PS C:\> $ws | Set-AzSynapseNotebook -DefinitionFile "C:\\samples\\notebook.ipynb" ``` This command creates or updates a notebook from notebook file notebook.ipynb in the workspace named ContosoWorkspace through pipeline. -### Example 3 +### Example 5 ```powershell PS C:\> Set-AzSynapseNotebook -WorkspaceName ContosoWorkspace -DefinitionFile "C:\\samples\\notebook.ipynb" -SparkPoolName ContosoSparkPool -ExecutorCount 2 ``` @@ -147,6 +162,21 @@ Accept pipeline input: False Accept wildcard characters: False ``` +### -FolderPath +The folder that this notebook is in. If specify a multi-level path such as [rootFolder/subFolder], the notebook will appear at the bottom level. If not specified, this notebook will appear at the root level. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + ### -Name The notebook name. diff --git a/src/Synapse/Synapse/help/Set-AzSynapseSparkJobDefinition.md b/src/Synapse/Synapse/help/Set-AzSynapseSparkJobDefinition.md index d4c1fc741c33..a72b0082d832 100644 --- a/src/Synapse/Synapse/help/Set-AzSynapseSparkJobDefinition.md +++ b/src/Synapse/Synapse/help/Set-AzSynapseSparkJobDefinition.md @@ -14,14 +14,16 @@ Creates a Spark job definition in workspace. 
### SetByName (Default) ``` -Set-AzSynapseSparkJobDefinition -WorkspaceName -Name -DefinitionFile [-AsJob] - [-DefaultProfile ] [-WhatIf] [-Confirm] [] +Set-AzSynapseSparkJobDefinition -WorkspaceName -Name -DefinitionFile + [-FolderPath ] [-AsJob] [-DefaultProfile ] [-WhatIf] [-Confirm] + [] ``` ### SetByObject ``` Set-AzSynapseSparkJobDefinition -WorkspaceObject -Name -DefinitionFile - [-AsJob] [-DefaultProfile ] [-WhatIf] [-Confirm] [] + [-FolderPath ] [-AsJob] [-DefaultProfile ] [-WhatIf] [-Confirm] + [] ``` ## DESCRIPTION @@ -39,6 +41,22 @@ The command bases the Spark job definition on information in the sparkJobDefinit ### Example 2 ```powershell +PS C:\> Set-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJobDefinition -DefinitionFile "C:\sparkJobDefinition.json" -FolderPath ContosoFolder +``` + +This command creates a Spark job definition named ContosoSparkJobDefinition and specify a folder path ContosoFolder where the spark job definition will be placed in the workspace named ContosoWorkspace. +The command bases the Spark job definition on information in the sparkJobDefinition.json file. + +### Example 3 +```powershell +PS C:\> Set-AzSynapseSparkJobDefinition -WorkspaceName ContosoWorkspace -Name ContosoSparkJobDefinition -DefinitionFile "C:\sparkJobDefinition.json" -FolderPath ContosoFolder/SubFolder +``` + +This command creates a Spark job definition named ContosoSparkJobDefinition and specify a multi-level folder path ContosoFolder/SubFolder where the spark job definition will be placed in the workspace named ContosoWorkspace. +The command bases the Spark job definition on information in the sparkJobDefinition.json file. 
+ +### Example 4 +```powershell PS C:\> $ws = Get-AzSynapseWorkspace -Name ContosoWorkspace PS C:\> $ws | Set-AzSynapseSparkJobDefinition -Name ContosoSparkJobDefinition -DefinitionFile "C:\sparkJobDefinition.json" ``` @@ -93,6 +111,21 @@ Accept pipeline input: False Accept wildcard characters: False ``` +### -FolderPath +The folder that this Spark job definition is in. If specify a multi-level path such as [rootFolder/subFolder], the Spark job definition will appear at the bottom level. If not specified, this Spark job definition will appear at the root level. + +```yaml +Type: System.String +Parameter Sets: (All) +Aliases: + +Required: False +Position: Named +Default value: None +Accept pipeline input: False +Accept wildcard characters: False +``` + ### -Name The Spark job definition name.