Skip to content

Commit

Permalink
Merge pull request Azure#16283 from Azure/bez/fix
Browse files Browse the repository at this point in the history
Fix conflicts
  • Loading branch information
BethanyZhou authored Oct 29, 2021
2 parents 37010d9 + b7a961f commit 4493fc2
Show file tree
Hide file tree
Showing 19 changed files with 320 additions and 25 deletions.
3 changes: 2 additions & 1 deletion .ci-config.json
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,8 @@
"src/**/document/*",
".ci-config.json",
"tools/PrepareAutorestModule.ps1",
"tools/SyncFromMainBranch.ps1"
"tools/SyncFromMainBranch.ps1",
"tools/GeneratedModuleList.txt"
],
"phases": []
},
Expand Down
24 changes: 19 additions & 5 deletions src/Compute/Compute/help/Invoke-AzVMRunCommand.md
Original file line number Diff line number Diff line change
Expand Up @@ -38,12 +38,25 @@ Invoke a run command on the VM.

## EXAMPLES

### Example 1
```
### Example 1: Invoke a command on Windows
```powershell
PS C:\> Invoke-AzVMRunCommand -ResourceGroupName 'rgname' -VMName 'vmname' -CommandId 'RunPowerShellScript' -ScriptPath 'sample.ps1' -Parameter @{param1 = "var1"; param2 = "var2"}
```

Invoke a run command of RunPowerShellScript with overriding the script 'sample.ps1' and the parameters on the VM of 'vmname' in resource group 'rgname'.
Invoke a run command 'RunPowerShellScript' with overriding the script 'sample.ps1' on a Windows VM named 'vmname' in resource group 'rgname'. Var1 and var2 are defined as parameters in sample.ps1. Parameter values can be of string type only, and the script is responsible for converting them to other types if needed.

### Example 2: Invoke a command on Linux
```powershell
export param1=var1 param2=var2
set -- var1 var2 var3
Invoke-AzVMRunCommand -ResourceGroupName 'rgname' -Name 'vmname' -CommandId 'RunShellScript' -ScriptPath 'sample.bash' -Parameter @{"param1" = "var1";"param2" = "var2"}
echo This is a sample bash script
echo Usage 1: Ordered parameters: $0 $1
echo Usage 2: Named exports: $var1 $var2
```

This command invokes a run command 'RunShellScript' with overriding the script 'sample.bash' on a Linux VM named 'vmname'. Var1 and var2 are defined as parameters in the sample.bash.

## PARAMETERS

Expand Down Expand Up @@ -93,7 +106,7 @@ Accept wildcard characters: False
```
### -Parameter
The run command parameters.
The run command parameters. Specify parameters as key/value pairs to be passed at script execution.
```yaml
Type: System.Collections.Hashtable
Expand Down Expand Up @@ -138,7 +151,8 @@ Accept wildcard characters: False
```
### -ScriptPath
Path of the script to be executed. When this value is given, the given script will override the default script of the command.
Path of the script to be executed. When this value is given, the given script will override the default script of the command.
Path should point to a file from a local file system. The command will load it and send it for execution.
```yaml
Type: System.String
Expand Down
60 changes: 60 additions & 0 deletions src/Sql/Sql.Test/UnitTests/ResourceWildcardFilterHelperTests.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
using Microsoft.Azure.Commands.Sql.Common;
using Microsoft.WindowsAzure.Commands.ScenarioTest;
using System.Collections.Generic;
using Xunit;

namespace Microsoft.Azure.Commands.Sql.Test.UnitTests
{
    /// <summary>
    /// Unit tests for <see cref="ResourceWildcardFilterHelper.SqlSubResourceWildcardFilter{T}"/>.
    /// </summary>
    public class ResourceWildcardFilterHelperTests
    {
        [Fact]
        [Trait(Category.AcceptanceType, Category.CheckIn)]
        public void SqlSubResourceWildcardFilterTest()
        {
            var helper = new ResourceWildcardFilterHelper();

            // Exact name matches a single resource.
            Assert.Single(helper.SqlSubResourceWildcardFilter("test01", _resources, "PropertyName1"));

            // A non-wildcard value that is only a prefix matches nothing.
            Assert.Empty(helper.SqlSubResourceWildcardFilter("test", _resources, "PropertyName1"));

            // "t*t*" matches every resource.
            Assert.Equal(11, helper.SqlSubResourceWildcardFilter("t*t*", _resources, "PropertyName1").Count);

            // "t*t" requires the value to END with 't', so nothing matches.
            Assert.Empty(helper.SqlSubResourceWildcardFilter("t*t", _resources, "PropertyName1"));

            // "t*1" matches test01 and test11.
            Assert.Equal(2, helper.SqlSubResourceWildcardFilter("t*1", _resources, "PropertyName1").Count);

            // An empty filter value disables filtering: everything is returned.
            Assert.Equal(11, helper.SqlSubResourceWildcardFilter(string.Empty, _resources, "PropertyName1").Count);

            // A null property name likewise disables filtering.
            Assert.Equal(11, helper.SqlSubResourceWildcardFilter("anything", _resources, null).Count);
        }

        // Eleven fixed resources named test01..test11 used by every assertion above.
        private readonly List<TestResource> _resources = new List<TestResource>()
        {
            new TestResource("test01", "case01"),
            new TestResource("test02", "case02"),
            new TestResource("test03", "case03"),
            new TestResource("test04", "case04"),
            new TestResource("test05", "case05"),
            new TestResource("test06", "case06"),
            new TestResource("test07", "case07"),
            new TestResource("test08", "case08"),
            new TestResource("test09", "case09"),
            new TestResource("test10", "case10"),
            new TestResource("test11", "case11"),
        };
    }

    /// <summary>
    /// Minimal POCO whose public properties are located by name via reflection in the helper.
    /// </summary>
    internal class TestResource
    {
        public TestResource(string propertyName1, string propertyName2)
        {
            PropertyName1 = propertyName1;
            PropertyName2 = propertyName2;
        }

        public string PropertyName1 { get; set; }

        public string PropertyName2 { get; set; }
    }
}
1 change: 1 addition & 0 deletions src/Sql/Sql/ChangeLog.md
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
- Additional information about change #1
-->
## Upcoming Release
* Fixed FirewallRuleName wildcard filtering in `Get-AzSqlServerFirewallRule` [#16199]

## Version 3.5.1

Expand Down
42 changes: 42 additions & 0 deletions src/Sql/Sql/Common/ResourceWildcardFilterHelper.cs
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;

namespace Microsoft.Azure.Commands.Sql.Common
{
    /// <summary>
    /// Filters resource lists by matching a PowerShell-style wildcard pattern against
    /// the string value of a named public property (looked up via reflection).
    /// </summary>
    public class ResourceWildcardFilterHelper
    {
        /// <summary>
        /// Returns the resources whose <paramref name="propertyName"/> property value matches
        /// the wildcard <paramref name="value"/> (case-insensitive).
        /// When <paramref name="value"/> or <paramref name="propertyName"/> is null or empty,
        /// filtering is skipped and all resources are returned.
        /// </summary>
        /// <param name="value">Wildcard pattern, e.g. "t*1"; null/empty means "no filter".</param>
        /// <param name="resources">The resources to filter.</param>
        /// <param name="propertyName">Name of the public property to match on; null/empty means "no filter".</param>
        /// <returns>The matching resources as a list (possibly empty, never null).</returns>
        public List<T> SqlSubResourceWildcardFilter<T>(string value, IEnumerable<T> resources, string propertyName)
        {
            if (string.IsNullOrEmpty(value) || string.IsNullOrEmpty(propertyName))
            {
                return resources.ToList();
            }

            // Resolve the property once up front instead of reflecting on every element.
            // A missing property yields null here, so nothing will match (same result as before).
            System.Reflection.PropertyInfo pi = typeof(T).GetProperty(propertyName);
            WildcardPattern pattern = new WildcardPattern(value, WildcardOptions.IgnoreCase);

            return resources.Where(t => IsMatch(pi, t, pattern)).ToList();
        }

        /// <summary>
        /// True when the resource's property value is a non-empty string matching the pattern.
        /// </summary>
        private static bool IsMatch<T>(System.Reflection.PropertyInfo pi, T resource, WildcardPattern pattern)
        {
            // 'as string' (instead of a hard cast) avoids an InvalidCastException when the
            // named property exists but is not a string; such values simply do not match.
            var value = pi?.GetValue(resource, null) as string;
            return !string.IsNullOrEmpty(value) && pattern.IsMatch(value);
        }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
// limitations under the License.
// ----------------------------------------------------------------------------------

using Microsoft.Azure.Commands.Sql.Common;
using Microsoft.Azure.Commands.Sql.FirewallRule.Model;
using System.Collections.Generic;
using System.Management.Automation;
Expand Down Expand Up @@ -44,6 +45,7 @@ public class GetAzureSqlServerFirewallRule : AzureSqlServerFirewallRuleCmdletBas
protected override IEnumerable<AzureSqlServerFirewallRuleModel> GetEntity()
{
ICollection<AzureSqlServerFirewallRuleModel> results = null;
ResourceWildcardFilterHelper filterHelper = new ResourceWildcardFilterHelper();

if (this.MyInvocation.BoundParameters.ContainsKey("FirewallRuleName") && !WildcardPattern.ContainsWildcardCharacters(FirewallRuleName))
{
Expand All @@ -55,7 +57,7 @@ protected override IEnumerable<AzureSqlServerFirewallRuleModel> GetEntity()
results = ModelAdapter.ListFirewallRules(this.ResourceGroupName, this.ServerName);
}

return SubResourceWildcardFilter(FirewallRuleName, results);
return filterHelper.SqlSubResourceWildcardFilter(FirewallRuleName, results, nameof(AzureSqlServerFirewallRuleModel.FirewallRuleName));
}

/// <summary>
Expand Down
18 changes: 18 additions & 0 deletions src/Storage/Storage.Management/help/Move-AzDataLakeGen2Item.md
Original file line number Diff line number Diff line change
Expand Up @@ -58,6 +58,24 @@ dir2/file2 False 1024 2020-03-23 09:57:33Z rwxrw-rw-

This command moves the file 'dir1/file1' in 'filesystem1' to the file 'dir2/file2' in 'filesystem2' in the same Storage account without prompting.

### Example 3: Move an item with Sas token
```
PS C:\> $sas = New-AzStorageContainerSASToken -Name $filesystemName -Permission rdw -Context $ctx
PS C:\> $sasctx = New-AzStorageContext -StorageAccountName $ctx.StorageAccountName -SasToken $sas
PS C:\> Move-AzDataLakeGen2Item -FileSystem $filesystemName -Path $itempath1 -DestFileSystem $filesystemName -DestPath "$($itempath2)$($sas)" -Context $sasctx
FileSystem Name: filesystem1
Path IsDirectory Length LastModified Permissions Owner Group
---- ----------- ------ ------------ ----------- ----- -----
dir2/file1 False 1024 2021-03-23 09:57:33Z rwxrw-rw- $superuser $superuser
```

The first command creates a SAS token with rdw permission, the second command creates a Storage context from the SAS token, and the third command moves an item with the SAS token.
This example uses the same SAS token with rdw permission for both source and destination. If you use two SAS tokens, one for the source and one for the destination, the source needs rd permission and the destination needs w permission.

## PARAMETERS

### -Context
Expand Down
1 change: 1 addition & 0 deletions src/Synapse/Synapse/ChangeLog.md
Original file line number Diff line number Diff line change
Expand Up @@ -19,6 +19,7 @@
-->

## Upcoming Release
* Updated `Set-AzSynapseNotebook` and `Set-AzSynapseSparkJobDefinition` to support new parameter [-FolderPath]

## Version 0.18.0
* Added cmdlets for Synapse Kusto pool
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -57,6 +57,10 @@ public class SetAzureSynapseNotebook : SynapseArtifactsCmdletBase
[Alias("NotebookName")]
public string Name { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, Mandatory = false, HelpMessage = HelpMessages.NoteBookFolderPath)]
[ValidateNotNullOrEmpty]
public string FolderPath { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByNameAndSparkPool,
Mandatory = true, HelpMessage = HelpMessages.SparkPoolName)]
[Parameter(ValueFromPipelineByPropertyName = false, ParameterSetName = SetByObjectAndSparkPool,
Expand Down Expand Up @@ -134,6 +138,13 @@ public override void ExecuteCmdlet()
notebookResource.Properties.SessionProperties = new NotebookSessionProperties(options["memory"] + "g", (int)options["cores"], options["memory"] + "g", (int)options["cores"], (int)options["nodeCount"]);
}

if (this.IsParameterBound(c => c.FolderPath))
{
NotebookFolder folder = new NotebookFolder();
folder.Name = this.FolderPath;
notebookResource.Properties.Folder = folder;
}

WriteObject(new PSNotebookResource(SynapseAnalyticsClient.CreateOrUpdateNotebook(this.Name, notebookResource), this.WorkspaceName));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
// limitations under the License.
// ----------------------------------------------------------------------------------

using Azure.Analytics.Synapse.Artifacts.Models;
using Microsoft.Azure.Commands.Common.Exceptions;
using Microsoft.Azure.Commands.ResourceManager.Common.ArgumentCompleters;
using Microsoft.Azure.Commands.Synapse.Common;
Expand Down Expand Up @@ -54,6 +55,10 @@ public class SetAzureSynapseSparkJobDefinition : SynapseArtifactsCmdletBase
[Alias("File")]
public string DefinitionFile { get; set; }

[Parameter(ValueFromPipelineByPropertyName = false, Mandatory = false, HelpMessage = HelpMessages.SparkConfigurationFolderPath)]
[ValidateNotNullOrEmpty]
public string FolderPath { get; set; }

[Parameter(Mandatory = false, HelpMessage = HelpMessages.AsJob)]
public SwitchParameter AsJob { get; set; }

Expand All @@ -67,7 +72,15 @@ public override void ExecuteCmdlet()
if (this.ShouldProcess(this.WorkspaceName, String.Format(Resources.SettingSynapseSparkJobDefinition, this.Name, this.WorkspaceName)))
{
string rawJsonContent = SynapseAnalyticsClient.ReadJsonFileContent(this.TryResolvePath(DefinitionFile));
WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, rawJsonContent)));
SparkJobDefinition sparkJobDefinition = JsonConvert.DeserializeObject<SparkJobDefinition>(rawJsonContent);
SparkJobDefinitionResource sparkJobDefinitionResource = new SparkJobDefinitionResource(sparkJobDefinition);
if (this.IsParameterBound(c => c.FolderPath))
{
SparkJobDefinitionFolder folder = new SparkJobDefinitionFolder();
folder.Name = FolderPath;
sparkJobDefinitionResource.Properties.Folder = folder;
}
WriteObject(new PSSparkJobDefinitionResource(SynapseAnalyticsClient.CreateOrUpdateSparkJobDefinition(this.Name, sparkJobDefinitionResource)));
}
}
}
Expand Down
4 changes: 4 additions & 0 deletions src/Synapse/Synapse/Common/HelpMessages.cs
Original file line number Diff line number Diff line change
Expand Up @@ -540,5 +540,9 @@ SELECT on dbo.myTable by public
public const string DefaultResultLimit = "The default resultLimit is 5000";

public const string SqlFilePath = "The SQL file path.";

public const string SparkConfigurationFolderPath = "The folder that this Spark job definition is in. If specify a multi-level path such as [rootFolder/subFolder], the Spark job definition will appear at the bottom level. If not specified, this Spark job definition will appear at the root level.";

public const string NoteBookFolderPath = "The folder that this notebook is in. If specify a multi-level path such as [rootFolder/subFolder], the notebook will appear at the bottom level. If not specified, this notebook will appear at the root level.";
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -28,6 +28,7 @@ public PSNotebook(Notebook notebook)
this.BigDataPool = new PSBigDataPoolReference(notebook?.BigDataPool);
this.SessionProperties = new PSNotebookSessionProperties(notebook?.SessionProperties);
this.Metadata = new PSNotebookMetadata(notebook?.Metadata);
this.Folder = new PSNotebookFolder(notebook?.Folder);
this.NotebookFormat = notebook?.Nbformat;
this.NotebookFormatMinor = notebook?.NbformatMinor;
this.Cells = notebook?.Cells?.Select(element => new PSNotebookCell(element)).ToList();
Expand All @@ -46,6 +47,9 @@ public PSNotebook(Notebook notebook)
[JsonProperty(PropertyName = "metadata")]
public PSNotebookMetadata Metadata { get; set; }

[JsonIgnore]
public PSNotebookFolder Folder { get; set; }

[DefaultValue(4)]
[JsonProperty(DefaultValueHandling = DefaultValueHandling.Populate, PropertyName = "nbformat")]
public int? NotebookFormat { get; set; }
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,29 @@
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using Azure.Analytics.Synapse.Artifacts.Models;
using Newtonsoft.Json;

namespace Microsoft.Azure.Commands.Synapse.Models
{
    /// <summary>
    /// PowerShell-facing wrapper around the SDK <see cref="NotebookFolder"/> model,
    /// exposing the folder the notebook is placed in.
    /// </summary>
    public class PSNotebookFolder
    {
        /// <summary>
        /// Copies the folder name from the SDK model; a null model yields a null name.
        /// </summary>
        public PSNotebookFolder(NotebookFolder folder) => Name = folder?.Name;

        /// <summary>The folder name (path) of the notebook.</summary>
        public string Name { get; set; }
    }
}
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ public PSSparkJobDefinition(SparkJobDefinition properties)
TargetBigDataPool = properties?.TargetBigDataPool != null ? new PSBigDataPoolReference(properties.TargetBigDataPool) : null;
RequiredSparkVersion = properties?.RequiredSparkVersion;
JobProperties = properties?.JobProperties != null ? new PSSparkJobProperties(properties.JobProperties) : null;
Folder = properties?.Folder != null ? new PSSparkJobDefinitionFolder(properties.Folder) : null;
}

/// <summary> The description of the Spark job definition. </summary>
Expand All @@ -40,5 +41,8 @@ public PSSparkJobDefinition(SparkJobDefinition properties)

/// <summary> The properties of the Spark job. </summary>
public PSSparkJobProperties JobProperties { get; set; }

/// <summary> The folder that this Spark job definition is in. If not specified, this Spark job definition will appear at the root level.</summary>
public PSSparkJobDefinitionFolder Folder { get; set; }
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------

using Azure.Analytics.Synapse.Artifacts.Models;

namespace Microsoft.Azure.Commands.Synapse.Models
{
    /// <summary>
    /// PowerShell-facing wrapper around the SDK <see cref="SparkJobDefinitionFolder"/> model,
    /// exposing the folder the Spark job definition is placed in.
    /// </summary>
    public class PSSparkJobDefinitionFolder
    {
        /// <summary>
        /// Copies the folder name from the SDK model; a null model yields a null name.
        /// </summary>
        public PSSparkJobDefinitionFolder(SparkJobDefinitionFolder folder) => Name = folder?.Name;

        /// <summary>The folder name (path) of the Spark job definition.</summary>
        public string Name { get; set; }
    }
}
Loading

0 comments on commit 4493fc2

Please sign in to comment.