Test avro and json dataset schema. (#41699)
wangbwn authored Feb 20, 2024
1 parent 0c4198b commit 2e5196e
Showing 5 changed files with 425 additions and 1 deletion.
@@ -1099,6 +1099,7 @@ public AvroDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceRe
public Azure.Core.Expressions.DataFactory.DataFactoryElement<string> AvroCompressionCodec { get { throw null; } set { } }
public int? AvroCompressionLevel { get { throw null; } set { } }
public Azure.ResourceManager.DataFactory.Models.DatasetLocation DataLocation { get { throw null; } set { } }
+public new Azure.Core.Expressions.DataFactory.DataFactoryElement<System.BinaryData> Schema { get { throw null; } set { } }
Azure.ResourceManager.DataFactory.Models.AvroDataset System.ClientModel.Primitives.IJsonModel<Azure.ResourceManager.DataFactory.Models.AvroDataset>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
void System.ClientModel.Primitives.IJsonModel<Azure.ResourceManager.DataFactory.Models.AvroDataset>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
Azure.ResourceManager.DataFactory.Models.AvroDataset System.ClientModel.Primitives.IPersistableModel<Azure.ResourceManager.DataFactory.Models.AvroDataset>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
@@ -6486,6 +6487,7 @@ public JsonDataset(Azure.Core.Expressions.DataFactory.DataFactoryLinkedServiceRe
public Azure.ResourceManager.DataFactory.Models.DatasetCompression Compression { get { throw null; } set { } }
public Azure.ResourceManager.DataFactory.Models.DatasetLocation DataLocation { get { throw null; } set { } }
public Azure.Core.Expressions.DataFactory.DataFactoryElement<string> EncodingName { get { throw null; } set { } }
+public new Azure.Core.Expressions.DataFactory.DataFactoryElement<System.BinaryData> Schema { get { throw null; } set { } }
Azure.ResourceManager.DataFactory.Models.JsonDataset System.ClientModel.Primitives.IJsonModel<Azure.ResourceManager.DataFactory.Models.JsonDataset>.Create(ref System.Text.Json.Utf8JsonReader reader, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
void System.ClientModel.Primitives.IJsonModel<Azure.ResourceManager.DataFactory.Models.JsonDataset>.Write(System.Text.Json.Utf8JsonWriter writer, System.ClientModel.Primitives.ModelReaderWriterOptions options) { }
Azure.ResourceManager.DataFactory.Models.JsonDataset System.ClientModel.Primitives.IPersistableModel<Azure.ResourceManager.DataFactory.Models.JsonDataset>.Create(System.BinaryData data, System.ClientModel.Primitives.ModelReaderWriterOptions options) { throw null; }
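For orientation only (not part of this commit's diff): a minimal sketch of how a caller might set the Schema property added to AvroDataset and JsonDataset above. The linked service name and the inline column definition are illustrative assumptions, and the assignment relies on the DataFactoryElement<T> implicit literal conversion.

using System;
using Azure.Core.Expressions.DataFactory;
using Azure.ResourceManager.DataFactory.Models;

// "exampleLinkedService" is a hypothetical reference name used only for this sketch.
var linkedService = new DataFactoryLinkedServiceReference(
    DataFactoryLinkedServiceReferenceType.LinkedServiceReference, "exampleLinkedService");

var avroDataset = new AvroDataset(linkedService)
{
    // The new property shadows the base Schema with a BinaryData-typed element,
    // so raw schema JSON can be assigned as a literal.
    Schema = BinaryData.FromString("[{\"name\":\"id\",\"type\":\"string\"}]")
};

JsonDataset gains the same shadowing Schema property, so the same pattern applies to it.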
@@ -2,5 +2,5 @@
"AssetsRepo": "Azure/azure-sdk-assets",
"AssetsRepoPrefixPath": "net",
"TagPrefix": "net/datafactory/Azure.ResourceManager.DataFactory",
"Tag": "net/datafactory/Azure.ResourceManager.DataFactory_23a911dae9"
"Tag": "net/datafactory/Azure.ResourceManager.DataFactory_8bebc7d7e6"
}
@@ -0,0 +1,193 @@
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.

#nullable disable

using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Text.Json;
using Azure.Core;
using Azure.Core.Expressions.DataFactory;

namespace Azure.ResourceManager.DataFactory.Models
{
    /// <summary> Avro dataset. </summary>
    [CodeGenSuppress(nameof(AvroDataset), typeof(string), typeof(string), typeof(DataFactoryElement<IList<DatasetDataElement>>),
        typeof(DataFactoryElement<BinaryData>), typeof(Core.Expressions.DataFactory.DataFactoryLinkedServiceReference), typeof(IDictionary<string, EntityParameterSpecification>),
        typeof(IList<BinaryData>), typeof(string), typeof(IDictionary<string, BinaryData>), typeof(DatasetLocation), typeof(DataFactoryElement<string>), typeof(int))]
    public partial class AvroDataset : DataFactoryDatasetProperties
    {
        /// <summary> Initializes a new instance of <see cref="AvroDataset"/>. </summary>
        /// <param name="datasetType"> Type of dataset. </param>
        /// <param name="description"> Dataset description. </param>
        /// <param name="structure"> Columns that define the structure of the dataset. Type: array (or Expression with resultType array), itemType: DatasetDataElement. </param>
        /// <param name="schema"> Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. </param>
        /// <param name="linkedServiceName"> Linked service reference. </param>
        /// <param name="parameters"> Parameters for dataset. </param>
        /// <param name="annotations"> List of tags that can be used for describing the Dataset. </param>
        /// <param name="folder"> The folder that this Dataset is in. If not specified, Dataset will appear at the root level. </param>
        /// <param name="additionalProperties"> Additional Properties. </param>
        /// <param name="dataLocation">
        /// The location of the avro storage.
        /// Please note <see cref="DatasetLocation"/> is the base class. According to the scenario, a derived class of the base class might need to be assigned here, or this property needs to be casted to one of the possible derived classes.
        /// The available derived classes include <see cref="AmazonS3CompatibleLocation"/>, <see cref="AmazonS3Location"/>, <see cref="AzureBlobFSLocation"/>, <see cref="AzureBlobStorageLocation"/>, <see cref="AzureDataLakeStoreLocation"/>, <see cref="AzureFileStorageLocation"/>, <see cref="FileServerLocation"/>, <see cref="FtpServerLocation"/>, <see cref="GoogleCloudStorageLocation"/>, <see cref="HdfsLocation"/>, <see cref="HttpServerLocation"/>, <see cref="LakeHouseLocation"/>, <see cref="OracleCloudStorageLocation"/> and <see cref="SftpLocation"/>.
        /// </param>
        /// <param name="avroCompressionCodec"> The data avroCompressionCodec. Type: string (or Expression with resultType string). </param>
        /// <param name="avroCompressionLevel"></param>
        internal AvroDataset(string datasetType, string description, DataFactoryElement<IList<DatasetDataElement>> structure, DataFactoryElement<BinaryData> schema, Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName, IDictionary<string, EntityParameterSpecification> parameters, IList<BinaryData> annotations, DatasetFolder folder, IDictionary<string, BinaryData> additionalProperties, DatasetLocation dataLocation, DataFactoryElement<string> avroCompressionCodec, int? avroCompressionLevel) : base(datasetType, description, structure, null, linkedServiceName, parameters, annotations, folder, additionalProperties)
        {
            DataLocation = dataLocation;
            AvroCompressionCodec = avroCompressionCodec;
            AvroCompressionLevel = avroCompressionLevel;
            DatasetType = datasetType ?? "Avro";
            Schema = schema;
        }

        /// <summary> Columns that define the physical type schema of the dataset. Type: array (or Expression with resultType array), itemType: DatasetSchemaDataElement. </summary>
        public new DataFactoryElement<BinaryData> Schema { get; set; }

        internal static AvroDataset DeserializeAvroDataset(JsonElement element)
        {
            if (element.ValueKind == JsonValueKind.Null)
            {
                return null;
            }
            string type = default;
            Optional<string> description = default;
            Optional<DataFactoryElement<IList<DatasetDataElement>>> structure = default;
            Optional<DataFactoryElement<BinaryData>> schema = default;
            Core.Expressions.DataFactory.DataFactoryLinkedServiceReference linkedServiceName = default;
            Optional<IDictionary<string, EntityParameterSpecification>> parameters = default;
            Optional<IList<BinaryData>> annotations = default;
            Optional<DatasetFolder> folder = default;
            Optional<DatasetLocation> location = default;
            Optional<DataFactoryElement<string>> avroCompressionCodec = default;
            Optional<int> avroCompressionLevel = default;
            IDictionary<string, BinaryData> additionalProperties = default;
            Dictionary<string, BinaryData> additionalPropertiesDictionary = new Dictionary<string, BinaryData>();
            foreach (var property in element.EnumerateObject())
            {
                if (property.NameEquals("type"u8))
                {
                    type = property.Value.GetString();
                    continue;
                }
                if (property.NameEquals("description"u8))
                {
                    description = property.Value.GetString();
                    continue;
                }
                if (property.NameEquals("structure"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    structure = JsonSerializer.Deserialize<DataFactoryElement<IList<DatasetDataElement>>>(property.Value.GetRawText());
                    continue;
                }
                if (property.NameEquals("schema"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    schema = JsonSerializer.Deserialize<DataFactoryElement<BinaryData>>(property.Value.GetRawText());
                    continue;
                }
                if (property.NameEquals("linkedServiceName"u8))
                {
                    linkedServiceName = JsonSerializer.Deserialize<Core.Expressions.DataFactory.DataFactoryLinkedServiceReference>(property.Value.GetRawText());
                    continue;
                }
                if (property.NameEquals("parameters"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    Dictionary<string, EntityParameterSpecification> dictionary = new Dictionary<string, EntityParameterSpecification>();
                    foreach (var property0 in property.Value.EnumerateObject())
                    {
                        dictionary.Add(property0.Name, EntityParameterSpecification.DeserializeEntityParameterSpecification(property0.Value));
                    }
                    parameters = dictionary;
                    continue;
                }
                if (property.NameEquals("annotations"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    List<BinaryData> array = new List<BinaryData>();
                    foreach (var item in property.Value.EnumerateArray())
                    {
                        if (item.ValueKind == JsonValueKind.Null)
                        {
                            array.Add(null);
                        }
                        else
                        {
                            array.Add(BinaryData.FromString(item.GetRawText()));
                        }
                    }
                    annotations = array;
                    continue;
                }
                if (property.NameEquals("folder"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        continue;
                    }
                    folder = DatasetFolder.DeserializeDatasetFolder(property.Value);
                    continue;
                }
                if (property.NameEquals("typeProperties"u8))
                {
                    if (property.Value.ValueKind == JsonValueKind.Null)
                    {
                        property.ThrowNonNullablePropertyIsNull();
                        continue;
                    }
                    foreach (var property0 in property.Value.EnumerateObject())
                    {
                        if (property0.NameEquals("location"u8))
                        {
                            if (property0.Value.ValueKind == JsonValueKind.Null)
                            {
                                continue;
                            }
                            location = DatasetLocation.DeserializeDatasetLocation(property0.Value);
                            continue;
                        }
                        if (property0.NameEquals("avroCompressionCodec"u8))
                        {
                            if (property0.Value.ValueKind == JsonValueKind.Null)
                            {
                                continue;
                            }
                            avroCompressionCodec = JsonSerializer.Deserialize<DataFactoryElement<string>>(property0.Value.GetRawText());
                            continue;
                        }
                        if (property0.NameEquals("avroCompressionLevel"u8))
                        {
                            if (property0.Value.ValueKind == JsonValueKind.Null)
                            {
                                continue;
                            }
                            avroCompressionLevel = property0.Value.GetInt32();
                            continue;
                        }
                    }
                    continue;
                }
                additionalPropertiesDictionary.Add(property.Name, BinaryData.FromString(property.Value.GetRawText()));
            }
            additionalProperties = additionalPropertiesDictionary;
            return new AvroDataset(type, description.Value, structure.Value, schema.Value, linkedServiceName, Optional.ToDictionary(parameters), Optional.ToList(annotations), folder.Value, additionalProperties, location.Value, avroCompressionCodec.Value, Optional.ToNullable(avroCompressionLevel));
        }
    }
}
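
To put the customization above in context, here is a hedged sketch (illustrative, not taken from the commit's recorded test assets) of the kind of round trip the new tests are meant to exercise, assuming the standard System.ClientModel ModelReaderWriter entry points; the payload values are made up.

using System;
using System.ClientModel.Primitives;
using Azure.ResourceManager.DataFactory.Models;

// Hypothetical payload shaped like the dataset properties object parsed above.
BinaryData payload = BinaryData.FromString("""
{
    "type": "Avro",
    "linkedServiceName": { "referenceName": "exampleLinkedService", "type": "LinkedServiceReference" },
    "schema": [ { "name": "id", "type": "string" } ],
    "typeProperties": {
        "location": { "type": "AzureBlobStorageLocation" },
        "avroCompressionCodec": "snappy"
    }
}
""");

// Reading is expected to flow through the Avro dataset deserialization and surface the
// top-level "schema" array on the new BinaryData-typed Schema property.
AvroDataset dataset = ModelReaderWriter.Read<AvroDataset>(payload);
Console.WriteLine(dataset.Schema);

// Writing back should keep the schema element in the serialized payload.
BinaryData roundTripped = ModelReaderWriter.Write(dataset);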
