[Storage] validate and update the samples (#4683)
HarshaNalluru authored Aug 6, 2019
1 parent 6e83f54 commit c7ce347
Showing 5 changed files with 28 additions and 37 deletions.
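Four of the five touched samples move off the SDK-specific Aborter and onto AbortController from the standalone @azure/abort-controller package; the operation options keep accepting an abortSignal. A minimal sketch of the two common ways to produce such a signal, assuming @azure/abort-controller ^1.0 (the version is not pinned by this commit, and the variable names are illustrative):

import { AbortController } from "@azure/abort-controller";

// AbortController.timeout(ms), used throughout these samples, returns a signal
// that aborts on its own after the given delay.
const timeoutSignal = AbortController.timeout(30 * 60 * 1000); // 30 minutes

// A controller can also be driven by hand when the caller decides to cancel,
// for example from a UI event handler or a shutdown hook.
const controller = new AbortController();
const manualSignal = controller.signal;
// ...pass manualSignal as abortSignal to an upload or download, then later:
controller.abort();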
6 changes: 3 additions & 3 deletions sdk/storage/storage-blob/samples/javascript/advanced.js
@@ -3,10 +3,10 @@
  */
 
 const fs = require("fs");
+const { AbortController } = require("@azure/abort-controller");
 const {
   AnonymousCredential,
   HttpPipelineLogLevel,
-  Aborter,
   BlobServiceClient,
   newPipeline
 } = require("../.."); // Change to "@azure/storage-blob" in your package
@@ -75,7 +75,7 @@ async function main() {
   // Parallel uploading a Readable stream with BlockBlobClient.uploadStream() in Node.js runtime
   // BlockBlobClient.uploadStream() is only available in Node.js
   await blockBlobClient.uploadStream(fs.createReadStream(localFilePath), 4 * 1024 * 1024, 20, {
-    abortSignal: Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
+    abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
     progress: (ev) => console.log(ev)
   });
   console.log("uploadStream success");
@@ -96,7 +96,7 @@ async function main() {
   const fileSize = fs.statSync(localFilePath).size;
   const buffer = Buffer.alloc(fileSize);
   await blockBlobClient.downloadToBuffer(buffer, 0, undefined, {
-    abortSignal: Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
+    abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
     blockSize: 4 * 1024 * 1024, // 4MB block size
     parallelism: 20, // 20 concurrency
     progress: (ev) => console.log(ev)
32 changes: 11 additions & 21 deletions sdk/storage/storage-blob/samples/typescript/advanced.ts
@@ -3,8 +3,8 @@
  */
 
 import fs from "fs";
+import { AbortController } from "@azure/abort-controller";
 import {
-  Aborter,
   AnonymousCredential,
   BlobServiceClient,
   newPipeline,
@@ -75,15 +75,10 @@ async function main() {
 
   // Parallel uploading a Readable stream with BlockBlobClient.uploadStream() in Node.js runtime
   // BlockBlobClient.uploadStream() is only available in Node.js
-  await blockBlobClient.uploadStream(
-    fs.createReadStream(localFilePath),
-    4 * 1024 * 1024,
-    20,
-    {
-      abortSignal: Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
-      progress: (ev) => console.log(ev)
-    }
-  );
+  await blockBlobClient.uploadStream(fs.createReadStream(localFilePath), 4 * 1024 * 1024, 20, {
+    abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
+    progress: (ev) => console.log(ev)
+  });
   console.log("uploadStream success");
 
   // Parallel uploading a browser File/Blob/ArrayBuffer in browsers with BlockBlobClient.uploadBrowserData()
@@ -101,17 +96,12 @@ async function main() {
   // downloadToBuffer is only available in Node.js
   const fileSize = fs.statSync(localFilePath).size;
   const buffer = Buffer.alloc(fileSize);
-  await blockBlobClient.downloadToBuffer(
-    buffer,
-    0,
-    undefined,
-    {
-      abortSignal: Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
-      blockSize: 4 * 1024 * 1024, // 4MB block size
-      parallelism: 20, // 20 concurrency
-      progress: ev => console.log(ev)
-    }
-  );
+  await blockBlobClient.downloadToBuffer(buffer, 0, undefined, {
+    abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
+    blockSize: 4 * 1024 * 1024, // 4MB block size
+    parallelism: 20, // 20 concurrency
+    progress: (ev) => console.log(ev)
+  });
   console.log("downloadToBuffer success");
 
   // Delete container
8 changes: 4 additions & 4 deletions sdk/storage/storage-file/samples/javascript/advanced.js
@@ -3,9 +3,9 @@
  */
 
 const fs = require("fs");
+const { AbortController } = require("@azure/abort-controller");
 const {
   AnonymousCredential,
-  Aborter,
   FileServiceClient,
   HttpPipelineLogLevel,
   newPipeline
@@ -82,7 +82,7 @@ async function main() {
   // Parallel uploading a Readable stream with FileClient.uploadStream() in Node.js runtime
   // FileClient.uploadStream() is only available in Node.js
   await fileClient.uploadStream(fs.createReadStream(localFilePath), fileSize, 4 * 1024 * 1024, 20, {
-    abortSignal: Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
+    abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
     progress: (ev) => console.log(ev)
   });
   console.log("uploadStream success");
@@ -102,7 +102,7 @@ async function main() {
   // FileClient.downloadToBuffer() is only available in Node.js
   const buffer = Buffer.alloc(fileSize);
   await fileClient.downloadToBuffer(buffer, 0, undefined, {
-    abortSignal: Aborter.timeout(30 * 60 * 1000),
+    abortSignal: AbortController.timeout(30 * 60 * 1000),
     rangeSize: 4 * 1024 * 1024, // 4MB range size
     parallelism: 20, // 20 concurrency
     progress: (ev) => console.log(ev)
@@ -119,6 +119,6 @@ main()
   .then(() => {
     console.log("Successfully executed sample.");
   })
-  .catch(err => {
+  .catch((err) => {
     console.log(err.message);
   });
12 changes: 9 additions & 3 deletions sdk/storage/storage-file/samples/typescript/advanced.ts
@@ -3,7 +3,13 @@
  */
 
 import fs from "fs";
-import { Aborter, AnonymousCredential, FileServiceClient, newPipeline, HttpPipelineLogLevel } from "../../src"; // Change to "@azure/storage-file" in your package
+import { AbortController } from "@azure/abort-controller";
+import {
+  AnonymousCredential,
+  FileServiceClient,
+  newPipeline,
+  HttpPipelineLogLevel
+} from "../../src"; // Change to "@azure/storage-file" in your package
 
 class ConsoleHttpPipelineLogger {
   minimumLogLevel: any;
@@ -77,7 +83,7 @@ async function main() {
   // Parallel uploading a Readable stream with FileClient.uploadStream() in Node.js runtime
   // FileClient.uploadStream() is only available in Node.js
   await fileClient.uploadStream(fs.createReadStream(localFilePath), fileSize, 4 * 1024 * 1024, 20, {
-    abortSignal: Aborter.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
+    abortSignal: AbortController.timeout(30 * 60 * 1000), // Abort uploading with timeout in 30mins
     progress: (ev: any) => console.log(ev)
   });
   console.log("uploadStream success");
@@ -97,7 +103,7 @@ async function main() {
   // FileClient.downloadToBuffer() is only available in Node.js
   const buffer = Buffer.alloc(fileSize);
   await fileClient.downloadToBuffer(buffer, 0, undefined, {
-    abortSignal: Aborter.timeout(30 * 60 * 1000),
+    abortSignal: AbortController.timeout(30 * 60 * 1000),
     rangeSize: 4 * 1024 * 1024, // 4MB range size
     parallelism: 20, // 20 concurrency
     progress: (ev) => console.log(ev)
7 changes: 1 addition & 6 deletions sdk/storage/storage-queue/samples/typescript/basic.ts
@@ -2,12 +2,7 @@
   Setup: Enter your storage account name and shared key in main()
 */
 
-import {
-  QueueServiceClient,
-  newPipeline,
-  SharedKeyCredential,
-  RawTokenCredential
-} from "../../src"; // Change to "@azure/storage-queue" in your package
+import { QueueServiceClient, newPipeline, SharedKeyCredential } from "../../src"; // Change to "@azure/storage-queue" in your package
 
 async function main() {
   // Enter your storage account name and shared key
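The queue sample only loses the unused RawTokenCredential import. For context, a hedged sketch of how the three remaining exports fit together in a basic sample of this vintage; the account name and key are placeholders, and the exact client construction may differ slightly from the elided body of the file:

import { QueueServiceClient, newPipeline, SharedKeyCredential } from "@azure/storage-queue";

// Placeholder credentials -- not part of this commit.
const account = "<account name>";
const accountKey = "<account key>";

// Build a pipeline from a shared-key credential and point the service client at the account.
const credential = new SharedKeyCredential(account, accountKey);
const queueServiceClient = new QueueServiceClient(
  `https://${account}.queue.core.windows.net`,
  newPipeline(credential)
);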
