Skip to content

Commit

Permalink
Highlevel functions (Azure#2376)
Browse files Browse the repository at this point in the history
* Add high-level functions that take string URL parameters

* Use intersection types for options parameter

* Add documentation

* Add type alias for the intersection types

* Add docs for alias types

* Work around typedoc issue

where comments are not rendered when there are brackets around optional
parameter name (TypeStrong/typedoc#567).
  • Loading branch information
jeremymeng committed May 10, 2019
1 parent 0b9d31f commit d7a24f0
Show file tree
Hide file tree
Showing 5 changed files with 232 additions and 5 deletions.
42 changes: 41 additions & 1 deletion sdk/storage/storage-blob/src/highlevel.browser.ts
@@ -1,7 +1,13 @@
import { generateUuid } from "@azure/ms-rest-js";

import { BlockBlobURL } from "./BlockBlobURL";
import { BlobUploadCommonResponse, IUploadToBlockBlobOptions } from "./highlevel.common";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import {
BlobUploadCommonResponse,
IUploadToBlockBlobOptions,
CredentialOptions
} from "./highlevel.common";
import { INewPipelineOptions, StorageURL } from "./StorageURL";
import { Batch } from "./utils/Batch";
import {
BLOCK_BLOB_MAX_BLOCKS,
Expand Down Expand Up @@ -42,6 +48,40 @@ export async function uploadBrowserDataToBlockBlob(
);
}

// /**
// * Intersection type that is {@link IUploadToBlockBlobOptions}, {@link CredentialOptions}, and
// * {@link INewPipelineOptions} at the same time. It contains all members of these types.
// */
// export type UploadBrowserDataToBlockBlobUrlOptions = IUploadToBlockBlobOptions & CredentialOptions & INewPipelineOptions;

/**
* ONLY AVAILABLE IN BROWSERS.
*
* Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob given a url to that blob.
* This method assumes container already exists.
*
* When buffer length <= 256MB, this method will use 1 upload call to finish the upload.
* Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
* to commit the block list.
*
* @export
* @param {Blob | ArrayBuffer | ArrayBufferView} browserData Blob, File, ArrayBuffer or ArrayBufferView
* @param {BlockBlobURL} blockBlobURL
* @param {IUploadToBlockBlobOptions & CredentialOptions & INewPipelineOptions} options Options for Uploading browser data, credential, and new pipeline.
* If credential options is not specified {@link AnonymousCredential} is used.
* @returns {Promise<BlobUploadCommonResponse>}
*/
export async function uploadBrowserDataToBlockBlobUrl(
browserData: Blob | ArrayBuffer | ArrayBufferView,
url: string,
options: IUploadToBlockBlobOptions & CredentialOptions & INewPipelineOptions = {},
): Promise<BlobUploadCommonResponse> {

const pipeline = StorageURL.newPipeline(options.credential || new AnonymousCredential(), options);
const blockBlobURL = new BlockBlobURL(url, pipeline);
return uploadBrowserDataToBlockBlob(browserData, blockBlobURL, options);
}

/**
* ONLY AVAILABLE IN BROWSERS.
*
Expand Down
5 changes: 5 additions & 0 deletions sdk/storage/storage-blob/src/highlevel.common.ts
Expand Up @@ -3,6 +3,11 @@ import { HttpResponse, TransferProgressEvent } from "@azure/ms-rest-js";
import { Aborter } from "./Aborter";
import * as Models from "./generated/lib/models";
import { IBlobAccessConditions } from "./models";
import { Credential } from "./credentials/Credential";

export interface CredentialOptions {
credential?: Credential;
}

/**
* Option interface for uploadFileToBlockBlob and uploadSeekableStreamToBlockBlob.
Expand Down
117 changes: 115 additions & 2 deletions sdk/storage/storage-blob/src/highlevel.node.ts
@@ -1,17 +1,20 @@
import * as fs from "fs";
import { generateUuid, TransferProgressEvent } from "@azure/ms-rest-js";
import * as fs from "fs";
import { Readable } from "stream";

import { Aborter } from "./Aborter";
import { BlobURL } from "./BlobURL";
import { BlockBlobURL } from "./BlockBlobURL";
import { AnonymousCredential } from "./credentials/AnonymousCredential";
import { BlobHTTPHeaders } from "./generated/lib/models";
import {
BlobUploadCommonResponse,
IDownloadFromBlobOptions,
IUploadToBlockBlobOptions
IUploadToBlockBlobOptions,
CredentialOptions
} from "./highlevel.common";
import { IBlobAccessConditions } from "./models";
import { INewPipelineOptions, StorageURL } from "./StorageURL";
import { Batch } from "./utils/Batch";
import { BufferScheduler } from "./utils/BufferScheduler";
import {
Expand Down Expand Up @@ -57,6 +60,40 @@ export async function uploadFileToBlockBlob(
);
}

/**
 * Intersection type that is {@link IUploadToBlockBlobOptions}, {@link CredentialOptions}, and
 * {@link INewPipelineOptions} at the same time. It contains all members of these types.
 */
export type UploadFileToBlockBlobUrlOptions = IUploadToBlockBlobOptions &
  CredentialOptions &
  INewPipelineOptions;

/**
 * ONLY AVAILABLE IN NODE.JS RUNTIME.
 *
 * Uploads a local file in blocks to a block blob, given the url of that block blob.
 * This method assumes that the container already exists.
 *
 * When file size <= 256MB, this method will use 1 upload call to finish the upload.
 * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList
 * to commit the block list.
 *
 * @export
 * @param {string} filePath Full path of the local file
 * @param {string} url Url of the block blob to upload to
 * @param {UploadFileToBlockBlobUrlOptions} options Options for uploading the file, credential, and new pipeline.
 *                 If options.credential is not specified, {@link AnonymousCredential} is used.
 * @returns {Promise<BlobUploadCommonResponse>}
 */
export async function uploadFileToBlockBlobUrl(
  filePath: string,
  url: string,
  options: UploadFileToBlockBlobUrlOptions = {}
): Promise<BlobUploadCommonResponse> {
  // Fall back to anonymous access when no credential is supplied.
  const pipeline = StorageURL.newPipeline(options.credential || new AnonymousCredential(), options);
  const blockBlobURL = new BlockBlobURL(url, pipeline);
  return uploadFileToBlockBlob(filePath, blockBlobURL, options);
}

/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
Expand Down Expand Up @@ -258,6 +295,43 @@ export async function downloadBlobToBuffer(
await batch.do();
}

/**
* Intersection type that is {@link IDownloadFromBlobOptions}, {@link CredentialOptions}, and
* {@link INewPipelineOptions} at the same time. It contains all members of these types.
*/
export type DownloadBlobToBufferFromUrlOptions = IDownloadFromBlobOptions & CredentialOptions & INewPipelineOptions;

/**
* ONLY AVAILABLE IN NODE.JS RUNTIME.
*
* Downloads an Azure Blob in parallel to a buffer given a url to that blob.
* Offset and count are optional, pass 0 for both to download the entire blob.
*
* @export
* @param {Buffer} buffer Buffer to be fill, must have length larger than count
* @param {BlobURL} blobURL A BlobURL object
* @param {number} [offset] From which position of the block blob to download
* @param {number} [count] How much data to be downloaded. Will download to the end when passing undefined
* @param {DownloadBlobToBufferFromUrlOptions} options IDownloadFromBlobOptions & CredentialOptions & INewPipelineOptions.
* If credential options is not specified {@link AnonymousCredential} is used.
* @param {credential} [credential] Credential. If not specified {@link AnonymousCredential} is used.
* @param {INewPipelineOptions} [pipelineOptions]
* @returns {Promise<void>}
*/
export async function downloadBlobToBufferFromUrl(
buffer: Buffer,
url: string,
offset: number = 0,
count: number = 0,
options: DownloadBlobToBufferFromUrlOptions = {},
): Promise<void> {

const pipeline = StorageURL.newPipeline(options.credential || new AnonymousCredential(), options);
const blockBlobURL = new BlockBlobURL(url, pipeline);

return downloadBlobToBuffer(buffer, blockBlobURL, offset, count, options);
}

/**
* Option interface for uploadStreamToBlockBlob.
*
Expand Down Expand Up @@ -374,3 +448,42 @@ export async function uploadStreamToBlockBlob(

return blockBlobURL.commitBlockList(blockList, options);
}

/**
 * Intersection type that is {@link IUploadStreamToBlockBlobOptions}, {@link CredentialOptions}, and
 * {@link INewPipelineOptions} at the same time. It contains all members of these types.
 */
export type UploadStreamToBlockBlobUrlOptions = IUploadStreamToBlockBlobOptions &
  CredentialOptions &
  INewPipelineOptions;

/**
 * ONLY AVAILABLE IN NODE.JS RUNTIME.
 *
 * Uploads a Node.js Readable stream into a block blob, given the url of that block blob.
 * This method assumes that the container already exists.
 *
 * PERFORMANCE IMPROVEMENT TIPS:
 * * Input stream highWaterMark is better to set a same value with bufferSize
 *   parameter, which will avoid Buffer.concat() operations.
 *
 * @export
 * @param {Readable} stream Node.js Readable stream
 * @param {string} url Url of the block blob to upload to
 * @param {number} bufferSize Size of every buffer allocated, also the block size in the uploaded block blob
 * @param {number} maxBuffers Max buffers will allocate during uploading, positive correlation
 *                            with max uploading concurrency
 * @param {UploadStreamToBlockBlobUrlOptions} options Options for uploading the stream, credential, and new pipeline.
 *                 If options.credential is not specified, {@link AnonymousCredential} is used.
 * @returns {Promise<BlobUploadCommonResponse>}
 */
export async function uploadStreamToBlockBlobUrl(
  stream: Readable,
  url: string,
  bufferSize: number = 4 * 1024 * 1024,
  maxBuffers: number = 20,
  options: UploadStreamToBlockBlobUrlOptions = {}
): Promise<BlobUploadCommonResponse> {
  // Fall back to anonymous access when no credential is supplied.
  const pipeline = StorageURL.newPipeline(options.credential || new AnonymousCredential(), options);
  const blockBlobURL = new BlockBlobURL(url, pipeline);
  return uploadStreamToBlockBlob(stream, blockBlobURL, bufferSize, maxBuffers, options);
}
18 changes: 17 additions & 1 deletion sdk/storage/storage-blob/test/browser/highlevel.browser.test.ts
Expand Up @@ -4,7 +4,7 @@ import { Aborter } from "../../src/Aborter";
import { BlobURL } from "../../src/BlobURL";
import { BlockBlobURL } from "../../src/BlockBlobURL";
import { ContainerURL } from "../../src/ContainerURL";
import { uploadBrowserDataToBlockBlob } from "../../src/highlevel.browser";
import { uploadBrowserDataToBlockBlob, uploadBrowserDataToBlockBlobUrl } from "../../src/highlevel.browser";
import {
arrayBufferEqual,
blobToArrayBuffer,
Expand Down Expand Up @@ -128,6 +128,22 @@ describe("Highelvel", () => {
assert.equal(uploadedString, downloadedString);
});

it("uploadBrowserDataToBlockBlobUrl should success when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES", async () => {
  // Reuse the credential factory already wired into the existing blockBlobURL pipeline
  // (it sits at the end of the factory list).
  const credential = blockBlobURL.pipeline.factories[blockBlobURL.pipeline.factories.length - 1];
  const blobUrl = blockBlobURL.url;

  await uploadBrowserDataToBlockBlobUrl(tempFile2, blobUrl, {
    blockSize: 4 * 1024 * 1024,
    credential,
    parallelism: 2
  });

  // Round-trip: download the blob and compare against the original payload.
  const downloadResponse = await blockBlobURL.download(0);
  const actual = await bodyToString(downloadResponse);
  const expected = await blobToString(tempFile2);

  assert.equal(expected, actual);
});

it("uploadBrowserDataToBlockBlob should success when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES and configured maxSingleShotSize", async () => {
await uploadBrowserDataToBlockBlob(tempFile2, blockBlobURL, {
blockSize: 512 * 1024,
Expand Down
55 changes: 54 additions & 1 deletion sdk/storage/storage-blob/test/node/highlevel.node.test.ts
Expand Up @@ -8,7 +8,9 @@ import { Aborter } from "../../src/Aborter";
import {
  downloadBlobToBuffer,
  downloadBlobToBufferFromUrl,
  uploadFileToBlockBlob,
  uploadFileToBlockBlobUrl,
  uploadStreamToBlockBlob,
  uploadStreamToBlockBlobUrl
} from "../../src/highlevel.node";
import { IRetriableReadableStreamOptions } from "../../src/utils/RetriableReadableStream";
import { createRandomLocalFile, getBSU, getUniqueName, readStreamToLocalFile } from "../utils";
Expand Down Expand Up @@ -101,6 +103,34 @@ describe("Highlevel", () => {
assert.ok(downloadedData.equals(uploadedData));
});

it("uploadFileToBlockBlobUrl should success when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES", async () => {
  const url = blockBlobURL.url;
  // Reuse the credential factory already wired into the existing pipeline (appended last).
  const credential = blockBlobURL.pipeline.factories[blockBlobURL.pipeline.factories.length - 1];
  await uploadFileToBlockBlobUrl(tempFileSmall, url, {
    blockSize: 4 * 1024 * 1024,
    parallelism: 20,
    credential: credential
    // Enable logger when debugging
    // logger: new ConsoleHttpPipelineLogger(HttpPipelineLogLevel.INFO)
  });

  const downloadResponse = await blockBlobURL.download(0);
  const downloadedFile = path.join(tempFolderPath, getUniqueName("downloadfile."));
  await readStreamToLocalFile(downloadResponse.readableStreamBody!, downloadedFile);

  // fs.readFileSync is synchronous -- awaiting it was a no-op.
  const downloadedData = fs.readFileSync(downloadedFile);
  const uploadedData = fs.readFileSync(tempFileSmall);

  fs.unlinkSync(downloadedFile);
  assert.ok(downloadedData.equals(uploadedData));
});

it("uploadFileToBlockBlob should success when blob < BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES and configured maxSingleShotSize", async () => {
await uploadFileToBlockBlob(tempFileSmall, blockBlobURL, {
maxSingleShotSize: 0
Expand Down Expand Up @@ -294,6 +324,29 @@ describe("Highlevel", () => {
assert.ok(localFileContent.equals(buf));
});

it("downloadBlobToBufferUrl should success", async () => {
const url = blockBlobURL.url;
const credential = blockBlobURL.pipeline.factories[blockBlobURL.pipeline.factories.length - 1];
const rs = fs.createReadStream(tempFileLarge);
await uploadStreamToBlockBlobUrl(
rs,
url,
4 * 1024 * 1024,
20,
{ credential: credential },
);

const buf = Buffer.alloc(tempFileLargeLength);
await downloadBlobToBuffer(buf, blockBlobURL, 0, undefined, {
blockSize: 4 * 1024 * 1024,
maxRetryRequestsPerBlock: 5,
parallelism: 20
});

const localFileContent = fs.readFileSync(tempFileLarge);
assert.ok(localFileContent.equals(buf));
});

it("downloadBlobToBuffer should abort", async () => {
const rs = fs.createReadStream(tempFileLarge);
await uploadStreamToBlockBlob(
Expand Down

0 comments on commit d7a24f0

Please sign in to comment.