[Perf tests] Storage File Datalake - track 2 (Azure#12806)
### Changes in the PR
- Track 2 perf tests live in the `test` folder, so they are compiled along with the regular tests and will need to be updated whenever the API changes.

### To run track 2 perf tests
1. Build the storage-file-datalake package: `rush build -t storage-file-datalake`.
2. Navigate to the `storage-file-datalake` folder: `cd sdk\storage\storage-file-datalake\`.
3. Create a storage account and populate the `.env` file in the `sdk\storage\storage-file-datalake` folder with the `STORAGE_CONNECTION_STRING` variable (a sample `.env` is shown after this list).
4. Run the tests as shown below:
   - append
     - `npm run perfstress-test:node -- StorageDFSAppendTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
   - read
     - `npm run perfstress-test:node -- StorageDFSReadTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
   - upload
     - `npm run perfstress-test:node -- StorageDFSUploadTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
   - upload from file
     - `npm run perfstress-test:node -- StorageDFSUploadFromFileTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
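
A minimal sketch of the `.env` contents for step 3, assuming a standard storage connection string; the account name and key below are placeholders:

```
# Placeholder values; replace with your storage account's actual connection string
STORAGE_CONNECTION_STRING=DefaultEndpointsProtocol=https;AccountName=<account-name>;AccountKey=<account-key>;EndpointSuffix=core.windows.net
```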
HarshaNalluru authored Feb 1, 2021
1 parent db530fc commit edf5e9e
Showing 8 changed files with 245 additions and 0 deletions.
2 changes: 2 additions & 0 deletions sdk/storage/storage-file-datalake/package.json
@@ -49,6 +49,7 @@
"lint:fix": "eslint package.json api-extractor.json src test --ext .ts --fix",
"lint": "eslint package.json api-extractor.json src test --ext .ts -f html -o storage-file-datalake-lintReport.html || exit 0",
"pack": "npm pack 2>&1",
"perfstress-test:node": "cross-env TS_NODE_COMPILER_OPTIONS=\"{\\\"module\\\": \\\"commonjs\\\"}\" ts-node test/perfstress/track-2/index.spec.ts",
"prebuild": "npm run clean",
"test:browser": "npm run clean && npm run build:test && npm run unit-test:browser",
"test:node": "npm run clean && npm run build:test && npm run unit-test:node",
@@ -111,6 +112,7 @@
"@azure/eslint-plugin-azure-sdk": "^3.0.0",
"@azure/identity": "^1.1.0",
"@azure/test-utils-recorder": "^1.0.0",
"@azure/test-utils-perfstress": "^1.0.0",
"@microsoft/api-extractor": "7.7.11",
"@opentelemetry/api": "^0.10.2",
"@rollup/plugin-commonjs": "11.0.2",
@@ -0,0 +1,14 @@
### Guide

1. Build the storage-file-datalake package: `rush build -t storage-file-datalake`.
2. Navigate to the `storage-file-datalake` folder: `cd sdk\storage\storage-file-datalake\`.
3. Create a storage account and populate the `.env` file in the `sdk\storage\storage-file-datalake` folder with the `STORAGE_CONNECTION_STRING` variable.
4. Run the tests as shown below:
   - append
     - `npm run perfstress-test:node -- StorageDFSAppendTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
   - read
     - `npm run perfstress-test:node -- StorageDFSReadTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
   - upload
     - `npm run perfstress-test:node -- StorageDFSUploadTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
   - upload from file
     - `npm run perfstress-test:node -- StorageDFSUploadFromFileTest --warmup 2 --duration 7 --iterations 2 --parallel 2`
38 changes: 38 additions & 0 deletions sdk/storage/storage-file-datalake/test/perfstress/track-2/append.spec.ts
@@ -0,0 +1,38 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import { PerfStressOptionDictionary } from "@azure/test-utils-perfstress";
import { StorageDFSTest } from "./storageTest.spec";
import { DataLakeFileClient } from "../../../src";
import { generateUuid } from "@azure/core-http";
interface StorageDFSAppendTestOptions {
  size: number;
}

export class StorageDFSAppendTest extends StorageDFSTest<StorageDFSAppendTestOptions> {
  buffer: Buffer;
  public options: PerfStressOptionDictionary<StorageDFSAppendTestOptions> = {
    size: {
      required: true,
      description: "Size to append in bytes",
      shortName: "sz",
      longName: "size",
      defaultValue: 1024
    }
  };
  fileClient: DataLakeFileClient;

  constructor() {
    super();
    this.fileClient = this.directoryClient.getFileClient(generateUuid());
    this.buffer = Buffer.alloc(this.parsedOptions.size.value!);
  }

  public async setup() {
    await this.fileClient.create();
  }

  async runAsync(): Promise<void> {
    await this.fileClient.append(this.buffer, 0, this.parsedOptions.size.value!);
  }
}
21 changes: 21 additions & 0 deletions sdk/storage/storage-file-datalake/test/perfstress/track-2/index.spec.ts
@@ -0,0 +1,21 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import { PerfStressProgram, selectPerfStressTest } from "@azure/test-utils-perfstress";
import { StorageDFSAppendTest } from "./append.spec";
import { StorageDFSReadTest } from "./read.spec";
import { StorageDFSUploadTest } from "./upload.spec";
import { StorageDFSUploadFromFileTest } from "./uploadFromFile.spec";

console.log("=== Starting the perfStress test ===");

const perfStressProgram = new PerfStressProgram(
  selectPerfStressTest([
    StorageDFSAppendTest,
    StorageDFSReadTest,
    StorageDFSUploadTest,
    StorageDFSUploadFromFileTest
  ])
);

perfStressProgram.run();
39 changes: 39 additions & 0 deletions sdk/storage/storage-file-datalake/test/perfstress/track-2/read.spec.ts
@@ -0,0 +1,39 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import { drainStream, PerfStressOptionDictionary } from "@azure/test-utils-perfstress";
import { StorageDFSTest } from "./storageTest.spec";
import { DataLakeFileClient } from "../../../src";
import { generateUuid } from "@azure/core-http";
interface StorageDFSReadTestOptions {
  size: number;
}

export class StorageDFSReadTest extends StorageDFSTest<StorageDFSReadTestOptions> {
  public options: PerfStressOptionDictionary<StorageDFSReadTestOptions> = {
    size: {
      required: true,
      description: "Size in bytes",
      shortName: "sz",
      longName: "size",
      defaultValue: 1024
    }
  };
  static fileName = generateUuid();
  fileClient: DataLakeFileClient;

  constructor() {
    super();
    this.fileClient = this.directoryClient.getFileClient(StorageDFSReadTest.fileName);
  }

  public async globalSetup() {
    await super.globalSetup();
    await this.fileClient.upload(Buffer.alloc(this.parsedOptions.size.value!));
  }

  async runAsync(): Promise<void> {
    const readResponse = await this.fileClient.read();
    await drainStream(readResponse.readableStreamBody!);
  }
}
65 changes: 65 additions & 0 deletions sdk/storage/storage-file-datalake/test/perfstress/track-2/storageTest.spec.ts
@@ -0,0 +1,65 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import { getEnvVar, PerfStressTest } from "@azure/test-utils-perfstress";

import {
  DataLakeServiceClient,
  StorageSharedKeyCredential,
  DataLakeFileSystemClient,
  DataLakeDirectoryClient
} from "../../../src";

// Expects the .env file at the same level as the "test" folder
import * as dotenv from "dotenv";
import { generateUuid } from "@azure/core-http";
dotenv.config();

export abstract class StorageDFSTest<TOptions> extends PerfStressTest<TOptions> {
  datalakeServiceClient: DataLakeServiceClient;
  fileSystemClient: DataLakeFileSystemClient;
  directoryClient: DataLakeDirectoryClient;
  static fileSystemName = generateUuid();
  static directoryName = generateUuid();

  constructor() {
    super();
    const connectionString = getEnvVar("STORAGE_CONNECTION_STRING");
    const accountName = getValueInConnString(connectionString, "AccountName");
    const accountKey = getValueInConnString(connectionString, "AccountKey");
    const sharedKeyCredential = new StorageSharedKeyCredential(accountName, accountKey);

    this.datalakeServiceClient = new DataLakeServiceClient(
      `https://${accountName}.dfs.core.windows.net`,
      sharedKeyCredential
    );

    this.fileSystemClient = this.datalakeServiceClient.getFileSystemClient(
      StorageDFSTest.fileSystemName
    );

    this.directoryClient = this.fileSystemClient.getDirectoryClient(StorageDFSTest.directoryName);
  }

  public async globalSetup() {
    await this.fileSystemClient.create();
    await this.directoryClient.create();
  }

  public async globalCleanup() {
    await this.fileSystemClient.delete();
  }
}

export function getValueInConnString(
  connectionString: string,
  argument: "AccountName" | "AccountKey"
) {
  const elements = connectionString.split(";");
  for (const element of elements) {
    if (element.trim().startsWith(argument)) {
      return element.trim().match(argument + "=(.*)")![1];
    }
  }
  return "";
}
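
A quick usage sketch of `getValueInConnString` (illustrative only; the connection string below is a placeholder, not a real credential):

```ts
// Illustrative usage of getValueInConnString; the connection string is a placeholder.
import { getValueInConnString } from "./storageTest.spec";

const connectionString =
  "DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=bXlrZXk=;EndpointSuffix=core.windows.net";

console.log(getValueInConnString(connectionString, "AccountName")); // "myaccount"
console.log(getValueInConnString(connectionString, "AccountKey")); // "bXlrZXk="
```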
36 changes: 36 additions & 0 deletions sdk/storage/storage-file-datalake/test/perfstress/track-2/upload.spec.ts
@@ -0,0 +1,36 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import { generateUuid } from "@azure/core-http";
import { PerfStressOptionDictionary } from "@azure/test-utils-perfstress";
import { DataLakeFileClient } from "../../../src";
import { StorageDFSTest } from "./storageTest.spec";

interface StorageDFSUploadTestOptions {
  size: number;
}

export class StorageDFSUploadTest extends StorageDFSTest<StorageDFSUploadTestOptions> {
  buffer: Buffer;
  fileClient: DataLakeFileClient;
  public options: PerfStressOptionDictionary<StorageDFSUploadTestOptions> = {
    size: {
      required: true,
      description: "Size in bytes",
      shortName: "sz",
      longName: "size",
      defaultValue: 1024
    }
  };

  constructor() {
    super();
    const fileName = generateUuid();
    this.fileClient = this.directoryClient.getFileClient(fileName);
    this.buffer = Buffer.alloc(this.parsedOptions.size.value!);
  }

  async runAsync(): Promise<void> {
    await this.fileClient.upload(this.buffer);
  }
}
30 changes: 30 additions & 0 deletions sdk/storage/storage-file-datalake/test/perfstress/track-2/uploadFromFile.spec.ts
@@ -0,0 +1,30 @@
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT license.

import { StorageDFSUploadTest } from "./upload.spec";
import fs from "fs";
import util from "util";
const writeFile = util.promisify(fs.writeFile);
const fileExists = util.promisify(fs.exists);
const mkdir = util.promisify(fs.mkdir);
const deleteFile = util.promisify(fs.unlink);

const localDirName = "temp";
const localFileName = `${localDirName}/upload-from-test-temp-file.txt`;

export class StorageDFSUploadFromFileTest extends StorageDFSUploadTest {
  public async globalSetup() {
    await super.globalSetup();
    if (!(await fileExists(localDirName))) await mkdir(localDirName);
    await writeFile(localFileName, Buffer.alloc(this.parsedOptions.size.value!));
  }

  public async globalCleanup() {
    await deleteFile(localFileName);
    await super.globalCleanup();
  }

  async runAsync(): Promise<void> {
    await this.fileClient.uploadFile(localFileName);
  }
}
