diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf-resources.bicep b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf-resources.bicep
new file mode 100644
index 000000000000..2f3f5b7bdafd
--- /dev/null
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf-resources.bicep
@@ -0,0 +1,17 @@
+param baseName string = resourceGroup().name
+param location string = resourceGroup().location
+
+resource storageAccount 'Microsoft.Storage/storageAccounts@2023-05-01' = {
+ name: '${baseName}blob'
+ location: location
+ kind: 'StorageV2'
+ sku: {
+ name: 'Standard_LRS'
+ }
+}
+
+var name = storageAccount.name
+var key = storageAccount.listKeys().keys[0].value
+
+output AZURE_STORAGE_ACCOUNT_NAME string = name
+output AZURE_STORAGE_ACCOUNT_KEY string = key
diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf-tests.yml b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf-tests.yml
new file mode 100644
index 000000000000..2dce771b126a
--- /dev/null
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf-tests.yml
@@ -0,0 +1,22 @@
+Service: storage-datamovement-blob
+
+Project: sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Azure.Storage.DataMovement.Blobs.Perf.csproj
+
+PackageVersions:
+- Azure.Storage.DataMovement.Blobs: source
+
+Tests:
+- Test: download
+ Class: DownloadDirectory
+ Arguments: &sizes
+ - --size 1024 --count 5000 --duration 60 --concurrency 64
+ - --size 10485760 --count 500 --duration 90 --concurrency 64
+ - --size 1073741824 --count 5 --duration 120 --concurrency 64
+
+- Test: upload
+ Class: UploadDirectory
+ Arguments: *sizes
+
+- Test: copy
+ Class: CopyDirectory
+ Arguments: *sizes
diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf.yml b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf.yml
new file mode 100644
index 000000000000..3ccb7c01276b
--- /dev/null
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf.yml
@@ -0,0 +1,42 @@
+parameters:
+- name: LanguageVersion
+ displayName: LanguageVersion (6, 8)
+ type: string
+ default: '8'
+- name: PackageVersions
+ displayName: PackageVersions (regex of package versions to run)
+ type: string
+ default: 'source'
+- name: Tests
+ displayName: Tests (regex of tests to run)
+ type: string
+ default: '^(upload|download|copy)$'
+- name: Arguments
+ displayName: Arguments (regex of arguments to run)
+ type: string
+ default: '.*'
+- name: Iterations
+ displayName: Iterations (times to run each test)
+ type: number
+ default: '5'
+- name: Profile
+ type: boolean
+ default: false
+- name: AdditionalArguments
+ displayName: AdditionalArguments (passed to PerfAutomation)
+ type: string
+ default: '--no-sync'
+
+extends:
+ template: /eng/pipelines/templates/jobs/perf.yml
+ parameters:
+ LanguageVersion: ${{ parameters.LanguageVersion }}
+ ServiceDirectory: storage/Azure.Storage.DataMovement.Blobs
+ PackageVersions: ${{ parameters.PackageVersions }}
+ Tests: ${{ parameters.Tests }}
+ Arguments: ${{ parameters.Arguments }}
+ Iterations: ${{ parameters.Iterations }}
+ Profile: ${{ parameters.Profile }}
+ AdditionalArguments: ${{ parameters.AdditionalArguments }}
+ EnvVars:
+ AZURE_AUTHORITY_HOST: $(AZURE_STORAGE_DATAMOVEMENT_BLOBS_AZURE_AUTHORITY_HOST)
diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Infrastructure/DirectoryTransferTest.cs b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Infrastructure/DirectoryTransferTest.cs
index 1dc5dadeabc4..6c96a00f08a8 100644
--- a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Infrastructure/DirectoryTransferTest.cs
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Infrastructure/DirectoryTransferTest.cs
@@ -25,7 +25,7 @@ public DirectoryTransferTest(TOptions options) : base(options)
BlobServiceClient = new BlobServiceClient(PerfTestEnvironment.Instance.StorageEndpoint, PerfTestEnvironment.Instance.Credential);
LocalFileResourceProvider = new LocalFilesStorageResourceProvider();
BlobResourceProvider = new BlobsStorageResourceProvider(PerfTestEnvironment.Instance.Credential);
- _transferTimeout = TimeSpan.FromSeconds(10 + (Options.Count * Options.Size) / (1 * 1024 * 1024));
+ _transferTimeout = TimeSpan.FromSeconds(Options.Duration);
TransferManagerOptions managerOptions = new()
{
@@ -118,7 +118,7 @@ protected async Task RunAndVerifyTransferAsync(
private Task HandleFailure(TransferItemFailedEventArgs args)
{
- Console.WriteLine(args.Exception);
+ Console.WriteLine($"Transfer failure event - {args.Exception}");
return Task.CompletedTask;
}
}
diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/CopyDirectory.cs b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/CopyDirectory.cs
index c31d4705361d..0d232cc91007 100644
--- a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/CopyDirectory.cs
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/CopyDirectory.cs
@@ -26,8 +26,14 @@ public override async Task GlobalSetupAsync()
public override async Task GlobalCleanupAsync()
{
- await _sourceContainer.DeleteIfExistsAsync();
- await _destinationContainer.DeleteIfExistsAsync();
+ if (_sourceContainer != null)
+ {
+ await _sourceContainer.DeleteIfExistsAsync();
+ }
+ if (_destinationContainer != null)
+ {
+ await _destinationContainer.DeleteIfExistsAsync();
+ }
await base.GlobalCleanupAsync();
}
diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/DownloadDirectory.cs b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/DownloadDirectory.cs
index 1760f38c9bf3..d60181a712da 100644
--- a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/DownloadDirectory.cs
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/DownloadDirectory.cs
@@ -27,8 +27,14 @@ public override async Task GlobalSetupAsync()
public override async Task GlobalCleanupAsync()
{
- await _sourceContainer.DeleteIfExistsAsync();
- Directory.Delete(_destinationDirectory, true);
+ if (_sourceContainer != null)
+ {
+ await _sourceContainer.DeleteIfExistsAsync();
+ }
+ if (_destinationDirectory != null)
+ {
+ Directory.Delete(_destinationDirectory, true);
+ }
await base.GlobalCleanupAsync();
}
diff --git a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/UploadDirectory.cs b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/UploadDirectory.cs
index 2a53c450eea2..017832fb7edf 100644
--- a/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/UploadDirectory.cs
+++ b/sdk/storage/Azure.Storage.DataMovement.Blobs/perf/Azure.Storage.DataMovement.Blobs.Perf/Scenarios/UploadDirectory.cs
@@ -27,8 +27,14 @@ public override async Task GlobalSetupAsync()
public override async Task GlobalCleanupAsync()
{
- Directory.Delete(_sourceDirectory, true);
- await _destinationContainer.DeleteIfExistsAsync();
+ if (_sourceDirectory != null)
+ {
+ Directory.Delete(_sourceDirectory, true);
+ }
+ if (_destinationContainer != null)
+ {
+ await _destinationContainer.DeleteIfExistsAsync();
+ }
await base.GlobalCleanupAsync();
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs
index 744bfe3ebe10..d9482a8fc0d8 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/JobPlan/JobPlanFile.cs
@@ -2,6 +2,7 @@
// Licensed under the MIT License.
using System;
+using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Threading;
@@ -27,7 +28,7 @@ internal class JobPlanFile : IDisposable
/// <summary>
/// List of Job Part Plan Files associated with this job.
/// </summary>
- public Dictionary<int, JobPartPlanFile> JobParts { get; private set; }
+ public ConcurrentDictionary<int, JobPartPlanFile> JobParts { get; private set; }
/// <summary>
/// Lock for the memory mapped file to allow only one writer.
@@ -40,7 +41,7 @@ private JobPlanFile(string id, string filePath)
{
Id = id;
FilePath = filePath;
- JobParts = new Dictionary<int, JobPartPlanFile>();
+ JobParts = new();
WriteLock = new SemaphoreSlim(1);
}
diff --git a/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs b/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs
index 10d0ae06e7a6..a8105e1f06c1 100644
--- a/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs
+++ b/sdk/storage/Azure.Storage.DataMovement/src/LocalTransferCheckpointer.cs
@@ -128,7 +128,10 @@ public override async Task AddNewJobPartAsync(
cancellationToken).ConfigureAwait(false);
// Add the job part into the current state
- _transferStates[transferId].JobParts.Add(partNumber, mappedFile);
+ if (!_transferStates[transferId].JobParts.TryAdd(partNumber, mappedFile))
+ {
+ throw Errors.CollisionJobPart(transferId, partNumber);
+ }
}
public override Task<int> CurrentJobPartCountAsync(
@@ -413,7 +416,7 @@ private void InitializeExistingCheckpointer()
// Job plan file should already exist since we already iterated job plan files
if (_transferStates.TryGetValue(partPlanFileName.Id, out JobPlanFile jobPlanFile))
{
- jobPlanFile.JobParts.Add(
+ jobPlanFile.JobParts.TryAdd(
partPlanFileName.JobPartNumber,
JobPartPlanFile.CreateExistingPartPlanFile(partPlanFileName));
}