diff --git a/sdk/storage/azure-storage-blob/src/test/java/com/azure/storage/blob/APISpec.groovy b/sdk/storage/azure-storage-blob/src/test/java/com/azure/storage/blob/APISpec.groovy
index 0714209fdb4e..e989f13dc8c4 100644
--- a/sdk/storage/azure-storage-blob/src/test/java/com/azure/storage/blob/APISpec.groovy
+++ b/sdk/storage/azure-storage-blob/src/test/java/com/azure/storage/blob/APISpec.groovy
@@ -148,12 +148,12 @@ class APISpec extends Specification {
         // in case the upload or download open too many connections.
         System.setProperty("reactor.bufferSize.x", "16")
         System.setProperty("reactor.bufferSize.small", "100")
+        System.out.println(String.format("--------%s---------", testMode))
     }
 
     def setup() {
         String fullTestName = specificationContext.getCurrentIteration().getName().replace(' ', '').toLowerCase()
         String className = specificationContext.getCurrentSpec().getName()
-
         int iterationIndex = fullTestName.lastIndexOf("[")
         int substringIndex = (int) Math.min((iterationIndex != -1) ? iterationIndex : fullTestName.length(), 50)
         this.testName = fullTestName.substring(0, substringIndex)
diff --git a/sdk/storage/azure-storage-file-datalake/src/main/java/com/azure/storage/file/datalake/DataLakeFileClient.java b/sdk/storage/azure-storage-file-datalake/src/main/java/com/azure/storage/file/datalake/DataLakeFileClient.java
index df29def80f17..de6f534dfbc5 100644
--- a/sdk/storage/azure-storage-file-datalake/src/main/java/com/azure/storage/file/datalake/DataLakeFileClient.java
+++ b/sdk/storage/azure-storage-file-datalake/src/main/java/com/azure/storage/file/datalake/DataLakeFileClient.java
@@ -226,7 +226,7 @@ public PathInfo flush(long position) {
      */
     public PathInfo flush(long position, boolean overwrite) {
         DataLakeRequestConditions requestConditions = new DataLakeRequestConditions();
-        if (overwrite) {
+        if (!overwrite) {
             requestConditions = new DataLakeRequestConditions().setIfNoneMatch(Constants.HeaderConstants.ETAG_WILDCARD);
         }
         return flushWithResponse(position, false, false, null, requestConditions, null, Context.NONE).getValue();
diff --git a/sdk/storage/azure-storage-file-datalake/src/test/java/com/azure/storage/file/datalake/FileAPITest.groovy b/sdk/storage/azure-storage-file-datalake/src/test/java/com/azure/storage/file/datalake/FileAPITest.groovy
index fe001d58c08e..1d168f30d493 100644
--- a/sdk/storage/azure-storage-file-datalake/src/test/java/com/azure/storage/file/datalake/FileAPITest.groovy
+++ b/sdk/storage/azure-storage-file-datalake/src/test/java/com/azure/storage/file/datalake/FileAPITest.groovy
@@ -1418,7 +1418,7 @@ class FileAPITest extends APISpec {
         fc.flush(defaultDataSize)
         fc.append(new ByteArrayInputStream(defaultData.array()), 0, defaultDataSize)
         // Attempt to write data without overwrite enabled
-        fc.flush(defaultDataSize, true)
+        fc.flush(defaultDataSize, false)
 
         then:
         thrown(DataLakeStorageException)
diff --git a/sdk/storage/tests.yml b/sdk/storage/tests.yml
index cd3b2b8fada9..af692cf0dcc8 100644
--- a/sdk/storage/tests.yml
+++ b/sdk/storage/tests.yml
@@ -3,6 +3,12 @@ trigger: none
 jobs:
   - template: ../../eng/pipelines/templates/jobs/archetype-sdk-tests.yml
     parameters:
+      Matrix:
+        Windows - Java 8:
+          OSName: 'Windows'
+          OSVmImage: 'windows-2019'
+          JavaVersion: '1.8'
+          DisplayName: 'Windows - Java 8'
       ServiceDirectory: storage
       EnvVars:
         AZURE_TEST_MODE: LIVE