diff --git a/playground/MSTest1/MSTest1.csproj b/playground/MSTest1/MSTest1.csproj index c1a72605a4..a8f5883dae 100644 --- a/playground/MSTest1/MSTest1.csproj +++ b/playground/MSTest1/MSTest1.csproj @@ -6,9 +6,10 @@ - $(TargetFrameworks);net472 + $(TargetFrameworks);net472;net5.0 $(TargetFrameworks);net451 + false false diff --git a/playground/MSTest1/UnitTest1.cs b/playground/MSTest1/UnitTest1.cs index 4ae9e815df..aa92d18eef 100644 --- a/playground/MSTest1/UnitTest1.cs +++ b/playground/MSTest1/UnitTest1.cs @@ -13,5 +13,6 @@ public class UnitTest1 [TestMethod] public void TestMethod1() { + // Thread.Sleep(1000); } } diff --git a/playground/TestPlatform.Playground/Program.cs b/playground/TestPlatform.Playground/Program.cs index 9b8486ed55..184a5f28fc 100644 --- a/playground/TestPlatform.Playground/Program.cs +++ b/playground/TestPlatform.Playground/Program.cs @@ -39,39 +39,70 @@ static void Main(string[] args) var playground = Path.GetFullPath(Path.Combine(here, "..", "..", "..", "..")); var console = Path.Combine(here, "vstest.console", "vstest.console.exe"); - var consoleOptions = new ConsoleParameters - { - LogFilePath = Path.Combine(here, "logs", "log.txt"), - TraceLevel = TraceLevel.Verbose, - }; - var r = new VsTestConsoleWrapper(console, consoleOptions); var sourceSettings = @" true - 0 + 4 "; + var sources = new[] { - Path.Combine(playground, "MSTest1", "bin", "Debug", "net472", "MSTest1.dll") + Path.Combine(playground, "MSTest1", "bin", "Debug", "net472", "MSTest1.dll"), + Path.Combine(playground, "MSTest1", "bin", "Debug", "net5.0", "MSTest1.dll"), + @"C:\Users\jajares\source\repos\TestProject48\TestProject48\bin\Debug\net48\TestProject48.dll", + @"C:\Users\jajares\source\repos\TestProject48\TestProject1\bin\Debug\net48\win10-x64\TestProject1.dll" }; + // console mode + var settingsFile = Path.GetTempFileName(); + try + { + File.WriteAllText(settingsFile, sourceSettings); + var process = Process.Start(console, string.Join(" ", sources) + " --settings:" + settingsFile + " --listtests"); + process.WaitForExit(); + if (process.ExitCode != 0) + { + throw new Exception($"Process failed with {process.ExitCode}"); + } + } + finally + { + try { File.Delete(settingsFile); } catch { } + } + + // design mode + var consoleOptions = new ConsoleParameters + { + LogFilePath = Path.Combine(here, "logs", "log.txt"), + TraceLevel = TraceLevel.Verbose, + }; var options = new TestPlatformOptions(); - r.RunTestsWithCustomTestHost(sources, sourceSettings, options, new TestRunHandler(), new DebuggerTestHostLauncher()); + var r = new VsTestConsoleWrapper(console, consoleOptions); + var sessionHandler = new TestSessionHandler(); +#pragma warning disable CS0618 // Type or member is obsolete + r.StartTestSession(sources, sourceSettings, sessionHandler); +#pragma warning restore CS0618 // Type or member is obsolete + var discoveryHandler = new PlaygroundTestDiscoveryHandler(); + r.DiscoverTests(sources, sourceSettings, options, sessionHandler.TestSessionInfo, discoveryHandler); + r.RunTestsWithCustomTestHost(discoveryHandler.TestCases, sourceSettings, options, sessionHandler.TestSessionInfo, new TestRunHandler(), new DebuggerTestHostLauncher()); } public class PlaygroundTestDiscoveryHandler : ITestDiscoveryEventsHandler, ITestDiscoveryEventsHandler2 { private int _testCasesCount; + public List TestCases { get; internal set; } = new List(); + public void HandleDiscoveredTests(IEnumerable discoveredTestCases) { Console.WriteLine($"[DISCOVERY.PROGRESS]"); Console.WriteLine(WriteTests(discoveredTestCases)); 
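The runsettings literal changed above only shows its element values; the "0" to "4" change is the knob that raises the parallel level so several sources (and now several TFMs) can run at once. A minimal sketch of a settings string with that shape, assuming the changed element is MaxCpuCount (the standard parallel-level setting) and the boolean is InIsolation; the element names are assumptions, only the values mirror the change above:

    // Sketch only; element names are assumptions.
    var sourceSettings = @"
        <RunSettings>
            <RunConfiguration>
                <InIsolation>true</InIsolation>
                <!-- 0 lets the platform use one testhost per logical CPU; 4 caps it at four. -->
                <MaxCpuCount>4</MaxCpuCount>
            </RunConfiguration>
        </RunSettings>";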
_testCasesCount += discoveredTestCases.Count(); + if (discoveredTestCases != null) { TestCases.AddRange(discoveredTestCases); } } public void HandleDiscoveryComplete(long totalTests, IEnumerable lastChunk, bool isAborted) @@ -79,6 +110,7 @@ public void HandleDiscoveryComplete(long totalTests, IEnumerable lastC Console.WriteLine($"[DISCOVERY.COMPLETE] aborted? {isAborted}, tests count: {totalTests}"); Console.WriteLine("Last chunk:"); Console.WriteLine(WriteTests(lastChunk)); + if (lastChunk != null) { TestCases.AddRange(lastChunk); } } public void HandleDiscoveryComplete(DiscoveryCompleteEventArgs discoveryCompleteEventArgs, IEnumerable lastChunk) @@ -92,6 +124,7 @@ public void HandleDiscoveryComplete(DiscoveryCompleteEventArgs discoveryComplete Console.WriteLine(WriteSources(discoveryCompleteEventArgs.PartiallyDiscoveredSources)); Console.WriteLine("Not discovered:"); Console.WriteLine(WriteSources(discoveryCompleteEventArgs.NotDiscoveredSources)); + if (lastChunk != null) { TestCases.AddRange(lastChunk); } } public void HandleLogMessage(TestMessageLevel level, string message) @@ -106,7 +139,7 @@ public void HandleRawMessage(string rawMessage) private static string WriteTests(IEnumerable testCases) => testCases?.Any() == true - ? "\t" + string.Join("\n\t", testCases.Select(r => r.DisplayName)) + ? "\t" + string.Join("\n\t", testCases.Select(r => r.Source + " " + r.DisplayName)) : "\t"; private static string WriteSources(IEnumerable sources) @@ -183,3 +216,28 @@ public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo, Cancell } } } + +internal class TestSessionHandler : ITestSessionEventsHandler +{ + public TestSessionInfo TestSessionInfo { get; private set; } + + public void HandleLogMessage(TestMessageLevel level, string message) + { + + } + + public void HandleRawMessage(string rawMessage) + { + + } + + public void HandleStartTestSessionComplete(StartTestSessionCompleteEventArgs eventArgs) + { + TestSessionInfo = eventArgs.TestSessionInfo; + } + + public void HandleStopTestSessionComplete(StopTestSessionCompleteEventArgs eventArgs) + { + + } +} diff --git a/playground/TestPlatform.Playground/Properties/launchSettings.json b/playground/TestPlatform.Playground/Properties/launchSettings.json index c688670d9c..7cd5a38d95 100644 --- a/playground/TestPlatform.Playground/Properties/launchSettings.json +++ b/playground/TestPlatform.Playground/Properties/launchSettings.json @@ -5,9 +5,9 @@ "environmentVariables": { "VSTEST_CONNECTION_TIMEOUT": "999", "VSTEST_DEBUG_NOBP": "1", - "VSTEST_RUNNER_DEBUG_ATTACHVS": "1", - "VSTEST_HOST_DEBUG_ATTACHVS": "1", - "VSTEST_DATACOLLECTOR_DEBUG_ATTACHVS": "1" + "VSTEST_RUNNER_DEBUG_ATTACHVS": "0", + "VSTEST_HOST_DEBUG_ATTACHVS": "0", + "VSTEST_DATACOLLECTOR_DEBUG_ATTACHVS": "0" } } } diff --git a/scripts/build/TestPlatform.Dependencies.props b/scripts/build/TestPlatform.Dependencies.props index c1c05c5f9b..0f78ef53b2 100644 --- a/scripts/build/TestPlatform.Dependencies.props +++ b/scripts/build/TestPlatform.Dependencies.props @@ -33,9 +33,9 @@ Exact versions are used to avoid Nuget substituting them by closest match, if we make a typo. These versions need to be "statically" readable because we read this file as xml in our build and tests. --> - [2.2.9-preview-20220210-07] - [2.2.8] - [2.2.7] + [2.2.10-preview-20220414-01] + [2.2.10] + [2.2.8] [2.1.0] [2.1.0] [1.4.0] @@ -45,9 +45,9 @@ See Invoke-TestAssetsBuild in scripts/build.ps1. Exact versions are used to avoid Nuget substituting them by closest match, if we make a typo. 
These versions need to be "statically" readable because we read this file as xml in our build and tests. --> - [17.2.0-preview-20220131-20] - [17.1.0] - [17.0.0] + [17.2.0-preview-20220401-08] + [17.2.0] + [17.1.0] [16.6.1] [16.11.0] [15.9.2] diff --git a/scripts/build/TestPlatform.targets b/scripts/build/TestPlatform.targets index df4eb0ec58..737c067d2e 100644 --- a/scripts/build/TestPlatform.targets +++ b/scripts/build/TestPlatform.targets @@ -3,7 +3,7 @@ $(MSBuildThisFileDirectory)..\..\ false - $(NoWarn);CA1416 + $(NoWarn);CA1416;RS0037 diff --git a/src/AttachVS/AttachVs.cs b/src/AttachVS/AttachVs.cs index acbcfabe52..407c1a5045 100644 --- a/src/AttachVS/AttachVs.cs +++ b/src/AttachVS/AttachVs.cs @@ -138,19 +138,12 @@ private static bool AttachVs(Process vs, int pid) } } } - catch (COMException ex) + // Catch the exception if it is COMException coming directly, or coming from methodInvocation, otherwise just let it be. + catch (Exception ex) when (ex is COMException || (ex is TargetInvocationException tie && tie.InnerException is COMException)) { Trace($"ComException: Retrying in 250ms.\n{ex}"); Thread.Sleep(250); } - catch (TargetInvocationException ex) - { - if (ex.InnerException is not COMException) - throw; - - Trace($"ComException: Retrying in 250ms.\n{ex}"); - Thread.Sleep(250); - } } Marshal.ReleaseComObject(moniker[0]); diff --git a/src/Microsoft.TestPlatform.Client/TestPlatform.cs b/src/Microsoft.TestPlatform.Client/TestPlatform.cs index fc3f32da85..d029abb5d9 100644 --- a/src/Microsoft.TestPlatform.Client/TestPlatform.cs +++ b/src/Microsoft.TestPlatform.Client/TestPlatform.cs @@ -13,7 +13,6 @@ using Microsoft.VisualStudio.TestPlatform.Common; using Microsoft.VisualStudio.TestPlatform.Common.ExtensionFramework; using Microsoft.VisualStudio.TestPlatform.Common.Hosting; -using Microsoft.VisualStudio.TestPlatform.Common.Logging; using Microsoft.VisualStudio.TestPlatform.Common.Utilities; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine; using Microsoft.VisualStudio.TestPlatform.ObjectModel; @@ -71,34 +70,27 @@ protected internal TestPlatform( IFileHelper filehelper, ITestRuntimeProviderManager testHostProviderManager) { - TestEngine = testEngine; + _testEngine = testEngine; _fileHelper = filehelper; _testHostProviderManager = testHostProviderManager; } - /// - /// Gets or sets the test engine instance. - /// - private ITestEngine TestEngine { get; set; } + private readonly ITestEngine _testEngine; /// public IDiscoveryRequest CreateDiscoveryRequest( IRequestData requestData, DiscoveryCriteria discoveryCriteria!!, - TestPlatformOptions options) + TestPlatformOptions options, + Dictionary sourceToSourceDetailMap) { PopulateExtensions(discoveryCriteria.RunSettings, discoveryCriteria.Sources); // Initialize loggers. 
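The AttachVs change above collapses two separate catch blocks into a single catch with an exception filter, so a COMException is handled whether it surfaces directly or wrapped by reflection, while every other exception keeps propagating with its original stack instead of being caught and rethrown. A minimal standalone sketch of the same retry-on-COMException pattern (names here are illustrative, not the AttachVs code itself):

    using System;
    using System.Reflection;
    using System.Runtime.InteropServices;
    using System.Threading;

    internal static class ComRetrySketch
    {
        // Retries an action when a COMException is thrown directly or arrives wrapped
        // in a TargetInvocationException; anything else is not caught by the filter.
        public static void InvokeWithRetry(Action action, int attempts = 5)
        {
            for (var i = 0; i < attempts; i++)
            {
                try
                {
                    action();
                    return;
                }
                catch (Exception ex) when (ex is COMException
                    || (ex is TargetInvocationException tie && tie.InnerException is COMException))
                {
                    // Back off briefly and try again, mirroring the 250ms retry above.
                    Thread.Sleep(250);
                }
            }
        }
    }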
- ITestLoggerManager loggerManager = TestEngine.GetLoggerManager(requestData); + ITestLoggerManager loggerManager = _testEngine.GetLoggerManager(requestData); loggerManager.Initialize(discoveryCriteria.RunSettings); - ITestRuntimeProvider testHostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(discoveryCriteria.RunSettings); - TestPlatform.ThrowExceptionIfTestHostManagerIsNull(testHostManager, discoveryCriteria.RunSettings); - - testHostManager.Initialize(TestSessionMessageLogger.Instance, discoveryCriteria.RunSettings); - - IProxyDiscoveryManager discoveryManager = TestEngine.GetDiscoveryManager(requestData, testHostManager, discoveryCriteria); + IProxyDiscoveryManager discoveryManager = _testEngine.GetDiscoveryManager(requestData, discoveryCriteria, sourceToSourceDetailMap); discoveryManager.Initialize(options?.SkipDefaultAdapters ?? false); return new DiscoveryRequest(requestData, discoveryCriteria, discoveryManager, loggerManager); @@ -108,32 +100,17 @@ public IDiscoveryRequest CreateDiscoveryRequest( public ITestRunRequest CreateTestRunRequest( IRequestData requestData, TestRunCriteria testRunCriteria!!, - TestPlatformOptions options) + TestPlatformOptions options, + Dictionary sourceToSourceDetailMap) { IEnumerable sources = GetSources(testRunCriteria); PopulateExtensions(testRunCriteria.TestRunSettings, sources); // Initialize loggers. - ITestLoggerManager loggerManager = TestEngine.GetLoggerManager(requestData); + ITestLoggerManager loggerManager = _testEngine.GetLoggerManager(requestData); loggerManager.Initialize(testRunCriteria.TestRunSettings); - // TODO: PERF: this will create a testhost manager, and then it will pass that to GetExecutionManager, where it will - // be used only when we will run in-process. If we don't run in process, we will throw away the manager we just - // created and let the proxy parallel callbacks to create a new one. This seems to be very easy to move to the GetExecutionManager, - // and safe as well, so we create the manager only once. - // TODO: Of course TestEngine.GetExecutionManager is public api... - ITestRuntimeProvider testHostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(testRunCriteria.TestRunSettings); - TestPlatform.ThrowExceptionIfTestHostManagerIsNull(testHostManager, testRunCriteria.TestRunSettings); - - testHostManager.Initialize(TestSessionMessageLogger.Instance, testRunCriteria.TestRunSettings); - - // NOTE: The custom launcher should not be set when we have test session info available. - if (testRunCriteria.TestHostLauncher != null) - { - testHostManager.SetCustomLauncher(testRunCriteria.TestHostLauncher); - } - - IProxyExecutionManager executionManager = TestEngine.GetExecutionManager(requestData, testHostManager, testRunCriteria); + IProxyExecutionManager executionManager = _testEngine.GetExecutionManager(requestData, testRunCriteria, sourceToSourceDetailMap); executionManager.Initialize(options?.SkipDefaultAdapters ?? 
false); return new TestRunRequest(requestData, testRunCriteria, executionManager, loggerManager); @@ -143,7 +120,8 @@ public ITestRunRequest CreateTestRunRequest( public bool StartTestSession( IRequestData requestData, StartTestSessionCriteria testSessionCriteria!!, - ITestSessionEventsHandler eventsHandler) + ITestSessionEventsHandler eventsHandler, + Dictionary sourceToSourceDetailMap) { RunConfiguration runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(testSessionCriteria.RunSettings); TestAdapterLoadingStrategy strategy = runConfiguration.TestAdapterLoadingStrategy; @@ -155,7 +133,7 @@ public bool StartTestSession( return false; } - IProxyTestSessionManager testSessionManager = TestEngine.GetTestSessionManager(requestData, testSessionCriteria); + IProxyTestSessionManager testSessionManager = _testEngine.GetTestSessionManager(requestData, testSessionCriteria, sourceToSourceDetailMap); if (testSessionManager == null) { // The test session manager is null because the combination of runsettings and @@ -197,13 +175,13 @@ public void UpdateExtensions( IEnumerable pathToAdditionalExtensions, bool skipExtensionFilters) { - TestEngine.GetExtensionManager().UseAdditionalExtensions(pathToAdditionalExtensions, skipExtensionFilters); + _testEngine.GetExtensionManager().UseAdditionalExtensions(pathToAdditionalExtensions, skipExtensionFilters); } /// public void ClearExtensions() { - TestEngine.GetExtensionManager().ClearExtensions(); + _testEngine.GetExtensionManager().ClearExtensions(); } private static void ThrowExceptionIfTestHostManagerIsNull( diff --git a/src/Microsoft.TestPlatform.Common/Hosting/ITestRuntimeProviderManager.cs b/src/Microsoft.TestPlatform.Common/Hosting/ITestRuntimeProviderManager.cs index 781e002fad..d1073bc2ab 100644 --- a/src/Microsoft.TestPlatform.Common/Hosting/ITestRuntimeProviderManager.cs +++ b/src/Microsoft.TestPlatform.Common/Hosting/ITestRuntimeProviderManager.cs @@ -1,12 +1,14 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. +using System.Collections.Generic; + using Microsoft.VisualStudio.TestPlatform.ObjectModel.Host; namespace Microsoft.VisualStudio.TestPlatform.Common.Hosting; internal interface ITestRuntimeProviderManager { - ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string runConfiguration); + ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string runConfiguration, List sources); ITestRuntimeProvider GetTestHostManagerByUri(string hostUri); } diff --git a/src/Microsoft.TestPlatform.Common/Hosting/TestRunTimeProviderManager.cs b/src/Microsoft.TestPlatform.Common/Hosting/TestRunTimeProviderManager.cs index 5730b4de84..d0cbf55fec 100644 --- a/src/Microsoft.TestPlatform.Common/Hosting/TestRunTimeProviderManager.cs +++ b/src/Microsoft.TestPlatform.Common/Hosting/TestRunTimeProviderManager.cs @@ -2,6 +2,7 @@ // Licensed under the MIT license. See LICENSE file in the project root for full license information. 
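CreateDiscoveryRequest, CreateTestRunRequest and StartTestSession now receive a per-source detail map, which is what lets a single request mix target frameworks and architectures instead of resolving one testhost up front. A hypothetical sketch of building such a map; the exact shape of SourceDetail is not shown in this diff, so the property names and the Framework/Architecture values below are assumptions:

    using System.Collections.Generic;
    using Microsoft.VisualStudio.TestPlatform.ObjectModel;

    // Hypothetical: one entry per test container, carrying its resolved framework and architecture.
    var sourceToSourceDetailMap = new Dictionary<string, SourceDetail>
    {
        ["MSTest1.net472.dll"] = new SourceDetail
        {
            Source = "MSTest1.net472.dll",
            Framework = Framework.FromString(".NETFramework,Version=v4.7.2"),
            Architecture = Architecture.X64,
        },
        ["MSTest1.net5.0.dll"] = new SourceDetail
        {
            Source = "MSTest1.net5.0.dll",
            Framework = Framework.FromString(".NETCoreApp,Version=v5.0"),
            Architecture = Architecture.X64,
        },
    };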
using System; +using System.Collections.Generic; using Microsoft.VisualStudio.TestPlatform.Common.Logging; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Host; @@ -44,7 +45,7 @@ public ITestRuntimeProvider GetTestHostManagerByUri(string hostUri) return host?.Value; } - public virtual ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string runConfiguration) + public virtual ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string runConfiguration, List _) { foreach (var testExtension in _testHostExtensionManager.TestExtensions) { diff --git a/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyDiscoveryManager.cs b/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyDiscoveryManager.cs index 96011d53bc..9e32610fd4 100644 --- a/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyDiscoveryManager.cs +++ b/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyDiscoveryManager.cs @@ -10,7 +10,7 @@ namespace Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine; /// /// Interface defining the parallel discovery manager /// -public interface IParallelProxyDiscoveryManager : IParallelOperationManager, IProxyDiscoveryManager +public interface IParallelProxyDiscoveryManager : IProxyDiscoveryManager { /// /// Indicates if user requested an abortion diff --git a/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyExecutionManager.cs b/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyExecutionManager.cs index 80c1675c8f..8b18e34d47 100644 --- a/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyExecutionManager.cs +++ b/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/IParallelProxyExecutionManager.cs @@ -12,7 +12,7 @@ namespace Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine; /// /// Interface defining the parallel execution manager /// -public interface IParallelProxyExecutionManager : IParallelOperationManager, IProxyExecutionManager +public interface IParallelProxyExecutionManager : IProxyExecutionManager { /// /// Handles Partial Run Complete event coming from a specific concurrent proxy execution manager diff --git a/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/ITestEngine.cs b/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/ITestEngine.cs index 8fea354758..369049c605 100644 --- a/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/ITestEngine.cs +++ b/src/Microsoft.TestPlatform.Common/Interfaces/Engine/ClientProtocol/ITestEngine.cs @@ -1,9 +1,9 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using System.Collections.Generic; -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Host; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; #nullable disable @@ -28,8 +28,8 @@ public interface ITestEngine /// An IProxyDiscoveryManager object that can do discovery. IProxyDiscoveryManager GetDiscoveryManager( IRequestData requestData, - ITestRuntimeProvider testHostManager, - DiscoveryCriteria discoveryCriteria); + DiscoveryCriteria discoveryCriteria, + IDictionary sourceToSourceDetailMap); /// /// Fetches the ExecutionManager for this engine. 
This manager would provide all @@ -45,8 +45,8 @@ IProxyDiscoveryManager GetDiscoveryManager( /// An IProxyExecutionManager object that can do execution. IProxyExecutionManager GetExecutionManager( IRequestData requestData, - ITestRuntimeProvider testHostManager, - TestRunCriteria testRunCriteria); + TestRunCriteria testRunCriteria, + IDictionary sourceToSourceDetailMap); /// /// Fetches the TestSessionManager for this engine. This manager would provide all @@ -63,7 +63,8 @@ IProxyExecutionManager GetExecutionManager( /// An IProxyTestSessionManager object that can manage test sessions. IProxyTestSessionManager GetTestSessionManager( IRequestData requestData, - StartTestSessionCriteria testSessionCriteria); + StartTestSessionCriteria testSessionCriteria, + IDictionary sourceToSourceDetailMap); /// /// Fetches the extension manager for this engine. This manager would provide extensibility diff --git a/src/Microsoft.TestPlatform.Common/PublicAPI/PublicAPI.Shipped.txt b/src/Microsoft.TestPlatform.Common/PublicAPI/PublicAPI.Shipped.txt index c5d6770657..b9d09e94a1 100644 --- a/src/Microsoft.TestPlatform.Common/PublicAPI/PublicAPI.Shipped.txt +++ b/src/Microsoft.TestPlatform.Common/PublicAPI/PublicAPI.Shipped.txt @@ -174,8 +174,6 @@ Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ClientProtocol.TestExecut Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ClientProtocol.TestExecutionContext.TestExecutionContext(long frequencyOfRunStatsChangeEvent, System.TimeSpan runStatsChangeEventTimeout, bool inIsolation, bool keepAlive, bool isDataCollectionEnabled, bool areTestCaseLevelEventsRequired, bool hasTestRun, bool isDebug, string testCaseFilter, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.FilterOptions filterOptions) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ClientProtocol.TestExecutionContext.TestRunConfiguration.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.RunConfiguration Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ClientProtocol.TestExecutionContext.TestRunConfiguration.set -> void -Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelOperationManager -Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelOperationManager.UpdateParallelLevel(int parallelLevel) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelProxyDiscoveryManager Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelProxyDiscoveryManager.HandlePartialDiscoveryComplete(Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyDiscoveryManager proxyDiscoveryManager, long totalTests, System.Collections.Generic.IEnumerable lastChunk, bool isAborted) -> bool Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelProxyExecutionManager @@ -200,11 +198,8 @@ Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestCaseEventsHandler.Se Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestCaseEventsHandler.SendTestCaseStart(Microsoft.VisualStudio.TestPlatform.ObjectModel.TestCase testCase) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestCaseEventsHandler.SendTestResult(Microsoft.VisualStudio.TestPlatform.ObjectModel.TestResult result) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine -Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetDiscoveryManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Host.ITestRuntimeProvider testHostManager, 
Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria discoveryCriteria) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyDiscoveryManager -Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetExecutionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Host.ITestRuntimeProvider testHostManager, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCriteria testRunCriteria) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyExecutionManager Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetExtensionManager() -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestExtensionManager Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetLoggerManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestLoggerManager -Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetTestSessionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria testSessionCriteria) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyTestSessionManager Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestExtensionManager Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestExtensionManager.ClearExtensions() -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestExtensionManager.UseAdditionalExtensions(System.Collections.Generic.IEnumerable pathToAdditionalExtensions, bool skipExtensionFilters) -> void @@ -251,7 +246,7 @@ static Microsoft.VisualStudio.TestPlatform.Common.Utilities.RunSettingsUtilities virtual Microsoft.VisualStudio.TestPlatform.Common.ExtensionFramework.TestPluginCache.GetFilteredExtensions(System.Collections.Generic.List extensions, string endsWithPattern) -> System.Collections.Generic.IEnumerable virtual Microsoft.VisualStudio.TestPlatform.Common.ExtensionFramework.Utilities.TestPluginInformation.IdentifierData.get -> string virtual Microsoft.VisualStudio.TestPlatform.Common.ExtensionFramework.Utilities.TestPluginInformation.Metadata.get -> System.Collections.Generic.ICollection -virtual Microsoft.VisualStudio.TestPlatform.Common.Hosting.TestRuntimeProviderManager.GetTestHostManagerByRunConfiguration(string runConfiguration) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Host.ITestRuntimeProvider +virtual Microsoft.VisualStudio.TestPlatform.Common.Hosting.TestRuntimeProviderManager.GetTestHostManagerByRunConfiguration(string runConfiguration, System.Collections.Generic.List _) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Host.ITestRuntimeProvider Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyTestSessionManager.StartSession(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestSessionEventsHandler eventsHandler, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData) -> bool Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyTestSessionManager.StopSession(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData) -> bool Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelProxyDiscoveryManager.IsAbortRequested.get -> bool @@ -261,3 +256,9 @@ Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus 
Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus.NotDiscovered = 0 -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus.PartiallyDiscovered = 1 -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus.FullyDiscovered = 2 -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.DiscoveryStatus +Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelOperationManager +Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IParallelOperationManager.UpdateParallelLevel(int parallelLevel) -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetDiscoveryManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria discoveryCriteria, System.Collections.Generic.IDictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyDiscoveryManager +Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetExecutionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCriteria testRunCriteria, System.Collections.Generic.IDictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyExecutionManager +Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestEngine.GetTestSessionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria testSessionCriteria, System.Collections.Generic.IDictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyTestSessionManager + diff --git a/src/Microsoft.TestPlatform.CoreUtilities/FeatureFlag/FeatureFlag.cs b/src/Microsoft.TestPlatform.CoreUtilities/FeatureFlag/FeatureFlag.cs index 41ba1e26d8..26ab30b2c9 100644 --- a/src/Microsoft.TestPlatform.CoreUtilities/FeatureFlag/FeatureFlag.cs +++ b/src/Microsoft.TestPlatform.CoreUtilities/FeatureFlag/FeatureFlag.cs @@ -27,7 +27,7 @@ internal partial class FeatureFlag : IFeatureFlag { private readonly ConcurrentDictionary _cache = new(); - public static IFeatureFlag Instance { get; } = new FeatureFlag(); + public static IFeatureFlag Instance { get; private set; } = new FeatureFlag(); private FeatureFlag() { } @@ -47,6 +47,22 @@ private FeatureFlag() { } // Faster JSON serialization relies on less internals of NewtonsoftJson, and on some additional caching. public const string DISABLE_FASTER_JSON_SERIALIZATION = VSTEST_ + nameof(DISABLE_FASTER_JSON_SERIALIZATION); + + // Forces vstest.console to run all sources using the same target framework (TFM) and architecture, instead of allowing + // multiple different tfms and architectures to run at the same time. 
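The constant declared just below composes its name from the VSTEST_ prefix, so the environment variable that turns the new behavior off is presumably VSTEST_DISABLE_MULTI_TFM_RUN. A minimal sketch of opting out before starting a run; the variable name is inferred from that naming convention, not stated explicitly in this diff:

    using System;

    // Assumption: VSTEST_ + nameof(DISABLE_MULTI_TFM_RUN) yields this variable name.
    // Child vstest processes started from here would then fall back to single-TFM behavior.
    Environment.SetEnvironmentVariable("VSTEST_DISABLE_MULTI_TFM_RUN", "1");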
+ public const string DISABLE_MULTI_TFM_RUN = VSTEST_ + nameof(DISABLE_MULTI_TFM_RUN); + + [Obsolete("Only use this in tests.")] + internal static void Reset() + { + Instance = new FeatureFlag(); + } + + [Obsolete("Only use this in tests.")] + internal void SetFlag(string key, bool value) + { + _cache[key] = value; + } } #endif diff --git a/src/Microsoft.TestPlatform.CoreUtilities/Friends.cs b/src/Microsoft.TestPlatform.CoreUtilities/Friends.cs index fd7d102a35..d39fb25c14 100644 --- a/src/Microsoft.TestPlatform.CoreUtilities/Friends.cs +++ b/src/Microsoft.TestPlatform.CoreUtilities/Friends.cs @@ -7,3 +7,5 @@ [assembly: InternalsVisibleTo("vstest.console.arm64, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("Microsoft.TestPlatform.CommunicationUtilities, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("Microsoft.VisualStudio.TestPlatform.ObjectModel, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] + +[assembly: InternalsVisibleTo("vstest.ProgrammerTests, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelOperationManager.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelOperationManager.cs index 686ca58a1f..a1723e98d2 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelOperationManager.cs +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelOperationManager.cs @@ -2,215 +2,226 @@ // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; -using System.Collections; -using System.Collections.Concurrent; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; +using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.Parallel; using Microsoft.VisualStudio.TestPlatform.ObjectModel; -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine; - -#nullable disable namespace Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; /// -/// Abstract class having common parallel manager implementation +/// Manages work that is done on multiple managers (testhosts) in parallel such as parallel discovery or parallel run. 
/// -internal abstract class ParallelOperationManager : IParallelOperationManager, IDisposable +internal sealed class ParallelOperationManager : IDisposable { - #region ConcurrentManagerInstanceData - - protected Func CreateNewConcurrentManager { get; set; } - - /// - /// Gets a value indicating whether hosts are shared. - /// - protected bool SharedHosts { get; private set; } - - private ConcurrentDictionary _concurrentManagerHandlerMap; - - /// - /// Singleton Instance of this class - /// - protected static T s_instance; + private readonly Func _createNewManager; /// /// Default number of Processes /// - private int _currentParallelLevel; - - #endregion - - #region Concurrency Keeper Objects - - /// - /// LockObject to iterate our sourceEnumerator in parallel - /// We can use the sourceEnumerator itself as lockObject, but since its a changing object - it's risky to use it as one - /// - protected object _sourceEnumeratorLockObject = new(); + private TEventHandler? _eventHandler; + private Func? _getEventHandler; + private Action? _runWorkload; + private bool _acceptMoreWork; + private readonly List> _workloads = new(); + private readonly List _managerSlots = new(); - #endregion + private readonly object _lock = new(); - protected ParallelOperationManager(Func createNewManager, int parallelLevel, bool sharedHosts) - { - CreateNewConcurrentManager = createNewManager; - SharedHosts = sharedHosts; - - // Update Parallel Level - UpdateParallelLevel(parallelLevel); - } - - /// - /// Remove and dispose a manager from concurrent list of manager. - /// - /// Manager to remove - public void RemoveManager(T manager) - { - _concurrentManagerHandlerMap.TryRemove(manager, out _); - } + public int MaxParallelLevel { get; } + public int OccupiedSlotCount { get; private set; } + public int AvailableSlotCount { get; private set; } /// - /// Add a manager in concurrent list of manager. + /// Creates new instance of ParallelOperationManager. /// - /// Manager to add - /// eventHandler of the manager - public void AddManager(T manager, TU handler) + /// Creates a new manager that is responsible for running a single part of the overall workload. + /// A manager is typically a testhost, and the part of workload is discovering or running a single test dll. + /// Determines the maximum amount of parallel managers that can be active at the same time. + public ParallelOperationManager(Func createNewManager, int parallelLevel) { - _concurrentManagerHandlerMap.TryAdd(manager, handler); + _createNewManager = createNewManager; + MaxParallelLevel = parallelLevel; + ClearSlots(acceptMoreWork: true); } - /// - /// Update event handler for the manager. - /// If it is a new manager, add this. - /// - /// Manager to update - /// event handler to update for manager - public void UpdateHandlerForManager(T manager, TU handler) + private void ClearSlots(bool acceptMoreWork) { - if (_concurrentManagerHandlerMap.ContainsKey(manager)) - { - _concurrentManagerHandlerMap[manager] = handler; - } - else + lock (_lock) { - AddManager(manager, handler); + _acceptMoreWork = acceptMoreWork; + _managerSlots.Clear(); + _managerSlots.AddRange(Enumerable.Range(0, MaxParallelLevel).Select(_ => new Slot())); + SetOccupiedSlotCount(); } } - /// - /// Get the event handler associated with the manager. 
- /// - /// Manager - public TU GetHandlerForGivenManager(T manager) + private void SetOccupiedSlotCount() { - return _concurrentManagerHandlerMap[manager]; + AvailableSlotCount = _managerSlots.Count(s => s.IsAvailable); + OccupiedSlotCount = _managerSlots.Count - AvailableSlotCount; } - /// - /// Get total number of active concurrent manager - /// - public int GetConcurrentManagersCount() + public void StartWork( + List> workloads!!, + TEventHandler eventHandler!!, + Func getEventHandler!!, + Action runWorkload!!) { - return _concurrentManagerHandlerMap.Count; + _eventHandler = eventHandler; + _getEventHandler = getEventHandler; + _runWorkload = runWorkload; + + _workloads.AddRange(workloads); + + // This creates as many slots as possible even though we might not use them when we get less workloads to process, + // this is not a big issue, and not worth optimizing, because the parallel level is determined by the logical CPU count, + // so it is a small number. + ClearSlots(acceptMoreWork: true); + RunWorkInParallel(); } - /// - /// Get instances of all active concurrent manager - /// - public IEnumerable GetConcurrentManagerInstances() + // This does not do anything in parallel, all the workloads we schedule are offloaded to separate Task in the _runWorkload callback. + // I did not want to change that, yet but this is the correct place to do that offloading. Not each manager. + private bool RunWorkInParallel() { - return _concurrentManagerHandlerMap.Keys.ToList(); - } + // TODO: Right now we don't re-use shared hosts, but if we did, this is the place + // where we should find a workload that fits the manager if any of them is shared. + // Or tear it down, and start a new one. + if (_eventHandler == null) + throw new InvalidOperationException($"{nameof(_eventHandler)} was not provided."); - /// - /// Updates the Concurrent Executors according to new parallel setting - /// - /// Number of Parallel Executors allowed - public void UpdateParallelLevel(int newParallelLevel) - { - if (_concurrentManagerHandlerMap == null) + if (_getEventHandler == null) + throw new InvalidOperationException($"{nameof(_getEventHandler)} was not provided."); + + if (_runWorkload == null) + throw new InvalidOperationException($"{nameof(_runWorkload)} was not provided."); + + // Reserve slots and assign them work under the lock so we keep + // the slots consistent. + List workToRun = new(); + lock (_lock) { - // not initialized yet - // create rest of concurrent clients other than default one - _concurrentManagerHandlerMap = new ConcurrentDictionary(); - for (int i = 0; i < newParallelLevel; i++) + if (_workloads.Count == 0) + return false; + + // When HandlePartialDiscovery or HandlePartialRun are in progress, and we call StopAllManagers, + // it is possible that we will clear all slots, and have RunWorkInParallel waiting on the lock, + // so when it is allowed to enter it will try to add more work, but we already cancelled, + // so we should not start more work. 
+ if (!_acceptMoreWork) + return false; + + var availableSlots = _managerSlots.Where(slot => slot.IsAvailable).ToList(); + var availableWorkloads = _workloads.Where(workload => workload != null).ToList(); + var amount = Math.Min(availableSlots.Count, availableWorkloads.Count); + var workloadsToRun = availableWorkloads.Take(amount).ToList(); + + for (int i = 0; i < amount; i++) + { + var slot = availableSlots[i]; + slot.IsAvailable = false; + var workload = workloadsToRun[i]; + workToRun.Add(new SlotWorkloadPair(slot, workload)); + _workloads.Remove(workload); + } + + SetOccupiedSlotCount(); + + foreach (var pair in workToRun) { - AddManager(CreateNewConcurrentManager(), default); + var manager = _createNewManager(pair.Workload.Provider); + var eventHandler = _getEventHandler(_eventHandler, manager); + pair.Slot.EventHandler = eventHandler; + pair.Slot.Manager = manager; + pair.Slot.ManagerInfo = pair.Workload.Provider; + pair.Slot.Work = pair.Workload.Work; } } - else if (_currentParallelLevel != newParallelLevel) + + // Kick of the work in parallel outside of the lock so if we have more requests to run + // that come in at the same time we only block them from reserving the same slot at the same time + // but not from starting their assigned work at the same time. + foreach (var pair in workToRun) { - // If number of concurrent clients is less than the new level - // Create more concurrent clients and update the list - if (_currentParallelLevel < newParallelLevel) + try { - for (int i = 0; i < newParallelLevel - _currentParallelLevel; i++) - { - AddManager(CreateNewConcurrentManager(), default); - } + _runWorkload(pair.Slot.Manager!, pair.Slot.EventHandler!, pair.Workload.Work); } - else + finally { - // If number of concurrent clients is more than the new level - // Dispose off the extra ones - int managersCount = _currentParallelLevel - newParallelLevel; - - foreach (var concurrentManager in GetConcurrentManagerInstances()) - { - if (managersCount == 0) - { - break; - } - else - { - RemoveManager(concurrentManager); - managersCount--; - } - } + // clean the slot or something, to make sure we don't keep it reserved. } } - // Update current parallel setting to new one - _currentParallelLevel = newParallelLevel; + // Return true when we started more work. Or false, when there was nothing more to do. + // This will propagate to handling of partial discovery or partial run. + return workToRun.Count > 0; } - public void Dispose() + public bool RunNextWork(TManager completedManager!!) + { + ClearCompletedSlot(completedManager); + return RunWorkInParallel(); + } + + private void ClearCompletedSlot(TManager completedManager) { - if (_concurrentManagerHandlerMap != null) + lock (_lock) { - foreach (var managerInstance in GetConcurrentManagerInstances()) + var completedSlot = _managerSlots.Where(s => ReferenceEquals(completedManager, s.Manager)).ToList(); + // When HandlePartialDiscovery or HandlePartialRun are in progress, and we call StopAllManagers, + // it is possible that we will clear all slots, while ClearCompletedSlot is waiting on the lock, + // so when it is allowed to enter it will fail to find the respective slot and fail. In this case it is + // okay that the slot is not found, and we do nothing, because we already stopped all work and cleared the slots. 
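The bookkeeping above follows a reserve-under-lock, start-outside-lock pattern: slots are paired with workloads while holding _lock, and the work itself is only kicked off after the lock is released, so concurrent completions can reserve other slots without waiting on running work. A minimal standalone sketch of that pattern, with illustrative names rather than the types used in this class:

    using System;
    using System.Collections.Generic;
    using System.Threading.Tasks;

    internal sealed class SlotSchedulerSketch
    {
        private readonly object _lock = new();
        private readonly Queue<Action> _pending = new();
        private int _freeSlots;

        public SlotSchedulerSketch(int parallelLevel) => _freeSlots = parallelLevel;

        public void Enqueue(Action work)
        {
            lock (_lock) { _pending.Enqueue(work); }
            StartPending();
        }

        // Called when a piece of work completes, freeing its slot for the next workload.
        public void OnCompleted()
        {
            lock (_lock) { _freeSlots++; }
            StartPending();
        }

        private void StartPending()
        {
            var toStart = new List<Action>();
            lock (_lock)
            {
                // Reserve slots and dequeue work under the lock to keep the counts consistent.
                while (_freeSlots > 0 && _pending.Count > 0)
                {
                    _freeSlots--;
                    toStart.Add(_pending.Dequeue());
                }
            }

            // Start the reserved work outside the lock so other callers are only blocked
            // while slots are being reserved, not while work is being kicked off.
            foreach (var work in toStart)
            {
                Task.Run(work);
            }
        }
    }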
+ if (completedSlot.Count == 0) { - RemoveManager(managerInstance); + if (_acceptMoreWork) + { + throw new InvalidOperationException("The provided manager was not found in any slot."); + } + else + { + return; + } } - } - s_instance = default; - } + if (completedSlot.Count > 1) + { + throw new InvalidOperationException("The provided manager was found in multiple slots."); + } - protected void DoActionOnAllManagers(Action action, bool doActionsInParallel = false) - { - if (_concurrentManagerHandlerMap == null - || _concurrentManagerHandlerMap.IsEmpty) - { - return; + var slot = completedSlot[0]; + slot.IsAvailable = true; + + SetOccupiedSlotCount(); } + } + public void DoActionOnAllManagers(Action action, bool doActionsInParallel = false) + { + // We don't need to lock here, we just grab the current list of + // slots that are occupied (have managers) and run action on each one of them. + var managers = _managerSlots.Where(slot => !slot.IsAvailable).Select(slot => slot.Manager).ToList(); int i = 0; - var actionTasks = new Task[_concurrentManagerHandlerMap.Count]; - foreach (var client in GetConcurrentManagerInstances()) + var actionTasks = new Task[managers.Count]; + foreach (var manager in managers) { + if (manager == null) + continue; + // Read the array before firing the task - beware of closures if (doActionsInParallel) { - actionTasks[i] = Task.Run(() => action(client)); + actionTasks[i] = Task.Run(() => action(manager)); i++; } else { - DoManagerAction(() => action(client)); + DoManagerAction(() => action(manager)); } } @@ -235,24 +246,37 @@ private static void DoManagerAction(Action action) } } - /// - /// Fetches the next data object for the concurrent executor to work on - /// - /// source data to work on - source file or testCaseList - /// True, if data exists. False otherwise - protected bool TryFetchNextSource(IEnumerator enumerator, out TY source) + internal void StopAllManagers() + { + ClearSlots(acceptMoreWork: false); + } + + public void Dispose() + { + ClearSlots(acceptMoreWork: false); + } + + private class Slot { - source = default; - var hasNext = false; - lock (_sourceEnumeratorLockObject) + public bool IsAvailable { get; set; } = true; + + public TManager? Manager { get; set; } + + public TestRuntimeProviderInfo? ManagerInfo { get; set; } + + public TEventHandler? EventHandler { get; set; } + + public TWorkload? 
Work { get; set; } + } + + private class SlotWorkloadPair + { + public SlotWorkloadPair(Slot slot, ProviderSpecificWorkload workload) { - if (enumerator != null && enumerator.MoveNext()) - { - source = (TY)enumerator.Current; - hasNext = source != null; - } + Slot = slot; + Workload = workload; } - - return hasNext; + public Slot Slot { get; } + public ProviderSpecificWorkload Workload { get; } } } diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyDiscoveryManager.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyDiscoveryManager.cs index 85570a66fe..23f8f69574 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyDiscoveryManager.cs +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyDiscoveryManager.cs @@ -20,18 +20,17 @@ namespace Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.Parallel; /// /// ParallelProxyDiscoveryManager that manages parallel discovery /// -internal class ParallelProxyDiscoveryManager : ParallelOperationManager, IParallelProxyDiscoveryManager +internal class ParallelProxyDiscoveryManager : IParallelProxyDiscoveryManager { private readonly IDataSerializer _dataSerializer; private readonly DiscoveryDataAggregator _dataAggregator; - private readonly IRequestData _requestData; - + private readonly ParallelOperationManager _parallelOperationManager; + private readonly Dictionary _sourceToTestHostProviderMap; private int _discoveryCompletedClients; private int _availableTestSources = -1; - private DiscoveryCriteria? _actualDiscoveryCriteria; - private IEnumerator? _sourceEnumerator; - private ITestDiscoveryEventsHandler2? _currentDiscoveryEventsHandler; + private bool _skipDefaultAdapters; + private readonly IRequestData _requestData; public bool IsAbortRequested { get; private set; } @@ -40,17 +39,31 @@ internal class ParallelProxyDiscoveryManager : ParallelOperationManager private readonly object _discoveryStatusLockObject = new(); - public ParallelProxyDiscoveryManager(IRequestData requestData, Func actualProxyManagerCreator, DiscoveryDataAggregator dataAggregator, int parallelLevel, bool sharedHosts) - : this(requestData, actualProxyManagerCreator, dataAggregator, JsonDataSerializer.Instance, parallelLevel, sharedHosts) + public ParallelProxyDiscoveryManager( + IRequestData requestData, + Func actualProxyManagerCreator, + DiscoveryDataAggregator dataAggregator, + int parallelLevel, + List testHostProviders) + : this(requestData, actualProxyManagerCreator, dataAggregator, JsonDataSerializer.Instance, parallelLevel, testHostProviders) { } - internal ParallelProxyDiscoveryManager(IRequestData requestData, Func actualProxyManagerCreator, DiscoveryDataAggregator dataAggregator, IDataSerializer dataSerializer, int parallelLevel, bool sharedHosts) - : base(actualProxyManagerCreator, parallelLevel, sharedHosts) + internal ParallelProxyDiscoveryManager( + IRequestData requestData, + Func actualProxyManagerCreator, + DiscoveryDataAggregator dataAggregator, + IDataSerializer dataSerializer, + int parallelLevel, + List testHostProviders) { _requestData = requestData; _dataSerializer = dataSerializer; _dataAggregator = dataAggregator; + _parallelOperationManager = new(actualProxyManagerCreator, parallelLevel); + _sourceToTestHostProviderMap = testHostProviders + .SelectMany(provider => provider.SourceDetails.Select(s => new KeyValuePair(s.Source, provider))) + .ToDictionary(pair => pair.Key, pair => pair.Value); } #region IProxyDiscoveryManager @@ 
-58,20 +71,16 @@ internal ParallelProxyDiscoveryManager(IRequestData requestData, Func public void Initialize(bool skipDefaultAdapters) { - DoActionOnAllManagers((proxyManager) => proxyManager.Initialize(skipDefaultAdapters), doActionsInParallel: true); + _skipDefaultAdapters = skipDefaultAdapters; } /// - public void DiscoverTests(DiscoveryCriteria discoveryCriteria, ITestDiscoveryEventsHandler2 eventHandler) + public void DiscoverTests(DiscoveryCriteria discoveryCriteria!!, ITestDiscoveryEventsHandler2 eventHandler!!) { - _actualDiscoveryCriteria = discoveryCriteria; - - // Set the enumerator for parallel yielding of sources - // Whenever a concurrent executor becomes free, it picks up the next source using this enumerator - _sourceEnumerator = discoveryCriteria.Sources.GetEnumerator(); - _availableTestSources = discoveryCriteria.Sources.Count(); + var workloads = SplitToWorkloads(discoveryCriteria, _sourceToTestHostProviderMap); + _availableTestSources = workloads.Count; - EqtTrace.Verbose($"ParallelProxyDiscoveryManager.DiscoverTests: Start discovery. Total sources: {_availableTestSources}"); + EqtTrace.Verbose("ParallelProxyDiscoveryManager.DiscoverTests: Start discovery. Total sources: " + _availableTestSources); // Mark all sources as NotDiscovered here because if we get an early cancellation it's // possible that we didn't yet start all the proxy managers and so we didn't mark all sources @@ -81,27 +90,35 @@ public void DiscoverTests(DiscoveryCriteria discoveryCriteria, ITestDiscoveryEve // marked as NotDiscovered. _dataAggregator.MarkSourcesWithStatus(discoveryCriteria.Sources, DiscoveryStatus.NotDiscovered); - DiscoverTestsPrivate(eventHandler); + _parallelOperationManager.StartWork(workloads, eventHandler, GetParallelEventHandler, DiscoverTestsOnConcurrentManager); } + private ITestDiscoveryEventsHandler2 GetParallelEventHandler(ITestDiscoveryEventsHandler2 eventHandler, IProxyDiscoveryManager concurrentManager) + => new ParallelDiscoveryEventsHandler( + _requestData, + concurrentManager, + eventHandler, + this, + _dataAggregator); + /// public void Abort() { IsAbortRequested = true; - DoActionOnAllManagers((proxyManager) => proxyManager.Abort(), doActionsInParallel: true); + _parallelOperationManager.DoActionOnAllManagers((proxyManager) => proxyManager.Abort(), doActionsInParallel: true); } /// public void Abort(ITestDiscoveryEventsHandler2 eventHandler) { IsAbortRequested = true; - DoActionOnAllManagers((proxyManager) => proxyManager.Abort(eventHandler), doActionsInParallel: true); + _parallelOperationManager.DoActionOnAllManagers((proxyManager) => proxyManager.Abort(eventHandler), doActionsInParallel: true); } /// public void Close() { - DoActionOnAllManagers(proxyManager => proxyManager.Close(), doActionsInParallel: true); + _parallelOperationManager.DoActionOnAllManagers(proxyManager => proxyManager.Close(), doActionsInParallel: true); } #endregion @@ -117,12 +134,13 @@ public bool HandlePartialDiscoveryComplete(IProxyDiscoveryManager proxyDiscovery var notDiscoveredCount = _dataAggregator.GetSourcesWithStatus(DiscoveryStatus.NotDiscovered).Count; var partiallyDiscoveredCount = _dataAggregator.GetSourcesWithStatus(DiscoveryStatus.PartiallyDiscovered).Count; var fullyDiscoveredCount = _dataAggregator.GetSourcesWithStatus(DiscoveryStatus.FullyDiscovered).Count; - var expectedCount = _actualDiscoveryCriteria?.Sources.Count() ?? 
0; + var expectedCount = _availableTestSources; Debug.Assert(notDiscoveredCount + partiallyDiscoveredCount + fullyDiscoveredCount == expectedCount, $"Total count of sources ({expectedCount}) should match the count of sources with status not discovered ({notDiscoveredCount}), partially discovered ({partiallyDiscoveredCount}) and fully discovered ({fullyDiscoveredCount})."); #endif var allDiscoverersCompleted = false; + // TODO: Interlocked.Increment the count, and the condition below probably does not need to be in a lock? lock (_discoveryStatusLockObject) { // Each concurrent Executor calls this method @@ -141,13 +159,7 @@ public bool HandlePartialDiscoveryComplete(IProxyDiscoveryManager proxyDiscovery // Schedule the clean up for managers and handlers. if (allDiscoverersCompleted || IsAbortRequested) { - // Reset enumerators - _sourceEnumerator = null; - - _currentDiscoveryEventsHandler = null; - - // Dispose concurrent executors - UpdateParallelLevel(0); + _parallelOperationManager.StopAllManagers(); if (allDiscoverersCompleted) { @@ -161,57 +173,39 @@ public bool HandlePartialDiscoveryComplete(IProxyDiscoveryManager proxyDiscovery return true; } - // Discovery is not complete. - // Now when both.net framework and.net core projects can run in parallel we should clear - // manager and create new one for both cases. Otherwise 'proxyDiscoveryManager' instance - // is already closed by now and it will give exception when trying to do some operation - // on it. - EqtTrace.Verbose("ParallelProxyDiscoveryManager.HandlePartialDiscoveryComplete: Replace discovery manager. Shared: {0}, Aborted: {1}.", SharedHosts, isAborted); - - RemoveManager(proxyDiscoveryManager); - - if (_currentDiscoveryEventsHandler is null) - { - Debug.Assert(!TryFetchNextSource(_sourceEnumerator, out string nextSource), $"When discovery event handler is null, we should not have any more sources but we got '{nextSource}'."); - EqtTrace.Verbose("ParallelProxyDiscoveryManager.HandlePartialDiscoveryComplete: Skip adding more manager because discovery event handler is null."); - return false; - } - - proxyDiscoveryManager = CreateNewConcurrentManager(); - var parallelEventsHandler = new ParallelDiscoveryEventsHandler( - _requestData, - proxyDiscoveryManager, - _currentDiscoveryEventsHandler, - this, - _dataAggregator); - AddManager(proxyDiscoveryManager, parallelEventsHandler); - - // Second, let's attempt to trigger discovery for the next source. 
- DiscoverTestsOnConcurrentManager(proxyDiscoveryManager); + _parallelOperationManager.RunNextWork(proxyDiscoveryManager); return false; } #endregion - private void DiscoverTestsPrivate(ITestDiscoveryEventsHandler2 discoveryEventsHandler) + private List> SplitToWorkloads(DiscoveryCriteria discoveryCriteria, Dictionary sourceToTestHostProviderMap) { - _currentDiscoveryEventsHandler = discoveryEventsHandler; + List> workloads = new(); + foreach (var source in discoveryCriteria.Sources) + { + var testHostProviderInfo = sourceToTestHostProviderMap[source]; + var runsettingsXml = testHostProviderInfo.RunSettings; + var updatedDiscoveryCriteria = new ProviderSpecificWorkload(NewDiscoveryCriteriaFromSourceAndSettings(source, discoveryCriteria, runsettingsXml), testHostProviderInfo); + workloads.Add(updatedDiscoveryCriteria); + } - // Reset the discovery complete data - _discoveryCompletedClients = 0; + return workloads; - foreach (var concurrentManager in GetConcurrentManagerInstances()) + static DiscoveryCriteria NewDiscoveryCriteriaFromSourceAndSettings(string source, DiscoveryCriteria discoveryCriteria, string runsettingsXml) { - var parallelEventsHandler = new ParallelDiscoveryEventsHandler( - _requestData, - concurrentManager, - discoveryEventsHandler, - this, - _dataAggregator); - - UpdateHandlerForManager(concurrentManager, parallelEventsHandler); - DiscoverTestsOnConcurrentManager(concurrentManager); + var criteria = new DiscoveryCriteria( + new[] { source }, + discoveryCriteria.FrequencyOfDiscoveredTestsEvent, + discoveryCriteria.DiscoveredTestEventTimeout, + runsettingsXml, + discoveryCriteria.TestSessionInfo + ); + + criteria.TestCaseFilter = discoveryCriteria.TestCaseFilter; + + return criteria; } } @@ -220,46 +214,39 @@ private void DiscoverTestsPrivate(ITestDiscoveryEventsHandler2 discoveryEventsHa /// Each concurrent discoverer calls this method, once its completed working on previous data /// /// Proxy discovery manager instance. 
- private void DiscoverTestsOnConcurrentManager(IProxyDiscoveryManager proxyDiscoveryManager) + private void DiscoverTestsOnConcurrentManager(IProxyDiscoveryManager proxyDiscoveryManager, ITestDiscoveryEventsHandler2 eventHandler, DiscoveryCriteria discoveryCriteria) { - Debug.Assert(_actualDiscoveryCriteria is not null, "Discovery criteria is null, DiscoverTests should have been called before reaching this point."); - // Peek to see if we have sources to trigger a discovery - if (TryFetchNextSource(_sourceEnumerator, out string nextSource)) - { - EqtTrace.Verbose("ProxyParallelDiscoveryManager.DiscoverTestsOnConcurrentManager: Triggering test discovery for next source: {0}", nextSource); + // Kick off another discovery task for the next source + Task.Run(() => + { + EqtTrace.Verbose("ParallelProxyDiscoveryManager: Discovery started."); - // Kick off another discovery task for the next source - var discoveryCriteria = new DiscoveryCriteria(new[] { nextSource }, _actualDiscoveryCriteria.FrequencyOfDiscoveredTestsEvent, _actualDiscoveryCriteria.DiscoveredTestEventTimeout, _actualDiscoveryCriteria.RunSettings); - discoveryCriteria.TestCaseFilter = _actualDiscoveryCriteria.TestCaseFilter; - Task.Run(() => + proxyDiscoveryManager.Initialize(_skipDefaultAdapters); + proxyDiscoveryManager.DiscoverTests(discoveryCriteria, eventHandler); + }) + .ContinueWith(t => { - EqtTrace.Verbose("ParallelProxyDiscoveryManager.DiscoverTestsOnConcurrentManager: Discovery started."); - - proxyDiscoveryManager.DiscoverTests(discoveryCriteria, GetHandlerForGivenManager(proxyDiscoveryManager)); - }) - .ContinueWith(t => - { - // Just in case, the actual discovery couldn't start for an instance. Ensure that - // we call discovery complete since we have already fetched a source. Otherwise - // discovery will not terminate - EqtTrace.Error("ParallelProxyDiscoveryManager.DiscoverTestsOnConcurrentManager: Failed to trigger discovery. Exception: " + t.Exception); - - var handler = GetHandlerForGivenManager(proxyDiscoveryManager); - var testMessagePayload = new TestMessagePayload { MessageLevel = TestMessageLevel.Error, Message = t.Exception.ToString() }; - handler.HandleRawMessage(_dataSerializer.SerializePayload(MessageType.TestMessage, testMessagePayload)); - handler.HandleLogMessage(TestMessageLevel.Error, t.Exception.ToString()); - - // Send discovery complete. Similar logic is also used in ProxyDiscoveryManager.DiscoverTests. - // Differences: - // Total tests must be zero here since parallel discovery events handler adds the count - // Keep `lastChunk` as null since we don't want a message back to the IDE (discovery didn't even begin) - // Set `isAborted` as true since we want this instance of discovery manager to be replaced - var discoveryCompleteEventsArgs = new DiscoveryCompleteEventArgs(-1, true); - handler.HandleDiscoveryComplete(discoveryCompleteEventsArgs, null); - }, - TaskContinuationOptions.OnlyOnFaulted); - } + // Just in case, the actual discovery couldn't start for an instance. Ensure that + // we call discovery complete since we have already fetched a source. Otherwise + // discovery will not terminate + EqtTrace.Error("ParallelProxyDiscoveryManager: Failed to trigger discovery. 
Exception: " + t.Exception); + + var handler = eventHandler; + var testMessagePayload = new TestMessagePayload { MessageLevel = TestMessageLevel.Error, Message = t.Exception.ToString() }; + handler.HandleRawMessage(_dataSerializer.SerializePayload(MessageType.TestMessage, testMessagePayload)); + handler.HandleLogMessage(TestMessageLevel.Error, t.Exception.ToString()); + + // Send discovery complete. Similar logic is also used in ProxyDiscoveryManager.DiscoverTests. + // Differences: + // Total tests must be zero here since parallel discovery events handler adds the count + // Keep `lastChunk` as null since we don't want a message back to the IDE (discovery didn't even begin) + // Set `isAborted` as true since we want this instance of discovery manager to be replaced + // TODO: the comment above mentions 0 tests but sends -1. Make sense of this. + var discoveryCompleteEventsArgs = new DiscoveryCompleteEventArgs(-1, true); + handler.HandleDiscoveryComplete(discoveryCompleteEventsArgs, null); + }, + TaskContinuationOptions.OnlyOnFaulted); EqtTrace.Verbose("ProxyParallelDiscoveryManager.DiscoverTestsOnConcurrentManager: No sources available for discovery."); } diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyExecutionManager.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyExecutionManager.cs index 073ba8b21a..682f8ace01 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyExecutionManager.cs +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/Parallel/ParallelProxyExecutionManager.cs @@ -2,7 +2,6 @@ // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; -using System.Collections; using System.Collections.Generic; using System.Collections.ObjectModel; using System.Linq; @@ -27,9 +26,11 @@ namespace Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.Parallel; /// /// ParallelProxyExecutionManager that manages parallel execution /// -internal class ParallelProxyExecutionManager : ParallelOperationManager, IParallelProxyExecutionManager +internal class ParallelProxyExecutionManager : IParallelProxyExecutionManager { private readonly IDataSerializer _dataSerializer; + private readonly ParallelOperationManager _parallelOperationManager; + private readonly Dictionary _sourceToTestHostProviderMap; #region TestRunSpecificData @@ -38,17 +39,7 @@ internal class ParallelProxyExecutionManager : ParallelOperationManager _sourceEnumerator; - - private IEnumerator _testCaseListEnumerator; - - private bool _hasSpecificTestsRun; - - private ITestRunEventsHandler _currentRunEventsHandler; + private int _availableWorkloads = -1; private ParallelRunDataAggregator _currentRunDataAggregator; @@ -69,102 +60,73 @@ internal class ParallelProxyExecutionManager : ParallelOperationManager actualProxyManagerCreator, int parallelLevel) - : this(requestData, actualProxyManagerCreator, JsonDataSerializer.Instance, parallelLevel, true) + public ParallelProxyExecutionManager( + IRequestData requestData, + Func actualProxyManagerCreator, + int parallelLevel, + List testHostProviders) + : this(requestData, actualProxyManagerCreator, JsonDataSerializer.Instance, parallelLevel, testHostProviders) { } - public ParallelProxyExecutionManager(IRequestData requestData, Func actualProxyManagerCreator, int parallelLevel, bool sharedHosts) - : this(requestData, actualProxyManagerCreator, JsonDataSerializer.Instance, parallelLevel, sharedHosts) - { - } - - internal 
ParallelProxyExecutionManager(IRequestData requestData, Func actualProxyManagerCreator, IDataSerializer dataSerializer, int parallelLevel, bool sharedHosts) - : base(actualProxyManagerCreator, parallelLevel, sharedHosts) + internal ParallelProxyExecutionManager( + IRequestData requestData, + Func actualProxyManagerCreator, + IDataSerializer dataSerializer, + int parallelLevel, + List testHostProviders) { _requestData = requestData; _dataSerializer = dataSerializer; + _parallelOperationManager = new(actualProxyManagerCreator, parallelLevel); + _sourceToTestHostProviderMap = testHostProviders + .SelectMany(provider => provider.SourceDetails.Select(s => new KeyValuePair(s.Source, provider))) + .ToDictionary(pair => pair.Key, pair => pair.Value); } - #region IProxyExecutionManager - public void Initialize(bool skipDefaultAdapters) { _skipDefaultAdapters = skipDefaultAdapters; - DoActionOnAllManagers((proxyManager) => proxyManager.Initialize(skipDefaultAdapters), doActionsInParallel: true); - IsInitialized = true; } public int StartTestRun(TestRunCriteria testRunCriteria, ITestRunEventsHandler eventHandler) { - _hasSpecificTestsRun = testRunCriteria.HasSpecificTests; - _actualTestRunCriteria = testRunCriteria; + var workloads = SplitToWorkloads(testRunCriteria, _sourceToTestHostProviderMap); + _availableWorkloads = workloads.Count; - if (_hasSpecificTestsRun) - { - var testCasesBySource = new Dictionary>(); - foreach (var test in testRunCriteria.Tests) - { - if (!testCasesBySource.ContainsKey(test.Source)) - { - testCasesBySource.Add(test.Source, new List()); - } + EqtTrace.Verbose("ParallelProxyExecutionManager: Start execution. Total sources: " + _availableWorkloads); - testCasesBySource[test.Source].Add(test); - } + // Reset the run complete data + _runCompletedClients = 0; - // Do not use "Dictionary.ValueCollection.Enumerator" - it becomes nondeterministic once we go out of scope of this method - // Use "ToArray" to copy ValueColleciton to a simple array and use it's enumerator - // Set the enumerator for parallel yielding of testCases - // Whenever a concurrent executor becomes free, it picks up the next set of testCases using this enumerator - var testCaseLists = testCasesBySource.Values.ToArray(); - _testCaseListEnumerator = testCaseLists.GetEnumerator(); - _availableTestSources = testCaseLists.Length; - } - else - { - // Set the enumerator for parallel yielding of sources - // Whenever a concurrent executor becomes free, it picks up the next source using this enumerator - _sourceEnumerator = testRunCriteria.Sources.GetEnumerator(); - _availableTestSources = testRunCriteria.Sources.Count(); - } + // One data aggregator per parallel run + _currentRunDataAggregator = new ParallelRunDataAggregator(testRunCriteria.TestRunSettings); - EqtTrace.Verbose("ParallelProxyExecutionManager: Start execution. Total sources: " + _availableTestSources); + _parallelOperationManager.StartWork(workloads, eventHandler, GetParallelEventHandler, StartTestRunOnConcurrentManager); - return StartTestRunPrivate(eventHandler); + // Why 1? Because this is supposed to be a processId, and that is just the default that was chosen by someone before me, + // and maybe is checked somewhere, but I don't see it checked in our codebase. + return 1; } public void Abort(ITestRunEventsHandler runEventsHandler) { // Test platform initiated abort. 
_abortRequested = true; - DoActionOnAllManagers((proxyManager) => proxyManager.Abort(runEventsHandler), doActionsInParallel: true); + _parallelOperationManager.DoActionOnAllManagers((proxyManager) => proxyManager.Abort(runEventsHandler), doActionsInParallel: true); } public void Cancel(ITestRunEventsHandler runEventsHandler) { - DoActionOnAllManagers((proxyManager) => proxyManager.Cancel(runEventsHandler), doActionsInParallel: true); + _parallelOperationManager.DoActionOnAllManagers((proxyManager) => proxyManager.Cancel(runEventsHandler), doActionsInParallel: true); } public void Close() { - DoActionOnAllManagers(proxyManager => proxyManager.Close(), doActionsInParallel: true); + _parallelOperationManager.DoActionOnAllManagers(proxyManager => proxyManager.Close(), doActionsInParallel: true); } - #endregion - - #region IParallelProxyExecutionManager methods - - /// - /// Handles Partial Run Complete event coming from a specific concurrent proxy execution manager - /// Each concurrent proxy execution manager will signal the parallel execution manager when its complete - /// - /// Concurrent Execution manager that completed the run - /// RunCompleteArgs for the concurrent run - /// LastChunk testresults for the concurrent run - /// RunAttachments for the concurrent run - /// ExecutorURIs of the adapters involved in executing the tests - /// True if parallel run is complete + /// public bool HandlePartialRunComplete( IProxyExecutionManager proxyExecutionManager, TestRunCompleteEventArgs testRunCompleteArgs, @@ -173,6 +135,7 @@ public bool HandlePartialRunComplete( ICollection executorUris) { var allRunsCompleted = false; + // TODO: Interlocked.Increment _runCompletedClients, and the condition on the bottom probably does not need to be under lock?? lock (_executionStatusLockObject) { // Each concurrent Executor calls this method @@ -181,69 +144,137 @@ public bool HandlePartialRunComplete( allRunsCompleted = testRunCompleteArgs.IsCanceled || _abortRequested ? _runCompletedClients == _runStartedClients - : _runCompletedClients == _availableTestSources; + : _runCompletedClients == _availableWorkloads; EqtTrace.Verbose("ParallelProxyExecutionManager: HandlePartialRunComplete: Total completed clients = {0}, Run complete = {1}, Run canceled: {2}.", _runCompletedClients, allRunsCompleted, testRunCompleteArgs.IsCanceled); } - // verify that all executors are done with the execution and there are no more sources/testcases to execute if (allRunsCompleted) { - // Reset enumerators - _sourceEnumerator = null; - _testCaseListEnumerator = null; - - _currentRunDataAggregator = null; - _currentRunEventsHandler = null; - - // Dispose concurrent executors - // Do not do the cleanup task in the current thread as we will unnecessarily add to execution time - UpdateParallelLevel(0); - + _parallelOperationManager.StopAllManagers(); return true; } - - EqtTrace.Verbose("ParallelProxyExecutionManager: HandlePartialRunComplete: Replace execution manager. Shared: {0}, Aborted: {1}.", SharedHosts, testRunCompleteArgs.IsAborted); - - RemoveManager(proxyExecutionManager); - proxyExecutionManager = CreateNewConcurrentManager(); - var parallelEventsHandler = GetEventsHandler(proxyExecutionManager); - AddManager(proxyExecutionManager, parallelEventsHandler); - // If cancel is triggered for any one run or abort is requested by test platform, there is no reason to fetch next source - // and queue another test run + // and queue another test run. 
if (!testRunCompleteArgs.IsCanceled && !_abortRequested) { - StartTestRunOnConcurrentManager(proxyExecutionManager); + // Do NOT return true here, there should be only one place where this method returns true, + // and cancellation or success or any other combination or timing should result in only one true. + // This is largely achieved by returning true above when "allRunsCompleted" is true. That variable is true + // when we cancel all sources or when we complete all sources. + // + // But we can also start a source, and cancel right after, which will remove all managers, and RunNextWork returns + // false, because we had no more work to do. If we check that result here and return true, then the whole logic is + // broken and we end up calling RunComplete handlers twice and writing logger output to screen twice. So don't do it. + // var hadMoreWork = _parallelOperationManager.RunNextWork(proxyExecutionManager); + // if (!hadMoreWork) + // { + // return true; + // } + var _ = _parallelOperationManager.RunNextWork(proxyExecutionManager); } return false; } - #endregion - - private int StartTestRunPrivate(ITestRunEventsHandler runEventsHandler) + /// + /// Split the incoming work into smaller workloads that we can run on different testhosts. + /// Each workload is associated with a type of provider that can run it. + /// + /// + /// + /// + private List> SplitToWorkloads(TestRunCriteria testRunCriteria, Dictionary sourceToTestHostProviderMap) { - _currentRunEventsHandler = runEventsHandler; + // We split the work into workloads that will run on each testhost, and add all of them + // to a bag of work that needs to be processed. (The workloads are just + // a single source, or all test cases for a given source.) + // + // For every workload we associate a given type of testhost that can run the work. + // This is important when we have shared testhosts. A shared testhost can re-use the same process + // to run more than one workload, as long as the provider is the same. + // + // We then start as many instances of testhost as we are allowed by parallel level, + // and we start sending them work. Once any testhost is done processing a given workload, + // we will get notified with the completed event for the work we are doing. For example for StartTestRun + // we will get TestExecutionCompleted, and we will call HandlePartialTestExecutionComplete. + // (The "partial" here refers to possibly having more work in the work bag. It does not mean that + // there was an error in the testhost and we only did part of the execution.) + // + // At that point we know that at least one testhost is not busy doing work anymore. It either + // processed the workload and waits for another one, or it crashed and we should move to + // another source. + // + // In the "partial" step we check if we have more workloads, and if the currently running testhost + // is shared we try to find a workload that is appropriate for it. If we don't find any work that the + // running testhost can do, or if the testhost already exited (possibly because of a crash), we start another one + // and give it the next workload. + List> workloads = new(); + if (testRunCriteria.HasSpecificTests) + { + // We split test cases to their respective sources, and associate them with additional info about + // which type of provider they can run on so we can later select the correct workload for the provider + // if we already have a shared provider running that can take more sources.
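As a rough illustration of the splitting described above, with hypothetical sources and simplified stand-ins rather than the real TestRunCriteria and ProviderSpecificWorkload types:

using System.Collections.Generic;
using System.Linq;

// Hypothetical inputs: two .NET Framework sources that resolve to a shared testhost type,
// and one .NET source that resolves to a non-shared testhost type.
var sourceToProvider = new Dictionary<string, (string Provider, bool Shared)>
{
    ["a.net472.dll"] = (Provider: ".NET Framework testhost", Shared: true),
    ["b.net472.dll"] = (Provider: ".NET Framework testhost", Shared: true),
    ["c.netcore.dll"] = (Provider: ".NET testhost", Shared: false),
};

// One workload per source, each remembering which provider type can run it, so a shared
// testhost of the matching type can later pick up more than one of these workloads.
var workloads = sourceToProvider
    .Select(pair => (Source: pair.Key, pair.Value.Provider, pair.Value.Shared))
    .ToList();

// Result: a.net472.dll and b.net472.dll may end up on the same shared testhost,
// while c.netcore.dll always needs its own testhost instance.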
+ var testCasesPerSource = testRunCriteria.Tests.GroupBy(t => t.Source); + foreach (var group in testCasesPerSource) + { + var testHostProviderInfo = sourceToTestHostProviderMap[group.Key]; + var runsettings = testHostProviderInfo.RunSettings; + // ToList because it is easier to see what is going on when debugging. + var testCases = group.ToList(); + var updatedCriteria = CreateTestRunCriteriaFromTestCasesAndSettings(testCases, testRunCriteria, runsettings); + var workload = new ProviderSpecificWorkload(updatedCriteria, testHostProviderInfo); + workloads.Add(workload); + } - // Reset the run complete data - _runCompletedClients = 0; + } + else + { + // We associate every source with additional info about on which type of provider it can run so we can later + // select the correct workload for the provider if we already have a provider running, and it is shared. + foreach (var source in testRunCriteria.Sources) + { + var testHostProviderInfo = sourceToTestHostProviderMap[source]; + var runsettings = testHostProviderInfo.RunSettings; + var updatedCriteria = CreateTestRunCriteriaFromSourceAndSettings(new[] { source }, testRunCriteria, runsettings); + var workload = new ProviderSpecificWorkload(updatedCriteria, testHostProviderInfo); + workloads.Add(workload); + } + } - // One data aggregator per parallel run - _currentRunDataAggregator = new ParallelRunDataAggregator(_actualTestRunCriteria.TestRunSettings); + return workloads; - foreach (var concurrentManager in GetConcurrentManagerInstances()) + TestRunCriteria CreateTestRunCriteriaFromTestCasesAndSettings(IEnumerable testCases, TestRunCriteria criteria, string runsettingsXml) { - var parallelEventsHandler = GetEventsHandler(concurrentManager); - UpdateHandlerForManager(concurrentManager, parallelEventsHandler); - StartTestRunOnConcurrentManager(concurrentManager); + return new TestRunCriteria( + testCases, + testRunCriteria.FrequencyOfRunStatsChangeEvent, + testRunCriteria.KeepAlive, + runsettingsXml, + testRunCriteria.RunStatsChangeEventTimeout, + testRunCriteria.TestHostLauncher, + testRunCriteria.TestSessionInfo, + testRunCriteria.DebugEnabledForTestSession); } - return 1; + TestRunCriteria CreateTestRunCriteriaFromSourceAndSettings(IEnumerable sources, TestRunCriteria criteria, string runsettingsXml) + { + return new TestRunCriteria( + sources, + testRunCriteria.FrequencyOfRunStatsChangeEvent, + testRunCriteria.KeepAlive, + runsettingsXml, + testRunCriteria.RunStatsChangeEventTimeout, + testRunCriteria.TestHostLauncher, + testRunCriteria.TestCaseFilter, + testRunCriteria.FilterOptions, + testRunCriteria.TestSessionInfo, + testRunCriteria.DebugEnabledForTestSession); + } } - private ParallelRunEventsHandler GetEventsHandler(IProxyExecutionManager concurrentManager) + private ParallelRunEventsHandler GetParallelEventHandler(ITestRunEventsHandler eventHandler, IProxyExecutionManager concurrentManager) { if (concurrentManager is ProxyExecutionManagerWithDataCollection) { @@ -253,7 +284,7 @@ private ParallelRunEventsHandler GetEventsHandler(IProxyExecutionManager concurr return new ParallelDataCollectionEventsHandler( _requestData, concurrentManagerWithDataCollection, - _currentRunEventsHandler, + eventHandler, this, _currentRunDataAggregator, attachmentsProcessingManager, @@ -263,7 +294,7 @@ private ParallelRunEventsHandler GetEventsHandler(IProxyExecutionManager concurr return new ParallelRunEventsHandler( _requestData, concurrentManager, - _currentRunEventsHandler, + eventHandler, this, _currentRunDataAggregator); } @@ -274,26 +305,8 @@ 
private ParallelRunEventsHandler GetEventsHandler(IProxyExecutionManager concurr /// /// Proxy execution manager instance. /// True, if execution triggered - private void StartTestRunOnConcurrentManager(IProxyExecutionManager proxyExecutionManager) + private void StartTestRunOnConcurrentManager(IProxyExecutionManager proxyExecutionManager, ITestRunEventsHandler eventHandler, TestRunCriteria testRunCriteria) { - TestRunCriteria testRunCriteria = null; - if (!_hasSpecificTestsRun) - { - if (TryFetchNextSource(_sourceEnumerator, out string nextSource)) - { - EqtTrace.Info("ProxyParallelExecutionManager: Triggering test run for next source: {0}", nextSource); - testRunCriteria = new TestRunCriteria(new[] { nextSource }, _actualTestRunCriteria); - } - } - else - { - if (TryFetchNextSource(_testCaseListEnumerator, out List nextSetOfTests)) - { - EqtTrace.Info("ProxyParallelExecutionManager: Triggering test run for next source: {0}", nextSetOfTests?.FirstOrDefault()?.Source); - testRunCriteria = new TestRunCriteria(nextSetOfTests, _actualTestRunCriteria); - } - } - if (testRunCriteria != null) { if (!proxyExecutionManager.IsInitialized) @@ -306,7 +319,7 @@ private void StartTestRunOnConcurrentManager(IProxyExecutionManager proxyExecuti Interlocked.Increment(ref _runStartedClients); EqtTrace.Verbose("ParallelProxyExecutionManager: Execution started. Started clients: " + _runStartedClients); - proxyExecutionManager.StartTestRun(testRunCriteria, GetHandlerForGivenManager(proxyExecutionManager)); + proxyExecutionManager.StartTestRun(testRunCriteria, eventHandler); }) .ContinueWith(t => { @@ -315,7 +328,7 @@ private void StartTestRunOnConcurrentManager(IProxyExecutionManager proxyExecuti // execution will not terminate EqtTrace.Error("ParallelProxyExecutionManager: Failed to trigger execution. Exception: " + t.Exception); - var handler = GetHandlerForGivenManager(proxyExecutionManager); + var handler = eventHandler; var testMessagePayload = new TestMessagePayload { MessageLevel = TestMessageLevel.Error, Message = t.Exception.ToString() }; handler.HandleRawMessage(_dataSerializer.SerializePayload(MessageType.TestMessage, testMessagePayload)); handler.HandleLogMessage(TestMessageLevel.Error, t.Exception.ToString()); @@ -334,3 +347,21 @@ private void StartTestRunOnConcurrentManager(IProxyExecutionManager proxyExecuti EqtTrace.Verbose("ProxyParallelExecutionManager: No sources available for execution."); } } + +/// +/// A workload with a specification of a provider that can run that workload. The workload is a list of sources, +/// or a list of testcases. Provider is a testhost manager, that is capable of running this workload, so +/// we end up running .NET sources on .NET testhost, and .NET Framework sources on .NET Framework testhost. 
+/// +internal class ProviderSpecificWorkload +{ + public T Work { get; } + + public TestRuntimeProviderInfo Provider { get; protected set; } + + public ProviderSpecificWorkload(T work, TestRuntimeProviderInfo provider) + { + Provider = provider; + Work = work; + } +} diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/ProxyDiscoveryManager.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/ProxyDiscoveryManager.cs index 9fe0bfae09..b361cc9934 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/ProxyDiscoveryManager.cs +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/ProxyDiscoveryManager.cs @@ -132,6 +132,7 @@ public void DiscoverTests(DiscoveryCriteria discoveryCriteria, ITestDiscoveryEve if (_proxyOperationManager == null) { + // Passing only first because that is how the testhost pool is keyed. _proxyOperationManager = _proxyOperationManagerCreator(discoverySources[0], this); _testHostManager = _proxyOperationManager.TestHostManager; diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/Client/TestRuntimeProviderInfo.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/TestRuntimeProviderInfo.cs new file mode 100644 index 0000000000..1369e89f55 --- /dev/null +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/Client/TestRuntimeProviderInfo.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +using System; +using System.Collections.Generic; + +using Microsoft.VisualStudio.TestPlatform.ObjectModel; + +namespace Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; + +public class TestRuntimeProviderInfo +{ + public Type Type { get; } + public bool Shared { get; } + public string RunSettings { get; } + public List SourceDetails { get; } + + public TestRuntimeProviderInfo(Type type, bool shared, string runSettings, List sourceDetails) + { + Type = type; + Shared = shared; + RunSettings = runSettings; + SourceDetails = sourceDetails; + } +} diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/PublicAPI/PublicAPI.Shipped.txt b/src/Microsoft.TestPlatform.CrossPlatEngine/PublicAPI/PublicAPI.Shipped.txt index 0bbca9f3a6..cec5b51cbd 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/PublicAPI/PublicAPI.Shipped.txt +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/PublicAPI/PublicAPI.Shipped.txt @@ -86,13 +86,9 @@ Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Execution.ExecutionManager.I Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Execution.ExecutionManager.StartTestRun(System.Collections.Generic.Dictionary> adapterSourceMap, string package, string runSettings, Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ClientProtocol.TestExecutionContext testExecutionContext, Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestCaseEventsHandler testCaseEventsHandler, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestRunEventsHandler runEventsHandler) -> void Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Execution.ExecutionManager.StartTestRun(System.Collections.Generic.IEnumerable tests, string package, string runSettings, Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ClientProtocol.TestExecutionContext testExecutionContext, Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestCaseEventsHandler testCaseEventsHandler, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestRunEventsHandler runEventsHandler) -> void 
Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.ProxyTestSessionManager -Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.ProxyTestSessionManager.ProxyTestSessionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria criteria, int testhostCount, System.Func proxyCreator) -> void Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine -Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetDiscoveryManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Host.ITestRuntimeProvider testHostManager, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria discoveryCriteria) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyDiscoveryManager -Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetExecutionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Host.ITestRuntimeProvider testHostManager, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCriteria testRunCriteria) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyExecutionManager Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetExtensionManager() -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestExtensionManager Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetLoggerManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.ITestLoggerManager -Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetTestSessionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria testSessionCriteria) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyTestSessionManager Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.TestEngine() -> void Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.TestEngine(Microsoft.VisualStudio.TestPlatform.Common.Hosting.TestRuntimeProviderManager testHostProviderManager, Microsoft.VisualStudio.TestPlatform.PlatformAbstractions.Interfaces.IProcessHelper processHelper) -> void Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestExtensionManager @@ -130,3 +126,14 @@ virtual Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.ProxyTestSessionMana virtual Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.ProxyTestSessionManager.StopSession(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData) -> bool Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.ProxyDiscoveryManager.Abort(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestDiscoveryEventsHandler2 eventHandler) -> void Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Discovery.DiscoveryManager.Abort(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestDiscoveryEventsHandler2 eventHandler) -> void +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.TestRuntimeProviderInfo +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.TestRuntimeProviderInfo.RunSettings.get -> string +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.TestRuntimeProviderInfo.Shared.get -> bool +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.TestRuntimeProviderInfo.SourceDetails.get -> System.Collections.Generic.List 
+Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.TestRuntimeProviderInfo.TestRuntimeProviderInfo(System.Type type, bool shared, string runSettings, System.Collections.Generic.List sourceDetails) -> void +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.TestRuntimeProviderInfo.Type.get -> System.Type +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetDiscoveryManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria discoveryCriteria, System.Collections.Generic.IDictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyDiscoveryManager +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetExecutionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCriteria testRunCriteria, System.Collections.Generic.IDictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyExecutionManager +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestEngine.GetTestSessionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria testSessionCriteria, System.Collections.Generic.IDictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine.IProxyTestSessionManager +Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.ProxyTestSessionManager.ProxyTestSessionManager(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria criteria, int maxTesthostCount, System.Func proxyCreator, System.Collections.Generic.List runtimeProviders) -> void + diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/TestEngine.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/TestEngine.cs index 20c85c3f2d..f86cae546d 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/TestEngine.cs +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/TestEngine.cs @@ -5,6 +5,7 @@ using System.Collections.Generic; using System.Globalization; using System.Linq; +using System.Text; using Microsoft.VisualStudio.TestPlatform.Common; using Microsoft.VisualStudio.TestPlatform.Common.Hosting; @@ -61,28 +62,47 @@ internal TestEngine( /// public IProxyDiscoveryManager GetDiscoveryManager( IRequestData requestData, - ITestRuntimeProvider testHostManager, - DiscoveryCriteria discoveryCriteria) + DiscoveryCriteria discoveryCriteria, + IDictionary sourceToSourceDetailMap) { + // Parallel level determines how many processes at most we should start at the same time. We take the number from settings, and if user + // has no preference or the preference is 0 then we use the number of logical processors. Or the number of sources, whatever is lower. + // We don't know for sure if we will start that many processes as some of the sources can run in a single testhost. This is determined by + // Shared on the test runtime provider. At this point we need to know only if the parallel level is more than 1, and so if we will do parallel + // run or not. var parallelLevel = VerifyParallelSettingAndCalculateParallelLevel( discoveryCriteria.Sources.Count(), discoveryCriteria.RunSettings); + var isParallelRun = parallelLevel > 1; + // Collecting IsParallel enabled. - requestData.MetricsCollection.Add( - TelemetryDataConstants.ParallelEnabledDuringDiscovery, - parallelLevel > 1 ? 
"True" : "False"); - requestData.MetricsCollection.Add( - TelemetryDataConstants.TestSessionId, - discoveryCriteria.TestSessionInfo?.Id.ToString() ?? string.Empty); + requestData.MetricsCollection.Add(TelemetryDataConstants.ParallelEnabledDuringDiscovery, isParallelRun ? "True" : "False"); + requestData.MetricsCollection.Add(TelemetryDataConstants.TestSessionId, discoveryCriteria.TestSessionInfo?.Id.ToString() ?? string.Empty); + + // Get testhost managers by configuration, and either use it for in-process run. or for single source run. + List testHostManagers = GetTestRuntimeProvidersForUniqueConfigurations(discoveryCriteria.RunSettings, sourceToSourceDetailMap, out ITestRuntimeProvider testHostManager); - if (ShouldRunInNoIsolation(discoveryCriteria.RunSettings, parallelLevel > 1, false)) + // This is a big if that figures out if we can run in process. In process run is very restricted, it is non-parallel run + // that has the same target framework as the current process, and it also must not be running in DesignMode (server mode / under IDE) + // and more conditions. In all other cases we run in a separate testhost process. + if (ShouldRunInProcess(discoveryCriteria.RunSettings, isParallelRun, isDataCollectorEnabled: false, testHostManagers)) { + // We are running in process, so whatever the architecture and framework that was figured out is, it must be compatible. If we have more + // changes that we want to do to runsettings in the future, based on SourceDetail then it will depend on those details. But in general + // we will have to check that all source details are the same. Otherwise we for sure cannot run in process. + // E.g. if we get list of sources where one of them has different architecture we for sure cannot run in process, because the current + // process can handle only single runsettings. + if (testHostManagers.Count != 1) + { + throw new InvalidOperationException($"Exactly 1 testhost manager must be provided when running in process, but there {testHostManagers.Count} were provided."); + } + var testHostManagerInfo = testHostManagers[0]; + testHostManager.Initialize(TestSessionMessageLogger.Instance, testHostManagerInfo.RunSettings); + var isTelemetryOptedIn = requestData.IsTelemetryOptedIn; var newRequestData = GetRequestData(isTelemetryOptedIn); - return new InProcessProxyDiscoveryManager( - testHostManager, - new TestHostManagerFactory(newRequestData)); + return new InProcessProxyDiscoveryManager(testHostManager, new TestHostManagerFactory(newRequestData)); } // Create one data aggregator per parallel discovery and share it with all the proxy discovery managers. @@ -91,10 +111,13 @@ public IProxyDiscoveryManager GetDiscoveryManager( // discovery manager to publish its current state. But doing so we are losing the collected state of all the // other managers. 
var discoveryDataAggregator = new DiscoveryDataAggregator(); - Func proxyDiscoveryManagerCreator = () => + Func proxyDiscoveryManagerCreator = runtimeProviderInfo => { - var hostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(discoveryCriteria.RunSettings); - hostManager?.Initialize(TestSessionMessageLogger.Instance, discoveryCriteria.RunSettings); + var sources = runtimeProviderInfo.SourceDetails.Select(r => r.Source).ToList(); + var hostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(runtimeProviderInfo.RunSettings, sources); + hostManager?.Initialize(TestSessionMessageLogger.Instance, runtimeProviderInfo.RunSettings); + + ThrowExceptionIfTestHostManagerIsNull(hostManager, runtimeProviderInfo.RunSettings); // This function is used to either take a pre-existing proxy operation manager from // the test pool or to create a new proxy operation manager on the spot. @@ -108,7 +131,7 @@ public IProxyDiscoveryManager GetDiscoveryManager( var proxyOperationManager = TestSessionPool.Instance.TryTakeProxy( discoveryCriteria.TestSessionInfo, source, - discoveryCriteria.RunSettings); + runtimeProviderInfo.RunSettings); if (proxyOperationManager == null) { @@ -144,43 +167,54 @@ public IProxyDiscoveryManager GetDiscoveryManager( discoveryDataAggregator); }; - return (parallelLevel > 1 || !testHostManager.Shared) - ? new ParallelProxyDiscoveryManager( - requestData, - proxyDiscoveryManagerCreator, - discoveryDataAggregator, - parallelLevel, - sharedHosts: testHostManager.Shared) - : proxyDiscoveryManagerCreator(); + return new ParallelProxyDiscoveryManager(requestData, proxyDiscoveryManagerCreator, discoveryDataAggregator, parallelLevel, testHostManagers); } /// public IProxyExecutionManager GetExecutionManager( IRequestData requestData, - ITestRuntimeProvider testHostManager, - TestRunCriteria testRunCriteria) + TestRunCriteria testRunCriteria, + IDictionary sourceToSourceDetailMap) { + // We use mulitple "different" runsettings here. We have runsettings that come with the testRunCriteria, + // and we use that to figure out the common stuff before we try to setup the run. Later we patch the settings + // from the additional details that were passed. Those should not affect the common properties that are used for setup. + // Right now the only two things that change there are the architecture and framework so we can mix them in a single run. var distinctSources = GetDistinctNumberOfSources(testRunCriteria); - var parallelLevel = VerifyParallelSettingAndCalculateParallelLevel( - distinctSources, - testRunCriteria.TestRunSettings); + var parallelLevel = VerifyParallelSettingAndCalculateParallelLevel(distinctSources, testRunCriteria.TestRunSettings); + + // See comments in GetDiscoveryManager for more info about what is happening in this method. + var isParallelRun = parallelLevel > 1; // Collecting IsParallel enabled. - requestData.MetricsCollection.Add( - TelemetryDataConstants.ParallelEnabledDuringExecution, - parallelLevel > 1 ? "True" : "False"); - requestData.MetricsCollection.Add( - TelemetryDataConstants.TestSessionId, - testRunCriteria.TestSessionInfo?.Id.ToString() ?? string.Empty); + requestData.MetricsCollection.Add(TelemetryDataConstants.ParallelEnabledDuringExecution, isParallelRun ? "True" : "False"); + requestData.MetricsCollection.Add(TelemetryDataConstants.TestSessionId, testRunCriteria.TestSessionInfo?.Id.ToString() ?? 
string.Empty); var isDataCollectorEnabled = XmlRunSettingsUtilities.IsDataCollectionEnabled(testRunCriteria.TestRunSettings); var isInProcDataCollectorEnabled = XmlRunSettingsUtilities.IsInProcDataCollectionEnabled(testRunCriteria.TestRunSettings); - if (ShouldRunInNoIsolation( + var testHostProviders = GetTestRuntimeProvidersForUniqueConfigurations(testRunCriteria.TestRunSettings, sourceToSourceDetailMap, out ITestRuntimeProvider testHostManager); + + if (ShouldRunInProcess( testRunCriteria.TestRunSettings, - parallelLevel > 1, - isDataCollectorEnabled || isInProcDataCollectorEnabled)) + isParallelRun, + isDataCollectorEnabled || isInProcDataCollectorEnabled, + testHostProviders)) { + // Not updating runsettings from source detail on purpose here. We are running in process, so whatever the settings we figured out at the start. They must be compatible + // with the current process, otherwise we would not be able to run inside of the current process. + // + // We know that we only have a single testHostManager here, because we figure that out in ShouldRunInProcess. + ThrowExceptionIfTestHostManagerIsNull(testHostManager, testRunCriteria.TestRunSettings); + + testHostManager.Initialize(TestSessionMessageLogger.Instance, testRunCriteria.TestRunSettings); + + // NOTE: The custom launcher should not be set when we have test session info available. + if (testRunCriteria.TestHostLauncher != null) + { + testHostManager.SetCustomLauncher(testRunCriteria.TestHostLauncher); + } + var isTelemetryOptedIn = requestData.IsTelemetryOptedIn; var newRequestData = GetRequestData(isTelemetryOptedIn); return new InProcessProxyExecutionManager( @@ -188,134 +222,140 @@ public IProxyExecutionManager GetExecutionManager( new TestHostManagerFactory(newRequestData)); } + // This creates a single non-parallel execution manager, based requestData, isDataCollectorEnabled and the + // overall testRunCriteria. The overall testRunCriteria are split to smaller pieces (e.g. each source from the overall + // testRunCriteria) so we can run them in parallel, and those are then passed to those non-parallel execution managers. + // + // The function below grabs most of the parameter via closure from the local context, + // but gets the runtime provider later, because that is specific info to the source (or sources) it will be running. + // This creator does not get those smaller pieces of testRunCriteria, those come later when we call a method on + // the non-parallel execution manager we create here. E.g. StartTests(). + Func proxyExecutionManagerCreator = runtimeProviderInfo => + CreateNonParallelExecutionManager(requestData, testRunCriteria, isDataCollectorEnabled, runtimeProviderInfo); + + var executionManager = new ParallelProxyExecutionManager(requestData, proxyExecutionManagerCreator, parallelLevel, testHostProviders); + + EqtTrace.Verbose($"TestEngine.GetExecutionManager: Chosen execution manager '{executionManager.GetType().AssemblyQualifiedName}' ParallelLevel '{parallelLevel}'."); + + return executionManager; + } + + // This is internal so tests can use it. + internal IProxyExecutionManager CreateNonParallelExecutionManager(IRequestData requestData, TestRunCriteria testRunCriteria, bool isDataCollectorEnabled, TestRuntimeProviderInfo runtimeProviderInfo) + { // SetupChannel ProxyExecutionManager with data collection if data collectors are // specified in run settings. 
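A small sketch of the factory shape described above: the creator closes over the request-wide state and only receives the provider-specific info later, when the parallel manager asks for a fresh non-parallel executor. All types here are simplified stand-ins, not the production signatures:

using System;

// Stand-ins for IRequestData / TestRunCriteria / TestRuntimeProviderInfo / IProxyExecutionManager.
internal record FakeRequest(string Name);
internal record FakeCriteria(string RunSettings);
internal record FakeProviderInfo(string RunSettings);
internal interface IFakeExecutor { }
internal class FakeExecutor : IFakeExecutor
{
    public FakeExecutor(FakeRequest requestData, FakeCriteria testRunCriteria, FakeProviderInfo providerInfo) { }
}

internal static class CreatorSketch
{
    // requestData and testRunCriteria are captured by the closure; the provider-specific
    // info is supplied per workload when a new executor is needed.
    public static Func<FakeProviderInfo, IFakeExecutor> MakeCreator(FakeRequest requestData, FakeCriteria testRunCriteria) =>
        providerInfo => new FakeExecutor(requestData, testRunCriteria, providerInfo);
}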
- Func proxyExecutionManagerCreator = () => + // Create a new host manager, to be associated with individual + // ProxyExecutionManager(&POM) + var sources = runtimeProviderInfo.SourceDetails.Select(r => r.Source).ToList(); + var hostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(runtimeProviderInfo.RunSettings, sources); + ThrowExceptionIfTestHostManagerIsNull(hostManager, runtimeProviderInfo.RunSettings); + hostManager.Initialize(TestSessionMessageLogger.Instance, runtimeProviderInfo.RunSettings); + + if (testRunCriteria.TestHostLauncher != null) { - // Create a new host manager, to be associated with individual - // ProxyExecutionManager(&POM) - var hostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(testRunCriteria.TestRunSettings); - hostManager?.Initialize(TestSessionMessageLogger.Instance, testRunCriteria.TestRunSettings); + hostManager.SetCustomLauncher(testRunCriteria.TestHostLauncher); + } - if (testRunCriteria.TestHostLauncher != null) - { - hostManager.SetCustomLauncher(testRunCriteria.TestHostLauncher); - } + var requestSender = new TestRequestSender(requestData.ProtocolConfig, hostManager); - var requestSender = new TestRequestSender(requestData.ProtocolConfig, hostManager); + if (testRunCriteria.TestSessionInfo != null) + { + // This function is used to either take a pre-existing proxy operation manager from + // the test pool or to create a new proxy operation manager on the spot. + Func + proxyOperationManagerCreator = ( + string source, + ProxyExecutionManager proxyExecutionManager) => + { + var proxyOperationManager = TestSessionPool.Instance.TryTakeProxy( + testRunCriteria.TestSessionInfo, + source, + runtimeProviderInfo.RunSettings); - if (testRunCriteria.TestSessionInfo != null) - { - // This function is used to either take a pre-existing proxy operation manager from - // the test pool or to create a new proxy operation manager on the spot. - Func - proxyOperationManagerCreator = ( - string source, - ProxyExecutionManager proxyExecutionManager) => + if (proxyOperationManager == null) { - var proxyOperationManager = TestSessionPool.Instance.TryTakeProxy( - testRunCriteria.TestSessionInfo, - source, - testRunCriteria.TestRunSettings); - - if (proxyOperationManager == null) - { - // If the proxy creation process based on test session info failed, then - // we'll proceed with the normal creation process as if no test session - // info was passed in in the first place. - // - // WARNING: This should not normally happen and it raises questions - // regarding the test session pool operation and consistency. - EqtTrace.Warning("ProxyExecutionManager creation with test session failed."); - - proxyOperationManager = new ProxyOperationManager( - requestData, - requestSender, - hostManager, - proxyExecutionManager); - } - - return proxyOperationManager; - }; - - // In case we have an active test session, data collection needs were - // already taken care of when first creating the session. As a consequence - // we always return this proxy instead of choosing between the vanilla - // execution proxy and the one with data collection enabled. - return new ProxyExecutionManager( - testRunCriteria.TestSessionInfo, - proxyOperationManagerCreator, - testRunCriteria.DebugEnabledForTestSession); - } + // If the proxy creation process based on test session info failed, then + // we'll proceed with the normal creation process as if no test session + // info was passed in in the first place. 
+ // + // WARNING: This should not normally happen and it raises questions + // regarding the test session pool operation and consistency. + EqtTrace.Warning("ProxyExecutionManager creation with test session failed."); - return isDataCollectorEnabled - ? new ProxyExecutionManagerWithDataCollection( - requestData, - requestSender, - hostManager, - new ProxyDataCollectionManager( - requestData, - testRunCriteria.TestRunSettings, - GetSourcesFromTestRunCriteria(testRunCriteria))) - : new ProxyExecutionManager( - requestData, - requestSender, - hostManager); - }; + proxyOperationManager = new ProxyOperationManager( + requestData, + requestSender, + hostManager, + proxyExecutionManager); + } - // parallelLevel = 1 for desktop should go via else route. - var executionManager = (parallelLevel > 1 || !testHostManager.Shared) - ? new ParallelProxyExecutionManager( - requestData, - proxyExecutionManagerCreator, - parallelLevel, - sharedHosts: testHostManager.Shared) - : proxyExecutionManagerCreator(); + return proxyOperationManager; + }; - EqtTrace.Verbose($"TestEngine.GetExecutionManager: Chosen execution manager '{executionManager.GetType().AssemblyQualifiedName}' ParallelLevel '{parallelLevel}' Shared host '{testHostManager.Shared}'"); + // In case we have an active test session, data collection needs were + // already taken care of when first creating the session. As a consequence + // we always return this proxy instead of choosing between the vanilla + // execution proxy and the one with data collection enabled. + return new ProxyExecutionManager( + testRunCriteria.TestSessionInfo, + proxyOperationManagerCreator, + testRunCriteria.DebugEnabledForTestSession); + } - return executionManager; + return isDataCollectorEnabled + ? new ProxyExecutionManagerWithDataCollection( + requestData, + requestSender, + hostManager, + new ProxyDataCollectionManager( + requestData, + runtimeProviderInfo.RunSettings, + sources)) + : new ProxyExecutionManager( + requestData, + requestSender, + hostManager); } /// public IProxyTestSessionManager GetTestSessionManager( IRequestData requestData, - StartTestSessionCriteria testSessionCriteria) + StartTestSessionCriteria testSessionCriteria, + IDictionary sourceToSourceDetailMap) { var parallelLevel = VerifyParallelSettingAndCalculateParallelLevel( testSessionCriteria.Sources.Count, testSessionCriteria.RunSettings); + bool isParallelRun = parallelLevel > 1; requestData.MetricsCollection.Add( TelemetryDataConstants.ParallelEnabledDuringStartTestSession, - parallelLevel > 1 ? "True" : "False"); + isParallelRun ? "True" : "False"); var isDataCollectorEnabled = XmlRunSettingsUtilities.IsDataCollectionEnabled(testSessionCriteria.RunSettings); var isInProcDataCollectorEnabled = XmlRunSettingsUtilities.IsInProcDataCollectionEnabled(testSessionCriteria.RunSettings); - if (ShouldRunInNoIsolation( + List testRuntimeProviders = GetTestRuntimeProvidersForUniqueConfigurations(testSessionCriteria.RunSettings, sourceToSourceDetailMap, out var _); + + if (ShouldRunInProcess( testSessionCriteria.RunSettings, - parallelLevel > 1, - isDataCollectorEnabled || isInProcDataCollectorEnabled)) + isParallelRun, + isDataCollectorEnabled || isInProcDataCollectorEnabled, + testRuntimeProviders)) { - // This condition is the equivalent of the in-process proxy execution manager case. - // In this case all tests will be run in the vstest.console process, so there's no - // test host to be started. As a consequence there'll be no session info. 
+ // In this case all tests will be run in the current process (vstest.console), so there is no + // testhost to pre-start. No session will be created, and the session info will be null. return null; } - Func proxyCreator = () => + Func proxyCreator = testRuntimeProviderInfo => { - var hostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(testSessionCriteria.RunSettings); - if (hostManager == null) - { - throw new TestPlatformException( - string.Format( - CultureInfo.CurrentCulture, - Resources.Resources.NoTestHostProviderFound)); - } + var sources = testRuntimeProviderInfo.SourceDetails.Select(x => x.Source).ToList(); + var hostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(testRuntimeProviderInfo.RunSettings, sources); + ThrowExceptionIfTestHostManagerIsNull(hostManager, testRuntimeProviderInfo.RunSettings); - hostManager.Initialize(TestSessionMessageLogger.Instance, testSessionCriteria.RunSettings); + hostManager.Initialize(TestSessionMessageLogger.Instance, testRuntimeProviderInfo.RunSettings); if (testSessionCriteria.TestHostLauncher != null) { hostManager.SetCustomLauncher(testSessionCriteria.TestHostLauncher); @@ -344,7 +384,7 @@ public IProxyTestSessionManager GetTestSessionManager( // hostManager, // new ProxyDataCollectionManager( // requestData, - // testSessionCriteria.RunSettings, + // runsettingsXml, // testSessionCriteria.Sources)) // { // CloseRequestSenderChannelOnProxyClose = true @@ -355,13 +395,40 @@ public IProxyTestSessionManager GetTestSessionManager( hostManager); }; - var testhostManager = _testHostProviderManager.GetTestHostManagerByRunConfiguration(testSessionCriteria.RunSettings); - testhostManager.Initialize(TestSessionMessageLogger.Instance, testSessionCriteria.RunSettings); - var testhostCount = (parallelLevel > 1 || !testhostManager.Shared) - ? testSessionCriteria.Sources.Count - : 1; + // TODO: This condition should be returning the maxParallel level to avoid pre-starting way too many testhosts, because maxParallel level, + // can be smaller than the number of sources to run. + var maxTesthostCount = isParallelRun ? testSessionCriteria.Sources.Count : 1; - return new ProxyTestSessionManager(testSessionCriteria, testhostCount, proxyCreator); + return new ProxyTestSessionManager(testSessionCriteria, maxTesthostCount, proxyCreator, testRuntimeProviders); + } + + private List GetTestRuntimeProvidersForUniqueConfigurations( + string runSettings, + IDictionary sourceToSourceDetailMap, + out ITestRuntimeProvider mostRecentlyCreatedInstance) + { + // Group source details to get unique frameworks and architectures for which we will run, so we can figure + // out which runtime providers would run them, and if the runtime provider is shared or not. + mostRecentlyCreatedInstance = null; + var testRuntimeProviders = new List(); + var uniqueRunConfigurations = sourceToSourceDetailMap.Values.GroupBy(k => $"{k.Framework}|{k.Architecture}"); + foreach (var runConfiguration in uniqueRunConfigurations) + { + // It is okay to take the first (or any) source detail in the group. We are grouping to get the same source detail, so all architectures and frameworks are the same. + var sourceDetail = runConfiguration.First(); + var runsettingsXml = SourceDetailHelper.UpdateRunSettingsFromSourceDetail(runSettings, sourceDetail); + var sources = runConfiguration.Select(c => c.Source).ToList(); + // TODO: We could improve the implementation by adding an overload that won't create a new instance always, because we only need to know the Type. 
+ var testRuntimeProvider = _testHostProviderManager.GetTestHostManagerByRunConfiguration(runsettingsXml, sources); + var testRuntimeProviderInfo = new TestRuntimeProviderInfo(testRuntimeProvider.GetType(), testRuntimeProvider.Shared, runsettingsXml, sourceDetails: runConfiguration.ToList()); + + // Outputting the instance, because the code for in-process run uses it, and we don't want to resolve it another time. + mostRecentlyCreatedInstance = testRuntimeProvider; + testRuntimeProviders.Add(testRuntimeProviderInfo); + } + + ThrowExceptionIfAnyTestHostManagerIsNullOrNoneAreFound(testRuntimeProviders); + return testRuntimeProviders; } /// @@ -408,6 +475,7 @@ private int VerifyParallelSettingAndCalculateParallelLevel( // Check the user parallel setting. int userParallelSetting = RunSettingsUtilities.GetMaxCpuCount(runSettings); parallelLevelToUse = userParallelSetting == 0 + // TODO: use environment helper so we can control this from tests. ? Environment.ProcessorCount : userParallelSetting; var enableParallel = parallelLevelToUse > 1; @@ -445,11 +513,18 @@ private int VerifyParallelSettingAndCalculateParallelLevel( return parallelLevelToUse; } - private bool ShouldRunInNoIsolation( + private bool ShouldRunInProcess( string runsettings, bool isParallelEnabled, - bool isDataCollectorEnabled) + bool isDataCollectorEnabled, + List testHostProviders) { + if (testHostProviders.Count > 1) + { + EqtTrace.Info("TestEngine.ShouldRunInNoIsolation: This run has multiple different architectures or frameworks, running in isolation (in a separate testhost proces)."); + return false; + } + var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettings); if (runConfiguration.InIsolation) @@ -512,15 +587,32 @@ private IRequestData GetRequestData(bool isTelemetryOptedIn) }; } - /// - /// Gets test sources from test run criteria. - /// - /// - /// The test sources. - private IEnumerable GetSourcesFromTestRunCriteria(TestRunCriteria testRunCriteria) + private static void ThrowExceptionIfTestHostManagerIsNull(ITestRuntimeProvider testHostManager, string settingsXml) + { + if (testHostManager == null) + { + EqtTrace.Error($"{nameof(TestEngine)}.{nameof(ThrowExceptionIfTestHostManagerIsNull)}: No suitable testHostProvider found for runsettings: {settingsXml}"); + throw new TestPlatformException(string.Format(CultureInfo.CurrentCulture, Resources.Resources.NoTestHostProviderFound)); + } + } + + private static void ThrowExceptionIfAnyTestHostManagerIsNullOrNoneAreFound(List testRuntimeProviders) { - return testRunCriteria.HasSpecificTests - ? 
TestSourcesUtility.GetSources(testRunCriteria.Tests) - : testRunCriteria.Sources; + if (!testRuntimeProviders.Any()) + throw new ArgumentException(null, nameof(testRuntimeProviders)); + + var missingRuntimeProviders = testRuntimeProviders.Where(p => p.Type == null); + if (missingRuntimeProviders.Any()) + { + var stringBuilder = new StringBuilder(); + stringBuilder.AppendLine(string.Format(CultureInfo.CurrentCulture, Resources.Resources.NoTestHostProviderFound)); + foreach (var missingRuntimeProvider in missingRuntimeProviders) + { + EqtTrace.Error($"{nameof(TestEngine)}.{nameof(ThrowExceptionIfAnyTestHostManagerIsNullOrNoneAreFound)}: No suitable testHostProvider found for sources {missingRuntimeProvider.SourceDetails.Select(s => s.Source)} and runsettings: {missingRuntimeProvider.RunSettings}"); + missingRuntimeProvider.SourceDetails.ForEach(detail => stringBuilder.AppendLine(detail.Source)); + } + + throw new TestPlatformException(stringBuilder.ToString()); + } } } diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/TestSession/ProxyTestSessionManager.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/TestSession/ProxyTestSessionManager.cs index 2ed0607d9d..4bc8f33b17 100644 --- a/src/Microsoft.TestPlatform.CrossPlatEngine/TestSession/ProxyTestSessionManager.cs +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/TestSession/ProxyTestSessionManager.cs @@ -39,12 +39,14 @@ private enum TestSessionState private readonly object _proxyOperationLockObject = new(); private volatile bool _proxySetupFailed; private readonly StartTestSessionCriteria _testSessionCriteria; - private readonly int _testhostCount; + private readonly int _maxTesthostCount; private TestSessionInfo _testSessionInfo; - private readonly Func _proxyCreator; + private readonly Func _proxyCreator; + private readonly List _runtimeProviders; private readonly IList _proxyContainerList; private readonly IDictionary _proxyMap; private readonly Stopwatch _testSessionStopwatch; + private readonly Dictionary _sourceToRuntimeProviderInfoMap; private IDictionary _testSessionEnvironmentVariables = new Dictionary(); private IDictionary TestSessionEnvironmentVariables @@ -67,20 +69,27 @@ private IDictionary TestSessionEnvironmentVariables /// /// /// The test session criteria. - /// The testhost count. + /// The testhost count. /// The proxy creator. public ProxyTestSessionManager( StartTestSessionCriteria criteria, - int testhostCount, - Func proxyCreator) + int maxTesthostCount, + Func proxyCreator, + List runtimeProviders) { _testSessionCriteria = criteria; - _testhostCount = testhostCount; + _maxTesthostCount = maxTesthostCount; _proxyCreator = proxyCreator; - + _runtimeProviders = runtimeProviders; _proxyContainerList = new List(); _proxyMap = new Dictionary(); _testSessionStopwatch = new Stopwatch(); + + // Get dictionary from source -> runtimeProviderInfo, that has the type of runtime provider to create for this + // source, and updated runsettings. + _sourceToRuntimeProviderInfoMap = _runtimeProviders + .SelectMany(runtimeProviderInfo => runtimeProviderInfo.SourceDetails.Select(detail => new KeyValuePair(detail.Source, runtimeProviderInfo))) + .ToDictionary(pair => pair.Key, pair => pair.Value); } // NOTE: The method is virtual for mocking purposes. 
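The same source-to-provider lookup appears in several places above (ParallelProxyExecutionManager and the ProxyTestSessionManager constructor). A small sketch, with hypothetical groups and simplified types, of how provider infos grouped by framework and architecture flatten into that per-source map:

using System.Collections.Generic;
using System.Linq;

// Simplified stand-in for TestRuntimeProviderInfo: one entry per unique framework|architecture
// group, carrying the sources that belong to that group and the patched runsettings.
internal record ProviderGroup(string Name, string RunSettings, List<string> Sources);

internal static class SourceMapSketch
{
    public static Dictionary<string, ProviderGroup> BuildSourceMap(IEnumerable<ProviderGroup> groups) =>
        // Flatten group -> sources into source -> group, so any single source can be resolved
        // to the provider type (and patched runsettings) that should run it.
        groups
            .SelectMany(group => group.Sources.Select(source => (source, group)))
            .ToDictionary(pair => pair.source, pair => pair.group);
}

// Example: two .NET Framework sources sharing one group and one .NET source in another group
// flatten into three map entries, each pointing at its respective group.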
@@ -99,33 +108,28 @@ public virtual bool StartSession(ITestSessionEventsHandler eventsHandler, IReque var stopwatch = new Stopwatch(); stopwatch.Start(); + // TODO: Right now we either pre-create 1 testhost if parallel is disabled, or we pre-create as many + // testhosts as we have sources. In the future we will have a maxParallelLevel set to the actual parallel level + // (which might be lower than the number of sources) and we should do some kind of thinking here to figure out how to split the sources. + // To follow the way parallel execution and discovery is (supposed to be) working, there should be as many testhosts + // as the maxParallel level pre-started, and marked with the Shared, and configuration that they can run. + // Create all the proxies in parallel, one task per proxy. - var taskList = new Task[_testhostCount]; + var taskList = new Task[_maxTesthostCount]; for (int i = 0; i < taskList.Length; ++i) { - // The testhost count is equal to 1 because one of the following conditions - // holds true: - // 1. we're dealing with a shared testhost (e.g.: .NET Framework testhost) - // that must process multiple sources within the same testhost process; - // 2. we're dealing with a single testhost (shared or not, it doesn't matter) - // that must process a single source; - // Either way, no further processing of the original test source list is needed - // in either of those cases. - // - // Consequentely, if the testhost count is greater than one it means that the - // testhost is not shared (e.g.: .NET Core testhost), in which case each test - // source must be processed by a dedicated testhost, which is the reason we - // create a list with a single element, i.e. the current source to be processed. - var sources = (_testhostCount == 1) - ? _testSessionCriteria.Sources - : new List() { _testSessionCriteria.Sources[i] }; + // This is similar to what we do in ProxyExecutionManager, and ProxyDiscoveryManager, we split + // up the payload into multiple smaller pieces. Here it is one source per proxy. + var source = _testSessionCriteria.Sources[i]; + var sources = new List() { source }; + var runtimeProviderInfo = _sourceToRuntimeProviderInfoMap[source]; taskList[i] = Task.Factory.StartNew(() => { - if (!SetupRawProxy( - sources, - _testSessionCriteria.RunSettings)) + var proxySetupSucceeded = SetupRawProxy(sources, runtimeProviderInfo); + if (!proxySetupSucceeded) { + // Set this only in the failed case, so we can check if any proxy failed to setup. _proxySetupFailed = true; } }); @@ -248,6 +252,7 @@ public virtual ProxyOperationManager DequeueProxy(string source, string runSetti // its own proxy instead. if (!CheckRunSettingsAreCompatible(runSettings)) { + EqtTrace.Verbose($"ProxyTestSessionManager.DequeueProxy: A proxy exists, but the runsettings do not match. Skipping it. Incoming settings: {runSettings}, Settings on proxy: {_testSessionCriteria.RunSettings}"); throw new InvalidOperationException( string.Format( CultureInfo.CurrentUICulture, @@ -328,12 +333,12 @@ private int EnqueueNewProxy( private bool SetupRawProxy( IList sources, - string runSettings) + TestRuntimeProviderInfo runtimeProviderInfo) { try { // Create and cache the proxy. - var operationManagerProxy = _proxyCreator(); + var operationManagerProxy = _proxyCreator(runtimeProviderInfo); if (operationManagerProxy == null) { return false; @@ -343,7 +348,7 @@ private bool SetupRawProxy( operationManagerProxy.Initialize(skipDefaultAdapters: false); // Start the test host associated to the proxy. 
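A rough sketch of the pre-start fan-out described above, assuming a hypothetical Setup(source) in place of the real SetupRawProxy call:

using System;
using System.Linq;
using System.Threading.Tasks;

internal static class PreStartSketch
{
    public static void PreStart(string[] sources, int maxTesthostCount)
    {
        // Today this is effectively one pre-started testhost per source (or a single one when
        // parallel is disabled); the TODO above is about capping this at the real maxParallel level.
        var setupTasks = sources
            .Take(maxTesthostCount)
            .Select(source => Task.Run(() => Setup(source)))
            .ToArray();

        Task.WaitAll(setupTasks);
    }

    private static void Setup(string source) =>
        Console.WriteLine($"starting testhost for {source}");
}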
- if (!operationManagerProxy.SetupChannel(sources, runSettings)) + if (!operationManagerProxy.SetupChannel(sources, runtimeProviderInfo.RunSettings)) { return false; } diff --git a/src/Microsoft.TestPlatform.CrossPlatEngine/Utilities/SourceDetailHelper.cs b/src/Microsoft.TestPlatform.CrossPlatEngine/Utilities/SourceDetailHelper.cs new file mode 100644 index 0000000000..b08c94d482 --- /dev/null +++ b/src/Microsoft.TestPlatform.CrossPlatEngine/Utilities/SourceDetailHelper.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +using System.IO; +using System.Xml; + +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Utilities; +using Microsoft.VisualStudio.TestPlatform.Utilities; + +namespace Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Utilities; + +internal static class SourceDetailHelper +{ + internal static string UpdateRunSettingsFromSourceDetail(string runSettings, SourceDetail sourceDetail) + { + using var stream = new StringReader(runSettings); + using var reader = XmlReader.Create(stream, XmlRunSettingsUtilities.ReaderSettings); + var document = new XmlDocument(); + document.Load(reader); + var navigator = document.CreateNavigator(); + + InferRunSettingsHelper.UpdateTargetFramework(document, sourceDetail.Framework.ToString(), overwrite: true); + InferRunSettingsHelper.UpdateTargetPlatform(document, sourceDetail.Architecture.ToString(), overwrite: true); + + var updatedRunSettings = navigator.OuterXml; + return updatedRunSettings; + } +} diff --git a/src/Microsoft.TestPlatform.Execution.Shared/DebuggerBreakpoint.cs b/src/Microsoft.TestPlatform.Execution.Shared/DebuggerBreakpoint.cs index aff5059c8f..e7db29756d 100644 --- a/src/Microsoft.TestPlatform.Execution.Shared/DebuggerBreakpoint.cs +++ b/src/Microsoft.TestPlatform.Execution.Shared/DebuggerBreakpoint.cs @@ -42,11 +42,11 @@ internal static void AttachVisualStudioDebugger(string environmentVariable) if (vsPid == null) { - ConsoleOutput.Instance.WriteLine("Attaching Visual Studio, either a parent or the one that was started first... To specify a VS instance to use, use the PID in the option, instead of 1. No breakpoints are automatically set.", OutputLevel.Information); + ConsoleOutput.Instance.WriteLine("Attaching Visual Studio, either a parent or the one that was started first... To specify a VS instance to use, use the PID in the option, instead of 1.", OutputLevel.Information); } else { - ConsoleOutput.Instance.WriteLine($"Attaching Visual Studio with PID {vsPid} to the process '{Process.GetCurrentProcess().ProcessName}({Process.GetCurrentProcess().Id})'... No breakpoints are automatically set.", OutputLevel.Information); + ConsoleOutput.Instance.WriteLine($"Attaching Visual Studio with PID {vsPid} to the process '{Process.GetCurrentProcess().ProcessName}({Process.GetCurrentProcess().Id})'...", OutputLevel.Information); } AttachVs(Process.GetCurrentProcess(), vsPid); diff --git a/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestHostLauncher2.cs b/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestHostLauncher2.cs index 5f16818c44..91db204e1e 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestHostLauncher2.cs +++ b/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestHostLauncher2.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. 
// Licensed under the MIT license. See LICENSE file in the project root for full license information. +using System; using System.Threading; #nullable disable @@ -27,3 +28,18 @@ public interface ITestHostLauncher2 : ITestHostLauncher /// if the debugger was successfully attached to the requested process, otherwise. bool AttachDebuggerToProcess(int pid, CancellationToken cancellationToken); } + +[Obsolete("Do not use this api, it is not ready yet.")] +public interface ITestHostLauncher3 : ITestHostLauncher2 +{ + bool AttachDebuggerToProcess(AttachDebuggerInfo attachDebuggerInfo); +} + +[Obsolete("Do not use this api, it is not ready yet.")] +public class AttachDebuggerInfo +{ + public Version Version { get; set; } + public int ProcessId { get; set; } + public Framework TargetFramework { get; set; } + public CancellationToken CancellationToken { get; set; } +} diff --git a/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestPlatform.cs b/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestPlatform.cs index c91c1ff6f2..a58e5cbad5 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestPlatform.cs +++ b/src/Microsoft.TestPlatform.ObjectModel/Client/Interfaces/ITestPlatform.cs @@ -45,7 +45,8 @@ void UpdateExtensions( IDiscoveryRequest CreateDiscoveryRequest( IRequestData requestData, DiscoveryCriteria discoveryCriteria, - TestPlatformOptions options); + TestPlatformOptions options, + Dictionary sourceToSourceDetailMap); /// /// Creates a test run request. @@ -59,7 +60,8 @@ IDiscoveryRequest CreateDiscoveryRequest( ITestRunRequest CreateTestRunRequest( IRequestData requestData, TestRunCriteria testRunCriteria, - TestPlatformOptions options); + TestPlatformOptions options, + Dictionary sourceToSourceDetailMap); /// /// Starts a test session. 
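As a rough illustration of how a translation-layer client could implement the newly added launcher interface above, here is a hedged sketch. The class name is hypothetical, the interface is still marked obsolete ("not ready yet"), and a real IDE client would attach an actual debugger instead of returning false.

```csharp
using System.Diagnostics;
using System.Threading;
using Microsoft.VisualStudio.TestPlatform.ObjectModel;
using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces;

#pragma warning disable CS0618 // ITestHostLauncher3 is marked obsolete ("not ready yet")
internal class AttachingTestHostLauncher : ITestHostLauncher3
{
    public bool IsDebug => true;

    public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo)
        => LaunchTestHost(defaultTestHostStartInfo, CancellationToken.None);

    public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo, CancellationToken cancellationToken)
    {
        // Start the testhost exactly as vstest.console proposed it and report the PID back.
        var process = Process.Start(new ProcessStartInfo(
            defaultTestHostStartInfo.FileName,
            defaultTestHostStartInfo.Arguments)
        {
            WorkingDirectory = defaultTestHostStartInfo.WorkingDirectory,
        });
        return process?.Id ?? -1;
    }

    public bool AttachDebuggerToProcess(int pid)
        => AttachDebuggerToProcess(pid, CancellationToken.None);

    public bool AttachDebuggerToProcess(int pid, CancellationToken cancellationToken)
    {
        // A real client would attach its debugger to the given PID here; this sketch does not.
        return false;
    }

    // New overload added by this diff: all attach parameters travel in one object.
    public bool AttachDebuggerToProcess(AttachDebuggerInfo attachDebuggerInfo)
        => AttachDebuggerToProcess(attachDebuggerInfo.ProcessId, attachDebuggerInfo.CancellationToken);
}
#pragma warning restore CS0618
```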
@@ -75,5 +77,6 @@ ITestRunRequest CreateTestRunRequest( bool StartTestSession( IRequestData requestData, StartTestSessionCriteria criteria, - ITestSessionEventsHandler eventsHandler); + ITestSessionEventsHandler eventsHandler, + Dictionary sourceToSourceDetailMap); } diff --git a/src/Microsoft.TestPlatform.ObjectModel/Friends.cs b/src/Microsoft.TestPlatform.ObjectModel/Friends.cs index ed2a57ef52..4e58caefe6 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Friends.cs +++ b/src/Microsoft.TestPlatform.ObjectModel/Friends.cs @@ -13,6 +13,7 @@ [assembly: InternalsVisibleTo("vstest.console.arm64, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("Microsoft.VisualStudio.TestPlatform.Client, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("Microsoft.TestPlatform.ObjectModel.UnitTests, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] +[assembly: InternalsVisibleTo("Microsoft.TestPlatform.CrossPlatEngine.UnitTests, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("datacollector.UnitTests, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("Microsoft.TestPlatform.Extensions.EventLogCollector.UnitTests, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] [assembly: InternalsVisibleTo("Microsoft.TestPlatform.ObjectModel.ManagedNameUtilities.UnitTests, PublicKey=002400000480000094000000060200000024000052534131000400000100010007d1fa57c4aed9f0a32e84aa0faefd0de9e8fd6aec8f87fb03766c834c99921eb23be79ad9d5dcc1dd9ad236132102900b723cf980957fc4e177108fc607774f29e8320e92ea05ece4e821c0a5efe8f1645c4c0c93c1ab99285d622caa652c1dfad63d745d6f2de5f17e5eaf0fc4963d261c8a12436518206dc093344d5ad293")] diff --git a/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/PublicAPI.Shipped.txt 
b/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/PublicAPI.Shipped.txt index b3728799c9..691575a4da 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/PublicAPI.Shipped.txt +++ b/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/PublicAPI.Shipped.txt @@ -228,8 +228,8 @@ Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestMessageEventHandler. Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestMessageEventHandler.HandleRawMessage(string rawMessage) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.ClearExtensions() -> void -Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.CreateDiscoveryRequest(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria discoveryCriteria, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestPlatformOptions options) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IDiscoveryRequest -Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.CreateTestRunRequest(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCriteria testRunCriteria, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestPlatformOptions options) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestRunRequest +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.CreateDiscoveryRequest(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria discoveryCriteria, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestPlatformOptions options, System.Collections.Generic.Dictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IDiscoveryRequest +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.CreateTestRunRequest(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCriteria testRunCriteria, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestPlatformOptions options, System.Collections.Generic.Dictionary sourceToSourceDetailMap) -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestRunRequest Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.UpdateExtensions(System.Collections.Generic.IEnumerable pathToAdditionalExtensions, bool skipExtensionFilters) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatformCapabilities Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatformCapabilities.TestPlatformType.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestPlatformType @@ -893,7 +893,7 @@ Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria.TestSes Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCriteria.TestSessionInfo.set -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryRequestPayload.TestSessionInfo.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestSessionInfo Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryRequestPayload.TestSessionInfo.set -> void -Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.StartTestSession(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData 
requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria criteria, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestSessionEventsHandler eventsHandler) -> bool +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestPlatform.StartTestSession(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.IRequestData requestData, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCriteria criteria, Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestSessionEventsHandler eventsHandler, System.Collections.Generic.Dictionary sourceToSourceDetailMap) -> bool Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestSessionEventsHandler.HandleStartTestSessionComplete(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCompleteEventArgs eventArgs) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.ITestSessionEventsHandler.HandleStopTestSessionComplete(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StopTestSessionCompleteEventArgs eventArgs) -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Payloads.StartTestSessionAckPayload.EventArgs.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.StartTestSessionCompleteEventArgs @@ -928,3 +928,21 @@ Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCompleteEventArgs. Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.TestRunCompleteEventArgs.DiscoveredExtensions.set -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCompleteEventArgs.IsAborted.set -> void Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.DiscoveryCompleteEventArgs.TotalCount.set -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.SourceDetail +Microsoft.VisualStudio.TestPlatform.ObjectModel.SourceDetail.Architecture.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Architecture +Microsoft.VisualStudio.TestPlatform.ObjectModel.SourceDetail.Framework.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Framework +Microsoft.VisualStudio.TestPlatform.ObjectModel.SourceDetail.Source.get -> string +Microsoft.VisualStudio.TestPlatform.ObjectModel.SourceDetail.SourceDetail() -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.AttachDebuggerInfo() -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.CancellationToken.get -> System.Threading.CancellationToken +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.CancellationToken.set -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.ProcessId.get -> int +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.ProcessId.set -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.TargetFramework.get -> Microsoft.VisualStudio.TestPlatform.ObjectModel.Framework +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.TargetFramework.set -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.Version.get -> System.Version +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo.Version.set -> void +Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.ITestHostLauncher3 
+Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.ITestHostLauncher3.AttachDebuggerToProcess(Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces.AttachDebuggerInfo attachDebuggerInfo) -> bool + diff --git a/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/net/PublicAPI.Unshipped.txt b/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/net/PublicAPI.Unshipped.txt index e02abfc9b0..5f282702bb 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/net/PublicAPI.Unshipped.txt +++ b/src/Microsoft.TestPlatform.ObjectModel/PublicAPI/net/PublicAPI.Unshipped.txt @@ -1 +1 @@ - + \ No newline at end of file diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.Designer.cs b/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.Designer.cs index 3a37b375e5..696900893f 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.Designer.cs +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.Designer.cs @@ -20,7 +20,7 @@ namespace Microsoft.VisualStudio.TestPlatform.ObjectModel.Resources { // class via a tool like ResGen or Visual Studio. // To add or remove a member, edit your .ResX file then rerun ResGen // with the /str option, or rebuild your VS project. - [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "16.0.0.0")] + [global::System.CodeDom.Compiler.GeneratedCodeAttribute("System.Resources.Tools.StronglyTypedResourceBuilder", "17.0.0.0")] [global::System.Diagnostics.DebuggerNonUserCodeAttribute()] [global::System.Runtime.CompilerServices.CompilerGeneratedAttribute()] public class CommonResources { @@ -71,7 +71,7 @@ public static string CannotBeNullOrEmpty { } /// - /// Looks up a localized string similar to Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings.. + /// Looks up a localized string similar to Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings.. /// public static string DisplayChosenSettings { get { @@ -98,7 +98,7 @@ public static string NoMatchingSourcesFound { } /// - /// Looks up a localized string similar to {0} is built for Framework {1} and Platform {2}.. + /// Looks up a localized string similar to {0} would use Framework {1} and Platform {2}.. /// public static string SourceIncompatible { get { diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.resx b/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.resx index be18466964..329f3050ef 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.resx +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/CommonResources.resx @@ -121,7 +121,7 @@ The parameter cannot be null or empty. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. 
Settings file provided does not conform to required format. @@ -130,6 +130,6 @@ None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. - {0} is built for Framework {1} and Platform {2}. + {0} would use Framework {1} and Platform {2}. - \ No newline at end of file + diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.cs.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.cs.xlf index 5e6f6bda56..a97e4c557b 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.cs.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.cs.xlf @@ -8,9 +8,9 @@ Parametr nemůže být null nebo prázdný. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - Otestujte zjištěné knihovny DLL, které se vytvořily pro různé verze architektury a platformy. Následující knihovny DLL neodpovídají aktuálnímu nastavení, tedy architektuře {0} a platformě {1}.{2}Další podrobnosti o správě těchto nastavení najdete v {3}. + Otestujte zjištěné knihovny DLL, které se vytvořily pro různé verze architektury a platformy. Následující knihovny DLL neodpovídají aktuálnímu nastavení, tedy architektuře {0} a platformě {1}.{2}Další podrobnosti o správě těchto nastavení najdete v {3}. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - Produkt {0} je sestaven pro rozhraní {1} a platformu {2}. + {0} would use Framework {1} and Platform {2}. + Produkt {0} je sestaven pro rozhraní {1} a platformu {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.de.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.de.xlf index 0904b4e6a4..3d2e7b6df9 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.de.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.de.xlf @@ -8,9 +8,9 @@ Der Parameter darf nicht NULL oder leer sein. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - Beim Testlauf wurden DLLs erkannt, die für verschiedene Framework- und Plattformversionen erstellt wurden. Die folgenden DLLs entsprechen nicht den aktuellen Einstellungen: Framework {0} und Plattform {1}.{2}Wechseln Sie zu {3}, um weitere Informationen zum Verwalten dieser Einstellungen zu erhalten. 
+ Beim Testlauf wurden DLLs erkannt, die für verschiedene Framework- und Plattformversionen erstellt wurden. Die folgenden DLLs entsprechen nicht den aktuellen Einstellungen: Framework {0} und Plattform {1}.{2}Wechseln Sie zu {3}, um weitere Informationen zum Verwalten dieser Einstellungen zu erhalten. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - "{0}" wurde für das Framework {1} und die Plattform {2} erstellt. + {0} would use Framework {1} and Platform {2}. + "{0}" wurde für das Framework {1} und die Plattform {2} erstellt. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.es.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.es.xlf index ea9dd037da..3375c797af 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.es.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.es.xlf @@ -8,9 +8,9 @@ El parámetro no puede ser nulo ni estar vacío. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - La serie de pruebas detectó archivos DLL que se compilaron para diferentes versiones de marco de trabajo y plataforma. Los siguientes archivos DLL no coinciden con la configuración actual, que es el marco de trabajo {0} y la plataforma {1}. {2}Vaya a {3} para obtener más información sobre cómo administrar esta configuración. + La serie de pruebas detectó archivos DLL que se compilaron para diferentes versiones de marco de trabajo y plataforma. Los siguientes archivos DLL no coinciden con la configuración actual, que es el marco de trabajo {0} y la plataforma {1}. {2}Vaya a {3} para obtener más información sobre cómo administrar esta configuración. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} se ha compilado para el marco {1} y la plataforma {2}. + {0} would use Framework {1} and Platform {2}. + {0} se ha compilado para el marco {1} y la plataforma {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.fr.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.fr.xlf index 0c9b75f180..a433ea6503 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.fr.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.fr.xlf @@ -8,9 +8,9 @@ Le paramètre ne peut pas avoir une valeur null ou être vide. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. 
+ Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - La série de tests a détecté une ou plusieurs DLL qui ont été générées pour d'autres versions de framework et de plateforme. La ou les DLL suivantes ne correspondent pas aux paramètres actuels : framework {0} et plateforme {1}.{2}Accédez à {3} pour plus d'informations sur la gestion de ces paramètres. + La série de tests a détecté une ou plusieurs DLL qui ont été générées pour d'autres versions de framework et de plateforme. La ou les DLL suivantes ne correspondent pas aux paramètres actuels : framework {0} et plateforme {1}.{2}Accédez à {3} pour plus d'informations sur la gestion de ces paramètres. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} est généré pour le framework {1} et la plateforme {2}. + {0} would use Framework {1} and Platform {2}. + {0} est généré pour le framework {1} et la plateforme {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.it.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.it.xlf index cde112bdb8..4e25bc663c 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.it.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.it.xlf @@ -8,9 +8,9 @@ Il parametro non può essere vuoto o Null. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - L'esecuzione dei test ha rilevato DLL compilate per versioni diverse del framework e della piattaforma. Le DLL seguenti non corrispondono alle impostazioni correnti, ovvero al framework {0} e alla piattaforma {1}.{2}Per altri dettagli sulla gestione di queste impostazioni, vedere {3}. + L'esecuzione dei test ha rilevato DLL compilate per versioni diverse del framework e della piattaforma. Le DLL seguenti non corrispondono alle impostazioni correnti, ovvero al framework {0} e alla piattaforma {1}.{2}Per altri dettagli sulla gestione di queste impostazioni, vedere {3}. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} viene compilato per il framework {1} e la piattaforma {2}. + {0} would use Framework {1} and Platform {2}. + {0} viene compilato per il framework {1} e la piattaforma {2}. 
diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ja.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ja.xlf index 5e8f037b7a..33c7312fd5 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ja.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ja.xlf @@ -8,9 +8,9 @@ パラメーターを null または空にすることはできません。 - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - テストの実行で、別のフレームワークとプラットフォームのバージョン用にビルドされた DLL が検出されました。以下の DLL は、現在の設定と一致しません。現在のフレームワーク: {0}、プラットフォーム: {1}。{2}これらの設定を管理することについて詳しくは、{3} にアクセスしてください。 + テストの実行で、別のフレームワークとプラットフォームのバージョン用にビルドされた DLL が検出されました。以下の DLL は、現在の設定と一致しません。現在のフレームワーク: {0}、プラットフォーム: {1}。{2}これらの設定を管理することについて詳しくは、{3} にアクセスしてください。 None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} はフレームワーク {1} およびプラットフォーム {2} 向けにビルドされます。 + {0} would use Framework {1} and Platform {2}. + {0} はフレームワーク {1} およびプラットフォーム {2} 向けにビルドされます。 diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ko.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ko.xlf index ee4b96f092..727fe5f09d 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ko.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ko.xlf @@ -8,9 +8,9 @@ 매개 변수는 null이거나 비워 둘 수 없습니다. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - 테스트 실행에서 다른 프레임워크 및 플랫폼 버전용으로 빌드된 DLL을 감지했습니다. 다음 DLL은 현재 설정({0} 프레임워크 및 {1} 플랫폼)과 일치하지 않습니다.{2}이 설정을 관리하는 방법에 대한 자세한 내용을 보려면 {3}(으)로 이동하세요. + 테스트 실행에서 다른 프레임워크 및 플랫폼 버전용으로 빌드된 DLL을 감지했습니다. 다음 DLL은 현재 설정({0} 프레임워크 및 {1} 플랫폼)과 일치하지 않습니다.{2}이 설정을 관리하는 방법에 대한 자세한 내용을 보려면 {3}(으)로 이동하세요. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0}은(는) 프레임워크 {1} 및 플랫폼 {2}을(를) 대상으로 빌드됩니다. + {0} would use Framework {1} and Platform {2}. + {0}은(는) 프레임워크 {1} 및 플랫폼 {2}을(를) 대상으로 빌드됩니다. 
diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pl.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pl.xlf index e4e0d81fb4..21d972a1ec 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pl.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pl.xlf @@ -8,9 +8,9 @@ Parametr nie może mieć wartości null ani być pusty. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - Przebieg testowy wykrył biblioteki DLL utworzone dla innych wersji struktury i platformy. Następujące biblioteki DLL są niezgodne z bieżącymi ustawieniami (określającymi strukturę {0} i platformę {1}).{2}Aby uzyskać więcej informacji o zarządzaniu tymi ustawieniami, przejdź do: {3}. + Przebieg testowy wykrył biblioteki DLL utworzone dla innych wersji struktury i platformy. Następujące biblioteki DLL są niezgodne z bieżącymi ustawieniami (określającymi strukturę {0} i platformę {1}).{2}Aby uzyskać więcej informacji o zarządzaniu tymi ustawieniami, przejdź do: {3}. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - Element {0} został skompilowany dla struktury {1} i platformy {2}. + {0} would use Framework {1} and Platform {2}. + Element {0} został skompilowany dla struktury {1} i platformy {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pt-BR.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pt-BR.xlf index d36d02f785..54be5010de 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pt-BR.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.pt-BR.xlf @@ -8,9 +8,9 @@ O parâmetro não pode ser nulo ou estar vazio. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - A execução de teste detectou DLLs que foram criadas para diferentes versões de estrutura e de plataforma. As DLLs a seguir não correspondem às configurações atuais, que são a estrutura {0} e a plataforma {1}.{2}Acesse {3} para obter mais detalhes sobre o gerenciamento dessas configurações. + A execução de teste detectou DLLs que foram criadas para diferentes versões de estrutura e de plataforma. As DLLs a seguir não correspondem às configurações atuais, que são a estrutura {0} e a plataforma {1}.{2}Acesse {3} para obter mais detalhes sobre o gerenciamento dessas configurações. 
None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} é compilado para Framework {1} e Platform {2}. + {0} would use Framework {1} and Platform {2}. + {0} é compilado para Framework {1} e Platform {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ru.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ru.xlf index 10fda594fd..1b033235c6 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ru.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.ru.xlf @@ -8,9 +8,9 @@ Параметр не может быть пустым или иметь значение null. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - Тестовый запуск обнаружил библиотеки DLL, которые были созданы для различных версий инфраструктуры и платформы. Следующие библиотеки DLL не соответствуют текущим параметрам (инфраструктура {0} и платформа {1}).{2}Перейдите по ссылке {3} для получения дополнительных сведений об управлении этими параметрами. + Тестовый запуск обнаружил библиотеки DLL, которые были созданы для различных версий инфраструктуры и платформы. Следующие библиотеки DLL не соответствуют текущим параметрам (инфраструктура {0} и платформа {1}).{2}Перейдите по ссылке {3} для получения дополнительных сведений об управлении этими параметрами. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} создается для программной платформы {1} и аппаратной платформы {2}. + {0} would use Framework {1} and Platform {2}. + {0} создается для программной платформы {1} и аппаратной платформы {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.tr.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.tr.xlf index d8243a5dff..9d70720de4 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.tr.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.tr.xlf @@ -8,9 +8,9 @@ Parametre null veya boş olamaz. - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - Test çalıştırması, farklı çerçeve ve platform sürümleri için oluşturulmuş DLL'ler buldu. 
Aşağıdaki DLL'ler, {0} çerçevesi ve {1} platformu olan geçerli ayarlarla eşleşmiyor. {2}Bu ayarları yönetme hakkında daha fazla ayrıntı için {3} bölümüne gidin. + Test çalıştırması, farklı çerçeve ve platform sürümleri için oluşturulmuş DLL'ler buldu. Aşağıdaki DLL'ler, {0} çerçevesi ve {1} platformu olan geçerli ayarlarla eşleşmiyor. {2}Bu ayarları yönetme hakkında daha fazla ayrıntı için {3} bölümüne gidin. None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0}, {1} Çerçevesi ve {2} Platformu için derlenmiştir. + {0} would use Framework {1} and Platform {2}. + {0}, {1} Çerçevesi ve {2} Platformu için derlenmiştir. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.xlf index 29ceff30df..9b140afbe6 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.xlf @@ -7,7 +7,7 @@ - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. @@ -20,7 +20,7 @@ - {0} is built for Framework {1} and Platform {2}. + {0} would use Framework {1} and Platform {2}. {0} is built for Framework {1} and Platform {2}. diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hans.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hans.xlf index 91aa3fa748..e5d069d375 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hans.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hans.xlf @@ -8,9 +8,9 @@ 参数不能为 NULL 或为空。 - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - 测试运行检测到为不同框架和平台版本构建的 DLL。后面的 DLL 与当前设置(即 {0} 框架和 {1} 平台)不匹配。{2}如需了解如何管理这些设置,请转到 {3}。 + 测试运行检测到为不同框架和平台版本构建的 DLL。后面的 DLL 与当前设置(即 {0} 框架和 {1} 平台)不匹配。{2}如需了解如何管理这些设置,请转到 {3}。 None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} 适用于 {1} 框架和 {2} 平台。 + {0} would use Framework {1} and Platform {2}. 
+ {0} 适用于 {1} 框架和 {2} 平台。 diff --git a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hant.xlf b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hant.xlf index faa0382b32..5de22766e6 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hant.xlf +++ b/src/Microsoft.TestPlatform.ObjectModel/Resources/xlf/CommonResources.zh-Hant.xlf @@ -8,9 +8,9 @@ 參數不可為 null 或空白。 - Test run detected DLL(s) which were built for different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. + Test run detected DLL(s) which would use different framework and platform versions. Following DLL(s) do not match current settings, which are {0} framework and {1} platform.{2}Go to {3} for more details on managing these settings. - 測試回合偵測到為不同架構和平台版本所建置的 DLL。下列 DLL 與目前的設定不相符,亦即 {0} 架構及 {1} 平台。{2}如需管理這些設定的詳細資料,請前往 {3}。 + 測試回合偵測到為不同架構和平台版本所建置的 DLL。下列 DLL 與目前的設定不相符,亦即 {0} 架構及 {1} 平台。{2}如需管理這些設定的詳細資料,請前往 {3}。 None of the provided test containers match the Platform Architecture and .Net Framework settings for the test run. Platform: {0} .Net Framework: {1}. Go to http://go.microsoft.com/fwlink/?LinkID=330428 for more details on managing these settings. @@ -23,8 +23,8 @@ - {0} is built for Framework {1} and Platform {2}. - {0} 專為架構 {1} 及平台 {2} 建置。 + {0} would use Framework {1} and Platform {2}. + {0} 專為架構 {1} 及平台 {2} 建置。 diff --git a/src/Microsoft.TestPlatform.ObjectModel/RunSettings/RunConfiguration.cs b/src/Microsoft.TestPlatform.ObjectModel/RunSettings/RunConfiguration.cs index 6830b39109..0f241c36de 100644 --- a/src/Microsoft.TestPlatform.ObjectModel/RunSettings/RunConfiguration.cs +++ b/src/Microsoft.TestPlatform.ObjectModel/RunSettings/RunConfiguration.cs @@ -571,6 +571,7 @@ public static RunConfiguration FromXml(XmlReader reader!!) while (reader.NodeType == XmlNodeType.Element) { string elementName = reader.Name; + // TODO: make run settings nodes case insensitive? switch (elementName) { case "ResultsDirectory": diff --git a/src/Microsoft.TestPlatform.ObjectModel/SourceDetail.cs b/src/Microsoft.TestPlatform.ObjectModel/SourceDetail.cs new file mode 100644 index 0000000000..4270aa9518 --- /dev/null +++ b/src/Microsoft.TestPlatform.ObjectModel/SourceDetail.cs @@ -0,0 +1,13 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
+ +#nullable disable + +namespace Microsoft.VisualStudio.TestPlatform.ObjectModel; + +public class SourceDetail +{ + public string Source { get; internal set; } + public Architecture Architecture { get; internal set; } + public Framework Framework { get; internal set; } +} diff --git a/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DefaultTestHostManager.cs b/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DefaultTestHostManager.cs index a522c7c150..be1a4c1ac3 100644 --- a/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DefaultTestHostManager.cs +++ b/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DefaultTestHostManager.cs @@ -124,9 +124,12 @@ public TestHostConnectionInfo GetTestHostConnectionInfo() } /// - public async Task LaunchTestHostAsync(TestProcessStartInfo testHostStartInfo, CancellationToken cancellationToken) + public Task LaunchTestHostAsync(TestProcessStartInfo testHostStartInfo, CancellationToken cancellationToken) { - return await Task.Run(() => LaunchHost(testHostStartInfo, cancellationToken), cancellationToken); + // Do NOT offload this to the thread pool using Task.Run; we are already on the thread pool, + // and this would go into a queue after all the other startup tasks, meaning we would start + // the testhost much later, not immediately. + return Task.FromResult(LaunchHost(testHostStartInfo, cancellationToken)); } /// diff --git a/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DotnetTestHostManager.cs b/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DotnetTestHostManager.cs index fc5ff062ca..5700e51a7e 100644 --- a/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DotnetTestHostManager.cs +++ b/src/Microsoft.TestPlatform.TestHostProvider/Hosting/DotnetTestHostManager.cs @@ -193,9 +193,12 @@ public TestHostConnectionInfo GetTestHostConnectionInfo() } /// - public async Task LaunchTestHostAsync(TestProcessStartInfo testHostStartInfo, CancellationToken cancellationToken) + public Task LaunchTestHostAsync(TestProcessStartInfo testHostStartInfo, CancellationToken cancellationToken) { - return await Task.Run(() => LaunchHost(testHostStartInfo, cancellationToken), cancellationToken); + // Do NOT offload this to the thread pool using Task.Run; we are already on the thread pool, + // and this would go into a queue after all the other startup tasks, meaning we would start + // the testhost much later, not immediately. + return Task.FromResult(LaunchHost(testHostStartInfo, cancellationToken)); } /// @@ -664,6 +667,7 @@ private bool LaunchHost(TestProcessStartInfo testHostStartInfo, CancellationToke EqtTrace.Verbose("DotnetTestHostManager: Starting process '{0}' with command line '{1}'", testHostStartInfo.FileName, testHostStartInfo.Arguments); cancellationToken.ThrowIfCancellationRequested(); + _testHostProcess = _processHelper.LaunchProcess( testHostStartInfo.FileName, testHostStartInfo.Arguments, diff --git a/src/Microsoft.TestPlatform.Utilities/InferRunSettingsHelper.cs b/src/Microsoft.TestPlatform.Utilities/InferRunSettingsHelper.cs index 738e26be2b..b75811c1d1 100644 --- a/src/Microsoft.TestPlatform.Utilities/InferRunSettingsHelper.cs +++ b/src/Microsoft.TestPlatform.Utilities/InferRunSettingsHelper.cs @@ -495,9 +495,16 @@ private static void AddNodeIfNotPresent(XmlDocument xmlDocument, string nodeP if (root.SelectSingleNode(RunConfigurationNodePath) == null) { - // TODO: When runsettings are incomplete this will silently return, when we run just TestRequestManager we don't get full settings.
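To make the role of the new SourceDetail type above concrete, here is a hedged sketch of how it is meant to drive the per-source runsettings update added earlier in SourceDetailHelper. It is written as if it lived inside the CrossPlatEngine (the helper and the setters are internal), and the path, framework, and architecture values are made up.

```csharp
using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Utilities;
using Microsoft.VisualStudio.TestPlatform.ObjectModel;

// Hypothetical values; in the product this data comes from InferHelper via TestRequestManager.
var sourceDetail = new SourceDetail
{
    Source = @"C:\example\MSTest1.dll",
    Architecture = Architecture.X64,
    Framework = Framework.FromString(".NETCoreApp,Version=v5.0"),
};

// A bare runsettings document; the helper rewrites the RunConfiguration values per source.
var baseRunSettings = "<RunSettings><RunConfiguration></RunConfiguration></RunSettings>";

// Produces runsettings whose TargetFrameworkVersion/TargetPlatform match this particular source,
// so the proxy (and testhost) created for it gets compatible settings.
var perSourceRunSettings = SourceDetailHelper.UpdateRunSettingsFromSourceDetail(baseRunSettings, sourceDetail);
```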
- EqtTrace.Error("InferRunSettingsHelper.UpdateNodeIfNotPresent: Unable to navigate to RunConfiguration. Current node: " + xmlDocument.LocalName); - return; + if (root.Name == RunSettingsNodeName) + { + // When just a bare RunSettings root node is provided in the runsettings string, this will add the common RunConfiguration inner node. + XmlUtilities.AppendOrModifyChild(xmlDocument, RunConfigurationNodePath, RunConfigurationNodeName, innerXml: null); + } + else + { + EqtTrace.Error("InferRunSettingsHelper.UpdateNodeIfNotPresent: Unable to navigate to RunConfiguration. Current node: " + xmlDocument.LocalName); + return; + } } var node = xmlDocument.SelectSingleNode(nodePath); diff --git a/src/Microsoft.TestPlatform.VsTestConsole.TranslationLayer/VsTestConsoleProcessManager.cs b/src/Microsoft.TestPlatform.VsTestConsole.TranslationLayer/VsTestConsoleProcessManager.cs index c1f1b9ca39..8cb2706e17 100644 --- a/src/Microsoft.TestPlatform.VsTestConsole.TranslationLayer/VsTestConsoleProcessManager.cs +++ b/src/Microsoft.TestPlatform.VsTestConsole.TranslationLayer/VsTestConsoleProcessManager.cs @@ -117,7 +117,7 @@ public void StartProcess(ConsoleParameters consoleParameters) UseShellExecute = false, CreateNoWindow = true, RedirectStandardOutput = true, - RedirectStandardError = true + RedirectStandardError = true, }; EqtTrace.Verbose("VsTestCommandLineWrapper.StartProcess: Process Start Info {0} {1}", info.FileName, info.Arguments); diff --git a/src/vstest.console/CommandLine/InferHelper.cs b/src/vstest.console/CommandLine/InferHelper.cs index 2c80aedf40..08a4e5347d 100644 --- a/src/vstest.console/CommandLine/InferHelper.cs +++ b/src/vstest.console/CommandLine/InferHelper.cs @@ -26,84 +26,128 @@ internal InferHelper(IAssemblyMetadataProvider assemblyMetadataProvider) /// /// Determines Architecture from sources. /// - public Architecture AutoDetectArchitecture(IList sources, IDictionary sourcePlatforms, Architecture defaultArchitecture) + public Architecture AutoDetectArchitecture(IList sources, Architecture defaultArchitecture, out IDictionary sourceToPlatformMap) { - var architecture = defaultArchitecture; + sourceToPlatformMap = new Dictionary(); + if (sources == null || sources.Count == 0) + return defaultArchitecture; + + // Set the default for all sources. + foreach (var source in sources) + { + // TODO: Add default architecture to runtime providers info, or something similar, so that we can have test + // cases without any sources. Otherwise change the test AutoDetectArchitectureShouldReturnDefaultArchitectureOnNullItemInSources, + // because this condition is what makes that test happy. + if (source != null) + { + sourceToPlatformMap.Add(source, defaultArchitecture); + } + } + try { - if (sources != null && sources.Count > 0) + Architecture? commonArchitecture = null; + foreach (string source in sources) { - Architecture? finalArch = null; - foreach (string source in sources) + try { - Architecture arch; - if (IsDotNetAssembly(source)) + Architecture detectedArchitecture; + if (IsDllOrExe(source)) { - arch = _assemblyMetadataProvider.GetArchitecture(source); + detectedArchitecture = _assemblyMetadataProvider.GetArchitecture(source); + + if (detectedArchitecture == Architecture.AnyCPU) + { + // This is an AnyCPU .NET assembly; this source should run using the default architecture, + // which we've already set for the source.
+ EqtTrace.Info("Determined platform for source '{0}' was AnyCPU and it will use the default platform {1}.", source, defaultArchitecture); + } + else + { + sourceToPlatformMap[source] = detectedArchitecture; + EqtTrace.Info("Determined platform for source '{0}' was '{1}'.", source, detectedArchitecture); + } } else { - // Set AnyCPU for non dotnet test sources (js, py and other). Otherwise warning will - // show up if there is mismatch with user provided platform. - arch = Architecture.AnyCPU; + // This is a non-dll source; it should run using the default architecture, + // which we've already set for the source. + EqtTrace.Info("No platform was determined for source '{0}' because it is not a dll or an executable.", source); + + // This source has no associated architecture so it does not help us determine a common architecture for + // all the sources, so we continue to the next one. + sourceToPlatformMap[source] = defaultArchitecture; + continue; } - EqtTrace.Info("Determined platform for source '{0}' is '{1}'", source, arch); - sourcePlatforms[source] = arch; if (Architecture.AnyCPU.Equals(arch)) + if (Architecture.AnyCPU.Equals(detectedArchitecture)) { - // If arch is AnyCPU ignore it. + // The architecture of the source is AnyCPU and so we can skip to the next one, + // because it does not help us determine a common architecture for all the sources. continue; } - if (finalArch == null) + // This is the first source that provided some architecture, so use that as a candidate + // for the common architecture. + if (commonArchitecture == null) { - finalArch = arch; + commonArchitecture = detectedArchitecture; continue; } - if (!finalArch.Equals(arch)) + // The detected architecture is different from the common architecture. So at least + // one of the sources is incompatible with the others. Use the default architecture as the common + // fallback. + if (!commonArchitecture.Equals(detectedArchitecture)) { - finalArch = defaultArchitecture; - EqtTrace.Info("Conflict in platform architecture, using default platform:{0}", finalArch); + commonArchitecture = defaultArchitecture; } } - - if (finalArch != null) + catch (Exception ex) { - architecture = (Architecture)finalArch; + EqtTrace.Error("Failed to determine platform for source: {0}, using default: {1}, exception: {2}", source, defaultArchitecture, ex); + sourceToPlatformMap[source] = defaultArchitecture; } } + + if (commonArchitecture != null) + { + EqtTrace.Info("Determined platform for all sources: {0}", commonArchitecture); + return commonArchitecture.Value; + } + + EqtTrace.Info("None of the sources provided any runnable platform, using the default platform: {0}", defaultArchitecture); + + return defaultArchitecture; } catch (Exception ex) { - EqtTrace.Error("Failed to determine platform: {0}, using default: {1}", ex, architecture); + EqtTrace.Error("Failed to determine platform for all sources: {0}, using default: {1}", ex, defaultArchitecture); + return defaultArchitecture; } - - EqtTrace.Info("Determined platform for all sources: {0}", architecture); - - return architecture; } /// /// Determines Framework from sources.
/// - public Framework AutoDetectFramework(IList sources, IDictionary sourceFrameworkVersions) + public Framework AutoDetectFramework(IList sources, out IDictionary sourceToFrameworkMap) { + sourceToFrameworkMap = new Dictionary(); Framework framework = Framework.DefaultFramework; + + if (sources == null || sources.Count == 0) + return framework; + try { - if (sources != null && sources.Count > 0) + var finalFx = DetermineFrameworkName(sources, out sourceToFrameworkMap, out var conflictInFxIdentifier); + framework = Framework.FromString(finalFx.FullName); + if (conflictInFxIdentifier) { - var finalFx = DetermineFrameworkName(sources, sourceFrameworkVersions, out var conflictInFxIdentifier); - framework = Framework.FromString(finalFx.FullName); - if (conflictInFxIdentifier) - { - // TODO Log to console and client. - EqtTrace.Info( - "conflicts in Framework identifier of provided sources(test assemblies), using default framework:{0}", - framework); - } + // TODO Log to console and client. + EqtTrace.Info( + "conflicts in Framework identifier of provided sources(test assemblies), using default framework:{0}", + framework); } } catch (Exception ex) @@ -116,62 +160,75 @@ public Framework AutoDetectFramework(IList sources, IDictionary sources, IDictionary sourceFrameworkVersions, out bool conflictInFxIdentifier) + private FrameworkName DetermineFrameworkName(IEnumerable sources, out IDictionary sourceToFrameworkMap, out bool conflictInFxIdentifier) { + sourceToFrameworkMap = new Dictionary(); + + var defaultFramework = Framework.DefaultFramework; FrameworkName finalFx = null; conflictInFxIdentifier = false; foreach (string source in sources) { - FrameworkName fx; - if (IsDotNetAssembly(source)) + try { - fx = _assemblyMetadataProvider.GetFrameWork(source); - } - else - { - // TODO What else to do with appx, js and other? - var extension = Path.GetExtension(source); - if (extension.Equals(".js", StringComparison.OrdinalIgnoreCase)) + FrameworkName fx; + if (IsDllOrExe(source)) { - // Currently to run tests for .NET Core, assembly need dependency to Microsoft.NET.Test.Sdk. Which is not - // possible for js files. So using default .NET Full framework version. - fx = new FrameworkName(Constants.DotNetFramework40); + fx = _assemblyMetadataProvider.GetFrameWork(source); } else { - fx = extension.Equals(".appx", StringComparison.OrdinalIgnoreCase) - || extension.Equals(".msix", StringComparison.OrdinalIgnoreCase) - || extension.Equals(".appxrecipe", StringComparison.OrdinalIgnoreCase) - ? new FrameworkName(Constants.DotNetFrameworkUap10) - : new FrameworkName(Framework.DefaultFramework.Name); + // TODO What else to do with appx, js and other? + var extension = Path.GetExtension(source); + if (extension.Equals(".js", StringComparison.OrdinalIgnoreCase)) + { + // Currently to run tests for .NET Core, assembly need dependency to Microsoft.NET.Test.Sdk. Which is not + // possible for js files. So using default .NET Full framework version. + fx = new FrameworkName(Constants.DotNetFramework40); + } + else + { + fx = extension.Equals(".appx", StringComparison.OrdinalIgnoreCase) + || extension.Equals(".msix", StringComparison.OrdinalIgnoreCase) + || extension.Equals(".appxrecipe", StringComparison.OrdinalIgnoreCase) + ? 
new FrameworkName(Constants.DotNetFrameworkUap10) + : new FrameworkName(Framework.DefaultFramework.Name); + } } - } - sourceFrameworkVersions[source] = Framework.FromString(fx.FullName); - if (finalFx == null) - { - finalFx = fx; - continue; - } + sourceToFrameworkMap.Add(source, Framework.FromString(fx.FullName)); - if (finalFx.Identifier.Equals(fx.Identifier)) - { - // Use latest version. - if (finalFx.Version < fx.Version) + if (finalFx == null) { finalFx = fx; + continue; + } + + if (finalFx.Identifier.Equals(fx.Identifier)) + { + // Use latest version. + if (finalFx.Version < fx.Version) + { + finalFx = fx; + } + } + else + { + conflictInFxIdentifier = true; + finalFx = new FrameworkName(defaultFramework.Name); } } - else + catch (Exception ex) { - conflictInFxIdentifier = true; - finalFx = new FrameworkName(Framework.DefaultFramework.Name); + sourceToFrameworkMap.Add(source, defaultFramework); + EqtTrace.Error("Failed to determine framework for source: {0} using default framework: {1}, exception: {2}", source, defaultFramework.Name, ex); } } + return finalFx; } - private bool IsDotNetAssembly(string filePath) + private bool IsDllOrExe(string filePath) { var extType = Path.GetExtension(filePath); return extType != null && (extType.Equals(".dll", StringComparison.OrdinalIgnoreCase) || diff --git a/src/vstest.console/TestPlatformHelpers/TestRequestManager.cs b/src/vstest.console/TestPlatformHelpers/TestRequestManager.cs index e666b99505..09565da14a 100644 --- a/src/vstest.console/TestPlatformHelpers/TestRequestManager.cs +++ b/src/vstest.console/TestPlatformHelpers/TestRequestManager.cs @@ -160,6 +160,10 @@ public void DiscoverTests( { EqtTrace.Info("TestRequestManager.DiscoverTests: Discovery tests started."); + // TODO: Normalize rest of the data on the request as well + discoveryPayload.Sources = discoveryPayload.Sources?.Distinct().ToList() ?? new List(); + discoveryPayload.RunSettings ??= ""; + var runsettings = discoveryPayload.RunSettings; if (discoveryPayload.TestPlatformOptions != null) @@ -170,13 +174,22 @@ public void DiscoverTests( var requestData = GetRequestData(protocolConfig); if (UpdateRunSettingsIfRequired( runsettings, - discoveryPayload.Sources?.ToList(), + discoveryPayload.Sources.ToList(), discoveryEventsRegistrar, - out string updatedRunsettings)) + out string updatedRunsettings, + out IDictionary sourceToArchitectureMap, + out IDictionary sourceToFrameworkMap)) { runsettings = updatedRunsettings; } + var sourceToSourceDetailMap = discoveryPayload.Sources.Select(source => new SourceDetail + { + Source = source, + Architecture = sourceToArchitectureMap[source], + Framework = sourceToFrameworkMap[source], + }).ToDictionary(k => k.Source); + var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettings); var batchSize = runConfiguration.BatchSize; var testCaseFilterFromRunsettings = runConfiguration.TestCaseFilter; @@ -213,7 +226,8 @@ public void DiscoverTests( _currentDiscoveryRequest = _testPlatform.CreateDiscoveryRequest( requestData, criteria, - discoveryPayload.TestPlatformOptions); + discoveryPayload.TestPlatformOptions, + sourceToSourceDetailMap); discoveryEventsRegistrar?.RegisterDiscoveryEvents(_currentDiscoveryRequest); // Notify start of discovery start. 
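For quick reference, the per-source framework fallback in InferHelper.DetermineFrameworkName above reduces to an extension-based mapping; dll and exe sources are resolved through IAssemblyMetadataProvider.GetFrameWork instead. A condensed sketch (reusing Constants and Framework from the patch and System.Runtime.Versioning.FrameworkName; illustrative, not part of the patch):

    // Only reached for sources that are not a dll or exe; assemblies are resolved from metadata.
    static FrameworkName InferFrameworkForNonAssembly(string source) =>
        (Path.GetExtension(source) ?? string.Empty).ToLowerInvariant() switch
        {
            // js cannot reference Microsoft.NET.Test.Sdk, so assume full .NET Framework 4.0.
            ".js" => new FrameworkName(Constants.DotNetFramework40),
            // UWP-style packages run on the UAP 10 framework.
            ".appx" or ".msix" or ".appxrecipe" => new FrameworkName(Constants.DotNetFrameworkUap10),
            // Everything else falls back to the default framework.
            _ => new FrameworkName(Framework.DefaultFramework.Name),
        };
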
@@ -268,11 +282,20 @@ public void RunTests( runsettings, sources, testRunEventsRegistrar, - out string updatedRunsettings)) + out string updatedRunsettings, + out IDictionary sourceToArchitectureMap, + out IDictionary sourceToFrameworkMap)) { runsettings = updatedRunsettings; } + var sourceToSourceDetailMap = sources.Select(source => new SourceDetail + { + Source = source, + Architecture = sourceToArchitectureMap[source], + Framework = sourceToFrameworkMap[source], + }).ToDictionary(k => k.Source); + if (InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(runsettings)) { throw new SettingsException( @@ -299,6 +322,10 @@ public void RunTests( // Get Fakes data collector settings. if (!string.Equals(Environment.GetEnvironmentVariable("VSTEST_SKIP_FAKES_CONFIGURATION"), "1")) { + // TODO: Are the sources in _commandLineOptions any different from the ones we get on the request? + // because why would they be? We never pass that forward to the executor, so this probably should + // just look at sources anyway. + // The commandline options do not have sources in design time mode, // and so we fall back to using sources instead. if (_commandLineOptions.Sources.Any()) @@ -317,6 +344,11 @@ public void RunTests( } } + // We can have either a run that contains string as test container (usually a DLL), which is later resolved to the actual path + // and all tests that match filter are run from that container. + // + // OR we already did discovery and have a list of TestCases that have concrete test method information + // and so we only pass those. TestCase also has the test container path (usually a DLL). TestRunCriteria runCriteria = testRunRequestPayload.Sources != null && testRunRequestPayload.Sources.Any() ? new TestRunCriteria( testRunRequestPayload.Sources, @@ -348,7 +380,8 @@ public void RunTests( requestData, runCriteria, testRunEventsRegistrar, - testRunRequestPayload.TestPlatformOptions); + testRunRequestPayload.TestPlatformOptions, + sourceToSourceDetailMap); EqtTrace.Info("TestRequestManager.RunTests: run tests completed."); } finally @@ -427,15 +460,27 @@ public void StartTestSession( _telemetryOptedIn = payload.TestPlatformOptions.CollectMetrics; } + payload.Sources ??= new List(); + payload.RunSettings ??= ""; + if (UpdateRunSettingsIfRequired( payload.RunSettings, payload.Sources, null, - out string updatedRunsettings)) + out string updatedRunsettings, + out IDictionary sourceToArchitectureMap, + out IDictionary sourceToFrameworkMap)) { payload.RunSettings = updatedRunsettings; } + var sourceToSourceDetailMap = payload.Sources.Select(source => new SourceDetail + { + Source = source, + Architecture = sourceToArchitectureMap[source], + Framework = sourceToFrameworkMap[source], + }).ToDictionary(k => k.Source); + if (InferRunSettingsHelper.AreRunSettingsCollectorsIncompatibleWithTestSettings(payload.RunSettings)) { throw new SettingsException( @@ -465,7 +510,8 @@ public void StartTestSession( TestHostLauncher = testHostLauncher }; - if (!_testPlatform.StartTestSession(requestData, criteria, eventsHandler)) + var testSessionStarted = _testPlatform.StartTestSession(requestData, criteria, eventsHandler, sourceToSourceDetailMap); + if (!testSessionStarted) { EqtTrace.Warning("TestRequestManager.StartTestSession: Unable to start test session."); } @@ -604,100 +650,160 @@ private void Dispose(bool disposing) } private bool UpdateRunSettingsIfRequired( - string runsettingsXml, + string runsettingsXml!!, IList sources, IBaseTestEventsRegistrar registrar, - out string 
updatedRunSettingsXml) + out string updatedRunSettingsXml, + out IDictionary sourceToArchitectureMap, + out IDictionary sourceToFrameworkMap) { bool settingsUpdated = false; updatedRunSettingsXml = runsettingsXml; - var sourcePlatforms = new Dictionary(); - var sourceFrameworks = new Dictionary(); - if (!string.IsNullOrEmpty(runsettingsXml)) + // TargetFramework is full CLR. Set DesignMode based on current context. + using var stream = new StringReader(runsettingsXml); + using var reader = XmlReader.Create( + stream, + XmlRunSettingsUtilities.ReaderSettings); + var document = new XmlDocument(); + document.Load(reader); + var navigator = document.CreateNavigator(); + var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettingsXml); + var loggerRunSettings = XmlRunSettingsUtilities.GetLoggerRunSettings(runsettingsXml) + ?? new LoggerRunSettings(); + + + // True when runsettings don't set target framework. False when runsettings force target framework + // in both cases the sourceToFrameworkMap is populated with the real frameworks as we inferred them + // from dlls. For sources like .js, we return the default framework. + var frameworkWasAutodetected = UpdateFrameworkInRunSettingsIfRequired( + document, + navigator, + sources, + registrar, + out Framework chosenFramework, + out sourceToFrameworkMap); + + settingsUpdated |= frameworkWasAutodetected; + var frameworkSetByRunsettings = !frameworkWasAutodetected; + + // Before MULTI_TFM feature the sourceToArchitectureMap and sourceToFrameworkMap were only used as informational + // to be able to do this compatibility check and print warning. And in the later steps only chosenPlatform, chosenFramework + // were used, that represented the single architecture and framework to be used. + // + // After MULTI_TFM sourceToArchitectureMap and sourceToFrameworkMap are the source of truth, and are propagated forward, + // so when we want to revert to the older behavior we need to re-enable the check, and unify all the architecture and + // framework entries to the same chosen value. + var disableMultiTfm = FeatureFlag.Instance.IsSet(FeatureFlag.DISABLE_MULTI_TFM_RUN); + + // Choose default architecture based on the framework. + // For a run with mixed tfms enabled, or .NET "Core", the default platform architecture should be based on the process. + // This will choose x64 by default for both .NET and .NET Framework, and avoids choosing x86 for a mixed + // run, so we will run via .NET testhost.exe, and not via dotnet testhost.dll. + Architecture defaultArchitecture = Architecture.X86; + if (!disableMultiTfm + || chosenFramework.Name.IndexOf("netstandard", StringComparison.OrdinalIgnoreCase) >= 0 + || chosenFramework.Name.IndexOf("netcoreapp", StringComparison.OrdinalIgnoreCase) >= 0 + // This is a special case for 1 version of Nuget.Frameworks that was shipped with using identifier NET5 instead of NETCoreApp5 for .NET 5. + || chosenFramework.Name.IndexOf("net5", StringComparison.OrdinalIgnoreCase) >= 0) { - // TargetFramework is full CLR. Set DesignMode based on current context. - using var stream = new StringReader(runsettingsXml); - using var reader = XmlReader.Create( - stream, - XmlRunSettingsUtilities.ReaderSettings); - var document = new XmlDocument(); - document.Load(reader); - var navigator = document.CreateNavigator(); - var runConfiguration = XmlRunSettingsUtilities.GetRunConfigurationNode(runsettingsXml); - var loggerRunSettings = XmlRunSettingsUtilities.GetLoggerRunSettings(runsettingsXml) - ?? 
new LoggerRunSettings(); - - settingsUpdated |= UpdateFramework( - document, - navigator, - sources, - sourceFrameworks, - registrar, - out Framework chosenFramework); - - // Choose default architecture based on the framework. - // For .NET core, the default platform architecture should be based on the process. - Architecture defaultArchitecture = Architecture.X86; - if (chosenFramework.Name.IndexOf("netstandard", StringComparison.OrdinalIgnoreCase) >= 0 - || chosenFramework.Name.IndexOf("netcoreapp", StringComparison.OrdinalIgnoreCase) >= 0 - // This is a special case for 1 version of Nuget.Frameworks that was shipped with using identifier NET5 instead of NETCoreApp5 for .NET 5. - || chosenFramework.Name.IndexOf("net5", StringComparison.OrdinalIgnoreCase) >= 0) + // We are running in vstest.console that is either started via dotnet + // or via vstest.console.exe. The architecture of the current process + // determines the default architecture to use for AnyCPU dlls + // and other sources that don't dictate architecture (e.g. js files). + // This way starting 32-bit dotnet will try to run as 32-bit testhost + // using the runtime that was installed with that 32-bit dotnet SDK. + // Similarly ARM64 vstest.console will start ARM64 testhost, making sure + // that we choose the architecture that we already know we can run as. + // 64-bit SDK when running from 64-bit dotnet process. + // As default architecture we specify the expected test host architecture, + // it can be specified by user on the command line with --arch or through runsettings. + // If it's not specified by user will be filled by current processor architecture; + // should be the same as SDK. + defaultArchitecture = GetDefaultArchitecture(runConfiguration); + } + else + { + if (_environment.Architecture == PlatformArchitecture.ARM64 && _environment.OperatingSystem == PlatformOperatingSystem.Windows) { - // We are running in vstest.console that is either started via dotnet - // or via vstest.console.exe. The architecture of the current process - // determines the default architecture to use for AnyCPU dlls - // and other sources that don't dictate architecture (e.g. js files). - // This way starting 32-bit dotnet will try to run as 32-bit testhost - // using the runtime that was installed with that 32-bit dotnet SDK. - // Similarly ARM64 vstest.console will start ARM64 testhost, making sure - // that we choose the architecture that we already know we can run as. - // 64-bit SDK when running from 64-bit dotnet process. + // For non .NET Core containers only on win ARM64 we want to run AnyCPU using current process architecture as a default + // for both vstest.console.exe and design mode scenario. // As default architecture we specify the expected test host architecture, - // it can be specified by user on the command line with --arch or through runsettings. - // If it's not specified by user will be filled by current processor architecture; - // should be the same as SDK. + // it can be specified by user on the command line with /Platform or through runsettings. + // If it's not specified by user will be filled by current processor architecture. 
defaultArchitecture = GetDefaultArchitecture(runConfiguration); } - else - { - if (_environment.Architecture == PlatformArchitecture.ARM64 && _environment.OperatingSystem == PlatformOperatingSystem.Windows) - { - // For non .NET Core containers only on win ARM64 we want to run AnyCPU using current process architecture as a default - // for both vstest.console.exe and design mode scenario. - // As default architecture we specify the expected test host architecture, - // it can be specified by user on the command line with /Platform or through runsettings. - // If it's not specified by user will be filled by current processor architecture. - defaultArchitecture = GetDefaultArchitecture(runConfiguration); - } - // For all other scenario we keep the old default Architecture.X86. - } + // For all other scenarios we keep the old default Architecture.X86. + } - EqtTrace.Verbose($"TestRequestManager.UpdateRunSettingsIfRequired: Default architecture: {defaultArchitecture} IsDefaultTargetArchitecture: {RunSettingsHelper.Instance.IsDefaultTargetArchitecture}, Current process architecture: {_processHelper.GetCurrentProcessArchitecture()} OperatingSystem: {_environment.OperatingSystem}."); + EqtTrace.Verbose($"TestRequestManager.UpdateRunSettingsIfRequired: Default architecture: {defaultArchitecture} IsDefaultTargetArchitecture: {RunSettingsHelper.Instance.IsDefaultTargetArchitecture}, Current process architecture: {_processHelper.GetCurrentProcessArchitecture()} OperatingSystem: {_environment.OperatingSystem}."); - settingsUpdated |= UpdatePlatform( - document, - navigator, - sources, - sourcePlatforms, - defaultArchitecture, - out Architecture chosenPlatform); + // True when runsettings don't set platformk. False when runsettings force platform + // in both cases the sourceToArchitectureMap is populated with the real architecture as we inferred it + // from dlls. For sources like .js, we return the default architecture. + var platformWasAutodetected = UpdatePlatform( + document, + navigator, + sources, + defaultArchitecture, + out Architecture chosenPlatform, + out sourceToArchitectureMap); + + settingsUpdated |= platformWasAutodetected; + var platformSetByRunsettings = !platformWasAutodetected; + + // Before MULTI_TFM feature the sourceToArchitectureMap and sourceToFrameworkMap were only used as informational + // to be able to do this compatibility check and print warning. And in the later steps only chosenPlatform, chosenFramework + // were used, that represented the single architecture and framework to be used. + // + // After MULTI_TFM sourceToArchitectureMap and sourceToFrameworkMap are the source of truth, and are propagated forward, + // so when we want to revert to the older behavior we need to re-enable the check, and unify all the architecture and + // framework entries to the same chosen value. + + // Do the check only when we enable MULTI_TFM and platform or framework are forced by settings, because then we maybe have some sources + // that are not compatible with the chosen settings. And do the check always when MULTI_TFM is disabled, because then we want to warn every + // time there are multiple tfms or architectures mixed. 
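            // Summary of the surrounding logic (sketch, not part of the patch).
            //
            // Default architecture chosen above:
            //   MULTI_TFM disabled | chosen framework                | environment      | defaultArchitecture
            //   no                 | (any)                           | (any)            | GetDefaultArchitecture (process based)
            //   yes                | netstandard / netcoreapp / net5 | (any)            | GetDefaultArchitecture (process based)
            //   yes                | anything else                   | Windows ARM64    | GetDefaultArchitecture (process based)
            //   yes                | anything else                   | everything else  | X86
            //
            // Compatibility check below (the condition simplifies to: disableMultiTfm || platformSetByRunsettings || frameworkSetByRunsettings):
            //   MULTI_TFM disabled | platform or framework forced by runsettings | check runs
            //   yes                | (any)                                       | yes
            //   no                 | yes                                         | yes
            //   no                 | no                                          | no (each source keeps its own framework and architecture)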
+ if (disableMultiTfm || (!disableMultiTfm && (platformSetByRunsettings || frameworkSetByRunsettings))) + { CheckSourcesForCompatibility( chosenFramework, chosenPlatform, defaultArchitecture, - sourcePlatforms, - sourceFrameworks, + sourceToArchitectureMap, + sourceToFrameworkMap, registrar); - settingsUpdated |= UpdateDesignMode(document, runConfiguration); - settingsUpdated |= UpdateCollectSourceInformation(document, runConfiguration); - settingsUpdated |= UpdateTargetDevice(navigator, document); - settingsUpdated |= AddOrUpdateConsoleLogger(document, runConfiguration, loggerRunSettings); + } - updatedRunSettingsXml = navigator.OuterXml; + // The sourceToArchitectureMap contains the real architecture, overwrite it by the value chosen by runsettings, to force one unified platform to be used. + if (disableMultiTfm || platformSetByRunsettings) + { + // Copy the list of key, otherwise we will get collection changed exception. + var keys = sourceToArchitectureMap.Keys.ToList(); + foreach (var key in keys) + { + sourceToArchitectureMap[key] = chosenPlatform; + } + } + + // The sourceToFrameworkMap contains the real framework, overwrite it by the value chosen by runsettings, to force one unified framework to be used. + if (disableMultiTfm || frameworkSetByRunsettings) + { + // Copy the list of key, otherwise we will get collection changed exception. + var keys = sourceToFrameworkMap.Keys.ToList(); + foreach (var key in keys) + { + sourceToFrameworkMap[key] = chosenFramework; + } } + settingsUpdated |= UpdateDesignMode(document, runConfiguration); + settingsUpdated |= UpdateCollectSourceInformation(document, runConfiguration); + settingsUpdated |= UpdateTargetDevice(navigator, document); + settingsUpdated |= AddOrUpdateConsoleLogger(document, runConfiguration, loggerRunSettings); + + updatedRunSettingsXml = navigator.OuterXml; + return settingsUpdated; Architecture GetDefaultArchitecture(RunConfiguration runConfiguration) @@ -825,72 +931,97 @@ private bool UpdatePlatform( XmlDocument document, XPathNavigator navigator, IList sources, - IDictionary sourcePlatforms, Architecture defaultArchitecture, - out Architecture chosenPlatform) + out Architecture commonPlatform, + out IDictionary sourceToPlatformMap) { - // Get platform from sources. - var inferedPlatform = _inferHelper.AutoDetectArchitecture( - sources, - sourcePlatforms, - defaultArchitecture); - - EqtTrace.Info($"Infered platform '{inferedPlatform}'."); + // Get platform from runsettings. If runsettings specify a platform, we don't need to + // auto detect it and update it, because it is forced by run settings to be a single given platform + // for all the provided sources. + bool platformSetByRunsettings = IsPlatformSetByRunSettings(navigator, out commonPlatform); - // Get platform from runsettings. - bool updatePlatform = IsAutoPlatformDetectRequired(navigator, out chosenPlatform); - - // Update platform if required. For command line scenario update happens in - // ArgumentProcessor. - if (updatePlatform) + if (platformSetByRunsettings) { - EqtTrace.Info($"Platform update to '{inferedPlatform}' required."); - InferRunSettingsHelper.UpdateTargetPlatform( - document, - inferedPlatform.ToString(), - overwrite: true); - chosenPlatform = inferedPlatform; + EqtTrace.Info($"Platform is set by runsettings to be '{commonPlatform}' for all sources."); + // Autodetect platforms from sources, so we can check that they are compatible with the settings, and report + // incompatibilities as warnings. 
+ // + // DO NOT overwrite the common platform, the one forced by runsettings should be used. + var _ = _inferHelper.AutoDetectArchitecture(sources, defaultArchitecture, out sourceToPlatformMap); + + // If we would not want to report the incompatibilities later, we would simply return dictionary populated to the + // platform that is set by the settings. + // + // sourceToPlatformMap = new Dictionary(); + // foreach (var source in sources) + // { + // sourceToPlatformMap.Add(source, commonPlatform); + // } + + // Return false, because we did not update runsettings. + return false; } - return updatePlatform; + // Autodetect platform from sources, and return a single common platform. + commonPlatform = _inferHelper.AutoDetectArchitecture(sources, defaultArchitecture, out sourceToPlatformMap); + InferRunSettingsHelper.UpdateTargetPlatform(document, commonPlatform.ToString(), overwrite: true); + + EqtTrace.Info($"Platform was updated to '{commonPlatform}'."); + // Return true because we updated runsettings. + return true; } - private bool UpdateFramework( + private bool UpdateFrameworkInRunSettingsIfRequired( XmlDocument document, XPathNavigator navigator, IList sources, - IDictionary sourceFrameworks, IBaseTestEventsRegistrar registrar, - out Framework chosenFramework) + out Framework commonFramework, + out IDictionary sourceToFrameworkMap) { - // Get framework from sources. - // This looks like you can optimize it by moving it down to if (updateFramework), but it has a side-effect of - // populating the sourceFrameworks, which is later checked when source compatibility check is done against the value - // that we either inferred as the common framework, or that is forced in runsettings. - var inferedFramework = _inferHelper.AutoDetectFramework(sources, sourceFrameworks); - - // See if framework is forced by runsettings. If not autodetect it. - bool updateFramework = IsAutoFrameworkDetectRequired(navigator, out chosenFramework); - - // Update framework if required. For command line scenario update happens in - // ArgumentProcessor. - if (updateFramework) + bool frameworkSetByRunsettings = IsFrameworkSetByRunSettings(navigator, out commonFramework); + + if (frameworkSetByRunsettings) { - InferRunSettingsHelper.UpdateTargetFramework( - document, - inferedFramework.ToString(), - overwrite: true); - chosenFramework = inferedFramework; + // Autodetect frameworks from sources, so we can check that they are compatible with the settings, and report + // incompatibilities as warnings. + // + // DO NOT overwrite the common framework, the one forced by runsettings should be used. + var _ = _inferHelper.AutoDetectFramework(sources, out sourceToFrameworkMap); + + // If we would not want to report the incompatibilities later, we would simply return dictionary populated to the + // framework that is set by the settings. + // + // sourceToFrameworkMap = new Dictionary(); + // foreach (var source in sources) + // { + // sourceToFrameworkMap.Add(source, commonFramework); + // } + + WriteWarningForNetFramework35IsUnsupported(registrar, commonFramework); + // Return false because we did not update runsettings. + return false; } + // Autodetect framework from sources, and return a single common platform. 
+ commonFramework = _inferHelper.AutoDetectFramework(sources, out sourceToFrameworkMap); + InferRunSettingsHelper.UpdateTargetFramework(document, commonFramework.ToString(), overwrite: true); + + WriteWarningForNetFramework35IsUnsupported(registrar, commonFramework); + + // Return true because we updated runsettings. + return true; + } + + private static void WriteWarningForNetFramework35IsUnsupported(IBaseTestEventsRegistrar registrar, Framework commonFramework) + { // Raise warnings for unsupported frameworks. - if (ObjectModel.Constants.DotNetFramework35.Equals(chosenFramework.Name)) + // TODO: Look at the sourceToFrameworkMap, and report paths to the sources that use that framework, rather than the chosen framework + if (ObjectModel.Constants.DotNetFramework35.Equals(commonFramework.Name)) { EqtTrace.Warning("TestRequestManager.UpdateRunSettingsIfRequired: throw warning on /Framework:Framework35 option."); registrar.LogWarning(Resources.Resources.Framework35NotSupported); } - - return updateFramework; } /// @@ -955,7 +1086,8 @@ private void RunTests( IRequestData requestData, TestRunCriteria testRunCriteria, ITestRunEventsRegistrar testRunEventsRegistrar, - TestPlatformOptions options) + TestPlatformOptions options, + Dictionary sourceToSourceDetailMap) { // Make sure to run the run request inside a lock as the below section is not thread-safe. // TranslationLayer can process faster as it directly gets the raw un-serialized messages @@ -969,7 +1101,8 @@ private void RunTests( _currentTestRunRequest = _testPlatform.CreateTestRunRequest( requestData, testRunCriteria, - options); + options, + sourceToSourceDetailMap); _testRunResultAggregator.RegisterTestRunEvents(_currentTestRunRequest); testRunEventsRegistrar?.RegisterTestRunEvents(_currentTestRunRequest); @@ -1001,61 +1134,80 @@ private void RunTests( } } - private bool IsAutoFrameworkDetectRequired( + /// + /// Check runsettings, to see if framework was specified by the user, if yes then use that for all sources. + /// This method either looks at runsettings directly when running as a server (DesignMode / IDE / via VSTestConsoleWrapper, or how you wanna call it) + /// or uses the pre-parsed runsettings when in console mode. 
+ /// + /// + /// + private bool IsFrameworkSetByRunSettings( XPathNavigator navigator, out Framework chosenFramework) { - bool required = true; - chosenFramework = null; + if (_commandLineOptions.IsDesignMode) { - bool isValidFx = - InferRunSettingsHelper.TryGetFrameworkXml( - navigator, - out var frameworkFromrunsettingsXml); - required = !isValidFx || string.IsNullOrWhiteSpace(frameworkFromrunsettingsXml); - if (!required) + bool isValidFrameworkXml = InferRunSettingsHelper.TryGetFrameworkXml(navigator, out var frameworkXml); + var runSettingsHaveValidFramework = isValidFrameworkXml && !string.IsNullOrWhiteSpace(frameworkXml); + if (runSettingsHaveValidFramework) { - chosenFramework = Framework.FromString(frameworkFromrunsettingsXml); + // TODO: this should just ask the runsettings to give that value so we always parse it the same way + chosenFramework = Framework.FromString(frameworkXml); + return true; } + + chosenFramework = Framework.DefaultFramework; + return false; } - else if (!_commandLineOptions.IsDesignMode - && _commandLineOptions.FrameworkVersionSpecified) + + if (_commandLineOptions.FrameworkVersionSpecified) { - required = false; chosenFramework = _commandLineOptions.TargetFrameworkVersion; + return true; } - return required; + chosenFramework = Framework.DefaultFramework; + return false; } - private bool IsAutoPlatformDetectRequired( - XPathNavigator navigator, - out Architecture chosenPlatform) + /// + /// Check runsettings, to see if platform was specified by the user, if yes then use that for all sources. + /// This method either looks at runsettings directly when running as a server (DesignMode / IDE / via VSTestConsoleWrapper, or how you wanna call it) + /// or uses the pre-parsed runsettings when in console mode. + /// + /// + /// + private bool IsPlatformSetByRunSettings( + XPathNavigator navigator, out Architecture chosenPlatform) { - bool required = true; - chosenPlatform = Architecture.Default; if (_commandLineOptions.IsDesignMode) { - bool isValidPlatform = InferRunSettingsHelper.TryGetPlatformXml( + bool isValidPlatformXml = InferRunSettingsHelper.TryGetPlatformXml( navigator, out var platformXml); - required = !isValidPlatform || string.IsNullOrWhiteSpace(platformXml); - if (!required) + bool runSettingsHaveValidPlatform = isValidPlatformXml && !string.IsNullOrWhiteSpace(platformXml); + if (runSettingsHaveValidPlatform) { - chosenPlatform = (Architecture)Enum.Parse( - typeof(Architecture), - platformXml, true); + // TODO: this should be checking if the enum has the value specified, or ideally just ask the runsettings to give that value + // so we parse the same way always + chosenPlatform = (Architecture)Enum.Parse(typeof(Architecture), platformXml, ignoreCase: true); + return true; } + + chosenPlatform = Architecture.Default; + return false; } - else if (!_commandLineOptions.IsDesignMode && _commandLineOptions.ArchitectureSpecified) + + if (_commandLineOptions.ArchitectureSpecified) { - required = false; chosenPlatform = _commandLineOptions.TargetArchitecture; + return true; } - return required; + chosenPlatform = Architecture.Default; + return false; } /// @@ -1220,6 +1372,7 @@ private IRequestData GetRequestData(ProtocolConfig protocolConfig) private List GetSources(TestRunRequestPayload testRunRequestPayload) { + // TODO: This should also use hashset to only return distinct sources. 
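        // One possible shape for that TODO (illustrative sketch, not part of the patch; assumes the
        // payload's Sources and TestCases properties that this method reads from):
        //   var distinct = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
        //   distinct.UnionWith(testRunRequestPayload.Sources ?? Enumerable.Empty<string>());
        //   distinct.UnionWith(testRunRequestPayload.TestCases?.Select(tc => tc.Source) ?? Enumerable.Empty<string>());
        //   return distinct.ToList();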
List sources = new(); if (testRunRequestPayload.Sources != null && testRunRequestPayload.Sources.Count > 0) diff --git a/test/Intent.Primitives/ExcludeAttribute.cs b/test/Intent.Primitives/ExcludeAttribute.cs index 4fe517e902..8c538f2e9d 100644 --- a/test/Intent.Primitives/ExcludeAttribute.cs +++ b/test/Intent.Primitives/ExcludeAttribute.cs @@ -3,7 +3,13 @@ namespace Intent; -[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Constructor | AttributeTargets.Method)] +[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)] public class ExcludeAttribute : Attribute { + public ExcludeAttribute(string? reason = null) + { + Reason = reason; + } + + public string? Reason { get; } } diff --git a/test/Intent.Primitives/IRunLogger.cs b/test/Intent.Primitives/IRunLogger.cs index 20891f65af..f8f32393bf 100644 --- a/test/Intent.Primitives/IRunLogger.cs +++ b/test/Intent.Primitives/IRunLogger.cs @@ -7,8 +7,8 @@ namespace Intent; public interface IRunLogger { - void WriteTestPassed(MethodInfo m); - void WriteTestInconclusive(MethodInfo m); - void WriteTestFailure(MethodInfo m, Exception ex); + void WriteTestPassed(MethodInfo m, TimeSpan t); + void WriteTestFailure(MethodInfo m, Exception ex, TimeSpan t); void WriteFrameworkError(Exception ex); + void WriteSummary(int passed, List<(MethodInfo method, Exception exception, TimeSpan time)> failures, TimeSpan duration); } diff --git a/test/Intent.Primitives/OnlyAttribute.cs b/test/Intent.Primitives/OnlyAttribute.cs index 3d327e1f55..684bd1142d 100644 --- a/test/Intent.Primitives/OnlyAttribute.cs +++ b/test/Intent.Primitives/OnlyAttribute.cs @@ -3,7 +3,7 @@ namespace Intent; -[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Constructor | AttributeTargets.Method)] +[AttributeUsage(AttributeTargets.Assembly | AttributeTargets.Class | AttributeTargets.Method)] public class OnlyAttribute : Attribute { } diff --git a/test/Intent.Primitives/TestAttribute.cs b/test/Intent.Primitives/TestAttribute.cs new file mode 100644 index 0000000000..57144b4239 --- /dev/null +++ b/test/Intent.Primitives/TestAttribute.cs @@ -0,0 +1,15 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +namespace Intent; + +[AttributeUsage(AttributeTargets.Method)] +public class TestAttribute : Attribute +{ + public TestAttribute(string name) + { + Name = name; + } + + public string Name { get; } +} diff --git a/test/Intent/ConsoleLogger.cs b/test/Intent/ConsoleLogger.cs index bbb4b120f1..751f9b3736 100644 --- a/test/Intent/ConsoleLogger.cs +++ b/test/Intent/ConsoleLogger.cs @@ -2,7 +2,6 @@ // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System.Reflection; -using System.Text.RegularExpressions; using static System.Console; using static System.ConsoleColor; @@ -11,27 +10,19 @@ namespace Intent.Console; internal class ConsoleLogger : IRunLogger { - public void WriteTestInconclusive(MethodInfo m) - { - var currentColor = ForegroundColor; - ForegroundColor = Yellow; - WriteLine($"[?] 
{FormatMethodName(m.Name)}"); - ForegroundColor = currentColor; - } - - public void WriteTestPassed(MethodInfo m) + public void WriteTestPassed(MethodInfo m, TimeSpan t) { var currentColor = ForegroundColor; ForegroundColor = Green; - WriteLine($"[+] {FormatMethodName(m.Name)}"); + WriteLine($"[+] {FormatMethodName(m)} {(int)t.TotalMilliseconds} ms"); ForegroundColor = currentColor; } - public void WriteTestFailure(MethodInfo m, Exception ex) + public void WriteTestFailure(MethodInfo m, Exception ex, TimeSpan t) { var currentColor = ForegroundColor; ForegroundColor = Red; - WriteLine($"[-] {FormatMethodName(m.Name)}{Environment.NewLine}{ex}"); + WriteLine($"[-] {FormatMethodName(m)} {(int)t.TotalMilliseconds} ms{Environment.NewLine}{ex}"); ForegroundColor = currentColor; } @@ -43,16 +34,30 @@ public void WriteFrameworkError(Exception ex) ForegroundColor = currentColor; } - private static string FormatMethodName(string methodName) + private static string FormatMethodName(MethodInfo method) + { + var methodName = method.GetCustomAttribute() is TestAttribute test ? test.Name : method.Name; + if (!methodName.Contains('\n')) + { + return methodName; + } + + var lines = methodName.Split('\n').Select(line => line.Trim()); + var first = lines.Take(1).ToList(); + var rest = lines.Skip(1).Select(l => $"{Environment.NewLine} {l}").ToList(); + + return string.Join(null, first.Concat(rest)); + } + + public void WriteSummary(int passed, List<(MethodInfo method, Exception exception, TimeSpan time)> failures, TimeSpan duration) { - var noUnderscores = methodName.Replace('_', ' '); - // insert space before every capital letter or number that is after a non-capital letter - var spaced = Regex.Replace(noUnderscores, "(?<=[a-z])([A-Z0-9])", " $1"); - // insert space before every capital leter that is after a number - var spaced2 = Regex.Replace(spaced, "(?<=[0-9]|^)([A-Z])", " $1"); - var newLines = spaced2.Replace("When", $"{Environment.NewLine} When") - .Replace("Then", $"{Environment.NewLine} Then"); - - return newLines.ToLowerInvariant(); + WriteLine(); + WriteLine(); + if (failures.Count > 0) { + WriteLine($"There were {failures.Count} failures:"); + } + failures.ForEach(t => { WriteTestFailure(t.method, t.exception, t.time); WriteLine(); }); + WriteLine(); + WriteLine($"Test run finished: Total: {passed + failures.Count} Passed: {passed} Failed: {failures.Count} Duration: {(int)duration.TotalMilliseconds} ms"); } } diff --git a/test/Intent/Extensions.cs b/test/Intent/Extensions.cs index b806090330..35545300aa 100644 --- a/test/Intent/Extensions.cs +++ b/test/Intent/Extensions.cs @@ -19,7 +19,7 @@ public static List SkipExcluded(this IEnumerable e) public static List SkipNonPublic(this IEnumerable e) { - return e.Where(i => i.IsPublic).ToList(); + return e.Where(i => i.IsPublic || i.IsNestedPublic).ToList(); } public static List SkipExcluded(this IEnumerable e) diff --git a/test/Intent/Runner.cs b/test/Intent/Runner.cs index a45e63cb1e..26a0cb1955 100644 --- a/test/Intent/Runner.cs +++ b/test/Intent/Runner.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. 
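// The [Only] selection rework further down in this file amounts to the following rule (sketch, not part of the patch):
//   run everything, unless any type or method carries [Only]; in that case run the union of
//     - all non-excluded methods declared on [Only] types, and
//     - all methods that carry [Only] themselves,
//   de-duplicated with Distinct().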
+using System.Diagnostics; using System.Reflection; namespace Intent; @@ -9,6 +10,7 @@ public class Runner { public static void Run(IEnumerable paths, IRunLogger logger) { + var assemblies = new List(); foreach (var path in paths) { try @@ -17,44 +19,71 @@ public static void Run(IEnumerable paths, IRunLogger logger) if (assembly.IsExcluded()) continue; - var types = assembly.GetTypes().SkipNonPublic().SkipExcluded(); - foreach (var type in types) - { - var methods = type.GetMethods().SkipExcluded(); + assemblies.Add(assembly); + } + catch (Exception ex) + { + logger.WriteFrameworkError(ex); + } + } + + var types = assemblies.SelectMany(assembly => assembly.GetTypes().SkipNonPublic().SkipExcluded()).ToList(); + var typesWithOnly = types.Where(type => type.GetCustomAttribute() != null).ToList(); + + var methods = types.SelectMany(type => type.GetMethods().SkipExcluded()).ToList(); + var methodsWithOnly = methods.Where(m => m.GetCustomAttribute() != null).ToList(); - // TODO: This chooses the Only tests only for single assembly and single class, - // to support this full we would have to enumerate all classes and methods first, - // it is easy, I just don't need it right now. - var methodsWithOnly = methods.Where(m => m.GetCustomAttribute() != null).ToList(); - if (methodsWithOnly.Count > 0) - methods = methodsWithOnly; + var methodsToRun = new List(); + if (typesWithOnly.Count > 0 || methodsWithOnly.Count > 0) + { + // Some types or methods are decorated with Only. Putting Only on a type should run all methods in + // that type. Putting Only on a method should run that method. + // + // So we need a list of all distinct methods that match that rule. + + var onlyMethodsFromTypes = typesWithOnly.SelectMany(type => type.GetMethods().SkipExcluded()).ToList(); + methodsToRun = onlyMethodsFromTypes.Concat(methodsWithOnly).Distinct().ToList(); + } + else + { + methodsToRun = methods; + } - foreach (var method in methods) + var failures = new List<(MethodInfo method, Exception exception, TimeSpan duration)>(); + var passed = 0; + var runStopwatch = Stopwatch.StartNew(); + foreach (var method in methodsToRun) + { + try + { + var testStopwatch = Stopwatch.StartNew(); + try + { + // Declaring type cannot be really null for types you define in C# + // without doing any reflection magic. + var instance = Activator.CreateInstance(method.DeclaringType!); + var testResult = method.Invoke(instance, Array.Empty()); + if (testResult is Task task) { - try - { - var instance = Activator.CreateInstance(type); - var testResult = method.Invoke(instance, Array.Empty()); - if (testResult is Task task) - { - // When the result is a task we need to await it. - // TODO: this can be improved with await, imho - task.GetAwaiter().GetResult(); - }; + // When the result is a task we need to await it. 
+ // TODO: this can be improved with await, imho + task.GetAwaiter().GetResult(); + }; - logger.WriteTestPassed(method); - } - catch (Exception ex) - { - if (ex is TargetInvocationException tex && tex.InnerException != null) - { - logger.WriteTestFailure(method, tex.InnerException); - } - else - { - logger.WriteTestFailure(method, ex); - } - } + passed++; + logger.WriteTestPassed(method, testStopwatch.Elapsed); + } + catch (Exception ex) + { + if (ex is TargetInvocationException tex && tex.InnerException != null) + { + failures.Add((method, tex.InnerException, testStopwatch.Elapsed)); + logger.WriteTestFailure(method, tex.InnerException, testStopwatch.Elapsed); + } + else + { + failures.Add((method, ex, testStopwatch.Elapsed)); + logger.WriteTestFailure(method, ex, testStopwatch.Elapsed); } } } @@ -63,5 +92,7 @@ public static void Run(IEnumerable paths, IRunLogger logger) logger.WriteFrameworkError(ex); } } + + logger.WriteSummary(passed, failures, runStopwatch.Elapsed); } } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/ExecutionTests.cs b/test/Microsoft.TestPlatform.AcceptanceTests/ExecutionTests.cs index 037ec5deda..01cbe7a8c2 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/ExecutionTests.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/ExecutionTests.cs @@ -65,6 +65,8 @@ public void RunMultipleMSTestAssembliesOnVstestConsoleAndTesthostCombinations(Ru ValidateSummaryStatus(2, 2, 2); ExitCodeEquals(1); // failing tests + StdErrHasTestRunFailedMessageButNoOtherError(); + StdOutHasNoWarnings(); } [TestMethod] @@ -78,6 +80,8 @@ public void RunMultipleMSTestAssembliesOnVstestConsoleAndTesthostCombinations2(R InvokeVsTestForExecution(assemblyPaths, testAdapterPath: null, FrameworkArgValue, string.Empty); + InvokeVsTestForExecution(assemblyPaths, testAdapterPath: null, FrameworkArgValue, string.Empty); + ValidateSummaryStatus(2, 2, 2); ExitCodeEquals(1); // failing tests } @@ -251,11 +255,11 @@ public void UnhandleExceptionExceptionShouldBeLoggedToDiagLogFile(RunnerInfo run [TestMethod] [TestCategory("Windows-Review")] [NetFullTargetFrameworkDataSource] - public void IncompatibleSourcesWarningShouldBeDisplayedInTheConsole(RunnerInfo runnerInfo) + public void IncompatibleSourcesWarningShouldBeDisplayedInTheConsoleWhenGivenIncompatibleX86andX64Dll(RunnerInfo runnerInfo) { SetTestEnvironment(_testEnvironment, runnerInfo); - var expectedWarningContains = @"Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.5.1 framework and X86 platform. SimpleTestProject3.dll is built for Framework .NETFramework,Version=v4.5.1 and Platform X64"; + var expectedWarningContains = @"Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.5.1 framework and X64 platform. 
SimpleTestProjectx86.dll would use Framework .NETFramework,Version=v4.5.1 and Platform X86"; var assemblyPaths = BuildMultipleAssemblyPath("SimpleTestProject3.dll", "SimpleTestProjectx86.dll"); var arguments = PrepareArguments(assemblyPaths, GetTestAdapterPath(), string.Empty, FrameworkArgValue, runnerInfo.InIsolationValue, resultsDirectory: TempDirectory.Path); @@ -273,11 +277,11 @@ public void IncompatibleSourcesWarningShouldBeDisplayedInTheConsole(RunnerInfo r [TestMethod] [TestCategory("Windows-Review")] [NetFullTargetFrameworkDataSource] - public void NoIncompatibleSourcesWarningShouldBeDisplayedInTheConsole(RunnerInfo runnerInfo) + public void NoIncompatibleSourcesWarningShouldBeDisplayedInTheConsoleWhenGivenSingleX86Dll(RunnerInfo runnerInfo) { SetTestEnvironment(_testEnvironment, runnerInfo); - var expectedWarningContains = @"Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.5.1 framework and X86 platform. SimpleTestProjectx86 is built for Framework .NETFramework,Version=v4.5.1 and Platform X86"; + var expectedWarningContains = @"Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.5.1 framework and X86 platform. SimpleTestProjectx86 would use Framework .NETFramework,Version=v4.5.1 and Platform X86"; var assemblyPaths = BuildMultipleAssemblyPath("SimpleTestProjectx86.dll"); var arguments = PrepareArguments(assemblyPaths, GetTestAdapterPath(), string.Empty, FrameworkArgValue, runnerInfo.InIsolationValue, resultsDirectory: TempDirectory.Path); @@ -297,7 +301,7 @@ public void IncompatibleSourcesWarningShouldBeDisplayedInTheConsoleOnlyWhenRunni { SetTestEnvironment(_testEnvironment, runnerInfo); - var expectedWarningContains = @"Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.5.1 framework and X86 platform. SimpleTestProject2.dll is built for Framework .NETFramework,Version=v4.5.1 and Platform X64"; + var expectedWarningContains = @"Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.5.1 framework and X86 platform. SimpleTestProject2.dll would use Framework .NETFramework,Version=v4.5.1 and Platform X64"; var assemblyPaths = BuildMultipleAssemblyPath("SimpleTestProject2.dll"); var arguments = PrepareArguments(assemblyPaths, GetTestAdapterPath(), string.Empty, FrameworkArgValue, runnerInfo.InIsolationValue, resultsDirectory: TempDirectory.Path); @@ -312,6 +316,7 @@ public void IncompatibleSourcesWarningShouldBeDisplayedInTheConsoleOnlyWhenRunni { StdOutputDoesNotContains(expectedWarningContains); } + // If we are running this test on 32 bit OS, it should output warning message else { diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/CompatibilityRowsBuilder.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/CompatibilityRowsBuilder.cs index 157b2a14b0..70c130da5e 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/CompatibilityRowsBuilder.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/CompatibilityRowsBuilder.cs @@ -56,8 +56,8 @@ public CompatibilityRowsBuilder(string runnerFrameworks = AcceptanceTestBase.DEF public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; - + public bool DebugStopAtEntrypoint { get; set; } + public int? 
JustRow { get; internal set; } public List CreateData() { @@ -78,8 +78,8 @@ public List CreateData() if (WithInProcess) AddInProcess(dataRows); - var minVersion = SemanticVersion.Parse("0.0.0-alpha.1"); - var maxVersion = SemanticVersion.Parse("9999.0.0"); + var minVersion = ParseAndPatchSemanticVersion("0.0.0-alpha.1"); + var maxVersion = ParseAndPatchSemanticVersion("9999.0.0"); SemanticVersion? beforeRunnerVersion = maxVersion; SemanticVersion? afterRunnerVersion = minVersion; SemanticVersion? beforeTestHostVersion = maxVersion; @@ -90,37 +90,37 @@ public List CreateData() if (BeforeRunnerFeature != null) { var feature = Features.TestPlatformFeatures[BeforeRunnerFeature]; - beforeRunnerVersion = SemanticVersion.Parse(feature.Version.TrimStart('v')); + beforeRunnerVersion = ParseAndPatchSemanticVersion(feature.Version); } if (AfterRunnerFeature != null) { var feature = Features.TestPlatformFeatures[AfterRunnerFeature]; - afterRunnerVersion = SemanticVersion.Parse(feature.Version.TrimStart('v')); + afterRunnerVersion = ParseAndPatchSemanticVersion(feature.Version); } if (BeforeTestHostFeature != null) { var feature = Features.TestPlatformFeatures[BeforeTestHostFeature]; - beforeTestHostVersion = SemanticVersion.Parse(feature.Version.TrimStart('v')); + beforeTestHostVersion = ParseAndPatchSemanticVersion(feature.Version); } if (AfterTestHostFeature != null) { var feature = Features.TestPlatformFeatures[AfterTestHostFeature]; - afterTestHostVersion = SemanticVersion.Parse(feature.Version.TrimStart('v')); + afterTestHostVersion = ParseAndPatchSemanticVersion(feature.Version); } if (BeforeAdapterFeature != null) { var feature = Features.TestPlatformFeatures[BeforeAdapterFeature]; - beforeAdapterVersion = SemanticVersion.Parse(feature.Version.TrimStart('v')); + beforeAdapterVersion = ParseAndPatchSemanticVersion(feature.Version); } if (AfterAdapterFeature != null) { var feature = Features.AdapterFeatures[AfterAdapterFeature]; - afterAdapterVersion = SemanticVersion.Parse(feature.Version.TrimStart('v')); + afterAdapterVersion = ParseAndPatchSemanticVersion(feature.Version); } var isWindows = Environment.OSVersion.Platform.ToString().StartsWith("Win"); @@ -132,12 +132,12 @@ public List CreateData() // We probably don't have that need right now, because legacy version is 15.x.x, which is very old, and we are still keeping // compatibility. - Func isInRange = (version, before, after) => version < before && after < version; + Func isInRange = (version, before, after) => version < before && after <= version; var rows = dataRows.Where(r => r.VSTestConsoleInfo != null - && isInRange(SemanticVersion.Parse(r.VSTestConsoleInfo.Version), beforeRunnerVersion, afterRunnerVersion) - && r.TestHostInfo != null && isInRange(SemanticVersion.Parse(r.TestHostInfo.Version), beforeTestHostVersion, afterTestHostVersion) - && r.AdapterInfo != null && isInRange(SemanticVersion.Parse(r.AdapterInfo.Version), beforeAdapterVersion, afterAdapterVersion)).ToList(); + && isInRange(ParseAndPatchSemanticVersion(r.VSTestConsoleInfo.Version), beforeRunnerVersion, afterRunnerVersion) + && r.TestHostInfo != null && isInRange(ParseAndPatchSemanticVersion(r.TestHostInfo.Version), beforeTestHostVersion, afterTestHostVersion) + && r.AdapterInfo != null && isInRange(ParseAndPatchSemanticVersion(r.AdapterInfo.Version), beforeAdapterVersion, afterAdapterVersion)).ToList(); // We use ToString to determine which values are unique. Not great solution, but works better than using records. 
var distinctRows = new Dictionary(); @@ -149,7 +149,17 @@ public List CreateData() throw new InvalidOperationException("There were no rows that matched the specified criteria."); } - return distinctRows.Values.ToList(); + var allRows = distinctRows.Values.ToList(); + + return JustRow == null ? allRows : new List { allRows[JustRow.Value] }; + } + + private static SemanticVersion ParseAndPatchSemanticVersion(string? version) + { + // Our developer version is 17.2.0-dev, but we release few preview, that are named 17.2.0-preview or 17.2.0-release, yet we still + // want 17.2.0-dev to be considered the latest version. So we patch it. + var v = version != null && version.EndsWith("-dev") ? version?.Substring(0, version.Length - 4) + "-ZZZZZZZZZZ" : version; + return SemanticVersion.Parse(v?.TrimStart('v')); } private void AddInProcess(List dataRows) @@ -292,7 +302,7 @@ private DebugInfo GetDebugInfo() DebugDataCollector = DebugDataCollector, DebugTestHost = DebugTestHost, DebugVSTestConsole = DebugVSTestConsole, - NoDefaultBreakpoints = NoDefaultBreakpoints + DebugStopAtEntrypoint = DebugStopAtEntrypoint }; } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/Features.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/Features.cs index 0be9ac104c..bbfb7104e3 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/Features.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/Features.cs @@ -10,10 +10,12 @@ public static class Features { public const string ATTACH_DEBUGGER_FLOW = nameof(ATTACH_DEBUGGER_FLOW); public const string MSTEST_EXAMPLE_FEATURE = nameof(MSTEST_EXAMPLE_FEATURE); + public const string MULTI_TFM = nameof(MULTI_TFM); public static IImmutableDictionary TestPlatformFeatures { get; } = new Dictionary { [ATTACH_DEBUGGER_FLOW] = new(version: "v16.7.0-preview-20200519-01", issue: "https://github.com/microsoft/vstest/pull/2325"), + [MULTI_TFM] = new(version: "17.3.0-dev", issue: "https://github.com/microsoft/vstest/pull/3412") }.ToImmutableDictionary(); public static IImmutableDictionary AdapterFeatures { get; internal set; } = new Dictionary diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/MSTestCompatibilityDataSource.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/MSTestCompatibilityDataSource.cs index f6d20b577c..e75bc87594 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/MSTestCompatibilityDataSource.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/MSTestCompatibilityDataSource.cs @@ -34,7 +34,7 @@ public MSTestCompatibilityDataSource( public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } /// /// Add run for in-process using the selected .NET Framework runners, and and all selected adapters. 
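The ParseAndPatchSemanticVersion helper in CompatibilityRowsBuilder above exists because prerelease labels sort alphabetically, so a locally built "-dev" version would order before the released "-preview" and "-release" builds and get filtered out of the compatibility matrix. Replacing the "-dev" suffix keeps the local build at the top. An illustration (assuming the case-insensitive release-label ordering of the SemanticVersion comparer used by these tests; not part of the patch):

    // "17.3.0-dev"  -> parsed as "17.3.0-ZZZZZZZZZZ", which sorts after "17.3.0-preview-..." and "17.3.0-release-...",
    //                  so the locally built runner/testhost/adapter is treated as the newest available version.
    // Together with the inclusive lower bound (after <= version) in isInRange above, a row whose version exactly
    // matches the version that introduced a feature is now included in the generated test data.
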
@@ -70,7 +70,7 @@ public override void CreateData(MethodInfo methodInfo) _builder.DebugDataCollector = DebugDataCollector; _builder.DebugVSTestConsole = DebugVSTestConsole; _builder.DebugTestHost = DebugTestHost; - _builder.NoDefaultBreakpoints = NoDefaultBreakpoints; + _builder.DebugStopAtEntrypoint = DebugStopAtEntrypoint; var data = _builder.CreateData(); data.ForEach(AddData); diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreRunner.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreRunner.cs index 202264d0c7..477715e581 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreRunner.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreRunner.cs @@ -37,7 +37,7 @@ public NetCoreRunner(string targetFrameworks = AcceptanceTestBase.NETFX452_NET50 public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } public IEnumerable GetData(MethodInfo methodInfo) { @@ -58,7 +58,7 @@ public IEnumerable GetData(MethodInfo methodInfo) DebugVSTestConsole = DebugVSTestConsole, DebugTestHost = DebugTestHost, DebugDataCollector = DebugDataCollector, - NoDefaultBreakpoints = NoDefaultBreakpoints, + DebugStopAtEntrypoint = DebugStopAtEntrypoint, }; dataRows.Add(new object[] { runnerInfo }); } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreTargetFrameworkDataSource.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreTargetFrameworkDataSource.cs index 56ff01036b..a5b51cc79b 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreTargetFrameworkDataSource.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetCoreTargetFrameworkDataSource.cs @@ -51,7 +51,7 @@ public NetCoreTargetFrameworkDataSource( public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } private void AddRunnerDataRow(List dataRows, string runnerFramework, string targetFramework) { @@ -66,7 +66,7 @@ private void AddRunnerDataRow(List dataRows, string runnerFramework, s DebugDataCollector = DebugDataCollector, DebugTestHost = DebugTestHost, DebugVSTestConsole = DebugVSTestConsole, - NoDefaultBreakpoints = NoDefaultBreakpoints, + DebugStopAtEntrypoint = DebugStopAtEntrypoint, }; dataRows.Add(new object[] { runnerInfo }); } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFrameworkRunner.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFrameworkRunner.cs index 0f1faa65f5..2d5ceb82a5 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFrameworkRunner.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFrameworkRunner.cs @@ -34,7 +34,7 @@ public NetFrameworkRunner(string targetFrameworks = AcceptanceTestBase.NETFX452_ public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } private readonly string _targetFrameworks; @@ -60,7 +60,7 @@ public IEnumerable GetData(MethodInfo methodInfo) DebugVSTestConsole = DebugVSTestConsole, DebugTestHost = DebugTestHost, DebugDataCollector = DebugDataCollector, - NoDefaultBreakpoints 
= NoDefaultBreakpoints, + DebugStopAtEntrypoint = DebugStopAtEntrypoint, }; dataRows.Add(new object[] { runnerInfo }); diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFullTargetFrameworkDataSource.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFullTargetFrameworkDataSource.cs index b809da7799..c756619ba5 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFullTargetFrameworkDataSource.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/NetFullTargetFrameworkDataSource.cs @@ -45,7 +45,7 @@ public NetFullTargetFrameworkDataSource(bool inIsolation = true, bool inProcess public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } public IEnumerable GetData(MethodInfo methodInfo) { @@ -64,7 +64,7 @@ public IEnumerable GetData(MethodInfo methodInfo) DebugVSTestConsole = DebugVSTestConsole, DebugTestHost = DebugTestHost, DebugDataCollector = DebugDataCollector, - NoDefaultBreakpoints = NoDefaultBreakpoints, + DebugStopAtEntrypoint = DebugStopAtEntrypoint, }; dataRows.Add(new object[] { runnerInfo }); } @@ -84,7 +84,7 @@ public IEnumerable GetData(MethodInfo methodInfo) DebugVSTestConsole = DebugVSTestConsole, DebugTestHost = DebugTestHost, DebugDataCollector = DebugDataCollector, - NoDefaultBreakpoints = NoDefaultBreakpoints, + DebugStopAtEntrypoint = DebugStopAtEntrypoint, }; dataRows.Add(new object[] { runnerInfo }); } @@ -102,7 +102,7 @@ public IEnumerable GetData(MethodInfo methodInfo) DebugVSTestConsole = DebugVSTestConsole, DebugTestHost = DebugTestHost, DebugDataCollector = DebugDataCollector, - NoDefaultBreakpoints = NoDefaultBreakpoints, + DebugStopAtEntrypoint = DebugStopAtEntrypoint, }; dataRows.Add(new object[] { runnerInfo }); } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/RunnerCompatibilityDataSource.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/RunnerCompatibilityDataSource.cs index 357dcd1070..026afa38b8 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/RunnerCompatibilityDataSource.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/RunnerCompatibilityDataSource.cs @@ -39,7 +39,8 @@ public RunnerCompatibilityDataSource( public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } + public int JustRow { get; set; } = -1; /// /// Add run for in-process using the selected .NET Framework runners, and and all selected adapters. @@ -75,7 +76,9 @@ public override void CreateData(MethodInfo methodInfo) _builder.DebugDataCollector = DebugDataCollector; _builder.DebugVSTestConsole = DebugVSTestConsole; _builder.DebugTestHost = DebugTestHost; - _builder.NoDefaultBreakpoints = NoDefaultBreakpoints; + _builder.DebugStopAtEntrypoint = DebugStopAtEntrypoint; + + _builder.JustRow = JustRow < 0 ? 
null : JustRow; var data = _builder.CreateData(); data.ForEach(AddData); diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TestPlatformCompatibilityDataSource.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TestPlatformCompatibilityDataSource.cs index 5f117fff0b..95c0e290d6 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TestPlatformCompatibilityDataSource.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TestPlatformCompatibilityDataSource.cs @@ -44,7 +44,7 @@ public TestPlatformCompatibilityDataSource( public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } /// /// Add run for in-process using the selected .NET Framework runners, and and all selected adapters. @@ -88,7 +88,7 @@ public override void CreateData(MethodInfo methodInfo) _builder.DebugDataCollector = DebugDataCollector; _builder.DebugVSTestConsole = DebugVSTestConsole; _builder.DebugTestHost = DebugTestHost; - _builder.NoDefaultBreakpoints = NoDefaultBreakpoints; + _builder.DebugStopAtEntrypoint = DebugStopAtEntrypoint; var data = _builder.CreateData(); data.ForEach(AddData); diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TesthostCompatibilityDataSource.cs b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TesthostCompatibilityDataSource.cs index 4bcbba2f93..2af3e23e5d 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TesthostCompatibilityDataSource.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/Extension/TesthostCompatibilityDataSource.cs @@ -39,7 +39,7 @@ public TestHostCompatibilityDataSource( public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } public string? BeforeFeature { get; set; } public string? 
AfterFeature { get; set; } @@ -58,7 +58,7 @@ public override void CreateData(MethodInfo methodInfo) _builder.DebugDataCollector = DebugDataCollector; _builder.DebugVSTestConsole = DebugVSTestConsole; _builder.DebugTestHost = DebugTestHost; - _builder.NoDefaultBreakpoints = NoDefaultBreakpoints; + _builder.DebugStopAtEntrypoint = DebugStopAtEntrypoint; var data = _builder.CreateData(); data.ForEach(AddData); diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/FrameworkTests.cs b/test/Microsoft.TestPlatform.AcceptanceTests/FrameworkTests.cs index 394db87a80..452c60510b 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/FrameworkTests.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/FrameworkTests.cs @@ -92,7 +92,7 @@ public void RunSpecificTestsShouldWorkWithFrameworkInCompatibleWarning(RunnerInf } else { - StdOutputContains("Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.0 framework and X86 platform."); + StdOutputContains("Following DLL(s) do not match current settings, which are .NETFramework,Version=v4.0 framework and X64 platform."); ValidateSummaryStatus(1, 0, 0); } } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/RunsettingsTests.cs b/test/Microsoft.TestPlatform.AcceptanceTests/RunsettingsTests.cs index 2633e3f715..7b6ddfa5f2 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/RunsettingsTests.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/RunsettingsTests.cs @@ -31,7 +31,10 @@ public void CommandLineRunSettingsShouldWinAmongAllOptions(RunnerInfo runnerInfo // We pass 2 dlls in RunTestWithRunSettings, for .NET Framework they run in // 1 hosts because that host is Shared. - var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + // + // Testhosts are no longer shared, we should see 2 of them always. + // var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + var expectedNumOfProcessCreated = 2; // passing parallel var runConfigurationDictionary = new Dictionary @@ -71,7 +74,10 @@ public void CLIRunsettingsShouldWinBetweenCLISwitchesAndCLIRunsettings(RunnerInf // We pass 2 dlls in RunTestWithRunSettings, for .NET Framework they run in // 1 hosts because that host is Shared. - var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + // + // Testhosts are no longer shared, we should see 2 of them always. + // var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + var expectedNumOfProcessCreated = 2; // Pass parallel var additionalArgs = "/Parallel"; @@ -107,7 +113,10 @@ public void CommandLineSwitchesShouldWinBetweenSettingsFileAndCommandLineSwitche // We pass 2 dlls in RunTestWithRunSettings, for .NET Framework they run in // 1 hosts because that host is Shared. - var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + // + // Testhosts are no longer shared, we should see 2 of them always. + // var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + var expectedNumOfProcessCreated = 2; // passing different platform var runConfigurationDictionary = new Dictionary @@ -136,7 +145,10 @@ public void RunSettingsWithoutParallelAndPlatformX86(RunnerInfo runnerInfo) // We pass 2 dlls in RunTestWithRunSettings, for .NET Framework they run in // 1 hosts because that host is Shared. - var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + // + // Testhosts are no longer shared. We should always see 2. + // var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 
1 : 2; + var expectedNumOfProcessCreated = 2; var runConfigurationDictionary = new Dictionary { @@ -160,7 +172,10 @@ public void RunSettingsParamsAsArguments(RunnerInfo runnerInfo) // We pass 2 dlls in RunTestWithRunSettings, for .NET Framework they run in // 1 hosts because that host is Shared. - var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + // + // Testhosts are no longer shared, we should see 2 of them always. + // var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + var expectedNumOfProcessCreated = 2; var runSettingsArgs = string.Join( " ", @@ -187,7 +202,10 @@ public void RunSettingsAndRunSettingsParamsAsArguments(RunnerInfo runnerInfo) // We pass 2 dlls in RunTestWithRunSettings, for .NET Framework they run in // 1 hosts because that host is Shared. - var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + // + // Testhosts are no longer shared, we should see 2 of them always. + // var expectedNumOfProcessCreated = runnerInfo.IsNetFrameworkTarget ? 1 : 2; + var expectedNumOfProcessCreated = 2; var runConfigurationDictionary = new Dictionary { diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/CustomTestHostTests.cs b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/CustomTestHostTests.cs index ac6b33001a..b08964a690 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/CustomTestHostTests.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/CustomTestHostTests.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. +using System.Collections.Generic; using System.Diagnostics; using System.Linq; using System.Threading; @@ -138,6 +139,66 @@ public void RunTestsWithCustomTestHostLauncherUsesLaunchWhenGivenAnOutdatedITest customTestHostLauncher.LaunchProcessProcessId.Should().NotBeNull("we should launch some real process and save the pid of it"); } + [TestMethod] + [TestCategory("Windows-Review")] + [TestCategory("Feature")] + [Ignore("This is for debugger v3 and does not work yet.")] + [RunnerCompatibilityDataSource(AfterFeature = Features.MULTI_TFM, JustRow = 1)] + public void RunAllTestsWithMixedTFMsWillProvideAdditionalInformationToTheDebugger(RunnerInfo runnerInfo) + { + // Arrange + SetTestEnvironment(_testEnvironment, runnerInfo); + + var vstestConsoleWrapper = GetVsTestConsoleWrapper(); + var runEventHandler = new RunEventHandler(); + var netFrameworkDll = GetTestDllForFramework("MSTestProject1.dll", "net451"); + var netDll = GetTestDllForFramework("MSTestProject1.dll", "netcoreapp2.1"); + var testHostLauncher = new TestHostLauncherV3(); + + // Act + // We have no preference around what TFM is used. It will be autodetected. 
+ var runsettingsXml = ""; + vstestConsoleWrapper.RunTestsWithCustomTestHost(new[] { netFrameworkDll, netDll }, runsettingsXml, runEventHandler, testHostLauncher); + + // Assert + runEventHandler.Errors.Should().BeEmpty(); + testHostLauncher.AttachDebuggerInfos.Should().HaveCount(2); + var targetFrameworks = testHostLauncher.AttachDebuggerInfos.Select(i => i.TargetFramework).ToList(); + targetFrameworks.Should().OnlyContain(tfm => tfm == Framework.FromString("net451") || tfm == Framework.FromString("netcoreapp2.1")); + + runEventHandler.TestResults.Should().HaveCount(6, "we run all tests from both assemblies"); + } + + [TestMethod] + [TestCategory("Windows-Review")] + [TestCategory("BackwardCompatibilityWithRunner")] + [Ignore("This is for debugger v3 and does not work yet.")] + [RunnerCompatibilityDataSource(BeforeFeature = Features.MULTI_TFM, JustRow = 1)] + public void RunAllTestsWithMixedTFMsCallsBackToTestHostLauncherV3EvenWhenRunnerDoesNotSupportItYet(RunnerInfo runnerInfo) + { + // Arrange + SetTestEnvironment(_testEnvironment, runnerInfo); + + var vstestConsoleWrapper = GetVsTestConsoleWrapper(); + var runEventHandler = new RunEventHandler(); + var netFrameworkDll = GetTestDllForFramework("MSTestProject1.dll", "net451"); + var netDll = GetTestDllForFramework("MSTestProject1.dll", "netcoreapp2.1"); + var testHostLauncher = new TestHostLauncherV3(); + + // Act + // We have no preference around what TFM is used. It will be autodetected. + var runsettingsXml = ""; + vstestConsoleWrapper.RunTestsWithCustomTestHost(new[] { netFrameworkDll, netDll }, runsettingsXml, runEventHandler, testHostLauncher); + + // Assert + runEventHandler.Errors.Should().BeEmpty(); + testHostLauncher.AttachDebuggerInfos.Should().HaveCount(2); + var targetFrameworks = testHostLauncher.AttachDebuggerInfos.Select(i => i.TargetFramework).ToList(); + targetFrameworks.Should().OnlyContain(tfm => tfm == Framework.FromString("net451") || tfm == Framework.FromString("netcoreapp2.1")); + + runEventHandler.TestResults.Should().HaveCount(6, "we run all tests from both assemblies"); + } + private static void EnsureTestsRunWithoutErrors(RunEventHandler runEventHandler, int passed, int failed, int skipped) { runEventHandler.Errors.Should().BeEmpty(); @@ -198,4 +259,53 @@ public bool AttachDebuggerToProcess(int pid, CancellationToken cancellationToken return true; } } + +#pragma warning disable CS0618 // Type or member is obsolete + private class TestHostLauncherV3 : ITestHostLauncher3 + { + public bool IsDebug => true; + + public List AttachDebuggerInfos { get; } = new(); + + public bool AttachDebuggerToProcess(AttachDebuggerInfo attachDebuggerInfo) + { + AttachDebuggerInfos.Add(attachDebuggerInfo); + + return true; + } + + public bool AttachDebuggerToProcess(int pid) + { + return AttachDebuggerToProcess(new AttachDebuggerInfo + { + ProcessId = pid, + TargetFramework = null, + Version = null, + CancellationToken = CancellationToken.None + }); + } + + public bool AttachDebuggerToProcess(int pid, CancellationToken cancellationToken) + { + return AttachDebuggerToProcess(new AttachDebuggerInfo + { + ProcessId = pid, + TargetFramework = null, + Version = null, + CancellationToken = cancellationToken + }); + } + + public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo) + { + return -1; + } + + public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo, CancellationToken cancellationToken) + { + return -1; + } + } } +#pragma warning restore CS0618 // Type or member is obsolete + diff --git 
a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/DiscoverTests.cs b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/DiscoverTests.cs index 220ece0ba2..3364c3dc9f 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/DiscoverTests.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/DiscoverTests.cs @@ -7,6 +7,8 @@ using System.Linq; using System.Threading.Tasks; +using FluentAssertions; + using Microsoft.TestPlatform.TestUtilities; using Microsoft.TestPlatform.VsTestConsole.TranslationLayer.Interfaces; using Microsoft.VisualStudio.TestPlatform.Common.Telemetry; @@ -109,11 +111,11 @@ public void DiscoverTestsUsingEventHandler2AndBatchSize(RunnerInfo runnerInfo) Setup(); var discoveryEventHandlerForBatchSize = new DiscoveryEventHandlerForBatchSize(); - - string runSettingsXml = @" + var batchSize = 2; + string runSettingsXml = $@" - 3 + {batchSize} "; @@ -124,8 +126,10 @@ public void DiscoverTestsUsingEventHandler2AndBatchSize(RunnerInfo runnerInfo) discoveryEventHandlerForBatchSize); // Assert. - Assert.AreEqual(6, discoveryEventHandlerForBatchSize.DiscoveredTestCases.Count); - Assert.AreEqual(3, discoveryEventHandlerForBatchSize.BatchSize); + discoveryEventHandlerForBatchSize.DiscoveredTestCases.Should().HaveCount(6, "we found 6 tests in total"); + // Batching happens based on size and time interval. The middle batch should almost always be 2, + // if the discovery is fast enough, but the only requirement we can reliably check and enforce is that no batch is bigger than the expected size. + discoveryEventHandlerForBatchSize.Batches.Should().OnlyContain(v => v <= batchSize, "all batches should be the same size or smaller than the batch size"); } [TestMethod] @@ -137,11 +141,11 @@ public void DiscoverTestsUsingEventHandler1AndBatchSize(RunnerInfo runnerInfo) Setup(); var discoveryEventHandlerForBatchSize = new DiscoveryEventHandlerForBatchSize(); - - string runSettingsXml = @" + var batchSize = 2; + string runSettingsXml = $@" - 3 + {batchSize} "; @@ -151,8 +155,10 @@ public void DiscoverTestsUsingEventHandler1AndBatchSize(RunnerInfo runnerInfo) discoveryEventHandlerForBatchSize); // Assert. - Assert.AreEqual(6, discoveryEventHandlerForBatchSize.DiscoveredTestCases.Count); - Assert.AreEqual(3, discoveryEventHandlerForBatchSize.BatchSize); + discoveryEventHandlerForBatchSize.DiscoveredTestCases.Should().HaveCount(6, "we found 6 tests in total"); + // Batching happens based on size and time interval. The middle batch should almost always be 2, + // if the discovery is fast enough, but the only requirement we can reliably check and enforce is that no batch is bigger than the expected size. + discoveryEventHandlerForBatchSize.Batches.Should().OnlyContain(v => v <= batchSize, "all batches should be the same size or smaller than the batch size"); } [TestMethod] diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/EventHandler/DiscoveryEventHandler.cs b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/EventHandler/DiscoveryEventHandler.cs index 4a9d890b41..5f7b825de2 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/EventHandler/DiscoveryEventHandler.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/EventHandler/DiscoveryEventHandler.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information.
+using System; using System.Collections.Generic; using System.Linq; @@ -127,7 +128,7 @@ public class DiscoveryEventHandlerForBatchSize : ITestDiscoveryEventsHandler2, I /// /// Gets the batch size. /// - public long BatchSize { get; private set; } + public List Batches { get; } = new List(); /// /// Gets the discovered test cases. @@ -146,7 +147,10 @@ public void HandleRawMessage(string rawMessage) public void HandleLogMessage(TestMessageLevel level, string message) { - // No Op + if (level == TestMessageLevel.Error) + { + Console.WriteLine($"ERROR:{message}"); + }; } public void HandleDiscoveryComplete(DiscoveryCompleteEventArgs discoveryCompleteEventArgs, IEnumerable lastChunk) @@ -170,7 +174,7 @@ public void HandleDiscoveredTests(IEnumerable discoveredTestCases) if (discoveredTestCases != null && discoveredTestCases.Any()) { DiscoveredTestCases.AddRange(discoveredTestCases); - BatchSize = discoveredTestCases.Count(); + Batches.Add(discoveredTestCases.Count()); } } } diff --git a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/RunTests.cs b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/RunTests.cs index 7df1089ddd..e31da3df04 100644 --- a/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/RunTests.cs +++ b/test/Microsoft.TestPlatform.AcceptanceTests/TranslationLayerTests/RunTests.cs @@ -5,12 +5,16 @@ using System.Collections.Generic; using System.Diagnostics; using System.Linq; +using System.Threading; + +using FluentAssertions; using Microsoft.TestPlatform.TestUtilities; using Microsoft.TestPlatform.VsTestConsole.TranslationLayer.Interfaces; using Microsoft.VisualStudio.TestPlatform.Common.Telemetry; using Microsoft.VisualStudio.TestPlatform.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Interfaces; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; using Microsoft.VisualStudio.TestTools.UnitTesting; @@ -57,6 +61,52 @@ public void RunAllTests(RunnerInfo runnerInfo) Assert.AreEqual(2, runEventHandler.TestResults.Count(t => t.Outcome == TestOutcome.Skipped)); } + [TestMethod] + [TestCategory("Windows-Review")] + [RunnerCompatibilityDataSource(BeforeFeature = Features.MULTI_TFM)] + public void RunAllTestsWithMixedTFMsWillFailToRunTestsFromTheIncompatibleTFMDll(RunnerInfo runnerInfo) + { + // Arrange + SetTestEnvironment(_testEnvironment, runnerInfo); + + var vstestConsoleWrapper = GetVsTestConsoleWrapper(); + var runEventHandler = new RunEventHandler(); + var compatibleDll = GetTestDllForFramework("MSTestProject1.dll", "net451"); + var incompatibleDll = GetTestDllForFramework("MSTestProject1.dll", "netcoreapp2.1"); + + // Act + // We have no preference around what TFM is used. It will be autodetected. 
+ var runsettingsXml = ""; + vstestConsoleWrapper.RunTests(new[] { compatibleDll, incompatibleDll }, runsettingsXml, runEventHandler); + + // Assert + runEventHandler.TestResults.Should().HaveCount(3, "we failed to run those tests because they are not compatible."); + } + + [TestMethod] + [TestCategory("Windows-Review")] + [TestHostCompatibilityDataSource] + [RunnerCompatibilityDataSource(AfterFeature = Features.MULTI_TFM)] + public void RunAllTestsWithMixedTFMsWillRunTestsFromAllProvidedDllEvenWhenTheyMixTFMs(RunnerInfo runnerInfo) + { + // Arrange + SetTestEnvironment(_testEnvironment, runnerInfo); + + var vstestConsoleWrapper = GetVsTestConsoleWrapper(); + var runEventHandler = new RunEventHandler(); + var netFrameworkDll = GetTestDllForFramework("MSTestProject1.dll", "net451"); + var netDll = GetTestDllForFramework("MSTestProject1.dll", "netcoreapp2.1"); + + // Act + // We have no preference around what TFM is used. It will be autodetected. + var runsettingsXml = ""; + vstestConsoleWrapper.RunTests(new[] { netFrameworkDll, netDll }, runsettingsXml, runEventHandler); + + // Assert + runEventHandler.Errors.Should().BeEmpty(); + runEventHandler.TestResults.Should().HaveCount(6, "we run all tests from both assemblies"); + } + [TestMethod] [NetFullTargetFrameworkDataSource] [NetCoreTargetFrameworkDataSource] @@ -195,4 +245,29 @@ private IList GetTestAssemblies() GetAssetFullPath("SimpleTestProject2.dll") }; } + + private class TestHostLauncher : ITestHostLauncher2 + { + public bool IsDebug => true; + + public bool AttachDebuggerToProcess(int pid) + { + return true; + } + + public bool AttachDebuggerToProcess(int pid, CancellationToken cancellationToken) + { + return true; + } + + public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo) + { + return -1; + } + + public int LaunchTestHost(TestProcessStartInfo defaultTestHostStartInfo, CancellationToken cancellationToken) + { + return -1; + } + } } diff --git a/test/Microsoft.TestPlatform.Client.UnitTests/TestPlatformTests.cs b/test/Microsoft.TestPlatform.Client.UnitTests/TestPlatformTests.cs index 1c28233b09..d5b2ec13d6 100644 --- a/test/Microsoft.TestPlatform.Client.UnitTests/TestPlatformTests.cs +++ b/test/Microsoft.TestPlatform.Client.UnitTests/TestPlatformTests.cs @@ -49,21 +49,20 @@ public TestPlatformTests() } [TestMethod] - public void CreateDiscoveryRequestShouldInitializeManagersAndCreateDiscoveryRequestWithGivenCriteriaAndReturnIt() + public void CreateDiscoveryRequestShouldInitializeDiscoveryManagerAndCreateDiscoveryRequestWithGivenCriteriaAndReturnIt() { _discoveryManager.Setup(dm => dm.Initialize(false)).Verifiable(); var discoveryCriteria = new DiscoveryCriteria(new List { "foo" }, 1, null); _hostManager.Setup(hm => hm.GetTestSources(discoveryCriteria.Sources)) .Returns(discoveryCriteria.Sources); - _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_discoveryManager.Object); + _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_discoveryManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - var discoveryRequest = tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions()); + var discoveryRequest = 
tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions(), It.IsAny>()); - _hostManager.Verify(hm => hm.Initialize(It.IsAny(), It.IsAny()), Times.Once); _discoveryManager.Verify(dm => dm.Initialize(false), Times.Once); Assert.AreEqual(discoveryCriteria, discoveryRequest.DiscoveryCriteria); } @@ -107,7 +106,7 @@ public void CreateDiscoveryRequestThrowsIfDiscoveryCriteriaIsNull() { TestPlatform tp = new(); - Assert.ThrowsException(() => tp.CreateDiscoveryRequest(_mockRequestData.Object, null, new TestPlatformOptions())); + Assert.ThrowsException(() => tp.CreateDiscoveryRequest(_mockRequestData.Object, null, new TestPlatformOptions(), It.IsAny>())); } [TestMethod] @@ -133,36 +132,6 @@ public void ClearExtensionsShouldClearTheExtensionsCachedInEngine() _extensionManager.Verify(em => em.ClearExtensions()); } - [TestMethod] - public void CreateTestRunRequestShouldThrowExceptionIfNoTestHostproviderFound() - { - _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); - - string settingsXml = - @" - - - .NETPortable,Version=v4.5 - - "; - - var testRunCriteria = new TestRunCriteria(new List { @"x:dummy\foo.dll" }, 10, false, settingsXml, TimeSpan.Zero); - var tp = new TestableTestPlatform(_testEngine.Object, _mockFileHelper.Object, null); - bool exceptionThrown = false; - - try - { - tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); - } - catch (TestPlatformException ex) - { - exceptionThrown = true; - Assert.AreEqual("No suitable test runtime provider found for this run.", ex.Message); - } - - Assert.IsTrue(exceptionThrown, "TestPlatformException should get thrown"); - } - [TestMethod] public void CreateTestRunRequestShouldUpdateLoggerExtensionWhenDesingModeIsFalseForRunAll() { @@ -184,13 +153,13 @@ public void CreateTestRunRequestShouldUpdateLoggerExtensionWhenDesingModeIsFalse _hostManager.Setup(hm => hm.GetTestSources(testRunCriteria.Sources)) .Returns(testRunCriteria.Sources); - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); var tp = new TestableTestPlatform(_testEngine.Object, _mockFileHelper.Object, _hostManager.Object); - var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); + var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions(), It.IsAny>()); _extensionManager.Verify(em => em.UseAdditionalExtensions(additionalExtensions, false)); } @@ -213,13 +182,13 @@ public void CreateTestRunRequestShouldUpdateLoggerExtensionWhenDesignModeIsFalse var testRunCriteria = new TestRunCriteria(new List { new TestCase("dll1.class1.test1", new Uri("hello://x/"), $"xyz{Path.DirectorySeparatorChar}1.dll") }, 10, false, settingsXml); - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => 
te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); var tp = new TestableTestPlatform(_testEngine.Object, _mockFileHelper.Object, _hostManager.Object); - var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); + var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions(), It.IsAny>()); _extensionManager.Verify(em => em.UseAdditionalExtensions(additionalExtensions, false)); } @@ -244,13 +213,13 @@ public void CreateTestRunRequestShouldNotUpdateTestSourcesIfSelectedTestAreRun() _hostManager.Setup(hm => hm.GetTestSources(It.IsAny>())) .Returns(new List { $"xyz{Path.DirectorySeparatorChar}1.dll" }); - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); var tp = new TestableTestPlatform(_testEngine.Object, _mockFileHelper.Object, _hostManager.Object); - tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); + tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions(), It.IsAny>()); _extensionManager.Verify(em => em.UseAdditionalExtensions(additionalExtensions, false)); _hostManager.Verify(hm => hm.GetTestSources(It.IsAny>()), Times.Never); } @@ -259,7 +228,7 @@ public void CreateTestRunRequestShouldNotUpdateTestSourcesIfSelectedTestAreRun() public void CreateTestRunRequestShouldInitializeManagersAndCreateTestRunRequestWithSpecifiedCriteria() { _executionManager.Setup(dm => dm.Initialize(false)).Verifiable(); - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); @@ -268,11 +237,10 @@ public void CreateTestRunRequestShouldInitializeManagersAndCreateTestRunRequestW _hostManager.Setup(hm => hm.GetTestSources(testRunCriteria.Sources)) .Returns(testRunCriteria.Sources); - var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); + var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions(), It.IsAny>()); var actualTestRunRequest = testRunRequest as TestRunRequest; - _hostManager.Verify(hm => hm.Initialize(It.IsAny(), It.IsAny()), Times.Once); _executionManager.Verify(em => em.Initialize(false), Times.Once); Assert.AreEqual(testRunCriteria, actualTestRunRequest?.TestRunCriteria); } @@ -311,65 +279,12 @@ public void CreateTestRunRequestShouldInitializeManagersWithFalseFlagWhenTestPla _executionManager.Verify(dm => dm.Initialize(false), Times.Once); } - [TestMethod] - public void CreateTestRunRequestShouldSetCustomHostLauncherOnEngineDefaultLauncherIfSpecified() - { - var mockCustomLauncher = new 
Mock(); - _executionManager.Setup(dm => dm.Initialize(false)).Verifiable(); - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); - _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); - _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); - - var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - var testRunCriteria = new TestRunCriteria(new List { "foo" }, 10, false, null, TimeSpan.Zero, mockCustomLauncher.Object); - _hostManager.Setup(hm => hm.GetTestSources(testRunCriteria.Sources)) - .Returns(testRunCriteria.Sources); - - var testRunRequest = tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); - - var actualTestRunRequest = testRunRequest as TestRunRequest; - Assert.AreEqual(testRunCriteria, actualTestRunRequest?.TestRunCriteria); - _hostManager.Verify(hl => hl.SetCustomLauncher(mockCustomLauncher.Object), Times.Once); - } - [TestMethod] public void CreateTestRunRequestThrowsIfTestRunCriteriaIsNull() { var tp = new TestPlatform(); - Assert.ThrowsException(() => tp.CreateTestRunRequest(_mockRequestData.Object, null, new TestPlatformOptions())); - } - - - [TestMethod] - public void CreateDiscoveryRequestShouldThrowExceptionIfNoTestHostproviderFound() - { - _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)) - .Returns(_loggerManager.Object); - - string settingsXml = - @" - - - .NETPortable,Version=v4.5 - - "; - - var discoveryCriteria = new DiscoveryCriteria(new List { @"x:dummy\foo.dll" }, 1, settingsXml); - var tp = new TestableTestPlatform(_testEngine.Object, _mockFileHelper.Object, null); - bool exceptionThrown = false; - - try - { - tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions()); - } - catch (TestPlatformException ex) - { - exceptionThrown = true; - Assert.AreEqual("No suitable test runtime provider found for this run.", ex.Message); - } - - Assert.IsTrue(exceptionThrown, "TestPlatformException should get thrown"); + Assert.ThrowsException(() => tp.CreateTestRunRequest(_mockRequestData.Object, null, new TestPlatformOptions(), It.IsAny>())); } /// @@ -397,13 +312,13 @@ public void CreateDiscoveryRequestShouldUpdateLoggerExtensionWhenDesignModeIsFal _hostManager.Setup(hm => hm.GetTestSources(discoveryCriteria.Sources)) .Returns(discoveryCriteria.Sources); - _testEngine.Setup(te => te.GetDiscoveryManager(It.IsAny(), _hostManager.Object, It.IsAny())).Returns(_discoveryManager.Object); + _testEngine.Setup(te => te.GetDiscoveryManager(It.IsAny(), It.IsAny(), It.IsAny>())).Returns(_discoveryManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); var tp = new TestableTestPlatform(_testEngine.Object, _mockFileHelper.Object, _hostManager.Object); // Action - var discoveryRequest = tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions()); + var discoveryRequest = tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions(), It.IsAny>()); // Verify _extensionManager.Verify(em => em.UseAdditionalExtensions(additionalExtensions, false)); @@ -415,7 +330,7 @@ public void CreateDiscoveryRequestShouldUpdateLoggerExtensionWhenDesignModeIsFal [TestMethod] public void 
CreateTestRunRequestShouldInitializeLoggerManagerForDesignMode() { - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); string settingsXml = @@ -428,7 +343,7 @@ public void CreateTestRunRequestShouldInitializeLoggerManagerForDesignMode() var testRunCriteria = new TestRunCriteria(new List { @"x:dummy\foo.dll" }, 10, false, settingsXml); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); + tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions(), It.IsAny>()); _loggerManager.Verify(lm => lm.Initialize(settingsXml)); } @@ -439,7 +354,7 @@ public void CreateTestRunRequestShouldInitializeLoggerManagerForDesignMode() [TestMethod] public void CreateDiscoveryRequestShouldInitializeLoggerManagerForDesignMode() { - _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_discoveryManager.Object); + _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_discoveryManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); string settingsXml = @@ -452,7 +367,7 @@ public void CreateDiscoveryRequestShouldInitializeLoggerManagerForDesignMode() var discoveryCriteria = new DiscoveryCriteria(new List { @"x:dummy\foo.dll" }, 10, settingsXml); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions()); + tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions(), It.IsAny>()); _loggerManager.Verify(lm => lm.Initialize(settingsXml)); } @@ -463,7 +378,7 @@ public void CreateDiscoveryRequestShouldInitializeLoggerManagerForDesignMode() [TestMethod] public void CreateTestRunRequestShouldInitializeLoggerManagerForNonDesignMode() { - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); string settingsXml = @@ -476,7 +391,7 @@ public void CreateTestRunRequestShouldInitializeLoggerManagerForNonDesignMode() var testRunCriteria = new TestRunCriteria(new List { "foo" }, 10, false, settingsXml); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions()); + tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, new TestPlatformOptions(), It.IsAny>()); _loggerManager.Verify(lm => lm.Initialize(settingsXml)); } @@ -487,7 +402,7 @@ public void CreateTestRunRequestShouldInitializeLoggerManagerForNonDesignMode() [TestMethod] public void CreateDiscoveryRequestShouldInitializeLoggerManagerForNonDesignMode() { - _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, _hostManager.Object, 
It.IsAny())).Returns(_discoveryManager.Object); + _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_discoveryManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); string settingsXml = @@ -500,7 +415,7 @@ public void CreateDiscoveryRequestShouldInitializeLoggerManagerForNonDesignMode( var discoveryCriteria = new DiscoveryCriteria(new List { "foo" }, 10, settingsXml); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions()); + tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, new TestPlatformOptions(), It.IsAny>()); _loggerManager.Verify(lm => lm.Initialize(settingsXml)); } @@ -514,7 +429,8 @@ public void StartTestSessionShouldThrowExceptionIfTestSessionCriteriaIsNull() tp.StartTestSession( new Mock().Object, null, - new Mock().Object)); + new Mock().Object, + new Dictionary())); } [TestMethod] @@ -536,7 +452,8 @@ public void StartTestSessionShouldReturnFalseIfDesignModeIsDisabled() tp.StartTestSession( new Mock().Object, testSessionCriteria, - new Mock().Object)); + new Mock().Object, + It.IsAny>())); } [TestMethod] @@ -545,8 +462,9 @@ public void StartTestSessionShouldReturnFalseIfTestSessionManagerIsNull() _testEngine.Setup( te => te.GetTestSessionManager( It.IsAny(), - It.IsAny())) - .Returns(null); + It.IsAny(), + It.IsAny>())) + .Returns((IProxyTestSessionManager)null!); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); var mockEventsHandler = new Mock(); @@ -574,7 +492,8 @@ public void StartTestSessionShouldReturnFalseIfTestSessionManagerIsNull() tp.StartTestSession( new Mock().Object, testSessionCriteria, - mockEventsHandler.Object)); + mockEventsHandler.Object, + It.IsAny>())); mockEventsHandler.Verify( eh => eh.HandleStartTestSessionComplete(It.IsAny()), @@ -607,14 +526,16 @@ public void StartTestSessionShouldReturnTrueIfTestSessionManagerStartSessionRetu _testEngine.Setup( te => te.GetTestSessionManager( It.IsAny(), - It.IsAny())) + It.IsAny(), + It.IsAny>())) .Returns(mockTestSessionManager.Object); Assert.IsTrue( tp.StartTestSession( new Mock().Object, testSessionCriteria, - mockEventsHandler.Object)); + mockEventsHandler.Object, + It.IsAny>())); mockTestSessionManager.Verify( tsm => tsm.StartSession(mockEventsHandler.Object, It.IsAny()), @@ -647,14 +568,16 @@ public void StartTestSessionShouldReturnFalseIfTestSessionManagerStartSessionRet _testEngine.Setup( te => te.GetTestSessionManager( It.IsAny(), - It.IsAny())) + It.IsAny(), + It.IsAny>())) .Returns(mockTestSessionManager.Object); Assert.IsFalse( tp.StartTestSession( mockRequestData.Object, testSessionCriteria, - mockEventsHandler.Object)); + mockEventsHandler.Object, + It.IsAny>())); mockTestSessionManager.Verify( tsm => tsm.StartSession(mockEventsHandler.Object, mockRequestData.Object), @@ -668,18 +591,18 @@ private void InvokeCreateDiscoveryRequest(TestPlatformOptions? 
options = null) _hostManager.Setup(hm => hm.GetTestSources(discoveryCriteria.Sources)) .Returns(discoveryCriteria.Sources); - _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_discoveryManager.Object); + _testEngine.Setup(te => te.GetDiscoveryManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_discoveryManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); var tp = new TestableTestPlatform(_testEngine.Object, _hostManager.Object); - tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, options); + tp.CreateDiscoveryRequest(_mockRequestData.Object, discoveryCriteria, options, new Dictionary()); } private void InvokeCreateTestRunRequest(TestPlatformOptions? options = null) { _executionManager.Setup(dm => dm.Initialize(false)).Verifiable(); - _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, _hostManager.Object, It.IsAny())).Returns(_executionManager.Object); + _testEngine.Setup(te => te.GetExecutionManager(_mockRequestData.Object, It.IsAny(), It.IsAny>())).Returns(_executionManager.Object); _testEngine.Setup(te => te.GetExtensionManager()).Returns(_extensionManager.Object); _testEngine.Setup(te => te.GetLoggerManager(_mockRequestData.Object)).Returns(_loggerManager.Object); @@ -688,7 +611,7 @@ private void InvokeCreateTestRunRequest(TestPlatformOptions? options = null) _hostManager.Setup(hm => hm.GetTestSources(testRunCriteria.Sources)) .Returns(testRunCriteria.Sources); - tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, options); + tp.CreateTestRunRequest(_mockRequestData.Object, testRunCriteria, options, new Dictionary()); } private class TestableTestPlatform : TestPlatform @@ -698,7 +621,7 @@ public TestableTestPlatform(ITestEngine testEngine, ITestRuntimeProvider hostPro { } - public TestableTestPlatform(ITestEngine testEngine, IFileHelper fileHelper, ITestRuntimeProvider? hostProvider) + public TestableTestPlatform(ITestEngine testEngine, IFileHelper fileHelper, ITestRuntimeProvider hostProvider) : base(testEngine, fileHelper, new TestableTestRuntimeProviderManager(hostProvider)) { } @@ -706,15 +629,15 @@ public TestableTestPlatform(ITestEngine testEngine, IFileHelper fileHelper, ITes private class TestableTestRuntimeProviderManager : TestRuntimeProviderManager { - private readonly ITestRuntimeProvider? _hostProvider; + private readonly ITestRuntimeProvider _hostProvider; - public TestableTestRuntimeProviderManager(ITestRuntimeProvider? hostProvider) + public TestableTestRuntimeProviderManager(ITestRuntimeProvider hostProvider) : base(TestSessionMessageLogger.Instance) { _hostProvider = hostProvider; } - public override ITestRuntimeProvider? 
GetTestHostManagerByRunConfiguration(string runConfiguration) + public override ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string runConfiguration, List _) { return _hostProvider; } diff --git a/test/Microsoft.TestPlatform.Common.UnitTests/Hosting/TestHostProviderManagerTests.cs b/test/Microsoft.TestPlatform.Common.UnitTests/Hosting/TestHostProviderManagerTests.cs index b059b59803..6245c9c28b 100644 --- a/test/Microsoft.TestPlatform.Common.UnitTests/Hosting/TestHostProviderManagerTests.cs +++ b/test/Microsoft.TestPlatform.Common.UnitTests/Hosting/TestHostProviderManagerTests.cs @@ -57,7 +57,7 @@ public void TestHostProviderManagerShouldReturnTestHostBasedOnRunConfiguration() "; var manager = TestRuntimeProviderManager.Instance; - Assert.IsNotNull(manager.GetTestHostManagerByRunConfiguration(runSettingsXml)); + Assert.IsNotNull(manager.GetTestHostManagerByRunConfiguration(runSettingsXml, null)); } [TestMethod] @@ -69,7 +69,7 @@ public void GetDefaultTestHostManagerReturnsANonNullInstance() Framework.DefaultFramework.Name, " "); - Assert.IsNotNull(TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml)); + Assert.IsNotNull(TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml, null)); } [TestMethod] @@ -81,8 +81,8 @@ public void GetDefaultTestHostManagerReturnsANewInstanceEverytime() Framework.DefaultFramework.Name, " "); - var instance1 = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml); - var instance2 = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml); + var instance1 = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml, null); + var instance2 = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml, null); Assert.AreNotEqual(instance1, instance2); } @@ -96,7 +96,7 @@ public void GetDefaultTestHostManagerReturnsDotnetCoreHostManagerIfFrameworkIsNe ".NETCoreApp,Version=v1.0", " "); - var testHostManager = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml); + var testHostManager = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml, null); Assert.AreEqual(typeof(TestableTestHostManager), testHostManager.GetType()); } @@ -110,7 +110,7 @@ public void GetDefaultTestHostManagerReturnsASharedManagerIfDisableAppDomainIsFa ".NETFramework,Version=v4.5.1", " "); - var testHostManager = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml); + var testHostManager = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml, null); testHostManager.Initialize(null, runSettingsXml); Assert.IsNotNull(testHostManager); @@ -126,7 +126,7 @@ public void GetDefaultTestHostManagerReturnsANonSharedManagerIfDisableAppDomainI ".NETFramework,Version=v4.5.1", "true "); - var testHostManager = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml); + var testHostManager = TestRuntimeProviderManager.Instance.GetTestHostManagerByRunConfiguration(runSettingsXml, null); testHostManager.Initialize(null, runSettingsXml); Assert.IsNotNull(testHostManager); @@ -146,7 +146,7 @@ public void TestHostProviderManagerShouldReturnNullIfTargetFrameworkIsPortable() "; var manager = TestRuntimeProviderManager.Instance; - Assert.IsNull(manager.GetTestHostManagerByRunConfiguration(runSettingsXml)); + 
Assert.IsNull(manager.GetTestHostManagerByRunConfiguration(runSettingsXml, null)); } #region Implementations diff --git a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelOperationManagerTests.cs b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelOperationManagerTests.cs index 935198e7ad..392be647ad 100644 --- a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelOperationManagerTests.cs +++ b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelOperationManagerTests.cs @@ -5,177 +5,262 @@ using System.Collections.Generic; using System.Linq; -using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; using Microsoft.VisualStudio.TestTools.UnitTesting; +using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; +using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.Parallel; +using FluentAssertions; namespace TestPlatform.CrossPlatEngine.UnitTests.Client; [TestClass] public class ParallelOperationManagerTests { - private MockParallelOperationManager _proxyParallelManager; - - public ParallelOperationManagerTests() - { - Func sampleCreator = - () => new SampleConcurrentClass(); - - _proxyParallelManager = new MockParallelOperationManager(sampleCreator, 2, true); - } - [TestMethod] - public void AbstractProxyParallelManagerShouldCreateCorrectNumberOfConcurrentObjects() + public void OperationManagerShouldRunOnlyMaximumParallelLevelOfWorkInParallelEvenWhenThereAreMoreWorkloads() { - var createdSampleClasses = new List(); - Func sampleCreator = - () => - { - var sample = new SampleConcurrentClass(); - createdSampleClasses.Add(sample); - return sample; - }; + // Arrange + Func createNewManager = _ => new SampleManager(); + var maxParallelLevel = 3; + var parallelOperationManager = new ParallelOperationManager(createNewManager, maxParallelLevel); - _proxyParallelManager = new MockParallelOperationManager(sampleCreator, 3, true); + // Create more workloads than our parallel level so we can observe that the maximum parallel level is reached but not more + var workloads = Enumerable.Range(1, maxParallelLevel + 2) + .Select(i => new ProviderSpecificWorkload(new SampleWorkload { Id = i }, provider: null)) + .ToList(); + var eventHandler = new SampleHandler(); - Assert.AreEqual(3, createdSampleClasses.Count, "Number of Concurrent Objects created should be 3"); - } + List workerCounts = new(); - [TestMethod] - public void AbstractProxyParallelManagerShouldUpdateToCorrectNumberOfConcurrentObjects() - { - var createdSampleClasses = new List(); - Func sampleCreator = - () => - { - var sample = new SampleConcurrentClass(); - createdSampleClasses.Add(sample); - return sample; - }; - - _proxyParallelManager = new MockParallelOperationManager(sampleCreator, 1, true); - - Assert.AreEqual(1, createdSampleClasses.Count, "Number of Concurrent Objects created should be 1"); - - _proxyParallelManager.UpdateParallelLevel(4); - - Assert.AreEqual(4, createdSampleClasses.Count, "Number of Concurrent Objects created should be 4"); + Func getEventHandler = (handler, _) => handler; + Action runWorkload = (manager, _, _) => + { + // Every time we run a workload check how many slots are occupied, + // we should see 3 slots at max, because that is our max parallel level, we should NOT see 4 or more: + // This is what the data should be: + // - At the start we schedule as much work as we can, workloads 1, 2, 3 + // are started and grab a slot. 
+ // We only update the slot count after scheduling all the work up to the max parallel level, + // so when we reach this method, all the slots are already occupied, so for workloads 1, 2, 3 we record 3, 3, 3. + // - Workload 1 finishes and leaves the slot, 4 starts and grabs a slot, 2, 3, 4 are now running we record 3. + // - workload 2 finishes and leaves the slot, 5 starts and grabs a slot, 3, 4, 5 are now running we record 3. + // - workload 2 finishes and leaves the slot, 5 starts and grabs a slot, 3, 4, 5 are now running we record 3. + // - workload 3 finishes and leaves the slot, there is no more work to do so we don't grab any additional slot. Just 4, 5 are now running we record 2. + // - workload 4 finishes and leaves the slot, there is no more work to do so we don't grab any additional slot. Just 5 is now running we record 1. + + workerCounts.Add(parallelOperationManager.OccupiedSlotCount); + + System.Threading.Thread.Sleep(100); + + // Tell the operation manager that we are done, and it should move to the next piece of work. + // Normally the operation manager would get this notification via the handler because the work we do + // is asynchronous, but here we know that we are already done so we just tell the operation manager directly + // and pass on the current manager that is done. + parallelOperationManager.RunNextWork(manager); + }; + + // Act + parallelOperationManager.StartWork(workloads, eventHandler, getEventHandler, runWorkload); + + // Assert + workerCounts.Should().BeEquivalentTo(3, 3, 3, 2, 1); } [TestMethod] - public void DoActionOnConcurrentObjectsShouldCallAllObjects() + public void OperationManagerShouldCreateOnlyAsManyParallelWorkersAsThereAreWorkloadsWhenTheAmountOfWorkloadsIsSmallerThanMaxParallelLevel() { - var createdSampleClasses = new List(); - Func sampleCreator = - () => - { - var sample = new SampleConcurrentClass(); - createdSampleClasses.Add(sample); - return sample; - }; + // Arrange + Func createNewManager = _ => new SampleManager(); + var maxParallelLevel = 10; + var parallelOperationManager = new ParallelOperationManager(createNewManager, maxParallelLevel); - _proxyParallelManager = new MockParallelOperationManager(sampleCreator, 4, true); + // Create less workloads than our parallel level so we can observe that only as many slots are created as there are workloads. + var workloads = Enumerable.Range(1, 2) + .Select(i => new ProviderSpecificWorkload(new SampleWorkload { Id = i }, provider: null)) + .ToList(); + var eventHandler = new SampleHandler(); - Assert.AreEqual(4, createdSampleClasses.Count, "Number of Concurrent Objects created should be 4"); + List workerCounts = new(); - int count = 0; - _proxyParallelManager.DoActionOnAllConcurrentObjects( - (sample) => - { - count++; - Assert.IsTrue(createdSampleClasses.Contains(sample), "Called object must be in the created list."); - // Make sure action is not called on same object multiple times - createdSampleClasses.Remove(sample); - }); + Func getEventHandler = (handler, _) => handler; + Action runWorkload = (manager, _, _) => + { + // See comments in test above for explanation. 
+ workerCounts.Add(parallelOperationManager.OccupiedSlotCount); + System.Threading.Thread.Sleep(100); - Assert.AreEqual(4, count, "Number of Concurrent Objects called should be 4"); + parallelOperationManager.RunNextWork(manager); + }; - Assert.AreEqual(0, createdSampleClasses.Count, "All concurrent objects must be called."); - } + // Act + parallelOperationManager.StartWork(workloads, eventHandler, getEventHandler, runWorkload); - [TestMethod] - public void AddManagerShouldAddAManagerWithHandlerInConcurrentManagerList() - { - // At the beginning it should be equal to parallel level - Assert.AreEqual(2, _proxyParallelManager.GetConcurrentManagersCount()); - - _proxyParallelManager.AddManager(new SampleConcurrentClass(true), new SampleHandlerClass()); - - Assert.AreEqual(3, _proxyParallelManager.GetConcurrentManagersCount()); - Assert.AreEqual(1, _proxyParallelManager.GetConcurrentManagerInstances().Count(m => m.CheckValue)); + // Assert + workerCounts.Should().BeEquivalentTo(2, 1); } + [TestMethod] - public void RemoveManagerShouldRemoveAManagerFromConcurrentManagerList() + public void OperationManagerShouldCreateAsManyMaxParallelLevel() { - var manager = new SampleConcurrentClass(true); - _proxyParallelManager.AddManager(manager, new SampleHandlerClass()); - - Assert.AreEqual(3, _proxyParallelManager.GetConcurrentManagersCount()); - - _proxyParallelManager.RemoveManager(manager); - - Assert.AreEqual(2, _proxyParallelManager.GetConcurrentManagersCount()); - Assert.AreEqual(0, _proxyParallelManager.GetConcurrentManagerInstances().Count(m => m.CheckValue)); + // Arrange + Func createNewManager = _ => new SampleManager(); + var maxParallelLevel = 10; + var parallelOperationManager = new ParallelOperationManager(createNewManager, maxParallelLevel); + + // Create less workloads than our parallel level so we can observe that only as many slots are created as there are workloads. + var workloads = Enumerable.Range(1, 2) + .Select(i => new ProviderSpecificWorkload(new SampleWorkload { Id = i }, provider: null)) + .ToList(); + var eventHandler = new SampleHandler(); + + List workerCounts = new(); + List availableWorkerCounts = new(); + + Func getEventHandler = (handler, _) => handler; + Action runWorkload = (manager, _, _) => + { + // See comments in test above for explanation. + workerCounts.Add(parallelOperationManager.OccupiedSlotCount); + availableWorkerCounts.Add(parallelOperationManager.AvailableSlotCount); + System.Threading.Thread.Sleep(100); + + parallelOperationManager.RunNextWork(manager); + }; + + // Act + parallelOperationManager.StartWork(workloads, eventHandler, getEventHandler, runWorkload); + + // Assert + workerCounts.Should().BeEquivalentTo(2, 1); + // We create 10 slots, because that is the max parallel level, when we observe, there are 2 workloads running, + // and then 1 workload running, so we see 8 and 9 (10 - 2, and 10 - 1). 
+ availableWorkerCounts.Should().BeEquivalentTo(8, 9); } [TestMethod] - public void UpdateHandlerForManagerShouldAddNewHandlerIfNotexist() + public void OperationManagerMovesToTheNextWorkloadOnlyWhenRunNextWorkIsCalled() { - var manager = new SampleConcurrentClass(true); - _proxyParallelManager.UpdateHandlerForManager(manager, new SampleHandlerClass()); + // Arrange + Func createNewManager = _ => new SampleManager(); + var maxParallelLevel = 2; + var parallelOperationManager = new ParallelOperationManager(createNewManager, maxParallelLevel); + + // Create more workloads than our parallel level so we can observe that when one workload is finished, calling RunNextWork will move on + // to the next workload. + var workloads = Enumerable.Range(1, maxParallelLevel + 3) + .Select(i => new ProviderSpecificWorkload(new SampleWorkload { Id = i }, provider: null)) + .ToList(); + var eventHandler = new SampleHandler(); + + List workloadsProcessed = new(); + + Func getEventHandler = (handler, _) => handler; + Action runWorkload = (manager, _, workload) => + { + // See comments in test above for explanation. + System.Threading.Thread.Sleep(100); - Assert.AreEqual(3, _proxyParallelManager.GetConcurrentManagersCount()); - Assert.AreEqual(1, _proxyParallelManager.GetConcurrentManagerInstances().Count(m => m.CheckValue)); + workloadsProcessed.Add(workload.Id); + // Only move to next when we run the first workload. Meaning we process 1, 2, and then 3, but not 4 and 5. + if (workload.Id == 1) + { + parallelOperationManager.RunNextWork(manager); + } + }; + + // Act + parallelOperationManager.StartWork(workloads, eventHandler, getEventHandler, runWorkload); + + // Assert + // We start by scheduling 2 workloads (1 and 2) because that is the max parallel level. + // Then we call next and go to 3. After that, we don't call next anymore which means we are done, + // even though we did not process workloads 4 and 5. + // (e.g. In real life Abort was called so the handler won't call RunNextWork, because we don't want to run the remaining sources, + // and all the sources that are currently running are aborted by calling Abort on each manager via DoActionOnAllManagers.) + workloadsProcessed.Should().BeEquivalentTo(1, 2, 3); } [TestMethod] - public void UpdateHandlerForManagerShouldUpdateHandlerForGivenManager() + public void OperationManagerRunsAnOperationOnAllActiveManagersWhenDoActionOnAllManagersIsCalled() { - var manager = new SampleConcurrentClass(true); - _proxyParallelManager.AddManager(manager, new SampleHandlerClass()); + // Arrange + var createdManagers = new List(); + // Store the managers we created so we can inspect them later and see if Abort was called on them. + Func createNewManager = _ => + { + var manager = new SampleManager(); + createdManagers.Add(manager); + return manager; + }; + + var maxParallelLevel = 2; + // Create more workloads than the parallel level so we can go past max parallel level of active workers and simulate that we + // are aborting in the middle of a run. + var workloads = Enumerable.Range(1, maxParallelLevel + 3) + .Select(i => new ProviderSpecificWorkload(new SampleWorkload { Id = i }, provider: null)) + .ToList(); + + var parallelOperationManager = new ParallelOperationManager(createNewManager, maxParallelLevel); + var eventHandler = new SampleHandler(); + + Func getEventHandler = (handler, _) => handler; + Action runWorkload = (manager, _, workload) => + { + // See comments in test above for explanation.
- // For current handler the value of variable CheckValue should be false; - Assert.IsFalse(_proxyParallelManager.GetHandlerForGivenManager(manager).CheckValue); + // Make workload 1 fast; we want to put this into a state where 2 and 3 are running and we call abort on them. + if (workload.Id != 1) + { + System.Threading.Thread.Sleep(100); + } + + // Only move to next when we run the first workload. Meaning we process 1, 2, and then 3, but not 4 and 5. + if (workload.Id == 1) + { + parallelOperationManager.RunNextWork(manager); + } + }; - var newHandler = new SampleHandlerClass(true); + // Start the work, so we process workload 1 and then move to 2. + parallelOperationManager.StartWork(workloads, eventHandler, getEventHandler, runWorkload); - // Update manager with new handler - _proxyParallelManager.UpdateHandlerForManager(manager, newHandler); + // Act + parallelOperationManager.DoActionOnAllManagers(manager => manager.Abort(), doActionsInParallel: true); - // It should not add new manager but update the current one - Assert.AreEqual(3, _proxyParallelManager.GetConcurrentManagersCount()); - Assert.IsTrue(_proxyParallelManager.GetHandlerForGivenManager(manager).CheckValue); + // Assert + // When we aborted, workload 1 was already processed, and 2 and 3 were active. + // We should see that the first manager did not call abort, but the second and third called abort, + // and there were no more managers created because we stopped calling next after 1 was done. + createdManagers.Select(manager => manager.AbortCalled).Should().BeEquivalentTo(false, true, true); } - private class MockParallelOperationManager : ParallelOperationManager + /// + /// Represents a manager that is responsible for processing a single given workload. Normally this would be a testhost. + /// + private class SampleManager { - public MockParallelOperationManager(Func createNewClient, int parallelLevel, bool sharedHosts) : - base(createNewClient, parallelLevel, sharedHosts) - { - } + public bool AbortCalled { get; private set; } - public void DoActionOnAllConcurrentObjects(Action action) + public void Abort() { - DoActionOnAllManagers(action, false); + AbortCalled = true; } } - private class SampleConcurrentClass + /// + /// Represents a handler. In our tests it does nothing, because we are not running any "async" work, + /// so we don't need a handler to call us back when processing one workload is done and we can progress to the next + /// workload. + /// + private class SampleHandler { - public readonly bool CheckValue; - public SampleConcurrentClass(bool value = false) - { - CheckValue = value; - } + } - private class SampleHandlerClass + // Represents a workload; normally this would be a test dll, or a collection of testcases from a single dll that + // are supposed to run on 1 testhost.
+ private class SampleWorkload { - public readonly bool CheckValue; - public SampleHandlerClass(bool value = false) - { - CheckValue = value; - } - + public int Id { get; set; } } } diff --git a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyDiscoveryManagerTests.cs b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyDiscoveryManagerTests.cs index 577f23a01f..e508746831 100644 --- a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyDiscoveryManagerTests.cs +++ b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyDiscoveryManagerTests.cs @@ -25,13 +25,15 @@ namespace TestPlatform.CrossPlatEngine.UnitTests.Client; [TestClass] public class ParallelProxyDiscoveryManagerTests { - private const int TaskTimeout = 15 * 1000; // In milliseconds. - private readonly List> _createdMockManagers; - private readonly Func _proxyManagerFunc; - private readonly Mock _mockHandler; + private const int Timeout3Seconds = 3 * 1000; + private readonly Queue> _preCreatedMockManagers; + private readonly List> _usedMockManagers; + private readonly Func _createMockManager; + private readonly Mock _mockEventHandler; private readonly List _sources = new() { "1.dll", "2.dll" }; - private readonly DiscoveryCriteria _testDiscoveryCriteria; - private bool _proxyManagerFuncCalled; + private readonly DiscoveryCriteria _discoveryCriteriaWith2Sources; + private readonly List _runtimeProviders; + private int _createMockManagerCalled; private readonly List _processedSources; private readonly ManualResetEventSlim _discoveryCompleted; private readonly Mock _mockRequestData; @@ -40,16 +42,42 @@ public class ParallelProxyDiscoveryManagerTests public ParallelProxyDiscoveryManagerTests() { _processedSources = new List(); - _createdMockManagers = new List>(); - _proxyManagerFunc = () => + _preCreatedMockManagers = new Queue>( + new List>{ + // Have at least as many of them as you have test dlls; + // they will be dequeued when we "create" a non-parallel + // manager. The setup adds a callback for the handler to complete + // the discovery. + new Mock(), + new Mock(), + new Mock(), + new Mock(), + new Mock(), + }); + _usedMockManagers = new List>(); + _createMockManager = _ => { - _proxyManagerFuncCalled = true; - var manager = new Mock(); - _createdMockManagers.Add(manager); + // We now create the manager at the last possible + // moment, not when we create the parallel proxy manager class, + // so rather than creating the mock here and adding the setup + // that allows the tests to complete, we instead pre-create a bunch of managers, + // then grab and set up the ones we need, and only assert on the used ones.
+ _createMockManagerCalled++; + var manager = _preCreatedMockManagers.Dequeue(); + _usedMockManagers.Add(manager); return manager.Object; }; - _mockHandler = new Mock(); - _testDiscoveryCriteria = new DiscoveryCriteria(_sources, 100, null); + _mockEventHandler = new Mock(); + _discoveryCriteriaWith2Sources = new DiscoveryCriteria(_sources, 100, null); + _runtimeProviders = new List { + new TestRuntimeProviderInfo(typeof(ITestRuntimeProvider), false, "", new List + { + new SourceDetail{ Source = _sources[0], Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + new SourceDetail{ Source = _sources[1], Architecture = Architecture.X86, Framework = Framework.DefaultFramework } + }) + }; + + // This event is Set by callback from _mockEventHandler in SetupDiscoveryManager _discoveryCompleted = new ManualResetEventSlim(false); _mockRequestData = new Mock(); _mockRequestData.Setup(rd => rd.MetricsCollection).Returns(new NoOpMetricsCollection()); @@ -58,45 +86,62 @@ public ParallelProxyDiscoveryManagerTests() } [TestMethod] - public void InitializeShouldCallAllConcurrentManagersOnce() + public void CreatingAndInitializingProxyExecutionManagerDoesNothingUntilThereIsActualWorkToDo() { InvokeAndVerifyInitialize(3); } [TestMethod] - public void InitializeShouldCallAllConcurrentManagersWithFalseFlagIfSkipDefaultAdaptersIsFalse() + public void CreatingAndInitializingProxyExecutionManagerDoesNothingUntilThereIsActualWorkToDoButItKeepsSkipDefaultAdaptersValueFalse() { - InvokeAndVerifyInitialize(3, false); + InvokeAndVerifyInitialize(3, skipDefaultAdapters: false); } [TestMethod] - public void InitializeShouldCallAllConcurrentManagersWithTrueFlagIfSkipDefaultAdaptersIsTrue() + public void CreatingAndInitializingProxyExecutionManagerDoesNothingUntilThereIsActualWorkToDoButItKeepsSkipDefaultAdaptersValueTrue() { - InvokeAndVerifyInitialize(3, true); + InvokeAndVerifyInitialize(3, skipDefaultAdapters: true); } [TestMethod] public void AbortShouldCallAllConcurrentManagersOnce() { - var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _proxyManagerFunc, new(), 4, false); + var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _createMockManager, dataAggregator: new(), parallelLevel: 1000, _runtimeProviders); + + // Starting parallel discovery will create 2 proxy managers, which we will then promptly abort. + parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, new Mock().Object); parallelDiscoveryManager.Abort(); - Assert.AreEqual(4, _createdMockManagers.Count, "Number of Concurrent Managers created should be 4"); - _createdMockManagers.ForEach(dm => dm.Verify(m => m.Abort(), Times.Once)); + Assert.AreEqual(2, _usedMockManagers.Count, "Number of Concurrent Managers created should be equal to the number of sources that should run"); + _usedMockManagers.ForEach(dm => dm.Verify(m => m.Abort(), Times.Once)); } [TestMethod] public void DiscoverTestsShouldProcessAllSources() { // Testcase filter should be passed to all parallel discovery criteria. 
- _testDiscoveryCriteria.TestCaseFilter = "Name~Test"; - var parallelDiscoveryManager = SetupDiscoveryManager(_proxyManagerFunc, 2, false); + _discoveryCriteriaWith2Sources.TestCaseFilter = "Name~Test"; + var parallelDiscoveryManager = SetupDiscoveryManager(_createMockManager, 2, false); - Task.Run(() => parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object)); + var task = Task.Run(() => parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object)); + var discoveryCompleted = _discoveryCompleted.Wait(Timeout3Seconds); - Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); + if (task.IsCompleted) + { + // If the work is done, either there is output, + // or an exception that we want to "receive" to + // fail our test. + task.GetAwaiter().GetResult(); + } + else + { + // We don't want to await the result because we + // completed or timed out on the event above. + } + + Assert.IsTrue(discoveryCompleted, "Test discovery not completed."); Assert.AreEqual(_sources.Count, _processedSources.Count, "All Sources must be processed."); AssertMissingAndDuplicateSources(_processedSources); } @@ -104,10 +149,10 @@ public void DiscoverTestsShouldProcessAllSources() [TestMethod] public void HandlePartialDiscoveryCompleteShouldReturnTrueIfDiscoveryWasAbortedWithEventHandler() { - var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _proxyManagerFunc, new(), 1, false); + var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _createMockManager, dataAggregator: new(), parallelLevel: 1, new List()); var proxyDiscovermanager = new ProxyDiscoveryManager(_mockRequestData.Object, new Mock().Object, new Mock().Object); - parallelDiscoveryManager.Abort(_mockHandler.Object); + parallelDiscoveryManager.Abort(_mockEventHandler.Object); bool isPartialDiscoveryComplete = parallelDiscoveryManager.HandlePartialDiscoveryComplete(proxyDiscovermanager, 20, new List(), isAborted: false); Assert.IsTrue(isPartialDiscoveryComplete); @@ -116,7 +161,7 @@ public void HandlePartialDiscoveryCompleteShouldReturnTrueIfDiscoveryWasAbortedW [TestMethod] public void HandlePartialDiscoveryCompleteShouldReturnTrueIfDiscoveryWasAborted() { - var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _proxyManagerFunc, new(), 1, false); + var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _createMockManager, dataAggregator: new(), parallelLevel: 1, new List()); var proxyDiscovermanager = new ProxyDiscoveryManager(_mockRequestData.Object, new Mock().Object, new Mock().Object); parallelDiscoveryManager.Abort(); @@ -130,16 +175,16 @@ public void DiscoveryTestsShouldStopDiscoveryIfAbortionWasRequested() { // Since the hosts are aborted, total aggregated tests sent across will be -1 var discoveryManagerMock = new Mock(); - _createdMockManagers.Add(discoveryManagerMock); - var parallelDiscoveryManager = SetupDiscoveryManager(() => discoveryManagerMock.Object, 1, true); + _preCreatedMockManagers.Enqueue(discoveryManagerMock); + var parallelDiscoveryManager = SetupDiscoveryManager(_ => discoveryManagerMock.Object, 1, true, totalTests: -1); Task.Run(() => { - parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object); + parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object); parallelDiscoveryManager.Abort(); }); - 
Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); + Assert.IsTrue(_discoveryCompleted.Wait(Timeout3Seconds), "Test discovery not completed."); Assert.AreEqual(1, _processedSources.Count, "One source should be processed."); } @@ -148,16 +193,16 @@ public void DiscoveryTestsShouldStopDiscoveryIfAbortionWithEventHandlerWasReques { // Since the hosts are aborted, total aggregated tests sent across will be -1 var discoveryManagerMock = new Mock(); - _createdMockManagers.Add(discoveryManagerMock); - var parallelDiscoveryManager = SetupDiscoveryManager(() => discoveryManagerMock.Object, 1, true); + _preCreatedMockManagers.Enqueue(discoveryManagerMock); + var parallelDiscoveryManager = SetupDiscoveryManager(_ => discoveryManagerMock.Object, 1, true, totalTests: -1); Task.Run(() => { - parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object); - parallelDiscoveryManager.Abort(_mockHandler.Object); + parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object); + parallelDiscoveryManager.Abort(_mockEventHandler.Object); }); - Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); + Assert.IsTrue(_discoveryCompleted.Wait(Timeout3Seconds), "Test discovery not completed."); Assert.AreEqual(1, _processedSources.Count, "One source should be processed."); } @@ -166,17 +211,18 @@ public void DiscoveryTestsShouldProcessAllSourceIfOneDiscoveryManagerIsStarved() { // Ensure that second discovery manager never starts. Expect 10 total tests. // Override DiscoveryComplete since overall aborted should be true - var parallelDiscoveryManager = SetupDiscoveryManager(_proxyManagerFunc, 2, false); - _createdMockManagers[1].Reset(); - _createdMockManagers[1].Setup(dm => dm.DiscoverTests(It.IsAny(), It.IsAny())) + var parallelDiscoveryManager = SetupDiscoveryManager(_createMockManager, 2, false, totalTests: 10); + var secondMockManager = _preCreatedMockManagers.ToArray()[1]; + secondMockManager.Reset(); + secondMockManager.Setup(dm => dm.DiscoverTests(It.IsAny(), It.IsAny())) .Throws(); - _mockHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) + _mockEventHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) .Callback>((t, l) => _discoveryCompleted.Set()); - Task.Run(() => parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object)); + Task.Run(() => parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object)); // Processed sources should be 1 since the 2nd source is never discovered - Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); + Assert.IsTrue(_discoveryCompleted.Wait(Timeout3Seconds), "Test discovery not completed."); Assert.AreEqual(1, _processedSources.Count, "All Sources must be processed."); } @@ -185,18 +231,19 @@ public void DiscoveryTestsShouldCatchExceptionAndHandleLogMessageOfError() { // Ensure that second discovery manager never starts. Expect 10 total tests. 
// Override DiscoveryComplete since overall aborted should be true - var parallelDiscoveryManager = SetupDiscoveryManager(_proxyManagerFunc, 2, false); - _createdMockManagers[1].Reset(); - _createdMockManagers[1].Setup(dm => dm.DiscoverTests(It.IsAny(), It.IsAny())) + var parallelDiscoveryManager = SetupDiscoveryManager(_createMockManager, 2, false, totalTests: 10); + var secondMockManager = _preCreatedMockManagers.ToArray()[1]; + secondMockManager.Reset(); + secondMockManager.Setup(dm => dm.DiscoverTests(It.IsAny(), It.IsAny())) .Throws(); - _mockHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) + _mockEventHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) .Callback>((t, l) => _discoveryCompleted.Set()); - Task.Run(() => parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object)); + Task.Run(() => parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object)); // Processed sources should be 1 since the 2nd source is never discovered - Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); - _mockHandler.Verify(s => s.HandleLogMessage(TestMessageLevel.Error, It.IsAny()), Times.Once); + Assert.IsTrue(_discoveryCompleted.Wait(Timeout3Seconds), "Test discovery not completed."); + _mockEventHandler.Verify(s => s.HandleLogMessage(TestMessageLevel.Error, It.IsAny()), Times.Once); } [TestMethod] @@ -204,56 +251,65 @@ public void DiscoveryTestsShouldCatchExceptionAndHandleRawMessageOfTestMessage() { // Ensure that second discovery manager never starts. Expect 10 total tests. // Override DiscoveryComplete since overall aborted should be true - var parallelDiscoveryManager = SetupDiscoveryManager(_proxyManagerFunc, 2, false); - _createdMockManagers[1].Reset(); - _createdMockManagers[1].Setup(dm => dm.DiscoverTests(It.IsAny(), It.IsAny())) + var parallelDiscoveryManager = SetupDiscoveryManager(_createMockManager, 2, false, totalTests: 10); + var secondMockManager = _preCreatedMockManagers.ToArray()[1]; + secondMockManager.Reset(); + secondMockManager.Setup(dm => dm.DiscoverTests(It.IsAny(), It.IsAny())) .Throws(); - _mockHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) + _mockEventHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) .Callback>((t, l) => _discoveryCompleted.Set()); - Task.Run(() => parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object)); + Task.Run(() => parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object)); // Processed sources should be 1 since the 2nd source is never discovered - Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); - _mockHandler.Verify(s => s.HandleRawMessage(It.Is(str => str.Contains(MessageType.TestMessage)))); + Assert.IsTrue(_discoveryCompleted.Wait(Timeout3Seconds), "Test discovery not completed."); + _mockEventHandler.Verify(s => s.HandleRawMessage(It.Is(str => str.Contains(MessageType.TestMessage)))); } [TestMethod] public void HandlePartialDiscoveryCompleteShouldCreateANewProxyDiscoveryManagerIfIsAbortedIsTrue() { - var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _proxyManagerFunc, new(), 1, false); - var proxyDiscovermanager = new ProxyDiscoveryManager(_mockRequestData.Object, new Mock().Object, new Mock().Object); + var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _createMockManager, dataAggregator: new(), 
parallelLevel: 1, _runtimeProviders); + + // Trigger test discovery; this will create a manager by calling the _createMockManager func, + // which dequeues it into _usedMockManagers. + parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object); + var completedManager = _usedMockManagers[0]; - _proxyManagerFuncCalled = false; - parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object); - parallelDiscoveryManager.HandlePartialDiscoveryComplete(proxyDiscovermanager, 20, new List(), isAborted: true); + // act + // Tell the manager that completedManager finished work, and that it should progress to the next piece of work + parallelDiscoveryManager.HandlePartialDiscoveryComplete(completedManager.Object, 20, new List(), isAborted: true); - Assert.IsTrue(_proxyManagerFuncCalled); + // assert + // We created 2 managers: 1 for the original work and another one + // when we called HandlePartialDiscoveryComplete and it moved on to the next piece of work. + Assert.AreEqual(2, _createMockManagerCalled); } [TestMethod] public void DiscoveryTestsWithCompletionMarksAllSourcesAsFullyDiscovered() { - _testDiscoveryCriteria.TestCaseFilter = "Name~Test"; - var parallelDiscoveryManager = SetupDiscoveryManager(_proxyManagerFunc, 2, false); + _discoveryCriteriaWith2Sources.TestCaseFilter = "Name~Test"; + var parallelDiscoveryManager = SetupDiscoveryManager(_createMockManager, 2, false); - Task.Run(() => parallelDiscoveryManager.DiscoverTests(_testDiscoveryCriteria, _mockHandler.Object)); + Task.Run(() => parallelDiscoveryManager.DiscoverTests(_discoveryCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_discoveryCompleted.Wait(TaskTimeout), "Test discovery not completed."); + Assert.IsTrue(_discoveryCompleted.Wait(Timeout3Seconds), "Test discovery not completed."); Assert.AreEqual(_sources.Count, _processedSources.Count, "All Sources must be processed."); CollectionAssert.AreEquivalent(_sources, _dataAggregator.GetSourcesWithStatus(DiscoveryStatus.FullyDiscovered)); Assert.AreEqual(0, _dataAggregator.GetSourcesWithStatus(DiscoveryStatus.PartiallyDiscovered).Count); Assert.AreEqual(0, _dataAggregator.GetSourcesWithStatus(DiscoveryStatus.NotDiscovered).Count); } - private ParallelProxyDiscoveryManager SetupDiscoveryManager(Func getProxyManager, int parallelLevel, bool abortDiscovery) + private IParallelProxyDiscoveryManager SetupDiscoveryManager(Func getProxyManager, int parallelLevel, bool abortDiscovery, int totalTests = 20) { - var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, getProxyManager, _dataAggregator, parallelLevel, false); + var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, getProxyManager, dataAggregator: new(), parallelLevel, _runtimeProviders); SetupDiscoveryTests(_processedSources, abortDiscovery); // Setup a complete handler for parallel discovery manager - _mockHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) - .Callback>((discoveryCompleteEventArgs, lastChunk) => _discoveryCompleted.Set()); + _mockEventHandler.Setup(mh => mh.HandleDiscoveryComplete(It.IsAny(), null)) + .Callback>( + (discoveryCompleteEventArgs, lastChunk) => _discoveryCompleted.Set()); return parallelDiscoveryManager; } @@ -261,7 +317,10 @@ private ParallelProxyDiscoveryManager SetupDiscoveryManager(Func processedSources, bool isAbort) { var syncObject = new object(); - foreach (var manager in _createdMockManagers) + // This sets up callbacks for the handler that we pass
through. + // We pick up those managers in the _createMockManager func, + // and return them. + foreach (var manager in _preCreatedMockManagers.ToArray()) { manager.Setup(m => m.DiscoverTests(It.IsAny(), It.IsAny())). Callback( @@ -275,8 +334,8 @@ private void SetupDiscoveryTests(List processedSources, bool isAbort) _dataAggregator.MarkSourcesWithStatus(criteria.Sources, DiscoveryStatus.FullyDiscovered); Task.Delay(100).Wait(); - Assert.AreEqual(_testDiscoveryCriteria.TestCaseFilter, criteria.TestCaseFilter); + Assert.AreEqual(_discoveryCriteriaWith2Sources.TestCaseFilter, criteria.TestCaseFilter); handler.HandleDiscoveryComplete(isAbort ? new DiscoveryCompleteEventArgs(-1, isAbort) : new DiscoveryCompleteEventArgs(10, isAbort), null); }); } @@ -305,15 +364,15 @@ private void AssertMissingAndDuplicateSources(List processedSources) } } - private void InvokeAndVerifyInitialize(int concurrentManagersCount, bool skipDefaultAdapters = false) + private void InvokeAndVerifyInitialize(int maxParallelLevel, bool skipDefaultAdapters = false) { - var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _proxyManagerFunc, new(), concurrentManagersCount, false); + var parallelDiscoveryManager = new ParallelProxyDiscoveryManager(_mockRequestData.Object, _createMockManager, dataAggregator: new(), maxParallelLevel, new List()); // Action parallelDiscoveryManager.Initialize(skipDefaultAdapters); // Verify - Assert.AreEqual(concurrentManagersCount, _createdMockManagers.Count, $"Number of Concurrent Managers created should be {concurrentManagersCount}"); - _createdMockManagers.ForEach(dm => dm.Verify(m => m.Initialize(skipDefaultAdapters), Times.Once)); + Assert.AreEqual(0, _usedMockManagers.Count, $"No managers are pre-created until there is work for them."); + _usedMockManagers.ForEach(dm => dm.Verify(m => m.Initialize(skipDefaultAdapters), Times.Once)); } } diff --git a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyExecutionManagerTests.cs b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyExecutionManagerTests.cs index 345bcf48f9..219b595dea 100644 --- a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyExecutionManagerTests.cs +++ b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/Parallel/ParallelProxyExecutionManagerTests.cs @@ -13,7 +13,6 @@ using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.ObjectModel; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.Parallel; -using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.DataCollection; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.DataCollection.Interfaces; using Microsoft.VisualStudio.TestPlatform.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; @@ -29,100 +28,121 @@ namespace TestPlatform.CrossPlatEngine.UnitTests.Client; [TestClass] public class ParallelProxyExecutionManagerTests { - private static readonly int TaskTimeout = 15 * 1000; // In milliseconds + private static readonly int Timeout3Seconds = 3 * 1000; // In milliseconds + + private readonly List> _usedMockManagers; + private readonly Func _createMockManager; + private readonly Mock _mockEventHandler; - private readonly List> _createdMockManagers; - private readonly Mock _mockHandler; private readonly List _sources; private readonly List _processedSources; - private readonly TestRunCriteria 
_testRunCriteriaWithSources; + private readonly TestRunCriteria _testRunCriteriaWith2Sources; + private readonly List _runtimeProviders; private readonly List _testCases; private readonly List _processedTestCases; - private readonly TestRunCriteria _testRunCriteriaWithTests; + private readonly TestRunCriteria _testRunCriteriaWithTestsFrom3Dlls; + + private int _createMockManagerCalled; private readonly ManualResetEventSlim _executionCompleted; + private readonly Queue> _preCreatedMockManagers; private readonly Mock _mockRequestData; - private Func _proxyManagerFunc; - private Mock? _mockTestHostManager; - private Mock? _mockRequestSender; - private Mock? _mockDataCollectionManager; - private bool _proxyManagerFuncCalled; - public ParallelProxyExecutionManagerTests() { _executionCompleted = new ManualResetEventSlim(false); - _createdMockManagers = new List>(); - _proxyManagerFunc = () => + _preCreatedMockManagers = new Queue>( + new List> + { + new Mock(), + new Mock(), + new Mock(), + new Mock(), + }); + _usedMockManagers = new List>(); + _createMockManager = _ => { - _proxyManagerFuncCalled = true; - var manager = new Mock(); - _createdMockManagers.Add(manager); + _createMockManagerCalled++; + var manager = _preCreatedMockManagers.Dequeue(); + _usedMockManagers.Add(manager); return manager.Object; }; - _mockHandler = new Mock(); + _mockEventHandler = new Mock(); // Configure sources _sources = new List() { "1.dll", "2.dll" }; _processedSources = new List(); - _testRunCriteriaWithSources = new TestRunCriteria(_sources, 100, false, string.Empty, TimeSpan.MaxValue, null, "Name~Test", new FilterOptions() { FilterRegEx = @"^[^\s\(]+" }); + _testRunCriteriaWith2Sources = new TestRunCriteria(_sources, 100, false, string.Empty, TimeSpan.MaxValue, null, "Name~Test", new FilterOptions() { FilterRegEx = @"^[^\s\(]+" }); + _runtimeProviders = new List { + new TestRuntimeProviderInfo(typeof(ITestRuntimeProvider), false, "", new List + { + new SourceDetail{ Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + new SourceDetail{ Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + // For testcases on the bottom. 
+ new SourceDetail{ Source = "3.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }) + }; // Configure testcases _testCases = CreateTestCases(); _processedTestCases = new List(); - _testRunCriteriaWithTests = new TestRunCriteria(_testCases, 100); + _testRunCriteriaWithTestsFrom3Dlls = new TestRunCriteria(_testCases, 100); _mockRequestData = new Mock(); _mockRequestData.Setup(rd => rd.MetricsCollection).Returns(new NoOpMetricsCollection()); _mockRequestData.Setup(rd => rd.ProtocolConfig).Returns(new ProtocolConfig()); } [TestMethod] - public void InitializeShouldCallAllConcurrentManagersOnce() + public void NoManagersArePreCreatedUntilThereIsWorkForThem() { InvokeAndVerifyInitialize(3); } [TestMethod] - public void InitializeShouldCallAllConcurrentManagersWithFalseFlagIfSkipDefaultAdaptersIsFalse() + public void NoManagersArePreCreatedUntilThereIsWorkForThemButSkipDefaultAdaptersValueFalseIsKept() { - InvokeAndVerifyInitialize(3, false); + InvokeAndVerifyInitialize(3, skipDefaultAdapters: false); } [TestMethod] - public void InitializeShouldCallAllConcurrentManagersWithTrueFlagIfSkipDefaultAdaptersIsTrue() + public void NoManagersArePreCreatedUntilThereIsWorkForThemButSkipDefaultAdaptersValueTrueIsKept() { - InvokeAndVerifyInitialize(3, true); + InvokeAndVerifyInitialize(3, skipDefaultAdapters: true); } [TestMethod] public void AbortShouldCallAllConcurrentManagersOnce() { - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _proxyManagerFunc, 4); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _createMockManager, parallelLevel: 1000, _runtimeProviders); + // Starting parallel run will create 2 proxy managers, which we will then promptly abort. + parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, new Mock().Object); parallelExecutionManager.Abort(It.IsAny()); - Assert.AreEqual(4, _createdMockManagers.Count, "Number of Concurrent Managers created should be 4"); - _createdMockManagers.ForEach(em => em.Verify(m => m.Abort(It.IsAny()), Times.Once)); + Assert.AreEqual(2, _usedMockManagers.Count, "Number of Concurrent Managers created should be equal to the amount of dlls that run"); + _usedMockManagers.ForEach(em => em.Verify(m => m.Abort(It.IsAny()), Times.Once)); } [TestMethod] public void CancelShouldCallAllConcurrentManagersOnce() { - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _proxyManagerFunc, 4); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _createMockManager, 4, _runtimeProviders); + // Starting parallel run will create 2 proxy managers, which we will then promptly cancel. 
+ parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, new Mock().Object); parallelExecutionManager.Cancel(It.IsAny()); - Assert.AreEqual(4, _createdMockManagers.Count, "Number of Concurrent Managers created should be 4"); - _createdMockManagers.ForEach(em => em.Verify(m => m.Cancel(It.IsAny()), Times.Once)); + Assert.AreEqual(2, _usedMockManagers.Count, "Number of Concurrent Managers created should be equal to the amount of dlls that run"); + _usedMockManagers.ForEach(em => em.Verify(m => m.Cancel(It.IsAny()), Times.Once)); } [TestMethod] public void StartTestRunShouldProcessAllSources() { - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2); + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2); - parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object); + parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(_sources.Count, _processedSources.Count, "All Sources must be processed."); AssertMissingAndDuplicateSources(_processedSources); } @@ -132,11 +152,11 @@ public void StartTestRunShouldProcessAllSources() [TestMethod] public void StartTestRunShouldProcessAllTestCases() { - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 3, setupTestCases: true); + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 3, setupTestCases: true); - parallelExecutionManager.StartTestRun(_testRunCriteriaWithTests, _mockHandler.Object); + parallelExecutionManager.StartTestRun(_testRunCriteriaWithTestsFrom3Dlls, _mockEventHandler.Object); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(_testCases.Count, _processedTestCases.Count, "All Tests must be processed."); AssertMissingAndDuplicateTestCases(_testCases, _processedTestCases); } @@ -144,11 +164,11 @@ public void StartTestRunShouldProcessAllTestCases() [TestMethod] public void StartTestRunWithSourcesShouldNotSendCompleteUntilAllSourcesAreProcessed() { - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2); + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(_sources.Count, _processedSources.Count, "All Sources must be processed."); AssertMissingAndDuplicateSources(_processedSources); } @@ -156,66 +176,67 @@ public void StartTestRunWithSourcesShouldNotSendCompleteUntilAllSourcesAreProces [TestMethod] public void HandlePartialRunCompleteShouldCreateNewProxyExecutionManagerIfDataCollectionEnabled() { - var completeArgs = new TestRunCompleteEventArgs(null, true, true, null, null, null, TimeSpan.Zero); - _mockTestHostManager = new Mock(); - _mockRequestSender = new Mock(); - _mockDataCollectionManager = new Mock(); - var proxyDataCollectionManager = new ProxyExecutionManagerWithDataCollection(_mockRequestData.Object, 
_mockRequestSender.Object, _mockTestHostManager.Object, _mockDataCollectionManager.Object); - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2, setupTestCases: true); - - _proxyManagerFuncCalled = false; - parallelExecutionManager.HandlePartialRunComplete(proxyDataCollectionManager, completeArgs, null, null, null); - Assert.IsTrue(_proxyManagerFuncCalled); - } - - [TestMethod] - public void HandlePartialRunCompleteShouldCreateNewProxyExecutionManagerIfDataCollectionEnabledAndCreatorWithDataCollection() - { - var completeArgs = new TestRunCompleteEventArgs(null, true, true, null, null, null, TimeSpan.Zero); - _mockTestHostManager = new Mock(); - _mockRequestSender = new Mock(); - _mockDataCollectionManager = new Mock(); - var proxyDataCollectionManager = new ProxyExecutionManagerWithDataCollection(_mockRequestData.Object, _mockRequestSender.Object, _mockTestHostManager.Object, _mockDataCollectionManager.Object); - var managers = new List>(); - _proxyManagerFunc = () => - { - _proxyManagerFuncCalled = true; - var manager = new Mock(_mockRequestData.Object, _mockRequestSender.Object, _mockTestHostManager.Object, _mockDataCollectionManager.Object); - managers.Add(manager); - return manager.Object; - }; - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2, setupTestCases: true); - - _proxyManagerFuncCalled = false; - parallelExecutionManager.HandlePartialRunComplete(proxyDataCollectionManager, completeArgs, null, null, null); - Assert.IsTrue(_proxyManagerFuncCalled); - - var handler = parallelExecutionManager.GetHandlerForGivenManager(managers.Last().Object); - Assert.IsTrue(handler is ParallelDataCollectionEventsHandler); + var completeArgs = new TestRunCompleteEventArgs(null, isCanceled: false, isAborted: false, null, null, null, TimeSpan.Zero); + var mockTestHostManager = new Mock(); + var mockRequestSender = new Mock(); + var mockDataCollectionManager = new Mock(); + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2, setupTestCases: true); + + // Trigger the test run; this will create a manager by calling the _createMockManager func, + // which dequeues it into _usedMockManagers. + parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object); + var completedManager = _usedMockManagers[0]; + + // act + // Tell the manager that completedManager finished work, and that it should progress to the next piece of work + parallelExecutionManager.HandlePartialRunComplete(completedManager.Object, completeArgs, null, null, null); + + // assert + // We created 2 managers: 1 for the original work and another one + // when we called HandlePartialRunComplete and it moved on to the next piece of work.
+ Assert.AreEqual(2, _createMockManagerCalled); } [TestMethod] public void HandlePartialRunCompleteShouldCreateNewProxyExecutionManagerIfIsAbortedIsTrue() { var completeArgs = new TestRunCompleteEventArgs(null, true, true, null, null, null, TimeSpan.Zero); - _mockTestHostManager = new Mock(); - _mockRequestSender = new Mock(); - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2, setupTestCases: true); - - _proxyManagerFuncCalled = false; - var proxyExecutionManagerManager = new ProxyExecutionManager(_mockRequestData.Object, _mockRequestSender.Object, _mockTestHostManager.Object); - parallelExecutionManager.HandlePartialRunComplete(proxyExecutionManagerManager, completeArgs, null, null, null); - Assert.IsTrue(_proxyManagerFuncCalled); + var mockTestHostManager = new Mock(); + var mockRequestSender = new Mock(); + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2, setupTestCases: true); + + // Trigger the test run; this will create a manager by calling the _createMockManager func, + // which dequeues it into _usedMockManagers. + parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object); + var completedManager = _usedMockManagers[0]; + + // act + // Tell the manager that completedManager finished work, and that it should progress to the next piece of work + parallelExecutionManager.HandlePartialRunComplete(completedManager.Object, completeArgs, null, null, null); + + // assert + // We created 2 managers: 1 for the original work and another one + // when we called HandlePartialRunComplete and it moved on to the next piece of work. + Assert.AreEqual(2, _createMockManagerCalled); } [TestMethod] public void StartTestRunWithTestsShouldNotSendCompleteUntilAllTestsAreProcessed() { - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 3, setupTestCases: true); + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 3, setupTestCases: true); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithTests, _mockHandler.Object)); + var task = Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithTestsFrom3Dlls, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + bool executionCompleted = _executionCompleted.Wait(Timeout3Seconds); + + if (task.IsCompleted) + { + // Receive any exception if one happened + // Don't await if not completed, to avoid hanging the test.
+ task.GetAwaiter().GetResult(); + } + + Assert.IsTrue(executionCompleted, "Test run not completed."); Assert.AreEqual(_testCases.Count, _processedTestCases.Count, "All Tests must be processed."); AssertMissingAndDuplicateTestCases(_testCases, _processedTestCases); } @@ -224,14 +245,14 @@ public void StartTestRunWithTestsShouldNotSendCompleteUntilAllTestsAreProcessed( public void StartTestRunShouldNotProcessAllSourcesOnExecutionCancelsForAnySource() { var executionManagerMock = new Mock(); - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, () => executionManagerMock.Object, 1); - _createdMockManagers.Add(executionManagerMock); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _ => executionManagerMock.Object, 1, _runtimeProviders); + _preCreatedMockManagers.Enqueue(executionManagerMock); SetupMockManagers(_processedSources, isCanceled: true, isAborted: false); SetupHandleTestRunComplete(_executionCompleted); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(1, _processedSources.Count, "Abort should stop all sources execution."); } @@ -239,15 +260,15 @@ public void StartTestRunShouldNotProcessAllSourcesOnExecutionCancelsForAnySource public void StartTestRunShouldNotProcessAllSourcesOnExecutionAborted() { var executionManagerMock = new Mock(); - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, () => executionManagerMock.Object, 1); - _createdMockManagers.Add(executionManagerMock); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _ => executionManagerMock.Object, 1, _runtimeProviders); + _preCreatedMockManagers.Enqueue(executionManagerMock); SetupMockManagers(_processedSources, isCanceled: false, isAborted: false); SetupHandleTestRunComplete(_executionCompleted); parallelExecutionManager.Abort(It.IsAny()); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(1, _processedSources.Count, "Abort should stop all sources execution."); } @@ -255,14 +276,14 @@ public void StartTestRunShouldNotProcessAllSourcesOnExecutionAborted() public void StartTestRunShouldProcessAllSourcesOnExecutionAbortsForAnySource() { var executionManagerMock = new Mock(); - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, () => executionManagerMock.Object, 1); - _createdMockManagers.Add(executionManagerMock); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _ => executionManagerMock.Object, 1, _runtimeProviders); + _preCreatedMockManagers.Enqueue(executionManagerMock); SetupMockManagers(_processedSources, isCanceled: false, isAborted: true); SetupHandleTestRunComplete(_executionCompleted); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, 
_mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(2, _processedSources.Count, "Abort should stop all sources execution."); } @@ -270,53 +291,56 @@ public void StartTestRunShouldProcessAllSourcesOnExecutionAbortsForAnySource() public void StartTestRunShouldProcessAllSourceIfOneDiscoveryManagerIsStarved() { // Ensure that second discovery manager never starts. Expect 10 total tests. - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2); - _createdMockManagers[1].Reset(); - _createdMockManagers[1].Setup(em => em.StartTestRun(It.IsAny(), It.IsAny())) + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2); + var mockManagers = _preCreatedMockManagers.ToArray(); + mockManagers[1].Reset(); + mockManagers[1].Setup(em => em.StartTestRun(It.IsAny(), It.IsAny())) .Throws(); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); // Processed sources should be 1 since the 2nd source is never discovered - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.AreEqual(1, _processedSources.Count, "All Sources must be processed."); } [TestMethod] public void StartTestRunShouldCatchExceptionAndHandleLogMessageOfError() { - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2); - _createdMockManagers[1].Reset(); - _createdMockManagers[1].Setup(em => em.StartTestRun(It.IsAny(), It.IsAny())) + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2); + var mockManagers = _preCreatedMockManagers.ToArray(); + mockManagers[1].Reset(); + mockManagers[1].Setup(em => em.StartTestRun(It.IsAny(), It.IsAny())) .Throws(); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); - _mockHandler.Verify(s => s.HandleLogMessage(TestMessageLevel.Error, It.IsAny()), Times.Once); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); + _mockEventHandler.Verify(s => s.HandleLogMessage(TestMessageLevel.Error, It.IsAny()), Times.Once); } [TestMethod] public void StartTestRunShouldCatchExceptionAndHandleRawMessageOfTestMessage() { - var parallelExecutionManager = SetupExecutionManager(_proxyManagerFunc, 2); - _createdMockManagers[1].Reset(); - _createdMockManagers[1].Setup(em => em.StartTestRun(It.IsAny(), It.IsAny())) + var parallelExecutionManager = SetupExecutionManager(_createMockManager, 2); + var mockManagers = _preCreatedMockManagers.ToArray(); + mockManagers[1].Reset(); + mockManagers[1].Setup(em => em.StartTestRun(It.IsAny(), It.IsAny())) .Throws(); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), 
"Test run not completed."); - _mockHandler.Verify(s => s.HandleRawMessage(It.Is(str => str.Contains(MessageType.TestMessage)))); + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); + _mockEventHandler.Verify(s => s.HandleRawMessage(It.Is(str => str.Contains(MessageType.TestMessage)))); } [TestMethod] public void StartTestRunShouldAggregateRunData() { - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _proxyManagerFunc, 2); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _createMockManager, 2, _runtimeProviders); var syncObject = new object(); - foreach (var manager in _createdMockManagers) + foreach (var manager in _preCreatedMockManagers) { manager.Setup(m => m.StartTestRun(It.IsAny(), It.IsAny())). Callback( @@ -356,7 +380,7 @@ public void StartTestRunShouldAggregateRunData() } Exception? assertException = null; - _mockHandler.Setup(m => m.HandleTestRunComplete( + _mockEventHandler.Setup(m => m.HandleTestRunComplete( It.IsAny(), It.IsAny(), It.IsAny>(), @@ -393,27 +417,28 @@ public void StartTestRunShouldAggregateRunData() } }); - Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWithSources, _mockHandler.Object)); + Task.Run(() => parallelExecutionManager.StartTestRun(_testRunCriteriaWith2Sources, _mockEventHandler.Object)); - Assert.IsTrue(_executionCompleted.Wait(TaskTimeout), "Test run not completed."); + // If you are debugging this, maybe it is good idea to set this timeout higher. + Assert.IsTrue(_executionCompleted.Wait(Timeout3Seconds), "Test run not completed."); Assert.IsNull(assertException, assertException?.ToString()); Assert.AreEqual(_sources.Count, _processedSources.Count, "All Sources must be processed."); AssertMissingAndDuplicateSources(_processedSources); } - private ParallelProxyExecutionManager SetupExecutionManager(Func proxyManagerFunc, int parallelLevel) + private ParallelProxyExecutionManager SetupExecutionManager(Func proxyManagerFunc, int parallelLevel) { return SetupExecutionManager(proxyManagerFunc, parallelLevel, false); } - private ParallelProxyExecutionManager SetupExecutionManager(Func proxyManagerFunc, int parallelLevel, bool setupTestCases) + private ParallelProxyExecutionManager SetupExecutionManager(Func proxyManagerFunc, int parallelLevel, bool setupTestCases) { - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, proxyManagerFunc, parallelLevel); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, proxyManagerFunc, parallelLevel, _runtimeProviders); if (setupTestCases) { - SetupMockManagersForTestCase(_processedTestCases, _testRunCriteriaWithTests); + SetupMockManagersForTestCase(_processedTestCases, _testRunCriteriaWithTestsFrom3Dlls); } else { @@ -426,7 +451,7 @@ private ParallelProxyExecutionManager SetupExecutionManager(Func mh.HandleTestRunComplete(It.IsAny(), + _mockEventHandler.Setup(mh => mh.HandleTestRunComplete(It.IsAny(), It.IsAny(), It.IsAny>(), It.IsAny>())) @@ -493,7 +518,7 @@ private static void AssertMissingAndDuplicateTestCases(List tests, Lis private void SetupMockManagersForTestCase(List processedTestCases, TestRunCriteria testRunCriteria) { var syncObject = new object(); - foreach (var manager in _createdMockManagers) + foreach (var manager in _preCreatedMockManagers) { manager.Setup(m => m.StartTestRun(It.IsAny(), It.IsAny())). 
Callback( @@ -528,7 +553,7 @@ private static List CreateTestCases() private void SetupMockManagers(List processedSources, bool isCanceled = false, bool isAborted = false) { var syncObject = new object(); - foreach (var manager in _createdMockManagers) + foreach (var manager in _preCreatedMockManagers) { manager.Setup(m => m.StartTestRun(It.IsAny(), It.IsAny())). Callback( @@ -540,20 +565,18 @@ private void SetupMockManagers(List processedSources, bool isCanceled = } Task.Delay(100).Wait(); - // Duplicated testRunCriteria should match the actual one. - Assert.AreEqual(_testRunCriteriaWithSources, criteria, "Mismatch in testRunCriteria"); handler.HandleTestRunComplete(CreateTestRunCompleteArgs(isCanceled, isAborted), null, null, null); }); } } - private void InvokeAndVerifyInitialize(int concurrentManagersCount, bool skipDefaultAdapters = false) + private void InvokeAndVerifyInitialize(int parallelLevel, bool skipDefaultAdapters = false) { - var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _proxyManagerFunc, concurrentManagersCount); + var parallelExecutionManager = new ParallelProxyExecutionManager(_mockRequestData.Object, _createMockManager, parallelLevel, _runtimeProviders); parallelExecutionManager.Initialize(skipDefaultAdapters); - Assert.AreEqual(concurrentManagersCount, _createdMockManagers.Count, $"Number of Concurrent Managers created should be {concurrentManagersCount}"); - _createdMockManagers.ForEach(em => em.Verify(m => m.Initialize(skipDefaultAdapters), Times.Once)); + Assert.AreEqual(0, _usedMockManagers.Count, $"No concurrent managers should be pre-created, until there is work for them"); + _usedMockManagers.ForEach(em => em.Verify(m => m.Initialize(skipDefaultAdapters), Times.Once)); } } diff --git a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/ProxyTestSessionManagerTests.cs b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/ProxyTestSessionManagerTests.cs index 5a64e8f930..1b8e508c8e 100644 --- a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/ProxyTestSessionManagerTests.cs +++ b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/Client/ProxyTestSessionManagerTests.cs @@ -3,12 +3,14 @@ using System; using System.Collections.Generic; +using System.Linq; using Microsoft.VisualStudio.TestPlatform.Common.Telemetry; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; using Microsoft.VisualStudio.TestPlatform.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Host; using Microsoft.VisualStudio.TestTools.UnitTesting; using Moq; @@ -21,6 +23,7 @@ namespace Microsoft.TestPlatform.CrossPlatEngine.UnitTests.Client; public class ProxyTestSessionManagerTests { private readonly IList _fakeTestSources = new List() { @"C:\temp\FakeTestAsset.dll" }; + private Dictionary _fakeTestSourcesToRuntimeProviderMap; private readonly IList _fakeTestMultipleSources = new List() { @"C:\temp\FakeTestAsset1.dll", @"C:\temp\FakeTestAsset2.dll", @@ -105,6 +108,18 @@ public void TestInitialize() _mockMetricsCollection.Setup(mc => mc.Metrics).Returns(metrics); _mockMetricsCollection.Setup(mc => mc.Add(It.IsAny(), It.IsAny())) .Callback((string metric, object value) => metrics.Add(metric, value)); + + _fakeTestSourcesToRuntimeProviderMap = new Dictionary + { + [_fakeTestSources[0]] = new TestRuntimeProviderInfo(typeof(ITestRuntimeProvider), false, _fakeRunSettings, new List 
+ { + new SourceDetail { + Source = _fakeTestSources[0], + Architecture = Architecture.X86, + Framework = Framework.DefaultFramework + } + }) + }; } [TestMethod] @@ -513,10 +528,27 @@ private ProxyTestSessionManager CreateProxy( StartTestSessionCriteria testSessionCriteria, ProxyOperationManager proxyOperationManager) { + var runSettings = testSessionCriteria.RunSettings ?? _fakeRunSettings; + var runtimeProviderInfo = new TestRuntimeProviderInfo + ( + typeof(ITestRuntimeProvider), + shared: false, + runSettings, + testSessionCriteria.Sources.Select(s => new SourceDetail + { + Source = s, + Architecture = Architecture.X86, + Framework = Framework.DefaultFramework + }).ToList() + ); + + var runtimeProviders = new List { runtimeProviderInfo }; return new ProxyTestSessionManager( testSessionCriteria, testSessionCriteria.Sources.Count, - () => proxyOperationManager); + _ => proxyOperationManager, + runtimeProviders + ); } private void CheckStopSessionTelemetry(bool exists) diff --git a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestEngineTests.cs b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestEngineTests.cs index be32a38f3e..6a62a2657d 100644 --- a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestEngineTests.cs +++ b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestEngineTests.cs @@ -6,7 +6,6 @@ using System.Linq; using System.Reflection; -using Microsoft.TestPlatform.CrossPlatEngine.UnitTests.TestableImplementations; using Microsoft.TestPlatform.TestUtilities; using Microsoft.VisualStudio.TestPlatform.Common.Telemetry; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine; @@ -14,7 +13,6 @@ using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client.Parallel; using Microsoft.VisualStudio.TestPlatform.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Engine; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Host; using Microsoft.VisualStudio.TestPlatform.PlatformAbstractions.Interfaces; using Microsoft.VisualStudio.TestTools.UnitTesting; @@ -26,19 +24,16 @@ namespace TestPlatform.CrossPlatEngine.UnitTests; [TestClass] public class TestEngineTests { - private readonly ITestEngine _testEngine; + private readonly TestableTestEngine _testEngine; private readonly Mock _mockProcessHelper; private readonly ProtocolConfig _protocolConfig = new() { Version = 1 }; private readonly Mock _mockRequestData; private readonly Mock _mockMetricsCollection; - private ITestRuntimeProvider _testableTestRuntimeProvider; - public TestEngineTests() { TestPluginCacheHelper.SetupMockExtensions(new[] { typeof(TestEngineTests).GetTypeInfo().Assembly.Location }, () => { }); _mockProcessHelper = new Mock(); - _testableTestRuntimeProvider = new TestableRuntimeProvider(true); _mockRequestData = new Mock(); _mockMetricsCollection = new Mock(); _mockRequestData.Setup(rd => rd.MetricsCollection).Returns(_mockMetricsCollection.Object); @@ -49,173 +44,208 @@ public TestEngineTests() [TestMethod] public void GetDiscoveryManagerShouldReturnANonNullInstance() - { - var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, null); - Assert.IsNotNull(_testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria)); - } - - [TestMethod] - public void GetDiscoveryManagerShouldReturnsNewInstanceOfProxyDiscoveryManagerIfTestHostIsShared() { string settingXml = @" - - true - - "; + + true + + "; var discoveryCriteria = new DiscoveryCriteria(new List { 
"1.dll" }, 100, settingXml); - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - Assert.AreNotSame(discoveryManager, _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria)); - Assert.IsInstanceOfType(_testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria), typeof(ProxyDiscoveryManager)); + Assert.IsNotNull(_testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap)); } + [TestMethod] - public void GetDiscoveryManagerShouldReturnsParallelDiscoveryManagerIfTestHostIsNotShared() + public void GetDiscoveryManagerShouldReturnParallelProxyDiscoveryManagerIfNotRunningInProcess() { string settingXml = @" - - true - - "; + + true + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); - _testableTestRuntimeProvider = new TestableRuntimeProvider(false); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - Assert.IsNotNull(_testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria)); - Assert.IsInstanceOfType(_testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria), typeof(ParallelProxyDiscoveryManager)); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); + Assert.IsNotNull(discoveryManager); + Assert.IsInstanceOfType(discoveryManager, typeof(ParallelProxyDiscoveryManager)); } [TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagerIfCurrentProcessIsDotnet() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagerIfCurrentProcessIsDotnet() { - var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, null); + string settingXml = + @" + + true + + "; + var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + _mockProcessHelper.Setup(o => o.GetCurrentProcessFileName()).Returns("dotnet.exe"); - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } [TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagerIfDisableAppDomainIsSet() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagerIfDisableAppDomainIsSet() { string settingXml = @" - - x86 - true - false - .NETFramework, Version=v4.5 - - "; + + x86 + true + false + .NETFramework, Version=v4.5 + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail 
{ Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } [TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagerIfDesignModeIsTrue() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagerIfDesignModeIsTrue() { string settingXml = @" - - x86 - false - true - .NETFramework, Version=v4.5 - - "; + + x86 + false + true + .NETFramework, Version=v4.5 + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } [TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagereIfTargetFrameworkIsNetcoreApp() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagereIfTargetFrameworkIsNetcoreApp() { string settingXml = @" - - x86 - false - false - .NETCoreApp, Version=v1.1 - - "; + + x86 + false + false + .NETCoreApp, Version=v1.1 + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } [TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagereIfTargetFrameworkIsNetStandard() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagereIfTargetFrameworkIsNetStandard() { string settingXml = @" - - x86 - false - false - .NETStandard, Version=v1.4 - - "; + + x86 + false + false + .NETStandard, Version=v1.4 + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } 
[TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagereIfTargetPlatformIsX64() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagereIfTargetPlatformIsX64() { string settingXml = @" - - x64 - false - false - .NETStandard, Version=v1.4 - - "; + + x64 + false + false + .NETStandard, Version=v1.4 + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } [TestMethod] - public void GetDiscoveryManagerShouldNotReturnsInProcessProxyDiscoveryManagereIfrunsettingsHasTestSettingsInIt() + public void GetDiscoveryManagerShouldNotReturnInProcessProxyDiscoveryManagereIfrunsettingsHasTestSettingsInIt() { string settingXml = @" - - x86 - false - false - .NETFramework, Version=v4.5 - - - C:\temp.testsettings - - "; + + x86 + false + false + .NETFramework, Version=v4.5 + + + C:\temp.testsettings + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsNotInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } @@ -225,17 +255,21 @@ public void GetDiscoveryManagerShouldReturnsInProcessProxyDiscoveryManager() { string settingXml = @" - - x64 - false - false - .NETFramework, Version=v4.5 - - "; + + x64 + false + false + .NETFramework, Version=v4.5 + + "; var discoveryCriteria = new DiscoveryCriteria(new List { "1.dll" }, 100, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, _testableTestRuntimeProvider, discoveryCriteria); + var discoveryManager = _testEngine.GetDiscoveryManager(_mockRequestData.Object, discoveryCriteria, sourceToSourceDetailMap); Assert.IsNotNull(discoveryManager); Assert.IsInstanceOfType(discoveryManager, typeof(InProcessProxyDiscoveryManager)); } @@ -243,54 +277,49 @@ public void GetDiscoveryManagerShouldReturnsInProcessProxyDiscoveryManager() [TestMethod] public void GetExecutionManagerShouldReturnANonNullInstance() { - var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100); + string settingsXml = @""; + var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100, false, settingsXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; 
- Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria)); + Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap)); } [TestMethod] public void GetExecutionManagerShouldReturnNewInstance() { - var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100); - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); - - Assert.AreNotSame(executionManager, _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria)); - } - - [TestMethod] - public void GetExecutionManagerShouldReturnDefaultExecutionManagerIfParallelDisabled() - { - string settingXml = @"true"; - var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100, false, settingXml); + string settingsXml = @""; + var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100, false, settingsXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); - Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria)); - Assert.IsInstanceOfType(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria), typeof(ProxyExecutionManager)); + Assert.AreNotSame(executionManager, _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap)); } [TestMethod] - public void GetExecutionManagerWithSingleSourceShouldReturnDefaultExecutionManagerEvenIfParallelEnabled() + public void GetExecutionManagerShouldReturnParallelExecutionManagerIfParallelEnabled() { string settingXml = @" - - 2 - true - - "; - var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100, false, settingXml); - - Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria)); - Assert.IsInstanceOfType(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria), typeof(ProxyExecutionManager)); - } + + 2 + + "; - [TestMethod] - public void GetExecutionManagerShouldReturnParallelExecutionManagerIfParallelEnabled() - { - string settingXml = @"2"; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria)); - Assert.IsInstanceOfType(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria), typeof(ParallelProxyExecutionManager)); + Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap)); + Assert.IsInstanceOfType(_testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap), typeof(ParallelProxyExecutionManager)); } [TestMethod] @@ 
-298,26 +327,20 @@ public void GetExecutionManagerShouldReturnParallelExecutionManagerIfHostIsNotSh { string settingXml = @" - - true - - "; - _testableTestRuntimeProvider = new TestableRuntimeProvider(false); - var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); - - Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria)); - Assert.IsInstanceOfType(_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria), typeof(ParallelProxyExecutionManager)); - } + + true + + "; - [TestMethod] - public void GetExcecutionManagerShouldReturnExectuionManagerWithDataCollectionIfDataCollectionIsEnabled() - { - var settingXml = @""; - var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100, false, settingXml); - var result = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - Assert.IsNotNull(result); - Assert.IsInstanceOfType(result, typeof(ProxyExecutionManagerWithDataCollection)); + Assert.IsNotNull(_testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap)); + Assert.IsInstanceOfType(_testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap), typeof(ParallelProxyExecutionManager)); } [TestMethod] @@ -325,17 +348,22 @@ public void GetExecutionManagerShouldNotReturnInProcessProxyexecutionManagerIfIn { string settingXml = @" - - true - false - false - .NETFramework, Version=v4.5 - - "; + + true + false + false + .NETFramework, Version=v4.5 + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); Assert.IsNotNull(executionManager); Assert.IsNotInstanceOfType(executionManager, typeof(InProcessProxyExecutionManager)); @@ -346,17 +374,22 @@ public void GetExecutionManagerShouldNotReturnInProcessProxyexecutionManagerIfPa { string settingXml = @" - - false - false - .NETFramework, Version=v4.5 - 2 - - "; + + false + false + .NETFramework, Version=v4.5 + 2 + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = 
_testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); Assert.IsNotNull(executionManager); Assert.IsNotInstanceOfType(executionManager, typeof(InProcessProxyExecutionManager)); @@ -367,23 +400,28 @@ public void GetExecutionManagerShouldNotReturnInProcessProxyexecutionManagerIfDa { string settingXml = @" - - false - false - .NETFramework, Version=v4.5 - 1 - - - - - - - - "; + + false + false + .NETFramework, Version=v4.5 + 1 + + + + + + + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); Assert.IsNotNull(executionManager); Assert.IsNotInstanceOfType(executionManager, typeof(InProcessProxyExecutionManager)); @@ -394,26 +432,31 @@ public void GetExecutionManagerShouldNotReturnInProcessProxyexecutionManagerIfIn { string settingXml = @" - - false - false - .NETFramework, Version=v4.5 - 1 - - - - - - 4312 - - - - - "; + + false + false + .NETFramework, Version=v4.5 + 1 + + + + + + 4312 + + + + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); Assert.IsNotNull(executionManager); Assert.IsNotInstanceOfType(executionManager, typeof(InProcessProxyExecutionManager)); @@ -424,20 +467,25 @@ public void GetExecutionManagerShouldNotReturnInProcessProxyexecutionManagerIfru { string settingXml = @" - - false - false - .NETFramework, Version=v4.5 - 1 - - - C:\temp.testsettings - - "; + + false + false + .NETFramework, Version=v4.5 + 1 + + + C:\temp.testsettings + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); Assert.IsNotNull(executionManager); Assert.IsNotInstanceOfType(executionManager, 
typeof(InProcessProxyExecutionManager)); @@ -449,17 +497,22 @@ public void GetExecutionManagerShouldReturnInProcessProxyexecutionManager() { string settingXml = @" - - false - false - .NETFramework, Version=v4.5 - 1 - - "; + + false + false + .NETFramework, Version=v4.5 + 1 + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); Assert.IsNotNull(executionManager); Assert.IsInstanceOfType(executionManager, typeof(InProcessProxyExecutionManager)); @@ -476,46 +529,26 @@ public void GetExtensionManagerShouldCollectMetrics() { string settingXml = @" - - false - false - .NETFramework, Version=v4.5 - 1 - - "; + + false + false + .NETFramework, Version=v4.5 + 1 + + "; var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); + var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, testRunCriteria, sourceToSourceDetailMap); _mockMetricsCollection.Verify(mc => mc.Add(TelemetryDataConstants.ParallelEnabledDuringExecution, It.IsAny()), Times.Once); } - [TestMethod] - public void ProxyDataCollectionManagerShouldBeInitialzedWithCorrectTestSourcesWhenTestRunCriteriaContainsSourceList() - { - var settingXml = @""; - - var testRunCriteria = new TestRunCriteria(new List { "1.dll", "2.dll" }, 100, false, settingXml); - - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); - - Assert.IsTrue(((ProxyExecutionManagerWithDataCollection)executionManager).ProxyDataCollectionManager.Sources.Contains("1.dll")); - } - - [TestMethod] - public void ProxyDataCollectionManagerShouldBeInitialzedWithCorrectTestSourcesWhenTestRunCriteriaContainsTestCaseList() - { - var settingXml = @""; - - var testCaseList = new List { new TestCase("x.y.z", new Uri("uri://dummy"), "x.dll") }; - var testRunCriteria = new TestRunCriteria(testCaseList, 100, false, settingXml); - - var executionManager = _testEngine.GetExecutionManager(_mockRequestData.Object, _testableTestRuntimeProvider, testRunCriteria); - - Assert.IsTrue(((ProxyExecutionManagerWithDataCollection)executionManager).ProxyDataCollectionManager.Sources.Contains("x.dll")); - } - /// /// GetLoggerManager should return a non null instance. 
/// @@ -544,9 +577,15 @@ public void GetTestSessionManagerShouldReturnAValidInstance() RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + Assert.IsNotNull(_testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria)); + testSessionCriteria, + sourceToSourceDetailMap)); } [TestMethod] @@ -559,30 +598,49 @@ public void GetTestSessionManagerShouldReturnNewInstance() RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager1 = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.AreNotSame( _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria), + testSessionCriteria, + sourceToSourceDetailMap), testSessionManager1); } [TestMethod] public void GetTestSessionManagerShouldReturnDefaultTestSessionManagerIfParallelDisabled() { - var settingXml = @"true"; + var settingXml = + @" + + true + + "; + var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); Assert.IsInstanceOfType(testSessionManager, typeof(ProxyTestSessionManager)); @@ -593,20 +651,27 @@ public void GetTestSessionManagerShouldReturnDefaultTestSessionManagerEvenIfPara { string settingXml = @" - - 2 - true - - "; + + 2 + true + + "; + var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); Assert.IsInstanceOfType(testSessionManager, typeof(ProxyTestSessionManager)); @@ -615,16 +680,28 @@ public void GetTestSessionManagerShouldReturnDefaultTestSessionManagerEvenIfPara [TestMethod] public void GetTestSessionManagerShouldReturnDefaultTestSessionManagerIfParallelEnabled() { - string settingXml = @"2"; + string settingXml = + @" + + 2 + + "; var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll", "2.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); 
Assert.IsInstanceOfType(testSessionManager, typeof(ProxyTestSessionManager)); @@ -635,19 +712,26 @@ public void GetTestSessionManagerShouldReturnDefaultTestSessionManagerIfHostIsNo { string settingXml = @" - - true - - "; + + true + + "; var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll", "2.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + ["2.dll"] = new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); Assert.IsInstanceOfType(testSessionManager, typeof(ProxyTestSessionManager)); @@ -658,22 +742,27 @@ public void GetTestSessionManagerShouldReturnDefaultTestSessionManagerIfDataColl { var settingXml = @" - - - - - - - "; + + + + + + "; var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); Assert.IsInstanceOfType(testSessionManager, typeof(ProxyTestSessionManager)); @@ -684,19 +773,26 @@ public void GetTestSessionManagerShouldReturnNullWhenTargetFrameworkIsNetFramewo { var settingXml = @" - - .NETFramework, Version=v4.5 - - "; + + .NETFramework, Version=v4.5 + + "; + var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + Assert.IsNull(_testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria)); + testSessionCriteria, + sourceToSourceDetailMap)); } [TestMethod] @@ -705,13 +801,20 @@ public void GetTestSessionManagerShouldReturnNotNullIfCurrentProcessIsDotnet() var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll" }, - RunSettings = null + RunSettings = @"" }; + + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + _mockProcessHelper.Setup(ph => ph.GetCurrentProcessFileName()).Returns("dotnet.exe"); var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } @@ -721,23 +824,28 @@ public void GetTestSessionManagerShouldReturnNotNullIfDisableAppDomainIsSet() { string settingXml = @" - - x86 - true - false - .NETFramework, Version=v4.5 - - "; + + x86 + true + false + .NETFramework, Version=v4.5 + + "; var testSessionCriteria = new StartTestSessionCriteria() { Sources = new List { "1.dll" }, RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = 
Architecture.X86, Framework = Framework.DefaultFramework }, + }; var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } @@ -747,13 +855,13 @@ public void GetTestSessionManagerShouldReturnNotNullIfDesignModeIsTrue() { string settingXml = @" - - x86 - false - true - .NETFramework, Version=v4.5 - - "; + + x86 + false + true + .NETFramework, Version=v4.5 + + "; var testSessionCriteria = new StartTestSessionCriteria() { @@ -761,9 +869,15 @@ public void GetTestSessionManagerShouldReturnNotNullIfDesignModeIsTrue() RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } @@ -773,13 +887,13 @@ public void GetTestSessionManagerShouldReturnNotNullIfTargetFrameworkIsNetcoreAp { string settingXml = @" - - x86 - false - false - .NETCoreApp, Version=v1.1 - - "; + + x86 + false + false + .NETCoreApp, Version=v1.1 + + "; var testSessionCriteria = new StartTestSessionCriteria() { @@ -787,9 +901,15 @@ public void GetTestSessionManagerShouldReturnNotNullIfTargetFrameworkIsNetcoreAp RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } @@ -799,13 +919,13 @@ public void GetTestSessionManagerShouldReturnNotNullIfTargetFrameworkIsNetStanda { string settingXml = @" - - x86 - false - false - .NETStandard, Version=v1.4 - - "; + + x86 + false + false + .NETStandard, Version=v1.4 + + "; var testSessionCriteria = new StartTestSessionCriteria() { @@ -813,9 +933,15 @@ public void GetTestSessionManagerShouldReturnNotNullIfTargetFrameworkIsNetStanda RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } @@ -825,13 +951,13 @@ public void GetTestSessionManagerShouldReturnNotNullIfTargetPlatformIsX64() { string settingXml = @" - - x64 - false - false - .NETStandard, Version=v1.4 - - "; + + x64 + false + false + .NETStandard, Version=v1.4 + + "; var testSessionCriteria = new StartTestSessionCriteria() { @@ -839,9 +965,15 @@ public void GetTestSessionManagerShouldReturnNotNullIfTargetPlatformIsX64() RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } @@ -851,16 +983,16 @@ public void 
GetTestSessionManagerShouldReturnNotNullIfRunSettingsHasTestSettings { string settingXml = @" - - x86 - false - false - .NETFramework, Version=v4.5 - - - C:\temp.testsettings - - "; + + x86 + false + false + .NETFramework, Version=v4.5 + + + C:\temp.testsettings + + "; var testSessionCriteria = new StartTestSessionCriteria() { @@ -868,10 +1000,67 @@ public void GetTestSessionManagerShouldReturnNotNullIfRunSettingsHasTestSettings RunSettings = settingXml }; + var sourceToSourceDetailMap = new Dictionary + { + ["1.dll"] = new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + }; + var testSessionManager = _testEngine.GetTestSessionManager( _mockRequestData.Object, - testSessionCriteria); + testSessionCriteria, + sourceToSourceDetailMap); Assert.IsNotNull(testSessionManager); } + + [TestMethod] + public void CreatingNonParallelExecutionManagerShouldReturnExecutionManagerWithDataCollectionIfDataCollectionIsEnabled() + { + var settingXml = + @" + + + + + + "; + var testRunCriteria = new TestRunCriteria(new List { "1.dll" }, 100, false, settingXml); + + var runtimeProviderInfo = new TestRuntimeProviderInfo(typeof(ITestRuntimeProvider), false, settingXml, + new List { new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework } }); + var nonParallelExecutionManager = _testEngine.CreateNonParallelExecutionManager(_mockRequestData.Object, testRunCriteria, true, runtimeProviderInfo); + + Assert.IsNotNull(nonParallelExecutionManager); + Assert.IsInstanceOfType(nonParallelExecutionManager, typeof(ProxyExecutionManagerWithDataCollection)); + } + + + [TestMethod] + public void CreatedNonParallelExecutionManagerShouldBeInitialzedWithCorrectTestSourcesWhenTestRunCriteriaContainsSourceList() + { + // Test run criteria are NOT used to get sources or settings + // those are taken from the runtimeProviderInfo, because we've split the + // test run criteria into smaller pieces to run them on each non-parallel execution manager. 
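+        // Illustration of the comment above (descriptive only): a run over sources such as "a.dll" and "b.dll" is
+        // expected to be split into one TestRuntimeProviderInfo per provider/settings combination, and each
+        // non-parallel execution manager then works from its provider's SourceDetail list rather than from the
+        // original TestRunCriteria.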
+ var testRunCriteria = new TestRunCriteria(new List { "none.dll" }, 100, false, testSettings: null); + + var settingXml = + @" + + + + + + "; + + var runtimeProviderInfo = new TestRuntimeProviderInfo(typeof(ITestRuntimeProvider), false, settingXml, + new List { + new SourceDetail { Source = "1.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework }, + new SourceDetail { Source = "2.dll", Architecture = Architecture.X86, Framework = Framework.DefaultFramework } + }); + var nonParallelExecutionManager = _testEngine.CreateNonParallelExecutionManager(_mockRequestData.Object, testRunCriteria, true, runtimeProviderInfo); + + Assert.IsInstanceOfType(nonParallelExecutionManager, typeof(ProxyExecutionManagerWithDataCollection)); + Assert.IsTrue(((ProxyExecutionManagerWithDataCollection)nonParallelExecutionManager).ProxyDataCollectionManager.Sources.Contains("1.dll")); + Assert.IsTrue(((ProxyExecutionManagerWithDataCollection)nonParallelExecutionManager).ProxyDataCollectionManager.Sources.Contains("2.dll")); + } } diff --git a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestSession/TestSessionPoolTests.cs b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestSession/TestSessionPoolTests.cs index c1a7a165eb..25abc5b8a9 100644 --- a/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestSession/TestSessionPoolTests.cs +++ b/test/Microsoft.TestPlatform.CrossPlatEngine.UnitTests/TestSession/TestSessionPoolTests.cs @@ -2,6 +2,7 @@ // Licensed under the MIT license. See LICENSE file in the project root for full license information. using System; +using System.Collections.Generic; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine; using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.Client; @@ -24,7 +25,8 @@ public void AddSessionShouldSucceedIfTestSessionInfoIsUnique() var proxyTestSessionManager = new ProxyTestSessionManager( new StartTestSessionCriteria(), 1, - () => null); + _ => null, + new List()); Assert.IsNotNull(TestSessionPool.Instance); Assert.IsTrue(TestSessionPool.Instance.AddSession(testSessionInfo, proxyTestSessionManager)); @@ -40,7 +42,8 @@ public void KillSessionShouldSucceedIfTestSessionExists() var mockProxyTestSessionManager = new Mock( new StartTestSessionCriteria(), 1, - (Func)(() => null)); + (Func)(_ => null!), + new List()); var mockRequestData = new Mock(); mockProxyTestSessionManager.SetupSequence(tsm => tsm.StopSession(It.IsAny())) @@ -69,7 +72,8 @@ public void TakeProxyShouldSucceedIfMatchingCriteriaAreCorrect() var mockProxyTestSessionManager = new Mock( new StartTestSessionCriteria(), 1, - (Func)(() => null)); + (Func)(_ => null!), + new List()); mockProxyTestSessionManager.SetupSequence(tsm => tsm.DequeueProxy(It.IsAny(), It.IsAny())) .Throws(new InvalidOperationException("Test Exception")) @@ -100,7 +104,8 @@ public void ReturnProxyShouldSucceedIfProxyIdIsValid() var mockProxyTestSessionManager = new Mock( new StartTestSessionCriteria(), 1, - (Func)(() => null)); + (Func)(_ => null!), + new List()); mockProxyTestSessionManager.SetupSequence(tsm => tsm.EnqueueProxy(It.IsAny())) .Throws(new ArgumentException("Test Exception")) diff --git a/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DefaultTestHostManagerTests.cs b/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DefaultTestHostManagerTests.cs index e3d4bb198e..ca482cc9bf 100644 --- a/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DefaultTestHostManagerTests.cs +++ 
b/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DefaultTestHostManagerTests.cs @@ -354,7 +354,7 @@ public void LaunchTestHostAsyncShouldNotStartHostProcessIfCancellationTokenIsSet CancellationTokenSource cancellationTokenSource = new(); cancellationTokenSource.Cancel(); - Assert.ThrowsException(() => _testableTestHostManager.LaunchTestHostAsync(GetDefaultStartInfo(), cancellationTokenSource.Token).Wait()); + Assert.ThrowsException(() => _testableTestHostManager.LaunchTestHostAsync(GetDefaultStartInfo(), cancellationTokenSource.Token).Wait()); } [TestMethod] diff --git a/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DotnetTestHostManagerTests.cs b/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DotnetTestHostManagerTests.cs index 289e0799e4..581c26ad93 100644 --- a/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DotnetTestHostManagerTests.cs +++ b/test/Microsoft.TestPlatform.TestHostProvider.UnitTests/Hosting/DotnetTestHostManagerTests.cs @@ -456,7 +456,7 @@ public void LaunchTestHostAsyncShouldNotStartHostProcessIfCancellationTokenIsSet CancellationTokenSource cancellationTokenSource = new(); cancellationTokenSource.Cancel(); - Assert.ThrowsException(() => _dotnetHostManager.LaunchTestHostAsync(startInfo, cancellationTokenSource.Token).Wait()); + Assert.ThrowsException(() => _dotnetHostManager.LaunchTestHostAsync(startInfo, cancellationTokenSource.Token).Wait()); } [TestMethod] diff --git a/test/Microsoft.TestPlatform.TestUtilities/DebugInfo.cs b/test/Microsoft.TestPlatform.TestUtilities/DebugInfo.cs index f9ac35d770..12eb25aaa5 100644 --- a/test/Microsoft.TestPlatform.TestUtilities/DebugInfo.cs +++ b/test/Microsoft.TestPlatform.TestUtilities/DebugInfo.cs @@ -14,5 +14,5 @@ public sealed class DebugInfo public bool DebugVSTestConsole { get; set; } public bool DebugTestHost { get; set; } public bool DebugDataCollector { get; set; } - public bool NoDefaultBreakpoints { get; set; } = true; + public bool DebugStopAtEntrypoint { get; set; } } diff --git a/test/Microsoft.TestPlatform.TestUtilities/IntegrationTestBase.cs b/test/Microsoft.TestPlatform.TestUtilities/IntegrationTestBase.cs index f3bd1c31c4..324d8957ad 100644 --- a/test/Microsoft.TestPlatform.TestUtilities/IntegrationTestBase.cs +++ b/test/Microsoft.TestPlatform.TestUtilities/IntegrationTestBase.cs @@ -250,7 +250,7 @@ private Dictionary AddDebugEnvironmentVariables(Dictionary + + + + + + + + + + + + + + diff --git a/test/vstest.ProgrammerTests/BasicRunAndDiscovery.cs b/test/vstest.ProgrammerTests/BasicRunAndDiscovery.cs new file mode 100644 index 0000000000..37e71ad330 --- /dev/null +++ b/test/vstest.ProgrammerTests/BasicRunAndDiscovery.cs @@ -0,0 +1,157 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +using FluentAssertions; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; + +using vstest.ProgrammerTests.Fakes; +using Intent; + +#pragma warning disable IDE1006 // Naming Styles +namespace vstest.ProgrammerTests; +#pragma warning restore IDE1006 // Naming Styles + +// Tests are run by Intent library that is executed from our Program.Main. To debug press F5 in VS, and maybe mark just a single test with [Only]. +// To just run, press Ctrl+F5 to run without debugging. It will use short timeout for abort in case something is wrong with your test. 
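+// Hypothetical usage sketch based on the comment above (not part of this change): to narrow an F5 debugging run to a
+// single test, the [Only] attribute can be added next to [Test], e.g.:
+//
+//     [Test("Given ... When ... Then ...")]
+//     [Only] // only this test would run while debugging
+//     public async Task OnlyThisOne() { /* ... */ }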
+ +public class BasicRunAndDiscovery +{ + [Test(@" + Given a test assembly with 108 tests. + + When we run tests. + + Then all 108 tests are executed. + ")] + public async Task A() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) + .WithArchitecture(Architecture.X64) + .WithTestCount(108, 10) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost1Process.Exit()) + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + fixture.AddTestHostFixtures(testhost1); + + var testRequestManager = fixture.BuildTestRequestManager(); + + // -- act + var testRunRequestPayload = new TestRunRequestPayload + { + Sources = new List { mstest1Dll.Path }, + + RunSettings = $"" + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount); + } + + [Test(@" + Given multple test assemblies that use the same target framework. + + When we run tests. + + Then all tests from all assemblies are run. + ")] + public async Task B() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) + .WithArchitecture(Architecture.X64) + .WithTestCount(108, 10) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost1Process.Exit()) + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net5) + .WithArchitecture(Architecture.X64) + .WithTestCount(50, 8) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, f => f.Process.Exit()) + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + // -- act + var testRunRequestPayload = new TestRunRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + + RunSettings = $"" + 
}; + + await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount); + } +} + +// Test and improvmement ideas: +// TODO: passing null runsettings does not fail fast, instead it fails in Fakes settings code +// TODO: passing empty string fails in the xml parser code +// TODO: passing null sources and null testcases does not fail fast +// TODO: Just calling Exit, Close won't stop the run, we will keep waiting for test run to complete, I think in real life when we exit then Disconnected will be called on the vstest.console side, leading to abort flow. +//.StartTestExecutionWithSources(new FakeMessage(MessageType.TestMessage, new TestMessagePayload { MessageLevel = TestMessageLevel.Error, Message = "Loading type failed." }), afterAction: f => { /*f.Process.Exit();*/ f.FakeCommunicationEndpoint.Disconnect(); }) diff --git a/test/vstest.ProgrammerTests/Fakes/ActionRecord.cs b/test/vstest.ProgrammerTests/Fakes/ActionRecord.cs new file mode 100644 index 0000000000..dede5a031a --- /dev/null +++ b/test/vstest.ProgrammerTests/Fakes/ActionRecord.cs @@ -0,0 +1,19 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +namespace vstest.ProgrammerTests.Fakes; + +/// +/// Contains information about something that happened, e.g. when a runtime provider resolve was called. +/// +/// +internal class ActionRecord +{ + public T Value { get; } + public string StackTrace { get; } + public ActionRecord(T value) + { + StackTrace = Environment.StackTrace; + Value = value; + } +} diff --git a/test/vstest.ProgrammerTests/Fakes/FakeCommunicationChannel.cs b/test/vstest.ProgrammerTests/Fakes/FakeCommunicationChannel.cs index 8b6771be0b..2f689fd722 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeCommunicationChannel.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeCommunicationChannel.cs @@ -20,7 +20,6 @@ public FakeCommunicationChannel(int id) public int Id { get; } public CancellationTokenSource CancellationTokenSource { get; } = new(); - public BlockingCollection InQueue { get; } = new(); public BlockingCollection OutQueue { get; } = new(); @@ -47,6 +46,7 @@ public void Dispose() { CancellationTokenSource.Cancel(); InQueue.CompleteAdding(); + OutQueue.CompleteAdding(); } public Task NotifyDataAvailable() @@ -78,7 +78,7 @@ internal class FakeCommunicationChannel : FakeCommunicationChannel, IC /// public Queue> NextResponses { get; } = new(); public FakeErrorAggregator FakeErrorAggregator { get; } - public FakeMessage? OutgoingMessage { get; private set; } + public FakeMessage? PendingMessage { get; private set; } public TContext? Context { get; private set; } public List> ProcessedMessages { get; } = new(); public Task? ProcessIncomingMessagesTask { get; private set; } @@ -104,11 +104,12 @@ private void ProcessOutgoingMessages() { try { - // TODO: better name? this is message that we are currently trying to send - OutgoingMessage = OutQueue.Take(); - OnMessageReceived(this, new MessageReceivedEventArgs { Data = OutgoingMessage.SerializedMessage }); - OutgoingMessage = null; + // TODO: better name for the property? This is message that we are currently trying to send. 
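+                // Descriptive note (assumption based on the surrounding change): Take(token) observes what is
+                // presumably the token tied to CancellationTokenSource, so cancelling in Dispose unblocks this loop;
+                // the OperationCanceledException handler below then swallows the cancellation instead of recording
+                // it as an error.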
+ PendingMessage = OutQueue.Take(token); + OnMessageReceived(this, new MessageReceivedEventArgs { Data = PendingMessage.SerializedMessage }); + PendingMessage = null; } + catch (OperationCanceledException) { } catch (Exception ex) { FakeErrorAggregator.Add(ex); @@ -182,7 +183,6 @@ private void ProcessIncomingMessages(TContext context) Debugger.Break(); } - // TODO: passing the raw message in, is strange responsePair.BeforeAction?.Invoke(context); var responses = responsePair.Responses; ProcessedMessages.Add(new RequestResponsePair(requestMessage, responses, false)); @@ -199,11 +199,17 @@ private void ProcessIncomingMessages(TContext context) responsePair.AfterAction?.Invoke(context); } } + catch (OperationCanceledException) { } catch (Exception ex) { FakeErrorAggregator.Add(ex); } } } + + public override string? ToString() + { + return NextResponses.Peek()?.ToString(); + } } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeDiscoveryEventsRegistrar.cs b/test/vstest.ProgrammerTests/Fakes/FakeDiscoveryEventsRegistrar.cs new file mode 100644 index 0000000000..2b22bf9b40 --- /dev/null +++ b/test/vstest.ProgrammerTests/Fakes/FakeDiscoveryEventsRegistrar.cs @@ -0,0 +1,75 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +using Microsoft.VisualStudio.TestPlatform.Common.Interfaces; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; + +namespace vstest.ProgrammerTests.Fakes; + +internal class FakeTestDiscoveryEventsRegistrar : ITestDiscoveryEventsRegistrar +{ + private readonly FakeErrorAggregator _fakeErrorAggregator; + + public List AllEvents { get; } = new(); + public List LoggedWarnings { get; } = new(); + public List> DiscoveryCompleteEvents { get; } = new(); + public List> DiscoveredTestsEvents { get; } = new(); + public List> DiscoveryStartEvents { get; } = new(); + public List> DiscoveryMessageEvents { get; } = new(); + + public FakeTestDiscoveryEventsRegistrar(FakeErrorAggregator fakeErrorAggregator) + { + _fakeErrorAggregator = fakeErrorAggregator; + } + + public void LogWarning(string message) + { + AllEvents.Add(message); + LoggedWarnings.Add(message); + } + + public void RegisterDiscoveryEvents(IDiscoveryRequest discoveryRequest) + { + discoveryRequest.OnDiscoveryMessage += OnDiscoveryMessage; + discoveryRequest.OnDiscoveryStart += OnDiscoveryStart; + discoveryRequest.OnDiscoveredTests += OnDiscoveredTests; + discoveryRequest.OnDiscoveryComplete += OnDiscoveryComplete; + } + + public void UnregisterDiscoveryEvents(IDiscoveryRequest discoveryRequest) + { + discoveryRequest.OnDiscoveryMessage -= OnDiscoveryMessage; + discoveryRequest.OnDiscoveryStart -= OnDiscoveryStart; + discoveryRequest.OnDiscoveredTests -= OnDiscoveredTests; + discoveryRequest.OnDiscoveryComplete -= OnDiscoveryComplete; + } + + private void OnDiscoveryComplete(object? sender, DiscoveryCompleteEventArgs e) + { + var eventRecord = new EventRecord(sender, e); + AllEvents.Add(eventRecord); + DiscoveryCompleteEvents.Add(eventRecord); + } + + private void OnDiscoveredTests(object? sender, DiscoveredTestsEventArgs e) + { + var eventRecord = new EventRecord(sender, e); + AllEvents.Add(eventRecord); + DiscoveredTestsEvents.Add(eventRecord); + } + + private void OnDiscoveryStart(object? 
sender, DiscoveryStartEventArgs e) + { + var eventRecord = new EventRecord(sender, e); + AllEvents.Add(eventRecord); + DiscoveryStartEvents.Add(eventRecord); + } + + private void OnDiscoveryMessage(object? sender, TestRunMessageEventArgs e) + { + var eventRecord = new EventRecord(sender, e); + AllEvents.Add(eventRecord); + DiscoveryMessageEvents.Add(eventRecord); + } +} diff --git a/test/vstest.ProgrammerTests/Fakes/FakeMessage.cs b/test/vstest.ProgrammerTests/Fakes/FakeMessage.cs index 4829c52b57..03a0d9c077 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeMessage.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeMessage.cs @@ -50,4 +50,9 @@ public FakeMessage(string messageType, T payload, int version = 0) /// Version of the message to allow the internal serializer to choose the correct serialization strategy. /// public int Version { get; } + + public override string ToString() + { + return $"{MessageType} {{{Payload}}}"; + } } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeProcess.cs b/test/vstest.ProgrammerTests/Fakes/FakeProcess.cs index e6a6579704..9ce73ca5d4 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeProcess.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeProcess.cs @@ -86,4 +86,14 @@ internal void Exit() ExitCallback(this); } } + + public override string ToString() + { + var state = !Started + ? "not started" + : !Exited + ? "running" + : "exited"; + return $"{(Id != default ? Id : "")} {Name ?? ""} {state}"; + } } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestBatchBuilder.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestBatchBuilder.cs index cfecaeb09a..03b1a781d6 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestBatchBuilder.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestBatchBuilder.cs @@ -11,6 +11,7 @@ internal class FakeTestBatchBuilder public TimeSpan Duration { get; private set; } public int BatchSize { get; private set; } public static List> Empty => new(); + public string? Source { get; private set; } public FakeTestBatchBuilder() { @@ -45,6 +46,17 @@ internal FakeTestBatchBuilder WithBatchSize(int batchSize) return this; } + /// + /// Sets the dll path (source) to be the provided value. + /// + /// + /// + internal FakeTestBatchBuilder WithDllPath(string path) + { + Source = path; + return this; + } + internal List> Build() { if (BatchSize == 0 && TotalCount != 0) @@ -55,16 +67,17 @@ internal List> Build() var numberOfBatches = Math.DivRem(TotalCount, BatchSize, out int remainder); - // TODO: Add adapter uri, and dll name + var source = Source ?? 
"DummySourceFileName"; + // TODO: Add adapter uri // TODO: set duration var batches = Enumerable.Range(0, numberOfBatches) .Select(batchNumber => Enumerable.Range(0, BatchSize) - .Select((index) => new TestResult(new TestCase($"Test{batchNumber}-{index}", new Uri("some://uri"), "DummySourceFileName"))).ToList()).ToList(); + .Select((index) => new TestResult(new TestCase($"Test{batchNumber}-{index}", new Uri("some://uri"), source))).ToList()).ToList(); if (remainder > 0) { var reminderBatch = Enumerable.Range(0, remainder) - .Select((index) => new TestResult(new TestCase($"Test{numberOfBatches + 1}-{index}", new Uri("some://uri"), "DummySourceFileName"))).ToList(); + .Select((index) => new TestResult(new TestCase($"Test{numberOfBatches + 1}-{index}", new Uri("some://uri"), source))).ToList(); batches.Add(reminderBatch); } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestDllBuilder.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestDllBuilder.cs index 14e7bca260..47b53aca90 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestDllBuilder.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestDllBuilder.cs @@ -51,6 +51,7 @@ internal FakeTestDllBuilder WithTestBatches(List> testBatches) internal FakeTestDllBuilder WithTestCount(int totalCount, int? batchSize = null) { _testBatches = new FakeTestBatchBuilder() + .WithDllPath(_path) .WithTotalCount(totalCount) .WithBatchSize(batchSize ?? totalCount) .Build(); @@ -63,6 +64,7 @@ internal FakeTestDllFile Build() if (_testBatches == null) { _testBatches = new FakeTestBatchBuilder() + .WithDllPath(_path) .WithTotalCount(10) .WithBatchSize(5) .Build(); diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestHost.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestHost.cs index ad438b6c6e..3a1cb11ab3 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestHost.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestHost.cs @@ -2,8 +2,7 @@ // Licensed under the MIT license. See LICENSE file in the project root for full license information. namespace vstest.ProgrammerTests.Fakes; - -internal class FakeTestHostFixture +internal class FakeTestHostFixture : IDisposable { public int Id { get; } public List Dlls { get; } @@ -34,4 +33,9 @@ public FakeTestHostFixture( // testhost before or after answering. fakeCommunicationChannel.Start(this); } + + public void Dispose() + { + try { FakeCommunicationChannel.Dispose(); } catch (ObjectDisposedException) { } + } } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestHostResponsesBuilder.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestHostResponsesBuilder.cs index e2a8c22a77..2142f4077e 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestHostResponsesBuilder.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestHostResponsesBuilder.cs @@ -132,4 +132,49 @@ internal List> Bui { return _responses; } + + internal FakeTestHostResponsesBuilder DiscoveryInitialize(FakeMessage fakeMessage) + { + AddPairWithFakeMessage(MessageType.DiscoveryInitialize, fakeMessage); + return this; + } + + internal FakeTestHostResponsesBuilder StartDiscovery(List> testResultBatches) + { + // Discovery returns back test cases, not test results, but it is easier to take test results, because + // we have a builder that can be re-used for both test run and test discovery. 
+ + List messages; + if (testResultBatches.Count != 0) + { + // This will create as many TestCasesFound messages as there are batches - 1; + // the last batch is sent as part of the discovery complete message. + + // see TestRequestSender.OnDiscoveryMessageReceived to see how the vstest.console receives the data + List changeMessages = testResultBatches.Take(testResultBatches.Count - 1) + .Select(batch => new FakeMessage>(MessageType.TestCasesFound, batch.Select(testResult => testResult.TestCase).ToList())) + .ToList(); + + // TODO: if we send this incorrectly the handler just continues, check the logs to see if we can understand it from there. We should at least write a warning, + // because otherwise it hangs. + FakeMessage completedMessage = new FakeMessage(MessageType.DiscoveryComplete, new DiscoveryCompletePayload + { + LastDiscoveredTests = testResultBatches.Last().Select(testResult => testResult.TestCase).ToList(), + }); + messages = changeMessages.Concat(new[] { completedMessage }).ToList(); + } + else + { + FakeMessage completedMessage = new FakeMessage(MessageType.DiscoveryComplete, new DiscoveryCompletePayload + { + LastDiscoveredTests = new List(), + }); + + messages = completedMessage.AsList(); + } + + AddPairWithMultipleFakeMessages(MessageType.StartDiscovery, messages); + + return this; + } } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestRunEventsRegistrar.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestRunEventsRegistrar.cs index 857966e62e..b6d7571b79 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestRunEventsRegistrar.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestRunEventsRegistrar.cs @@ -9,6 +9,7 @@ namespace vstest.ProgrammerTests.Fakes; internal class FakeTestRunEventsRegistrar : ITestRunEventsRegistrar { + public Guid Id { get; } = Guid.NewGuid(); public FakeTestRunEventsRegistrar(FakeErrorAggregator fakeErrorAggregator) { FakeErrorAggregator = fakeErrorAggregator; diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProvider.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProvider.cs index 0547ce0aaa..0aa36d4629 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProvider.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProvider.cs @@ -12,6 +12,7 @@ internal class FakeTestRuntimeProvider : ITestRuntimeProvider { public FakeProcessHelper FakeProcessHelper { get; } public FakeCommunicationEndpoint FakeCommunicationEndpoint { get; } + public FakeCommunicationChannel FakeCommunicationChannel { get; } public FakeErrorAggregator FakeErrorAggregator { get; } public FakeProcess TestHostProcess { get; private set; } public FakeFileHelper FileHelper { get; } @@ -30,6 +31,7 @@ public FakeTestRuntimeProvider(FakeProcessHelper fakeProcessHelper, FakeProcess FileHelper = fakeFileHelper; TestDlls = fakeTestDlls; FakeCommunicationEndpoint = fakeCommunicationEndpoint; + FakeCommunicationChannel = fakeCommunicationEndpoint.Channel; FakeErrorAggregator = fakeErrorAggregator; var architectures = fakeTestDlls.Select(dll => dll.Architecture).Distinct().ToList(); @@ -123,4 +125,9 @@ public void SetCustomLauncher(ITestHostLauncher customLauncher) { throw new NotImplementedException(); } + + public override string ToString() + { + return $"{nameof(FakeTestRuntimeProvider)} - ({TestHostProcess.ToString() ??
""}) - {FakeCommunicationChannel}"; + } } diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProviderManager.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProviderManager.cs index 98ad226a32..e19ff8ebba 100644 --- a/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProviderManager.cs +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestRuntimeProviderManager.cs @@ -1,8 +1,6 @@ // Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. See LICENSE file in the project root for full license information. -using System.Collections.Concurrent; - using Microsoft.VisualStudio.TestPlatform.Common.Hosting; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Host; @@ -15,33 +13,56 @@ public FakeTestRuntimeProviderManager(FakeErrorAggregator fakeErrorAggregator) FakeErrorAggregator = fakeErrorAggregator; } - public ConcurrentQueue TestRuntimeProviders { get; } = new(); - public List UsedTestRuntimeProviders { get; } = new(); + public List TestRuntimeProviders { get; } = new(); + public Queue TestRuntimeProvidersByOrder { get; } = new(); + public List> UsedTestRuntimeProviders { get; } = new(); public FakeErrorAggregator FakeErrorAggregator { get; } public void AddTestRuntimeProviders(params FakeTestRuntimeProvider[] runtimeProviders) { - // This is not a bug, I am registering each provider twice because TestPlatform resolves - // them twice for every request that does not run in-process. - foreach (var runtimeProvider in runtimeProviders) + TestRuntimeProviders.AddRange(runtimeProviders); + + // In cases where we don't have multi tfm run, we will be asked for + // a provider with multiple sources. In that case we don't know exactly which one to provide + // so we need to go by order. We also do this resolve twice for each source in parallel run + // because we first need to know if the provider is shared. So we add to the queue twice. + // This is brittle, but there is no way around this :( + foreach (var provider in runtimeProviders) { - TestRuntimeProviders.Enqueue(runtimeProvider); - TestRuntimeProviders.Enqueue(runtimeProvider); + TestRuntimeProvidersByOrder.Enqueue(provider); + TestRuntimeProvidersByOrder.Enqueue(provider); } } - public ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string runConfiguration) + public ITestRuntimeProvider GetTestHostManagerByRunConfiguration(string _, List sources) { + var allMatchingProviders = TestRuntimeProviders + .Where(r => r.TestDlls.Select(dll => dll.Path) + .Any(path => sources.Contains(path))) + .ToList(); + if (allMatchingProviders.Count == 0) + { + throw new InvalidOperationException($"There are no FakeTestRuntimeProviders associated with any of the incoming sources, make sure your testhost fixture has at least one dll: {sources.JoinByComma()}"); + } - if (!TestRuntimeProviders.TryDequeue(out var next)) + if (allMatchingProviders.Count > 1) { - throw new InvalidOperationException("There are no more TestRuntimeProviders to be provided"); + // This is a single tfm run, or multiple dlls in the run have the same tfm. We need to provide + // providers by order. 
+ if (!TestRuntimeProvidersByOrder.TryDequeue(out var provider)) + { + throw new InvalidOperationException("There are no more FakeTestRuntimeProviders to be provided."); + } + + UsedTestRuntimeProviders.Add(new ActionRecord(provider)); + return provider; } - UsedTestRuntimeProviders.Add(next); - return next; + var single = allMatchingProviders.Single(); + UsedTestRuntimeProviders.Add(new ActionRecord(single)); + return single; } public ITestRuntimeProvider GetTestHostManagerByUri(string hostUri) diff --git a/test/vstest.ProgrammerTests/Fakes/FakeTestSessionEventsHandler.cs b/test/vstest.ProgrammerTests/Fakes/FakeTestSessionEventsHandler.cs new file mode 100644 index 0000000000..a1791e77a4 --- /dev/null +++ b/test/vstest.ProgrammerTests/Fakes/FakeTestSessionEventsHandler.cs @@ -0,0 +1,48 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; + +namespace vstest.ProgrammerTests.Fakes; + +internal class FakeTestSessionEventsHandler : ITestSessionEventsHandler +{ + private readonly FakeErrorAggregator _fakeErrorAggregator; + + public FakeTestSessionEventsHandler(FakeErrorAggregator fakeErrorAggregator) + { + _fakeErrorAggregator = fakeErrorAggregator; + } + + public List AllEvents { get; } = new(); + public List LoggedMessages { get; } = new(); + public List RawMessages { get; } = new(); + public List StartTestSessionCompleteEvents { get; } = new(); + public List StopTestSessionCompleteEvents { get; } = new(); + + public void HandleLogMessage(TestMessageLevel level, string message) + { + var msg = new TestMessage(level, message); + AllEvents.Add(msg); + LoggedMessages.Add(msg); + } + + public void HandleRawMessage(string rawMessage) + { + AllEvents.Add(rawMessage); + RawMessages.Add(rawMessage); + } + + public void HandleStartTestSessionComplete(StartTestSessionCompleteEventArgs eventArgs) + { + AllEvents.Add(eventArgs); + StartTestSessionCompleteEvents.Add(eventArgs); + } + + public void HandleStopTestSessionComplete(StopTestSessionCompleteEventArgs eventArgs) + { + AllEvents.Add(eventArgs); + StopTestSessionCompleteEvents.Add(eventArgs); + } +} diff --git a/test/vstest.ProgrammerTests/Fakes/Fixture.cs b/test/vstest.ProgrammerTests/Fakes/Fixture.cs index e7360a9a96..60a38bfc35 100644 --- a/test/vstest.ProgrammerTests/Fakes/Fixture.cs +++ b/test/vstest.ProgrammerTests/Fakes/Fixture.cs @@ -12,18 +12,23 @@ using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestRunAttachmentsProcessing; using Microsoft.VisualStudio.TestPlatform.ObjectModel; using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using Microsoft.VisualStudio.TestPlatform.Utilities; namespace vstest.ProgrammerTests.Fakes; internal class Fixture : IDisposable { + private readonly List _disposables = new(); + public FakeErrorAggregator ErrorAggregator { get; } = new(); public FakeProcessHelper ProcessHelper { get; } + public string LogName { get; } public FakeProcess CurrentProcess { get; } public FakeFileHelper FileHelper { get; } public FakeTestRuntimeProviderManager TestRuntimeProviderManager { get; } public FakeTestRunEventsRegistrar TestRunEventsRegistrar { get; } public FakeEnvironment Environment { get; } + public FakeTestDiscoveryEventsRegistrar TestDiscoveryEventsRegistrar { get; } public TestEngine? TestEngine { get; private set; } public TestPlatform? 
TestPlatform { get; private set; } public TestRunResultAggregator? TestRunResultAggregator { get; private set; } @@ -33,10 +38,16 @@ internal class Fixture : IDisposable public FakeDataCollectorAttachmentsProcessorsFactory? DataCollectorAttachmentsProcessorsFactory { get; private set; } public TestRunAttachmentsProcessingManager? TestRunAttachmentsProcessingManager { get; private set; } public TestRequestManager? TestRequestManager { get; private set; } - public List ExecutedTests => TestRunEventsRegistrar.RunChangedEvents.SelectMany(er => er.Data.NewTestResults).ToList(); public ProtocolConfig ProtocolConfig { get; internal set; } - public Fixture() + public List ExecutedTests => TestRunEventsRegistrar.RunChangedEvents.SelectMany(er => er.Data.NewTestResults).ToList(); + public List DiscoveredTests => TestDiscoveryEventsRegistrar.DiscoveredTestsEvents.SelectMany(er => er.Data.DiscoveredTestCases).ToList(); + + public List LoggedWarnings => TestRunEventsRegistrar.LoggedWarnings.Concat(TestDiscoveryEventsRegistrar.LoggedWarnings).ToList(); + + public FakeTestSessionEventsHandler TestSessionEventsHandler { get; } + + public Fixture(FixtureOptions? fixtureOptions = null) { // This type is compiled only in DEBUG, and won't exist otherwise. #if DEBUG @@ -51,21 +62,38 @@ public Fixture() } #endif - CurrentProcess = new FakeProcess(ErrorAggregator, @"X:\fake\vstest.console.exe", string.Empty, null, null, null, null, null); +#pragma warning disable CS0618 // Type or member is obsolete (to prevent use outside of test context) + FeatureFlag.Reset(); + fixtureOptions?.FeatureFlags?.ToList().ForEach(flag => ((FeatureFlag)FeatureFlag.Instance).SetFlag(flag.Key, flag.Value)); +#pragma warning restore CS0618 // Type or member is obsolete + + // This makes the run a bit slower, but at least we get info in the output window. We probably should add a mode where we don't + // use a file to write the output. Just trace listener. That would also be useful for UWP I think. + LogName = Path.GetTempPath() + $"/log_{Guid.NewGuid()}.txt"; + //EqtTrace.InitializeVerboseTrace(LogName); + + CurrentProcess = new FakeProcess(ErrorAggregator, @"X:\fake\vstest.console.exe"); ProcessHelper = new FakeProcessHelper(ErrorAggregator, CurrentProcess); FileHelper = new FakeFileHelper(ErrorAggregator); TestRuntimeProviderManager = new FakeTestRuntimeProviderManager(ErrorAggregator); TestRunEventsRegistrar = new FakeTestRunEventsRegistrar(ErrorAggregator); Environment = new FakeEnvironment(); + TestDiscoveryEventsRegistrar = new FakeTestDiscoveryEventsRegistrar(ErrorAggregator); + TestSessionEventsHandler = new FakeTestSessionEventsHandler(ErrorAggregator); ProtocolConfig = new ProtocolConfig(); } + public void Dispose() { - + foreach (var disposable in _disposables) + { + try { disposable.Dispose(); } catch (ObjectDisposedException) { } + } } internal void AddTestHostFixtures(params FakeTestHostFixture[] testhosts) { + _disposables.AddRange(testhosts); var providers = testhosts.Select(t => t.FakeTestRuntimeProvider).ToArray(); TestRuntimeProviderManager.AddTestRuntimeProviders(providers); } @@ -95,7 +123,14 @@ internal TestRequestManagerTestHelper BuildTestRequestManager( Task fakeMetricsPublisherTask = Task.FromResult(new FakeMetricsPublisher(ErrorAggregator)); - var commandLineOptions = CommandLineOptions.Instance; + var commandLineOptions = new CommandLineOptions + { + // We are acting like we are running under IDE. 
This is done because some settings are trying to grab the + // value from the pre-parsed settings in command line options. And some are looking at the actual run settings. + // Ultimately we should have a single point of truth for both scenarios, but now it is easier to just provide + // runsettings to the request. + IsDesignMode = true, + }; TestRequestManager testRequestManager = new( commandLineOptions, TestPlatform, diff --git a/test/vstest.ProgrammerTests/Fakes/FixtureOptions.cs b/test/vstest.ProgrammerTests/Fakes/FixtureOptions.cs new file mode 100644 index 0000000000..215c3649a6 --- /dev/null +++ b/test/vstest.ProgrammerTests/Fakes/FixtureOptions.cs @@ -0,0 +1,9 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +namespace vstest.ProgrammerTests.Fakes; + +internal class FixtureOptions +{ + public Dictionary? FeatureFlags { get; init; } +} diff --git a/test/vstest.ProgrammerTests/Fakes/KnownFrameworkStrings.cs b/test/vstest.ProgrammerTests/Fakes/KnownFrameworkStrings.cs index 1ecef8fc99..7d3e4ef36a 100644 --- a/test/vstest.ProgrammerTests/Fakes/KnownFrameworkStrings.cs +++ b/test/vstest.ProgrammerTests/Fakes/KnownFrameworkStrings.cs @@ -6,7 +6,7 @@ namespace vstest.ProgrammerTests.Fakes; internal static class KnownFrameworkStrings { public static string NetCore(int major, int minor = 0) => $".NETCoreApp,Version=v{major}.{minor}"; - private static string NetFramework(int major, int minor, int patch = 0) => $".NETFramework,Version=v{major}.{minor}.{patch}"; + private static string NetFramework(int major, int minor, int patch = 0) => $".NETFramework,Version=v{major}.{minor}{(patch != 0 ? $".{patch}" : null)}"; public static string Netcoreapp1 = NetCore(1); public static string Netcoreapp2 = NetCore(2); diff --git a/test/vstest.ProgrammerTests/Fakes/RequestResponsePair.cs b/test/vstest.ProgrammerTests/Fakes/RequestResponsePair.cs index 5384f9cace..a62e514130 100644 --- a/test/vstest.ProgrammerTests/Fakes/RequestResponsePair.cs +++ b/test/vstest.ProgrammerTests/Fakes/RequestResponsePair.cs @@ -35,4 +35,9 @@ public RequestResponsePair(TRequest request, IEnumerable responses, A public Action? AfterAction { get; } public List Responses { get; } public bool Debug { get; } + + public override string ToString() + { + return $"\u2193{Request} \u2191{Responses.FirstOrDefault()}"; + } } diff --git a/test/vstest.ProgrammerTests/Fakes/TestMessage.cs b/test/vstest.ProgrammerTests/Fakes/TestMessage.cs new file mode 100644 index 0000000000..9a9ca99dcb --- /dev/null +++ b/test/vstest.ProgrammerTests/Fakes/TestMessage.cs @@ -0,0 +1,18 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
+ +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Logging; + +namespace vstest.ProgrammerTests.Fakes; + +internal class TestMessage +{ + public TestMessage(TestMessageLevel level, string message) + { + Level = level; + Message = message; + } + + public TestMessageLevel Level { get; } + public string Message { get; } +} diff --git a/test/vstest.ProgrammerTests/Fakes/TestRequestManagerHelper.cs b/test/vstest.ProgrammerTests/Fakes/TestRequestManagerHelper.cs index 8485518a1b..76e5d165fb 100644 --- a/test/vstest.ProgrammerTests/Fakes/TestRequestManagerHelper.cs +++ b/test/vstest.ProgrammerTests/Fakes/TestRequestManagerHelper.cs @@ -22,7 +22,7 @@ public TestRequestManagerTestHelper(FakeErrorAggregator errorAggregator, TestReq _debugOptions = debugOptions; } - public async Task ExecuteWithAbort(Action testRequsestManagerAction) + public async Task ExecuteWithAbort(Action testRequestManagerAction) { // We make sure the test is running for the timeout time at max and then we try to abort // if we aborted we write the error to aggregator @@ -38,18 +38,25 @@ public async Task ExecuteWithAbort(Action testRequsestManage var abortOnTimeout = Task.Run(async () => { // Wait until timeout or until we are cancelled. - await Task.Delay(TimeSpan.FromSeconds(Debugger.IsAttached ? _debugOptions.DebugTimeout : _debugOptions.Timeout), cancelAbort.Token); - if (Debugger.IsAttached && _debugOptions.BreakOnAbort) + try + { + await Task.Delay(TimeSpan.FromSeconds(Debugger.IsAttached ? _debugOptions.DebugTimeout : _debugOptions.Timeout), cancelAbort.Token); + + if (Debugger.IsAttached && _debugOptions.BreakOnAbort) + { + var errors = _errorAggregator.Errors; + // we will abort because we are hanging, look at errors and at concurrent stacks to see where we are hanging. + Debugger.Break(); + } + _errorAggregator.Add(new Exception("errr we aborted")); + _testRequestManager.AbortTestRun(); + } + catch (TaskCanceledException) { - var errors = _errorAggregator.Errors; - // we will abort because we are hanging, look at errors and at concurrent stacks to see where we are hanging. - Debugger.Break(); } - _errorAggregator.Add(new Exception("errr we aborted")); - _testRequestManager.AbortTestRun(); }); - testRequsestManagerAction(_testRequestManager); + testRequestManagerAction(_testRequestManager); cancelAbort.Cancel(); if (!abortOnTimeout.IsCanceled) diff --git a/test/vstest.ProgrammerTests/MultiTFMRunAndDiscovery.cs b/test/vstest.ProgrammerTests/MultiTFMRunAndDiscovery.cs new file mode 100644 index 0000000000..8905e5fa6d --- /dev/null +++ b/test/vstest.ProgrammerTests/MultiTFMRunAndDiscovery.cs @@ -0,0 +1,673 @@ +// Copyright (c) Microsoft Corporation. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. + +using Intent; +using FluentAssertions; +using vstest.ProgrammerTests.Fakes; + +using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; +using Microsoft.VisualStudio.TestPlatform.Utilities; +using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client.Payloads; +using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities; +using System.Reflection; + +namespace vstest.ProgrammerTests; + +public class MultiTFM +{ + public class MultiTFMDiscovery + { + [Test(@" + Given two test assemblies that have the same architecture + but have different target frameworks. + + When we run test discovery. 
+ + Then two testhosts should be started that target the same framework as each assembly. + ")] + public async Task A() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(11, 5) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .DiscoveryInitialize(FakeMessage.NoResponse) + .StartDiscovery(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit()) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + // -- + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net48) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(21, 5) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .DiscoveryInitialize(FakeMessage.NoResponse) + .StartDiscovery(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit()) + // We actually do get asked to terminate multiple times. In the second host only. + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName); + + // -- act + var testDiscoveryPayload = new DiscoveryRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + RunSettings = $"" + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.DiscoverTests(testDiscoveryPayload, fixture.TestDiscoveryEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + // We figure out the framework for each assembly so there should be no incompatibility warnings + fixture.LoggedWarnings.Should().NotContainMatch("Test run detected DLL(s) which would use different framework*"); + + fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2); + var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartDiscovery); + var startWithSources1Text = startWithSources1.Request.GetRawMessage(); + // We sent mstest1.dll. + startWithSources1Text.Should().Contain("mstest1.dll"); + // And we sent net5 as the target framework, because that is the framework of mstest1.dll. + startWithSources1Text.Should().Contain(KnownFrameworkStrings.Net5); + + var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartDiscovery); + var startWithSources2Text = startWithSources2.Request.GetRawMessage(); + // We sent mstest2.dll. + startWithSources2Text.Should().Contain("mstest2.dll"); + // And we sent net48 as the target framework, because that is the framework of mstest2.dll. 
+ startWithSources2Text.Should().Contain(mstest2Dll.FrameworkName.ToString()); + + fixture.DiscoveredTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount); + } + + [Test(@" + Given two test assemblies that have the same architecture + but have different target frameworks. + + When we run test discovery + and provide runsettings that define the desired target framework. + + Then two testhosts should be started that target the framework chosen by runsettings. + ")] + public async Task B() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(11, 5) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .DiscoveryInitialize(FakeMessage.NoResponse) + .StartDiscovery(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit()) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + // -- + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net6) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(21, 5) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .DiscoveryInitialize(FakeMessage.NoResponse) + .StartDiscovery(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit()) + // We actually do get asked to terminate multiple times. In the second host only. + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName); + + // -- act + var testDiscoveryPayload = new DiscoveryRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + RunSettings = $"{KnownFrameworkStrings.Net7}" + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.DiscoverTests(testDiscoveryPayload, fixture.TestDiscoveryEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + // Runsettings will force NET7, so we should get a warning. + fixture.LoggedWarnings.Should().ContainMatch("Test run detected DLL(s) which would use different framework*"); + + fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2); + var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartDiscovery); + var startWithSources1Text = startWithSources1.Request.GetRawMessage(); + // We sent mstest1.dll and net7 because that is what we have in settings. 
+ startWithSources1Text.Should().Contain("mstest1.dll"); + startWithSources1Text.Should().Contain(KnownFrameworkStrings.Net7); + + var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartDiscovery); + var startWithSources2Text = startWithSources2.Request.GetRawMessage(); + // We sent mstest2.dll and net7 because that is what we have in settings. + startWithSources2Text.Should().Contain("mstest2.dll"); + startWithSources2Text.Should().Contain(KnownFrameworkStrings.Net7); + + fixture.DiscoveredTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount); + } + } + + public class MultiTFMExecution + { + [Test(@" + Given two test assemblies that have the same architecture + but have different target frameworks. + + When we execute tests. + + Then two testhosts should be started that target the same framework as each assembly. + ")] + public async Task C() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(11, 5) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit()) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + // -- + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net6) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(21, 5) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit()) + // We actually do get asked to terminate multiple times. In the second host only. 
+ .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName); + + // -- act + var testRunRequestPayload = new TestRunRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + RunSettings = $"" + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + // We figure out the framework for each assembly so there should be no incompatibility warnings + fixture.LoggedWarnings.Should().NotContainMatch("Test run detected DLL(s) which would use different framework*"); + + fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2); + var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources1Text = startWithSources1.Request.GetRawMessage(); + // We sent mstest1.dll. + startWithSources1Text.Should().Contain("mstest1.dll"); + // And we sent net5 as the target framework, because that is the framework of mstest1.dll. + startWithSources1Text.Should().Contain(KnownFrameworkStrings.Net5); + + var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources2Text = startWithSources2.Request.GetRawMessage(); + // We sent mstest2.dll. + startWithSources2Text.Should().Contain("mstest2.dll"); + // And we sent net6 as the target framework, because that is the framework of mstest2.dll. + startWithSources2Text.Should().Contain(KnownFrameworkStrings.Net6); + + fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount); + } + + [Test(@" + Given two test assemblies that have the same architecture + but have different target frameworks. + + When we execute tests + and provide runsettings that define the desired target framework. + + Then two testhosts should be started that target the framework chosen by runsettings.
+ ")] + public async Task D() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(11, 5) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit()) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + // -- + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net6) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(21, 5) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit()) + // We actually do get asked to terminate multiple times. In the second host only. + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName); + + // -- act + var testRunRequestPayload = new TestRunRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + RunSettings = $"{KnownFrameworkStrings.Net7}" + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + // We specify net7 which is not compatible with either, so we should get warnings + fixture.LoggedWarnings.Should().ContainMatch("Test run detected DLL(s) which would use different framework*"); + + fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2); + var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources1Text = startWithSources1.Request.GetRawMessage(); + // We sent mstest1.dll. + startWithSources1Text.Should().Contain("mstest1.dll"); + startWithSources1Text.Should().Contain(KnownFrameworkStrings.Net7); + + var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources2Text = startWithSources2.Request.GetRawMessage(); + // We sent mstest2.dll. 
+ startWithSources2Text.Should().Contain("mstest2.dll"); + startWithSources2Text.Should().Contain(KnownFrameworkStrings.Net7); + + fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount); + } + } + + public class MultiTFMTestSessions + { + + [Test(@" + Given two test assemblies that have the same architecture + but have different target frameworks. + + When we execute tests + and provide runsettings that define the desired target framework. + + Then two testhosts should be started that target the framework chosen by runsettings. + ")] + public async Task E() + { + // -- arrange + using var fixture = new Fixture(); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(11, 5) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit()) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + // -- + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net6) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(21, 5) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit()) + // We actually do get asked to terminate multiple times. In the second host only. + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName); + + // -- act + + var startTestSessionPayload = new StartTestSessionPayload + { + // We need to have a parallel run, otherwise we will create just a single proxy, + // because 1 is the maximum number of proxies to start for non-parallel run. + RunSettings = "0", + Sources = new[] { mstest1Dll.Path, mstest2Dll.Path } + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.StartTestSession(startTestSessionPayload, testHostLauncher: null, fixture.TestSessionEventsHandler, fixture.ProtocolConfig)); + + // You need to pass this on, otherwise it will ignore the test session that you just started. This is a by product of being able to start multiple test sessions. 
+ var testSessionInfo = fixture.TestSessionEventsHandler.StartTestSessionCompleteEvents.Single().TestSessionInfo; + + var testRunRequestPayload = new TestRunRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + RunSettings = $"0", + TestSessionInfo = testSessionInfo, + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + // We figure out the framework for each assembly so there should be no incompatibility warnings + fixture.LoggedWarnings.Should().NotContainMatch("Test run detected DLL(s) which would use different framework*"); + + fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2); + var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources1Text = startWithSources1.Request.GetRawMessage(); + // We sent mstest1.dll. + startWithSources1Text.Should().Contain("mstest1.dll"); + startWithSources1Text.Should().Contain(mstest1Dll.FrameworkName.ToString()); + + var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources2Text = startWithSources2.Request.GetRawMessage(); + // We sent mstest2.dll. + startWithSources2Text.Should().Contain("mstest2.dll"); + startWithSources2Text.Should().Contain(mstest2Dll.FrameworkName.ToString()); + + fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount); + } + } +} + +public class MultiTFMRunAndDiscoveryCompatibilityMode +{ + [Test(@" + Given two test assemblies that have the same architecture + but have different target frameworks. + + When DISABLE_MULTI_TFM_RUN is set + and we execute tests. + + Then two testhosts are both started for the same TFM. + ")] + public async Task E() + { + // -- arrange + using var fixture = new Fixture( + new FixtureOptions + { + FeatureFlags = new Dictionary + { + [FeatureFlag.DISABLE_MULTI_TFM_RUN] = true + } + } + ); + + var mstest1Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest1.dll") + .WithFramework(KnownFrameworkNames.Net5) // <--- + .WithArchitecture(Architecture.X64) + .WithTestCount(2) + .Build(); + + var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); + + var runTests1 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit()) + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost1 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest1Dll) + .WithProcess(testhost1Process) + .WithResponses(runTests1) + .Build(); + + // -- + + var mstest2Dll = new FakeTestDllBuilder() + .WithPath(@"X:\fake\mstest2.dll") + .WithFramework(KnownFrameworkNames.Net48) // <--- + .WithArchitecture(Architecture.X64) + // In reality, the dll would fail to load, and no tests would run from this dll, + // we simulate that by making it have 0 tests. 
+ .WithTestCount(0) + .Build(); + + var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); + + var runTests2 = new FakeTestHostResponsesBuilder() + .VersionCheck(5) + .ExecutionInitialize(FakeMessage.NoResponse) + .StartTestExecutionWithSources(mstest2Dll.TestResultBatches) + .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit()) + .SessionEnd(FakeMessage.NoResponse) + .Build(); + + var testhost2 = new FakeTestHostFixtureBuilder(fixture) + .WithTestDll(mstest2Dll) + .WithProcess(testhost2Process) + .WithResponses(runTests2) + .Build(); + + fixture.AddTestHostFixtures(testhost1, testhost2); + + var testRequestManager = fixture.BuildTestRequestManager(); + + mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName); + + // -- act + var testRunRequestPayload = new TestRunRequestPayload + { + Sources = new List { mstest1Dll.Path, mstest2Dll.Path }, + RunSettings = $"", + }; + + await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig)); + + // -- assert + fixture.AssertNoErrors(); + // We unify the frameworks to netcoreapp1.0 (because the vstest.console dll we are loading is built for netcoreapp and prefers netcoreapp), and because the + // behavior is to choose the common oldest framework. We then log a warning about incompatible sources. + fixture.LoggedWarnings.Should().ContainMatch($"Test run detected DLL(s) which would use different framework and platform versions*{KnownFrameworkNames.Netcoreapp1}*"); + + // We started both testhosts, even though we know one of them is incompatible. + fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2); + var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources1Text = startWithSources1.Request.GetRawMessage(); + // We sent mstest1.dll + startWithSources1Text.Should().Contain("mstest1.dll"); + // And we sent netcoreapp1.0 as the target framework, because that is the common fallback + startWithSources1Text.Should().Contain(KnownFrameworkStrings.Netcoreapp1); + + var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources); + var startWithSources2Text = startWithSources2.Request.GetRawMessage(); + // We sent mstest2.dll + startWithSources2Text.Should().Contain("mstest2.dll"); + // And we sent netcoreapp1.0 as the target framework, because that is the common fallback, even though the source is not compatible with it + startWithSources2Text.Should().Contain(KnownFrameworkStrings.Netcoreapp1); + + fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount); + } +} + +internal static class MessageExtensions +{ + private static MethodInfo?
s_messageProperty; + + internal static string GetRawMessage(this Message request) + { + if (s_messageProperty == null) + { + s_messageProperty = request.GetType().GetProperty("RawMessage")!.GetGetMethod(); + } + + return (string)s_messageProperty!.Invoke(request, new object[0])!; + } +} + +// Test and improvement ideas: +// TODO: passing null runsettings does not fail fast, instead it fails in Fakes settings code +// TODO: passing empty string fails in the xml parser code +// TODO: passing null sources and null testcases does not fail fast +// TODO: Just calling Exit, Close won't stop the run, we will keep waiting for test run to complete, I think in real life when we exit then Disconnected will be called on the vstest.console side, leading to abort flow. +//.StartTestExecutionWithSources(new FakeMessage(MessageType.TestMessage, new TestMessagePayload { MessageLevel = TestMessageLevel.Error, Message = "Loading type failed." }), afterAction: f => { /*f.Process.Exit();*/ f.FakeCommunicationEndpoint.Disconnect(); }) diff --git a/test/vstest.ProgrammerTests/UnitTest1.cs b/test/vstest.ProgrammerTests/UnitTest1.cs deleted file mode 100644 index cfe18c0054..0000000000 --- a/test/vstest.ProgrammerTests/UnitTest1.cs +++ /dev/null @@ -1,345 +0,0 @@ -// Copyright (c) Microsoft Corporation. All rights reserved. -// Licensed under the MIT license. See LICENSE file in the project root for full license information. - -using System.Diagnostics; -using System.Runtime.Versioning; - -using FluentAssertions; -using FluentAssertions.Extensions; - -using Microsoft.VisualStudio.TestPlatform.Client; -using Microsoft.VisualStudio.TestPlatform.CommandLine; -using Microsoft.VisualStudio.TestPlatform.CommandLine.Publisher; -using Microsoft.VisualStudio.TestPlatform.CommandLine.TestPlatformHelpers; -using Microsoft.VisualStudio.TestPlatform.CommandLineUtilities; -using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.ObjectModel; -using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine; -using Microsoft.VisualStudio.TestPlatform.CrossPlatEngine.TestRunAttachmentsProcessing; -using Microsoft.VisualStudio.TestPlatform.ObjectModel; -using Microsoft.VisualStudio.TestPlatform.ObjectModel.Client; - -#if DEBUG -using Microsoft.VisualStudio.TestPlatform.CommunicationUtilities.Interfaces; -#endif - -using vstest.ProgrammerTests.Fakes; - -namespace vstest.ProgrammerTests; -// Tests are run by Intent library that is executed from our Program.Main. To debug press F5 in VS, and maybe mark just a single test with [Only]. -// To just run, press Ctrl+F5 to run without debugging. It will use short timeout for abort in case something is wrong with your test.
- -public class TestDiscoveryTests -{ - public async Task GivenAnMSTestAssemblyWith108Tests_WhenTestsAreRun_Then108TestsAreExecuted() - { - // -- arrange - var fakeErrorAggregator = new FakeErrorAggregator(); - var commandLineOptions = CommandLineOptions.Instance; - - var fakeCurrentProcess = new FakeProcess(fakeErrorAggregator, @"X:\fake\vstest.console.exe"); - var fakeProcessHelper = new FakeProcessHelper(fakeErrorAggregator, fakeCurrentProcess); - - var fakeFileHelper = new FakeFileHelper(fakeErrorAggregator); - // TODO: Get framework name from constants - // TODO: have mstest1dll canned - var tests = new FakeTestBatchBuilder() - .WithTotalCount(108) - .WithDuration(100.Milliseconds()) - .WithBatchSize(10) - .Build(); - var mstest1Dll = new FakeTestDllFile(@"X:\fake\mstest1.dll", new FrameworkName(".NETCoreApp,Version=v5.0"), Architecture.X64, tests); - - List changeMessages = tests.Take(tests.Count - 1).Select(batch => // TODO: make the stats agree with the tests below - new FakeMessage(MessageType.TestRunStatsChange, - new TestRunChangedEventArgs(new TestRunStatistics(new Dictionary { [TestOutcome.Passed] = batch.Count }), batch, new List()) - )).ToList(); - FakeMessage completedMessage = new FakeMessage(MessageType.ExecutionComplete, new TestRunCompletePayload - { - // TODO: make the stats agree with the tests below - TestRunCompleteArgs = new TestRunCompleteEventArgs(new TestRunStatistics(new Dictionary { [TestOutcome.Passed] = 1 }), false, false, null, new System.Collections.ObjectModel.Collection(), TimeSpan.Zero), - LastRunTests = new TestRunChangedEventArgs(new TestRunStatistics(new Dictionary { [TestOutcome.Passed] = 1 }), tests.Last(), new List()), - }); - List messages = changeMessages.Concat(new[] { completedMessage }).ToList(); - var responses = new List> { - new RequestResponsePair(MessageType.VersionCheck, new FakeMessage(MessageType.VersionCheck, 5)), - new RequestResponsePair(MessageType.ExecutionInitialize, FakeMessage.NoResponse), - new RequestResponsePair(MessageType.StartTestExecutionWithSources, messages, false), - new RequestResponsePair(MessageType.SessionEnd, new [] { FakeMessage.NoResponse }, message => - { - // TODO: how do we associate this to the correct process? - var fp = fakeProcessHelper.Processes.Last(); - fakeProcessHelper.TerminateProcess(fp); - }), - }; - - var fakeCommunicationChannel = new FakeCommunicationChannel(responses, fakeErrorAggregator, 1); - fakeCommunicationChannel.Start(new object()); - var fakeCommunicationEndpoint = new FakeCommunicationEndpoint(fakeCommunicationChannel, fakeErrorAggregator); -#if DEBUG - TestServiceLocator.Clear(); - TestServiceLocator.Register(fakeCommunicationEndpoint.TestHostConnectionInfo.Endpoint, fakeCommunicationEndpoint); -#else - // This fools compiler into not being able to tell that the the rest of the code is unreachable. 
- var a = true; - if (a) - { - throw new InvalidOperationException("Tests cannot run in Release mode, because TestServiceLocator is compiled only for Debug, and so the tests will fail to setup channel and will hang."); - } -#endif - var fakeTestHostProcess = new FakeProcess(fakeErrorAggregator, @"C:\temp\testhost.exe"); - var fakeTestRuntimeProvider = new FakeTestRuntimeProvider(fakeProcessHelper, fakeTestHostProcess, fakeFileHelper, mstest1Dll.AsList(), fakeCommunicationEndpoint, fakeErrorAggregator); - var fakeTestRuntimeProviderManager = new FakeTestRuntimeProviderManager(fakeErrorAggregator); - fakeTestRuntimeProviderManager.AddTestRuntimeProviders(fakeTestRuntimeProvider); - var testEngine = new TestEngine(fakeTestRuntimeProviderManager, fakeProcessHelper); - - var testPlatform = new TestPlatform(testEngine, fakeFileHelper, fakeTestRuntimeProviderManager); - - var testRunResultAggregator = new TestRunResultAggregator(); - var fakeTestPlatformEventSource = new FakeTestPlatformEventSource(fakeErrorAggregator); - var fakeEnvironment = new FakeEnvironment(); - - var fakeAssemblyMetadataProvider = new FakeAssemblyMetadataProvider(fakeFileHelper, fakeErrorAggregator); - var inferHelper = new InferHelper(fakeAssemblyMetadataProvider); - - // This is most likely not the correctl place where to cut this off, plugin cache is probably the better place, - // but it is not injected, and I don't want to investigate this now. - var fakeDataCollectorAttachmentsProcessorsFactory = new FakeDataCollectorAttachmentsProcessorsFactory(fakeErrorAggregator); - var testRunAttachmentsProcessingManager = new TestRunAttachmentsProcessingManager(fakeTestPlatformEventSource, fakeDataCollectorAttachmentsProcessorsFactory); - - Task fakeMetricsPublisherTask = Task.FromResult(new FakeMetricsPublisher(fakeErrorAggregator)); - TestRequestManager testRequestManager = new( - commandLineOptions, - testPlatform, - testRunResultAggregator, - fakeTestPlatformEventSource, - inferHelper, - fakeMetricsPublisherTask, - fakeProcessHelper, - testRunAttachmentsProcessingManager, - fakeEnvironment); - - // -- act - - // TODO: this gives me run configuration that is way too complete, do we a way to generate "bare" runsettings? if not we should add them. 
Would be also useful to get - // runsettings from parameter set so people can use it - // TODO: TestSessionTimeout gives me way to abort the run without having to cancel it externally, but could probably still lead to hangs if that funtionality is broken - // TODO: few tries later, that is exactly the case when we abort, it still hangs on waiting to complete request, because test run complete was not sent - // var runConfiguration = new Microsoft.VisualStudio.TestPlatform.ObjectModel.RunConfiguration { TestSessionTimeout = 40_000 }.ToXml().OuterXml; - var runConfiguration = string.Empty; - var testRunRequestPayload = new TestRunRequestPayload - { - // TODO: passing null sources and null testcases does not fail fast - Sources = mstest1Dll.Path.AsList(), - // TODO: passing null runsettings does not fail fast, instead it fails in Fakes settings code - // TODO: passing empty string fails in the xml parser code - RunSettings = $"{runConfiguration}" - }; - - // var fakeTestHostLauncher = new FakeTestHostLauncher(); - var fakeTestRunEventsRegistrar = new FakeTestRunEventsRegistrar(fakeErrorAggregator); - var protocolConfig = new ProtocolConfig(); - - // TODO: we make sure the test is running 10 minutes at max and then we try to abort - // if we aborted we write the error to aggregator, this needs to be made into a pattern - // so we can avoid hanging if the run does not complete correctly - var cancelAbort = new CancellationTokenSource(); - var task = Task.Run(async () => - { - await Task.Delay(TimeSpan.FromSeconds(Debugger.IsAttached ? 100 : 10), cancelAbort.Token); - if (Debugger.IsAttached) - { - // we will abort because we are hanging, look at stacks to see what the problem is - Debugger.Break(); - } - fakeErrorAggregator.Add(new Exception("errr we aborted")); - testRequestManager.AbortTestRun(); - - }); - testRequestManager.RunTests(testRunRequestPayload, testHostLauncher: null, fakeTestRunEventsRegistrar, protocolConfig); - cancelAbort.Cancel(); - if (!task.IsCanceled) - { - await task; - } - // pattern end - - // -- assert - fakeErrorAggregator.Errors.Should().BeEmpty(); - fakeTestRunEventsRegistrar.RunChangedEvents.SelectMany(er => er.Data.NewTestResults).Should().HaveCount(108); - } - - public async Task GivenMultipleMsTestAssembliesThatUseTheSameTargetFrameworkAndArchitecture_WhenTestsAreRun_ThenAllTestsFromAllAssembliesAreRun() - { - // -- arrange - using var fixture = new Fixture(); - - var mstest1Dll = new FakeTestDllBuilder() - .WithPath(@"X:\fake\mstest1.dll") - .WithFramework(KnownFrameworkNames.Net5) - .WithArchitecture(Architecture.X64) - .WithTestCount(108, 10) - .Build(); - - var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe"); - - var runTests1 = new FakeTestHostResponsesBuilder() - .VersionCheck(5) - .ExecutionInitialize(FakeMessage.NoResponse) - .StartTestExecutionWithSources(mstest1Dll.TestResultBatches) - .SessionEnd(FakeMessage.NoResponse, _ => testhost1Process.Exit()) - .Build(); - - var testhost1 = new FakeTestHostFixtureBuilder(fixture) - .WithTestDll(mstest1Dll) - .WithProcess(testhost1Process) - .WithResponses(runTests1) - .Build(); - - var mstest2Dll = new FakeTestDllBuilder() - .WithPath(@"X:\fake\mstest2.dll") - .WithFramework(KnownFrameworkNames.Net5) - .WithArchitecture(Architecture.X64) - .WithTestCount(50, 8) - .Build(); - - var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe"); - - var runTests2 = new FakeTestHostResponsesBuilder() - .VersionCheck(5) - 
-            .ExecutionInitialize(FakeMessage.NoResponse)
-            .StartTestExecutionWithSources(mstest2Dll.TestResultBatches)
-            .SessionEnd(FakeMessage.NoResponse, f => f.Process.Exit())
-            .Build();
-
-        var testhost2 = new FakeTestHostFixtureBuilder(fixture)
-            .WithTestDll(mstest2Dll)
-            .WithProcess(testhost2Process)
-            .WithResponses(runTests2)
-            .Build();
-
-        fixture.AddTestHostFixtures(testhost1, testhost2);
-
-        var testRequestManager = fixture.BuildTestRequestManager();
-
-        // -- act
-        var runConfiguration = string.Empty;
-        var testRunRequestPayload = new TestRunRequestPayload
-        {
-            Sources = new List { mstest1Dll.Path, mstest2Dll.Path },
-
-            RunSettings = $"{runConfiguration}"
-        };
-
-        await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig));
-
-        // -- assert
-        fixture.AssertNoErrors();
-        fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount + mstest2Dll.TestCount);
-    }
-
-    public async Task GivenMultipleMsTestAssembliesThatUseDifferentTargetFrameworkAndTheSameArchitecture_WhenTestsAreRun_ThenTwoTesthostsAreStartedBothForTheSameTFM()
-    {
-        // TODO: make vstest.console not start testhosts for incompatible sources.
-
-        // -- arrange
-        using var fixture = new Fixture();
-
-        var mstest1Dll = new FakeTestDllBuilder()
-            .WithPath(@"X:\fake\mstest1.dll")
-            .WithFramework(KnownFrameworkNames.Net5) // <---
-            .WithArchitecture(Architecture.X64)
-            .WithTestCount(2)
-            .Build();
-
-        var testhost1Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost1.exe");
-
-        var runTests1 = new FakeTestHostResponsesBuilder()
-            .VersionCheck(5)
-            .ExecutionInitialize(FakeMessage.NoResponse)
-            .StartTestExecutionWithSources(mstest1Dll.TestResultBatches)
-            .SessionEnd(FakeMessage.NoResponse, afterAction: _ => testhost1Process.Exit())
-            .Build();
-
-        var testhost1 = new FakeTestHostFixtureBuilder(fixture)
-            .WithTestDll(mstest1Dll)
-            .WithProcess(testhost1Process)
-            .WithResponses(runTests1)
-            .Build();
-
-        // --
-
-        var mstest2Dll = new FakeTestDllBuilder()
-            .WithPath(@"X:\fake\mstest2.dll")
-            .WithFramework(KnownFrameworkNames.Net48) // <---
-            .WithArchitecture(Architecture.X64)
-            // In reality, the dll would fail to load and no tests would run from it;
-            // we simulate that by making it have 0 tests.
-            .WithTestCount(0)
-            .Build();
-
-        var testhost2Process = new FakeProcess(fixture.ErrorAggregator, @"X:\fake\testhost2.exe");
-
-        var runTests2 = new FakeTestHostResponsesBuilder()
-            .VersionCheck(5)
-            .ExecutionInitialize(FakeMessage.NoResponse)
-            .StartTestExecutionWithSources(mstest2Dll.TestResultBatches)
-            .SessionEnd(FakeMessage.NoResponse, _ => testhost2Process.Exit())
-            .Build();
-
-        var testhost2 = new FakeTestHostFixtureBuilder(fixture)
-            .WithTestDll(mstest2Dll)
-            .WithProcess(testhost2Process)
-            .WithResponses(runTests2)
-            .Build();
-
-        fixture.AddTestHostFixtures(testhost1, testhost2);
-
-        var testRequestManager = fixture.BuildTestRequestManager();
-
-        mstest1Dll.FrameworkName.Should().NotBe(mstest2Dll.FrameworkName);
-
-        // -- act
-        // TODO: Building the whole default run configuration is needed here, because TestRequestManager does not ensure the basic settings are populated,
-        // and all methods that populate them just silently fail, so TestHostProvider does not get any useful settings.
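        // A minimal sketch of the idea behind the TODO above (illustration only, not taken from this
        // change), assuming the ObjectModel RunConfiguration type used in the next line; wrapping the
        // produced element in a <RunSettings> root is an assumption based on the usual runsettings layout:
        //
        //     var defaultRunConfigurationXml = new RunConfiguration().ToXml().OuterXml;
        //     var bareRunSettings = $"<RunSettings>{defaultRunConfigurationXml}</RunSettings>";
        //
        // The payload built below relies on this so that the test host provider receives populated basic settings.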
-        var runConfiguration = new RunConfiguration().ToXml().OuterXml;
-        var testRunRequestPayload = new TestRunRequestPayload
-        {
-            Sources = new List { mstest1Dll.Path, mstest2Dll.Path },
-
-            RunSettings = $"{runConfiguration}"
-        };
-
-        await testRequestManager.ExecuteWithAbort(tm => tm.RunTests(testRunRequestPayload, testHostLauncher: null, fixture.TestRunEventsRegistrar, fixture.ProtocolConfig));
-
-        // -- assert
-        fixture.AssertNoErrors();
-        // We unify the frameworks to netcoreapp1.0, because the vstest.console dll we are loading is built for netcoreapp and prefers netcoreapp, and because the
-        // behavior is to choose the oldest common framework. We then log a warning about the incompatible sources.
-        fixture.TestRunEventsRegistrar.LoggedWarnings.Should().ContainMatch($"Test run detected DLL(s) which were built for different framework and platform versions*{KnownFrameworkNames.Netcoreapp1}*");
-
-        // We started both testhosts, even though we know one of them is incompatible.
-        fixture.ProcessHelper.Processes.Where(p => p.Started).Should().HaveCount(2);
-        var startWithSources1 = testhost1.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources);
-        var startWithSources1Text = startWithSources1.Request.Payload.Select(t => t.ToString()).JoinBySpace();
-        // We sent mstest1.dll
-        startWithSources1Text.Should().Contain("mstest1.dll");
-        // And we sent netcoreapp1.0 as the target framework
-        startWithSources1Text.Should().Contain(KnownFrameworkStrings.Netcoreapp1);
-
-        var startWithSources2 = testhost2.FakeCommunicationChannel.ProcessedMessages.Single(m => m.Request.MessageType == MessageType.StartTestExecutionWithSources);
-        var startWithSources2Text = startWithSources2.Request.Payload.Select(t => t.ToString()).JoinBySpace();
-        // We sent mstest2.dll
-        startWithSources2Text.Should().Contain("mstest2.dll");
-        // And we sent netcoreapp1.0 as the target framework, even though it is incompatible
-        startWithSources2Text.Should().Contain(KnownFrameworkStrings.Netcoreapp1);
-
-        fixture.ExecutedTests.Should().HaveCount(mstest1Dll.TestCount);
-    }
-}
-
-// Test and improvement ideas:
-// TODO: passing null runsettings does not fail fast; instead it fails in Fakes settings code
-// TODO: passing an empty string fails in the xml parser code
-// TODO: passing null sources and null testcases does not fail fast
-// TODO: Just calling Exit or Close won't stop the run; we will keep waiting for the test run to complete. I think in real life, when we exit, Disconnected is called on the vstest.console side, leading to the abort flow.
-//.StartTestExecutionWithSources(new FakeMessage(MessageType.TestMessage, new TestMessagePayload { MessageLevel = TestMessageLevel.Error, Message = "Loading type failed.
}), afterAction: f => { /*f.Process.Exit();*/ f.FakeCommunicationEndpoint.Disconnect(); }) diff --git a/test/vstest.console.UnitTests/CommandLine/InferHelperTests.cs b/test/vstest.console.UnitTests/CommandLine/InferHelperTests.cs index 1ba3b02b91..e834673082 100644 --- a/test/vstest.console.UnitTests/CommandLine/InferHelperTests.cs +++ b/test/vstest.console.UnitTests/CommandLine/InferHelperTests.cs @@ -25,61 +25,57 @@ public class InferHelperTests private readonly Framework _frameworkNet47 = Framework.FromString(".NETFramework,Version=4.7"); private readonly Framework _frameworkCore10 = Framework.FromString(".NETCoreApp,Version=1.0"); private readonly Framework _frameworkCore11 = Framework.FromString(".NETCoreApp,Version=1.1"); - private readonly IDictionary _sourceFrameworks; - private readonly IDictionary _sourceArchitectures; public InferHelperTests() { _mockAssemblyHelper = new Mock(); _inferHelper = new InferHelper(_mockAssemblyHelper.Object); - _sourceFrameworks = new Dictionary(); - _sourceArchitectures = new Dictionary(); } [TestMethod] public void AutoDetectArchitectureShouldReturnDefaultArchitectureOnNullSources() { - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(null, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(null, _defaultArchitecture, out _)); } [TestMethod] public void AutoDetectArchitectureShouldReturnDefaultArchitectureOnEmptySources() { - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List(0), _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List(0), _defaultArchitecture, out _)); } [TestMethod] public void AutoDetectArchitectureShouldReturnDefaultArchitectureOnNullItemInSources() { - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { null }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { null! 
}, _defaultArchitecture, out _)); } [TestMethod] public void AutoDetectArchitectureShouldReturnDefaultArchitectureOnWhiteSpaceItemInSources() { - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { " " }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { " " }, _defaultArchitecture, out _)); } [TestMethod] public void AutoDetectArchitectureShouldReturnCorrectArchForOneSource() { _mockAssemblyHelper.Setup(ah => ah.GetArchitecture(It.IsAny())).Returns(Architecture.X86); - Assert.AreEqual(Architecture.X86, _inferHelper.AutoDetectArchitecture(new List() { "1.dll" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(Architecture.X86, _inferHelper.AutoDetectArchitecture(new List() { "1.dll" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny())); } [TestMethod] public void AutoDetectArchitectureShouldReturnCorrectDefaultArchForNotDotNetAssembly() { - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "NotDotNetAssebly.appx" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "NotDotNetAssebly.appx" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Never); } [TestMethod] - public void AutoDetectArchitectureShouldSetAnyCpuArchForNotDotNetAssembly() + public void AutoDetectArchitectureShouldSetDefaultArchForNotDotNetAssembly() { - _inferHelper.AutoDetectArchitecture(new List() { "NotDotNetAssebly.appx" }, _sourceArchitectures, _defaultArchitecture); - Assert.AreEqual(Architecture.AnyCPU, _sourceArchitectures["NotDotNetAssebly.appx"]); + _inferHelper.AutoDetectArchitecture(new List() { "NotDotNetAssebly.appx" }, _defaultArchitecture, out var sourceArchitectures); + Assert.AreEqual(_defaultArchitecture, sourceArchitectures["NotDotNetAssebly.appx"]); } [TestMethod] @@ -87,7 +83,7 @@ public void AutoDetectArchitectureShouldReturnDefaultArchForAllAnyCpuAssemblies( { _mockAssemblyHelper.SetupSequence(ah => ah.GetArchitecture(It.IsAny())) .Returns(Architecture.AnyCPU).Returns(Architecture.AnyCPU).Returns(Architecture.AnyCPU); - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "AnyCPU2.exe", "AnyCPU3.dll" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "AnyCPU2.exe", "AnyCPU3.dll" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(3)); } @@ -96,7 +92,7 @@ public void AutoDetectArchitectureShouldReturnX86ArchIfOneX86AssemblyAndRestAnyC { _mockAssemblyHelper.SetupSequence(ah => ah.GetArchitecture(It.IsAny())) .Returns(Architecture.AnyCPU).Returns(Architecture.AnyCPU).Returns(Architecture.X86); - Assert.AreEqual(Architecture.X86, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "AnyCPU2.exe", "x86.dll" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(Architecture.X86, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "AnyCPU2.exe", "x86.dll" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(3)); } @@ -105,7 +101,7 @@ public void AutoDetectArchitectureShouldReturnARMArchIfOneARMAssemblyAndRestAnyC { _mockAssemblyHelper.SetupSequence(ah => 
ah.GetArchitecture(It.IsAny())) .Returns(Architecture.ARM).Returns(Architecture.ARM).Returns(Architecture.ARM); - Assert.AreEqual(Architecture.ARM, _inferHelper.AutoDetectArchitecture(new List() { "ARM1.dll", "ARM2.dll", "ARM3.dll" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(Architecture.ARM, _inferHelper.AutoDetectArchitecture(new List() { "ARM1.dll", "ARM2.dll", "ARM3.dll" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(3)); } @@ -114,7 +110,7 @@ public void AutoDetectArchitectureShouldReturnX64ArchIfOneX64AssemblyAndRestAnyC { _mockAssemblyHelper.SetupSequence(ah => ah.GetArchitecture(It.IsAny())) .Returns(Architecture.AnyCPU).Returns(Architecture.AnyCPU).Returns(Architecture.X64); - Assert.AreEqual(Architecture.X64, _inferHelper.AutoDetectArchitecture(new List() { "x64.dll", "AnyCPU2.exe", "x64.dll" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(Architecture.X64, _inferHelper.AutoDetectArchitecture(new List() { "x64.dll", "AnyCPU2.exe", "x64-2.dll" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(3)); } @@ -123,7 +119,7 @@ public void AutoDetectArchitectureShouldReturnDefaultArchOnConflictArches() { _mockAssemblyHelper.SetupSequence(ah => ah.GetArchitecture(It.IsAny())) .Returns(Architecture.AnyCPU).Returns(Architecture.X64).Returns(Architecture.X86); - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "x64.exe", "x86.dll" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "x64.exe", "x86.dll" }, _defaultArchitecture, out _)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(3)); } @@ -133,11 +129,11 @@ public void AutoDetectArchitectureShouldPoulateSourceArchitectureDictionary() _mockAssemblyHelper.SetupSequence(ah => ah.GetArchitecture(It.IsAny())) .Returns(Architecture.AnyCPU).Returns(Architecture.X64).Returns(Architecture.X86); - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "x64.exe", "x86.dll" }, _sourceArchitectures, _defaultArchitecture)); - Assert.AreEqual(3, _sourceArchitectures.Count); - Assert.AreEqual(Architecture.AnyCPU, _sourceArchitectures["AnyCPU1.dll"]); - Assert.AreEqual(Architecture.X64, _sourceArchitectures["x64.exe"]); - Assert.AreEqual(Architecture.X86, _sourceArchitectures["x86.dll"]); + Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "x64.exe", "x86.dll" }, _defaultArchitecture, out var sourceArchitectures)); + Assert.AreEqual(3, sourceArchitectures.Count); + Assert.AreEqual(_defaultArchitecture, sourceArchitectures["AnyCPU1.dll"]); + Assert.AreEqual(Architecture.X64, sourceArchitectures["x64.exe"]); + Assert.AreEqual(Architecture.X86, sourceArchitectures["x86.dll"]); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(3)); } @@ -147,32 +143,32 @@ public void AutoDetectArchitectureShouldReturnDefaultArchIfthereIsNotDotNetAssem { _mockAssemblyHelper.SetupSequence(ah => ah.GetArchitecture(It.IsAny())) .Returns(Architecture.AnyCPU); - Assert.AreEqual(_defaultArchitecture, _inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "NotDotNetAssebly.appx" }, _sourceArchitectures, _defaultArchitecture)); + Assert.AreEqual(_defaultArchitecture, 
_inferHelper.AutoDetectArchitecture(new List() { "AnyCPU1.dll", "NotDotNetAssebly.appx" }, _defaultArchitecture, out var sourceArchitectures)); _mockAssemblyHelper.Verify(ah => ah.GetArchitecture(It.IsAny()), Times.Exactly(1)); } [TestMethod] public void AutoDetectFrameworkShouldReturnDefaultFrameworkOnNullSources() { - Assert.AreEqual(_defaultFramework, _inferHelper.AutoDetectFramework(null, _sourceFrameworks)); + Assert.AreEqual(_defaultFramework, _inferHelper.AutoDetectFramework(null, out _)); } [TestMethod] public void AutoDetectFrameworkShouldReturnDefaultFrameworkOnEmptySources() { - Assert.AreEqual(_defaultFramework, _inferHelper.AutoDetectFramework(new List(0), _sourceFrameworks)); + Assert.AreEqual(_defaultFramework, _inferHelper.AutoDetectFramework(new List(0), out _)); } [TestMethod] public void AutoDetectFrameworkShouldReturnDefaultFrameworkOnNullItemInSources() { - Assert.AreEqual(_defaultFramework, _inferHelper.AutoDetectFramework(new List() { null }, _sourceFrameworks)); + Assert.AreEqual(_defaultFramework, _inferHelper.AutoDetectFramework(new List() { null! }, out _)); } [TestMethod] public void AutoDetectFrameworkShouldReturnDefaultFrameworkOnEmptyItemInSources() { - Assert.AreEqual(_defaultFramework.Name, _inferHelper.AutoDetectFramework(new List() { string.Empty }, _sourceFrameworks).Name); + Assert.AreEqual(_defaultFramework.Name, _inferHelper.AutoDetectFramework(new List() { string.Empty }, out _).Name); } [TestMethod] @@ -230,7 +226,7 @@ public void AutoDetectFrameworkShouldReturnHighestVersionFxOnSameFxName() .Returns(new FrameworkName(_frameworkNet46.Name)) .Returns(new FrameworkName(_frameworkNet47.Name)) .Returns(new FrameworkName(_frameworkNet45.Name)); - Assert.AreEqual(_frameworkNet47.Name, _inferHelper.AutoDetectFramework(new List() { "net46.dll", "net47.exe", "net45.dll" }, _sourceFrameworks).Name); + Assert.AreEqual(_frameworkNet47.Name, _inferHelper.AutoDetectFramework(new List() { "net46.dll", "net47.exe", "net45.dll" }, out _).Name); _mockAssemblyHelper.Verify(ah => ah.GetFrameWork(It.IsAny()), Times.Exactly(3)); } @@ -242,12 +238,12 @@ public void AutoDetectFrameworkShouldPopulatetheDictionaryForAllTheSources() .Returns(new FrameworkName(_frameworkNet47.Name)) .Returns(new FrameworkName(_frameworkNet45.Name)); - Assert.AreEqual(_frameworkNet47.Name, _inferHelper.AutoDetectFramework(new List() { "net46.dll", "net47.exe", "net45.dll" }, _sourceFrameworks).Name); + Assert.AreEqual(_frameworkNet47.Name, _inferHelper.AutoDetectFramework(new List() { "net46.dll", "net47.exe", "net45.dll" }, out var sourceFrameworks).Name); - Assert.AreEqual(3, _sourceFrameworks.Count); - Assert.AreEqual(_frameworkNet46.Name, _sourceFrameworks["net46.dll"].Name); - Assert.AreEqual(_frameworkNet47.Name, _sourceFrameworks["net47.exe"].Name); - Assert.AreEqual(_frameworkNet45.Name, _sourceFrameworks["net45.dll"].Name); + Assert.AreEqual(3, sourceFrameworks.Count); + Assert.AreEqual(_frameworkNet46.Name, sourceFrameworks["net46.dll"].Name); + Assert.AreEqual(_frameworkNet47.Name, sourceFrameworks["net47.exe"].Name); + Assert.AreEqual(_frameworkNet45.Name, sourceFrameworks["net45.dll"].Name); _mockAssemblyHelper.Verify(ah => ah.GetFrameWork(It.IsAny()), Times.Exactly(3)); } @@ -258,7 +254,7 @@ public void AutoDetectFrameworkShouldReturnHighestVersionFxOnEvenManyLowerVersio .Returns(new FrameworkName(_frameworkCore10.Name)) .Returns(new FrameworkName(_frameworkCore11.Name)) .Returns(new FrameworkName(_frameworkCore10.Name)); - Assert.AreEqual(_frameworkCore11.Name, 
_inferHelper.AutoDetectFramework(new List() { "netcore10_1.dll", "netcore11.dll", "netcore10_2.dll" }, _sourceFrameworks).Name); + Assert.AreEqual(_frameworkCore11.Name, _inferHelper.AutoDetectFramework(new List() { "netcore10_1.dll", "netcore11.dll", "netcore10_2.dll" }, out _).Name); _mockAssemblyHelper.Verify(ah => ah.GetFrameWork(It.IsAny()), Times.Exactly(3)); } @@ -266,7 +262,7 @@ private void SetupAndValidateForSingleAssembly(string assemblyName, Framework fx { _mockAssemblyHelper.Setup(sh => sh.GetFrameWork(assemblyName)) .Returns(new FrameworkName(fx.Name)); - Assert.AreEqual(fx.Name, _inferHelper.AutoDetectFramework(new List() { assemblyName }, _sourceFrameworks).Name); + Assert.AreEqual(fx.Name, _inferHelper.AutoDetectFramework(new List() { assemblyName }, out _).Name); if (verify) { _mockAssemblyHelper.Verify(ah => ah.GetFrameWork(assemblyName)); diff --git a/test/vstest.console.UnitTests/Processors/ListFullyQualifiedTestsArgumentProcessorTests.cs b/test/vstest.console.UnitTests/Processors/ListFullyQualifiedTestsArgumentProcessorTests.cs index 685d251dd8..73f2d93026 100644 --- a/test/vstest.console.UnitTests/Processors/ListFullyQualifiedTestsArgumentProcessorTests.cs +++ b/test/vstest.console.UnitTests/Processors/ListFullyQualifiedTestsArgumentProcessorTests.cs @@ -156,7 +156,7 @@ public void ExecutorExecuteShouldThrowTestPlatformException() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new TestPlatformException("DummyTestPlatformException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(true); @@ -174,7 +174,7 @@ public void ExecutorExecuteShouldThrowSettingsException() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new SettingsException("DummySettingsException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(true); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -191,7 +191,7 @@ public void ExecutorExecuteShouldThrowInvalidOperationException() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new InvalidOperationException("DummyInvalidOperationException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(true); @@ -210,7 +210,7 @@ public void ExecutorExecuteShouldThrowOtherExceptions() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new Exception("DummyException")); - mockTestPlatform.Setup(tp => 
tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(true); @@ -300,7 +300,7 @@ private void RunListFullyQualifiedTestArgumentProcessorWithTraits(Mock dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(legitPath); var cmdOptions = CommandLineOptions.Instance; @@ -320,7 +320,7 @@ private void RunListFullyQualifiedTestArgumentProcessorExecuteWithMockSetup(Mock new TestCase("Test2", new Uri("http://FooTestUri2"), "Source2") }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(legitPath); diff --git a/test/vstest.console.UnitTests/Processors/ListTestsArgumentProcessorTests.cs b/test/vstest.console.UnitTests/Processors/ListTestsArgumentProcessorTests.cs index 915ab48c47..f760df2b8a 100644 --- a/test/vstest.console.UnitTests/Processors/ListTestsArgumentProcessorTests.cs +++ b/test/vstest.console.UnitTests/Processors/ListTestsArgumentProcessorTests.cs @@ -159,7 +159,7 @@ public void ExecutorExecuteShouldThrowTestPlatformException() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new TestPlatformException("DummyTestPlatformException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); @@ -176,7 +176,7 @@ public void ExecutorExecuteShouldThrowSettingsException() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new SettingsException("DummySettingsException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); @@ -193,7 +193,7 @@ public void ExecutorExecuteShouldThrowInvalidOperationException() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new InvalidOperationException("DummyInvalidOperationException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); @@ -210,7 +210,7 @@ public void 
ExecutorExecuteShouldThrowOtherExceptions() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new Exception("DummyException")); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); @@ -268,7 +268,7 @@ private void RunListTestArgumentProcessorExecuteWithMockSetup(Mock dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); diff --git a/test/vstest.console.UnitTests/Processors/RunSpecificTestsArgumentProcessorTests.cs b/test/vstest.console.UnitTests/Processors/RunSpecificTestsArgumentProcessorTests.cs index e630d08283..443c9aa606 100644 --- a/test/vstest.console.UnitTests/Processors/RunSpecificTestsArgumentProcessorTests.cs +++ b/test/vstest.console.UnitTests/Processors/RunSpecificTestsArgumentProcessorTests.cs @@ -196,8 +196,8 @@ public void ExecutorExecuteForValidSourceWithTestCaseFilterShouldRunTests() }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -207,7 +207,7 @@ public void ExecutorExecuteForValidSourceWithTestCaseFilterShouldRunTests() ArgumentProcessorResult argumentProcessorResult = executor.Execute(); _mockOutput.Verify(o => o.WriteLine(It.IsAny(), OutputLevel.Warning), Times.Never); - mockTestPlatform.Verify(o => o.CreateDiscoveryRequest(It.IsAny(), It.Is(c => c.TestCaseFilter == "Filter"), It.IsAny()), Times.Once()); + mockTestPlatform.Verify(o => o.CreateDiscoveryRequest(It.IsAny(), It.Is(c => c.TestCaseFilter == "Filter"), It.IsAny(), It.IsAny>()), Times.Once()); Assert.AreEqual(ArgumentProcessorResult.Success, argumentProcessorResult); } @@ -220,8 +220,8 @@ public void ExecutorExecuteShouldThrowTestPlatformExceptionThrownDuringDiscovery var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new TestPlatformException("DummyTestPlatformException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - 
mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -238,8 +238,8 @@ public void ExecutorExecuteShouldThrowInvalidOperationExceptionThrownDuringDisco var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new InvalidOperationException("DummyInvalidOperationException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -256,8 +256,8 @@ public void ExecutorExecuteShouldThrowSettingsExceptionThrownDuringDiscovery() var mockDiscoveryRequest = new Mock(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Throws(new SettingsException("DummySettingsException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -281,8 +281,8 @@ public void ExecutorExecuteShouldThrowTestPlatformExceptionThrownDuringExecution mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); mockTestRunRequest.Setup(dr => dr.ExecuteAsync()).Throws(new TestPlatformException("DummyTestPlatformException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - 
mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -308,8 +308,8 @@ public void ExecutorExecuteShouldThrowSettingsExceptionThrownDuringExecution() mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); mockTestRunRequest.Setup(dr => dr.ExecuteAsync()).Throws(new SettingsException("DummySettingsException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -335,8 +335,8 @@ public void ExecutorExecuteShouldThrowInvalidOperationExceptionThrownDuringExecu mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); mockTestRunRequest.Setup(dr => dr.ExecuteAsync()).Throws(new InvalidOperationException("DummySettingsException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); @@ -360,7 +360,7 @@ public void ExecutorExecuteShouldForValidSourcesAndNoTestsDiscoveredShouldLogWar CommandLineOptions.Instance.TestAdapterPath = new[] { @"C:\Foo" }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(new List())); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new 
TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -381,7 +381,7 @@ public void ExecutorExecuteShouldForValidSourcesAndNoTestsDiscoveredShouldLogApp ResetAndAddSourceToCommandLineOptions(); mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(new List())); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -408,8 +408,8 @@ public void ExecutorExecuteShouldForValidSourcesAndValidSelectedTestsRunsTestsAn }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -436,8 +436,8 @@ public void ExecutorShouldRunTestsWhenTestsAreCommaSeparated() }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -465,8 +465,8 @@ public void 
ExecutorShouldRunTestsWhenTestsAreFiltered() }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -493,8 +493,8 @@ public void ExecutorShouldWarnWhenTestsAreNotAvailable() }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -522,8 +522,8 @@ public void ExecutorShouldRunTestsWhenTestsAreCommaSeparatedWithEscape() }; mockDiscoveryRequest.Setup(dr => dr.DiscoverAsync()).Raises(dr => dr.OnDiscoveredTests += null, new DiscoveredTestsEventArgs(list)); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); var executor = GetExecutor(testRequestManager); @@ -553,8 +553,8 @@ public void ExecutorShouldDisplayWarningIfNoTestsAreExecuted() mockTestRunRequest.Setup(tr => tr.ExecuteAsync()).Returns(1).Raises(tr => tr.OnRunCompletion += null, new 
TestRunCompleteEventArgs(mockTestRunStats.Object, false, false, null, null, null, new TimeSpan())); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); @@ -584,8 +584,8 @@ public void ExecutorShouldNotDisplayWarningIfTestsAreExecuted() mockTestRunRequest.Setup(tr => tr.ExecuteAsync()).Returns(1).Raises(tr => tr.OnRunCompletion += null, new TestRunCompleteEventArgs(testRunStats, false, false, null, null, null, new TimeSpan())); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); - mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockDiscoveryRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockDiscoveryRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); diff --git a/test/vstest.console.UnitTests/Processors/RunTestsArgumentProcessorTests.cs b/test/vstest.console.UnitTests/Processors/RunTestsArgumentProcessorTests.cs index 5b24d74d09..9b9c7b9e9a 100644 --- a/test/vstest.console.UnitTests/Processors/RunTestsArgumentProcessorTests.cs +++ b/test/vstest.console.UnitTests/Processors/RunTestsArgumentProcessorTests.cs @@ -156,7 +156,7 @@ public void ExecutorExecuteShouldThrowTestPlatformException() var mockTestRunRequest = new Mock(); mockTestRunRequest.Setup(tr => tr.ExecuteAsync()).Throws(new TestPlatformException("DummyTestPlatformException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _environment.Object); @@ -172,7 +172,7 @@ public void ExecutorExecuteShouldThrowSettingsException() var mockTestRunRequest = new Mock(); mockTestRunRequest.Setup(tr => 
tr.ExecuteAsync()).Throws(new SettingsException("DummySettingsException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _environment.Object); @@ -188,7 +188,7 @@ public void ExecutorExecuteShouldThrowInvalidOperationException() var mockTestRunRequest = new Mock(); mockTestRunRequest.Setup(tr => tr.ExecuteAsync()).Throws(new InvalidOperationException("DummyInvalidOperationException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _environment.Object); @@ -204,7 +204,7 @@ public void ExecutorExecuteShouldThrowOtherExceptions() var mockTestRunRequest = new Mock(); mockTestRunRequest.Setup(tr => tr.ExecuteAsync()).Throws(new Exception("DummyException")); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(mockTestRunRequest.Object); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(mockTestRunRequest.Object); ResetAndAddSourceToCommandLineOptions(); var testRequestManager = new TestRequestManager(CommandLineOptions.Instance, mockTestPlatform.Object, TestRunResultAggregator.Instance, _mockTestPlatformEventSource.Object, _inferHelper, _mockMetricsPublisherTask, _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _environment.Object); @@ -260,7 +260,7 @@ private ArgumentProcessorResult RunRunArgumentProcessorExecuteWithMockSetup(ITes var args = new TestRunCompleteEventArgs(mockTestRunStats.Object, false, false, null, null, null, new TimeSpan()); - mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Returns(testRunRequest); + mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Returns(testRunRequest); ResetAndAddSourceToCommandLineOptions(); diff --git a/test/vstest.console.UnitTests/TestPlatformHelpers/TestRequestManagerTests.cs b/test/vstest.console.UnitTests/TestPlatformHelpers/TestRequestManagerTests.cs index b7d0d23f75..fcbe03c2fd 100644 --- a/test/vstest.console.UnitTests/TestPlatformHelpers/TestRequestManagerTests.cs +++ b/test/vstest.console.UnitTests/TestPlatformHelpers/TestRequestManagerTests.cs @@ -90,9 +90,9 @@ public TestRequestManagerTests() _mockProcessHelper.Object, _mockAttachmentsProcessingManager.Object, _mockEnvironment.Object); - _mockTestPlatform.Setup(tp => tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + _mockTestPlatform.Setup(tp => 
tp.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Returns(_mockDiscoveryRequest.Object); - _mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())) + _mockTestPlatform.Setup(tp => tp.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Returns(_mockRunRequest.Object); _mockAssemblyMetadataProvider.Setup(a => a.GetArchitecture(It.IsAny())) .Returns(Architecture.X86); @@ -167,8 +167,8 @@ public void DiscoverTestsShouldReadTheBatchSizeFromSettingsAndSetItForDiscoveryC DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); Assert.AreEqual(15, actualDiscoveryCriteria!.FrequencyOfDiscoveredTestsEvent); @@ -186,8 +186,8 @@ public void DiscoverTestsShouldCallTestPlatformAndSucceed() var createDiscoveryRequestCalled = 0; DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => { createDiscoveryRequestCalled++; actualDiscoveryCriteria = discoveryCriteria; @@ -241,8 +241,8 @@ public void DiscoverTestsShouldPassSameProtocolConfigInRequestData() IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); var mockDiscoveryRegistrar = new Mock(); @@ -293,8 +293,8 @@ public void DiscoverTestsShouldCollectMetrics() IRequestData? 
actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -343,8 +343,8 @@ public void DiscoverTestsShouldCollectTargetDeviceLocalMachineIfTargetDeviceStri IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -387,8 +387,8 @@ public void DiscoverTestsShouldCollectTargetDeviceIfTargetDeviceIsDevice() IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -431,8 +431,8 @@ public void DiscoverTestsShouldCollectTargetDeviceIfTargetDeviceIsEmulator() IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -475,8 +475,8 @@ public void DiscoverTestsShouldCollectCommands() IRequestData? 
actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -531,8 +531,8 @@ public void DiscoverTestsShouldCollectTestSettings() IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -578,8 +578,8 @@ public void DiscoverTestsShouldCollectVsmdiFile() IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -625,8 +625,8 @@ public void DiscoverTestsShouldCollectTestRunConfigFile() IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -671,8 +671,8 @@ public void DiscoverTestsShouldUpdateFrameworkAndPlatformIfNotSpecifiedInDesignM .Returns(new FrameworkName(Constants.DotNetFramework46)); DiscoveryCriteria? 
actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -705,8 +705,8 @@ public void DiscoverTestsShouldNotUpdateFrameworkAndPlatformIfSpecifiedInDesignM .Returns(new FrameworkName(Constants.DotNetFramework451)); DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -737,8 +737,8 @@ public void DiscoverTestsShouldUpdateFrameworkAndPlatformInCommandLineScenariosI .Returns(new FrameworkName(Constants.DotNetFramework46)); DiscoveryCriteria? 
actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); _mockAssemblyMetadataProvider.Verify(a => a.GetArchitecture(It.IsAny())); @@ -749,7 +749,7 @@ public void DiscoverTestsShouldUpdateFrameworkAndPlatformInCommandLineScenariosI } [TestMethod] - public void DiscoverTestsShouldNotUpdateFrameworkAndPlatformInCommandLineScenariosIfSpecifiedButInferred() + public void DiscoverTestsShouldNotInferAndUpdateFrameworkAndPlatformInCommandLineScenariosIfSpecified() { var payload = new DiscoveryRequestPayload() { @@ -762,6 +762,8 @@ public void DiscoverTestsShouldNotUpdateFrameworkAndPlatformInCommandLineScenari " }; _commandLineOptions.IsDesignMode = false; + + // specified architecture _commandLineOptions.TargetFrameworkVersion = Framework.DefaultFramework; _commandLineOptions.TargetArchitecture = Architecture.X86; _mockAssemblyMetadataProvider.Setup(a => a.GetArchitecture(It.IsAny())) @@ -771,17 +773,20 @@ public void DiscoverTestsShouldNotUpdateFrameworkAndPlatformInCommandLineScenari DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); _mockTestPlatform - .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); + + // we infer the architecture and framework, so we can print warning when they don't match settings. _mockAssemblyMetadataProvider.Verify(a => a.GetArchitecture(It.IsAny()), Times.Once); _mockAssemblyMetadataProvider.Verify(a => a.GetFrameWork(It.IsAny()), Times.Once); + // but we don't update the settings, to keep what user specified Assert.IsFalse(actualDiscoveryCriteria!.RunSettings.Contains(Constants.DotNetFramework46)); - Assert.IsFalse(actualDiscoveryCriteria.RunSettings.Contains(nameof(Architecture.ARM))); + Assert.IsFalse(actualDiscoveryCriteria!.RunSettings.Contains(nameof(Architecture.ARM))); } [TestMethod] @@ -850,8 +855,8 @@ public void RunTestsShouldReadTheBatchSizeFromSettingsAndSetItForTestRunCriteria TestRunCriteria? 
actualTestRunCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); Assert.AreEqual(15, actualTestRunCriteria!.FrequencyOfRunStatsChangeEvent); @@ -874,8 +879,8 @@ public void RunTestsShouldNotThrowForFramework35() TestRunCriteria? actualTestRunCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockDiscoveryRequest.Object); _mockAssemblyMetadataProvider.Setup(a => a.GetFrameWork(It.IsAny())).Returns(new FrameworkName(Constants.DotNetFramework35)); var mockRunEventsRegistrar = new Mock(); @@ -899,8 +904,8 @@ public void RunTestsShouldPassSameProtocolConfigInRequestData() var mockProtocolConfig = new ProtocolConfig { Version = 6 }; IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); // Act. _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, mockProtocolConfig); @@ -923,8 +928,8 @@ public void RunTestsShouldCollectCommands() var mockProtocolConfig = new ProtocolConfig { Version = 6 }; IRequestData? 
actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -988,8 +993,8 @@ public void RunTestsShouldCollectTelemetryForLegacySettings() var mockProtocolConfig = new ProtocolConfig { Version = 6 }; IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -1036,8 +1041,8 @@ public void RunTestsShouldCollectTelemetryForTestSettingsEmbeddedInsideRunSettin var mockProtocolConfig = new ProtocolConfig { Version = 6 }; IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -1082,8 +1087,8 @@ public void RunTestsShouldCollectMetrics() var mockProtocolConfig = new ProtocolConfig { Version = 6 }; IRequestData? actualRequestData = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualRequestData = requestData).Returns(mockDiscoveryRequest.Object); _testRequestManager = new TestRequestManager( CommandLineOptions.Instance, @@ -1122,8 +1127,8 @@ public void RunTestsWithSourcesShouldCallTestPlatformAndSucceed() var createRunRequestCalled = 0; TestRunCriteria? 
observedCriteria = null; var mockRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => { createRunRequestCalled++; observedCriteria = runCriteria; @@ -1180,7 +1185,7 @@ public void RunTestsMultipleCallsShouldNotRunInParallel() }; var mockRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())) + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Returns(mockRunRequest.Object); var mockRunEventsRegistrar1 = new Mock(); @@ -1319,11 +1324,11 @@ public void DiscoverTestsShouldUpdateDesignModeAndCollectSourceInformation(bool var designmode = $"{designModeValue}"; _mockTestPlatform.Verify( - tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(designmode)), It.IsAny())); + tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(designmode)), It.IsAny(), It.IsAny>())); var collectSourceInformation = $"{designModeValue}"; _mockTestPlatform.Verify( - tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(collectSourceInformation)), It.IsAny())); + tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(collectSourceInformation)), It.IsAny(), It.IsAny>())); } [TestMethod] @@ -1337,7 +1342,7 @@ public void DiscoverTestsShouldNotUpdateDesignModeIfUserHasSetDesignModeInRunSet var designmode = "False"; _mockTestPlatform.Verify( - tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(designmode)), It.IsAny())); + tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(designmode)), It.IsAny(), It.IsAny>())); } [DataTestMethod] @@ -1357,7 +1362,7 @@ public void RunTestsShouldUpdateDesignModeIfRunnerIsInDesignMode(bool designMode _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); var designmode = $"{designModeValue}"; - _mockTestPlatform.Verify(tp => tp.CreateTestRunRequest(It.IsAny(), It.Is(rc => rc.TestRunSettings.Contains(designmode)), It.IsAny())); + _mockTestPlatform.Verify(tp => tp.CreateTestRunRequest(It.IsAny(), It.Is(rc => rc.TestRunSettings.Contains(designmode)), It.IsAny(), It.IsAny>())); } [DataTestMethod] @@ -1372,7 +1377,7 @@ public void DiscoverTestsShouldNotUpdateCollectSourceInformationIfUserHasSetItIn var collectSourceInformation = $"{val}"; _mockTestPlatform.Verify( - tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(collectSourceInformation)), It.IsAny())); + tp => tp.CreateDiscoveryRequest(It.IsAny(), It.Is(dc => dc.RunSettings.Contains(collectSourceInformation)), It.IsAny(), It.IsAny>())); } [TestMethod] @@ -1396,8 +1401,8 @@ public void RunTestsShouldShouldUpdateFrameworkAndPlatformIfNotSpecifiedInDesign .Returns(new FrameworkName(Constants.DotNetFramework46)); TestRunCriteria? 
actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); @@ -1410,11 +1415,12 @@ public void RunTestsShouldShouldUpdateFrameworkAndPlatformIfNotSpecifiedInDesign } [TestMethod] - public void RunTestsShouldNotUpdateFrameworkAndPlatformIfSpecifiedInDesignModeButInferred() + public void RunTestsShouldNotUpdateFrameworkAndPlatformIfSpecifiedInDesignMode() { var payload = new TestRunRequestPayload() { Sources = new List() { "a.dll" }, + // specify architecture and framework RunSettings = $@" @@ -1432,16 +1438,18 @@ public void RunTestsShouldNotUpdateFrameworkAndPlatformIfSpecifiedInDesignModeBu .Returns(new FrameworkName(Constants.DotNetFramework451)); TestRunCriteria? actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); + // infer them so we can print warning when dlls are not compatible with runsettings _mockAssemblyMetadataProvider.Verify(a => a.GetArchitecture(It.IsAny()), Times.Once); _mockAssemblyMetadataProvider.Verify(a => a.GetFrameWork(It.IsAny()), Times.Once); + // but don't update runsettings because we want to keep what user specified Assert.IsTrue(actualTestRunCriteria!.TestRunSettings.Contains(Constants.DotNetFramework46)); - Assert.IsTrue(actualTestRunCriteria.TestRunSettings.Contains(nameof(Architecture.ARM))); + Assert.IsTrue(actualTestRunCriteria!.TestRunSettings.Contains(nameof(Architecture.ARM))); } [TestMethod] @@ -1449,11 +1457,12 @@ public void RunTestsShouldNotUpdateFrameworkAndPlatformIfSpecifiedInDesignModeBu [DataRow("X86")] [DataRow("ARM")] [DataRow("aRm")] - public void RunTestsShouldNotUpdatePlatformIfSpecifiedInDesignModeButInferred(string targetPlatform) + public void RunTestsShouldNotUpdatePlatformIfSpecifiedInDesignMode(string targetPlatform) { var payload = new TestRunRequestPayload() { Sources = new List() { "a.dll" }, + // Specify platform RunSettings = $@" @@ -1470,14 +1479,16 @@ public void RunTestsShouldNotUpdatePlatformIfSpecifiedInDesignModeButInferred(st .Returns(new FrameworkName(Constants.DotNetFramework451)); TestRunCriteria? 
actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); + // infer platform and framework so we can print warnings when DLLs are not compatible with runsettings _mockAssemblyMetadataProvider.Verify(a => a.GetArchitecture(It.IsAny()), Times.Once); _mockAssemblyMetadataProvider.Verify(a => a.GetFrameWork(It.IsAny()), Times.Once); + // don't update it in runsettings to keep what the user provided Assert.IsTrue(actualTestRunCriteria!.TestRunSettings.Contains(targetPlatform)); } @@ -1502,8 +1513,8 @@ public void RunTestsShouldUpdateFrameworkAndPlatformInCommandLineScenarios() .Returns(new FrameworkName(Constants.DotNetFramework46)); TestRunCriteria? actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); @@ -1515,7 +1526,7 @@ public void RunTestsShouldUpdateFrameworkAndPlatformInCommandLineScenarios() } [TestMethod] - public void RunTestsShouldNotpdateFrameworkAndPlatformInCommandLineScenariosIfSpecifiedButInferred() + public void RunTestsShouldNotUpdateFrameworkAndPlatformInRunsettingsIfSpecifiedByCommandLine() { var payload = new TestRunRequestPayload() { @@ -1529,22 +1540,27 @@ public void RunTestsShouldNotpdateFrameworkAndPlatformInCommandLineScenariosIfSp }; _commandLineOptions.IsDesignMode = false; + + // specify architecture and framework _commandLineOptions.TargetArchitecture = Architecture.X86; _commandLineOptions.TargetFrameworkVersion = Framework.DefaultFramework; + _mockAssemblyMetadataProvider.Setup(a => a.GetArchitecture(It.IsAny())) .Returns(Architecture.ARM); _mockAssemblyMetadataProvider.Setup(a => a.GetFrameWork(It.IsAny())) .Returns(new FrameworkName(Constants.DotNetFramework46)); TestRunCriteria?
actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); + // infer them so we can print warnings when the assemblies are not compatible _mockAssemblyMetadataProvider.Verify(a => a.GetArchitecture(It.IsAny()), Times.Once); _mockAssemblyMetadataProvider.Verify(a => a.GetFrameWork(It.IsAny()), Times.Once); + // but don't update them in runsettings so we keep what user specified Assert.IsFalse(actualTestRunCriteria!.TestRunSettings.Contains(Constants.DotNetFramework46)); Assert.IsFalse(actualTestRunCriteria.TestRunSettings.Contains(nameof(Architecture.ARM))); } @@ -1571,14 +1587,14 @@ public void RunTestsWithTestCasesShouldUpdateFrameworkAndPlatformIfNotSpecifiedI List archSources = new(), fxSources = new(); _commandLineOptions.IsDesignMode = true; - _mockAssemblyMetadataProvider.Setup(a => a.GetArchitecture(It.IsAny())).Callback(source => archSources.Add(source)) + _mockAssemblyMetadataProvider.Setup(a => a.GetArchitecture(It.IsAny())).Callback(archSources.Add) .Returns(Architecture.ARM); - _mockAssemblyMetadataProvider.Setup(a => a.GetFrameWork(It.IsAny())).Callback(source => fxSources.Add(source)) + _mockAssemblyMetadataProvider.Setup(a => a.GetFrameWork(It.IsAny())).Callback(fxSources.Add) .Returns(new FrameworkName(Constants.DotNetFramework46)); TestRunCriteria? actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); @@ -1712,8 +1728,8 @@ public void RunTestsShouldAddConsoleLoggerInRunSettingsInNonDesignMode() _commandLineOptions.IsDesignMode = false; TestRunCriteria? 
actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); @@ -1751,8 +1767,8 @@ public void RunTestsShouldAddConsoleLoggerInRunSettingsIfDesignModeSetFalseInRun _commandLineOptions.IsDesignMode = true; TestRunCriteria? actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); var loggerSettingsList = XmlRunSettingsUtilities.GetLoggerRunSettings(actualTestRunCriteria!.TestRunSettings).LoggerSettingsList; @@ -1791,9 +1807,9 @@ public void DiscoverTestsShouldAddConsoleLoggerInRunSettingsIfDesignModeSetFalse DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); _mockTestPlatform - .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -1825,8 +1841,8 @@ public void RunTestsShouldNotAddConsoleLoggerInRunSettingsInDesignMode() _commandLineOptions.IsDesignMode = false; TestRunCriteria? 
actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); Assert.IsFalse(actualTestRunCriteria!.TestRunSettings.Contains("LoggerRunSettings")); @@ -1849,9 +1865,9 @@ public void DiscoverTestsShouldAddConsoleLoggerInRunSettingsInNonDesignMode() DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); _mockTestPlatform - .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -1881,9 +1897,9 @@ public void DiscoverTestsShouldNotAddConsoleLoggerInRunSettingsInDesignMode() DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); _mockTestPlatform - .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -1923,8 +1939,8 @@ public void RunTestsShouldOverrideOnlyAssemblyNameIfConsoleLoggerAlreadyPresentI _commandLineOptions.IsDesignMode = false; TestRunCriteria? 
actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); var loggerSettingsList = XmlRunSettingsUtilities.GetLoggerRunSettings(actualTestRunCriteria!.TestRunSettings).LoggerSettingsList; @@ -1972,9 +1988,9 @@ public void DiscoverTestsShouldOverrideOnlyAssemblyNameIfConsoleLoggerAlreadyPre DiscoveryCriteria? actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); _mockTestPlatform - .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -2024,8 +2040,8 @@ public void RunTestsShouldOverrideOnlyAssemblyNameIfConsoleLoggerAlreadyPresentI _commandLineOptions.IsDesignMode = false; TestRunCriteria? actualTestRunCriteria = null; var mockTestRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualTestRunCriteria = runCriteria).Returns(mockTestRunRequest.Object); _testRequestManager.RunTests(payload, new Mock().Object, new Mock().Object, _protocolConfig); var loggerSettingsList = XmlRunSettingsUtilities.GetLoggerRunSettings(actualTestRunCriteria!.TestRunSettings).LoggerSettingsList; @@ -2073,9 +2089,9 @@ public void DiscoverTestsShouldOverrideOnlyAssemblyNameIfConsoleLoggerAlreadyPre DiscoveryCriteria? 
actualDiscoveryCriteria = null; var mockDiscoveryRequest = new Mock(); _mockTestPlatform - .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())) + .Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())) .Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => actualDiscoveryCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); _testRequestManager.DiscoverTests(payload, new Mock().Object, _protocolConfig); @@ -2202,10 +2218,11 @@ public void StartTestSessionShouldPassCorrectTelemetryOptedInOptionToTestPlatfor tp => tp.StartTestSession( It.IsAny(), It.IsAny(), - It.IsAny())) + It.IsAny(), + It.IsAny>())) .Returns(true) .Callback( - (IRequestData rd, StartTestSessionCriteria _, ITestSessionEventsHandler _) => Assert.IsTrue(rd.IsTelemetryOptedIn)); + (IRequestData rd, StartTestSessionCriteria _, ITestSessionEventsHandler _, Dictionary _) => Assert.IsTrue(rd.IsTelemetryOptedIn)); Environment.SetEnvironmentVariable("VSTEST_TELEMETRY_OPTEDIN", "1"); @@ -2248,10 +2265,11 @@ public void StartTestSessionShouldUpdateSettings() tp => tp.StartTestSession( It.IsAny(), It.IsAny(), - It.IsAny())) + It.IsAny(), + It.IsAny>())) .Returns(true) .Callback( - (IRequestData _, StartTestSessionCriteria criteria, ITestSessionEventsHandler _) => + (IRequestData _, StartTestSessionCriteria criteria, ITestSessionEventsHandler _, Dictionary _) => { Assert.IsTrue(criteria.RunSettings.Contains(Constants.DotNetFramework46)); Assert.IsTrue(criteria.RunSettings.Contains(nameof(Architecture.ARM))); @@ -2318,7 +2336,8 @@ public void StartTestSessionShouldBeSuccessful() tp => tp.StartTestSession( It.IsAny(), It.IsAny(), - It.IsAny())) + It.IsAny(), + It.IsAny>())) .Returns(true); _testRequestManager.StartTestSession( @@ -2510,8 +2529,8 @@ private void RunTestsIfThrowsExceptionShouldThrowOut(Exception exception) var createRunRequestCalled = 0; TestRunCriteria? observedCriteria = null; var mockRunRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options) => + _mockTestPlatform.Setup(mt => mt.CreateTestRunRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, TestRunCriteria runCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => { createRunRequestCalled++; observedCriteria = runCriteria; @@ -2535,8 +2554,8 @@ private void DiscoverTestsIfThrowsExceptionShouldThrowOut(Exception exception) DiscoveryCriteria? 
observedCriteria = null; var mockDiscoveryRequest = new Mock(); - _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny())).Callback( - (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options) => observedCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); + _mockTestPlatform.Setup(mt => mt.CreateDiscoveryRequest(It.IsAny(), It.IsAny(), It.IsAny(), It.IsAny>())).Callback( + (IRequestData requestData, DiscoveryCriteria discoveryCriteria, TestPlatformOptions options, Dictionary sourceToSourceDetailMap) => observedCriteria = discoveryCriteria).Returns(mockDiscoveryRequest.Object); mockDiscoveryRequest.Setup(mr => mr.DiscoverAsync()).Throws(exception);
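
For reference, the setups above all move from three-parameter to four-parameter overloads of CreateDiscoveryRequest / CreateTestRunRequest / StartTestSession. The rendering of this diff strips angle-bracketed generic arguments (hence "It.IsAny()" and "It.IsAny>()"); judging from the typed callback parameters, the new fourth argument is a source-to-detail map named sourceToSourceDetailMap, presumably Dictionary<string, SourceDetail>. The sketch below is not the repository's code: it uses simplified stand-in interfaces (object/string in place of IRequestData, DiscoveryCriteria, TestPlatformOptions, SourceDetail) only to show the Moq shape the tests migrate to, where the Setup expression and the Callback delegate must both list all four parameters.

using System;
using System.Collections.Generic;
using Moq;

public interface IFakeDiscoveryRequest { }

public interface IFakeTestPlatform
{
    // Stand-in with the same four-parameter shape as the new CreateDiscoveryRequest overload.
    IFakeDiscoveryRequest CreateDiscoveryRequest(
        object requestData,
        string discoveryCriteria,
        object options,
        Dictionary<string, object> sourceToSourceDetailMap);
}

public static class FourArgumentSetupSketch
{
    public static void Demo()
    {
        var mockRequest = new Mock<IFakeDiscoveryRequest>();
        var mockPlatform = new Mock<IFakeTestPlatform>();

        string? observedCriteria = null;

        // Both the Setup expression and the Callback lambda enumerate all four parameters;
        // a mismatch in parameter count makes Moq throw at callback time.
        mockPlatform
            .Setup(tp => tp.CreateDiscoveryRequest(
                It.IsAny<object>(),
                It.IsAny<string>(),
                It.IsAny<object>(),
                It.IsAny<Dictionary<string, object>>()))
            .Callback((object rd, string criteria, object options, Dictionary<string, object> map)
                => observedCriteria = criteria)
            .Returns(mockRequest.Object);

        mockPlatform.Object.CreateDiscoveryRequest(
            new object(), "<RunSettings />", new object(), new Dictionary<string, object>());

        Console.WriteLine(observedCriteria); // prints "<RunSettings />"
    }
}

Only the parameter list of the Setup and Callback changes throughout the diff; the Returns value stays the mocked request object, so each test still captures the criteria it asserts on.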
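
The diff also simplifies the GetArchitecture/GetFrameWork callbacks from a lambda (source => archSources.Add(source)) to a method group (archSources.Add). A minimal sketch of why that compiles, assuming the stripped generic argument was the assembly-path string and using a stand-in interface rather than the real IAssemblyMetadataProvider:

using System;
using System.Collections.Generic;
using Moq;

public interface IMetadataProviderStandIn
{
    // Stand-in for a provider that inspects an assembly path.
    string GetArchitecture(string assemblyPath);
}

public static class MethodGroupCallbackSketch
{
    public static void Demo()
    {
        var archSources = new List<string>();
        var mock = new Mock<IMetadataProviderStandIn>();

        // List<string>.Add converts directly to Action<string>, so the wrapping lambda
        // is redundant and the method group can be passed to Callback<string>.
        mock.Setup(m => m.GetArchitecture(It.IsAny<string>()))
            .Callback<string>(archSources.Add)
            .Returns("ARM");

        mock.Object.GetArchitecture("a.dll");
        Console.WriteLine(string.Join(",", archSources)); // prints "a.dll"
    }
}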