From f2494ba9e16f905723f40f19252d7511727be97c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Bigorajski?= <72501021+lukasz-bigorajski@users.noreply.github.com> Date: Tue, 4 Jun 2024 15:25:30 +0200 Subject: [PATCH 01/17] Fix statistics configuration (#6126) - cherry pick MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --------- Co-authored-by: Ɓukasz Bigorajski --- designer/client/src/actions/nk/assignSettings.ts | 1 - .../pl/touk/nussknacker/ui/api/SettingsResources.scala | 10 +++++----- .../ui/server/AkkaHttpBasedRouteProvider.scala | 3 ++- .../nussknacker/ui/api/SettingsResourcesSpec.scala | 8 ++++++-- docs/Changelog.md | 4 ++++ 5 files changed, 17 insertions(+), 9 deletions(-) diff --git a/designer/client/src/actions/nk/assignSettings.ts b/designer/client/src/actions/nk/assignSettings.ts index ce754179ca7..89ce9c01e33 100644 --- a/designer/client/src/actions/nk/assignSettings.ts +++ b/designer/client/src/actions/nk/assignSettings.ts @@ -10,7 +10,6 @@ export type MetricsType = { export type UsageStatisticsReports = { enabled: boolean; - url?: string; }; export type SurveySettings = { diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/SettingsResources.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/SettingsResources.scala index 5607ee55fb7..1015bbe0163 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/SettingsResources.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/SettingsResources.scala @@ -5,7 +5,7 @@ import cats.data.Validated import de.heikoseeberger.akkahttpcirce.FailFastCirceSupport import io.circe.{Decoder, Encoder} import io.circe.generic.JsonCodec -import pl.touk.nussknacker.ui.config.{AnalyticsConfig, FeatureTogglesConfig} +import pl.touk.nussknacker.ui.config.{AnalyticsConfig, FeatureTogglesConfig, UsageStatisticsReportsConfig} import pl.touk.nussknacker.engine.api.CirceUtil.codecs._ import java.net.URL @@ -14,7 +14,8 @@ import scala.concurrent.ExecutionContext class SettingsResources( config: FeatureTogglesConfig, authenticationMethod: String, - analyticsConfig: Option[AnalyticsConfig] + analyticsConfig: Option[AnalyticsConfig], + usageStatisticsReportsConfig: UsageStatisticsReportsConfig )(implicit ec: ExecutionContext) extends Directives with FailFastCirceSupport @@ -36,8 +37,7 @@ class SettingsResources( intervalTimeSettings = config.intervalTimeSettings, testDataSettings = config.testDataSettings, redirectAfterArchive = config.redirectAfterArchive, - // TODO: It's disabled temporarily until we remove it on FE. We can remove it once it has been removed on FE. 
- usageStatisticsReports = UsageStatisticsReportsSettings(false, None) + usageStatisticsReports = UsageStatisticsReportsSettings(usageStatisticsReportsConfig.enabled) ) val authenticationSettings = AuthenticationSettings( @@ -145,4 +145,4 @@ object TopTabType extends Enumeration { analytics: Option[AnalyticsSettings] ) -@JsonCodec final case class UsageStatisticsReportsSettings(enabled: Boolean, url: Option[String]) +@JsonCodec final case class UsageStatisticsReportsSettings(enabled: Boolean) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala index 11e77be15c8..c38680bd906 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala @@ -442,7 +442,8 @@ class AkkaHttpBasedRouteProvider( val settingsResources = new SettingsResources( featureTogglesConfig, authenticationResources.name, - analyticsConfig + analyticsConfig, + usageStatisticsReportsConfig ) val apiResourcesWithoutAuthentication: List[Route] = List( settingsResources.publicRoute(), diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/SettingsResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/SettingsResourcesSpec.scala index 512673c94dc..a3123291b84 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/SettingsResourcesSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/SettingsResourcesSpec.scala @@ -9,7 +9,7 @@ import org.scalatest.{BeforeAndAfterAll, BeforeAndAfterEach} import pl.touk.nussknacker.test.PatientScalaFutures import pl.touk.nussknacker.test.base.it.NuResourcesTest import pl.touk.nussknacker.test.utils.domain.TestFactory.withoutPermissions -import pl.touk.nussknacker.ui.config.AnalyticsConfig +import pl.touk.nussknacker.ui.config.{AnalyticsConfig, UsageStatisticsReportsConfig} import pl.touk.nussknacker.ui.security.basicauth.BasicAuthenticationConfiguration class SettingsResourcesSpec @@ -25,11 +25,14 @@ class SettingsResourcesSpec private val authenticationConfig: BasicAuthenticationConfiguration = BasicAuthenticationConfiguration.create(testConfig) private val analyticsConfig: Option[AnalyticsConfig] = AnalyticsConfig(testConfig) + private val usageStatisticsReportsConfig: UsageStatisticsReportsConfig = + UsageStatisticsReportsConfig(true, None, None) private val settingsRoute = new SettingsResources( featureTogglesConfig, authenticationConfig.name, - analyticsConfig + analyticsConfig, + usageStatisticsReportsConfig ) // Values are exists at test/resources/application.conf @@ -44,6 +47,7 @@ class SettingsResourcesSpec data.intervalTimeSettings.processes shouldBe intervalTimeProcesses data.intervalTimeSettings.healthCheck shouldBe intervalTimeHealthCheck + data.usageStatisticsReports.enabled shouldBe true } } diff --git a/docs/Changelog.md b/docs/Changelog.md index aa864bca5db..befe54f56e3 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -1,5 +1,9 @@ # Changelog +1.15.1 (4 June 2024) +------------------------- +* [#6126](https://github.com/TouK/nussknacker/pull/6126) Fix statistics configuration. + 1.15.0 (17 May 2024) ------------------------- * [#5620](https://github.com/TouK/nussknacker/pull/5620) Nodes Api OpenApi-based documentation (e.g. 
`https://demo.nussknacker.io/api/docs`) From 92779e75279a7b410a6281abb5737c9e18b3ee51 Mon Sep 17 00:00:00 2001 From: ForrestFairy Date: Wed, 5 Jun 2024 08:53:45 +0200 Subject: [PATCH 02/17] [NU-1296] Ad-hoc tests with ververica in staging (#6127) - cherry pick * [NU-1296] Ad-hoc tests with ververica (#5611) * With a new default parameter "isTest" in a couple of places ad-hoc test works without adding "flink-dropwizard-metrics-deps" to classpath * Removed 'isTest' param as we can use already present ComponentUseCase * One place was hardcoded to give dummy TestMetrics and this caused the test to fail, now it also matches on the ComponentUseCase Also some cleaning * Ad-hoc test goes through as everything gets dummy metrics in FlinkEngineRuntimeContextImpl Need to find a way to parse ComponentUseCase, so it still will be serializable on Flink * MetricsSpec passes with this change * Work in progress, works everywhere except for FlinkProcessRegistrar * FlinkProcessRegistrar now also works - there is no serialization error, parts of compilerData have to be initialized before using it in function * Getting MetricsProviderForScenario for FlinkEngineRuntimeContextImpl and SourceMetricsFunction is moved to MetricsProviderForFlink Also renamed FlinkEngineRuntimeContextImpl method to 'apply' as it is used to create new instance of it * Name changed as we create MetricProvider, not just get it from somewhere --------- Co-authored-by: Szymon Bogusz --- .../ProcessingTypeDataReader.scala | 9 ++++++- docs/Changelog.md | 3 ++- .../FlinkEngineRuntimeContextImpl.scala | 24 ++++++++++++++++--- .../compiler/FlinkProcessCompilerData.scala | 6 +++-- .../compiler/MetricsProviderForFlink.scala | 19 +++++++++++++++ .../registrar/FlinkProcessRegistrar.scala | 15 ++++++++---- .../registrar/SourceMetricsFunction.scala | 8 ++++--- 7 files changed, 70 insertions(+), 14 deletions(-) create mode 100644 engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/MetricsProviderForFlink.scala diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataReader.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataReader.scala index 53839aa59e2..c6ab9a5752c 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataReader.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/ProcessingTypeDataReader.scala @@ -1,7 +1,14 @@ package pl.touk.nussknacker.ui.process.processingtype import com.typesafe.scalalogging.LazyLogging -import pl.touk.nussknacker.engine._ +import pl.touk.nussknacker.engine.{ + ConfigWithUnresolvedVersion, + DeploymentManagerDependencies, + DeploymentManagerProvider, + ModelData, + ModelDependencies, + ProcessingTypeConfig +} import pl.touk.nussknacker.engine.api.process.ProcessingType import pl.touk.nussknacker.engine.deployment.EngineSetupName import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap diff --git a/docs/Changelog.md b/docs/Changelog.md index befe54f56e3..f0516761daf 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -1,8 +1,9 @@ # Changelog -1.15.1 (4 June 2024) +1.15.1 (5 June 2024) ------------------------- * [#6126](https://github.com/TouK/nussknacker/pull/6126) Fix statistics configuration. 
+* [#6127](https://github.com/TouK/nussknacker/pull/6127) Ad-hoc tests available in scenarios without `flink-dropwizard-metrics-deps` in classPath 1.15.0 (17 May 2024) ------------------------- diff --git a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkEngineRuntimeContextImpl.scala b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkEngineRuntimeContextImpl.scala index d4ebaa1fb0f..53fc8dc2ea8 100644 --- a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkEngineRuntimeContextImpl.scala +++ b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkEngineRuntimeContextImpl.scala @@ -2,15 +2,33 @@ package pl.touk.nussknacker.engine.process.compiler import org.apache.flink.api.common.functions.RuntimeContext import pl.touk.nussknacker.engine.api.JobData +import pl.touk.nussknacker.engine.api.process.ComponentUseCase import pl.touk.nussknacker.engine.api.runtimecontext.{ContextIdGenerator, IncContextIdGenerator} import pl.touk.nussknacker.engine.flink.api.FlinkEngineRuntimeContext +import pl.touk.nussknacker.engine.process.compiler.MetricsProviderForFlink.createMetricsProvider import pl.touk.nussknacker.engine.util.metrics.MetricsProviderForScenario -case class FlinkEngineRuntimeContextImpl(jobData: JobData, runtimeContext: RuntimeContext) - extends FlinkEngineRuntimeContext { - override val metricsProvider: MetricsProviderForScenario = new FlinkMetricsProviderForScenario(runtimeContext) +case class FlinkEngineRuntimeContextImpl( + jobData: JobData, + runtimeContext: RuntimeContext, + metricsProvider: MetricsProviderForScenario +) extends FlinkEngineRuntimeContext { override def contextIdGenerator(nodeId: String): ContextIdGenerator = new IncContextIdGenerator(jobData.metaData.name.value + "-" + nodeId + "-" + runtimeContext.getIndexOfThisSubtask) } + +object FlinkEngineRuntimeContextImpl { + +// This creates FlinkEngineRuntimeContextImpl with correct metricsProviderForScenario based on ComponentUseCase + def apply( + jobData: JobData, + runtimeContext: RuntimeContext, + componentUseCase: ComponentUseCase + ): FlinkEngineRuntimeContextImpl = { + val properMetricsProvider = createMetricsProvider(componentUseCase, runtimeContext) + new FlinkEngineRuntimeContextImpl(jobData, runtimeContext, properMetricsProvider) + } + +} diff --git a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkProcessCompilerData.scala b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkProcessCompilerData.scala index d9bf19554ce..5a9ab89750d 100644 --- a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkProcessCompilerData.scala +++ b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/FlinkProcessCompilerData.scala @@ -39,7 +39,7 @@ class FlinkProcessCompilerData( def open(runtimeContext: RuntimeContext, nodesToUse: List[_ <: NodeData]): Unit = { val lifecycle = compilerData.lifecycle(nodesToUse) lifecycle.foreach { - _.open(FlinkEngineRuntimeContextImpl(jobData, runtimeContext)) + _.open(FlinkEngineRuntimeContextImpl(jobData, runtimeContext, componentUseCase)) } } @@ -74,7 +74,9 @@ class FlinkProcessCompilerData( def restartStrategy: RestartStrategies.RestartStrategyConfiguration = exceptionHandler.restartStrategy def prepareExceptionHandler(runtimeContext: RuntimeContext): FlinkExceptionHandler = { - 
exceptionHandler.open(FlinkEngineRuntimeContextImpl(jobData, runtimeContext)) + exceptionHandler.open( + FlinkEngineRuntimeContextImpl(jobData, runtimeContext, componentUseCase) + ) exceptionHandler } diff --git a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/MetricsProviderForFlink.scala b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/MetricsProviderForFlink.scala new file mode 100644 index 00000000000..60457f82db0 --- /dev/null +++ b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/compiler/MetricsProviderForFlink.scala @@ -0,0 +1,19 @@ +package pl.touk.nussknacker.engine.process.compiler + +import org.apache.flink.api.common.functions.RuntimeContext +import pl.touk.nussknacker.engine.api.process.ComponentUseCase +import pl.touk.nussknacker.engine.util.metrics.{MetricsProviderForScenario, NoOpMetricsProviderForScenario} + +object MetricsProviderForFlink { + + def createMetricsProvider( + componentUseCase: ComponentUseCase, + runtimeContext: RuntimeContext + ): MetricsProviderForScenario = { + componentUseCase match { + case ComponentUseCase.TestRuntime => NoOpMetricsProviderForScenario + case _ => new FlinkMetricsProviderForScenario(runtimeContext) + } + } + +} diff --git a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/FlinkProcessRegistrar.scala b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/FlinkProcessRegistrar.scala index 62b9f50f8d7..0a907feeaa8 100644 --- a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/FlinkProcessRegistrar.scala +++ b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/FlinkProcessRegistrar.scala @@ -140,12 +140,14 @@ class FlinkProcessRegistrar( ): FlinkCustomNodeContext = { val exceptionHandlerPreparer = (runtimeContext: RuntimeContext) => compilerDataForProcessPart(None)(runtimeContext.getUserCodeClassLoader).prepareExceptionHandler(runtimeContext) - val jobData = compilerData.jobData + val jobData = compilerData.jobData + val componentUseCase = compilerData.componentUseCase + FlinkCustomNodeContext( jobData, nodeComponentId.nodeId, compilerData.processTimeout, - convertToEngineRuntimeContext = FlinkEngineRuntimeContextImpl(jobData, _), + convertToEngineRuntimeContext = FlinkEngineRuntimeContextImpl(jobData, _, componentUseCase), lazyParameterHelper = new FlinkLazyParameterFunctionHelper( nodeComponentId, exceptionHandlerPreparer, @@ -182,7 +184,7 @@ class FlinkProcessRegistrar( val start = source .contextStream(env, nodeContext(nodeComponentInfoFrom(part), Left(ValidationContext.empty))) - .process(new SourceMetricsFunction(part.id), contextTypeInformation) + .process(new SourceMetricsFunction(part.id, compilerData.componentUseCase), contextTypeInformation) val asyncAssigned = registerInterpretationPart(start, part, InterpretationName) @@ -377,7 +379,12 @@ class FlinkProcessRegistrar( } else { val ti = InterpretationResultTypeInformation.create(typeInformationDetection, outputContexts) stream.flatMap( - new SyncInterpretationFunction(compilerDataForProcessPart(Some(part)), node, validationContext, useIOMonad), + new SyncInterpretationFunction( + compilerDataForProcessPart(Some(part)), + node, + validationContext, + useIOMonad + ), ti ) } diff --git a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/SourceMetricsFunction.scala 
b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/SourceMetricsFunction.scala index 386c400bda2..5174ba94be6 100644 --- a/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/SourceMetricsFunction.scala +++ b/engine/flink/executor/src/main/scala/pl/touk/nussknacker/engine/process/registrar/SourceMetricsFunction.scala @@ -3,16 +3,18 @@ package pl.touk.nussknacker.engine.process.registrar import org.apache.flink.configuration.Configuration import org.apache.flink.streaming.api.functions.ProcessFunction import org.apache.flink.util.Collector -import pl.touk.nussknacker.engine.process.compiler.FlinkMetricsProviderForScenario +import pl.touk.nussknacker.engine.api.process.ComponentUseCase +import pl.touk.nussknacker.engine.process.compiler.MetricsProviderForFlink.createMetricsProvider import pl.touk.nussknacker.engine.util.metrics.common.OneSourceMetrics -private[registrar] class SourceMetricsFunction[T](sourceId: String) extends ProcessFunction[T, T] { +private[registrar] class SourceMetricsFunction[T](sourceId: String, componentUseCase: ComponentUseCase) + extends ProcessFunction[T, T] { @transient private var metrics: OneSourceMetrics = _ override def open(parameters: Configuration): Unit = { metrics = new OneSourceMetrics(sourceId) - val metricsProvider = new FlinkMetricsProviderForScenario(getRuntimeContext) + val metricsProvider = createMetricsProvider(componentUseCase, getRuntimeContext) metrics.registerOwnMetrics(metricsProvider) } From 1e51470f08329cbf22a4a27f8e11f66b304eede4 Mon Sep 17 00:00:00 2001 From: GitHub Action Release bot Date: Wed, 5 Jun 2024 10:32:15 +0000 Subject: [PATCH 03/17] Setting version to 1.15.1 --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index d132801becc..b8308b60108 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "1.15.1-SNAPSHOT" +version in ThisBuild := "1.15.1" From 13a235d35eaf7888700d64c892f7c961990daf9e Mon Sep 17 00:00:00 2001 From: GitHub Action Release bot Date: Wed, 5 Jun 2024 11:16:56 +0000 Subject: [PATCH 04/17] Setting version to 1.15.2-SNAPSHOT --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index b8308b60108..8d30035f422 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "1.15.1" +version in ThisBuild := "1.15.2-SNAPSHOT" From 41325b59ebdd1dca50b047631f7131f181c2e4ac Mon Sep 17 00:00:00 2001 From: Adrian Matwiejuk <73438286+wrzontek@users.noreply.github.com> Date: Fri, 7 Jun 2024 10:54:22 +0200 Subject: [PATCH 05/17] Fix last action data determining (#6134) --- .../DBFetchingProcessRepository.scala | 25 +++++++--- .../repository/ProcessActionRepository.scala | 12 ++--- .../repository/ProcessDBQueryRepository.scala | 4 +- ...sControlCheckingConfigScenarioHelper.scala | 4 ++ .../test/utils/domain/ScenarioHelper.scala | 20 ++++++++ .../ui/api/ProcessesResourcesSpec.scala | 46 +++++++++++++++++++ docs/Changelog.md | 6 +++ 7 files changed, 102 insertions(+), 15 deletions(-) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala index 569a5854785..9a4f22a4995 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala +++ 
b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala @@ -76,10 +76,18 @@ abstract class DBFetchingProcessRepository[F[_]: Monad]( actionRepository .getLastActionPerProcess(ProcessActionState.FinishedStates, Some(ScenarioActionName.StateActions)) ) - // for last deploy action we are not interested in ExecutionFinished deploys - we don't want to show them in the history + // For last deploy action we are interested in Deploys that are Finished (not ExecutionFinished) and that are not Cancelled + // so that the presence of such an action means that the process is currently deployed lastDeployedActionPerProcess <- fetchActionsOrEmpty( - actionRepository.getLastActionPerProcess(Set(ProcessActionState.Finished), Some(Set(ScenarioActionName.Deploy))) - ) + actionRepository + .getLastActionPerProcess( + ProcessActionState.FinishedStates, + Some(Set(ScenarioActionName.Deploy, ScenarioActionName.Cancel)) + ) + ).map(_.filter { case (_, action) => + action.actionName == ScenarioActionName.Deploy && action.state == ProcessActionState.Finished + }) + latestProcesses <- fetchLatestProcessesQuery(query, lastDeployedActionPerProcess.keySet, isDeployed).result } yield latestProcesses .map { case ((_, processVersion), process) => @@ -180,10 +188,13 @@ abstract class DBFetchingProcessRepository[F[_]: Monad]( processVersion = processVersion, lastActionData = actions.headOption, lastStateActionData = actions.find(a => ScenarioActionName.StateActions.contains(a.actionName)), - // for last deploy action we are not interested in ExecutionFinished deploys - we don't want to show them in the history - lastDeployedActionData = actions.headOption.filter(a => - a.actionName == ScenarioActionName.Deploy && a.state == ProcessActionState.Finished - ), + // For last deploy action we are interested in Deploys that are Finished (not ExecutionFinished) and that are not Cancelled + // so that the presence of such an action means that the process is currently deployed + lastDeployedActionData = actions + .find(action => Set(ScenarioActionName.Deploy, ScenarioActionName.Cancel).contains(action.actionName)) + .filter(action => + action.actionName == ScenarioActionName.Deploy && action.state == ProcessActionState.Finished + ), isLatestVersion = isLatestVersion, tags = Some(tags), history = Some( diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala index 6566f986709..618ff0aaca0 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala @@ -304,7 +304,11 @@ class DbProcessActionRepository( actionState: Set[ProcessActionState], actionNamesOpt: Option[Set[ScenarioActionName]] ): DB[Map[ProcessId, ProcessAction]] = { - val query = processActionsTable + val queryWithActionNamesFilter = actionNamesOpt + .map(actionNames => processActionsTable.filter { action => action.actionName.inSet(actionNames) }) + .getOrElse(processActionsTable) + + val finalQuery = queryWithActionNamesFilter .filter(_.state.inSet(actionState)) .groupBy(_.processId) .map { case (processId, group) => (processId, group.map(_.performedAt).max) } @@ -318,11 +322,7 @@ class DbProcessActionRepository( .map { case ((processId, action), comment) => processId -> (action, comment) } run( - 
actionNamesOpt - .map(actionNames => query.filter { case (_, (entity, _)) => entity.actionName.inSet(actionNames) }) - .getOrElse(query) - .result - .map(_.toMap.mapValuesNow(toFinishedProcessAction)) + finalQuery.result.map(_.toMap.mapValuesNow(toFinishedProcessAction)) ) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala index 876f2d47873..3047d409004 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala @@ -33,7 +33,7 @@ trait ProcessDBQueryRepository[F[_]] extends Repository[F] with NuTables { protected def fetchLatestProcessesQuery( query: ProcessEntityFactory#ProcessEntity => Rep[Boolean], - lastDeployedActionPerProcess: Set[ProcessId], + deployedProcesses: Set[ProcessId], isDeployed: Option[Boolean] )(implicit fetchShape: ScenarioShapeFetchStrategy[_], loggedUser: LoggedUser): Query[ ( @@ -55,7 +55,7 @@ trait ProcessDBQueryRepository[F[_]] extends Repository[F] with NuTables { .filter { case ((_, _), process) => isDeployed match { case None => true: Rep[Boolean] - case Some(dep) => process.id.inSet(lastDeployedActionPerProcess) === dep + case Some(dep) => process.id.inSet(deployedProcesses) === dep } } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala index 0788b292c7e..9f99290faea 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala @@ -50,4 +50,8 @@ trait WithAccessControlCheckingConfigScenarioHelper { rawScenarioHelper.createDeployedCanceledExampleScenario(scenarioName, category.stringify, isFragment = false) } + def createDeployedWithCustomActionScenario(scenarioName: ProcessName, category: TestCategory): ProcessId = { + rawScenarioHelper.createDeployedWithCustomActionScenario(scenarioName, category.stringify, isFragment = false) + } + } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala index 365d664ec50..9c8f2b4c3d8 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala @@ -96,6 +96,18 @@ private[test] class ScenarioHelper(dbRef: DbRef, designerConfig: Config)(implici } yield id).futureValue } + def createDeployedWithCustomActionScenario( + scenarioName: ProcessName, + category: String, + isFragment: Boolean + ): ProcessId = { + (for { + id <- prepareValidScenario(scenarioName, category, isFragment) + _ <- prepareDeploy(id, processingTypeBy(category)) + _ <- prepareCustomAction(id) + } yield id).futureValue + } + def createArchivedExampleScenario(scenarioName: ProcessName, category: String, isFragment: Boolean): ProcessId = { (for { id <- prepareValidScenario(scenarioName, category, isFragment) @@ -130,6 +142,14 @@ private[test] class ScenarioHelper(dbRef: DbRef, 
designerConfig: Config)(implici ) } + private def prepareCustomAction(scenarioId: ProcessId): Future[_] = { + val actionName = ScenarioActionName("Custom") + val comment = DeploymentComment.unsafe(UserComment("Execute custom action")).toComment(actionName) + dbioRunner.run( + actionRepository.addInstantAction(scenarioId, VersionId.initialVersionId, actionName, Some(comment), None) + ) + } + private def prepareValidScenario( scenarioName: ProcessName, category: String, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala index ff7ebdc501c..9ab7c543ec8 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala @@ -828,6 +828,52 @@ class ProcessesResourcesSpec } } + test("should provide the same proper scenario state when fetching all scenarios and one scenario") { + createDeployedWithCustomActionScenario(processName, category = Category1) + + Get(s"/api/processes") ~> withReaderUser() ~> applicationRoute ~> check { + status shouldEqual StatusCodes.OK + val loadedProcess = responseAs[List[ScenarioWithDetails]] + + loadedProcess.head.lastAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("Custom"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.head.lastStateAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.head.lastDeployedAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + } + + Get(s"/api/processes/$processName") ~> withReaderUser() ~> applicationRoute ~> check { + status shouldEqual StatusCodes.OK + val loadedProcess = responseAs[ScenarioWithDetails] + + loadedProcess.lastAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("Custom"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.lastStateAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.lastDeployedAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + } + } + test("not allow to save process if already exists") { val processName = ProcessName("p1") saveProcess(processName, ProcessTestData.sampleScenarioGraph, category = Category1) { diff --git a/docs/Changelog.md b/docs/Changelog.md index f0516761daf..5c5e5eba584 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -1,5 +1,11 @@ # Changelog +1.15.2 (7 June 2024) +------------------------- +* [#6134](https://github.com/TouK/nussknacker/pull/6134) Fixes in determining `lastStateActionData` and `lastDeployedActionData` for Scenario. + * Deployed version of scenario is now shown properly even if other actions followed deploy. + * Scenario state is now not impacted by actions that don't actually change it. + 1.15.1 (5 June 2024) ------------------------- * [#6126](https://github.com/TouK/nussknacker/pull/6126) Fix statistics configuration. 
From 1c191e659d0ed512c71b8b86166503cf60273025 Mon Sep 17 00:00:00 2001 From: GitHub Action Release bot Date: Fri, 7 Jun 2024 11:09:43 +0000 Subject: [PATCH 06/17] Setting version to 1.15.2 --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index 8d30035f422..767ebb6aafb 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "1.15.2-SNAPSHOT" +version in ThisBuild := "1.15.2" From edf0dbbbd2365c1647ea40235be7343d96c4e375 Mon Sep 17 00:00:00 2001 From: GitHub Action Release bot Date: Fri, 7 Jun 2024 11:55:42 +0000 Subject: [PATCH 07/17] Setting version to 1.15.3-SNAPSHOT --- version.sbt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.sbt b/version.sbt index 767ebb6aafb..a6159a41369 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "1.15.2" +version in ThisBuild := "1.15.3-SNAPSHOT" From 14b18f26831ea669e5a7ee90240b2f6a8c02ddbf Mon Sep 17 00:00:00 2001 From: Adrian Matwiejuk <73438286+wrzontek@users.noreply.github.com> Date: Fri, 7 Jun 2024 14:59:16 +0200 Subject: [PATCH 08/17] Fix last action data determining (#6134) (#6156) --- .../DBFetchingProcessRepository.scala | 25 +++++++--- .../repository/ProcessActionRepository.scala | 12 ++--- .../repository/ProcessDBQueryRepository.scala | 4 +- ...sControlCheckingConfigScenarioHelper.scala | 4 ++ .../test/utils/domain/ScenarioHelper.scala | 20 ++++++++ .../ui/api/ProcessesResourcesSpec.scala | 46 +++++++++++++++++++ docs/Changelog.md | 6 +++ 7 files changed, 102 insertions(+), 15 deletions(-) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala index 4e30e2b671b..4b6dc3ec814 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala @@ -76,10 +76,18 @@ abstract class DBFetchingProcessRepository[F[_]: Monad]( actionRepository .getLastActionPerProcess(ProcessActionState.FinishedStates, Some(ScenarioActionName.StateActions)) ) - // for last deploy action we are not interested in ExecutionFinished deploys - we don't want to show them in the history + // For last deploy action we are interested in Deploys that are Finished (not ExecutionFinished) and that are not Cancelled + // so that the presence of such an action means that the process is currently deployed lastDeployedActionPerProcess <- fetchActionsOrEmpty( - actionRepository.getLastActionPerProcess(Set(ProcessActionState.Finished), Some(Set(ScenarioActionName.Deploy))) - ) + actionRepository + .getLastActionPerProcess( + ProcessActionState.FinishedStates, + Some(Set(ScenarioActionName.Deploy, ScenarioActionName.Cancel)) + ) + ).map(_.filter { case (_, action) => + action.actionName == ScenarioActionName.Deploy && action.state == ProcessActionState.Finished + }) + latestProcesses <- fetchLatestProcessesQuery(query, lastDeployedActionPerProcess.keySet, isDeployed).result } yield latestProcesses .map { case ((_, processVersion), process) => @@ -180,10 +188,13 @@ abstract class DBFetchingProcessRepository[F[_]: Monad]( processVersion = processVersion, lastActionData = actions.headOption, lastStateActionData = actions.find(a => ScenarioActionName.StateActions.contains(a.actionName)), - // for last deploy action we are not 
interested in ExecutionFinished deploys - we don't want to show them in the history - lastDeployedActionData = actions.headOption.filter(a => - a.actionName == ScenarioActionName.Deploy && a.state == ProcessActionState.Finished - ), + // For last deploy action we are interested in Deploys that are Finished (not ExecutionFinished) and that are not Cancelled + // so that the presence of such an action means that the process is currently deployed + lastDeployedActionData = actions + .find(action => Set(ScenarioActionName.Deploy, ScenarioActionName.Cancel).contains(action.actionName)) + .filter(action => + action.actionName == ScenarioActionName.Deploy && action.state == ProcessActionState.Finished + ), isLatestVersion = isLatestVersion, tags = Some(tags), history = Some( diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala index 0fd0b991783..bc649f42bbc 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessActionRepository.scala @@ -307,7 +307,11 @@ class DbProcessActionRepository( actionState: Set[ProcessActionState], actionNamesOpt: Option[Set[ScenarioActionName]] ): DB[Map[ProcessId, ProcessAction]] = { - val query = processActionsTable + val queryWithActionNamesFilter = actionNamesOpt + .map(actionNames => processActionsTable.filter { action => action.actionName.inSet(actionNames) }) + .getOrElse(processActionsTable) + + val finalQuery = queryWithActionNamesFilter .filter(_.state.inSet(actionState)) .groupBy(_.processId) .map { case (processId, group) => (processId, group.map(_.performedAt).max) } @@ -321,11 +325,7 @@ class DbProcessActionRepository( .map { case ((processId, action), comment) => processId -> (action, comment) } run( - actionNamesOpt - .map(actionNames => query.filter { case (_, (entity, _)) => entity.actionName.inSet(actionNames) }) - .getOrElse(query) - .result - .map(_.toMap.mapValuesNow(toFinishedProcessAction)) + finalQuery.result.map(_.toMap.mapValuesNow(toFinishedProcessAction)) ) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala index 20e7e4efc4d..434c120eeb2 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/ProcessDBQueryRepository.scala @@ -39,7 +39,7 @@ trait ProcessDBQueryRepository[F[_]] extends Repository[F] with NuTables { protected def fetchLatestProcessesQuery( query: ProcessEntityFactory#ProcessEntity => Rep[Boolean], - lastDeployedActionPerProcess: Set[ProcessId], + deployedProcesses: Set[ProcessId], isDeployed: Option[Boolean] )(implicit fetchShape: ScenarioShapeFetchStrategy[_], loggedUser: LoggedUser): Query[ ( @@ -61,7 +61,7 @@ trait ProcessDBQueryRepository[F[_]] extends Repository[F] with NuTables { .filter { case ((_, _), process) => isDeployed match { case None => true: Rep[Boolean] - case Some(dep) => process.id.inSet(lastDeployedActionPerProcess) === dep + case Some(dep) => process.id.inSet(deployedProcesses) === dep } } diff --git 
a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala index 0788b292c7e..9f99290faea 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/base/it/WithAccessControlCheckingConfigScenarioHelper.scala @@ -50,4 +50,8 @@ trait WithAccessControlCheckingConfigScenarioHelper { rawScenarioHelper.createDeployedCanceledExampleScenario(scenarioName, category.stringify, isFragment = false) } + def createDeployedWithCustomActionScenario(scenarioName: ProcessName, category: TestCategory): ProcessId = { + rawScenarioHelper.createDeployedWithCustomActionScenario(scenarioName, category.stringify, isFragment = false) + } + } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala index f8bd0f13d38..6888461cbe5 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ScenarioHelper.scala @@ -98,6 +98,18 @@ private[test] class ScenarioHelper(dbRef: DbRef, designerConfig: Config)(implici } yield id).futureValue } + def createDeployedWithCustomActionScenario( + scenarioName: ProcessName, + category: String, + isFragment: Boolean + ): ProcessId = { + (for { + id <- prepareValidScenario(scenarioName, category, isFragment) + _ <- prepareDeploy(id, processingTypeBy(category)) + _ <- prepareCustomAction(id) + } yield id).futureValue + } + def createArchivedExampleScenario(scenarioName: ProcessName, category: String, isFragment: Boolean): ProcessId = { (for { id <- prepareValidScenario(scenarioName, category, isFragment) @@ -132,6 +144,14 @@ private[test] class ScenarioHelper(dbRef: DbRef, designerConfig: Config)(implici ) } + private def prepareCustomAction(scenarioId: ProcessId): Future[_] = { + val actionName = ScenarioActionName("Custom") + val comment = DeploymentComment.unsafe(UserComment("Execute custom action")).toComment(actionName) + dbioRunner.run( + actionRepository.addInstantAction(scenarioId, VersionId.initialVersionId, actionName, Some(comment), None) + ) + } + private def prepareValidScenario( scenarioName: ProcessName, category: String, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala index 369e6800789..ee2da78b5fb 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala @@ -885,6 +885,52 @@ class ProcessesResourcesSpec } } + test("should provide the same proper scenario state when fetching all scenarios and one scenario") { + createDeployedWithCustomActionScenario(processName, category = Category1) + + Get(s"/api/processes") ~> withReaderUser() ~> applicationRoute ~> check { + status shouldEqual StatusCodes.OK + val loadedProcess = responseAs[List[ScenarioWithDetails]] + + loadedProcess.head.lastAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("Custom"), ProcessActionState.Finished, _, _, _, _) + ) => + } + 
loadedProcess.head.lastStateAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.head.lastDeployedAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + } + + Get(s"/api/processes/$processName") ~> withReaderUser() ~> applicationRoute ~> check { + status shouldEqual StatusCodes.OK + val loadedProcess = responseAs[ScenarioWithDetails] + + loadedProcess.lastAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("Custom"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.lastStateAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + loadedProcess.lastDeployedAction should matchPattern { + case Some( + ProcessAction(_, _, _, _, _, _, ScenarioActionName("DEPLOY"), ProcessActionState.Finished, _, _, _, _) + ) => + } + } + } + test("not allow to save process if already exists") { val processName = ProcessName("p1") saveProcess(processName, ProcessTestData.sampleScenarioGraph, category = Category1) { diff --git a/docs/Changelog.md b/docs/Changelog.md index 99711e5addf..553175a5e7b 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -9,6 +9,12 @@ * Deployments API returns correct status of deployment instead of returning always the last deployment's status * [#6121](https://github.com/TouK/nussknacker/pull/6121) Add functionality to reorder columns within the table editor. +1.15.2 (7 June 2024) +------------------------- +* [#6134](https://github.com/TouK/nussknacker/pull/6134) Fixes in determining `lastStateActionData` and `lastDeployedActionData` for Scenario. + * Deployed version of scenario is now shown properly even if other actions followed deploy. + * Scenario state is now not impacted by actions that don't actually change it. + 1.15.1 (5 June 2024) ------------------------- * [#6126](https://github.com/TouK/nussknacker/pull/6126) Fix statistics configuration. 
From b3b1165f9dd6cc77d57966654f63cb902ba91c8f Mon Sep 17 00:00:00 2001 From: Maciej Cichanowicz <30436981+Elmacioro@users.noreply.github.com> Date: Fri, 7 Jun 2024 18:03:46 +0200 Subject: [PATCH 09/17] [NU-1649] Fix situation where no credentials were provided and anonymous access is not permitted (#6158) --- .../NuDesignerApiAvailableToExposeYamlSpec.scala | 2 +- .../nussknacker/ui/security/api/AuthManager.scala | 14 ++++++++------ 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala index a7c3f2d6a6c..ce615f9b4f2 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NuDesignerApiAvailableToExposeYamlSpec.scala @@ -166,7 +166,7 @@ object NuDesignerApiAvailableToExpose { val basicAuth = auth .basic[Option[String]]() .map(_.map(PassedAuthCredentials))(_.map(_.value)) - .withPossibleImpersonation() + .withPossibleImpersonation(false) Try(clazz.getConstructor(classOf[EndpointInput[PassedAuthCredentials]])) .map(_.newInstance(basicAuth)) diff --git a/security/src/main/scala/pl/touk/nussknacker/ui/security/api/AuthManager.scala b/security/src/main/scala/pl/touk/nussknacker/ui/security/api/AuthManager.scala index b1876a8c1c0..ee28f359598 100644 --- a/security/src/main/scala/pl/touk/nussknacker/ui/security/api/AuthManager.scala +++ b/security/src/main/scala/pl/touk/nussknacker/ui/security/api/AuthManager.scala @@ -49,7 +49,7 @@ class AuthManager(protected val authenticationResources: AuthenticationResources def authenticationEndpointInput(): EndpointInput[AuthCredentials] = authenticationResources .authenticationMethod() - .withPossibleImpersonation() + .withPossibleImpersonation(authenticationResources.getAnonymousRole.isDefined) def authorize(user: AuthenticatedUser): Either[AuthorizationError, LoggedUser] = { if (user.roles.nonEmpty) @@ -100,24 +100,26 @@ object AuthManager { implicit class ImpersonationConsideringInputEndpoint(underlying: EndpointInput[Option[PassedAuthCredentials]]) { - def withPossibleImpersonation(): EndpointInput[AuthCredentials] = { + def withPossibleImpersonation(anonymousAccessEnabled: Boolean): EndpointInput[AuthCredentials] = { underlying .and(impersonationHeaderEndpointInput) - .map(mappedAuthenticationEndpointInput) + .map { mappedAuthenticationEndpointInput(anonymousAccessEnabled) } } private def impersonationHeaderEndpointInput: EndpointIO.Header[Option[String]] = header[Option[String]](impersonateHeaderName) - private def mappedAuthenticationEndpointInput - : Mapping[(Option[PassedAuthCredentials], Option[String]), AuthCredentials] = + private def mappedAuthenticationEndpointInput( + anonymousAccessEnabled: Boolean + ): Mapping[(Option[PassedAuthCredentials], Option[String]), AuthCredentials] = Mapping.fromDecode[(Option[PassedAuthCredentials], Option[String]), AuthCredentials] { case (Some(passedCredentials), None) => DecodeResult.Value(passedCredentials) case (Some(passedCredentials), Some(identity)) => DecodeResult.Value( ImpersonatedAuthCredentials(passedCredentials, ImpersonatedUserIdentity(identity)) ) - case (None, None) => DecodeResult.Value(NoCredentialsProvided) + case (None, None) if !anonymousAccessEnabled => DecodeResult.Missing + case (None, None) => DecodeResult.Value(NoCredentialsProvided) // In case of a 
situation where we receive impersonation header without credentials of an impersonating user // we return DecodeResult.Missing instead of NoCredentialsProvided as we require impersonating user to be authenticated case (None, Some(_)) => DecodeResult.Missing From ae1d2bb6ba001cba3b6cdb2961bc7e1765e584f8 Mon Sep 17 00:00:00 2001 From: mkl <158037696+mk-software-pl@users.noreply.github.com> Date: Mon, 10 Jun 2024 09:10:07 +0200 Subject: [PATCH 10/17] [NU-1618] sync with installation example repo (#6138) --- .github/workflows/release.yml | 33 +++++++++++------- .../scripts/publishNuInstallationExample.sh | 34 +++++++++++++++++++ examples/installation/.env | 1 + examples/installation/docker-compose.yml | 2 +- 4 files changed, 56 insertions(+), 14 deletions(-) create mode 100755 .github/workflows/scripts/publishNuInstallationExample.sh create mode 100644 examples/installation/.env diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 4340ff32ff6..d3c7d9c34ea 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -2,6 +2,10 @@ name: Release on: workflow_dispatch: inputs: + release_version: + description: "Released Nussknacker version" + required: true + type: string backport_release: description: "Backport release - 'latest' tag won't be updated on docker" required: true @@ -12,22 +16,20 @@ on: required: true default: true type: boolean - release_version: - description: "Fill it if backport or RC. Otherwise with-defaults will be used" - required: false - type: string jobs: build: runs-on: ubuntu-latest env: - SBT_RELEASE_NEXT_VERSION: ${{ (inputs.release_version && format('{0} {1} with-defaults', 'release-version', inputs.release_version)) || 'with-defaults' }} - NU_DOCKER_UPDATE_LATEST: ${{ inputs.backport_release == false && inputs.release_candidate == false }} + NUSSKNACKER_VERSION: ${{ inputs.release_version }} + NEXT_VERSION_RELEASE: ${{ inputs.backport_release == false && inputs.release_candidate == false }} + SBT_RELEASE_NEXT_VERSION: ${{ format('{0} {1} with-defaults', 'release-version', inputs.release_version) }} + NU_INSTALLATION_EXAMPLE_ACCESS_TOKEN: ${{ secrets.NU_INSTALLATION_EXAMPLE_PAT }} steps: - name: "Output variables" run: | echo Backport release is ${{ inputs.backport_release }}, Release Candidate is ${{ inputs.release_candidate }}. 
- echo Docker update latest is ${{ env.NU_DOCKER_UPDATE_LATEST }} + echo Docker update latest is ${{ env.NEXT_VERSION_RELEASE }} echo SBT release next version is ${{ env.SBT_RELEASE_NEXT_VERSION }} - name: "Validate release branch name" @@ -109,22 +111,27 @@ jobs: SONATYPE_USERNAME: ${{ secrets.SONATYPE_USER }} SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }} # dockerUpBranchLatest is set to false because branch latest tags are used by cypress tests and - dockerUpLatest: ${{ env.NU_DOCKER_UPDATE_LATEST }} + dockerUpLatest: ${{ env.NEXT_VERSION_RELEASE }} # we don't update release-xxx-latest tag, because the image built during the release doesn't contain developer's extensions which are required by cypress tests dockerUpBranchLatest: false run: sbt 'release ${{ env.SBT_RELEASE_NEXT_VERSION }} skip-tests' - name: "Push to master" - if: ${{ env.NU_DOCKER_UPDATE_LATEST == 'true' }} + if: ${{ env.NEXT_VERSION_RELEASE == 'true' }} run: git push origin HEAD:master -f + - name: Push installation example to GH nussknacker-installation-example repository + if: ${{ env.NEXT_VERSION_RELEASE == 'true' }} + run: | + ./.github/workflows/scripts/publishNuInstallationExample.sh + - name: "Read nussknacker short description from file" - if: ${{ env.NU_DOCKER_UPDATE_LATEST == 'true' }} + if: ${{ env.NEXT_VERSION_RELEASE == 'true' }} id: short_nu run: echo "::set-output name=short::$(cat dockerhub/nussknacker/short.txt)" - name: "Dockerhub publish readme nussknacker" - if: ${{ env.NU_DOCKER_UPDATE_LATEST == 'true' }} + if: ${{ env.NEXT_VERSION_RELEASE == 'true' }} uses: peter-evans/dockerhub-description@v3 with: username: ${{ secrets.DOCKERHUB_USER }} @@ -134,12 +141,12 @@ jobs: readme-filepath: "dockerhub/nussknacker/README.md" - name: "Read nussknacker-lite-runtime-app short description from file" - if: ${{ env.NU_DOCKER_UPDATE_LATEST == 'true' }} + if: ${{ env.NEXT_VERSION_RELEASE == 'true' }} id: short_nu_lite run: echo "::set-output name=short::$(cat dockerhub/nussknacker-lite-runtime-app/short.txt)" - name: "Dockerhub publish readme nussknacker-lite-runtime-app" - if: ${{ env.NU_DOCKER_UPDATE_LATEST == 'true' }} + if: ${{ env.NEXT_VERSION_RELEASE == 'true' }} uses: peter-evans/dockerhub-description@v3 with: username: ${{ secrets.DOCKERHUB_USER }} diff --git a/.github/workflows/scripts/publishNuInstallationExample.sh b/.github/workflows/scripts/publishNuInstallationExample.sh new file mode 100755 index 00000000000..524da6f2a68 --- /dev/null +++ b/.github/workflows/scripts/publishNuInstallationExample.sh @@ -0,0 +1,34 @@ +#!/bin/bash -ex + +if [ -z "$NU_INSTALLATION_EXAMPLE_ACCESS_TOKEN" ]; then + echo "NU_INSTALLATION_EXAMPLE_ACCESS_TOKEN variable has to be defined" + exit 1 +fi + +if [ -z "$NUSSKNACKER_VERSION" ]; then + echo "NUSSKNACKER_VERSION variable has to be defined" + exit 1 +fi + +cleanup() { + rm -rf nu-installation-example-repo +} + +cleanup # just for sure + +trap cleanup EXIT + +git clone "https://$NU_INSTALLATION_EXAMPLE_ACCESS_TOKEN@github.com/TouK/nussknacker-installation-example.git" nu-installation-example-repo +cd nu-installation-example-repo +git remote set-url origin "https://$NU_INSTALLATION_EXAMPLE_ACCESS_TOKEN@github.com/TouK/nussknacker-installation-example.git" + +rm -rf ./* +cp -r ../examples/installation/* . +echo "NUSSKNACKER_VERSION=$NUSSKNACKER_VERSION" > .env + +git config user.email "actions@github.com" +git config user.name "GitHub Actions" +git add . 
+git commit -m "Publishing $NUSSKNACKER_VERSION installation example" +git tag "$NUSSKNACKER_VERSION" +git push -f origin master --tags diff --git a/examples/installation/.env b/examples/installation/.env new file mode 100644 index 00000000000..a18c5f80247 --- /dev/null +++ b/examples/installation/.env @@ -0,0 +1 @@ +NUSSKNACKER_VERSION=1.15.1 diff --git a/examples/installation/docker-compose.yml b/examples/installation/docker-compose.yml index f2e0d59e84b..3df7090dc23 100644 --- a/examples/installation/docker-compose.yml +++ b/examples/installation/docker-compose.yml @@ -23,7 +23,7 @@ services: ### Nussknacker designer service designer: - image: touk/nussknacker:1.15.0_scala-2.12 + image: touk/nussknacker:${NUSSKNACKER_VERSION}_scala-2.12 restart: unless-stopped environment: CONFIG_FILE: "/opt/nussknacker/conf/application.conf,/opt/nussknacker/conf/application-customizations.conf" From 293bfee2ac3d968ebb17e3b823f97588f0079258 Mon Sep 17 00:00:00 2001 From: Dawid Poliszak Date: Mon, 10 Jun 2024 10:37:01 +0200 Subject: [PATCH 11/17] fix flaky screenshots (#6151) * Temporary skip flaky test Co-authored-by: Dzuming --------- Co-authored-by: github-actions[bot] <41898282+github-actions[bot]@users.noreply.github.com> Co-authored-by: Dzuming --- ...should have counts button and modal #3.png | Bin 49172 -> 48612 bytes designer/client/cypress/e2e/tableEditor.cy.ts | 3 ++- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Process with data should have counts button and modal #3.png b/designer/client/cypress/e2e/__image_snapshots__/electron/Linux/Process with data should have counts button and modal #3.png index afea25dd9dfe3791fc8cd1416c8acdc6aaa9232b..24489cf6740b9d6116d629018892c4cd712095f9 100644 GIT binary patch literal 48612 zcmd?R1yt4F*C(u^pnxC((jXupA>FA`(n^Z)Re^~^~Im6D@*9BU*clrWaWbI!bfeOjKm{V z=gVu27|yD`>6`0QU*q4WJXW}5aV5>^as<=6cbEf-Zy2x4esdx?_+~&fc3t_iEZda< z>VQ4cprMD?W!}EuQ~VgeU)3wH>%MB|6Xcpw9dU6YcnKd5J|Zp*@6y1B5pve6%=rr$ z>i>2Jze*hl?+{(k+D+d**Z;!rMTvTOC$;&Z1bno#{ZB6&*S#-$UX5n$T*t&=l6+fF zRUaEX_4L~lYI^P*xxjaIrzG)JF76JqgKYzyAkK6hv3aY?xP2+p3NDAh;7Z*l5PR}h!!$|Q@lDwA8Kl~jt+bKCD*^V zvU79y@VKvB$<7)Z8R6r$+f$R7gjL^Cn4?vTNsDpN#*Q6C_~6~3#fR9%g^J30C|j7{^AUP|Irv!rMj5o33*V*ZPRGhXty773c#SEEgy`D555(*NdN`}`$5 zcvDdL?cmH^uD+htlkkY>*!D7B1=~x@RW2bX%eQng3JnwJan3zcs169$wRy}p(xX}Q zYbpP((gOyiRP&$j2(i6NFhoT~Lr4b_m+fQhHk#^7@~0GzP#&hdy#{dAD1;=t{+SXm~hohTmS+ac%S#W{!4zzxc`Y8$sLZ z-|{DpH_V%9D=Ap5#AOv(PmGbxyFS z-14r?(WyrFGqhRhb}{SImsRanW4VTT$oa+EiSotddm~9Kdx2vnIrReLsm2UVJy*wj z`8wRD|4awsb70}qJSiU0M9#<`sed@EkF&3$AMS2^?%dlPj>bHQnpZaA*7rPn5SvlB z)fQ}Q;i)gq;&bAVEtmZVn{@w5rd+haVZ-E5o(g{U;I!Qk;R2gpYjI>YYSE8M6;nj} zzFuP{J~zR2cXub|`OG)C$kIAv!|wWmDoK&Ssv^;1igo6d^VJH(U9qgr&Hm4-ww|uD z)G;CxAN}ai#+;9s?!-1X?)dB27V1CqthtTQ&wtTRBcE`HdBFEt!p;tJ#Heuav*Ghe zuM@f;0@nBQp+s#1nY-d|JNwL=a+H@TC!K9sdJ?#&9YwfavQvipCnf3TYUFpz^@X+U z?8fhot_wL6AQIU^GbBsfB=oD=Haz@ubc==Y^(d5Vo?4(b=sD&;!2;2VdI*s+HYMEE z;KAjVbw{rCak`|N;nU$Z5xD-%jn=mOx*RcJ4Fd>#i1)}SE+IQZJz4SYpDVllOhQE?RELGH>j)oJnqTOeZ{k~?= zrK4?UVRC-=kc9)DxeTv5skwRLUi|=r@o=|*u&SWHTZNI24_6+4kQ*k{S(rY{V#l0# zB8!X?cPcr2PV+IR|LpBCBC@`-M|4$UU+$@$1>>oHxqE;SJ$wG?W0In*=`-hhUOfKI z-VkaZsQi5{W^MSl?1*u8OcR_1k`;H5+s>5~RGy-tor4QSn+)a`wK-4s0pL@|i9c}mhjc^N}9X8*! 
zvifPh%W9?GF+W#mcW!j=K>|;f780FBuQ;Ygm~T_bIYO}f@!{H3X{8OvZA5J}vvRJ< zje>TDt%s%bnygZ~ZQKeaB_+kG)fKm6E_sPWn?@p=zfjj69ekD4ts!V# zx7#KwFgL}YUTyxyx-L^=>m!!_*6_y{Rogo-v4uqztKq`u9X_gGpN8e6o<-J72zSL| z(F@Q;qxm;=bac;l6i0IB>ia8Axh2Ojk9<+fAdSh)^K7s?og<;Nno79SL&`ND8AyPO zJ>3>cM!>52Vcb@P8V`XV?(kXH=2ogZkB|C;1m(I{w+^O8K3Lk0SZLPc#2^{^P(^dc z+!qSLQksX&-$Ib(QT@Jj22a!Mfo9`B4-#j17ANS2N5m#+E2Mksw@I7Mxb)_>W<#tP z(jTFriI3N;PyIl|M`J3brRbleI5WMpttaP8iN1Q6?PZ zj^CPG#3uRS<63U{6!u|@Zf#p`dE%)3^!2}SBlTpsD}?eFM)qLf_`2Dz3ORlXr|r-#e9;o*KFg)Mn_Ysw&%jA{4-ba z@6F3xwqnmqRQXcOj)a_+)vMP6Ips7#L`TEWHX=MYXW3akZsgqa{A@2?(io41;npl(Cs6*j(3%}5vh&`S)pJn{BKM-Lbjlab@T%p?<=H*VjKgdKiL z$!bNtErsJ|j6|iKh>HVZN?d>*q2msxi={PN3-_%2y-~p%5c>b(5`6LOpZSo`Jq@p1 zz47;*#D71J{$FybycJPT7>kgpPkHXVH-o`Qq?-U?{fA1hEkqzVpw2E-s0ZrWXO~Fy zJ+gCLJ3G4%I%K;;k#AN;W16mEaXRk>LxuAT=%50S>cwnbE1WXp<67#mNI7}9m@S`N z_12A9!(bswCW=+p7w$|M1-Slapm zNCk{J7TwRwZsSFWAuh+AroTM<8PJiOZ}voO!TpRwDa<)+mx$GHQRC};k#>C?5?AiepOgJifHoXci>Hmw~xK$I7@IR zd$2R*0m9|%x6+HLW6uP@3wnB#t9+XVU3~VP2z%uI(`t;RsXXRrrYHsm2LI%0T=w}y zm(N+5#v~%IE`^c~nGOC-#&#)TAR*D(qd3OS@V`0jy%0*mzvEJq!2K1`k#_b)ubdOU z`+j2rF%-Ys+1<4;!fl}$`%L^qCI2NICs*m{A0%a6O4Wz%uPp!Jd)n;uGhF45Ola1t zg=Jh_(eSj+L4U6xN^ISl5|S+RX~>Kp>3I7AWM8%vfDqtVsF3 zG!ONi2mY=knp&8SKwgbzY%Nv7&D263YlcyZwKC}UOs|yl@06PKYN+Nur7T>E%iAO7 z_d6jSHtu4;C)sVfnnER%#1y?T)34VoBQKk!YeSMkBJ#fN=86G7Ik~_eE|t1?X%kiK zf2I?a{e&m#ctXjBs5u1U83z~fJUKpUoO1;I;>p8L{m@Z1HdRdbhKR;)5lieFelWhX zzdiwOZa~1qnDXGj@sqYgTF0ZcF|WAyE%&gS4Gup|q8J2j zqWF>P&&M=u(p*M1R69Epf@)i(yYtHx41q__6>(_ovq;!?KbCO(#t>-z)?F6i`1I-gW6UciY1 zE9UEh_L{?$)3dnU{q~8D&iN^>g>7WP*-)WQq#>GHsl~V=Cub=;wh!bEm5k$B7Oko< z68k$US#KEqaMvV}eh%^V&OB4Bntt{A98x!_8nW_*;MY8q*mxCK9HFTj&@m;YsEE^7 zx5H{(#~s#M%3!}eVESiGtm!93wSWgNdEw6o2?IrXuhB!C!vf@FkKRBx63}Gn2Q`I#lw__BwlNx>#$#Zw9cLl00Lnll0m7| zWP?Dk%(64j76G#N%ul=|-O*l|ZZ zh=)2r;ZfS0>eXxH758*JOZA!HjgTIsy@*z1?H&v%|;AtFz_YDB-reM;iDBjuy3Fv;PN zl%GOulv#|0s2@!E7lz~dV%}+4?JDPo*r|+exFq&CSIzKu+xlp$ZYbT_A!7oM#Y4|4 zXs6e6)P2jzz8N|JqD*mFrchQMypFv^`&pY1wO^ih4{J7srKW2|qE8gq&kz+?lY98i zdS55FV$M+%Xp{$VeWJKiL}a9o2Ivbt<6fw{ zj3(DR-ze<<`U@ znA2h53Jf+-Nq%+&*z(|Mo!{D63Aqq34vU1*DiO*xfI2wM-Wtowbk1kK9Ne#UQ};sT z(~Sx~CM5U)DVa7+ps%NIn8HOsm9@I#B;pO2e+Peh)1OY1I#B1v)&UTF(TK#>=Sp`Q zrozYP|2P-zIt%k-W7GptXsbKq&G6q}++R%1vYC0D#P1MeUH7~@Eu;O2R{mV+@?}rm z`jm#A*#d@Jx2FA*EfP*CaWH6h_+}qVob$@YGCe9zmxPB3{5_MwFuFY`x-yahJ+q7L3@z#)7KtM>_z;{WNznJ)mqD-rDuEbrKoML9R z68;e+ey5V1ENRR~x+`#F8PAU@%EeJkcpbh0Pu66kS$g3qS+Y94ILL99d_N~@fg zIf>YwFU)2lt{xI)JiM%LC0#Xr_uKlYf`kdTp2JJEMMpZiY&%xrcfGh3%>}QXl8_GN zwR$%j&M`oW-3pEnKDH5BBmJ4zszFun11as}M->4g%14X-7xv(`F0zsm2yn>4k$w4J*Ad;J-((2pPDBw%YVPZU#lwP*}3?zH+_M?^;} zEcMo2MMnprfUVb8x`^%#U&YRFnSlGB#>BaWg<{har6?w~cM;;pw$wBqqlUOy^c7MO zL{xQ>>gv2{LI<~ri5nm@@e@%t#{BrL$ts~szwnZ!u$J8~hikz&fnFSa?=Fu>ve>NM z9^rSgqs=^8GwV;3;aKQr|SOss3ZTF~(iM z;!1E#;bnYRhgJE&+uq#?^#pi$C?FaU5z!f7u4g>=!Sl`L4oIixpR>&Y5OQCfE-^Q; zNf~h{L@w%|5GjfAyp0e)nc38cf@QARYCB0fJYu;s$nA1o?!{sELSG7dS0j4#R(vNoe%qTQyDp36=2g!zaHxz=5%?rLWn#EuLkS@v zERLsTu{m5s(~i|u(H{Oy&Ks!vKGCPIJz>%s8+&zPi>qSZ-M$LgAjHdsCd2Q~;*7s` zWJE-3Sn3~Cih-nr8jfj`f6bp$;>pCVD;yd|N3*NeB)mmLUM)gY?v3wb+qq`uw`-0H zmBy-0E+O?(ItH6l=a62tI9Z($NU_~Ld+U!dA^OxDjPcoRwOw2p{_Hc++x%~7+r1WY zYzuFb5rqc?ps^oG4j~tFqHFi9*|HTWDmwmaVs_|NY_fW1@=-L`N6gfnc^#a6H;Pajj zxglf%9}=jCp?({zx4rI8sYeN5&tc2Cl=R`N-lyu`e}7Niz218>CchED;b57qN8Rxu zMcDh~x#h*FS;ryloUY0UG!PO(_x7-VT1~XA9U(vj9E}Fbb71!G+_3_UU~!> z(WpQVIerZ>cKds9q61_Z*qeY1Z*gc8Zm++ae|6(`t2K-|+LjhWnd>v@@-oTS-<6?b zyRkEl)x~-L!8c$*sv`Nloem|pKLwp;GwbsPfNFGD%cQ~57g8vCkA^b{wujB1iEG~- zHyX|q$cn9yjx``^_U*AvAwhQ@G-JEBV~35Bn_tZ#`RVYnm)jnP*iuiSNT(fA- 
z?PH{h%S#(7cU@6(9HGV@OoFZXUEIm0Ld|CddcjD4g|t&Wpm*lSv75GxoCy}CM7Lc8 z{2I@`tMFE0i>-?UX!NH`#_ey)RPIj5r;aQVdunvAVs>>E!52A| z;cReM#k|*i$~)uDtEnImmx_SEpmw#szJ6W&DP=1fTD+jW=m`#artyjT8@OdTSq0Lu zdYkEK0}(`)21J(g$?*2qae}U<%qd8j06D1_R(K$d%?{^?lTCU$2(R+`me?#G#AS5? zibcf(WHV<%7R|T76#gdxME2hj;Yw3f3pr)97qn-1mrWdQE)b@%H6<9AZ z01`!$4msu2&l>;-u%Mkd8ZYNk#R=M9juAP*jr^F=px4C5}b3#$}_I z@Vw!xa&bAV)D5BF2!I@6xDs-t3DhS9F>lnZf;L9_J#nH&7_~iWmQzb!?W5BxCHk2D zr&A$ybG0wCiC~FTD;r_DFPIu_lna z;=|)Zke=IEE<9%!)|VnL{-!V8Ol}6`Pi!U z%~ixvP9s12P0sD10xb$TD=W%nlK9V`vkbTQw`aV~LbcA%q+kXu+Y3B&m7aW7KxBdT z?(Y0a%Q(iX9&eC)`&Y3g)d>D=i@_2`)ZVyFB%sYbI*yw6W@kcH)$2lo+?`#$ZFT}Hm=(8`^D#lnXlZLTCL;9k1#AX{i+ZhvaYrTNS1ssO z?LD5N9$_f_qxRpwus;^1<<4t*M~`|q_d`_DW8m42&-yi<$S9zASd(HB4O{>Q2ZRQ< zvjWiuE$_W*-s90g6_6LDvMI&4bN`)3$oU^%1J&P-!Pb@t=nj&~*bkNc0gD=KGpiHG z*~v2T$;r#6$!5A$MgwhKnw&R8QSj0IjX(EIePnkh@-F7o1w}_cI_aHYI0E=W#>Y-J zLgEJ~o-@J1!RQO3R-*$$IW zPF68Zv!CX_y2X6E2fMTa0@0`U=(2ACm;3K;_1`~O(IFvo5FNNHiFEXtPcsKZ0O3r? z=uV*6H)Kvs7@hn1rk1k+InH^aPoj%3rF^9OHBk7ZH|x3<*2Rhq5#i1#No);H zio;S|m4h|7uU&RsGWZU)kWeC&IhoEJnDURQLuGGR0Pn;94Ty-Rm0rFijR9`YBKGQ6 z*mH1G4cnETRh#j!Kn^Asa!C-Qq2obG5h&g*4L^GX8Op@O^Y0C^2Cah)9< z_t+c46R#bfM5Ls6w<8-7?ejEzY`Ai=FPWe(V~zm``Vep*#+)3`0wPj@$-_fz?d)$; zGw>0~z0?+rf#-r5F2aJx)OBjtOEl==s`z=ENfe;O|2&2c1{z+JYs}W!X4czT5v;>oowMSWU0y;cJ~B@*kT!*-U--X{95<8b$^+!#A8URV{k{rl*IKIiB z#*q}ZRY}fjVA`T|fYDB_Q>}I{QE?+>&f01*zvrn|;b4fjbgC>?MpHR$X1~r`0AYi? z5M&aIk?#Ux8-i~~!mAGkzp2?n?F!TAyzO0P`8$cqiK2m-i#Zr1Bhaxm!!F$YT0Wb& zI(cDNi~goWDMUc*4F%43NuW$F|32ebJAUI9b@=|CVv3+YUEexAytix*n?*oC0F>k? za9|qQv1_y|UdTdAgX4z4TRN~iJTSrsG7z-He%>0ZcNA{mF!l6`VBGo-5IdzuB?Vh3 zRj1zrF}rzfazf^)3?pAVu!(V+Ej}bA1<}na6QG?qWGLpZfMlFzk#StJ)vW^9st2sm zE!W*l+cN9%cU4pQ4I?A5L4;k;iXRIwso$5@c14em*V3)M(>`oz`!5&p$3I3#_k>i* zbpQt6#It1#@;>_xnvVmhpFhf;dFVMWFoI-hHvCJ$SyDI1=&(U=D4+1LnL6*y)sY3L zk++>hQo`$#yvNrU4w#^Q>CDZ+aQ9DYCAr77;PG|(2M>I*{c%4%iFgb^=*NCP4oWvl zRZ8wzwkN8L-U^Og0%|1a@IqS3jQ&8qpo5GpgpQUgQbu2%fvqo;?sS$8N)HOrM?}PT z!f1Eeu#7u9x=jX%24O=PkCk3r57zifchYu~1)5yS%CJ)?Mc>j$V$Q50#Yd!teiSYu z-9;UEv-_Sm%sN)f$RVbeXVC5|f4&9C>WxPk+L^0p@1lyi{qe*#mK=*^7R{0bV+KVe z;s10Z4C<|ha@3N^1O&KWtELjAxCG^=RcnBO*NJ^SV??6-1NN(GIjanhysnLtD-F+|{i3nCAAIR;wG@Q$0ECc3b! zYuXEH{X6=Y`$Dq@MCP53k18rD9A2uMgG)2?MbCF{PoDSHSR+`Jojr)=0>z${5SJC1 zl_5-O2-D|oxZA^fRe-~6g=Aw{8h>SSxy*zfjFunVa!eL1GyR8;0BQxh@Z(J#$1NQc z$^)i@BE%^9A4o0^=#PJONMJ{4pz3NuuZ;0dnSgG%(p&|ws+fNWpGH9&r>yYBW8t zwuouZ-;89se#{!i@zpQr$iA&jR}AH7B^T3TJj-w;92n#pD>B94T414vY+o(NlgH&a zclV`g;bT`5??Ggg|D~Fq8$BlPs{T`9?OBZFhtl@ zK!myf>lcSgM4qo0?))zb760wqiJanK1b;gZFWf*af#(-T$v5XtB>#69q1 zy1630ijjbKYg*dr?d<4$T^mq;x$}2?mJqdL}$pSg+ zln30f=k0xk_rd!^{jAf9Jb$t)He?$7fAO`w0S-0M(j2)iT*09f2-5%}5JJWi2M+Cr z#6(1du^5-jFLDt34FIZw4Usf5b@u*G{${qzAaqvE7klpF027V{m~wg6m@%OTJge8&1sZ3iv3!VsSKZ5Y@CJ4I6LpTeACJ>l;uhM6>(!rgy#in)1v? 
z6!+ZI@lAcYZUN8iDksw_bAM2uDGe`g{ERPolo=QrYz}0O<@r9Eqe!)5`XYYveKJ)) zz~1TwzdO?BEw@R*S;ytJ?rwcLjXDMphqKw*4oWH8auNX}0n zrR#nLG@LlDbz@FWPOzV^IBtr`=iNEz2VeGQ!IUgyNsiWCusR-veRQ_Jn2ffoV0Q0&W0x8gH{h26ae2k= zf~M#gxKs?#;M6Wu&Hx|)D!38ciH+8O&v?0u>da$M(Zr>52LATtce#DWJM!p>G*>tvk?H73C7FF#~EBpf`S-Lt@ZrbZg$k zmoBpH7$n}Q+53snom7qDPQV39v)lukXQm@Nxlm0Jq(2K44p_sB?AGb!2j|RmzHQFd zwe1H~r!)-q)wa$Q;hRiVJ_11#)ei!a!_h&A%W0*h*#7EIlhKCBSHHeRLhFmf+{On~ zCG!oKZ~#ec=a&%G_Us|vn~l*haC}RgE)03C#$|xN>Y?DVVeKz$;%#!g*z}?`Ap$8y zD^>?ff18~>0rdTKDWC@^OWLY}43U<>pjmE1nk@Luxb=W+>sO5jk>yY8*4x0Nj~!kP z6sXxRcvf6NB<$;qRnAL?B@2Ut*}@0~Ev*NwM-%iQy~SqQ!$oO*Tjq1mR$}2#7^xmS zkom_7YnW#{l8YT^tFWnRkD_-7n9GiLwlZ9?Y(J{q93rgcYTG?)-Dv9OKqI4(A^8950YW5U+i5(Fp8m_Z4-sSyH`yJK_ma|pXC zHeC|an1~Xuv(RFoh}EyTu`OSycpRmUO$cvT>V}_kTRp{So-sI!p%{H|;fc1Xm=KaF z)BK$n2Lt7PTS4oQo&9cQp{#K~9++XMu=Q4+t#M%wmbdpF@x;O*poXoYPzS9jKys8LeM z#Qs%Q>GluF9%$egCtK{~ftf7R*Kv=F8LJuuK;VjCdNYWjtSfVqkI5cBIw*!LLK_Kf zL~C}tJ5OX4so{72V++fZ({r$B{oD^CFTlLs*GGptmjZ!Q!mQ`8i&z=NWeN+7H>W!mSN0+zM7^& zP!$%RjP`-S7L{mfJ~fSk)3;LV4g^u|ob@rsH0R0H^*(0C~gs?VM9Wne37 zpuHV3x3;x%Z8vvGEXCv40VvG;e%xVhR7M`3(&fud$MtdHhoJSL zcI7Gg4WU4bCR*I`#)1+2K%>RCgT z*iKO;cB{F&e<=AMcSlQ2~)~06B{^l3bEl=t7S%e2Frs8R;vIEqj^hC97Pb+-dqrxQ9sSIll)N;?A+0GdyKd^5EN z5331e+%ag#IRXpD8@flp8*S!p92xNkd6~mi(+kzx1Z~5p9h5z2u=nE}+z2?Qf|80+ zO*vyL`_S_a+KVeXbkHl7GetjrwS@)^>wPxR!tx z&H_nsZ*`lPE(qhpq9ZVS}-#yq`s z(xDu}rgT%wEoXV43UKs63e~TV>IuA6ee-MRbHdC@0&trX~98jGURU%@v&nQd7Txcpg|vnY zsHU92cEJI_I~$Bd-A$%bT$Yo!Yd|i8#(&xNKlw}Nqs;`&>hBN4;$;i1lK%-aX!%RC zZ+Gn0DfRI_wKLm;stp zCp|Ch0U_Hk!&w9+K={kU`CL#|>ckCAOwCJ?*&fi{7VIff4dKU?UZ6YQ&2b4T7hbEcv-rdjqc0EEOraRKsZt~(xICZz~06DDAvtEDrJoeV( z>(uXmJpFsQ^;rB7d6pR zQ$;sUN&drIB<(C32Y5Nvgx4$B*{E_rc;{nLrz0&B|!bt~<|>TQ{bl5XYxjw&6UrwqIRA9}+9HO6+Kz zHbf*KAe=`uFX11B*YsMQj#qu>-(?Yg$EVia6CI6}y?dWKgf5TQc5wquo7Yl4pOo_S z&EY*W$>7Qo_j6TBe^Jx1gr4`JE^V>Nz;LBc zi?I~NLppYJbLfR7-@X}bl-(E|TUc4|a(La|ih#;i40XHD=J1dNhf1t@ zKe0kaQASejcLpbxxx&4*F?;ob@wVs>WbPB6a8f-6KEROfaSaBjOTY8+&DpNvU2tB8 zolV87L1dodE4^h+L`D)M+voPxV|C%ZpNMgHOT3AxY1((sU5#&FQ>2ed=&JTtJybou zJI&a>AU3A&Z870Y3knG`JO2Fa)s)9ujPS?Lj^4cN!4;!4=*(64a^+pY7AJKCq`VTl zGhR4(^kZdtJc7me4lG)_f%oYt$$xq)kN!HkoE&uYu!JXOS$Jv>e-QE(CbVaV){cEQ zrmAmmYI+AJ8jD`EqW9`mww$l${U{k)U7kY3!e#`!NlNwdBaI)c`-ca6Glw}?CMIS( zg6leb4BTrZF!FoM%vi(wiS;F>`*8Z`L1(`HX2#3$9hF|)$`*~t`u4Sf=+(8UI!wq( zaQF8si5xy#1xQJME3|;HKykGOMzL|nvzAuG$l(KzYNr|54h6rk4oNSJh zr|(uwt?vkq)2vzj*|7Gbe{k(+nOJ)UFJi9C!lyi%P&-qWAk+S&FP_6%z+!p%lteuy zW@tONt+&=6LOL#4)GLCBqu6;Q-&-qEYX5Mi9nHjy^x5(oLFlfu7|z7iaau!$L&&|o@w{tpYN0QwaYwu@b3)OjJ7_ZZ8VUppObjNSq&Tu zAf~AMkByIgWX`^c7s2+FF{o*kfLZ(f;3At5Hz|kQ6ZzSl=+qn?1b!(HQScr*ySa@P zX4fPnObD1HpE)Pbwg%a(FY2INXEV-teh1bM+C0KPRv(~>!z7*Sq_MP83R127r79gW zbFdpOWg64ULJ(|~`pMe9JgYO;a89~l<8Pj;R(fiA-!Z~dh3SS38sxJFIUFm+q{C9_ z4uAD=zkHggp{1#5@Y>W@J*)qTtE-^oZ{6Z=-)2Xu4_t;JSe|8>*sA_<{8srl+kM=o zJs#|+=8aL00c9doYggkIA%%~YA{kkkJUxC6I$AD+;mm0aXJ!yS(Z$D(FE7fPQv7yd zYoK1SL*6O0hol3;ky3+AQh6|qS2(czMpH*;M!oVDn(N8VJ(i?T)3>@8ShRbRB=b0! zm)73rnwEwiIi5a+jctFjfwsV^*AuG0VQ>c@UwS)T@`o}3pkp~hYxCeSct?Ec3m)xj zNjoY9@-mLEbstj`nK#jL8t{)4#N^!#W)Y=r< zeC@4cfoaC{yE>?|2`$CqKfb@27etd*Z0Y6Y%RP*}QNzT$LDw35M@p@s6K{CSTLeI( z*+9s(xuS1vg_c6l_wr7uFzxeaQX9MO_H!!rG*fQUoZpquI6B!Ic6fMIkAd}^4(E?! 
z4I_q|kl&%xVsD-C+_AXWHnSa_B$916=csL2EI)jUu$kFlf0Yn8Yl!CPTnDwoYSs|}+kcNKM ztq-j{yh`daVs_ebuDSH!(DnjP)eVOnS*k?Fag=AsqEj0W1O?S)o`LT0Y;1JyA*wdU zE*(A8T-ldT4T(b8Zo zR_OxUqUmFrdQ`61qvXcPhuxqmm>hPIiB!|l(u$Km#f3bey3K}9!fD27rt#RS$R2rH zoBJ$oz1oWgW~UY-R2=a>kPp`NVK&l1o}S%sR^h)H$by4s?a5n8w6+go`jo%i&lwR5 z3YnW9wuRj&J6BJ4Rz<$=lLe|9{7~a9ND@CNSSLAHf29g}?>!_T5qF{4DFnuFf8?TO z?F@#yVEIDD=giDOw;%jv0NZ{r3c4OUnrwO-L*}wQ>Gh~f5Ke@2U1B+?tXi=-VLq^T z)SW2mUAFfIGF^aEU<&SA#cbo*QECF)2T%Gg8_US>7{q*td(RZ^1Av6BzYA%g^!PDN zVM#@)>3K-ujhk4D)njr$NChhS4k(3Zbq(07>=z%KCJGV3mBwj9mlzwJlPeTB{4Ef% zR^f<48tN9hr*dUG{G*W{J_In)4ZmG!h=_b4+1`S;v-^N4irJS5?6*}u>s?+w`gH5+8pW1CgMZbYmK-b9(u!9y8Dg6$~|q^d#DEjj<$w#Blr#~<>(+-S(w64LD`AYLQ4=YGlW=F%fL zO|#7R>Ix+L7b2J*vaV9-gk90IGf0LP(QGOo&-@iXMbrB8*k^f2nq@`c>3c}SPncEN z&42b>C;g(hHr|ql`UK@}5kTH)igcP1=tG|#S1Aj-pI*)1F_kU3_;EuOZVxG&gS6B+ z+Sm6Qgx;yz0p)v02-0cqt~vh^zFqpRS8>j^2ni+SG{O+g71gPCd^7Q(v&21Z2v|7x zbQ|yJ9h{@;4w0f2yVzmQ3?gb>TkVI6s^Syoi53_Z*xWc7rYrCob+-p9F*ox2L|;Dv zPrh-)b72Hd%;jlUU4fu!{%_wzD~)4`Ip;KI;d~Wf5SNPj9qnNerpPG{HjMC;h4bd8 z-MQ+FeaIi#VEobiQ%~*dlE;7Kx)@6-#nW16h&<)y-#P*cN42FoPsF0Y(4}KR#Hx6+V-~K-rVb2|cX!!D~esv?2jFQqK zY}c~0QxyWq#;-8V1aBN?Up3IFqKlI@QuDp|oqof$_L>>gLuxUBuWEAM&@IkjcEmOO@%aP$3P0i}i!&S&-c!U(qyB+scuVL|Xc(z>9O~NuT zgMF2sz&F#;S&E{Bo1|CQ*^q5`(ln(3TLG!&Di{UctBJB z6Y7Z|0@Q^7%NI(Ms+Jc0cc<6kdBI$FTt(=Dt2oSF=wnuYXLo74GOJJj3G@2*u5Zsz zI6h~lVY-nj4z}HVG4Mqc8hZdsz^OvEpvv!CJwsJB@=qiPkGaWH!}4>Ue&Yu0rh4$D zd(`lZ-<~?)b@d<wio;66OJ0jUt6N(FnhyXuiM}de?G?rfpPpjnO~AD1NN94`kq;OUnA>YVd;^7EsN$ioYJ6q*}?h>HWYVcWb_qpS1h&r&CAKj z^=!VZcxK4^!D-A4GTTm+J#Ga4!U3Bk7E$_NN=%4|&hoO>?}}knR943H4@)+i|Na`# zdAeG$uEnXFVN!-+13hd1i8-$v4+ zow&>Ntf)OB@J+RzG@IGEDeO!Tq@IasN<5TJW-3dFs=lb<4Ii(AK)&S$oNWo{ixQce zv9%>4;|Kdst6!bVyF~q6n{lyeXKqgi2PptsG-T#=>wk|~isOfra~=2tRKvrHYOgaE zCJKXDFTInp`|LGv$Ohk)JvGFyqot{B(c5x z0IuxjR!w*jca5kl?$GDDshah#$#5UnFw4kv!qOh@?p>#r;O$Io_okcjlAEl-fj+4u z=`=$E8@tbvgHrCQh4emB3bT_w?KO;%oh_wXPiQjI@nAXn`f1(nLQ_THzcdm1zJ-lN&Mrp=gx z4%@rsj{_F`86~DgG@qP}XYrDy4E;;H1^4LajK3K*I!)B=zO4$@$j~mH7P6XhX_#ou zU9nT=w^LwAs-W*Lw-wo{+^GA*RajC~ba#4GkzOISkAcb! z`7HBFWLDoIFl~SW?e!WOHMvztAPqj!X|G1VC#o#6rvLMfHV>ghseQ|5rG6Ok@$uxs zt_hONKO^HUTE1%bH(_sYDrWAOK9P|zoj-m}z%u>b-|dEu{Uq!}!QguVjxw~a^A#Frw%QW_zrluyS`9WF^;a%N{ z=RCkFOo#o}2|6~Pq;5RFKR4SAB>I`hv8{*S&9%Qku?g)Hy zyvG6cvZ$~{V`z7;2?(PWq4k;C0VVGAXlLge`P%y7PVhJQ^9Vt|Pr^T;*cc;YP86F? 
zkj8M~b06F0vv_U~QJAG8S~&b;h7!C=i;7SxjY1iI9?IGGH zl1|&>=T$d1omapNRyWT4U%;vZ8cHsE&kWJIFI*k5H5C^JR&6M`)oL5ZPV` zwqEv!U@lah8Aovq;F}P6UM33_#!u&qE07BD^7MMcu#M{0CSqz_$gg`*c0w8X_)+=C z@|zbVr6oVQy1E``FR>%w%|pR2^av@){426+k5)N}=&NEOD}rd*i) zJTL3e@uk8kLIQ_MsAs2KM}~(OLjS)8f{&a=K_n0W2TK>bomY2rS=V)xE6jKR`bv4W zyGOBXv_Uz18V^KLGFu#|$YMoHt^8f5f~2_)!%z@LH;-Ibba&=^n%_=}qS)ZO!r#+< zxsXI6d|bIK&oMm(?QZ)}-yHUvpV~F2qo;a%+@@EZx*r9UWoK&}pHkGbskvRrnG#PDL{o^Chx^;o4qH2KqyccXfXFI9WD^f+ z5-M9i0aBwZ=&h|B~N|PEoRYIN)`tlbJ#~(33$%O;^wm) z{c806mulvJqKd&g-PS>62;1>+Y^onVR{CWbQsqpCAP|45)87FReI#Dk_%a**?RZfO zZs3?+ZTn;u#otPdph~FbX$MM8mdEpXT#+)qkPNf^+@UWM=j&@n_FUyo2QGD5-uY8FunoqmwJ6pttZz2e_$0k|@wj|}YB|1Z?NbySvL zw=QggsDOk>BP!h@T>^p%NJ)43&<)Zc(ybyT(jtg7(j_e|-QC@daOVB{z3=$;KKtF@ z*k^y|8{?dR9FFICxNEI7=QXeETJx4MTxIq6>MrovaO!=VdRgSU;M$QuYrh#aHD~C4 z7Wf9z1mwE%7&R-A5!&X34Am(0x%pEAJ0WK-4f{Ov%nJe6Q*Y~VR8O(X9W~eCcWF~N znJVQGEagAuIuWp3orFi&yCC^hhFDBOg5H#KJu&zD4M3nxt9n+~AekmFM8zGh$G+Ej zXuY)DX}7}n0pY-JL}k%{9s4|9)7mQc{=@C7-#N_;XNo8wWUeM&mO6dfOuvVO&sM1S zXyr1mjh#N#WBsy4*38sz4Q*XPHo3eGTbDOlaN%>)hN!K`Py zO;~iA8`Y}p(b(Pnp^W4El;%JX6x@)8PGkQmjUbS~1xh2_GyV?*FG#k;{30J&sN9j1 z6qsO9Ybbrw3ZbrA*vx>I9v(I28BUXQ98#AfpT@oY8=nzhN94C#B1RAv5#W}Tgz&`h zf8YPRf3Tqav$cn8YvAftzjtH+hjBbKWv#9a)%;gKJU4efqe8?&%T6d)R`Kyk-#WTz z#jhKrL1?cZ+1a+E;e%t}9gl}aK12t&cWWO`(<}`j+`o8XZmFZ%^MC&nTm&rlehm!A zjg7Z$5xGYSaWux<5N6aY3;?%EerteRY;In&Cq1NUxX`a1YEmM4xE^10|ENK69OM_4 zz-l%7L{I_UA-kq#91EdefavJ+Oi4qSCiQ_f!_(r49SBrfh30D_UprMLAUjnAbW;W8 zSvQbEB}k12EV=Ex$Sp}_^Lsp0x3^AE?tH#47cTMGt={wyU)WD50EXwohYwEbQ1Bo{ zks*u5v?v0c>L;d`3Ko7qz?+-actc)A7A-VG;*3cg)NF9C{KxXBA0$4T;X$Om-|?Y6 z3X=LM4lFQ`8HnyMYfXvvXKFeq0Ku!<;V^^f2$m**lSV!8TN1W*hir$B4^wQcnIB_M zV6`P|VOFArG#EUj%-2TB@Z(Xje_l~jrMwd&?)W{1TA+tgIbPN*I3NA*` zs*Z-}Lg`heU=7_+RZGkSKtJqBvbTl(y>;Gnwz8_rd!*x)p?|YXGsQ?@3F2wh;%{8` zjWVs>jX6=k0|C7VSB5Sw6`FC0lg_d@|_rBv}&O{A7toxH^n?xKCyx88tsQfB`hvZu7}hEoe->;{9PE4aI6S>nkjJ$;$Y;@`0`{%e!4{)~<_!8Oq^; z?wY@#F7*qmi{>|_l&P&%EjOT+4m}zKGi;Eug+X2a7{0CTt5qux2?E<0>p^x@yLQbQ z-+>&*f4C^vMTsQgj8A=IN138BAW=9b&>_*R>9e2*4)#G{df98U_J9IU*Hb6us(9Y0 zt^4SSwAX1m(PY2-qC>8tmkDd(cRclmn%F%I9v$qnq`JF!JwwkTJD6xV3ghMc?=KN7 zowa}E`V1^>th@BQzDktG>-G@N+JJ)bk$nGtgePxN; zg5X99nF|&eW<-Eif)*%sS^f;lIp=R<0IH_-$hBp$f?5<)Y{%pw{=aq)SH2M6JUj{$ ziD;@J7gggG)6jH`qmO98*}LZpDpC@!+u6Oik>W%fpZrtjgNwj2>sk09`cFlw>5=Rb zQbdO}M{!|eU<9NiT^%gbIDx~tMZ6y@m4YLd_t<{g?!q|Sf#VUey@_)^Y6i<)awPi< zko~(JYIL$;*T#dFO;Za(fv2HP#$7*fvV=_Es($VAugqeSGiEcI1Nk$@fgj*E*c2fH~gOh}sp*t`o$b&UA^Kt0c87}SatXJm+%dv^`M zg>p?6|0+aV<3-Kb=xFX_{Ta|(v}mgJ1!0m;PIhoa15G~#*8=GT?`vDq$elGry#PTx zy4sEj0@TrOBspz`LE-UeJG;Hbe&iQm9XZkC->E0JU>iW!k{#(pF-E~l!^^6b259JE zk(59w$aS%uz6U@A6i9uVJ_D2nqEcq0E5)mNP$!ggb;etpjM*Zb{I4)BhQK z-gIDIyV^VSScxU3rY7Z;zrU+6tgTBXN;wiCCW?p2O}FGR(ZHV5T%QNkee@u=#dw)$ z*hPKExSx{B7+`>awSq*hF*jaThbFfa%4HTwl2IT6t z)7Lk1Y-j6<5b+HZ-ISiC&6VL$xE^u=tQkePdC00LD zrZy)D-^eIocQ;r*8a_reY`~+(e{HYZT|x;CYiRPy$}3i5)K=Rq9~KdDyDCmC9CULO zjKmXh@tKJf2AU3y$EP0fumY3oKwAZQBQfHWNg^D2agV_J)HSHWiR^H-$2UE&v@{>p zECsc2^1nE*Pm9j8I-s!O-lzVN|7#0ng)fD!J!mz!2Lf@p;pOQ4Kk^U`nw#ms#`)nY zdv#4q%LiEgNOzb10QNgjyoFhQZJ#+<9V#P@=rWp>TYQO-Js>e)llwe$TYpF$N){Zh zFG%j=%Vv9aU26e;@U=~&{$q}BIEywpj9y)OPTG!oRcwJb)R?wb3&ip-F-Zm8eczE3 zAIXkYp8JDEX{Zc!F#{afi1S0M>A!oCqPu&G!cv4Bw+eWIlnP8IbU`&h4SY=niD0mF z;T9m5Ml+y#ko*K&APOK8`G&;Rf`ycDxEeh#Z!2>+HxBV9YzR$j{CriEt@>se{Nv!{ zx!8B~Q%CU6VC-eSf!~?t4)XFZof4KV;GN6mOSqk>|4sG^R|x#g4FAFcn0n=(&=laP z0s7xx{0(IR73L#_DSUB?2Mps^%LC?JLRQ~{fWJTd=dTanfjA&Fv97turgIa)%?M}z z&%gW&;YtIbK4&_}Nf1k2AnX6OX84hIzOi>Gs2S_uAU}K11D`Ccdj$lB)?`XF*aQ>x zM|F&$-yd&-%IGlN{e=Pn0Rfmn6hMM8TAAsLwE=a0nTp*A_Owr*e|ls=n`dne>^D}y 
zCFxKzRSxXxvlK^Mf{ui~M2i;?R!SkEctpSqfE6tqge8EJ69t^Xe%2@J%|&DHzj%)m zrFL$|-A5v)al8KK$}#c{);S9C1$US!&ji7X+qn8GY-Oq6@&R1Hts%pyZF@6SfdmQ< z-j?uOD+OTmo1w->&NyNun?oIyqqVU9=c#|Sa<&E!vI_{5KK=6~ylY7<5ezggMU048 z2g&Rb<(ahe_{)C@vlO5?V@kOUI_au8MyYk|^Pc&aX-XW8VT)lID0>INmEYl+nT%(d z2$UfiPrz*$1Gb<6EEgrXHh5r7D9Ta)MUz!szY2oLBg=uy3=EPWrq?yIdm=ma+(oOW zL8lZ{5OC%FT{M@bqNZj8D%3oSsw7CY5U0*6U5$n+*dT;J6)nD!;>@u$GvEVB$?1?g zxVeB}dsp=eR0m0&lUrce5}gA;!rUP(l~5jhplByWN>a3rveFjv4NHL5Wv{+zBr(1; zWD=i);uH=H%BJU}DOWU)B@gVVp$!y)>2r9PQU(@`>83wVRZHDT>YzUl*x){K?tc{? z`xN|{H3W&Mpym`-W^6&$Qd}vvT^ux4fKp@uurLT^ZNu$+LzNq_6ZYtLzLWk<-$#*I zL&5z|_e{E*a&8=v?QCya6NVcy%`O#O!&=u6O{kf{z;y9#DF!*`b6m8b1JOpo>@0po zti8y!tbhE+-{WokfdUvVd8|2aolj}MEV9Fxe{uc^wRVbx)7i;!yETrF>37&}u8%-o zV6Zvuy$Yqtq}Dh7i4K*YBj61>JS)!`%(droapw&LA{p^wNV|JMO?FBSY$-p6i+H$F z6_}|66%x2kqQjO4ri0=|fh&r6sLKtCtM1hH-Vf8EkKn8{UYLses#20j>+De=BUtrwBm!}TW?BiVmk_l-qO`Zg0C zilq^aNWshV9}t*QpW1e#M*sL=Tldue>?VcK?G0Xz&2;(q%mQfoNwiX=d{=-Liv}Cd z>!Ko!D(5%^1QEkPKpTojrYlc{4QU8G&Y8f>G`AmjlCE)`x2r#m0zk(P%f8_M{#66H z;9nh8>tZtf!wIpu{yXdmr6};BY{j*UcD(aUnynD_#(M#IayvNkEnrzLj83gF9=qeB z)+1WNKYnbBPY6xEUiI4RP2zV%rSNzFeG!0))_E9n_>KbwcC}0u49;L4$(&vcB(XUjX~X-kcSm77)H+4k4uD zxI;{gIO3Lamh1I2!(|tf+uRt6@EtD% z5#>HFCstgts6EvzH3SXr`BpCnGM0|d*tH;vQmtQIdAc*JFFY7bo>F^haO@8EnCKU1irk) z4{l^+WZFZl5+mO@;29*+YvvQKM71{p1oZu32NRhMjJsgEv|^A)R;?omm@{iTn7No@ zxy0wd5K>sl4-GZ$0SQ5P6F_Zth?b?N_P{=EAl8-#8Ax*D?r|`Q{a%EU-drCD0dYTx z*Lj=N?X*nC(ohl%)ra|#BtQNg>a?Ghy3qc~^+`ao805Zi>>)6jAf?kQ$*9MDx$aEB zJfSI;N4|(*HMt8O7&tT7#k>N}by*34#ld7?V1Q%+WKY|qyF1(%SQlF}>GZeKkgQ$cBxmKb-@gag0k!`&f-(9CQ%Naub^m5-0$A5VBiSL+PE4YEy5FPI4m>%%uTNrT$%{?_u@+sU3b(h0|cZv!HWj zynb3ofn6U;p?sgLF*OmdoRf))d7Y}Y-$K{Fnj!?rC?O=R9RV@r-+np0=JeMgL|*^>#os*b zzd82byeoiFn7(xnH_3O1r6O-%Y%49k94NPR3)S@-o{F1M!A!7m^@Ml_Bl5 zt@ssiZ_we1v+9rA05>{4hU|QJNgb@ySTP5fc3d&M5ffTLSQy>5vYD-~dVx>3S_?3; zm^xLG;65Kbdi`_k?}xYSOiqXp15r6d*RL-}IvwLMQqa0n-6Y!pb68C+N)H#A1;&`K zYkcO-y$bH3aq+t2!e~xw{Ec#Oc~Wj3GDA@{3ve(~jXQ2wtM3;UprlC;m2+mMv@R9^W1?H@4S*+FF-zW)q5V5K!L?hm33) z$6A;T7R7>j`DGf4qkcm4FP8QHTHxsQp<#@$zZyMN@-{f?<9%BDZJ9S8W>90Ubd9B5 z#@a9P9xZAAV!V@A*xryGA}LEYTIvg06Jo$dMP;A1`3qM<@eL?2qg2^oV7$(?JDnWW zH_BjrwA_i5&kw;M4tB0B?o}eNj!hy!>tB9q1msJeRX^8fL5li*vJ*$`)YMel{aPxR zbPzzTrlxNzG>mEx@r@>YXc%=Q-46!8xD{ArL4qDEvB3frJWX^370sHbyApN^LTC>C z{CrEv)E_#z(GXDL+f;u90uQ?MyNn0APU_Zv`xdiu256WoJaf{2@Et9Ycy zRLDo;I>Nz<{wZhoSW*MF1i4_aqftx6hyvmTzn2c+&2%6Rf2K%?mF*HrxlEivv$C82 zK$}2fPmK`WAOuvD0rWN?9f^V>_nRbqK2Sg)b}$t4)i|S-o-k;!(v|({B~qfsg$c!- zqIE;Xhu9E*0Yy$B^JO3*gkAe15*7DVEzM~ab7^JD5q$f z^9yuY|A3IRCv02_$nMKdMWRM=*&B_LI+bM@R$pyWbM*(8bJnLu8gJ)6l7@5wiq(ZQsw_zynb zTEfSF37h~(RuDPVOzN{{i${)E39T? z=SUCBFD$O{d``}6#OY0rP(nyZ2$mR!@}1Q+b0Th=hj?ji2EC>^lVi&~jK+n;k*-IU zu$RZr5sToT)ecSlgr>06ONNeO5fPl!r= z^-9WdLEl1btgf#iCV%_-^1$9>97oD))y!42qu)!X<-ViozArvXi!RpTf$eoa$2$l@ z)I#F2Fgv8Nz`m_Oe;YiZqN(zEu4@F$=ziM|vd0c%inN&Wii=ZTXAzH9-f1l)!TGVb zUEfs`zr5b(AL{f*`8xra32j%?Y-u~Rt0ZieMb%t5o4zaFfG&|d`#n7P3oP1HJbRQ` z03w3m}ckuIH_Z;vu!RowvRg9aDce*d*DpAZ3>B$ml;7(M|htJ3F`s3T>j2;nJ zP1s%&leQp#Dt%>JFRVjl+hUJYOss+Qcnd%9_h#%BAMdUdQ}q0NTc=wI&&oNoWbIZ) z?!P>N}_iw96f#p&F&m(BeSga5~&W z*U+%F3(wPRv`ewT%sdGW7M+d zqYuI? 
z2loWngEkW^0r97=YlReP#RFSR#BOS83I*72qen0#vximf(W>g|Gq7{#)hntqcQQH9 zKZ4Q%#8Ya9hTfmPy;PxAn3xqs10-)_+wd?k+$4TO1J%cmk-Cxb?|lq|zN%!vv1dQ} zsYpmjrl+)Z6Ttfo}rtnyyFgos?&Et7hhhj>iV#Rf2KQ z4-jUmVd&~l#zQs2nt@t`l<4q#>FCxb8<{m;KjZy$d@4LF?ZYu^+={MHihqfdpNGdT z^AeG&Mm0{rJ-BFfhvb;Ubt&>bICt^-6ZWmK82*-tocqPe>ufZWrd8_Y(eSkK2%KWm zga<-9!{0@VSU;y5;wc?1!)vd1Fx;m7Yl^H{*<0V^%DEu+r;Oa-)tn9;1D_u+d102?ze|+b^}WE>fSVtEv*ch&KlDh~w3(IgJXNIAcqhpD=iaG*1aU1cP8A zD9rT^NlL}D;=)IypmK59utvh;F)6%G^AsJr;|8Zf=-FNC^MXzlw)dwA&hhCBiM?w4 zBO(Z3`oP5Wu8d;}gq|3A_G1IwJiM|>TEcLz)F9iy$-$%CYn9^*vt5oVHTA)R)ZaP9 zLIWcAWj{tlghziAIUhgMz^)FsH@(1DQd;`^oXP_qLddI~Ph#A>gLqpzWUz^T@%)2O zAdva_PVM(d$yVhcT!16Jc_B!d&7dsH-6JNKRTVSfH|na=6BfE0`h zD<2PDCL}07%Cp8`)F=vdD)=f_S4nJ*@f;zL8@669821c^&zuy=YRHRB9^GZWs{J(2 zG;ye_3gA7`RBi$^P3FloQD=d^ZWvf zR_U+R>=(s03t5MY&D+n&OzG&D^e2+=_%4l1ixOw6?_x#4WL3g1u>{;jX+h z0)j`5^r^4rkV~OSV4-?+N*#E;G9DY{))9O#G5?1S1d3+;nIZvvg4;;&k+WvA-g-`Y z=uT!*_{p+*#G0wB|!TtVaL)&u&%6{e9$9=q&Nq zpXJnpk5ynI>&ro+^=VqX<;bUEEsADjjX#AR!qg8Qy%lQx^DBt<2dvv$GCv!|p9Y~q zzNwDItxR#@OdC%xPq&#~J^>2UDIU!P;qPcR8wYh=&zk*Q3ta`!ye?A46YTH5CLJy% z2hTAi8Gg*a52e%RYYw6YCUD`~+cy>kj7ZM|rl)bs_Q*PVo`n4EKO#V)1^ic6_`m+a zk5e&tH22SV;RbO$psDk?e>E1|eZ4lkv4v|or*5E$Wa}X9lGJGwwU+x|$M;VgKyLBB zakBq^|D;v>4O}wfx4Lx+8S?_;qo8XTA;>PjThqq#D>Lodp2UAWAWFt`$oTNz+PD8Z zJ|Lj1f627@3(L&h9Eo(cM<%pE%4UhV>CqFd>r4!)C3Z|WTpOEDP5Qs!jBqhF=&?yI3889N%)%zuCaxmA<8A z(`W3o&9jdIS6ufntD`tbifx6MJ9C#b+xTetP5OWlOUgVnVGbXU^=USkPOz9Y z&9OS`RtyXb1RvdmL=gDigdmwC8Q;ED=;_u}@AJgB3nN9;yz>_m-1CAhG+kpJ8sD_Y zX7=`-FzDLeo1MF}IWrzou~U;)PeYwT$hEC#(r;x5-&6=#pRT&AWm)4IfzMtu~vW0Vk;hY!Zn%I#ch>R`l8pW}IhI z5eB$ZP0hg+*GZ6Nu(K>?w|2;X$16Jt^0XaTlh9x!RjK zC9NTXdr{j>ROvNy*e7ljZ4Q!HvYp@qn8v0~;bshCG-T*`Mu>W~%? z&Jbfa4_1(lS90GH?OO@pv9JOF!2KXUn2*BY82xT-wBW4dQ66Vrj`h2>njt5QC@`qt zJ`Pt(bhXY^0Nj9va^zr42^8p&&Af%iYt0`$+*;DK8B^rE%GzAdnGm2?i55yl#Sy|o zAzfqi$Yx)URp69YXMSVgJv;DB&%gKO&bE5kuJ-bro`e|;Ex%y=EaYWkk=PbAGJ@I?Ii9d#^o2g4F4mRb zRha4yWQpg$1EAAl3!i1MINMZQSo5zI)a0*bvl06XuwzZ|(>`?%sx7VaJKi(N)rxg03S@>+kdacy?~He~x2=y$O+o_ZN&$btr_BtWn8jKX;Ch!KE~%JM)t zI@G9u@ZNOiKLlk&qjx&;j=dSeDzoNoNM#66opG9zJ|a+3ctFlu zZ2T(}O#fNMCvLr9aT9J%_OqZv4BFm`>}{`_HxjAIa$e5q3iNQI+PO(t2yxbsDdJ7A zu=YvE%HIbDUTw|7e&gIwCScS7~f41VE&L?73F7BJoWqrM7pn$`K4VkV4=d z=Z3*g;oWRdD{p^b@*s`BUS8gYzHe;I@pI{o$ej)neeNHo0tf3&TkT~VP`@ucSFp8B zO-#Jy>K<3{^N&0Rht&k&!l9( zO*sBl;q!ON1gAH6xHTa~{fL+&aCzOLVl<7$>-eBHvs>rqP#2DvoHYi@*^LS+Xhk|& z#_90RsE6*ox8~bFNz4Ai1cgcjq5@QMrhh#MDE)#R-H3@)hxnymOT+GkbwjY`D9p7T zG1zSkd#-Ep=9=wMlS6+;QL->Kl?>$puLR2pF*#LZfXvs~`P50;aKq(ecn%nPO*rmr zyZFc$Ry0L(2~Ao9U=}F1pB~wGopN~8ugPgXQrGwUT82D4M^wL(Qg6l*oREBP#aSm3 z4z3wCTu=2NC0#v@^QupSf)cT;#`UsL?X>+v`>nYX0l&k9mX79Eph{$d!RuTBu;AMq zAB0~=;yeIYU=T2%2;cy?FpUxSA1eVAG!RC?pq7;J+MZb43=2Fw!h-&FHy01?U5ACR zI6D$~BPu8P*)xeGSncV3Ca1iGX<|Fnv_I4^Pl|Jrn%Oa7*+3{mv_1|PLuY$|XdxHg z2C7tzD&oNDSuoh}=srNExN#xGL*K4CG`;>TN%T+rJr@eq+COqN;aamJBhN@CWFOB( z6a2HU0YjaH$8v~IK~xjF$nJ!j zn)(VP_iI^7q6O3;gJtWcFjr_9auQ3v;D>klCwB&o8TTCX;CFIZ)-t$hKb`_0^<}G0 z?wbP4c`Y^M(KIwt6-c;B##}DKM$y#CMMy*|{tDeluZXa;Sm`BH!e%Ep~W^QJnH|1-uHV=@$#_ zlWSXc@=z&C;;{&wG5!N2gC6r0J5BnYAYF``pY&$J@Me3I*?3XZ_aqz$yW!Bp{@>%; z!?D4^MWe1vHjeM*wgRWHJ$1|B3sUHB%!lOFE$-+!R2@2VAXwJ>kgppNC92&jx7E*H zg0Ej6U@ZJ{BIf@C?lL+G+T7a`{|k$r9sz(wkUoNYw;t{6ZO!LA{;#9o-fLT2T;|*G zc5U9f^&YhiL+@|jK93kc z=_|sh*H|vt@poq%JNW~A_f}Z<+Ph$^qqlW0M566^c-|>@5Kk+hX58jfe@zY>zdvg0 z0Tl*jFr$^Z=T1&n(>k@MCc_MKd-KI!idVw>%LGh5#K+WC)zLt8)y$GtSg5<2d-1Ty zG^ESm9_@o*G9l8`WRb9#g7Euz;+T!)&;nnw6{F%-+gVB);3I{(2*I;p(ryaLp;9NQ zBi0M-z*CRcSGi)!*uxF7wz@ z65gga8L%^i8yp;796bGV@$A|G#RDByLMZN(Qfiu;-$R9rUudedpzDl=a?{>T>Aaz- zy0>>`TjCb77!9}}Z 
z61I03_t6W^$_4mMbgJ*jg1`~)9T&%BE9e-MXgcLuzJ?xeI__;8-UKaJO-mhw$i+Cg z?w~w237$%Sel<2f_Q-Po=0+9Ir_lz}5(lAp?Vg3JLcUnbb(MpT%g(*^-ERcXc6V2P zEr;;YG1KXe`U_Va%o`CfpGRsxG~XtCM2vYcNNHwz_SY;%9UEaV7?KIZ+L!wdjh3H; z#`8D9pKY0qd((xBzmJQRkuu{~jCpAN01NB&6-F(u5b358oXat+HmobvBVHn@@@)FU zB>YmH2J3GlO1SyhoK9JHodx`4KBv-KWOqO|X^i)30&EZ*{Y`-$CFSDfw-nQ+nTreb z7FB7~8T??K+_QXIpud4rW^WM| zq0~x2xjcRujJ*DbYfzEUurnZ(Y;B5?y?oC@ww3-;0rb%Lc_oFLum&>6DsI4;h`BO4 zS&UcanzPag+ti#mMN{MooSNLpzZ%+bJo+HobZM!(3%x-2kJjwy+$|uf#ZA=EV7lLB zseTjhPA1DeIMUTI5)OM!?+UW{l(S)jafv0)ZDgC4OWE+W{CK-ADO`d0CVv3@n3jo{M(c9vetb8zoo${%<35jKF%h|Y z6azsU>SX*r6ec2Fu#%4t)W}9LbLj&Lqv4)@qO7YP^}$P!K{#ORKjwugRYQ zuZzC9e12)YPvg0i`>*)RO{KSQdnHGC7*?nkP4BoAx%mfP7f{?< znXUWj&N8_^F9GBEdi~^mtiyVe%J$JsC{XH`c7&rVIdr*A3IjP}UvH)72*nC(hJB)a zZK)QRnoTysW;sO<5#MWdRih2;yC#Li;a$%ZH8ek4+1gVxQxX-xuIYJZ4IipPHJ@gq zCrL`*owA+uLqA`u@7~*t{o$HT1P!%PDk|QpUX*+;{h~=KOG&@hTOt*Zv@WNsm3D_# zg@m-0piIV`Z9A|b_43aaJKJ16aN$HLUC;P(76yOl3q@9qIK89E(;0HUiFO}iVUv7{ z@7`~dN8S^`wT#t?@4!2*26N9$<6fA_P%>TD0mVCQS$|ciX6*Wv9+{|wMGra#Q{w?P zkK8?$fZMo^P|oe=@4(V!yKZxBZtiy5brlI46J8XY7mpecG`OK&YsFvDzR_g#NRPl( z;O*oJPIP6-^MG)F$RL^+me5GdKJ}W~``ducbaO#j{ zyo6}j7{;IV<5X0^!67BT`!P}?RF*mD)j7C4f6@9lONo8u4;S>!nV!(j1Y92&kfQx+ z+j|O0(24I@QBn!sU?sbK+Zn$~PzzqSBdypNMk-a%h^bn%&v*TL(9*znk_0FgAp<;t zc^TG|Dyj}|286A->c2YLQ#>}vfdF~1Q?1sJlKDez-gT4UU(0oVU{2j$Da&N}XswyzI`M9cV_c1YK!cG-G;t)%zYx^@W1uY23g- zN!A{dN4ty5fM}KFAaExa6j9%2N0gDH6Y;r(hajnij$OYcHP^HYT=7Ng1&U0Pc98v;25`k(#%z9cxsLuDtosHjqD zmwU6Dy$&D1M0f&2_wA8_&9TNUSY&nbTEdjs*&Tpid!$;0-oOMEMX!2dbohdvem9y- z5a-$MA8veewc?-s;!PQr)oI;2kMqpdl$4l`uki7yC$9@6rl()oh-@zJC!G*-oWSuiF?u_DO!^V|e9dx-;C?Td; zpjB9=4h@L!$2+3 z>2?r863D7NX(ea}R5n^+8H;zILtrplfAsF*B>nq!)68Y{VyyQ6etayXp=U zA~^#uZ>y7#fx*wK<`)YQpX&u!@(CSU!n!FvNwmy#4f0}QS2;O(GrDVDfj1{!#VnfAf$$k%H^jnE4qFRu&a?!Z)tx7SJHuu{4p!s%@v>H@ujI`4Y< z!+7ki$BUT9Ka)A0OoTx*V?)pm3>e+E=eX)=zHUkaS_78VNkPnqT=@=7hMs;3yiPvh zsiJ~{j(&>h`}Yy;9d1Go0qw%vhH4-2Rk&;13!MHS523f2WoZ(6ODI>ug3eJFR>=P#M7Agkh?49*8r1_s8Vk&$HDoOJL}B$wE51mCCW zNcNX});QK4A;@x!1D8GsJ#X98-=?;$YPi2}61xg44u8vdN96YQ^$d9}EziP;$fs}+ zIW4WwitJ^#~X5rv3@v^camriBj+__tAS zFqxn~OcmLPy@|D&?%Fvb#uGgP%w;FQae-Y%5AHL>Vot1y;q2VK2nc6z%8MJe0;4!N z`#(lR0dFkB&gAhjtwR&Gwn|2`zSow00>dAMXTNdL6dKvem6jgSzRmVwcW5kEQ8k8H z+Ys^2DkE9`+aZ(GsR!t1QVD;k#!xHeD69xYK%_#t>^B9KW0QvB z2WemRzrmAZ&r+&Ofv*%FUZlpWo^L6%0*#2QDdygOSimcYr1!PNDPfi>T2`iIAA7nBia z_vZl26`yyueMMn9Fd~1l9e5qIF*yZ=#elLqL8;M$SF~2UFvP@s;(LGGJfrDmFZi~; z4(fCxAP%JhXAY<&UPkP;nW^!Q5K4k)qlB05eSD0U-mf>~r&fzs;5iFJ>=c2*R^PW$ zWYR~+Jpf`pMfCw1jkNacb{~mKOFP3|PG%fQwmb}Oo?-oz9yukgju`|khetiNci;kL zp!r|?Bovs2fYDJ`phk?p&Rf#+@kPNEl7D?AFE8IR^MG2QeyrVR^)vs&0-IlLElKd| zFo;_8m}vNr6dLMXX6LMov#pObqHLUK7%LY>q{3) zSj_=o@}dX3#gSe!FaAdR;ak<6D4YFhMd<`=z5dfwsEO*$rWcj1ZSAL4`VgJwkfYS; z>g?=%Phu5%Bvh`mUJi`~_6L;rEXNu?z|x@Sjl>en%ImGtWUv@3^^55k{uK6A`oXdm zBVe7M^04^RwFkUlbMw|8-v-xeUAFKh?35*T%tQA&z2*LI4LJyqw=C~hS%!5HSAGCh zJorkJH=4Rio8g2N8i6=r!baI;4XQjI+%=O2){4hGcDIK~G z<&QHKL_(0QX@TqmlJcj#suNF#bSN#@fMSGf=>f5_^{A@vboB;284G&VS%b$<50?o+ zwMjnQkQhm&@CSho)S)_yKLu$&ldi>Z?PvnzXSKDbiqhqS>L>^ zz1Pyx3LYmDFL=6lq8wIMz3RB?i<&v}P$m<6=_I zEttUKN}P&x^vqMMsoyL1j;;DViv8Z!>Y?p!HMK~9Fi7L<5mmYfQOkIi8wc;tk4o&HA4vE4v>Hg`RE@r=!Vu ztRum0`%*)r*2~-A6PJ1YmpoBqzBxcsT3~G0UQIuHO~H-y=7OPz)F2FmeHgFnG^3>q zVMpDr)HLo(qn@&>aeP1K!daAbB4d?*Rz`D!?c^D<`kn&<=Z)ilgEe=wizfA`+6t^v zBL!*YsXTU>ol#TZ-iVd0bz^%25r9HUgl(q+t}09I(VX+(3w$UxUHR8AEyNpck>Nen z3{yMhx(%B+lek^^-wjbnf%X`_7>&NW+K&TX7$}0yr(Jo;Kjh|hjY zs;-C4_ZW{(?0-&lMtOKty#)o+y&bUg(nLAm%u}rCHlM$UyLGT5*(^D?VNqZkH10kJ z#}+ObUek`bdRL{S%O!%WsbVlX*zBg+RtO#|!t#cMO2oc;+hU?p#!LfOv6LbTL6l#s zIYNObYk+jbR)=N<8_! 
zdz(}83cI#uWEQ@y$lWLa`UJ4*6|EA9ggd8%TzYps@4N*;G#sj;xnIq;(R~?mWZ?Em zPRu0?L708o3U+d+TYwL}!l?EFMmo?oN(5=WNFfvZOim7?9Gf&n?hkMD8js`Wt@~(O8`u+# z;%PHyURMa1C*A41VVwv)adB+#!@8tmmRzTDrI~4XA~izMeONtWYR!3 zl>_{a4!M?=!1efI=@{Y!`%8WLr_53@!!;*dCtYWwr4 zJKqL!Z|6TeKpv4N@9)SN;Y@So78WLDKV?GOU2BFN(?{%X1@3$zr)i-3J6Sf~0mb10 zlgKF=nQH}%)LrGAr&jCo*AOGg;ll5G@z>Kjw5Fi!I@RjXygoIWdh0JuqBG@hrQ+HR z#P0*Y%}tzRfz29!w+1?M{WqVAWcB}tPyfRo|34ob#yaP@(2U1DBBngUGl$JA^SjTH zYdQaMiir>@?XiMhzyiI8AeuTU>&|WW0hXXpZ7a)OqT7g)E7V4(ib#zGl)|*ID{i$P zX6t2S;~m7;qm)b)*4WOd%=%{%@XrT{pf!Uf0Qk=NQ!0K8sGF&SNuQ{*$UqIi?RIt@-GkqY z>OYYI96v*q^M9IVQ2?awO_JWIVU97tY6~Y6p*9TkXksd}Wa3rB2@CP&ZE=e#Ff-o# z_Y{lkFak(du;BtC>>hTf&*0*@SDolT+iQo<$xCK?wnvbJQ#|;?jRI;6R_nG~5LZg@ zT`!9PArr%E{0NBc$?-pq&L2ZqiaaWWk(RtC88ADDKt=Ywmz;zg$gSt%YFxTvnl z?ro7RO9_4gVg?w#m%n^{m272e zTLY%}um$&a07~R(V6=vC&1yO~bQ8+Hd9o9y==kQo-_NkfZhVn^O3{Y4=kne*N|f8< zi1?c^k=c;bn~Kv*&d2tTkuDT9k1)XL2&v=!MFG)J zUM*bITAB${N)ZM<1JA{UPswuhxrjXd`{MO{w@zK&BFnm&A3OU7XUgBXJ?O4Ts)hI} z|HUuWjaHF}LX`o#xf%+@Uc!-O6#w52zvVij^o3svk>;LBznDoYh{6CX4wN9Ww_ywZ zu#*dP)x(AY0?AsiB|f#lQoRj6)dQ;=s88_1N~-OC&7ut+siuANL#jRvO@w3sEvPM% z6e*B8+UqhU{ffX<@&Z62$UfYFC*f%>PR{rYfyPszUI-Sz`t#H|ihFExlmH+@L-(Q1 zD1bus1m1K~>|tov-(PXEWeA1r=5cz>kA2t3?dt-{qW1?~Hm`t}Rf?4T3OXO3Qs#tj zj5ZC0FV<>ZIfaKf%Xx>L(EY7W~Es$KrsJ=G7Y^ldLUwPYhDuEI2aK|n&P+DU zmVSN%HVII%9Z^Ex(FM@5!0;5eWij+t-9evl^jr+cbVxFQj)tMO@sI3j%}&AnTa19g_&+odQ->%hod zM_py4f=ZsfP%Q)lk3Ql$$Z$iOHEujETl73mNBrL+;Z^Kz=NFHz`b7JPjUzQANDRnM zAmCG=OoVDI`l@!=U7+~G3jUAOA)2H$6Z7+NB~2oT;oelit_Uv}K~Nyj5Kxc(=-I}W z1B+FQ#AUGts_4Dt@c>|QsDK6mk4)yl!~=o*K^`7?m*C|c2x5kaiR@2*D^eTH=7tMB zoPf2E11GNaoEobUpflQ9ML79{TU@6 zSJ+vl%jSOldqg-4%kMUc*{M+<4U^QFub|@Q9)ynN+Cbr;)<%k^_quZOU+*wa<9;xP zc#O~(3CxGSpakMWmHXti1$ht3-4h@z2*EM@?t~0ntCMnMBQEUj^>_QthhDhq(ZjBs zEhuW+G)q2!k^$EawBsF^Qb|B;A)%tLc;aOQ)cLI7Z!j~nTjg=Q)Ue!;@Ni-06)ZU2 zN5n@7R@T=4TXhGZaUMRj%{5>p1STlSx15A6Z|gs!{+s^K)*WDHQl3J&lD(SXz(=>I z?zNeTxzRA2fQ^Ba0l-j(kLmmZ=;$}e4sl^`z_;|@`mYb4-0eD;kp=$y5^(L`tNz4G zcwKDBFH~18T*O;lH4A{^A~_ki0$vKMHuwSz%0_EPz?YCN$id{@U7T*Mu2zvo7^4>> z2M8@}w{0#7QIx!?@)OBY%uqf5v&VTLw)fKCR@|}}y95Q=O3`Yg4y%YWHLxx^XPMuk zF&9U|&C9W^p%xE(X_)gH_#`Bdz(KFW%rDaP`>m`bk60(oPgL@(I7ePnP**43lFX@? 
z?W{^kD*-K}A?Cb?{PRym&4nNecaKgUtH%Vwa&kDrY#a=X%~b7ibnVaCZnq1t7j{#( z5~SsSW^83ECaB9c4jTTV`|VrlEsJaGDbH!I<r`jcx`b|as0Cgl7XfqhlI~fiN_*ILVketusD_dhYh^M;5a+GD1zTmnIa< zhT&a^x3qA|;Pscxw%R@^LIM02J%gnOcR8g|?3`eZTU5wzQJ<69FS3*mLo5kMuu4(_8;wPxta1jZ#LEl2_KSz!}Y0V%{e-kyu_oSVJ~%3 zPVX=&oSrpkq-N*yvnMHjxD#M)^vM@0%AKMyaA@nlga=;X!%*^RUD(sEn(JNZPL$5V z^t9BkcRJGX0k4|v*I zOpHP<&dcjEds^s>>@Ef_Zg>|1GaVRbaKJ#XGc9A%=1&<+czl5t%`ovkBp!r+Ww~jO)u84>T z4XIM^@gwh`!=s|vzuuBQS#^+i*nWq{i8m6q>f7uuIGS{4q;kP_70~{4qoO}cC@S85 zZT`p2cx9v2`*+eH4>xpDTV{M6)x%B)gg-Mp9uRTV3U}uvgeje~TtC?gPThl6Z2>pQ z_%Fl)Z}Z_p&!Tf@+-hiWvifgL(m) zJWe3rUB;<5wPXUxos=Y~ArQO`0S|+&@cOGBGrbbj#X!qg+o+c}y{MDS3x`x;Brwa!$oSRPL21@roy+4fbx%CS6C*`* zu)>@R<2hZ$;j!LszRh+CLD2uG>pG+1?!NUaqUKc+LJ}?FRp!-WqKoJ;j2aQ$Xro6b z+KA{SA|!g9C?Q%zbfS0B!ia7JVHl#9aL>s7-*xYYYb|Tdhx2Ra?EUP0_TJCg-=fej zh`-fBUBKl!bl}nG7m;hm#<$EXo|ON*hKZ~|vo4PcHZOpJ_!2`=r2lFgU>a6%-;HwE z-f`;L?NY9^-;MeZ<7oJ!6)REQ$2OqW?{e~C!ZG{eZ7DCjiUJlUcNHW~Pct#@ zjE7sz$p<5PBG7ch=x@)R@}MRRg}qICU~n=I?KFNCq1s8`DW215=)icC6)^V;XjyOCHQIKeX z>t^?_{6Y3%+^uw3$Cv9KC|1w0Jo&sTAuc{YIg}Ce!PpVx9UeYZSH}CdDgkV9xtj*5 zd{T4qY%n%bp98QK^z`&F;P}8~UD*z8K<;A(4t8`V(5poDP zE2vq}3&`p|(HS^W9${Ja7NjGC0a8$C(s<);)zvR-uJy)a3k)g+(nHnSRwp-DjZJQ8 znYgW4pZXgu;;cGLOHjOtIv7q<^^Q?AJ4jDiS+Bf)lM!7IE|r>(n3%4I3=V2ws_ZA3 zE78OdyPuDeUVEiXO;szE>4tj|p2>LKJs8v-xtUE2CcZTg&w8Z@&$ZAfH{XUi+BDV# zCC&N7HJ(scu6HRkoOb#d4|Xpqa{^Lgo!*sQe?zF{enP^NdW1;9;AxC0(0ai3^Ps7_ z(pYM;xzs=Dj!9u*NfB|UtXR2r_^-c|Z;7EVc6~HhH7;z+8gr-VZ=?24syUw=MpFgb z7#2oH(i|k*_nBFys#<0SNa_WiSL>Q;%US49ih}EH*YVM&57Nc~xN6%}Xfc6uVt`+6 z2N^6C4y&8WCthdikuoaq!E_mwE-xn+Z}*l`wfT)i{YS5o6N3T35L+|w`dTwwp_-W~ zceR`EXA&HEr)JKL>wo#OKSd%G<4)lZ&K_K|_{upnKF8P?Bw7NjTW-Ha%W@CI^2R+! zT2UY;4QC^xz=Bw=fK@^&cz<;NRlff1H!JJ0ni)ae(x?54cAjo1fP`YwhzOuZ5pg+R z1r|!jP&AplK)QDs?~T@qVUf73LPoYMcuZUWLb&Uu*ct)=C;+bCoh`Xbe@O1PtBa5;&#|~D09HGYno|Fx|yKjF2NDqz69NDD&~-> zpKB|=@)nVlWcH5^da%dFXnJp8n^jN*9+SL<4y!XFUqwm=^8s`uF?;^W;fAiWHv&k~(z zvHZzZTq0`L`o`z;UI&s(;cQ zEw1y|KwWJ7SG7LlBC>NoUQ3gS55Ea57X!s{mYP=d{0(Ws*3%sR)f~a!|1B#t?;9j% zWjY|g4$LTo#|ohT>8`spMI>YlB;-?S4i(Qt1MZmlH1^uA@e`smSP3%cv> zuK{{mRvD_oYD2tr1v|s3e|%Z6#E6S0Z#gfA0v5X{3}U0f6keH-QX++F2bS6^Wm4iv z$q!tlbbt2q#uRaz8-91twhJ>c5G?CLJ>EO_b-Es+RITKaOq!^lUe)>2A$KJkrzCHUYW+ zr-BsNZ3+$palFK@Qu=bYJyILldldDB9NfS>#W0ITaK*|1z+OgwTbE!;3No=blmrOU z?;t2maDcXkbw3!eX~3!3BqD)*9*&a^(dW<{5kPJYj_Cbks;-28u)srnQ!adg}@xt+b6mgG-ymWCZlUx%I~`-zNU5qHb9ybkAPBTEH8jZ(z`a;a)bU z^V__{=Kt|!d>gA3IP%i?A?!!V!To%|lwgvbdwbcli8!s=EG>4np;{j_*L%2@=dr6Z zB9QR;Pc4U4VrT-AkKD6BYsbVbr~1i{ z9Ko`8{MVg9#BY>4t`oQVC33Xl!{CYwS!uzpR$2 z>0oyb%}7lJw|*(T+wEgvfcq5`U-({Ca zp!kj>fV~qzZHfRd@miCUn30#F`N8+8zG0(J7WaV6WAT**Xd>aRvw@ZKW9s^FvV*Rc zmgmA8VL~?Tv^b3{nXcdt-in{DPm%Y3|6&3W)l(SJt!8p^+yDwzojs_?CKGwsypwb$ z^K+K-)lu~QC7Zft6Y?f29U(8 zasEP)VHM3^S=UhZY@%Etg(OHMH&A8~kFCEx<7c~2t()CEUMkA$?QDRaVfT_*7LX7k z=8|c18S2PL?jm-DfIz6WpV<;}0NSWz+UgLteb%M$MzUIiJNqhh6|xtEh%s5xKS_ci z;Xj#xke8qUns5oyHdWb)C$GjpeeCK?uq3Dj0l5Twl~HJsE=-$=Yb|IM&3(aXtx18x zu?KdiwUAP!QD~yhG!Gpi>p;5~lW%t_3#F2ZJyvYf$U)P4rRL`(E-EJzZqyFPe#0c> z0NcGeaRt@AVW>Ak{#}!m{_eWxypc>irtbUIuLH?5;uHyw6KO#4tV^?@FC3W z+MHrQ%kZH>P^&WGdB%bywx@S)mtPQZ3uVeWGaG&%UukES=_rjL21fhN*$r7Uw&0V! 
zTn8;sQ+*~dDe=rW8oZk_ddlfwXC$J!<~skhc+#r-2~lWDu*4-Zu2(2xHneD-2K`Q}74 z_!3&Nxy){bzgOMaCg7t(P%1V-S!ccbrh&Gpq^X^;cL~!Sk=8m_b{%XkBGLpQ64${z zOq0WlTRVE*7`=&3;7xt3FF7*B{#fqQ=an%+Ca z-txDv)m=bnTMgw$T`8_gFUZPL@tk{VJkZL?JLxS%TJJ2EkplI)TupMQ3*yoJ6}llM z2AayCa7Y;)uTU?llYofRz`FTzA^PSP&BwIIOV7iOOKU#8@|1e(KELEe8MU)z_dN^| zz-IYif))p0_HezjdV97qZFLnT=H*Z=U^Dex&<8&WKZH6cm{?p)f@g2`d{ckGnd&&| zHw#;tX?R-k6%KjQot~&r^;F^T^l52aq7LE2f;{>FKTpf`myO^=HyQ8k2{~fS#{im% zyF4|5?pdhUduFJvuHuP><+av`S1tY@E!Y!m`q{a~XzCeFjk;?O6?Z|Hx8_Zvf!Z^~ zku7}`M8T}HuPv=Ivg^qHC}A?5im94*x{MK=?vz(ov%~0P(`iMm z#Kz4n7MJP+^*|1Oc+cmL=lHx4^JVkP0-6uy&6>~^JD=63k4OTp^p9P2XKN;}CQWN= z61B`LU6a#)rA^4iToPxQq zCWJ0U!^Zti* z2!Rg-$h>HOO)&qX*Nj+yh755fn#1dax^t)|p6o^NNp3;n;!kd_q4BSb+e~@%zws=c z&0YbDDOL21Kp!RccLGVLnSwKoh-n;25118H&i#DweGz5Rjl(XE1jVD^mtrL!E@G zby4ALd_YZIop}-}FG#6RU-;mNn-JZ|S;f}WK+TuRIB!Khnl68`ZcdMlsiH>$TZLu;bOAf8RoxVUXNiGr)p-{ zs7*z7@>zAZk{xj^Hv7Cg$By0mBT)s8b9)>|`ji9&;ZR@&`O-u*_t^kevsn?aeqT~k z@BW;OJ;Hy;t^=R&?wzU4gRB!wxgGC45Rfu&$)$Nxwdl0_8Gjsq%1ffoDi$2iPD9v( z57V75Zx@(>^M+^|y;qtQJqaKPI?tz3ScZup`dxOp29gy(mxjI2%37l6%%dp5CbQn6 zEfRYAm4(qyRGJ|T@-7EgjiU?d83DX(ZGLZd^*+h`WH%XKr})PYx}Zhc3;I44pE(Y{ zefCrlPgP)aPFo7hc3@tPUACUy)<&`RbZ^)hnylAc9uk;St_^d&w_UK0u)H`#L!O-F z!Vc5Qum1lA?LU6Yf7?W*ZQf9=QO{Ma|Kv zf51a^*n%%4Q18*>_L8E)59CZ8u_Ax$vW0#-5fDuIX@BD5=`LW$rwhkEC_3xCH;5d2 zd-0@c`nka7=u)}o>NjO6frHV`wz9I`mbi^xo!xV8t=?P;y(a-D68cE9$#e zP^aLfOMV1DDnhoixPqHqfz=$W)YgGR=%FPbeq-pKMQoKw5m*ZcV7Hp_SWgvzyBv{> z&$|^xLotWwv`VQx1B4wBqsTRkhYP<77Q|BFJTIR5}p1g8hm@EIqlwaseZl)Ft zgs8z5Oz4Jiwu=^Nkves@-IxOmp+%b55_I>577RqF7JXAL{E|YIL>=(1u{xxQaybxU zQYdsyEHqXgLUJ+akE<6_h$IZ^JlCSkqZf@@gPu*(H>*H8V(${{CAl^YByR{B>!3S^ zghA}vE?!cX!>n{QvJg z=iC$bjq&aq?~eEO7`kBZz3ThEIp;6t!dF)MIr?p)+m|j~LKk}>EPv_J6(9I_=jIJ~ z=ZZ^WIBf82L{w}<RHamUF#i5&|EM{bEi-S3@)35v4--YNy1-#x`^SPFNRE~gCqdi~=5 z^}E0M#0>VWC!7td&I%rjRIOy3)^uNv@sW3H z>39cEUU(0q;{Scyo%9CsDFx^Ma2WL^n;l-Xfi95sXWX>$T+cXt<8*C=MIJBeMdo1(2*)WlTYs5b?ZeC%CnShHiZ@8QD1 zuQ+<7ruO7TSwYt7SoHdYY2D?hG~d*h7E}?tmA6Wath$3oHg_zy^t)p%cu8h)D;%A= zwYUBKqx*{mFz%&)i=q`;Gt@m5k?d`#73xlYN%qBgA*e&M;KSTR&J*Nn+}rcR45JGd ziJQhJ$l*mDK-7@F6Ja^8hGwVz)i#>jiCH=wyJVy@iz&j2mo;XhZSwZ_@JCX`1^V9< z%5m(PdszvqE%B?po=DXckjS*_wH%Y8sg=Y(qg@)6tKfAGl_xARP^O%M7iM~+EZ}Us2TG3BP6az9!UKtwmz2@WJ;7uTrkVQE@<8x7|=bv-CSdU?(z5>!`#yPtx6p zD3JKi!dP@7_Ia-vp35=DllPY^`CV@upszm9j_LRH4P5Y$sh)Ikq3Grpj1F3RrA^4Q ztFgX3=7EJxmMlK!7eFfLGcQFvWkVDfETK20OwZ%8`T$wl?tTB^Fi%-#HwmC%-QI%*LeHWTX2({5_#7@slVsW=*&Nw zx9Gg;ClzKOF_2d@(7Bt5U6+uea4uOgG5pyN_k)k0?@Ukng~!O+yeAELnq#k5k?B}m zN{U3$+n$f7AFhzO?{mn@_Y(fDVXD8er@L8YUts8dZD9I@oGbrygrhH8J&e%2`f>cL z``51D*j!uQ>&txUzaG!W>fU-g!oA`~J@Ys9!W$7oI}sMttTCMN;&-$s! zS0qA?HzaIKNX&v!(zRvFZD?~EX14MUerognFm8;^9T~ zJs9^m{7m0&Dp1aRmTF{UGI0CvAW1-0q7EVpfUGpLHM#F#Q5Gjjz~OU)t8ePS%JTY+ zi^yBydzRV<8|vPhM3rtRROlKt@u6*{ai^+XqK&^Zo)PmF8MP`;WF?gjb{cmJN%uBs zSDy5cbcrq-?n_CsRvT6#FF9P4{=^5nPyI(7FJR;$D3Hb9UX1gm>ChU_#NE7;s zuIi6%6o%(P@zX!Z)EE1{qO(o7`QCo=Uai_CxY0aZ0Zf`WpQL~P-~ z3yk;asDq5Brj=drNo9=E!@kH@pJEBc$;_TCUOPt{;wj@lsiUp6 zJ3puExE3Ew$TF*8wmcYdf;A};*)x8ztXPecC#4a@6t zsyq&oD!*n#&%gPE+pMb<;=+=~Iy!T4Q41hux%0Qu%=aF(b3>)+N3vU)zcTau`T61T zF|%323wK$fNra!sma0mrlWO(kW9aZXIv~zLc$WNx>YKS+m+_KfnzrSje$OsFood(ZJWc=Ne;P3x~|3BH3-U)HXiEMBEb=lss*|bD9`4z6! 
z&h}8@1#{uTqx*te6z*Iy{QL<)rmtCTR%@Wn3FnyozIxDGq0H45!Pwd28;qUk0O^0C z{;}}6T*AqLLQ@by!@Mb<(E*XM)R*ntlXS<_fhd=S+DHOwhndDJW_pQL&DiZ)T8ntd-bJK~bv@8bjF}CaCtn z^%wejp0e(}!E%@rE0NDCghus9wa zITV}r2W%yK|Grnm2uJ*AKGN8n$n<~-l77XX&gi3kiQUo_H!CZvtz(lW<2ZiTpaa1e zkKQG{1+O{&v+a@HMJr0uk&?91(kPCV66T%4BY??9y#=)7H7B?2UXr=LcSQ-lqgKp{ zDa0LvVfspo$7c4LxjT0(Dt=g5XzGq&igfm!%4uxIU+#wwhKtR*I!ZKke(QGcEY9Ww zcnIUW5k&P!z=Np&`}VH}xgw(>9H`xsQ!^c>CYSRqZ>d10;p1XhI8vUNfn5IvPHaR- z*l}ueIYcgB@?_vz4Ex*1;%|ux=iOSpHrHS}HD{+xRpNg7pJqL^FcqHNctp?VEhtdY zQ!auH*+NQG6i*`(6_u8bu5QI+sk`^PBep^JiJ!T0Zu(Q%doP=U^)}9LhQ5@~cVRwr zjA75}Hr$EGd%I@vRw1)(c8hYbY~fOd(Gr1I?I4!NO-PZLEA>ZuT{=45&1`0~T7Ldx zB!W(NCA!zSc9yM!3r$bR1W$BsonVkmtyfpQ8O;-n{jvJ<_YXyDq#U&2(r7Ie)_%8s-Ks^^fiZWQ!EpLR*@sdVMZa^;y^6G;R3fE85Lxq1aN6;ZM?7VNI1ZKE)$ognfSUawLLIPqk^!P zJwq;=&eh4*ur?GI2M=$mzr-FBVBb*H#vLzhX1j)abx&8e>yIr2qYI-3ceVnEd7^-4 zc?Tyv5TFbrA28RR6mUIUj_pU_x?H|x=@{93hs#y(`q~jT17z*JDt=Q}Wk1&Cr?|o_ z6&7l@F6aYnF0k$?A-mOWp)6DyKAK3F@!#pn_C#E0b2~?x`EQm=)FjJ=wcZKs2s-+= zLvdkuzWlmm5;3Y;AZ4*E#_(V}WHm zhIB$uL#9Ofe$m_UkJ>Y2_j#!?+SkvpaPV+@vcw30KYfJ{*-depzR10NAue5upyJPT zN?Nn7XO73IC7Z8m9Qh`4hkU3Q1w-(ph30*7s^qa4{ujM)O+Y#6lDo+Y2_WwB&~|#0!NbjF}sZrJ1U=0&ysC9AVUO+<8{n z)l9=`HhbsFX?s}o=gvOg+^ISB`tYH>p^55KSeV4>k%P|6-8_Zw1i}F0CEZpX_sMzv<{B~z*-b32 zDu+3+_`;N@q@`jLM?MxacikjJp$MAd&}k8XmBO+&!6c`m;-kMXew88Sj7--Xb9&ES zsMdIIxHD_46uo_fO>ib?q`c5{LF0$XMBrR}v~_7_cQ(86T_mjw&nDI)RY0e!dFSUd z8LNpw_k0!?l(mr}t#KOjtzlG|$r&r{rC&d;(@2ZlCB^n0+dVMI*E^zPfUn$}Uc40= z`Z5po!ieipEJxkCq#lml!VGrC9UPy5#uNz{M zGtkEd@k8az2?>~rkEmk8Bs@>Ne>^HvGNee(<0DUe;rZpUo7me|H!Qb&CF8mM$i~d- zYAVSA3a;`wTnlcl->{@+C8_}4nlI?W=jrL`fQw(DT609hTV(1xHuR04a(f#UC^@Y9 zkCDwY_aP)Mxx2#%)qDD!QpWoS&8gWiSmQA!OJy2V?w`&C5OaJC7Sa@!)w)D$v~(K! z<>w`Y$Uv>hx8^1Rog#i@rnU<>12c((Vuo5sI(e+{3Fo7h)=o;ssVl0gLnnqy zC9YgO%~ul_OHKhmbM3~VHP!pe=PD5XWidZ5xidt{|9(h&Bu@u#s?nD;d&D=-)M|4d zQl%NO9&O=AanB1Txa~e^i+EJugPne~>YqF4zIe07yY+&e`B$Puq&)54o!_4iMgy;& zvbaZaagR}WmK-7M;Ric2a8q_D7^M2`G8INWSy&Of9So3cwj@{!3SN|cYMAM+Y$D%F zTp+8Tvtn6&?~X-E9GnWVXS4jv%Qz$?+B4Ov@m`lmvyO6^{B_3E6iX_?=phw4MYL%+ zuyW|X|AID))VV)UvV|F|EkiMJ>8VsMgGmeSH&8-PP$oJnas}$&O@+OX7MlxhYiUWc zO1&MfkfC#(m8}p9iv^%hzM(0Hr@$NY8Fr zHlJO;N9OMWN8NogpPz8UzNuzwF%H+S-;m9fe&mLUZNVGwpqMT18*;GMykw&jL!I6& zy4ADl8BRAR5kGwYlyIMfyCZwdylG$np9yf?nm~CTWHSm1WR`UI$~(~rEohJ&oLjBh zcyx$*O*un2_cia!WG6)pmorxexZEj#(j5*vB1mbH$S@imUZZw8SZ(|0F5scxzAr$+ zXXzC@?ilUpA7Oo;Ho8gsYXF(xr`(9#`ProxZkf-X2eBFE^!HWUvZe#);$u>utVeTuI%2Gg)}W^5oa_1m{7^jP;KD^P#PIl`@b z@*z-lk2ZUcDleL>XBY7ARGZ>IdGa2lp-(VeCO0SE&#CP?La=Z3>4qIT>_3(4)$7YU z*PEN&bdC#_NtqxO5bN%MnqLphLv%vu9r5y?=?Zer$(5CM*Q*;8Q13vl**1*2nB&?l zpED4QI_H;&aL1%Zt4znlrN6;qIK>L&04XkJW@IyDPJUa-URE3GCD=FwkENvUb?D2D&I&z(TX>E?_m`u&Fw z-=X4%MsKz9D`mZEE{-}MXCRAbjWD{1d7MAex$cSu)$u-);2!h9TPX38X9p6J1g_*I zW5b?+3xAvz`9r4o2qMp0yN%$wl0sM;~uQI#{fpRqiFG*Ru0C%yMH2ZZnjXIi3>!{>|o~)fUFjZi&+^6YF=C z8~ISuF*IcsM_8ETU2FH#*EWAiY>sA#cZoI&qJ3fI{~xij+~WET8XiLb&V%+>gC{rY z8Yemu&ae;ycp7i>W+b!*`=7%5|IH2lg@qK+AH(g^DRwi3#19{y*0~Qic68%?ijDPF z2ZE9oKrWq}cRV{gTkv*`dY#MQb&GQSGmighFwm+Iy|7t@{1LQXT4t95B%pbf4+BbH#KQf2CdL zCPAc@=tRbQoQ&@$z9V>;CMZR>;tw}Tp7F*LgN_)PS57zV?sI(KnvBveGVYC&uda^s z4FL66(`xdUoSW2tdZqu#2c1lyFCeR`y5!(zcP)%YhN9>zGJ$VvAE=eK z;QL1u-gn^xs_2d)Jt^X*bib6--S&RJhuDkC7IQ^VTMLC;JQfGJktLtr9ha<`m@qn6 z&5=1^)|3gPMeuA=QAcMcn!}zYe*v2SwCC^2nQbx^`%|{frels%t=_78Gt_vG&Vm90 z7{~~C2*@Kb0V>JMXUJDq9voXAMjp)6HR`#k;CGoTrYqe{blaq3$=~XTsQ^rUD}cm_ zsXtS3E?xa(a~tk2E;wz5(Qap{GIucFSqNZ>-r~=SLv3~L>mayuR99EKaLy>wUAw-u zwSYQQb#SN3$tKOdJ16T^1tF}2RQbkxz+pKtP&!oe0;FEs%T^emBRqRw{p5v2_vBk( zn^V1*8dTJr398ok*v^9QL!NDVsVXoBO%2xJ95k=Ek3916IZfb8=H>`B# 
z$LpvH3ql0r7io}^5wQ4V^X(&LSvmT5DlH>pQzl$=<>st{QZB0DR>20)gADBFAr?~< zkPIF6)`vdhC&aZa+fFfP?2`iIu$_K*mr?BjD;*vdR@pgdc+~enaj-=Y{eS0p=gUvO zqaecI#M{%wui@K!`fb=u;Qxe#8Fu9Zfr8NK>JmVKdgG-(Oo?~*cB}ptV9F)JXcY~|eL2pL=7vpKjtx|52oHsbV_iPK%y9Y^5ReX+Wru5Dj;hC6 z#lg$*ToSm=g_4;47GXa>zbM`{eSqm!r?Z4z%~Y(8rxp;xQcj}O5k2)c1#3D8({KR!7z3GNrZQ-=_>#p9VF-F0XDc`>NWb|sxgJaSx zlQ89?MiJJGNpqS$29QE0`hRhiUfOvsQ_n~LB%@B8=Ur|`w>94B)5YBG^Eb~o>*WN- zFj!M1!9t-A@7`^rRi0oV?m}?0hmVh%=0B&qro1Rch2h497S?m=&7n#vDBzWMnI350 zLc95MdS#|wigY;dw4xZ0@j%2H2um@d1F<7Bdo#z6Tq>Nd+WF##JyiaImg?e~pXvUL z)C;51e`&+LZLaQVjwkR2F55Tt+Z%rXKWV)Fu`w_ImPp;)ELb_0EFRIGf#)M|8c4;c z>%_GPmotGHk%KPMTN6M0i+0f}K|dHOcV!wgTlX25)}OE(_+$xGa!Nmj`g$ZPi&R+V z*51}s8iuLL`PeHk3ys_N$JE57l2Ena$aGF+HFN;8@+);}==ExTuLeqRNo zdOp6$fDVSr^0CqP4gRgGqt=(0z$^roz1UaH0pK*kjLZHWkR84QlArBsc7;0qyu8Kc zfx+#bR7^~XaijcJJ3IO8rlarLKGSXPfeZKghZVXaopOp;@(q<3PP4~z;<$&$BOHT| zUYB`6VfdXQcNHmq%W9_{*%PPORAYnHxMacJg$qgfeHo-rA=Ab2VvC89{=XqOIK*#gJ~!s)m6_EN=DZzEF3C!*TQ!{ZY*DFD5IecLnGLc(pP`&5B# zXlK0S`zW94!edJv8Y&&f3!aMr10)-rI^99f{@mt_Uid1+v3Mk-2<#mvfd34LU>q;0 zm>2}sSDPhyxcRyvYZhe1L$aO5g(VN^;{jE=7C!K910_zNnOL9(!X_jkL`>f5wjIz! zgjB*e0TY5Yy{$Yy&3l(%vb%;lP#fqoFtYk-&2dZ#Q3E`x)Ko+HTxqGCG}SQX^{N$4 zLSRCTw65S~p@i8jjl`=lg z<$E`UhR4-O^cu>RE6^hNR%ZMHv%Xp2sfOESMLI>j{(|m&>xwa(Wu8kQ&nIf~fDlDX z3#fP7Z4Q_ONdK#DuPSgPq|L}=syC@2Nvj3AI|?xnMyrO0McD-V<>;?l*on%KJHBn zdIp~Av^bm`T&U;Z#N~E1+SWln2#ib%0e#b%ytB4_%3^bZBz$(ufmtf&Vd-1_ahD^p zKj!DY`frFXS7;iZIush-g&G?C_foyTB|#RUM*a0 zXHA03_1vpi&sD28b&vO}Kc~_wg(ZZv9%jhs8tQCcSa3gV)s2CmOG(KOOP^BCVNmC@ zpUqZIn2nB{&>rck0)uaAvWDkj>&%u`%qc#IU+&hjp^(Mc%jmr8FJqukf%9-4d)2>w z>XP^0lV5b!euhi2;4Zz}dkq`=A0N=ONHMbj;yuc=wj>2FxAL&B<{PejAW9m)2+Wa& z>JX30hbvcKbhk7xK4gBEm1QD+8;m)W-b5Ns#*l&WbAk#QGP3R=>mI1<+wzdEp22 z-Y}7;r)_ewvX3qEV|N1>ezO&nhGwmjFo4I7R0DiqCrV135tXM`7=*G$0@T4jg*5sr zMb4E$q_L4i&@PnTvSMs@ejW={5HXk|sDp@&y)@X$rUInm=gijTW=fCANLeI(Yg zDA{ppR0|41w$yIzXrp-eE0G>n1Pwhsd`$`O$@_CPF^~vB%|H-%KcN~5LW_t;N~VG2 zJezKOu$rVhL(4#yB$j*}-p9=l-fNhgN3ULvvr&D{(Aa2(_}jPr{Wj^4`Q$B5*Dq`` zId;QBoQAXw{GP&-PWGGG?Z}T+JAtP$i-QjE?Ki)_2~u%jpw8_MJ!C-22B`RU&JSCc zY+ytVA9cm~e-*2T)1_#;O2-R$D?{@_#;er3R3e}lz1Y)oadjyT{SS`RlfQ-Ni!kKk zZ%OdlmI~!m(CW2@PNO@bc}Q~$TgXjhfp|ol z$C)t#1RM~m{nU#m0V*C_ADiGrfTe*ryQ0P42Nh9so`M7hf&jR;+scNUm|QxGRim!Y zWfC0LZKV?flTzyHp4x5j313v0H8Z1CmfO6QfkVM8<1>M;k=ZRE?fK;rS;GEZ9-gv6 zq_yUEayQFT&qCDhqP3*cE)TW9F$v4UOg{26$ck;bkmU6as-NxFtk|K||3oBK1=O56 zFh28fU~l&Zx%H(H)W~*UWpJQ>fvAB&&ev$3Z`({(ZoB>hs?p|m1s0!mb>0Ho6OBqT z5@(@}>U{=iD5&_69TO5B_f%UBOIcn;r3J!$(S2Zz!0Wwor1V+_o|>mA-+|75e)gy| zG2rW}LqiUxW7Xc1-#;%|y>zaG@qb)1zB+9GsV<3v0o3RFkY>t9`^cE;nF)U7a#ie; zrsRFQDN#Gu4lYZWvh2ILmaUg5sfNsp<9zwVF5RRlDQVWL4TRuLCsKf|@#J9KcWlym z{T>xZye>`G-?YDQO=H#&_k;0{GQPY()W-(@QziS+Kk+naK?}|3uf-~(B-dVXd{fW7 zQEc4*(}GA(Pfy!7$XEC9m-Cz9(#zmo7)_L44;I+HL|kv52WCNKf~(GLfpS}&^T-L1 zEhPSB?|YyrR#v(&s!7fR5&2gez;3PGOj`S=sW1!kB|iV<;gfWy3HV&@brN|)CVrfW zC*?4|)%rkFd@38$V8*WEh$BRfWw$()4Qy;0E~a1*Q0AkXsl#F;n|BX{QcDK|sc3mT zwLOk4fk-+?V%93su^pdZ-d!8>C^77yW~KAW{I72hIltg88NidD0M3R>%xZNp1%5Vd zl4hwDW8{9$94NG#;jEPJ{e26(z|H#y@(K#Vpv_VTlS|Kd?Kw^rV)Nal+87R8Onpjzwc->L$=3`0X@{M>c~b8Vk%2b#Uz?05AR z-vO)JQj<>L@orqR3yY6$_t5p0%TXhQ_+GSsF5`u<*zQc;JvSR|4V`d%|G+T#N0|~5 zevQ*$x8n^bp}N~5#IF1Nrdy_CVx;v797K~XHmWEx=gYi%E5v75*S$Hnoxjplg{ z?NldyDi)>`zOT0Y$Xoy=vICr6}L6g`UH?HW39A0-V< zFcd@W{mR^jbc{_oM#(oMohIF4I58-hgc8>(_IIYzcOb3KmwZ>txB}ch>Dfqx3J4oo zz;_V*kz+K&+!RRkwAX9TvOF%%-%tNM6xU=VKT@-|NbV|(=o9D)YH4kqfrT0+utD$V z=bvvp{OxjBbMO~!4=1P*TQgpd5L*U{$mw13NPo*@uU{30Tu_XxmL+k>U4OjOrRhe* zSQfLsJ0Kc*C2Fcl$NNqvcjH;C&Jwywfu!+Isx3b4`Cy(&wKkaehpha6_%?*oKstu$ 
zYs8Ilw?)(=%S+TksHQ!hHRb<&`)`W}%|A!|e3=5%yk6&TV}OqC|CY_nw0Rl~A|eR2 zT#(_rQ#$WI|9jwz3N%DF5;pn&`UbR5TUw~opai6~OOf70+ohr62n?L!JJjhJn4QDl zw4b)%RyjR#78Y$px>%s}xCfwOm92-jF(IiQo1gHq!r0kgf_hoL?A#SJmxEMF!< ztLQl+ke>T2zMvJJ`x1Tpqd#T1!TM$v2k7V_B?AATIy>xtw!ec_MubmcHMTT#X7F7h zr$yjE>Ye~4DtHY0miML~YPy`_t-azh1gZ2io_}j}1ttrzS=8)L&(5G@0aRYp3hZ=> zr7?H*GDk;;&y4EOrQ_9;Y)k`=8#WTMYh1D^u&%dCa62+}vMCD*T6_IcO|2tl?{(DCkPE^V)fj0gy~dGtFWKz3xWMW1 zmz$+JF1Cq*|7{Ni3~;;bKG1IupWjayL+G@5^#yHWr$b;^diU1&bL&kas6JQM*QNF* zzXQw2)9HT|!JyfgyEKu@<*~l>Y;TeddD~=;C?fmxa>eB{C$`D;aRtmAaiAXbO4<6Y z+lmfSBy0h(yg#%L51(&Y@y4%?=Ecz@ILAW=!tWegYlG^uyOcD7UDz+10NK45eb$X6d-JFO#eqK=9y1m5Ab=9m9nrwL`l-CnR(0pT5{%55C7k^%-zvN9-OR z2+c06hFE<2Bwwu4?GR}`^N&diLRRTA9Kra%%&K*jaXWpo^; zr*0u!v9Y~ccPjKVv;j`m<{yN`ht>PO3{g4WU#|^ZdmUzR-`C!t%V6R?CHTp||HUdt z#)a9fPc|a^@I6z_d4dFO2AFi$F1FoP1uUX63AwCpVvAzx54CY1Vphfyp(GJk97wy+ zh7hPf+HxOpF!br&Cfjps7Uae0K=-c&5(j*)326{`>#1Nk_ubZk@NcWZ;GOFD!P09m zn&OEXV;QgcKJuVO$WWax4oxuAF|a%k8Fp7c>@jjwHmO_DxwZ z8_p90^%zfJi3Ps;`@0A>uu41d7QBsO!i%(b?1qbtq_|wD)*u$5+Z$1W6b=-E_Xp~-~&wzY@EsyA!`WIi5v()(g z$E(VBI_-s~i`{nYsx=F-B-C6e9>s7bA^p@4GYsIhw+y4dMjC!2e3EJ2408wMaM?m* zvCyGez#RY22n#p5ZaQPfwm@u)F1DxdnY0)3IogvHQ>30bSeEMu1qno3tu5+?gW#x zaT|*2lG96PP-Q|Wo29{|6fse|5&;@7EOV_4mHVG~i=RfF4(_Z9+2FMMzBDu%oV<}k z4H7$i1sDJpapb!JpZf>0Dk=?q?lr>DqyVj~n0asZoskabfr+zgSY_1&kY>NIu;kM@V z8#|%Ac^mB>)uVV#L`q4qtdC}%7hDA#57~OKOn__^NIypXd*xWAP6vnShjq&Xl;CYc z*TBPvr2+L*KfBnEM1Zr-&d!HKCI+j^S+anR1HH`_RR4HaPSzqi^A??M&TU3~D@dDQ%NLxEkQ2Ie#}~c%69sqmqPv3v6t$WM zZ1mn9Sk2Le7KBv5x8_UeK(pMaJZ6Vc-rOvTM8bPJ7ZNdWryLQ$v2Js1^3dyi`!?gu zoD~nkohjM;>A~Zy%T-fF1g$5v`Z>_XgI!~#PYc8t?1e3>vN-4i2m_VAQP+tX?JA!p zIr+JySSxMiZW9{Foaj$itS%-i91n^O37B2(w$f&tDuH~XkfZQEluj}0F%WnPt21!R zSGpQl>K!)r7NBeSFWa=}7gu{B-c+#h6AK!Bz^T5ongFT#8Om;B!No5uXw_~#I=q&o z(}n#H!yL4AQtYsa@A8ee5qMx;ttkQb1lZWyG8?L`mr)%w`!(h~ABcSjPZ~f(TT`J> z25HAwoMfx#6Fbw%67-GfS18{mB{JTXez(_q_mY00y|?Ys%S@(7Ctyw)Z=2 z4WvG(%J>@4L8JqHJYYmcrL{s>A=)$SBcE<}Q&~5LnlTc!LI6$#ZF`_DHnz`MSvdzY zf_VKN=JCIK5W=&^pP+H(IhuxP=`c%md1KcWR$59KU1*KyM>ExRhhq#{klM~+p9UM@)pv@B*%1P&qN#tpb zuGX)1okaY+bC=)tXLVU$I70IzKQdzxIR1^dDqFhgt__F^=U_tT%U~7Lhhwh3uP`UO zm8onTnaa8=`vORt8}#)!n2wkGgL2ONMbrFv+zl0aX^txrE$&SdAUYZ}EhIViilS8? zNFVPmd(EYC2_i`d(R5aQopPs7_e0J-=AlN>a3U znrJ*YlTMmbMmHKsVmaEG(_dNl$QmocE<8NEmQdpWS{R4XnIh%ts!6nuQ?o-7 z*Sst5X^!)=8>vJ}jU0qTLb``|RO>&6U+_;c_f@u#=417rHwdA<>3CH8THGZyq zwFxb6v8i-C(fG%Y)w31k?$eF!79Z~<1}$!CDe(SmsKKGtM7{s<&eNM)OHMW?SLm+) z{pCJ84)WgpkH2L9ZtgsN_5^-|pV9wM_<{iW9djm`zgH+Ie17@8%fnq4t<$Slu#ihW z{N&*5xP4#lfB%IGkeB_Y@?-sGt}OnU!LRmH(~!TiO2J$3ds^RoX3+RpdsZ%A;2?Jw zJB-_9m3@VCqv;KMQh(_a(sG+OVwVppEB*YmH=bzzSQGr%sy8k5U4gE9nkTH3&>`|Y zCC#>S_@6myaBjMnRF`Y8qb544CGC`*{zrB%C%M}dwG)oU&o26f&SJoXJadGR%J*rJc zTyKW8epDKX2s9eY!h-dSG;M|%G8I^z*`awUoVN^7j^yS4n73W4z@+A$y>5^l%aNp> zNnKh|1}%xr+7rE56Xht$JuT!%M#F%}ccyI%=X13tre>lPzsp7lj`7MSsc8T1Wh-5@ zkEhDy1Xn#6CTHt@-X-cSCLVQx!*pwV>p77(1RLK!dGlrBch4GG+-(12G6i+3=*{nU zZx!jbckGfwdO3N}x{K zPY$HBE!$(g{grccBPhKe8Ao>2MQQ8Jd6BAB6TK^mq=_HqSZLWQBtXkJC8VdPho@#l zrDs z4cnYj#Qk&Lmi64L&M}YpFz*r*%OtLo=Jma{qTIwVxa#aKH9p%4&`s~!nO$OIWo4D! 
zTN(1be^z0=JgU#RG1y)B3EyU;FXnXJOs&)f{eK*%F3T|Thgua`OQ>Qjx7{*-vvm9d z9BZ`NIT3UQh0pX6x?6E#A1fVNm(lr3u^haamR-7(1DCgUw%GLa2q5UTmMs43D#ezrsQ`c3ixi^th74p{!!of$CvG$nkQ;5SFWDeXzw+V>KPa$^{2l^VGWEX z;c<(obhO%0-reYr{xW$7P80|uf=h^zRJL(5IPpgEak)CGygV8jA)jvm8YP|3 z1+j^G;X5YcK7*6>XQ$EgQp;h@R9tK2=oAn%ijzzSgT+R-I%E3`yDw`h1TGIyJbDy( zUQhigA|YsbwJ~#by!2ykbyoLjsuofOMaX03S7prykBXDsa`qkw~N%;6nd};p4@=a}LR@8PqROo*9 zGe_*#uZhcx%v*;$D^!Zz_zd3LuNRsvG1#pNbZi8B$l`da$41=2k9-{bUYEIjCN!4R z*?oR_?{#J8JB|19RrS?&nLX7$3BOmXUfn8G9>`H?c7b$JM+w}`?-ON`aEb7OvI9AW zva){B9jiX!EjI4Tx=t@9B{7qmS5P!-y&=BJ`IxfrF=K|zts?Y=~0`&OaRWF^OgePb}PXxFAg zk{a8qj~)JZZ#Zb+!8mofoBb=#!o)PI)j;2~O^>xFPoK+fP3VJ?mH2Jmcns`|c0JaR zPoIQM)c6e?cV?UPSh2{?{YqLBnV2BzEtgmIi`Io48vK7TLuz`e&l>&W#S02fMlv=O zXFfRUEmU_;aXFmv6xrG9^2%uk;NzSTgA9{Cjk@_cMV#_e)QL@>Lp z8^MW5bc<8cKTMQP_cyU}D-K3BaG2x?3Ij(Dw#wgrroX$p%0qkOssV3&&{nbwx7+fS z%O!4K-Qt%kyXxvbm?$Z|(I*H@NllgeRUdCMly$f^MX6!GQ8GcRk-|&|mo+-3=7>eq z^|8m}n`Q-Xgi?fBm7PBisxlT9p=WepF~=+FFkc*x%Zk29iI)3yiW&ziXWqLI>sp@4ydqZ*R>5=2zT{N}@ zHR;mw@<~W)bYZQ+CFjY%+E$i7d>090ielBiwr|H%da=o}6PYvDm5a&j@8|c`qV~!g zTEC|mcSS7oN}eHQ`p)9fM{eFhw$XHaA#%R@%$7F_u%B#WQ{ztgVifmn{EqIC@JH%# ze>#b`W>y>D6r1bGi6`p( z^UuwVg#F*{SKSKs(c8`v(d&)~Oi&*#aH&-&-zR-y<#!o>L5*a*_{O8;o)>#JO!M>d z?mfqN4b5#1>XIfTc04BV&QB=bwp)YJ3cg~k0Vmp=!3o!Pl>gNQRf<9GU4$C*9W^qS zMR(ll{DoibwUjiLU30>TW~3@C?sV!WffS8elZtxm3rt6BZVitC-W|=Y zo8vG>C1QCvg~i2vu-#v+4D?L0xTvMP8x|UU-pdLR5I@XmI3)9OafvPBxy%Q-HG$c7 zi$sl&-Zj%uWw^NqdrDrTs97yMl|#^)59a#>nUAR&-5sqHpW<}1&VLt2E!ps%M+Ob( z8yM)YZi(hNTCq8|aZAUQ`E0cAM3v2UiCk#J1C^$N>{Z#>zf!Wuwfnm0?%goif3u@d z7Q-XC#wIFem(k#;v743k37ze4{v4v(t)E0k4RRl&pW!{iras4vhcm@UeuE3oT@hF$ zwi+CUt~(?B*0suJ(WdH zz<=z_${JAFdS2f=V3_qZnIY-yQxB?0E(-eRCHC8uPzF5A2z(C85u@dLq%cT zw;jLqZYkBA@AC=V2zs*OcOk`@oA4aw=g_WAO@VL!JRsTQYE^~p&vbKT1)453y|pxT zo3+8)5Z%+yzARvW>0OFqcjLZ3y7Sv`(;@UFT2v#=gCr{}n|sup7>{V4qfF0WJH&fb zk)j&9%2>R3*t@{^%(x@Z?ezSR7C-meCqN5y6(m1qTeKyDNPZBY-GEzYXlTNQ{=wLW ze|IEVs&oQbQXaP(*)87)uDF*yUkVkGOEuK9I9YccZ+OEV8Xg`HmMoBKZIegC!s6S} zA*o@1aQ;j|`RbL{jt;#j85zby^_gz&!I6B6KL^*mKeD>6vyM|!u=@G5Jaj8NAqGgi zI#v*O!`2u^nS>4%6_u5ZjU`&LvUBGb^ZM!WJ9#RDb+x zl@Gv~#6?ZBJQ%;)!N9=K($UeN$GXM2L`WDXxX}zX|7)?%m=a)#=Wg~~8X}_DxTMc& z65@Rva6SJbx?elD2zW}lAa`g@w)lSZ@=V@iKjbl5S_)@W4X1h1{fkbq@+Kg(HYhtBeIq@3&_`%utUE_NH(zDkNWANYMdUGPTS<+NQQ3+6D%y!{6)X5qk+MB-=xUsoC^Om2YZ`8 z9OhI3Nw9kP-GfyZH~;OfpOZZVesKP&bRZ0hpzZzmM_Ka^B?EuxyVo_FBlXQuiVu}z zPB7h`|ILpNU1rN|-%7Qop%buK-oSKouUmniB@jBvs;Qj#bOAoP9m0E9Hrt`S} zImk4oC;KIR(QwIIaVOCm$~j|ytGT#1wc!2Bg|8BowHKPGxEHMII(VJ!s=vMV&~!UP zXUn~sx;VU+c2uw5CU8P=^_mKJ_*M0I!B**2kY*S zMb-A@zKjnX+1Jt6_xzWgA3UHAiHoai&%3GNC4%seNNuc2H2=xMr(`K91S(aQG%(;B z(?+s4)%l8(Br|m2VKcc@9Y<00rTUD*NF)s!z?xhfD<;*Kz@_17eC)ZM8qMr<2P>BX zvNwlla){8*qSzhumHrerP}Q$*8jkTx&_C23m8D2dIx=b{3J|6IekPPSIhcVOW8D##=HAYjeKtkMC`~t>5NB7&q5Y z`N}=u%|EjcM6^5*c(?I(SX5L10Hdt&JdfP~GzHE-f8qoR0!s)vw22~fu5D6J1Q0x~ zJv}1ewxVzsi4JWf=*uOh^24QOVIiCdTTrReMuCj^<}Zwt?n-)Tw0C3e6i3{;AiZ(JHlrqmb5X?$v4J3~{8U^f64 zTb%#X6?|!n==39tANif9fv%<&=j$&qd77G;rB)Mv_!0j0f#Hxm+Ts}pTv3zOiVBOIJk%&+283{!z~3Ab$R{Qt1No^Ty>+A=`{Lc>^Chye>UaNcd=Vw)4*6Y> zX+W1bN_YN$SX=5wX@bjvQv%sJ=-Iivg?UpLxq_#sRsTmTS^qDnX}>5x-cfD&^T&&E z?X`T3w5^)D72uR+eIUn!<=#HZa;Lb!Te6MkA*C=+$i#M6d{fIOk(ifGbK7AAt^lJt zo!g5XL%Gsk_|s-M5=((Ihf+#CN4QkTdZZm0-I zQS0tOcGO$lJRclX>CL`)SXZa1{X@4~8?Q5=A^aBQ;;{JxLPF8;{wQBIWG)j^VXYOs z1K5Zhb1TdM8}Iyf_AJiTmkSKq3to*1*=p}4l&}2TE7bn+w#NoIUk4C%x$IZ$@$eiX zoBPtMj^Uy=k|#=17S^`+AsqrWF8~HVo@UzKgtj)}5i%i7Ir)bP;b>QC!knY{t?vg` zf0Js2Tnj&@cWE@W_FPV^R^QB-^@TDH0z;wEiC~vTrC-}W_CUbq^1ye|emY#}-kn_Z zwPsK)4grW`;CXxAuvCVrErL$k%IbC|*ZMtkGJHCxh_s1c<}5oPlAR0iy)1h1Bq>RU 
z+j)yFlXLanqetk4e>#bJzIG~?mX+VZBJA83@E)}OiGTlN%WbERcPLOn=G0+Mv04(p zWRBEz;3psiUT0LlHR-yyHXdRY|0>^Z$87yh{~@irHBmIU(;Clvw*!!=so-AD>q~%`MSdk_l^#cti{u95tX5UXIG>^Lm2vJ@Q7- zA((fT&NzrmHgu25O~aX9elYhspn>85RWHe^Yp$W~dauJSw~tsa{+EH*zpAW>0Y}2q zq_tUAjaCd%M*=ADmtZ>6;%8geB!PUiF3h(biWSPa9*H~>#aOEU-5*=FvvO4&m#OZP z+nCsk7t@DJsE}=HBtSCMLugy2UjQ@`#+Lv=7F}24anuaL*tfYTN8iEA)dtD&Th3Ui z6pV4IIL@4H@pt8^5jjE0gtAYtNDkSYG0Aj6X@~;6m}k<7M<(Rj1Vvc^bQZaE14Y5y z<-hmGXE}e@J1F5PE`YgheR25`14T}=5&!S6)Bil6reXsU1lT6|HbAJe*UFpRi-ZQx zj_ntIc7rMThQpvc!Mng}iv2b|!_TlUh6(`aG&h9wKyUiCKJNSznk;U)5(dG&T!+v< ze5|m|-~odD?krEyKFO|rTb9Th#chWAxQrR(V6f)q)%jgcae$|`q|#po(D0K(#;lSq zJQdsqON+dEM(;Rj<1kJ0QJ@_-$RsZI`g0sZY6nc2{@8v z|KvOH@eOP)G!a8?`sEJ=OP6E2REM6rMH}8L8>5sItU!tWxx9phMMFk-@RcGW5GY4| zG^$o+d!jrZ8J2t%7%?J(Mi0Gg`f6pBx1b&^Nv$#PLtnSt{_Qhqn&(e7y>{5SKQ=Vz z0Y`h))FD{}{G*4Kw7@bxGcACZFE_H($c*YC8a?c79gV1NQ0zwOSg!02?~OgfOJc@G)PN#cXtYB+-vRq zo_+TI?{j|d`=0aRd=j6ypF8Fp;~Lj>%`s(uh(_5_>nO}`u&U;J9>DE<^{%X>

->&4YA^b~jr+O)@`4?Cp`k93efD7#6~* zIH1%-M$Y&IMs-m8O>GZ(vRntI*qz_pVtthxp57K!*l#ZPxYA_EeI&OuFO;@U_afpV z8WOx~;j%KwH#Xb06F%25>}&}_90~;pJ*uiP4}eDvSPh6*FAs{|A?ySUOynOJ`$CM0 zZ-I1E)E@Xv4!K$p#td-CUO^ti2p1)a7ES=^^L|jLdO-vzO45wWYz4=|1DUH`-qVS4 zxi7?jM$*?G_b-Kq>pnR=S>@2G#xS~|qcT;>7ws~QjN!Kz*bNA%mWE$P;>*Cy)WJz5 zlM+K@Ixx-{6ut3~GOF9diU@Iu5`O?~p#zUfWun$L5h$`?imAjG1V0t(9PE*Dpf9>z zyTs(&V{gI6aBILSQ@!FkSGJPjW#_iP?6bej`t~q6%`_8tsZkXm2AU4<7)~AB4kj52 zgCk}9jAX7~u#)HnB?it{#BWG@S3JI_!j9{Ye;@2YP|NiiwK-Kf7(f3#HfRIE3851K z#YMx=u08djnU^?Y-8`4CM#D6!-#a{58w(Zw4yAB%vcB?r5h{n_VHt})eBS`6Wn|+(;83zz+J%7BXTHd3Fv=llj9I8K_=dD?HiXIge zHIShmFveAlRLk&%#HQ=fO?3-)hkwC$rl~m}#R02~JOrf75UC+mRbN921Lm_=kisu- zCpUV_!{kNVrLx)Ecqo%lgMhPs`oBRMT2j)V2z0ym-WTQ!@)GHtzyA?6ru+Z(2V5?% zRo4jMUI_tE{{9Xmq`B#3Dx7D^2=c!;2V~rSXG$x<-!l;^6193ju)Ev)*k}(JA+_JX zbO##eqHOilZwJ6^wJ=Q2?-B61x!=yN&JmG_+_~K|-tjSKfeOdp$rFzA`XT&G)A|0< z-UG%{)SLIG65$uGZvKC8E8&SgiDFd#h7+sO)o9GA$1Y+g^=O&pyvVJK5L%M7F{EQ# z)Q|D;XF+U%6ODOnL=TbWA_msivY9^9PPi$k-xmGcEh}_O%@3dy!cZXvBsFa}yG6G` zX}(6=dwSvB2P-7`wOe?AekoOJvBTTECdhee#LRWE@=y zw76()_v=jj$IGvnG>T#*WW7%r|J&o@sO8&MEyYziMK(1(m{LKq@gt2uq_U$&(DDZJ?5s$*`2rQ%<$f1zW=#`6EVWuNW63CMMwMs9Mnye{vUruf9kbJz% z3ro-shlHSU9BO21m{+xy)7OiTIXY0r$Qw_N@I0gF${^ErZOW!2r`Q;+cA+%t)`m4ad>G`r6fBMRPYj#>$Z}P= z&n)D{MdpZbid5O`se!xw2Ua$A1BVs8BY<|txjxP(yZmYfH_P|X^jEf-D_kO4wQXWM2jQM`$qOlB zK{Tbop9yY*wb(H+F>akmy?D_IQcxr?9CG%ny@CBic{}3!%x=OKLU=70Pv!BuHFr|f zBLh{}4l}A5qULRSIqs+EolpZ3Ew&gX2AMJAj&nJOWz!!tGVv0PQIk%%q%9!k$dzmh zMFT~v?y%LfSE50$`XfF--w)aqB%UNY69N9xA6l?twYBMP2BntMd^7|||5Ld2Fr$or zrO{Hi${NT`=OLs;n358UY~=yP^K`Ao43#QUf{xEmJKFoa*c#8+He%a3E&D$f6reFE z(KS}_Ij%Ag2zGp3`;|nH&Z0RRwDju4W6X^vm?SuvF0NumqJ)8^)vcdAD`B*KsWKBXvKaDjo_-kC} zU!=#6*c{i&UiTzm0)}b9SZ^(`qUkS}mThRbpP`t90Z`U(Y$6rWZD}lA!-1`Bl>tca ze-#Zf>n+16)qR8~cf9qfvb zj4oWR?zPHlKN&30VmhJ_>IOjaRlUN4pBCHvzQ$ul;Cy>c73&Cv&l8sFw`JvW$E(5e zNGjjf7#}?X48OvN;|UMQXdu63$mgf3oUVKKJf02muv96ep@2>P;?9R!he+3jvyjHLcmL_W>t z-uThN$FJ{qoNk*xi)a%4t339`=!Qltn!20c)yM_q;|g|E;P&hqf%1cpAbAw(5g>@j z_pzBzlzs*y!VWYh(ra6eLd^y91W4pd_P~dMfZJEu@fTQRnyxCrd;^4Z(@@Ko+bO7h z5M`EFUUek)t=bLbzlt9ubI^fXU)XrwUomVg+SmZE$MF^4Dk>t%D{vTn6q-OMqk6&! 
zf;f&Gz*oc=G+g8NQIA#byAv)=2vtig;8SZ5Wn8c+TVpjV96mA*Tk4tVcR^5DfnM8) z(E2h3Ak>^yYX2${<`=B|KEgWZ)a+|3__tEn@FXP#m6et4)<^EZ6GW7EH0-dZbVnkY z#JbL^f`Ro+?PXi`;HPtQ^Sn^~c99OJKPl{y8_ZS;ff{d@rd2jEU2UEPAp(IL0x9(_ z0Yzi~R~#)PARuPOlau(HEQ6@Jcx#@c3jMPD4GD%+{Yj11%{ubj|cb)SkS?pPdaqIoLuX5AKyPxLuN0 z3d%Y}%4Bl;UOZ=$%XxP1zy%#q&VYJyJ2Vhfgn+MBX8stITl#^YnD#(*;f{^gg4JAE$!DjS_wif3$1xc&eyjuRf( zxOVv~{G>hxz(jzgqu(Oi0W#i$hYz7WyD4GUk#oF`bk4dx+a)Oj_iZvCM%BLf_E#a((d6~mb#3taKUxRNkl+w zyLH;V4e+E_s3p8&0uTz6!#}_i(Ch14|M0(m16719{u=|s8~(@tM#KLNdJz^Gk3r>8 zEBIJe1)a#L@c#MQ-{2UIw@04|4CDtpR* z5J1wpUcFF8hODzDumq>1R&UOF3Hc65pThVuT)twJ>w37}$#Iaa*r$Q|cu7%E5{3cSXKMKUno%ypJ>p|9{7Kk5+`JBYIYppp*5z8rU|rdVEHzTP|oq|c@wj-gB_Rum4Q zgHOi@U6nUo-34?Kt|trFh7jxnL;@r)(tC=aq>_)6Mr03L*^C@>ieAJYxP3!Nk%g~Q z_>AgE!iB=It(kFHo-0S?qt>hO`4t_BnfP<5gu)L!*-CmqOk`v!A1!1bxq||{kk7C% z+g%3TtJQR|LBL|QY9|FxT{A@BcrwEN3?;|TZpzW8{iQYFK6$;r<{ZcC=>vI*)w;=1 zz~ydddY0qxQL)MHLrB^ny@mU948gLKP?>an05h=4;t!Da2-ZuJm4IhEJKm52X;I+z zo!M62Am_{b!6~joeV!vX--&5`T^q{evdm-djH`PWrUc+1wsc#6~5!ZsRd>^U|OLZZoU8->bm8m?$NtQK$gjLcVPG|1yypjC3L|W1laf1w&z5myww%+kP9X+-hcPx?V=92|ECL{uHAc!U>kIekyULQRUHimYVhj_++;wyS0H zTy8K^EfPvcBxP==?t~-2%@z9$QYv_GAXQVKo|A)%6S9?$z#iZQRX}-&@|SmHsn*i0 z&tKtWPD>QF`MO>9BilZda!}TRQSG`C5Q%_$zJbZ@B4*(eKH$<&kUw4y70bU;&YhNl zrUd}~(4+Fx!K|7=I+)-5^Kcv#!9T%v5q}=46Q#%63%Ja2Teu0N1~F68TN9H2khT%E zJOf-BvoMId#_RS9JK9*;;bx^>v3>!IrcC0o*nfb*h!p~ky9=@cpxbAXvb27=n5TP{ zNaGWbRe*qPUjJ@Tl>D^hCzNfWP@==^vW7@uZ|bC9dj3Nmu(KRjmUd9&dKoCKf5DP9 z{$q;zN6k84(|c66rab+D0*0J^r@ShO&wVOPovT_NyZ{X#osgscpR`1;aGKh_QayoMp` z{cG92(csJD85y-XUBiT{W3!l}GdIuSg^BYwxuh zbKd`Z=q*ztT-<*JhJf$hERIQoKM|QQHI=1CVCTB10Sv3HU(4T_ZT61_2ZEdV~2?)f5$^>BQgff7)#ZZY+ZWTrou!}u#-5+|Gutj&_1S=0fSuo zoH$H(;w*4A0p3Xm*U3QEY}ZgcDkAqCzdS)({jY#ku{jWyI0se$&5jgIFd)-9r4W}2 z<*hwHj9uqw;B7rR;x+CozQzECkIb!C;ed%FqNCr}e|Q0eZr?i7%4jM@V6xbSHuQc@ z(e9Dq;T_YD7vyh5Ir?$DpoW;jzCjEJPgOXWxhDC`o=T?oC(-eNW4t%_rCBk|v6SbH zm2S!fe{f&l-lIoEqW=VwlT2ax2VN_dNPgL$z&s5MaI)?SW1!p+zvHR{evtYPKVh_! 
zJT9OGBo4pBX0+ynR5YjHn2?dDS@k?J zaB-x{fVZ6<<%oPI)w=#7`qo5cBPhKH>Op4W=zMxTlF}g{%mQ=*MW9ow{LWw>-g1Uq z9;d`{4lO-zO#kvlxzi$F7C4>_TfVV@vm_1}yMGO(rH?52d^NO)AXLjpNs%)SS?QO{ z$|^#2_U*r-U<4xke2q7gEsM|tR&e)TFhnk)JIuU?qhJHfE>E0_Ka>jMW!nY|98%xb9Eh4Q=#Ai+oc({no3JHGJ#~l6mT0) zi{Pi8a*~?~;%kIt(?k`^ZiYKZR=n6TBsfy6PX$ZP=k~KAaXhgFK~uO?hVCptUxn%3 zaA7xX;6wx=Sk)5^40RTeCjW&u4N9iNSQ(6ASsVJ(^)cfSl~Da!2tVR+1aA_or5PK@ zl#gV?Gtz-5#7&JNH(7NTn1P(pay&2s$ysQZec)Km?@czjF=RE_2t$?_cv)Dt{X^%O z0W3N(9t2YZO#P4JytK0KL{DtG^%CmP=2sHz52zk{&3qur00Ur1Y&JR^WJX`^Uy-Gu zS-Le}K#ro6K;?%*(r7Q(XJmBryW>8)BFvrn@pjwW)wbA0X51dS2fZF3^FkJl6zC{8 zo?!$05uo<4=R2bAWQkI41ye1Gg^N<&^r_bAZcv)=+(A>ihwa!BwwLtm9`-v?Emosh z25?opkAg}+X!+tth2*9Aw7Y+p67WAn%-Wy>05!XO)k^91cGztP`^q!%$}N3?nx(F&fuE}6I*?XUGB9=T{R7ea{_i(x9J_gYw$CpTZGZPH zzR+;oJ{q3gcd>yqh@W6D5dC|LY~VOY%uV#JcAloJU8fkxog=lnyj7k&KN&@8`cjO> zx4g*(T;8QS5ADu0I;JaZ`|v^gp(3}zr8f10@GCzl2tJyUa0C~8rftnOCB;!P}aF@-k_wo|cz z$+mu_#Yd*y{_z{w&5;o_1qSy%8t`( zT8d-#SGk$#K{#RYN`MCYtRCM8{h6eJNA-<`wr_+vXI@$_% zghdrrf!(a1s%o_USLJNAlRk_FD~ZB$KWYK&JT2^$C<)A*++4Wbs%N9*SXXv|Z#}3- zOV|I9B=Ye0@!<_L4aHK`&|tT2dQBr0-`w{3e10;SzJ-$%FUtKAS2$ik;;(63x5G^u z!mh;UT(lD1S{P}s#)shC=-kxH9z4FhjXXY$e;!|U&cJuKK^mc}1;M~TNgmsK#NyD- z05vq>&!u{5xPj}X8!hQd=$&0%bom)STE+sua7{p0pxNIQ+8+|aoBQF7m60v++A{}^ zMFIi=nP+n=yBv0_D?#Id`JO%mbtI%4yxNP3JE#-I(T*uS0=;P))5IjOlA|KM2aw$gfAz_7Vy;{n+2-`(Lf`+-TnYux^C^-r+RPhz@7jtI^C`>+dEhg$Bh6B zLcX2)jEs-X4JO}Sh3&)a=Ewv0Kj()>!vY<{5c%ln=mOjAd-$-l7kGMK%2)h4omJv5 z@1O_q9Lzt*Ptpy)={J#VMk~x-^u`JQ>iy$f^{*=vNlZrFsQXrFut2owDsY{f8r(ic z!`Kq6);12w+WD3@4>6m*Dr7fL4MhQ}RTyuK?^&u10+=EZmZ&>lS_3d5n0I7c*TSGZ z>6IB9D;wMF!6_s0r>H3XnT3VF-n<}1q0clS&KNYEislJ?+r8QT^XEMde$+^HpE<)5 ziiydrdCjiyZVI;wYu=l=${ls$+dr6tNgn&}1>T{geITDXn5-Q8;bUyigii+v{~NS@ z!#XkRHfiO&QKaW%-C|2F+Pkwu=&BO~PpWVI@csLzI0Gynii4#ZET_fuhv(-sLx!}v zcuK7}Pz((X0lDew>4}=<8-n~sa)X0|1L9CdPR@fLbrgYz8V>scY;0`xQ&X@6+O1lh zGpR&{BXn~-*efQ61n5yZ=ovxJOyB$`li)`~X;Pie<_gk)=k&yq&!k$KM{hUB?AP0E zt#p)->Duk=>+T;>opARfMXIN?xcDss0s>;#7pK5}*hATBLPQA~)BW6Xm&lw|jwc9O zy1T!Fi!6jxg{xXzT*M$f3GE@VxOCsY+m_=I zG`{CH+AS`%sQw>+|8eCo5m4I`cT4irvg`Bv$=Z%w|0)4&z7p5)R@n%coc9>{{rKhC zq?~LCWo-+}8pp=&K%RT^W?z2b$Qo9%k?BKk%K&=>`kSUj~ zG%`BIYIyQzaC`-h)b@|OfozS~*jdMuGP%q_;}Sk8X|Cv)7&b#yoFU~XI>;HX*T+)f zD{|_}GV(UJUE;CCO-$%PL{xOl^HtAXGHHJJ5W-NhY|kHge!eMmOic67Men-1qF7Tu zx+{LNNf#GKzH+Yc$DJNNN^`B6o4{^E@STM@GyVCxoYT;*&i>j%ptMpTTHNPi^7hfr z)4PmYg*8|ER)!|JB4yp1F}&{VKy&?t>%o7AmKOD_7XNJnB)nJ^minOsP2 z`uAPeo2M*V$u8-P6%1K}=;a#sBO-EL9=1A5P-cp?F`p)c{vo`!LeY{cR$oVot|V%s zkA?^gGJla+jvw2Q`jzzKTQ(3}_h8gs_K5kXB`V9p{24Q&^04x1Pj3~>@TKe7T zl4@s%D79L9w+#)`F05CGA@0#_2?x(B>|i=rU6P6e$Cy22B^DkN3G1YV3tc;gJ7caM z4vRKAj~(`+X~t!TLkn$@wk|Z7U*R93gOjV1-;op%vD==lOC|DoZER?QZ%|T17dP-< zzS_d9jhR|9Y_Q~hg0Y?=OUw`BzLzw6f$Qd8efu`%vUBO>de3O}rfZ&$ z@4oBF|HJoB`CW&5XH>gb)NN0*Okg`i7;@4kvNu??;d3gR;+ z0btSpO;=F!Ra^xwVE|4Ko&>6^7Uf7RG|k%=kOS|8sDL+gCi`x^duztz^4B>DLb~!p^iV|31F| z@^GVn5Z(X6kNto9mB#6}aLRuhudc(RKFu~ciAB4O31%O76wXWQ^Ig+kr0_R-b{YyI6Qn$wBMKiRp;JKcGf?C?~ zVHSz(!Q^2v==njE_rAbz9{Mmzo><*HDNB*~i`KA5qhWe<=kvMk!3hZ<7bxkcCB1Rx zD!8prUPFv3C@s|kOcWzze;u9Bvx~~$uw_#q*tPS@x9J-`faDuKRk4hCE!7P_n-f`A zH%9@TTy8$Co=5~f2{w8{f|?eDwz(^4vtsx=YBJ|lAx^WamGpIj>XFKaY}k~$yGGi5 z6)7wL_rWjj)m!i^Do0hqQcFnKqb{7=&`s*%;}iV+P~u=Sdn0(Kbsg_1yHPzmoa#=_ ze7)bcOzIT6Ju+cWvqB~?>b&)imm-lp3?z1CYf+4(Bm)z*0%X9Bw#U zmV;^sgX#`eTJ7~Iv?P2P*h@=Ga00h3DaL+Pcq3uSt$ao@;lYEHm5IL3nIE6dFRc6p zQA|=fhZ@<`>B_-wed1c=9rednqHtf3PAxW~cQuq*fTiMsiQ8I6R$63neC_%TJ~od7 zMu>5DDM`96SdUgH#*cj-Y417!Z7qJy~KSW5NiRN}&JkNldu#Li$@tSYA`&0rk)Dbv2N zn_q4fcCTxjQ?2j`p4YW_C*@@p?UWKnTl^d6D8Kr)#h=Q!`F(DcN>!Uj!62I;@`j!C 
zShSZVsBi9-m&9i#gpX#hlon+qr@tCU5OJTy1KQ^9 zF_n5u>0Q#@xZpULA;2?s71JS6-11ZT$(+{rhnLt=iXk7IItXOLK+so<8cXq&FOVus&b1`=`Y5FGda`=leKNo7+p?)BwXy`@gk_ zFDNn=)gBi7PfKzxBRA(tcU0t7XhB;v!B)53Y2sqhp<U9-0NnH2)SsWR9^AJT%&t1(9s*m~0p zd=R1bNxxC9CA#LZn2Ez`8DB>1#iYjcJ*Tn+^BT})aCnbk z)HyOhL1JEVr(gfYlmf)^fE2p!S_0B3c&Qn?+uAuxbDi%bd# zL{=~56tNV%Wq{-g!{LRo;22W!Uqj)bp(>^S$0@-b`Xcf~WMa$eR2hUyoX!*O(9V;y YjA<+_>At3gXTYB}#6YuH { }); // For now, it's a separate test. However, we can merge it should display rich table editor when fixed - it("should change columns position", () => { + // TODO: Fix flaky tests on CI + it.skip("should change columns position", () => { cy.viewport("macbook-15"); cy.visitNewProcess(seed, "table", "Default"); cy.intercept("POST", "/api/nodes/*/validation", (request) => { From 6f7996e29d0ce3f6b1b6150636396eeebd529f46 Mon Sep 17 00:00:00 2001 From: gskrobisz Date: Mon, 10 Jun 2024 12:35:19 +0200 Subject: [PATCH 12/17] Fixed width for actions dialogs when error message is too long (#6130) --- .../client/src/components/modals/CustomActionDialog.tsx | 8 ++++---- .../toolbars/actions/buttons/CustomActionButton.tsx | 2 ++ .../scenarioActions/buttons/CancelDeployButton.tsx | 2 ++ .../scenarioActions/buttons/CustomActionButton.tsx | 2 ++ .../toolbars/scenarioActions/buttons/DeployButton.tsx | 2 ++ designer/client/src/stylesheets/variables.ts | 2 ++ 6 files changed, 14 insertions(+), 4 deletions(-) diff --git a/designer/client/src/components/modals/CustomActionDialog.tsx b/designer/client/src/components/modals/CustomActionDialog.tsx index 4d135797da4..7eae5aa9079 100644 --- a/designer/client/src/components/modals/CustomActionDialog.tsx +++ b/designer/client/src/components/modals/CustomActionDialog.tsx @@ -110,7 +110,7 @@ export function CustomActionDialog(props: WindowContentProps(); - const confirm = useCallback(async () => { + const confirmAction = useCallback(async () => { await HttpService.customAction(processName, action.name, value, comment).then((response) => { if (response.isSuccess) { dispatch(loadProcessState(processName)); @@ -124,10 +124,10 @@ export function CustomActionDialog(props: WindowContentProps [ - { title: t("dialog.button.cancel", "cancel"), action: () => props.close(), classname: LoadingButtonTypes.secondaryButton }, - { title: t("dialog.button.confirm", "confirm"), action: () => confirm() }, + { title: t("dialog.button.cancel", "Cancel"), action: () => props.close(), classname: LoadingButtonTypes.secondaryButton }, + { title: t("dialog.button.confirm", "Ok"), action: () => confirmAction() }, ], - [confirm, props, t], + [confirmAction, props, t], ); return ( diff --git a/designer/client/src/components/toolbars/actions/buttons/CustomActionButton.tsx b/designer/client/src/components/toolbars/actions/buttons/CustomActionButton.tsx index 219d780fff7..98f07725c6c 100644 --- a/designer/client/src/components/toolbars/actions/buttons/CustomActionButton.tsx +++ b/designer/client/src/components/toolbars/actions/buttons/CustomActionButton.tsx @@ -7,6 +7,7 @@ import { StatusType } from "../../../Process/types"; import { ToolbarButton } from "../../../toolbarComponents/toolbarButtons"; import { ToolbarButtonProps } from "../../types"; import UrlIcon from "../../../UrlIcon"; +import { ACTION_DIALOG_WIDTH } from "../../../../stylesheets/variables"; type CustomActionProps = { action: CustomAction; @@ -39,6 +40,7 @@ export default function CustomActionButton(props: CustomActionProps) { open({ title: action.name, kind: WindowKind.customAction, + width: ACTION_DIALOG_WIDTH, meta: action, }) } diff 
--git a/designer/client/src/components/toolbars/scenarioActions/buttons/CancelDeployButton.tsx b/designer/client/src/components/toolbars/scenarioActions/buttons/CancelDeployButton.tsx index be7076bfd25..cc14a764dcb 100644 --- a/designer/client/src/components/toolbars/scenarioActions/buttons/CancelDeployButton.tsx +++ b/designer/client/src/components/toolbars/scenarioActions/buttons/CancelDeployButton.tsx @@ -10,6 +10,7 @@ import { WindowKind, useWindows } from "../../../../windowManager"; import { ToggleProcessActionModalData } from "../../../modals/DeployProcessDialog"; import { ToolbarButton } from "../../../toolbarComponents/toolbarButtons"; import { ToolbarButtonProps } from "../../types"; +import { ACTION_DIALOG_WIDTH } from "../../../../stylesheets/variables"; export default function CancelDeployButton(props: ToolbarButtonProps) { const { t } = useTranslation(); @@ -33,6 +34,7 @@ export default function CancelDeployButton(props: ToolbarButtonProps) { open({ title: message, kind: WindowKind.deployProcess, + width: ACTION_DIALOG_WIDTH, meta: { action }, }) } diff --git a/designer/client/src/components/toolbars/scenarioActions/buttons/CustomActionButton.tsx b/designer/client/src/components/toolbars/scenarioActions/buttons/CustomActionButton.tsx index 219d780fff7..98f07725c6c 100644 --- a/designer/client/src/components/toolbars/scenarioActions/buttons/CustomActionButton.tsx +++ b/designer/client/src/components/toolbars/scenarioActions/buttons/CustomActionButton.tsx @@ -7,6 +7,7 @@ import { StatusType } from "../../../Process/types"; import { ToolbarButton } from "../../../toolbarComponents/toolbarButtons"; import { ToolbarButtonProps } from "../../types"; import UrlIcon from "../../../UrlIcon"; +import { ACTION_DIALOG_WIDTH } from "../../../../stylesheets/variables"; type CustomActionProps = { action: CustomAction; @@ -39,6 +40,7 @@ export default function CustomActionButton(props: CustomActionProps) { open({ title: action.name, kind: WindowKind.customAction, + width: ACTION_DIALOG_WIDTH, meta: action, }) } diff --git a/designer/client/src/components/toolbars/scenarioActions/buttons/DeployButton.tsx b/designer/client/src/components/toolbars/scenarioActions/buttons/DeployButton.tsx index ce3bc88fcf5..740601021fb 100644 --- a/designer/client/src/components/toolbars/scenarioActions/buttons/DeployButton.tsx +++ b/designer/client/src/components/toolbars/scenarioActions/buttons/DeployButton.tsx @@ -11,6 +11,7 @@ import { WindowKind } from "../../../../windowManager"; import { ToggleProcessActionModalData } from "../../../modals/DeployProcessDialog"; import { ToolbarButton } from "../../../toolbarComponents/toolbarButtons"; import { ToolbarButtonProps } from "../../types"; +import { ACTION_DIALOG_WIDTH } from "../../../../stylesheets/variables"; export default function DeployButton(props: ToolbarButtonProps) { const dispatch = useDispatch(); @@ -49,6 +50,7 @@ export default function DeployButton(props: ToolbarButtonProps) { open({ title: message, kind: WindowKind.deployProcess, + width: ACTION_DIALOG_WIDTH, meta: { action, displayWarnings: true }, }) } diff --git a/designer/client/src/stylesheets/variables.ts b/designer/client/src/stylesheets/variables.ts index f01ac5a192a..ee5592258a0 100644 --- a/designer/client/src/stylesheets/variables.ts +++ b/designer/client/src/stylesheets/variables.ts @@ -6,3 +6,5 @@ export const PANEL_BUTTON_SIZE = SIDEBAR_WIDTH / 4; export const PANEL_BUTTON_SMALL_SIZE = SIDEBAR_WIDTH / 6; export const MODAL_HEADER_HEIGHT = 30; + +export const ACTION_DIALOG_WIDTH = 
652; From c3ef5f97837de0079df6cefcaf4a54eee6317a41 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Bigorajski?= <72501021+lukasz-bigorajski@users.noreply.github.com> Date: Mon, 10 Jun 2024 21:43:29 +0200 Subject: [PATCH 13/17] =?UTF-8?q?[NU-1658]=20Add=20possiblity=20to=20confi?= =?UTF-8?q?gure=20EXACTLY=5FONCE=20delivery=20mode=20for=20=E2=80=A6=20(#6?= =?UTF-8?q?136)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- build.sbt | 3 +- docs/Changelog.md | 1 + .../model/Flink.md | 4 +- .../model/ModelConfiguration.md | 2 +- docs/integration/KafkaIntegration.md | 17 ++-- docs/operations_guide/Flink.md | 68 ++++++++++++++++ .../PartitionByKeyFlinkKafkaProducer.scala | 16 +++- .../defaultmodel/FinkExactlyOnceItSpec.scala | 79 +++++++++++++++++++ .../defaultmodel/FlinkWithKafkaSuite.scala | 11 ++- .../LiteKafkaComponentProviderSpec.scala | 50 ++++++++++++ .../LiteKafkaComponentProvider.scala | 24 ++++-- .../nussknacker/engine/spel/Implicits.scala | 2 + .../engine/spel/SpelExtension.scala | 13 +++ .../engine/kafka/RichKafkaConsumer.scala | 6 ++ .../engine/kafka/KafkaConfig.scala | 9 ++- 15 files changed, 281 insertions(+), 24 deletions(-) create mode 100644 engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FinkExactlyOnceItSpec.scala create mode 100644 engine/lite/components/kafka-tests/src/test/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProviderSpec.scala create mode 100644 scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExtension.scala diff --git a/build.sbt b/build.sbt index dea3f8b16f2..aea3b55b329 100644 --- a/build.sbt +++ b/build.sbt @@ -1274,7 +1274,8 @@ lazy val liteKafkaComponentsTests: Project = (project in lite("components/kafka- libraryDependencies ++= { Seq( "org.scalacheck" %% "scalacheck" % scalaCheckV % Test, - "org.scalatestplus" %% s"scalacheck-$scalaCheckVshort" % scalaTestPlusV % Test + "org.scalatestplus" %% s"scalacheck-$scalaCheckVshort" % scalaTestPlusV % Test, + "org.scalatestplus" %% "mockito-4-11" % scalaTestPlusV % Test, ) }, ) diff --git a/docs/Changelog.md b/docs/Changelog.md index 553175a5e7b..be6077e8fc1 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -8,6 +8,7 @@ * [#5982](https://github.com/TouK/nussknacker/pull/5982) Batch processing mode related improvements: * Deployments API returns correct status of deployment instead of returning always the last deployment's status * [#6121](https://github.com/TouK/nussknacker/pull/6121) Add functionality to reorder columns within the table editor. +* [#6136](https://github.com/TouK/nussknacker/pull/6136) Add possibility to configure kafka exactly-once delivery for flink. 1.15.2 (7 June 2024) ------------------------- diff --git a/docs/installation_configuration_guide/model/Flink.md b/docs/installation_configuration_guide/model/Flink.md index 8ee9dfd56f6..1d2b647a14b 100644 --- a/docs/installation_configuration_guide/model/Flink.md +++ b/docs/installation_configuration_guide/model/Flink.md @@ -58,9 +58,9 @@ Out of the box, Nussknacker provides following ExceptionHandler types: - VerboselyLogging - log error to Flink logs on `error` level, together with all variables (should be used mainly for debugging) - Kafka - send errors to Kafka topic, see [common config](../../integration/KafkaIntegration.md#exception-handling) for the details. 
-### Configuring restart strategies +### Configuring restart strategies -We rely on Flink restart strategies described [in documentation](https://ci.apache.org/projects/flink/flink-docs-stable/docs/dev/execution/task_failure_recovery/). +We rely on Flink restart strategies described [in documentation](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/ops/state/task_failure_recovery/). It's also possible to configure restart strategies per scenario, using additional properties. ``` diff --git a/docs/installation_configuration_guide/model/ModelConfiguration.md b/docs/installation_configuration_guide/model/ModelConfiguration.md index aa7051af1fa..84ce7a4ab03 100644 --- a/docs/installation_configuration_guide/model/ModelConfiguration.md +++ b/docs/installation_configuration_guide/model/ModelConfiguration.md @@ -157,7 +157,7 @@ Fields `title`, `icon`, `url` can contain templates: `$componentId` nad `$compon You can override default grouping of basic components in toolbox panels with `componentsGroupMapping` setting. Component names are keys, while values are toolbox panels name (e.g. sources, enrichers etc.) | -## Scenario properties +## Scenario properties It's possible to add additional properties for scenario. They can be used for allowing more detailed scenario information (e.g. pass information about marketing campaign target etc.), diff --git a/docs/integration/KafkaIntegration.md b/docs/integration/KafkaIntegration.md index a60ba79cce9..f116e7773aa 100644 --- a/docs/integration/KafkaIntegration.md +++ b/docs/integration/KafkaIntegration.md @@ -187,6 +187,8 @@ Important thing to remember is that Kafka server addresses/Schema Registry addre | kafkaProperties."azure.tenant.id" | High | string | | (Azure-only) Azure's tenant id | | kafkaProperties."azure.client.id" | High | string | | (Azure-only) Azure's client id | | kafkaProperties."azure.client.secret" | High | string | | (Azure-only) Azure's client secret | +| kafkaProperties."transaction.timeout.ms" | Medium | number | 600000 | Transaction timeout in millis for transactional producer [transaction timeout](https://kafka.apache.org/documentation/#producerconfigs_transaction.timeout.ms) | +| kafkaProperties."isolation.level" | High | string | | Controls how to read messages written transactionally. 
[isolation.level](https://kafka.apache.org/documentation/#consumerconfigs_isolation.level) | | kafkaProperties | Medium | map | | Additional configuration of [producers](https://kafka.apache.org/documentation/#producerconfigs) or [consumers](https://kafka.apache.org/documentation/#consumerconfigs) | | useStringForKey | Medium | boolean | true | Kafka message keys will be in the string format (not in Avro) | | kafkaEspProperties.forceLatestRead | Medium | boolean | false | If scenario is restarted, should offsets of source consumers be reset to latest (can be useful in test enrivonments) | @@ -279,13 +281,14 @@ Important thing to remember is that Kafka server addresses/schema registry addre See [common config](#available-configuration-options) for the details of Kafka configuration, the table below presents additional options available only in Flink engine: -| Name | Importance | Type | Default value | Description | -|---------------------------------------------------|------------|----------------------------|--------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| kafkaEspProperties.defaultMaxOutOfOrdernessMillis | Medium | long | 60000 (60 seconds) | Configuration of [bounded out of orderness watermark generator](https://ci.apache.org/projects/flink/flink-docs-stable/docs/dev/datastream/event-time/built_in/#fixed-amount-of-lateness) used by Kafka sources | -| idleTimeout.enabled | Medium | boolean | true | Enabling [idleness](https://nightlies.apache.org/flink/flink-docs-stable/docs/connectors/datastream/kafka/#idleness) used by Kafka sources | -| idleTimeout.duration | Medium | long | 3 minutes | Configuration of [idle timout](https://nightlies.apache.org/flink/flink-docs-stable/docs/connectors/datastream/kafka/#idleness) used by Kafka sources | -| consumerGroupNamingStrategy | Low | processId/processId-nodeId | processId-nodeId | How consumer groups for sources should be named | -| avroKryoGenericRecordSchemaIdSerialization | Low | boolean | true | Should Avro messages from topics registered in schema registry be serialized in optimized way, by serializing only schema id, not the whole schema | +| Name | Importance | Type | Default value | Description | +|---------------------------------------------------|------------|------------------------------------------|--------------------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| kafkaEspProperties.defaultMaxOutOfOrdernessMillis | Medium | long | 60000 (60 seconds) | Configuration of [bounded out of orderness watermark generator](https://ci.apache.org/projects/flink/flink-docs-stable/docs/dev/datastream/event-time/built_in/#fixed-amount-of-lateness) used by Kafka sources | +| idleTimeout.enabled | Medium | boolean | true | Enabling [idleness](https://nightlies.apache.org/flink/flink-docs-stable/docs/connectors/datastream/kafka/#idleness) used by Kafka sources | +| idleTimeout.duration | Medium | long | 3 minutes | Configuration of [idle timout](https://nightlies.apache.org/flink/flink-docs-stable/docs/connectors/datastream/kafka/#idleness) used by Kafka sources | +| consumerGroupNamingStrategy | Low | processId/processId-nodeId | processId-nodeId | How consumer groups for sources should be named | 
+| avroKryoGenericRecordSchemaIdSerialization        | Low        | boolean                                  | true               | Should Avro messages from topics registered in schema registry be serialized in optimized way, by serializing only schema id, not the whole schema                                                                |
+| sinkDeliveryGuarantee                              | Medium     | enum (EXACTLY_ONCE, AT_LEAST_ONCE, NONE) | AT_LEAST_ONCE      | Configuration of [fault tolerance semantic](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/connectors/datastream/kafka/#fault-tolerance)                                                          |
 
 ### Configuration for Lite engine
 
diff --git a/docs/operations_guide/Flink.md b/docs/operations_guide/Flink.md
index 9cd51406b6a..abba85405bd 100644
--- a/docs/operations_guide/Flink.md
+++ b/docs/operations_guide/Flink.md
@@ -29,6 +29,74 @@ In particular, one must not forget that the Flink connector (when checkpoints ar
 * Commits the offsets to Kafka only during checkpoint - so offsets returned by Kafka almost always will not be correct.
 * Ignore offsets in Kafka when it’s started with the checkpointed state - topic offsets are also saved in the checkpointed state.
 
+#### End-to-end Exactly-once event processing
+
+Nussknacker allows you to process events with Exactly-once semantics. This feature is provided by Flink.
+An important note is that we guarantee that the message is delivered once and only once via the Kafka sink in the scenario.
+If a fault occurs between checkpoints and after an operation that performs side effects, such as enrichment,
+it is possible that this action will be repeated.
+To read more about it, see the [Flink blog post](https://flink.apache.org/2018/02/28/an-overview-of-end-to-end-exactly-once-processing-in-apache-flink-with-apache-kafka-too/) and
+[Flink fault tolerance](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/learn-flink/fault_tolerance/#exactly-once-guarantees).
+More information about certain connectors can be found in the [section with fault tolerance for connectors](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/connectors/datastream/guarantees/).
+Kafka connector-specific information is provided in [this section of the Kafka connector documentation](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/connectors/datastream/kafka/#fault-tolerance).
+
+In order to achieve end-to-end Exactly-once event processing, you need to check configuration in multiple places:
+- Flink cluster configuration:
+  - Configure checkpointing.
+    A prerequisite is persistent storage for the state, and you should make sure that this is configured.
+    In Exactly-once mode, events are committed during checkpoints, and consequently the output events become visible within
+    the time range specified by the checkpoint interval. It's essential to configure a proper interval in the range of
+    1-10 seconds (the interval should be configured in Nussknacker - see the `Configure the checkpointing interval` section).
+    Such a short interval puts a large overhead on Flink, so you should consider configuring:
+    - Incremental checkpoints: [Flink docs](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/dev/datastream/fault-tolerance/checkpointing/#state-backend-incremental).
+    - Unaligned checkpoints: [Flink docs](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/ops/state/checkpointing_under_backpressure/#unaligned-checkpoints).
+    Additionally, you have to ensure that the checkpointing mode is set to `EXACTLY_ONCE` - see [Flink docs](https://nightlies.apache.org/flink/flink-docs-release-1.18/docs/deployment/config/#execution-checkpointing-mode).
+  - Ensure that task failure recovery is configured - see
+    [Configuring restart strategies](../installation_configuration_guide/model/Flink.md#configuring-restart-strategies).
+    The main purpose of Exactly-once is not to miss any events and not to produce duplicated events during failures.
+    If task failure recovery is not configured, after a failure the task will not be running and will stay in a failed state.
+    As a result, the Exactly-once mechanism cannot be used.
+- Nussknacker configuration:
+  - Configure the property `components.kafka.config.sinkDeliveryGuarantee` to `"EXACTLY_ONCE"`, e.g.
+    ```
+    kafkaConfig {
+      kafkaProperties {
+        "bootstrap.servers": ${?KAFKA_ADDRESS}
+        "schema.registry.url": ${?SCHEMA_REGISTRY_URL}
+        "auto.offset.reset": ${?KAFKA_AUTO_OFFSET_RESET}
+        "isolation.level": "read_committed"
+      }
+      sinkDeliveryGuarantee: "EXACTLY_ONCE"
+    }
+    ```
+  - Configure the checkpointing interval:
+    The default value of the interval is 10 minutes, which is not acceptable for Exactly-once.
+    Therefore, it needs to be set to an appropriate value.
+    - The interval can be configured globally for all scenarios via
+      [Flink model configuration](../installation_configuration_guide/model/Flink.md#flink-specific-model-configuration).
+    - You can override the global interval for a scenario by setting `Checkpoint interval in seconds` in the scenario
+      properties in the UI.
+  - Configure the Flink Kafka producer `transaction.timeout.ms` to be equal to "maximum checkpoint duration + maximum
+    restart duration" in the property `kafkaProperties."transaction.timeout.ms"`
+    ([kafkaConfig](../integration/KafkaIntegration.md#available-configuration-options)), or data loss may happen when
+    Kafka expires an uncommitted transaction.
+  - Ensure the Flink Kafka consumer `isolation.level` is set to `read_committed`
+    ([kafkaConfig](../integration/KafkaIntegration.md#available-configuration-options)) if you plan to consume events
+    from a transactional source.
+- Kafka cluster configuration:
+  - Ensure your Kafka version supports transactions.
+  - Ensure that the Kafka broker `transaction.max.timeout.ms`
+    ([Kafka docs](https://kafka.apache.org/documentation/#brokerconfigs_transaction.max.timeout.ms)) is greater than
+    the producer `transaction.timeout.ms`.
+  - Ensure that your Kafka broker cluster has at least three brokers
+    ([Kafka docs](https://kafka.apache.org/documentation/#producerconfigs_transactional.id)).
+  - According to [this Current presentation](https://www.confluent.io/events/current/2023/3-flink-mistakes-we-made-so-you-wont-have-to/),
+    you should also consider configuring `transactional.id.expiration.ms`, because transactionId
+    metadata is stored in Kafka on every checkpoint. Each entry weighs around 300 bytes, so the accumulated size can
+    become large. As a result, you should configure a policy that expires transactionIds.
+- Configuration of applications consuming Nussknacker's messages:
+  - Ensure your consumer has `isolation.level` set to `read_committed` (see the illustrative sketch below).
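+
+As an illustration only (this consumer is not part of Nussknacker; the broker address, group id and topic name below are
+placeholder assumptions), a minimal downstream consumer that honours the `read_committed` isolation level could look like this:
+
+```scala
+import java.time.Duration
+import java.util.Properties
+import org.apache.kafka.clients.consumer.{ConsumerConfig, KafkaConsumer}
+import org.apache.kafka.common.serialization.StringDeserializer
+import scala.jdk.CollectionConverters._
+
+object ReadCommittedConsumerExample extends App {
+  val props = new Properties()
+  props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092") // assumed broker address
+  props.put(ConsumerConfig.GROUP_ID_CONFIG, "downstream-app")          // assumed consumer group
+  props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
+  props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, classOf[StringDeserializer].getName)
+  // Read only events from committed transactions, i.e. those confirmed by a completed Flink checkpoint.
+  props.put(ConsumerConfig.ISOLATION_LEVEL_CONFIG, "read_committed")
+
+  val consumer = new KafkaConsumer[String, String](props)
+  consumer.subscribe(List("scenario-output-topic").asJava) // assumed output topic name
+  consumer.poll(Duration.ofSeconds(1)).asScala.foreach(record => println(s"${record.key()} -> ${record.value()}"))
+  consumer.close()
+}
+```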
+ ## Nussknacker and Flink cluster diff --git a/engine/flink/kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/PartitionByKeyFlinkKafkaProducer.scala b/engine/flink/kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/PartitionByKeyFlinkKafkaProducer.scala index 18e4d1f4550..bf8fd1eb577 100644 --- a/engine/flink/kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/PartitionByKeyFlinkKafkaProducer.scala +++ b/engine/flink/kafka-components-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/PartitionByKeyFlinkKafkaProducer.scala @@ -2,6 +2,7 @@ package pl.touk.nussknacker.engine.kafka import com.github.ghik.silencer.silent import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer +import org.apache.kafka.clients.producer.ProducerConfig import pl.touk.nussknacker.engine.kafka.serialization.FlinkSerializationSchemaConversions.wrapToFlinkSerializationSchema @silent("deprecated") @@ -11,12 +12,21 @@ object PartitionByKeyFlinkKafkaProducer { config: KafkaConfig, topic: String, serializationSchema: serialization.KafkaSerializationSchema[T], - clientId: String, - semantic: FlinkKafkaProducer.Semantic = FlinkKafkaProducer.Semantic.AT_LEAST_ONCE + clientId: String ): FlinkKafkaProducer[T] = { val props = KafkaUtils.toProducerProperties(config, clientId) // we set default to 10min, as FlinkKafkaProducer logs warn if not set - props.setProperty("transaction.timeout.ms", "600000") + props.putIfAbsent(ProducerConfig.TRANSACTION_TIMEOUT_CONFIG, "600000") + val semantic = config.sinkDeliveryGuarantee match { + case Some(value) => + value match { + case SinkDeliveryGuarantee.ExactlyOnce => FlinkKafkaProducer.Semantic.EXACTLY_ONCE + case SinkDeliveryGuarantee.AtLeastOnce => FlinkKafkaProducer.Semantic.AT_LEAST_ONCE + case SinkDeliveryGuarantee.None => FlinkKafkaProducer.Semantic.NONE + } + // AT_LEAST_ONCE is default + case None => FlinkKafkaProducer.Semantic.AT_LEAST_ONCE + } new FlinkKafkaProducer[T](topic, wrapToFlinkSerializationSchema(serializationSchema), props, semantic) } diff --git a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FinkExactlyOnceItSpec.scala b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FinkExactlyOnceItSpec.scala new file mode 100644 index 00000000000..850ceca6ce5 --- /dev/null +++ b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FinkExactlyOnceItSpec.scala @@ -0,0 +1,79 @@ +package pl.touk.nussknacker.defaultmodel + +import com.typesafe.config.Config +import com.typesafe.config.ConfigValueFactory.fromAnyRef +import com.typesafe.scalalogging.LazyLogging +import io.circe.Json +import org.scalatest.time.{Millis, Seconds, Span} +import pl.touk.nussknacker.defaultmodel.SampleSchemas.RecordSchemas +import pl.touk.nussknacker.engine.build.ScenarioBuilder +import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess +import pl.touk.nussknacker.engine.kafka.KafkaTestUtils.richConsumer +import pl.touk.nussknacker.engine.schemedkafka.KafkaUniversalComponentTransformer +import pl.touk.nussknacker.engine.spel.SpelExtension._ +import pl.touk.nussknacker.test.{KafkaConfigProperties, PatientScalaFutures} + +import scala.jdk.CollectionConverters._ + +class FinkExactlyOnceItSpec + extends FlinkWithKafkaSuite + with PatientScalaFutures + with LazyLogging + with WithKafkaComponentsConfig { + + override val avroAsJsonSerialization: Boolean = true + + override def kafkaComponentsConfig: Config = super.kafkaComponentsConfig + .withValue("config.sinkDeliveryGuarantee", 
fromAnyRef("EXACTLY_ONCE")) + .withValue(KafkaConfigProperties.property("config", "isolation.level"), fromAnyRef("read_committed")) + + private val inputOutputMessage = + """ + |{ + | "first": "Jan", + | "last": "Kowalski" + |} + |""".stripMargin + + test("should read message from kafka and write message in transaction to kafka on checkpoint") { + val topicConfig = createAndRegisterAvroTopicConfig("cash-transactions", RecordSchemas) + kafkaClient.createTopic(topicConfig.input, partitions = 1) + kafkaClient.createTopic(topicConfig.output, partitions = 1) + + val sendResult = sendAsJson(inputOutputMessage, topicConfig.input).futureValue + logger.info(s"Messages sent successful: $sendResult") + + run(buildScenario(topicConfig)) { + val consumer = kafkaClient.createConsumer() + val result = consumer.consumeWithJson[Json](topicConfig.output).take(1).head + result.message() shouldEqual parseJson(inputOutputMessage) + eventually(timeout(Span(2, Seconds)), interval(Span(100, Millis))) { + // https://stackoverflow.com/a/56183132 + // if message is committed with transaction there is additional control batch in a log + consumer.getEndOffsets(topicConfig.output).values().asScala.head shouldEqual 2 + } + } + } + + private def buildScenario(topicConfig: TopicConfig): CanonicalProcess = + ScenarioBuilder + .streaming("exactly-once-test") + .parallelism(1) + .source( + "read committed source", + "kafka", + KafkaUniversalComponentTransformer.topicParamName.value -> s"'${topicConfig.input}'".spel, + KafkaUniversalComponentTransformer.schemaVersionParamName.value -> s"'1'".spel + ) + .emptySink( + "end", + "kafka", + KafkaUniversalComponentTransformer.sinkKeyParamName.value -> "".spel, + KafkaUniversalComponentTransformer.topicParamName.value -> s"'${topicConfig.output}'".spel, + KafkaUniversalComponentTransformer.schemaVersionParamName.value -> s"'1'".spel, + KafkaUniversalComponentTransformer.sinkRawEditorParamName.value -> s"false".spel, + "first" -> "#input.first".spel, + "last" -> "#input.last".spel + ) + +} diff --git a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FlinkWithKafkaSuite.scala b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FlinkWithKafkaSuite.scala index fba5949ca5d..f942cb907ce 100644 --- a/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FlinkWithKafkaSuite.scala +++ b/engine/flink/tests/src/test/scala/pl/touk/nussknacker/defaultmodel/FlinkWithKafkaSuite.scala @@ -1,6 +1,6 @@ package pl.touk.nussknacker.defaultmodel -import com.typesafe.config.ConfigFactory +import com.typesafe.config.{Config, ConfigFactory} import com.typesafe.config.ConfigValueFactory.fromAnyRef import io.confluent.kafka.schemaregistry.ParsedSchema import io.confluent.kafka.serializers.{KafkaAvroDeserializer, KafkaAvroSerializer} @@ -55,7 +55,8 @@ abstract class FlinkWithKafkaSuite with BeforeAndAfterAll with BeforeAndAfter with WithConfig - with Matchers { + with Matchers + with WithKafkaComponentsConfig { private lazy val creator: DefaultConfigCreator = new TestDefaultConfigCreator @@ -109,7 +110,7 @@ abstract class FlinkWithKafkaSuite protected def avroAsJsonSerialization = false - private lazy val kafkaComponentsConfig = ConfigFactory + override def kafkaComponentsConfig: Config = ConfigFactory .empty() .withValue( KafkaConfigProperties.bootstrapServersProperty("config"), @@ -290,3 +291,7 @@ class TestDefaultConfigCreator extends DefaultConfigCreator { Seq(LoggingListener) } + +trait WithKafkaComponentsConfig { + def kafkaComponentsConfig: Config +} 
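
For illustration only (not part of this patch; the suite name and timeout value are assumptions), any integration suite can now
plug its own Kafka components configuration through the `kafkaComponentsConfig` hook introduced above, for example:

```scala
package pl.touk.nussknacker.defaultmodel

import com.typesafe.config.Config
import com.typesafe.config.ConfigValueFactory.fromAnyRef
import pl.touk.nussknacker.test.KafkaConfigProperties

// Hypothetical suite overriding the shared Kafka components configuration for its own scenarios only.
class FlinkLongTransactionTimeoutItSpec extends FlinkWithKafkaSuite {

  override def kafkaComponentsConfig: Config = super.kafkaComponentsConfig
    .withValue(KafkaConfigProperties.property("config", "transaction.timeout.ms"), fromAnyRef("900000"))
}
```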
diff --git a/engine/lite/components/kafka-tests/src/test/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProviderSpec.scala b/engine/lite/components/kafka-tests/src/test/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProviderSpec.scala new file mode 100644 index 00000000000..8983892b8ed --- /dev/null +++ b/engine/lite/components/kafka-tests/src/test/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProviderSpec.scala @@ -0,0 +1,50 @@ +package pl.touk.nussknacker.engine.lite.components + +import com.typesafe.config.ConfigFactory +import com.typesafe.config.ConfigValueFactory.{fromAnyRef, fromMap} +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.must.Matchers +import org.scalatestplus.mockito.MockitoSugar +import pl.touk.nussknacker.engine.api.process.ProcessObjectDependencies +import pl.touk.nussknacker.engine.schemedkafka.schemaregistry.SchemaRegistryClientFactory +import pl.touk.nussknacker.test.ProcessUtils.convertToAnyShouldWrapper + +import scala.jdk.CollectionConverters._ + +class LiteKafkaComponentProviderSpec extends AnyFunSuite with MockitoSugar with Matchers { + + val schemaRegistryClientFactory: SchemaRegistryClientFactory = mock[SchemaRegistryClientFactory] + val sut: LiteKafkaComponentProvider = new LiteKafkaComponentProvider(schemaRegistryClientFactory) + + test("should create kafka components from configuration") { + val emptyConfig = ConfigFactory.empty() + val result = sut.create(emptyConfig, ProcessObjectDependencies.withConfig(emptyConfig)) + + result.map(_.name).distinct shouldBe List("kafka") + result.map(_.component.getClass.getSimpleName) shouldBe + List("UniversalKafkaSourceFactory", "UniversalKafkaSinkFactory") + } + + test("should throw exception if idleness is passed in configuration") { + val config = ConfigFactory + .empty() + .withValue("kafka.idleTimeout", fromMap(Map("enabled" -> "true", "duration" -> "3 seconds").asJava)) + + val ex = intercept[IllegalArgumentException](sut.create(config, ProcessObjectDependencies.withConfig(config))) + + ex.getMessage shouldBe "Idleness is a Flink specific feature and is not supported in Lite Kafka sources. " + + "Please remove the idleness config from your Lite Kafka sources config." + } + + test("should throw exception if sinkDeliveryGuarantee is passed in configuration") { + val config = ConfigFactory + .empty() + .withValue("kafka.sinkDeliveryGuarantee", fromAnyRef("EXACTLY_ONCE")) + + val ex = intercept[IllegalArgumentException](sut.create(config, ProcessObjectDependencies.withConfig(config))) + + ex.getMessage shouldBe "SinkDeliveryGuarantee is a Flink specific feature and is not supported in Lite Kafka " + + "config. Please remove the sinkDeliveryGuarantee property from your Lite Kafka config." 
+ } + +} diff --git a/engine/lite/components/kafka/src/main/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProvider.scala b/engine/lite/components/kafka/src/main/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProvider.scala index 6de5512f7e5..317c7fda134 100644 --- a/engine/lite/components/kafka/src/main/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProvider.scala +++ b/engine/lite/components/kafka/src/main/scala/pl/touk/nussknacker/engine/lite/components/LiteKafkaComponentProvider.scala @@ -40,12 +40,7 @@ class LiteKafkaComponentProvider(schemaRegistryClientFactory: SchemaRegistryClie val universalSerdeProvider = UniversalSchemaBasedSerdeProvider.create(schemaRegistryClientFactory) - if (KafkaConfig.parseConfig(dependencies.config).idleTimeout.isDefined) { - throw new IllegalArgumentException( - "Idleness is a Flink specific feature and is not supported in Lite Kafka sources. " + - "Please remove the idleness config from your Lite Kafka sources config." - ) - } + validateConfiguration(dependencies.config) List( ComponentDefinition( @@ -72,4 +67,21 @@ class LiteKafkaComponentProvider(schemaRegistryClientFactory: SchemaRegistryClie override def isCompatible(version: NussknackerVersion): Boolean = true override def isAutoLoaded: Boolean = true + + private def validateConfiguration(config: Config): Unit = { + val kafkaConfig = KafkaConfig.parseConfig(config) + if (kafkaConfig.idleTimeout.isDefined) { + throw new IllegalArgumentException( + "Idleness is a Flink specific feature and is not supported in Lite Kafka sources. " + + "Please remove the idleness config from your Lite Kafka sources config." + ) + } + if (kafkaConfig.sinkDeliveryGuarantee.isDefined) { + throw new IllegalArgumentException( + "SinkDeliveryGuarantee is a Flink specific feature and is not supported in Lite Kafka config. " + + "Please remove the sinkDeliveryGuarantee property from your Lite Kafka config." 
+ ) + } + } + } diff --git a/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala b/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala index a505447186b..b8a0d821501 100644 --- a/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala +++ b/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala @@ -4,6 +4,8 @@ import pl.touk.nussknacker.engine.graph.expression.Expression import scala.language.implicitConversions +// TODO: Should be replaced with: pl.touk.nussknacker.engine.spel.SpelExtension +@Deprecated object Implicits { implicit def asSpelExpression(expression: String): Expression = Expression.spel(expression) diff --git a/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExtension.scala b/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExtension.scala new file mode 100644 index 00000000000..5769f50225d --- /dev/null +++ b/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/SpelExtension.scala @@ -0,0 +1,13 @@ +package pl.touk.nussknacker.engine.spel + +import pl.touk.nussknacker.engine.graph.expression.Expression + +trait SpelExtension { + + implicit class SpelExpresion(expression: String) { + def spel: Expression = Expression.spel(expression) + } + +} + +object SpelExtension extends SpelExtension diff --git a/utils/kafka-test-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/RichKafkaConsumer.scala b/utils/kafka-test-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/RichKafkaConsumer.scala index c0f2b56941e..a1cda3fb5cf 100644 --- a/utils/kafka-test-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/RichKafkaConsumer.scala +++ b/utils/kafka-test-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/RichKafkaConsumer.scala @@ -41,6 +41,12 @@ class RichKafkaConsumer[K, M](consumer: Consumer[K, M]) extends LazyLogging { LazyList.continually(()).flatMap(new Poller(secondsToWait)) } + def getEndOffsets(topic: String, secondsToWait: Int = DefaultSecondsToWait) = { + val partitions = fetchTopicPartitions(topic, secondsToWait) + consumer.assign(partitions.asJava) + consumer.endOffsets(partitions.asJava) + } + private def fetchTopicPartitions(topic: String, secondsToWait: Int) = { implicit val patienceConfig: PatienceConfig = PatienceConfig(Span(secondsToWait, Seconds), Span(100, Millis)) // We have to repeat it in eventually - partitionsFor with duration parameter sometimes just returns empty list diff --git a/utils/kafka-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/KafkaConfig.scala b/utils/kafka-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/KafkaConfig.scala index eca6c49cfda..67bfd2f46b2 100644 --- a/utils/kafka-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/KafkaConfig.scala +++ b/utils/kafka-utils/src/main/scala/pl/touk/nussknacker/engine/kafka/KafkaConfig.scala @@ -28,7 +28,8 @@ case class KafkaConfig( schemaRegistryCacheConfig: SchemaRegistryCacheConfig = SchemaRegistryCacheConfig(), avroAsJsonSerialization: Option[Boolean] = None, kafkaAddress: Option[String] = None, - idleTimeout: Option[IdlenessConfig] = None + idleTimeout: Option[IdlenessConfig] = None, + sinkDeliveryGuarantee: Option[SinkDeliveryGuarantee.Value] = None ) { def schemaRegistryClientKafkaConfig = SchemaRegistryClientKafkaConfig( @@ -117,3 +118,9 @@ object IdlenessConfig { val DefaultDuration: FiniteDuration = 3 minutes val DefaultConfig: IdlenessConfig = IdlenessConfig(enabled = true, duration = DefaultDuration) } + +object SinkDeliveryGuarantee extends 
Enumeration { + val ExactlyOnce: SinkDeliveryGuarantee.Value = Value("EXACTLY_ONCE") + val AtLeastOnce: SinkDeliveryGuarantee.Value = Value("AT_LEAST_ONCE") + val None: SinkDeliveryGuarantee.Value = Value("NONE") +} From 5482bd022b2b228eab5efc9922ce6619686a960b Mon Sep 17 00:00:00 2001 From: Arek Burdach Date: Wed, 12 Jun 2024 07:23:54 +0200 Subject: [PATCH 14/17] [NU-1686] Deployment statuses synchronization (#6155) --- .../api/deployment/DeploymentManager.scala | 13 ++++ .../api/deployment/DeploymentStatusName.scala | 37 +++++++++++ ...CachingProcessStateDeploymentManager.scala | 9 ++- .../simple/SimpleDeploymentStatus.scala | 27 ++++++++ .../deployment/simple/SimpleStateStatus.scala | 23 ++++++- .../testing/DeploymentManagerStub.scala | 5 +- ...ingProcessStateDeploymentManagerSpec.scala | 22 +++---- .../V1_052__AddDeploymentStatusDetails.sql | 3 + .../ui/api/DeploymentApiHttpService.scala | 17 +++-- .../description/DeploymentApiEndpoints.scala | 47 ++++++++++++-- .../ui/db/entity/BaseEntityFactory.scala | 18 ++++- .../newdeployment/DeploymentCommand.scala | 2 +- .../DeploymentEntityFactory.scala | 64 ++++++++++++++++-- .../process/newdeployment/DeploymentId.scala | 19 ------ .../newdeployment/DeploymentRepository.scala | 30 ++++++++- .../newdeployment/DeploymentService.scala | 61 +++++++---------- ...entsStatusesSynchronizationScheduler.scala | 58 +++++++++++++++++ .../DeploymentsStatusesSynchronizer.scala | 65 +++++++++++++++++++ .../InvalidDeploymentManagerStub.scala | 2 + .../server/AkkaHttpBasedRouteProvider.scala | 30 ++++++++- .../test/mock/MockDeploymentManager.scala | 2 + .../test/utils/domain/TestFactory.scala | 7 +- ...DeploymentApiHttpServiceBusinessSpec.scala | 24 ++++--- ...DeploymentApiHttpServiceBusinessSpec.scala | 18 ++--- ...tApiHttpServiceDeploymentCommentSpec.scala | 6 +- docs-internal/api/nu-designer-openapi.yaml | 30 ++++++++- docs/Changelog.md | 4 +- docs/MigrationGuide.md | 4 +- ...DevelopmentDeploymentManagerProvider.scala | 19 +++--- .../MockableDeploymentManagerProvider.scala | 10 +-- .../periodic/PeriodicDeploymentManager.scala | 7 +- .../periodic/DeploymentManagerStub.scala | 2 + .../management/periodic/FlinkClientStub.scala | 2 +- .../engine/management/FlinkRestManager.scala | 63 ++++++++++++------ .../management/rest/CachedFlinkClient.scala | 21 ++++-- .../engine/management/rest/FlinkClient.scala | 2 +- .../management/rest/HttpFlinkClient.scala | 26 ++++---- .../rest/CachedFlinkClientTest.scala | 14 ++-- .../engine/embedded/DeploymentStrategy.scala | 4 +- .../embedded/EmbeddedDeploymentManager.scala | 24 ++++++- .../RequestResponseDeploymentStrategy.scala | 6 +- .../StreamingDeploymentStrategy.scala | 12 ++-- .../k8s/manager/K8sDeploymentManager.scala | 10 ++- .../engine/deployment/DeploymentId.scala | 8 ++- .../engine/newdeployment/DeploymentId.scala | 16 +++++ 45 files changed, 682 insertions(+), 211 deletions(-) create mode 100644 designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala create mode 100644 designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala create mode 100644 designer/server/src/main/resources/db/migration/common/V1_052__AddDeploymentStatusDetails.sql delete mode 100644 designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentId.scala create mode 100644 
designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala create mode 100644 designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala create mode 100644 extensions-api/src/main/scala/pl/touk/nussknacker/engine/newdeployment/DeploymentId.scala diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala index caee8a5106c..69981c2c077 100644 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentManager.scala @@ -3,6 +3,7 @@ package pl.touk.nussknacker.engine.api.deployment import pl.touk.nussknacker.engine.api.deployment.inconsistency.InconsistentStateDetector import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName} import pl.touk.nussknacker.engine.deployment.CustomActionDefinition +import pl.touk.nussknacker.engine.newdeployment import scala.concurrent.ExecutionContext.Implicits._ import scala.concurrent.Future @@ -32,6 +33,8 @@ trait DeploymentManagerInconsistentStateHandlerMixIn { trait DeploymentManager extends AutoCloseable { + def deploymentSynchronisationSupport: DeploymentSynchronisationSupport + def processCommand[Result](command: DMScenarioCommand[Result]): Future[Result] final def getProcessState(idWithName: ProcessIdWithName, lastStateAction: Option[ProcessAction])( @@ -71,3 +74,13 @@ trait DeploymentManager extends AutoCloseable { Future.failed(new NotImplementedError()) } + +sealed trait DeploymentSynchronisationSupport + +trait DeploymentSynchronisationSupported extends DeploymentSynchronisationSupport { + + def getDeploymentStatusesToUpdate: Future[Map[newdeployment.DeploymentId, DeploymentStatus]] + +} + +case object NoDeploymentSynchronisationSupport extends DeploymentSynchronisationSupport diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala new file mode 100644 index 00000000000..efc78650ca9 --- /dev/null +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala @@ -0,0 +1,37 @@ +package pl.touk.nussknacker.engine.api.deployment + +import io.circe.Codec +import io.circe.generic.extras.semiauto.deriveUnwrappedCodec + +sealed trait DeploymentStatus { + def name: DeploymentStatusName +} + +final case class NoAttributesDeploymentStatus(override val name: DeploymentStatusName) extends DeploymentStatus + +final case class ProblemDeploymentStatus(description: String) extends DeploymentStatus { + override def name: DeploymentStatusName = ProblemDeploymentStatus.name +} + +object ProblemDeploymentStatus { + def name: DeploymentStatusName = DeploymentStatusName("PROBLEM") + + def extractDescription(status: DeploymentStatus): Option[String] = + status match { + case ProblemDeploymentStatus(description) => + Some(description) + case _ => + None + } + +} + +final case class DeploymentStatusName(value: String) { + override def toString: String = value +} + +object DeploymentStatusName { + + implicit val codec: 
Codec[DeploymentStatusName] = deriveUnwrappedCodec[DeploymentStatusName] + +} diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala index d0f317eb850..be4135f3cab 100644 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManager.scala @@ -12,8 +12,11 @@ import scala.concurrent.ExecutionContext.Implicits._ import scala.concurrent.Future import scala.concurrent.duration._ -class CachingProcessStateDeploymentManager(delegate: DeploymentManager, cacheTTL: FiniteDuration) - extends DeploymentManager { +class CachingProcessStateDeploymentManager( + delegate: DeploymentManager, + cacheTTL: FiniteDuration, + override val deploymentSynchronisationSupport: DeploymentSynchronisationSupport +) extends DeploymentManager { private val cache: AsyncCache[ProcessName, List[StatusDetails]] = Caffeine .newBuilder() @@ -68,7 +71,7 @@ object CachingProcessStateDeploymentManager extends LazyLogging { scenarioStateCacheTTL .map { cacheTTL => logger.debug(s"Wrapping DeploymentManager: $delegate with caching mechanism with TTL: $cacheTTL") - new CachingProcessStateDeploymentManager(delegate, cacheTTL) + new CachingProcessStateDeploymentManager(delegate, cacheTTL, delegate.deploymentSynchronisationSupport) } .getOrElse { logger.debug(s"Skipping ProcessState caching for DeploymentManager: $delegate") diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala new file mode 100644 index 00000000000..2fc8a26d064 --- /dev/null +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala @@ -0,0 +1,27 @@ +package pl.touk.nussknacker.engine.api.deployment.simple + +import pl.touk.nussknacker.engine.api.deployment.{ + DeploymentStatus, + DeploymentStatusName, + NoAttributesDeploymentStatus, + ProblemDeploymentStatus +} + +object SimpleDeploymentStatus { + + object Problem { + + private val DefaultDescription = "There are some problems with deployment." 
+ + val Failed: ProblemDeploymentStatus = ProblemDeploymentStatus(DefaultDescription) + + } + + val DuringDeploy: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("DURING_DEPLOY")) + val Running: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("RUNNING")) + val Finished: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("FINISHED")) + val Restarting: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("RESTARTING")) + val DuringCancel: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("DURING_CANCEL")) + val Canceled: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("CANCELED")) + +} diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala index 21c142222d8..4a506d224d2 100644 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala @@ -2,13 +2,34 @@ package pl.touk.nussknacker.engine.api.deployment.simple import pl.touk.nussknacker.engine.api.deployment.StateStatus.StatusName import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus.defaultActions -import pl.touk.nussknacker.engine.api.deployment.{ScenarioActionName, StateDefinitionDetails, StateStatus} +import pl.touk.nussknacker.engine.api.deployment.{ + DeploymentStatus, + NoAttributesDeploymentStatus, + NoAttributesStateStatus, + ProblemDeploymentStatus, + ScenarioActionName, + StateDefinitionDetails, + StateStatus +} import pl.touk.nussknacker.engine.api.process.VersionId import java.net.URI object SimpleStateStatus { + def fromDeploymentStatus(deploymentStatus: DeploymentStatus): StateStatus = { + deploymentStatus match { + case NoAttributesDeploymentStatus(name) => NoAttributesStateStatus(name.value) + // We assume that all deployment status have default allowedActions. Non-default allowedActions have only + // statuses that are not deployment statuses but scenario statuses. + case ProblemDeploymentStatus(description) => ProblemStateStatus(description) + case other => + throw new IllegalArgumentException( + s"Problem during conversion of deployment status to scenario status. Not expected deployment status: $other" + ) + } + } + // Represents general problem. 
final case class ProblemStateStatus(description: String, allowedActions: List[ScenarioActionName] = defaultActions) extends StateStatus { diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala index ec2534b63d3..9f8b846dc8b 100644 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/testing/DeploymentManagerStub.scala @@ -2,13 +2,14 @@ package pl.touk.nussknacker.engine.testing import cats.data.{Validated, ValidatedNel} import com.typesafe.config.Config -import pl.touk.nussknacker.engine.api.StreamMetaData import pl.touk.nussknacker.engine.api.component.ScenarioPropertyConfig import pl.touk.nussknacker.engine.api.definition._ import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleProcessStateDefinitionManager, SimpleStateStatus} +import pl.touk.nussknacker.engine.api.StreamMetaData import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName} import pl.touk.nussknacker.engine.deployment.CustomActionDefinition +import pl.touk.nussknacker.engine.newdeployment import pl.touk.nussknacker.engine.{ BaseModelData, DeploymentManagerDependencies, @@ -50,6 +51,8 @@ class DeploymentManagerStub extends BaseDeploymentManager with StubbingCommands override def customActionsDefinitions: List[CustomActionDefinition] = Nil + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + override def close(): Unit = {} } diff --git a/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala b/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala index 8f2b12ee902..99633fddfc7 100644 --- a/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala +++ b/designer/deployment-manager-api/src/test/scala/pl/touk/nussknacker/engine/api/deployment/cache/CachingProcessStateDeploymentManagerSpec.scala @@ -8,12 +8,7 @@ import org.scalatest.funsuite.AnyFunSuite import org.scalatest.matchers.should.Matchers import org.scalatestplus.mockito.MockitoSugar import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus -import pl.touk.nussknacker.engine.api.deployment.{ - DataFreshnessPolicy, - DeploymentManager, - StatusDetails, - WithDataFreshnessStatus -} +import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId import pl.touk.nussknacker.test.PatientScalaFutures @@ -30,8 +25,9 @@ class CachingProcessStateDeploymentManagerSpec with OptionValues { test("should ask delegate for a fresh state each time") { - val delegate = prepareDMReturningRandomStates - val cachingManager = new CachingProcessStateDeploymentManager(delegate, 10 seconds) + val delegate = prepareDMReturningRandomStates + val cachingManager = + new CachingProcessStateDeploymentManager(delegate, 10 seconds, NoDeploymentSynchronisationSupport) val results = List( cachingManager.getProcessStatesDeploymentIdNow(DataFreshnessPolicy.Fresh), @@ 
-44,8 +40,9 @@ class CachingProcessStateDeploymentManagerSpec } test("should cache state for DataFreshnessPolicy.CanBeCached") { - val delegate = prepareDMReturningRandomStates - val cachingManager = new CachingProcessStateDeploymentManager(delegate, 10 seconds) + val delegate = prepareDMReturningRandomStates + val cachingManager = + new CachingProcessStateDeploymentManager(delegate, 10 seconds, NoDeploymentSynchronisationSupport) val firstInvocation = cachingManager.getProcessStatesDeploymentIdNow(DataFreshnessPolicy.CanBeCached) firstInvocation.cached shouldBe false @@ -57,8 +54,9 @@ class CachingProcessStateDeploymentManagerSpec } test("should reuse state updated by DataFreshnessPolicy.Fresh during reading with DataFreshnessPolicy.CanBeCached") { - val delegate = prepareDMReturningRandomStates - val cachingManager = new CachingProcessStateDeploymentManager(delegate, 10 seconds) + val delegate = prepareDMReturningRandomStates + val cachingManager = + new CachingProcessStateDeploymentManager(delegate, 10 seconds, NoDeploymentSynchronisationSupport) val resultForFresh = cachingManager.getProcessStatesDeploymentIdNow(DataFreshnessPolicy.Fresh) resultForFresh.cached shouldBe false diff --git a/designer/server/src/main/resources/db/migration/common/V1_052__AddDeploymentStatusDetails.sql b/designer/server/src/main/resources/db/migration/common/V1_052__AddDeploymentStatusDetails.sql new file mode 100644 index 00000000000..5e2f5fa9c39 --- /dev/null +++ b/designer/server/src/main/resources/db/migration/common/V1_052__AddDeploymentStatusDetails.sql @@ -0,0 +1,3 @@ +ALTER TABLE "deployments" ADD COLUMN "status_name" VARCHAR(255) NOT NULL; +ALTER TABLE "deployments" ADD COLUMN "status_problem_description" VARCHAR(1022); +ALTER TABLE "deployments" ADD COLUMN "status_modified_at" TIMESTAMP NOT NULL; diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala index d37b6cf4d9b..860ec1e55bb 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala @@ -1,5 +1,6 @@ package pl.touk.nussknacker.ui.api +import pl.touk.nussknacker.engine.api.deployment.ProblemDeploymentStatus import pl.touk.nussknacker.ui.api.description.DeploymentApiEndpoints import pl.touk.nussknacker.ui.api.description.DeploymentApiEndpoints.Dtos._ import pl.touk.nussknacker.ui.process.newactivity.ActivityService @@ -56,10 +57,18 @@ class DeploymentApiHttpService( { deploymentId => deploymentService .getDeploymentStatus(deploymentId) - .map(_.left.map { - case DeploymentService.DeploymentNotFoundError(id) => DeploymentNotFoundError(id) - case DeploymentService.NoPermissionError => NoPermissionError - }) + .map( + _.map { statusWithModifiedAt => + GetDeploymentStatusResponse( + statusWithModifiedAt.value.name, + ProblemDeploymentStatus.extractDescription(statusWithModifiedAt.value), + statusWithModifiedAt.modifiedAt.toInstant + ) + }.left.map { + case DeploymentService.DeploymentNotFoundError(id) => DeploymentNotFoundError(id) + case DeploymentService.NoPermissionError => NoPermissionError + } + ) } } } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala index 2cd32798850..2b10e33fdc4 100644 --- 
a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala @@ -9,23 +9,26 @@ import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.{ ExpressionParserCompilationError, MissingRequiredProperty } -import pl.touk.nussknacker.engine.api.deployment.StateStatus.StatusName +import pl.touk.nussknacker.engine.api.deployment.DeploymentStatusName +import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus import pl.touk.nussknacker.engine.api.parameter.ParameterName import pl.touk.nussknacker.engine.api.process.ProcessName +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.restmodel.BaseEndpointDefinitions import pl.touk.nussknacker.restmodel.BaseEndpointDefinitions.SecuredEndpoint import pl.touk.nussknacker.restmodel.validation.PrettyValidationErrors -import pl.touk.nussknacker.restmodel.validation.ValidationResults.{NodeValidationError, UIGlobalError, ValidationErrors} +import pl.touk.nussknacker.restmodel.validation.ValidationResults.{UIGlobalError, ValidationErrors} import pl.touk.nussknacker.security.AuthCredentials import pl.touk.nussknacker.ui.api.BaseHttpService.CustomAuthorizationError -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentId import pl.touk.nussknacker.ui.process.repository.ApiCallComment import sttp.model.StatusCode +import sttp.tapir.Codec.PlainCodec import sttp.tapir.EndpointIO.{Example, Info} import sttp.tapir._ import sttp.tapir.derevo.schema import sttp.tapir.json.circe.jsonBody +import java.time.{Instant, LocalDateTime, ZoneId, ZoneOffset} import java.util.UUID class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseEndpointDefinitions { @@ -115,7 +118,8 @@ class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseE ) .withSecurity(auth) - lazy val getDeploymentStatusEndpoint: SecuredEndpoint[DeploymentId, GetDeploymentStatusError, StatusName, Any] = + lazy val getDeploymentStatusEndpoint + : SecuredEndpoint[DeploymentId, GetDeploymentStatusError, GetDeploymentStatusResponse, Any] = baseNuApiEndpoint .summary("Get status of a deployment") .tag("Deployments") @@ -123,7 +127,26 @@ class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseE .in( "deployments" / deploymentIdPathCapture / "status" ) - .out(statusCode(StatusCode.Ok).and(stringBody)) + .out( + statusCode(StatusCode.Ok).and( + jsonBody[GetDeploymentStatusResponse].examples( + List( + Example.of( + GetDeploymentStatusResponse(SimpleDeploymentStatus.Running.name, None, exampleInstant), + Some("RUNNING status") + ), + Example.of( + GetDeploymentStatusResponse( + SimpleDeploymentStatus.Problem.Failed.name, + Some(SimpleDeploymentStatus.Problem.Failed.description), + exampleInstant + ), + Some("PROBLEM status") + ) + ) + ) + ) + ) .errorOut( oneOf[GetDeploymentStatusError]( oneOfVariantValueMatcher[DeploymentNotFoundError]( @@ -145,6 +168,8 @@ class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseE private lazy val exampleDeploymentId = DeploymentId(UUID.fromString("a9a1e269-0b71-4582-a948-603482d27298")) + private lazy val exampleInstant = LocalDateTime.of(2024, 1, 1, 0, 0, 0).atZone(ZoneOffset.UTC).toInstant + private lazy val deploymentIdPathCapture = path[DeploymentId]("deploymentId") .copy(info = Info @@ -163,6 +188,9 @@ object DeploymentApiEndpoints { implicit val scenarioNameSchema: 
Schema[ProcessName] = Schema.string[ProcessName] + implicit val deploymentIdCodec: PlainCodec[DeploymentId] = + Codec.uuid.map(DeploymentId(_))(_.value) + // TODO: scenario graph version / the currently active version instead of the latest @derive(encoder, decoder, schema) final case class RunDeploymentRequest( @@ -171,6 +199,15 @@ object DeploymentApiEndpoints { comment: Option[ApiCallComment] ) + implicit val deploymentStatusNameCodec: Schema[DeploymentStatusName] = Schema.string[DeploymentStatusName] + + @derive(encoder, decoder, schema) + final case class GetDeploymentStatusResponse( + name: DeploymentStatusName, + problemDescription: Option[String], + modifiedAt: Instant + ) + implicit val nodeDeploymentDataCodec: Schema[NodeDeploymentData] = Schema.string[SqlFilteringExpression].as implicit val nodesDeploymentDataCodec: Schema[NodesDeploymentData] = Schema diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/BaseEntityFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/BaseEntityFactory.scala index e8a837a9fb4..68b7f22e920 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/BaseEntityFactory.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/entity/BaseEntityFactory.scala @@ -1,8 +1,14 @@ package pl.touk.nussknacker.ui.db.entity import pl.touk.nussknacker.engine.api.deployment.ProcessActionState.ProcessActionState -import pl.touk.nussknacker.engine.api.deployment.{ProcessActionId, ProcessActionState, ScenarioActionName} +import pl.touk.nussknacker.engine.api.deployment.{ + DeploymentStatusName, + ProcessActionId, + ProcessActionState, + ScenarioActionName +} import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId} +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import slick.ast.BaseTypedType import slick.jdbc.{JdbcProfile, JdbcType} @@ -24,10 +30,16 @@ trait BaseEntityFactory { implicit def processActionIdMapping: BaseColumnType[ProcessActionId] = MappedColumnType.base[ProcessActionId, UUID](_.value, ProcessActionId.apply) - implicit def scenarioActionName: JdbcType[ScenarioActionName] with BaseTypedType[ScenarioActionName] = + implicit def scenarioActionName: BaseColumnType[ScenarioActionName] = MappedColumnType.base[ScenarioActionName, String](_.toString, ScenarioActionName.apply) - implicit def processActionState: JdbcType[ProcessActionState] with BaseTypedType[ProcessActionState] = + implicit def processActionState: BaseColumnType[ProcessActionState] = MappedColumnType.base[ProcessActionState, String](_.toString, ProcessActionState.withName) + protected implicit def deploymentIdMapping: BaseColumnType[DeploymentId] = + MappedColumnType.base[DeploymentId, UUID](_.value, DeploymentId.apply) + + implicit def deploymentStatusName: BaseColumnType[DeploymentStatusName] = + MappedColumnType.base[DeploymentStatusName, String](_.value, DeploymentStatusName.apply) + } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentCommand.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentCommand.scala index 19f77423c47..fa608d740a2 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentCommand.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentCommand.scala @@ -2,7 +2,7 @@ package pl.touk.nussknacker.ui.process.newdeployment import pl.touk.nussknacker.engine.api.component.NodesDeploymentData 
import pl.touk.nussknacker.engine.api.process.ProcessName -import pl.touk.nussknacker.ui.listener.Comment +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.ui.security.api.LoggedUser sealed trait DeploymentCommand { diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala index 8abec7a6aa6..789b7646ca7 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala @@ -1,13 +1,19 @@ package pl.touk.nussknacker.ui.process.newdeployment +import pl.touk.nussknacker.engine.api.deployment.{ + DeploymentStatus, + DeploymentStatusName, + NoAttributesDeploymentStatus, + ProblemDeploymentStatus +} import pl.touk.nussknacker.engine.api.process.ProcessId +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.ui.db.entity.{BaseEntityFactory, ProcessEntityData, ProcessEntityFactory} -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentEntityFactory.DeploymentEntityData +import pl.touk.nussknacker.ui.process.newdeployment.DeploymentEntityFactory.{DeploymentEntityData, WithModifiedAt} import slick.lifted.{ForeignKeyQuery, ProvenShape, TableQuery => LTableQuery} import slick.sql.SqlProfile.ColumnOption.NotNull import java.sql.Timestamp -import java.util.UUID trait DeploymentEntityFactory extends BaseEntityFactory { self: ProcessEntityFactory => @@ -26,8 +32,22 @@ trait DeploymentEntityFactory extends BaseEntityFactory { self: ProcessEntityFac def createdBy: Rep[String] = column[String]("created_by", NotNull) + def statusName: Rep[DeploymentStatusName] = column[DeploymentStatusName]("status_name", NotNull) + + def statusProblemDescription: Rep[Option[String]] = column[Option[String]]("status_problem_description") + + def statusModifiedAt: Rep[Timestamp] = column[Timestamp]("status_modified_at", NotNull) + override def * : ProvenShape[DeploymentEntityData] = - (id, scenarioId, createdAt, createdBy) <> (DeploymentEntityData.apply _ tupled, DeploymentEntityData.unapply) + ( + id, + scenarioId, + createdAt, + createdBy, + statusName, + statusProblemDescription, + statusModifiedAt + ) <> (createEntity _ tupled, extractFieldsFromEntity) private def scenarios_fk: ForeignKeyQuery[ProcessEntityFactory#ProcessEntity, ProcessEntityData] = foreignKey("deployments_scenarios_fk", scenarioId, processesTable)( @@ -38,8 +58,37 @@ trait DeploymentEntityFactory extends BaseEntityFactory { self: ProcessEntityFac } - protected implicit def deploymentIdMapping: BaseColumnType[DeploymentId] = - MappedColumnType.base[DeploymentId, UUID](_.value, DeploymentId.apply) + private def createEntity( + id: DeploymentId, + scenarioId: ProcessId, + createdAt: Timestamp, + createdBy: String, + statusName: DeploymentStatusName, + statusProblemDescription: Option[String], + statusModifiedAt: Timestamp + ) = { + val status = if (statusName == ProblemDeploymentStatus.name) { + ProblemDeploymentStatus( + statusProblemDescription.getOrElse(throw new IllegalStateException("Problem status without description")) + ) + } else { + NoAttributesDeploymentStatus(statusName) + } + DeploymentEntityData(id, scenarioId, createdAt, createdBy, WithModifiedAt(status, statusModifiedAt)) + } + + private def extractFieldsFromEntity(entity: 
DeploymentEntityData) = { + val statusProblemDescription = ProblemDeploymentStatus.extractDescription(entity.statusWithModifiedAt.value) + Option( + entity.id, + entity.scenarioId, + entity.createdAt, + entity.createdBy, + entity.statusWithModifiedAt.value.name, + statusProblemDescription, + entity.statusWithModifiedAt.modifiedAt + ) + } } @@ -49,7 +98,10 @@ object DeploymentEntityFactory { id: DeploymentId, scenarioId: ProcessId, createdAt: Timestamp, - createdBy: String + createdBy: String, + statusWithModifiedAt: WithModifiedAt[DeploymentStatus] ) + case class WithModifiedAt[T](value: T, modifiedAt: Timestamp) + } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentId.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentId.scala deleted file mode 100644 index 01b317fa731..00000000000 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentId.scala +++ /dev/null @@ -1,19 +0,0 @@ -package pl.touk.nussknacker.ui.process.newdeployment - -import sttp.tapir.Codec -import sttp.tapir.Codec.PlainCodec - -import java.util.UUID - -final case class DeploymentId(value: UUID) { - override def toString: String = value.toString -} - -object DeploymentId { - - def generate: DeploymentId = DeploymentId(UUID.randomUUID()) - - implicit val deploymentIdCodec: PlainCodec[DeploymentId] = - Codec.uuid.map(DeploymentId(_))(_.value) - -} diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala index 8ec8eaa3599..33a97f8dc33 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala @@ -1,7 +1,10 @@ package pl.touk.nussknacker.ui.process.newdeployment +import cats.implicits.{toFoldableOps, toTraverseOps} import db.util.DBIOActionInstances._ import org.postgresql.util.{PSQLException, PSQLState} +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, ProblemDeploymentStatus} +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.ui.db.entity.ProcessEntityData import pl.touk.nussknacker.ui.db.{DbRef, NuTables} import pl.touk.nussknacker.ui.process.newdeployment.DeploymentEntityFactory.DeploymentEntityData @@ -11,10 +14,11 @@ import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository.{ } import slick.jdbc.JdbcProfile -import java.sql.SQLIntegrityConstraintViolationException +import java.sql.{SQLIntegrityConstraintViolationException, Timestamp} +import java.time.Clock import scala.concurrent.ExecutionContext -class DeploymentRepository(dbRef: DbRef)(implicit ec: ExecutionContext) extends NuTables { +class DeploymentRepository(dbRef: DbRef, clock: Clock)(implicit ec: ExecutionContext) extends NuTables { override protected val profile: JdbcProfile = dbRef.profile @@ -48,6 +52,28 @@ class DeploymentRepository(dbRef: DbRef)(implicit ec: ExecutionContext) extends ) } + def updateDeploymentStatuses(statusesToUpdate: Map[DeploymentId, DeploymentStatus]): DB[Set[DeploymentId]] = { + statusesToUpdate.toList + .map { case (id, status) => + val problemDescription = ProblemDeploymentStatus.extractDescription(status) + toEffectAll( + deploymentsTable + .filter(d => + d.id === id && (d.statusName =!= status.name || 
d.statusProblemDescription =!= problemDescription) + ) + .map(d => (d.statusName, d.statusProblemDescription, d.statusModifiedAt)) + .update((status.name, problemDescription, Timestamp.from(clock.instant()))) + .map { result => + if (result > 0) Set(id) else Set.empty[DeploymentId] + } + ) + } + .sequence + .map(_.combineAll) + // For the performance reasons it is better to run all updates in the one session, transactionally should enforce it + .transactionally + } + } object DeploymentRepository { diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala index 632a0544445..8341dcb749c 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala @@ -4,24 +4,19 @@ import cats.Applicative import cats.data.EitherT import db.util.DBIOActionInstances._ import pl.touk.nussknacker.engine.api.component.NodesDeploymentData -import pl.touk.nussknacker.engine.api.deployment.StateStatus.StatusName -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus -import pl.touk.nussknacker.engine.api.deployment.{ - DMRunDeploymentCommand, - DMValidateScenarioCommand, - DataFreshnessPolicy, - DeploymentUpdateStrategy -} +import pl.touk.nussknacker.engine.api.deployment._ +import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId} import pl.touk.nussknacker.engine.api.{ProcessVersion => RuntimeVersionData} import pl.touk.nussknacker.engine.deployment.{DeploymentData, DeploymentId => LegacyDeploymentId, ExternalDeploymentId} +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.restmodel.validation.ValidationResults.ValidationErrors import pl.touk.nussknacker.security.Permission import pl.touk.nussknacker.security.Permission.Permission import pl.touk.nussknacker.ui.db.entity.{ProcessEntityData, ProcessVersionEntityData} import pl.touk.nussknacker.ui.process.deployment.DeploymentManagerDispatcher import pl.touk.nussknacker.ui.process.deployment.LoggedUserConversions.LoggedUserOps -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentEntityFactory.DeploymentEntityData +import pl.touk.nussknacker.ui.process.newdeployment.DeploymentEntityFactory.{DeploymentEntityData, WithModifiedAt} import pl.touk.nussknacker.ui.process.newdeployment.DeploymentService._ import pl.touk.nussknacker.ui.process.repository.{DBIOActionRunner, ScenarioMetadataRepository} import pl.touk.nussknacker.ui.process.version.ScenarioGraphVersionService @@ -52,18 +47,15 @@ class DeploymentService( def getDeploymentStatus( id: DeploymentId - )(implicit loggedUser: LoggedUser): Future[Either[GetDeploymentStatusError, StatusName]] = + )(implicit loggedUser: LoggedUser): Future[Either[GetDeploymentStatusError, WithModifiedAt[DeploymentStatus]]] = (for { deploymentWithScenarioMetadata <- getDeploymentById(id) - _ <- checkPermission[Future]( + _ <- checkPermission[Future, GetDeploymentStatusError]( user = loggedUser, category = deploymentWithScenarioMetadata.scenarioMetadata.processCategory, permission = Permission.Read ) - statusOpt <- getDeploymentStatusFromDeploymentManager(id, deploymentWithScenarioMetadata.scenarioMetadata) - // TODO: Distinguish between: during deploy status and 
finished but with job information removed from Flink/K8s side - status = statusOpt.getOrElse(SimpleStateStatus.DuringDeploy) - } yield status.name).value + } yield deploymentWithScenarioMetadata.deployment.statusWithModifiedAt).value def runDeployment(command: RunDeploymentCommand): DB[Either[RunDeploymentError, DeploymentForeignKeys]] = (for { @@ -90,12 +82,20 @@ class DeploymentService( private def saveDeployment( command: RunDeploymentCommand, scenarioMetadata: ProcessEntityData - ): EitherT[DB, RunDeploymentError, Unit] = + ): EitherT[DB, RunDeploymentError, Unit] = { + val now = Timestamp.from(clock.instant()) EitherT( deploymentRepository.saveDeployment( - DeploymentEntityData(command.id, scenarioMetadata.id, Timestamp.from(clock.instant()), command.user.id) + DeploymentEntityData( + command.id, + scenarioMetadata.id, + now, + command.user.id, + WithModifiedAt(SimpleDeploymentStatus.DuringDeploy, now) + ) ) ).leftMap(e => ConflictingDeploymentIdError(e.id)) + } private def validateUsingDeploymentManager( scenarioMetadata: ProcessEntityData, @@ -181,29 +181,18 @@ class DeploymentService( LegacyDeploymentId(id.toString) } - private def getDeploymentById(id: DeploymentId) = + private def getDeploymentById( + id: DeploymentId + ): EitherT[Future, GetDeploymentStatusError, DeploymentRepository.DeploymentWithScenarioMetadata] = EitherT.fromOptionF(dbioRunner.run(deploymentRepository.getDeploymentById(id)), DeploymentNotFoundError(id)) - private def checkPermission[F[_]: Applicative](user: LoggedUser, category: String, permission: Permission) = + private def checkPermission[F[_]: Applicative, Error >: NoPermissionError.type]( + user: LoggedUser, + category: String, + permission: Permission + ): EitherT[F, Error, Unit] = EitherT.cond[F](user.can(category, permission), (), NoPermissionError) - private def getDeploymentStatusFromDeploymentManager(deploymentId: DeploymentId, scenarioMetadata: ProcessEntityData)( - implicit loggedUser: LoggedUser - ) = { - implicit val freshnessPolicy: DataFreshnessPolicy = DataFreshnessPolicy.Fresh - EitherT.right[GetDeploymentStatusError]( - dmDispatcher - .deploymentManagerUnsafe(scenarioMetadata.processingType) - .getProcessStates(scenarioMetadata.name) - .map { result => - val legacyDeploymentId = toLegacyDeploymentId(deploymentId) - result.value - .find(_.deploymentId.contains(legacyDeploymentId)) - .map(_.status) - } - ) - } - } object DeploymentService { diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala new file mode 100644 index 00000000000..ae315e6ad65 --- /dev/null +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala @@ -0,0 +1,58 @@ +package pl.touk.nussknacker.ui.process.newdeployment.synchronize + +import akka.actor.{ActorSystem, Cancellable} +import com.typesafe.config.Config +import com.typesafe.scalalogging.LazyLogging +import net.ceedubs.ficus.Ficus._ +import net.ceedubs.ficus.readers.ArbitraryTypeReader._ + +import scala.concurrent.Await +import scala.concurrent.duration._ +import scala.util.Try + +// TODO: Properly handle HA setup: synchronizeAll() should be invoked only on one instance of designer in a time +class DeploymentsStatusesSynchronizationScheduler( + actorSystem: ActorSystem, + synchronizer: 
DeploymentsStatusesSynchronizer, + config: DeploymentsStatusesSynchronizationConfig +) extends AutoCloseable + with LazyLogging { + + @volatile private var scheduledJob: Option[Cancellable] = None + + import actorSystem.dispatcher + + def start(): Unit = { + scheduledJob = Some( + actorSystem.scheduler.scheduleAtFixedRate(0 seconds, config.delayBetweenSynchronizations) { () => + Try(Await.result(synchronizer.synchronizeAll(), config.synchronizationTimeout)).failed.foreach { ex => + logger.error( + s"Error while synchronizing deployments statuses. Synchronization will be retried in ${config.delayBetweenSynchronizations}", + ex + ) + } + } + ) + } + + override def close(): Unit = { + scheduledJob.map(_.cancel()) + } + +} + +final case class DeploymentsStatusesSynchronizationConfig( + delayBetweenSynchronizations: FiniteDuration = 1 second, + synchronizationTimeout: FiniteDuration = 10 seconds +) + +object DeploymentsStatusesSynchronizationConfig { + + val ConfigPath = "deploymentStatusesSynchronization" + + def parse(config: Config): DeploymentsStatusesSynchronizationConfig = + config + .getAs[DeploymentsStatusesSynchronizationConfig](ConfigPath) + .getOrElse(DeploymentsStatusesSynchronizationConfig()) + +} diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala new file mode 100644 index 00000000000..9d132ecb689 --- /dev/null +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala @@ -0,0 +1,65 @@ +package pl.touk.nussknacker.ui.process.newdeployment.synchronize + +import cats.implicits.toTraverseOps +import com.typesafe.scalalogging.LazyLogging +import pl.touk.nussknacker.engine.api.deployment.{ + DeploymentManager, + DeploymentSynchronisationSupport, + DeploymentSynchronisationSupported, + NoDeploymentSynchronisationSupport +} +import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository +import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeDataProvider +import pl.touk.nussknacker.ui.process.repository.DBIOActionRunner +import pl.touk.nussknacker.ui.security.api.NussknackerInternalUser + +import scala.concurrent.{ExecutionContext, Future} + +class DeploymentsStatusesSynchronizer( + repository: DeploymentRepository, + synchronizationSupport: ProcessingTypeDataProvider[DeploymentSynchronisationSupport, _], + dbioActionRunner: DBIOActionRunner +)(implicit ec: ExecutionContext) + extends LazyLogging { + + def synchronizeAll(): Future[Unit] = { + synchronizationSupport + .all(NussknackerInternalUser.instance) + .toList + .map { case (processingType, manager) => + manager match { + case synchronisationSupported: DeploymentSynchronisationSupported => + logger.trace(s"Running synchronization of deployments statuses for processing type: $processingType") + for { + statusesByDeploymentId <- synchronisationSupported.getDeploymentStatusesToUpdate + updateResult <- dbioActionRunner.run(repository.updateDeploymentStatuses(statusesByDeploymentId)) + _ = { + Option(updateResult).filterNot(_.isEmpty) match { + case None => + logger.trace( + s"Synchronization of deployments statuses for processing type: $processingType finished. 
No deployment status was changed" + ) + case Some(changes) => + logger.debug( + changes.mkString( + s"Synchronization of deployments statuses for processing type: $processingType finished. Deployments ", + ", ", + " statuses were changed" + ) + ) + } + } + } yield () + case NoDeploymentSynchronisationSupport => + logger.trace( + s"Synchronization of deployments statuses for processing type: $processingType is not supported, skipping." + ) + Future.unit + } + + } + .sequence + .map(_ => ()) + } + +} diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala index e56bbdb5c16..501dd6ba3ca 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/processingtype/InvalidDeploymentManagerStub.scala @@ -43,5 +43,7 @@ object InvalidDeploymentManagerStub extends DeploymentManager { override def customActionsDefinitions: List[CustomActionDefinition] = List.empty + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + override def close(): Unit = () } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala index fca1ad94244..692a6b84749 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala @@ -63,6 +63,11 @@ import pl.touk.nussknacker.ui.process.deployment.{ import pl.touk.nussknacker.ui.process.fragment.{DefaultFragmentRepository, FragmentResolver} import pl.touk.nussknacker.ui.process.migrate.{HttpRemoteEnvironment, ProcessModelMigrator, TestModelMigrations} import pl.touk.nussknacker.ui.process.newactivity.ActivityService +import pl.touk.nussknacker.ui.process.newdeployment.synchronize.{ + DeploymentsStatusesSynchronizationConfig, + DeploymentsStatusesSynchronizationScheduler, + DeploymentsStatusesSynchronizer +} import pl.touk.nussknacker.ui.process.newdeployment.{DeploymentRepository, DeploymentService} import pl.touk.nussknacker.ui.process.processingtype.{ProcessingTypeData, ProcessingTypeDataReload} import pl.touk.nussknacker.ui.process.repository._ @@ -118,14 +123,34 @@ class AkkaHttpBasedRouteProvider( DefaultProcessingTypeDeployedScenariosProvider(dbRef, _), sttpBackend, ) + deploymentRepository = new DeploymentRepository(dbRef, Clock.systemDefaultZone()) + dbioRunner = DBIOActionRunner(dbRef) + deploymentsStatusesSynchronizer = new DeploymentsStatusesSynchronizer( + deploymentRepository, + processingTypeDataProvider.mapValues( + _.deploymentData.validDeploymentManagerOrStub.deploymentSynchronisationSupport + ), + dbioRunner + ) + _ <- Resource.fromAutoCloseable( + IO { + val scheduler = new DeploymentsStatusesSynchronizationScheduler( + system, + deploymentsStatusesSynchronizer, + DeploymentsStatusesSynchronizationConfig.parse(resolvedConfig) + ) + scheduler.start() + scheduler + } + ) } yield { val analyticsConfig = AnalyticsConfig(resolvedConfig) val migrations = processingTypeDataProvider.mapValues(_.designerModelData.modelData.migrations) val modelBuildInfo = 
processingTypeDataProvider.mapValues(_.designerModelData.modelData.buildInfo) - implicit val dbioRunner: DBIOActionRunner = DBIOActionRunner(dbRef) - val commentRepository = new CommentRepository(dbRef) + implicit val implicitDbioRunner: DBIOActionRunner = dbioRunner + val commentRepository = new CommentRepository(dbRef) val actionRepository = new DbProcessActionRepository(dbRef, commentRepository, modelBuildInfo) val processRepository = DBFetchingProcessRepository.create(dbRef, actionRepository) // TODO: get rid of Future based repositories - it is easier to use everywhere one implementation - DBIOAction based which allows transactions handling @@ -353,7 +378,6 @@ class AkkaHttpBasedRouteProvider( val scenarioGraphVersionRepository = new ScenarioGraphVersionRepository(dbRef) val scenarioGraphVersionService = new ScenarioGraphVersionService(scenarioGraphVersionRepository, processValidator, scenarioResolver) - val deploymentRepository = new DeploymentRepository(dbRef) val deploymentService = new DeploymentService( scenarioMetadataRepository, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala index 7a555cc1cb6..3f4c5c3b0af 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockDeploymentManager.scala @@ -278,6 +278,8 @@ class MockDeploymentManager( Future.successful(()) } + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + } class MockManagerProvider(deploymentManager: DeploymentManager = new MockDeploymentManager()) diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala index 216051ba7aa..49e2c261f51 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala @@ -28,7 +28,6 @@ import pl.touk.nussknacker.ui.definition.ScenarioPropertiesConfigFinalizer import pl.touk.nussknacker.ui.process.NewProcessPreparer import pl.touk.nussknacker.ui.process.deployment.ScenarioResolver import pl.touk.nussknacker.ui.process.fragment.{DefaultFragmentRepository, FragmentResolver} -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository import pl.touk.nussknacker.ui.process.processingtype.{ ProcessingTypeDataProvider, ScenarioParametersService, @@ -36,7 +35,7 @@ import pl.touk.nussknacker.ui.process.processingtype.{ ValueWithRestriction } import pl.touk.nussknacker.ui.process.repository._ -import pl.touk.nussknacker.ui.process.version.{ScenarioGraphVersionRepository, ScenarioGraphVersionService} +import pl.touk.nussknacker.ui.process.version.ScenarioGraphVersionRepository import pl.touk.nussknacker.ui.security.api.{LoggedUser, RealLoggedUser} import pl.touk.nussknacker.ui.uiresolving.UIProcessResolver import pl.touk.nussknacker.ui.validation.UIProcessValidator @@ -177,10 +176,6 @@ object TestFactory { def newProcessActivityRepository(dbRef: DbRef) = new DbProcessActivityRepository(dbRef, newCommentRepository(dbRef)) - def newDeploymentRepository(dbRef: DbRef) = new DeploymentRepository(dbRef) - - def newScenarioRepository(dbRef: DbRef) = new ScenarioMetadataRepository(dbRef) - def asAdmin(route: 
RouteWithUser): Route = route.securedRouteWithErrorHandling(adminUser()) diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/BaseDeploymentApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/BaseDeploymentApiHttpServiceBusinessSpec.scala index 4023afa2f5c..ece8bae2fb9 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/BaseDeploymentApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/BaseDeploymentApiHttpServiceBusinessSpec.scala @@ -5,24 +5,23 @@ import io.restassured.RestAssured.`given` import io.restassured.module.scala.RestAssuredSupport.AddThenToResponse import org.apache.commons.io.FileUtils import org.hamcrest.Matchers.{anyOf, equalTo} -import org.scalatest.Suite +import org.scalatest.{LoneElement, Suite} import org.scalatest.concurrent.Eventually -import org.scalatest.concurrent.PatienceConfiguration.Interval +import org.scalatest.concurrent.PatienceConfiguration.{Interval, Timeout} import org.scalatest.matchers.should.Matchers import org.scalatest.time.{Seconds, Span} -import org.scalatest.{LoneElement, Suite} import org.testcontainers.containers.BindMode -import pl.touk.nussknacker.engine.api.deployment.StateStatus +import pl.touk.nussknacker.engine.api.deployment.DeploymentStatusName import pl.touk.nussknacker.engine.build.ScenarioBuilder import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess import pl.touk.nussknacker.engine.flink.test.docker.FileSystemBind import pl.touk.nussknacker.engine.graph.expression.Expression +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.test.base.it.NuItTest import pl.touk.nussknacker.test.config.{ WithBusinessCaseRestAssuredUsersExtensions, WithFlinkContainersDeploymentManager } -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentId import java.io.File import java.nio.file.attribute.PosixFilePermissions @@ -126,19 +125,18 @@ trait BaseDeploymentApiHttpServiceBusinessSpec extends WithFlinkContainersDeploy super.afterAll() } - protected def waitForDeploymentStatusMatches( + protected def waitForDeploymentStatusNameMatches( requestedDeploymentId: DeploymentId, - expectedStatus: StateStatus + expectedStatusName: DeploymentStatusName ): Unit = { - // A little bit longer interval than default to avoid too many log entries of requests - eventually(Interval(Span(2, Seconds))) { - checkDeploymentStatusMatches(requestedDeploymentId, expectedStatus) + eventually(Timeout(Span(120, Seconds)), Interval(Span(3, Seconds))) { + checkDeploymentStatusNameMatches(requestedDeploymentId, expectedStatusName) } } - protected def checkDeploymentStatusMatches( + protected def checkDeploymentStatusNameMatches( requestedDeploymentId: DeploymentId, - expectedStatuses: StateStatus* + expectedStatusNames: DeploymentStatusName* ): Unit = { given() .when() @@ -146,7 +144,7 @@ trait BaseDeploymentApiHttpServiceBusinessSpec extends WithFlinkContainersDeploy .get(s"$nuDesignerHttpAddress/api/deployments/$requestedDeploymentId/status") .Then() .statusCode(200) - .body(anyOf(expectedStatuses.map(status => equalTo[String](status.name)): _*)) + .body("name", anyOf(expectedStatusNames.map(statusName => equalTo[String](statusName.value)): _*)) } protected def getLoneFileFromLoneOutputTransactionsSummaryPartitionWithGivenName( diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala 
b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala index 2b1ef317af1..4e39ac55d02 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala @@ -7,7 +7,8 @@ import org.apache.commons.io.FileUtils import org.scalatest.LoneElement import org.scalatest.freespec.AnyFreeSpecLike import org.scalatest.matchers.should.Matchers -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus +import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.test.base.it.{NuItTest, WithBatchConfigScenarioHelper} import pl.touk.nussknacker.test.config.{WithBatchDesignerConfig, WithBusinessCaseRestAssuredUsersExtensions} import pl.touk.nussknacker.test.{ @@ -15,7 +16,6 @@ import pl.touk.nussknacker.test.{ RestAssuredVerboseLoggingIfValidationFails, VeryPatientScalaFutures } -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentId import java.nio.charset.StandardCharsets import java.nio.file.Path @@ -81,7 +81,7 @@ class DeploymentApiHttpServiceBusinessSpec .Then() .statusCode(202) .verifyApplicationState { - waitForDeploymentStatusMatches(requestedDeploymentId, SimpleStateStatus.Finished) + waitForDeploymentStatusNameMatches(requestedDeploymentId, SimpleDeploymentStatus.Finished.name) } .verifyExternalState { val resultFile = getLoneFileFromLoneOutputTransactionsSummaryPartitionWithGivenName("date=2024-01-01") @@ -119,7 +119,7 @@ class DeploymentApiHttpServiceBusinessSpec .applicationState { createSavedScenario(scenario) runDeployment(firstDeploymentId) - waitForDeploymentStatusMatches(firstDeploymentId, SimpleStateStatus.Finished) + waitForDeploymentStatusNameMatches(firstDeploymentId, SimpleDeploymentStatus.Finished.name) } .when() .basicAuthAdmin() @@ -128,13 +128,13 @@ class DeploymentApiHttpServiceBusinessSpec .Then() .statusCode(202) .verifyApplicationState { - checkDeploymentStatusMatches( + checkDeploymentStatusNameMatches( secondDeploymentId, - SimpleStateStatus.DuringDeploy, - SimpleStateStatus.Running, - SimpleStateStatus.Finished + SimpleDeploymentStatus.DuringDeploy.name, + SimpleDeploymentStatus.Running.name, + SimpleDeploymentStatus.Finished.name ) - checkDeploymentStatusMatches(firstDeploymentId, SimpleStateStatus.Finished) + checkDeploymentStatusNameMatches(firstDeploymentId, SimpleDeploymentStatus.Finished.name) } } } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala index 3a5c9412dde..80660b5fad1 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala @@ -8,7 +8,8 @@ import org.apache.commons.io.FileUtils import org.scalatest.LoneElement import org.scalatest.freespec.AnyFreeSpecLike import org.scalatest.matchers.should.Matchers -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus +import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus +import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.test.base.it.{NuItTest, 
WithBatchConfigScenarioHelper} import pl.touk.nussknacker.test.config.{WithBatchDesignerConfig, WithBusinessCaseRestAssuredUsersExtensions} import pl.touk.nussknacker.test.{ @@ -16,7 +17,6 @@ import pl.touk.nussknacker.test.{ RestAssuredVerboseLoggingIfValidationFails, VeryPatientScalaFutures } -import pl.touk.nussknacker.ui.process.newdeployment.DeploymentId import java.nio.charset.StandardCharsets import java.nio.file.Path @@ -129,7 +129,7 @@ class DeploymentApiHttpServiceDeploymentCommentSpec .Then() .statusCode(202) .verifyApplicationState { - waitForDeploymentStatusMatches(requestedDeploymentId, SimpleStateStatus.Finished) + waitForDeploymentStatusNameMatches(requestedDeploymentId, SimpleDeploymentStatus.Finished.name) } .verifyExternalState { val resultFile = getLoneFileFromLoneOutputTransactionsSummaryPartitionWithGivenName("date=2024-01-01") diff --git a/docs-internal/api/nu-designer-openapi.yaml b/docs-internal/api/nu-designer-openapi.yaml index 12480d4c97a..f0b65e9604a 100644 --- a/docs-internal/api/nu-designer-openapi.yaml +++ b/docs-internal/api/nu-designer-openapi.yaml @@ -785,9 +785,19 @@ paths: '200': description: '' content: - text/plain: + application/json: schema: - type: string + $ref: '#/components/schemas/GetDeploymentStatusResponse' + examples: + RUNNING status: + value: + name: RUNNING + modifiedAt: '2024-01-01T00:00:00Z' + PROBLEM status: + value: + name: PROBLEM + problemDescription: There are some problems with deployment. + modifiedAt: '2024-01-01T00:00:00Z' '400': description: 'Invalid value for: header Nu-Impersonate-User-Identity, Invalid value for: path parameter deploymentId' @@ -3437,6 +3447,22 @@ components: type: string nodeId: type: string + GetDeploymentStatusResponse: + title: GetDeploymentStatusResponse + type: object + required: + - name + - modifiedAt + properties: + name: + type: string + problemDescription: + type: + - string + - 'null' + modifiedAt: + type: string + format: date-time HealthCheckProcessErrorResponseDto: title: HealthCheckProcessErrorResponseDto type: object diff --git a/docs/Changelog.md b/docs/Changelog.md index be6077e8fc1..92e3abc6c4c 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -5,8 +5,10 @@ * [#6053](https://github.com/TouK/nussknacker/pull/6053) Added impersonation mechanism support in Nu API for BasicAuth security module. * [#6008](https://github.com/TouK/nussknacker/pull/6008) Add embedded QuestDB as database for FE statistics. -* [#5982](https://github.com/TouK/nussknacker/pull/5982) Batch processing mode related improvements: +* [#5982](https://github.com/TouK/nussknacker/pull/5982) [#6155](https://github.com/TouK/nussknacker/pull/6155) Batch processing mode related improvements: * Deployments API returns correct status of deployment instead of returning always the last deployment's status + * Deployments API returns more information about status of a deployment: problem description and status modification time + * Status of a deployment is cached on the Designer side - in case of retention of finished job on Flink, status is still returned as FINISHED * [#6121](https://github.com/TouK/nussknacker/pull/6121) Add functionality to reorder columns within the table editor. * [#6136](https://github.com/TouK/nussknacker/pull/6136) Add possibility to configure kafka exactly-once delivery for flink. 
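The status endpoint described in the changelog entry above now returns a JSON document instead of a plain-text status name. As a minimal, illustrative sketch (not part of this patch), assuming circe is available on the classpath, a client could mirror the `GetDeploymentStatusResponse` schema and decode the payload as shown below; `DeploymentStatusView` is a hypothetical name used only for this example:

```scala
import io.circe.generic.auto._
import io.circe.parser.decode

import java.time.Instant

// Hypothetical client-side mirror of the GetDeploymentStatusResponse schema
// (field names follow the OpenAPI fragment in this patch).
final case class DeploymentStatusView(
    name: String,
    problemDescription: Option[String],
    modifiedAt: Instant
)

object DeploymentStatusViewExample extends App {
  // Payload shaped like the "PROBLEM status" example documented for the endpoint.
  val json =
    """{"name":"PROBLEM","problemDescription":"There are some problems with deployment.","modifiedAt":"2024-01-01T00:00:00Z"}"""

  decode[DeploymentStatusView](json) match {
    case Right(status) =>
      println(s"${status.name} (modified at ${status.modifiedAt}): ${status.problemDescription.getOrElse("no problem reported")}")
    case Left(error) =>
      println(s"Could not parse deployment status: $error")
  }
}
```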
diff --git a/docs/MigrationGuide.md b/docs/MigrationGuide.md
index 83e717953f4..0ea791c33c2 100644
--- a/docs/MigrationGuide.md
+++ b/docs/MigrationGuide.md
@@ -30,12 +30,14 @@ To see the biggest differences please consult the [changelog](Changelog.md).
  It does not represent anonymous access to the designer anymore but simply represents passing no credentials.
* `AuthenticationConfiguration` has one additional Boolean property `isAdminImpersonationPossible` which defines whether admin users can be impersonated by users with the `Impersonate` permission.
  The property is set to `false` by default for `BasicAuthenticationConfiguration`, `OAuth2Configuration` and `DummyAuthenticationConfiguration`.
-* [#6087](https://github.com/TouK/nussknacker/pull/6087) `DeploymentManager` API changes:
+* [#6087](https://github.com/TouK/nussknacker/pull/6087) [#6155](https://github.com/TouK/nussknacker/pull/6155) `DeploymentManager` API changes:
  * `DMRunDeploymentCommand.savepointPath` was replaced by `updateStrategy: DeploymentUpdateStrategy`
    * In places where `savepointPath = None` was passed, the `DeploymentUpdateStrategy.ReplaceDeploymentWithSameScenarioName(StateRestoringStrategy.RestoreStateFromReplacedJobSavepoint)` should be passed
    * In places where `savepointPath = Some(path)` was passed, the `DeploymentUpdateStrategy.ReplaceDeploymentWithSameScenarioName(StateRestoringStrategy.RestoreStateFromCustomSavepoint(path))` should be passed
  * `DMValidateScenarioCommand.updateStrategy` was added
    * In every place, the `DeploymentUpdateStrategy.ReplaceDeploymentWithSameScenarioName(StateRestoringStrategy.RestoreStateFromReplacedJobSavepoint)` should be passed
+  * The `deploymentSynchronisationSupport` field was added for the purpose of status synchronisation. If the synchronisation mechanism is not used in the context of a given DM,
+    you should return the `NoDeploymentSynchronisationSupport` object. The synchronisation mechanism is used only by the `/api/deployments/{deploymentId}/status` endpoint; other endpoints don't use it.
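To illustrate the migration note above, here is a minimal sketch of the two ways a custom `DeploymentManager` implementation can provide the new `deploymentSynchronisationSupport` member. The API types (`DeploymentSynchronisationSupport`, `NoDeploymentSynchronisationSupport`, `DeploymentSynchronisationSupported`, `DeploymentStatus`, `newdeployment.DeploymentId`) come from this patch, while the example class names and the `fetchStatusesFromRuntime` function are hypothetical:

```scala
import pl.touk.nussknacker.engine.api.deployment._
import pl.touk.nussknacker.engine.newdeployment

import scala.concurrent.Future

// Option 1: opt out. The periodic deployment statuses synchronizer will simply skip this DM.
trait SynchronisationOptOutExample {
  def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport
}

// Option 2: opt in by returning, per deployment id, the current status fetched from the runtime.
// `fetchStatusesFromRuntime` stands in for whatever the engine-specific client provides.
class SynchronisationOptInExample(
    fetchStatusesFromRuntime: () => Future[Map[newdeployment.DeploymentId, DeploymentStatus]]
) {

  def deploymentSynchronisationSupport: DeploymentSynchronisationSupport =
    new DeploymentSynchronisationSupported {
      override def getDeploymentStatusesToUpdate: Future[Map[newdeployment.DeploymentId, DeploymentStatus]] =
        fetchStatusesFromRuntime()
    }

}
```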
### Configuration changes diff --git a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala index e9a58c5277f..c3137cd660a 100644 --- a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala +++ b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/DevelopmentDeploymentManagerProvider.scala @@ -5,22 +5,21 @@ import cats.data.{Validated, ValidatedNel} import com.typesafe.config.Config import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.development.manager.DevelopmentStateStatus._ +import pl.touk.nussknacker.engine._ import pl.touk.nussknacker.engine.api.ProcessVersion import pl.touk.nussknacker.engine.api.component.ScenarioPropertyConfig +import pl.touk.nussknacker.engine.api.definition.{ + DateParameterEditor, + LiteralIntegerValidator, + MandatoryParameterValidator, + StringParameterEditor +} import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleProcessStateDefinitionManager, SimpleStateStatus} import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess import pl.touk.nussknacker.engine.deployment._ import pl.touk.nussknacker.engine.management.{FlinkProcessTestRunner, FlinkStreamingPropertiesConfig} -import pl.touk.nussknacker.engine._ -import pl.touk.nussknacker.engine.api.definition.{ - DateParameterEditor, - LiteralIntegerValidator, - MandatoryParameterValidator, - StringParameterEditor, - TextareaParameterEditor -} import java.net.URI import java.util.UUID @@ -36,8 +35,8 @@ class DevelopmentDeploymentManager(actorSystem: ActorSystem, modelData: BaseMode with LazyLogging with DeploymentManagerInconsistentStateHandlerMixIn { - import pl.touk.nussknacker.engine.ModelData._ import SimpleStateStatus._ + import pl.touk.nussknacker.engine.ModelData._ // Use these "magic" description values to simulate deployment/validation failure private val descriptionForValidationFail = "validateFail" @@ -222,6 +221,8 @@ class DevelopmentDeploymentManager(actorSystem: ActorSystem, modelData: BaseMode TimeUnit.SECONDS ) + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + } class DevelopmentDeploymentManagerProvider extends DeploymentManagerProvider { diff --git a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala index c86836100e4..a0e14ef1dfd 100644 --- a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala +++ b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala @@ -13,13 +13,7 @@ import pl.touk.nussknacker.engine.deployment.{CustomActionDefinition, CustomActi import pl.touk.nussknacker.engine.management.FlinkStreamingPropertiesConfig import pl.touk.nussknacker.engine.testing.StubbingCommands import pl.touk.nussknacker.engine.testmode.TestProcess.TestResults -import pl.touk.nussknacker.engine.{ 
-  BaseModelData,
-  DeploymentManagerDependencies,
-  DeploymentManagerProvider,
-  MetaDataInitializer,
-  deployment
-}
+import pl.touk.nussknacker.engine._

import java.util.concurrent.atomic.AtomicReference
import scala.concurrent.Future
@@ -128,6 +122,8 @@ object MockableDeploymentManagerProvider {
      }
    }

+    override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport
+
    override def close(): Unit = {}
  }
diff --git a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala
index d2647e4f04b..0f4040c4174 100644
--- a/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala
+++ b/engine/flink/management/periodic/src/main/scala/pl/touk/nussknacker/engine/management/periodic/PeriodicDeploymentManager.scala
@@ -16,7 +16,7 @@ import pl.touk.nussknacker.engine.management.periodic.service.{
  PeriodicProcessListenerFactory,
  ProcessConfigEnricherFactory
}
-import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies}
+import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies, newdeployment}
import slick.jdbc
import slick.jdbc.JdbcProfile
@@ -210,4 +210,9 @@ class PeriodicDeploymentManager private[periodic] (
  override def customActionsDefinitions: List[CustomActionDefinition] = customActionsProvider.customActions

+  // TODO We don't handle deployment synchronization on the periodic DM because it currently uses its own deployments
+  //   and statuses synchronization mechanism (see PeriodicProcessService.synchronizeDeploymentsStates).
+  //   We should move the periodic mechanism to the core and reuse the new synchronization mechanism in this case as well.
+ override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + } diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala b/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala index b37c5a7e1ee..840b82342dc 100644 --- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala +++ b/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/DeploymentManagerStub.scala @@ -44,4 +44,6 @@ class DeploymentManagerStub extends BaseDeploymentManager with StubbingCommands Future.successful(WithDataFreshnessStatus.fresh(jobStatus.toList)) } + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + } diff --git a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala b/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala index f4aa97aa9e2..824fd004417 100644 --- a/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala +++ b/engine/flink/management/periodic/src/test/scala/pl/touk/nussknacker/engine/management/periodic/FlinkClientStub.scala @@ -10,7 +10,7 @@ import scala.concurrent.Future class FlinkClientStub extends FlinkClient { - override def findJobsByName(jobName: String)( + override def getJobsOverviews()( implicit freshnessPolicy: DataFreshnessPolicy ): Future[WithDataFreshnessStatus[List[flinkRestModel.JobOverview]]] = ??? 
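Since `findJobsByName` is replaced by `getJobsOverviews` in `FlinkClient` (see the `FlinkRestManager` and client changes that follow), callers now fetch the full overview list and filter by the naming-strategy-prepared job name themselves. A hedged sketch of such an adapter; `jobsWithName` and `preparedName` are illustrative names only, the types come from this patch:

```scala
import pl.touk.nussknacker.engine.api.deployment.DataFreshnessPolicy
import pl.touk.nussknacker.engine.management.rest.FlinkClient
import pl.touk.nussknacker.engine.management.rest.flinkRestModel.JobOverview

import scala.concurrent.{ExecutionContext, Future}

object FlinkJobsByNameExample {

  // Fetch all job overviews and keep only those matching the prepared scenario name,
  // mirroring the caller-side filtering introduced in FlinkRestManager below.
  def jobsWithName(client: FlinkClient, preparedName: String)(
      implicit freshnessPolicy: DataFreshnessPolicy,
      ec: ExecutionContext
  ): Future[List[JobOverview]] =
    client.getJobsOverviews().map(_.value.filter(_.name == preparedName))

}
```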
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala index 15b1bccb706..1f6aa68ab4f 100644 --- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala +++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala @@ -1,18 +1,18 @@ package pl.touk.nussknacker.engine.management +import cats.implicits.toTraverseOps import com.typesafe.scalalogging.LazyLogging import org.apache.flink.api.common.JobStatus import pl.touk.nussknacker.engine.api.ProcessVersion import pl.touk.nussknacker.engine.api.deployment._ -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus +import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleDeploymentStatus, SimpleStateStatus} import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId} import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess -import pl.touk.nussknacker.engine.deployment.{DeploymentId, ExternalDeploymentId, User} +import pl.touk.nussknacker.engine.deployment.{DeploymentId, ExternalDeploymentId} import pl.touk.nussknacker.engine.management.FlinkRestManager.JobDetails import pl.touk.nussknacker.engine.management.rest.FlinkClient import pl.touk.nussknacker.engine.management.rest.flinkRestModel.JobOverview -import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies} +import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies, newdeployment} import scala.concurrent.Future @@ -37,11 +37,12 @@ class FlinkRestManager( val preparedName = modelData.namingStrategy.prepareName(name.value) client - .findJobsByName(preparedName) + .getJobsOverviews() .flatMap(result => Future .sequence( result.value + .filter(_.name == preparedName) .map(job => withJobDetails(job.jid, name).map { jobDetails => // TODO: return error when there's no correct version in process @@ -51,7 +52,7 @@ class FlinkRestManager( logger.debug(s"No correct job details in deployed scenario: ${job.name}") } StatusDetails( - mapJobStatus(job), + SimpleStateStatus.fromDeploymentStatus(mapJobStatus(job)), jobDetails.flatMap(_.deploymentId), Some(ExternalDeploymentId(job.jid)), version = jobDetails.map(_.version), @@ -66,28 +67,50 @@ class FlinkRestManager( ) } - private def toJobStatus(overview: JobOverview): JobStatus = { - import org.apache.flink.api.common.JobStatus - JobStatus.valueOf(overview.state) - } + override val deploymentSynchronisationSupport: DeploymentSynchronisationSupport = + new DeploymentSynchronisationSupported { + + override def getDeploymentStatusesToUpdate: Future[Map[newdeployment.DeploymentId, DeploymentStatus]] = { + client.getJobsOverviews()(DataFreshnessPolicy.Fresh).map(_.value).flatMap { jobsOverviews => + jobsOverviews + .map { jobOverview => + val status = mapJobStatus(jobOverview) + client.getJobConfig(jobOverview.jid).map { jobConfig => + jobConfig.`user-config` + .get("deploymentId") + .flatMap(_.asString) + .flatMap(newdeployment.DeploymentId.fromString) + .map(_ -> status) + } + } + .sequence + .map(_.flatten.toMap) + } + } + + } // NOTE: Flink <1.10 compatibility - protected to make it easier to work with Flink 1.9, JobStatus changed package, so we use String in case class - protected def mapJobStatus(overview: JobOverview): 
StateStatus = { + protected def mapJobStatus(overview: JobOverview): DeploymentStatus = { toJobStatus(overview) match { - case JobStatus.RUNNING if ensureTasksRunning(overview) => SimpleStateStatus.Running - case s if checkDuringDeployForNotRunningJob(s) => SimpleStateStatus.DuringDeploy - case JobStatus.FINISHED => SimpleStateStatus.Finished - case JobStatus.RESTARTING => SimpleStateStatus.Restarting - case JobStatus.CANCELED => SimpleStateStatus.Canceled - case JobStatus.CANCELLING => SimpleStateStatus.DuringCancel + case JobStatus.RUNNING if ensureTasksRunning(overview) => SimpleDeploymentStatus.Running + case s if checkDuringDeployForNotRunningJob(s) => SimpleDeploymentStatus.DuringDeploy + case JobStatus.FINISHED => SimpleDeploymentStatus.Finished + case JobStatus.RESTARTING => SimpleDeploymentStatus.Restarting + case JobStatus.CANCELED => SimpleDeploymentStatus.Canceled + case JobStatus.CANCELLING => SimpleDeploymentStatus.DuringCancel // The job is not technically running, but should be in a moment - case JobStatus.RECONCILING | JobStatus.CREATED | JobStatus.SUSPENDED => SimpleStateStatus.Running - case JobStatus.FAILING => ProblemStateStatus.Failed // redeploy allowed, handle with restartStrategy - case JobStatus.FAILED => ProblemStateStatus.Failed // redeploy allowed, handle with restartStrategy + case JobStatus.RECONCILING | JobStatus.CREATED | JobStatus.SUSPENDED => SimpleDeploymentStatus.Running + case JobStatus.FAILING | JobStatus.FAILED => + SimpleDeploymentStatus.Problem.Failed // redeploy allowed, handle with restartStrategy case _ => throw new IllegalStateException() // TODO: drop support for Flink 1.11 & inline `checkDuringDeployForNotRunningJob` so we could benefit from pattern matching exhaustive check } + } + private def toJobStatus(overview: JobOverview): JobStatus = { + import org.apache.flink.api.common.JobStatus + JobStatus.valueOf(overview.state) } protected def ensureTasksRunning(overview: JobOverview): Boolean = { diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClient.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClient.scala index 36b669ac555..52496950d1d 100644 --- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClient.scala +++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClient.scala @@ -16,6 +16,10 @@ import scala.concurrent.duration.FiniteDuration class CachedFlinkClient(delegate: FlinkClient, jobsOverviewCacheTTL: FiniteDuration, jobsConfigCacheSize: Int) extends FlinkClient { + // In scala 2.12, Unit is not an AnyRef, so it is impossible to use it with buildAsync. 
+ // TODO: switch to Unit after migration to >= 2.13 only scala version(s) + private val jobsOverviewCacheSingleKey = "" + private val jobsOverviewCache: AsyncCache[String, List[JobOverview]] = Caffeine .newBuilder() @@ -31,20 +35,23 @@ class CachedFlinkClient(delegate: FlinkClient, jobsOverviewCacheTTL: FiniteDurat override def deleteJarIfExists(jarFileName: String): Future[Unit] = delegate.deleteJarIfExists(jarFileName) - override def findJobsByName( - jobName: String - )(implicit freshnessPolicy: DataFreshnessPolicy): Future[WithDataFreshnessStatus[List[JobOverview]]] = + override def getJobsOverviews()( + implicit freshnessPolicy: DataFreshnessPolicy + ): Future[WithDataFreshnessStatus[List[JobOverview]]] = freshnessPolicy match { case Fresh => - val resultFuture = delegate.findJobsByName(jobName) - jobsOverviewCache.put(jobName, resultFuture.map(_.value).toJava.toCompletableFuture) + val resultFuture = delegate.getJobsOverviews() + jobsOverviewCache.put(jobsOverviewCacheSingleKey, resultFuture.map(_.value).toJava.toCompletableFuture) resultFuture case CanBeCached => - Option(jobsOverviewCache.getIfPresent(jobName)) + Option(jobsOverviewCache.getIfPresent(jobsOverviewCacheSingleKey)) .map(_.toScala.map(WithDataFreshnessStatus.cached)) .getOrElse( jobsOverviewCache - .get(jobName, (_, _) => delegate.findJobsByName(jobName).map(_.value).toJava.toCompletableFuture) + .get( + jobsOverviewCacheSingleKey, + (_, _) => delegate.getJobsOverviews().map(_.value).toJava.toCompletableFuture + ) .toScala .map(WithDataFreshnessStatus.fresh) ) diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/FlinkClient.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/FlinkClient.scala index 861caba943a..8b39faeb80a 100644 --- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/FlinkClient.scala +++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/FlinkClient.scala @@ -17,7 +17,7 @@ trait FlinkClient { def deleteJarIfExists(jarFileName: String): Future[Unit] - def findJobsByName(jobName: String)( + def getJobsOverviews()( implicit freshnessPolicy: DataFreshnessPolicy ): Future[WithDataFreshnessStatus[List[JobOverview]]] diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/HttpFlinkClient.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/HttpFlinkClient.scala index 54d5b094132..dcdc84aa12a 100644 --- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/HttpFlinkClient.scala +++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/rest/HttpFlinkClient.scala @@ -80,10 +80,10 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( .recoverWith(recoverWithMessage("delete jar")) } - def findJobsByName( - jobName: String - )(implicit freshnessPolicy: DataFreshnessPolicy): Future[WithDataFreshnessStatus[List[JobOverview]]] = { - logger.trace(s"Checking fetching scenario $jobName state") + override def getJobsOverviews()( + implicit freshnessPolicy: DataFreshnessPolicy + ): Future[WithDataFreshnessStatus[List[JobOverview]]] = { + logger.trace(s"Fetching jobs overview") basicRequest .readTimeout(config.scenarioStateRequestTimeout) .get(flinkUrl.addPath("jobs", "overview")) @@ -92,7 +92,6 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( .flatMap(SttpJson.failureToFuture) .map { jobs => jobs.jobs - .filter(_.name == 
jobName) .sortBy(_.`last-modification`) .reverse } @@ -100,7 +99,7 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( .recoverWith(recoverWithMessage("retrieve Flink jobs")) } - def getJobConfig(jobId: String): Future[flinkRestModel.ExecutionConfig] = { + override def getJobConfig(jobId: String): Future[flinkRestModel.ExecutionConfig] = { basicRequest .get(flinkUrl.addPath("jobs", jobId, "config")) .response(asJson[JobConfig]) @@ -140,7 +139,7 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( } } - def cancel(deploymentId: ExternalDeploymentId): Future[Unit] = { + override def cancel(deploymentId: ExternalDeploymentId): Future[Unit] = { basicRequest .patch(flinkUrl.addPath("jobs", deploymentId.value)) .send(backend) @@ -149,14 +148,17 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( } - def makeSavepoint(deploymentId: ExternalDeploymentId, savepointDir: Option[String]): Future[SavepointResult] = { + override def makeSavepoint( + deploymentId: ExternalDeploymentId, + savepointDir: Option[String] + ): Future[SavepointResult] = { val savepointRequest = basicRequest .post(flinkUrl.addPath("jobs", deploymentId.value, "savepoints")) .body(SavepointTriggerRequest(`target-directory` = savepointDir, `cancel-job` = false)) processSavepointRequest(deploymentId, savepointRequest, "make savepoint") } - def stop(deploymentId: ExternalDeploymentId, savepointDir: Option[String]): Future[SavepointResult] = { + override def stop(deploymentId: ExternalDeploymentId, savepointDir: Option[String]): Future[SavepointResult] = { // because of https://issues.apache.org/jira/browse/FLINK-28758 we can't use '/stop' endpoint, // so jobs ends up in CANCELED state, not FINISHED - we should switch back when we get rid of old Kafka source val stopRequest = basicRequest @@ -182,7 +184,7 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( private val timeoutExtractor = DeeplyCheckingExceptionExtractor.forClass[TimeoutException] - def runProgram( + override def runProgram( jarFile: File, mainClass: String, args: List[String], @@ -217,7 +219,7 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( } } - def getClusterOverview: Future[ClusterOverview] = { + override def getClusterOverview: Future[ClusterOverview] = { basicRequest .get(flinkUrl.addPath("overview")) .response(asJson[ClusterOverview]) @@ -225,7 +227,7 @@ class HttpFlinkClient(config: FlinkConfig, flinkUrl: Uri)( .flatMap(SttpJson.failureToFuture) } - def getJobManagerConfig: Future[Configuration] = { + override def getJobManagerConfig: Future[Configuration] = { basicRequest .get(flinkUrl.addPath("jobmanager", "config")) .response(asJson[List[KeyValueEntry]]) diff --git a/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClientTest.scala b/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClientTest.scala index 8ac8ce3e446..b896ef1dfc4 100644 --- a/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClientTest.scala +++ b/engine/flink/management/src/test/scala/pl/touk/nussknacker/engine/management/rest/CachedFlinkClientTest.scala @@ -28,13 +28,13 @@ class CachedFlinkClientTest val cachingFlinkClient = new CachedFlinkClient(delegate, 10 seconds, 10) val results = List( - cachingFlinkClient.findJobsByName("foo")(DataFreshnessPolicy.Fresh).futureValue, - cachingFlinkClient.findJobsByName("foo")(DataFreshnessPolicy.Fresh).futureValue, + 
cachingFlinkClient.getJobsOverviews()(DataFreshnessPolicy.Fresh).futureValue, + cachingFlinkClient.getJobsOverviews()(DataFreshnessPolicy.Fresh).futureValue, ) results.map(_.cached) should contain only false - verify(delegate, times(2)).findJobsByName(any[String])(any[DataFreshnessPolicy]) + verify(delegate, times(2)).getJobsOverviews()(any[DataFreshnessPolicy]) } test("should cache jobs by name for DataFreshnessPolicy.CanBeCached") { @@ -42,13 +42,13 @@ class CachedFlinkClientTest val cachingFlinkClient = new CachedFlinkClient(delegate, 10 seconds, 10) val results = List( - cachingFlinkClient.findJobsByName("foo")(DataFreshnessPolicy.CanBeCached).futureValue, - cachingFlinkClient.findJobsByName("foo")(DataFreshnessPolicy.CanBeCached).futureValue, + cachingFlinkClient.getJobsOverviews()(DataFreshnessPolicy.CanBeCached).futureValue, + cachingFlinkClient.getJobsOverviews()(DataFreshnessPolicy.CanBeCached).futureValue, ) results.map(_.cached) should contain allOf (false, true) - verify(delegate, times(1)).findJobsByName(any[String])(any[DataFreshnessPolicy]) + verify(delegate, times(1)).getJobsOverviews()(any[DataFreshnessPolicy]) } test("should cache job configs by default") { @@ -69,7 +69,7 @@ class CachedFlinkClientTest private def prepareMockedFlinkClient: FlinkClient = { val delegate = mock[FlinkClient] - when(delegate.findJobsByName(any[String])(any[DataFreshnessPolicy])).thenAnswer { _: InvocationOnMock => + when(delegate.getJobsOverviews()(any[DataFreshnessPolicy])).thenAnswer { _: InvocationOnMock => val jobs = List( JobOverview( "123", diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala index d6fa5c0dac9..0bce946e9a1 100644 --- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala +++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/DeploymentStrategy.scala @@ -2,7 +2,7 @@ package pl.touk.nussknacker.engine.embedded import pl.touk.nussknacker.engine.ModelData import pl.touk.nussknacker.engine.api.JobData -import pl.touk.nussknacker.engine.api.deployment.StateStatus +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, DeploymentStatusName, StateStatus} import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeContextPreparer @@ -30,6 +30,6 @@ trait DeploymentStrategy { trait Deployment extends AutoCloseable { - def status(): StateStatus + def status(): DeploymentStatus } diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala index 002f221d0db..951e65e4e7f 100644 --- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala +++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/EmbeddedDeploymentManager.scala @@ -7,6 +7,7 @@ import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine.ModelData.BaseModelDataExt import pl.touk.nussknacker.engine.api._ import pl.touk.nussknacker.engine.api.deployment._ +import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus 
import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus.ProblemStateStatus import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess @@ -17,6 +18,7 @@ import pl.touk.nussknacker.engine.lite.api.runtimecontext.LiteEngineRuntimeConte import pl.touk.nussknacker.engine.lite.metrics.dropwizard.{DropwizardMetricsProviderFactory, LiteMetricRegistryFactory} import pl.touk.nussknacker.engine.{BaseModelData, CustomProcessValidator, DeploymentManagerDependencies, ModelData} import pl.touk.nussknacker.lite.manager.{LiteDeploymentManager, LiteDeploymentManagerProvider} +import pl.touk.nussknacker.engine.newdeployment import scala.concurrent.duration.{DurationInt, FiniteDuration} import scala.concurrent.{Await, ExecutionContext, Future} @@ -207,7 +209,10 @@ class EmbeddedDeploymentManager( .map { interpreterData => StatusDetails( status = interpreterData.scenarioDeployment - .fold(ex => ProblemStateStatus(s"Scenario compilation errors"), _.status()), + .fold( + _ => ProblemStateStatus(s"Scenario compilation errors"), + deployment => SimpleStateStatus.fromDeploymentStatus(deployment.status()) + ), deploymentId = Some(interpreterData.deploymentId), externalDeploymentId = Some(ExternalDeploymentId(interpreterData.deploymentId.value)), version = Some(interpreterData.processVersion) @@ -218,6 +223,23 @@ class EmbeddedDeploymentManager( ) } + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = + new DeploymentSynchronisationSupported { + + override def getDeploymentStatusesToUpdate: Future[Map[newdeployment.DeploymentId, DeploymentStatus]] = + Future.successful( + ( + for { + (_, interpreterData) <- deployments.toList + newDeployment <- interpreterData.deploymentId.toNewDeploymentIdOpt + status = interpreterData.scenarioDeployment + .fold(_ => ProblemDeploymentStatus(s"Scenario compilation errors"), deployment => deployment.status()) + } yield newDeployment -> status + ).toMap + ) + + } + override def processStateDefinitionManager: ProcessStateDefinitionManager = EmbeddedProcessStateDefinitionManager override def close(): Unit = { diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala index e8d6262636f..3721d4dbac1 100644 --- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala +++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala @@ -11,8 +11,8 @@ import com.typesafe.config.Config import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine.ModelData import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.FatalUnknownError -import pl.touk.nussknacker.engine.api.deployment.StateStatus -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, StateStatus} +import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleDeploymentStatus, SimpleStateStatus} import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.api.{JobData, MetaData, RequestResponseMetaData} import 
pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess @@ -118,7 +118,7 @@ class RequestResponseDeploymentStrategy(httpConfig: HttpBindingConfig, config: R class RequestResponseDeployment(path: String, interpreter: RequestResponseRunnableScenarioInterpreter) extends Deployment { - override def status(): StateStatus = SimpleStateStatus.Running + override def status(): DeploymentStatus = SimpleDeploymentStatus.Running override def close(): Unit = { slugToScenarioRoute.remove(path) diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala index 0c6eb738e16..fd8282a033a 100644 --- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala +++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala @@ -1,8 +1,8 @@ package pl.touk.nussknacker.engine.embedded.streaming import com.typesafe.scalalogging.LazyLogging -import pl.touk.nussknacker.engine.api.deployment.StateStatus -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, StateStatus} +import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleDeploymentStatus, SimpleStateStatus} import pl.touk.nussknacker.engine.api.{JobData, LiteStreamMetaData, ProcessVersion} import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess import pl.touk.nussknacker.engine.embedded.{Deployment, DeploymentStrategy} @@ -56,10 +56,10 @@ class StreamingDeploymentStrategy extends DeploymentStrategy with LazyLogging { class StreamingDeployment(interpreter: KafkaTransactionalScenarioInterpreter) extends Deployment { - override def status(): StateStatus = interpreter.status() match { - case TaskStatus.Running => SimpleStateStatus.Running - case TaskStatus.DuringDeploy => SimpleStateStatus.DuringDeploy - case TaskStatus.Restarting => SimpleStateStatus.Restarting + override def status(): DeploymentStatus = interpreter.status() match { + case TaskStatus.Running => SimpleDeploymentStatus.Running + case TaskStatus.DuringDeploy => SimpleDeploymentStatus.DuringDeploy + case TaskStatus.Restarting => SimpleDeploymentStatus.Restarting case other => throw new IllegalStateException(s"Not supporter task status: $other") } diff --git a/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala b/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala index d4599315a2b..5845a998dbc 100644 --- a/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala +++ b/engine/lite/k8sDeploymentManager/src/main/scala/pl/touk/nussknacker/k8s/manager/K8sDeploymentManager.scala @@ -9,9 +9,9 @@ import pl.touk.nussknacker.engine.api._ import pl.touk.nussknacker.engine.api.deployment._ import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName} import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess -import pl.touk.nussknacker.engine.deployment.{DeploymentData, DeploymentId, ExternalDeploymentId, User} +import pl.touk.nussknacker.engine.deployment.ExternalDeploymentId import pl.touk.nussknacker.engine.util.Implicits.RichScalaMap -import 
pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies} +import pl.touk.nussknacker.engine.{BaseModelData, DeploymentManagerDependencies, newdeployment} import pl.touk.nussknacker.k8s.manager.K8sDeploymentManager._ import pl.touk.nussknacker.k8s.manager.K8sUtils.{sanitizeLabel, sanitizeObjectName, shortHash} import pl.touk.nussknacker.k8s.manager.deployment.K8sScalingConfig.DividingParallelismConfig @@ -381,6 +381,12 @@ class K8sDeploymentManager( override protected def executionContext: ExecutionContext = dependencies.executionContext + // TODO We don't handle deployment synchronization on k8s DM because with current resources model it wasn't trivial to implement it. + // The design of resources is that each scenario has only one k8s deployment and we don't want to rollout this deployment when + // when nothing important is changed (e.g. deploymentId is changed). We should rethink if we want to handle multiple deployments + // for each scenario in this case and where store the deploymentId + override def deploymentSynchronisationSupport: DeploymentSynchronisationSupport = NoDeploymentSynchronisationSupport + } object K8sDeploymentManager { diff --git a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/deployment/DeploymentId.scala b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/deployment/DeploymentId.scala index c4f02c500b7..ef667f4afc0 100644 --- a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/deployment/DeploymentId.scala +++ b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/deployment/DeploymentId.scala @@ -3,6 +3,7 @@ package pl.touk.nussknacker.engine.deployment import io.circe.generic.extras.semiauto.{deriveUnwrappedDecoder, deriveUnwrappedEncoder} import io.circe.{Decoder, Encoder} import pl.touk.nussknacker.engine.api.deployment.ProcessActionId +import pl.touk.nussknacker.engine.newdeployment import java.util.UUID import scala.util.Try @@ -10,8 +11,13 @@ import scala.util.Try //id generated by Nussknacker - in contrary to ExternalDeploymentId which is generated by e.g. 
Flink // TODO: This class will be replaced by DeploymentId from newdeployment package, see docs in newdeployment.DeploymentService final case class DeploymentId(value: String) extends AnyVal { - override def toString: String = value + override def toString: String = value + def toActionIdOpt: Option[ProcessActionId] = Try(UUID.fromString(value)).toOption.map(ProcessActionId(_)) + + def toNewDeploymentIdOpt: Option[newdeployment.DeploymentId] = + Try(UUID.fromString(value)).toOption.map(newdeployment.DeploymentId(_)) + } object DeploymentId { diff --git a/extensions-api/src/main/scala/pl/touk/nussknacker/engine/newdeployment/DeploymentId.scala b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/newdeployment/DeploymentId.scala new file mode 100644 index 00000000000..b1155ee592d --- /dev/null +++ b/extensions-api/src/main/scala/pl/touk/nussknacker/engine/newdeployment/DeploymentId.scala @@ -0,0 +1,16 @@ +package pl.touk.nussknacker.engine.newdeployment + +import java.util.UUID +import scala.util.Try + +final case class DeploymentId(value: UUID) { + override def toString: String = value.toString +} + +object DeploymentId { + + def fromString(str: String): Option[DeploymentId] = Try(UUID.fromString(str)).toOption.map(DeploymentId(_)) + + def generate: DeploymentId = DeploymentId(UUID.randomUUID()) + +} From 07526d11e3c02656348a22abedc5fa73edeb7f07 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mateusz=20S=C5=82abek?= Date: Wed, 12 Jun 2024 12:32:25 +0200 Subject: [PATCH 15/17] fix aggregate window offset configuration doc (#6171) (#6173) --- docs/installation_configuration_guide/model/Flink.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/installation_configuration_guide/model/Flink.md b/docs/installation_configuration_guide/model/Flink.md index 1d2b647a14b..31c689491ad 100644 --- a/docs/installation_configuration_guide/model/Flink.md +++ b/docs/installation_configuration_guide/model/Flink.md @@ -86,7 +86,7 @@ It's also possible to configure restart strategies per scenario, using additiona ### Flink Component provider configuration #### Configuring offset for Tumbling aggregate time windows -`components.base.aggregateWindowsConfig.tumblingWindowsOffset` - use [ISO_8601 Duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format to configure it. +`components.baseUnbounded.aggregateWindowsConfig.tumblingWindowsOffset` - use [ISO_8601 Duration](https://en.wikipedia.org/wiki/ISO_8601#Durations) format to configure it. This configuration is optional, by default offset is equal 0. This setting applies only to windows in tumbling aggregate. Might be useful when you need daily windows to be aligned to different than UTC timezone. 
See example in Flink [docs](https://nightlies.apache.org/flink/flink-docs-master/docs/dev/datastream/operators/windows/#tumbling-windows) From cb359ffbd55bb08c5868c7f7e6bde7984fbd05d4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=C5=81ukasz=20Bigorajski?= <72501021+lukasz-bigorajski@users.noreply.github.com> Date: Wed, 12 Jun 2024 14:31:48 +0200 Subject: [PATCH 16/17] [MAINTENANCE] Remove deprecated annotation (#6177) --- .../main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala | 1 - 1 file changed, 1 deletion(-) diff --git a/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala b/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala index b8a0d821501..e0bab19067f 100644 --- a/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala +++ b/scenario-api/src/main/scala/pl/touk/nussknacker/engine/spel/Implicits.scala @@ -5,7 +5,6 @@ import pl.touk.nussknacker.engine.graph.expression.Expression import scala.language.implicitConversions // TODO: Should be replaced with: pl.touk.nussknacker.engine.spel.SpelExtension -@Deprecated object Implicits { implicit def asSpelExpression(expression: String): Expression = Expression.spel(expression) From d82f03355e6ac9d27176d796b6b3d3a6bfc22154 Mon Sep 17 00:00:00 2001 From: Arek Burdach Date: Thu, 13 Jun 2024 00:21:14 +0200 Subject: [PATCH 17/17] [NU-1685] newdeployment.DeploymentService: Ensure that only one deployment is performed for each scenario at a time (#6172) --- .../api/deployment/DeploymentStatus.scala | 67 ++++++++ .../api/deployment/DeploymentStatusName.scala | 37 ----- .../simple/SimpleDeploymentStatus.scala | 27 --- .../deployment/simple/SimpleStateStatus.scala | 8 +- ...053__AddDeploymentStatusNameConstraint.sql | 2 + .../ui/api/DeploymentApiHttpService.scala | 24 ++- .../description/DeploymentApiEndpoints.scala | 47 ++++-- .../pl/touk/nussknacker/ui/db/SqlStates.scala | 9 + .../process/newactivity/ActivityService.scala | 31 ++-- .../DeploymentEntityFactory.scala | 9 +- .../newdeployment/DeploymentRepository.scala | 45 ++--- .../newdeployment/DeploymentService.scala | 157 ++++++++++++------ ...entsStatusesSynchronizationScheduler.scala | 2 +- .../DeploymentsStatusesSynchronizer.scala | 29 +++- .../DBFetchingProcessRepository.scala | 3 +- .../process/repository/DBIOActionRunner.scala | 34 +++- .../DbProcessActivityRepository.scala | 5 +- .../ui/process/repository/Repository.scala | 4 +- .../version/ScenarioGraphVersionService.scala | 26 ++- .../server/AkkaHttpBasedRouteProvider.scala | 7 +- .../multiple-category-designer.conf | 8 +- ...category-used-more-than-once-designer.conf | 10 +- .../simple-streaming-use-case-designer.conf | 4 +- .../mock/MockFetchingProcessRepository.scala | 2 +- .../test/utils/domain/ProcessTestData.scala | 12 +- .../test/utils/domain/TestFactory.scala | 15 +- .../api/AppApiHttpServiceBusinessSpec.scala | 14 +- .../api/AppApiHttpServiceSecuritySpec.scala | 28 ++-- ...DeploymentApiHttpServiceBusinessSpec.scala | 43 ++++- ...tApiHttpServiceDeploymentCommentSpec.scala | 4 +- .../api/NodesApiHttpServiceBusinessSpec.scala | 66 +------- .../api/NodesApiHttpServiceSecuritySpec.scala | 64 ------- .../ui/api/ProcessesResourcesSpec.scala | 15 +- ...ParametersApiHttpServiceBusinessSpec.scala | 2 +- ...ParametersApiHttpServiceSecuritySpec.scala | 8 +- ...StatisticsApiHttpServiceBusinessSpec.scala | 4 +- .../newdeployment/DeploymentServiceTest.scala | 118 +++++++++++++ docs/Changelog.md | 3 +- .../MockableDeploymentManagerProvider.scala | 98 +++++++---- 
.../exception/DefaultExceptionConsumers.scala | 2 +- .../engine/management/FlinkRestManager.scala | 18 +- .../RequestResponseDeploymentStrategy.scala | 5 +- .../StreamingDeploymentStrategy.scala | 9 +- .../util/logging/LazyLoggingWithTraces.scala | 29 ++-- 44 files changed, 679 insertions(+), 475 deletions(-) create mode 100644 designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatus.scala delete mode 100644 designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala delete mode 100644 designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala create mode 100644 designer/server/src/main/resources/db/migration/common/V1_053__AddDeploymentStatusNameConstraint.sql create mode 100644 designer/server/src/main/scala/pl/touk/nussknacker/ui/db/SqlStates.scala create mode 100644 designer/server/src/test/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentServiceTest.scala diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatus.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatus.scala new file mode 100644 index 00000000000..9c1bda6b670 --- /dev/null +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatus.scala @@ -0,0 +1,67 @@ +package pl.touk.nussknacker.engine.api.deployment + +import enumeratum.EnumEntry.UpperSnakecase +import enumeratum.{Enum, EnumEntry} +import io.circe.Codec +import io.circe.generic.extras.semiauto.deriveUnwrappedCodec + +// Currently DeploymentStatus are limited set of allowed statuses. Only ProblemDeploymentStatus can have different +// descriptions depending on DM implementation. It makes implementation of logic based on statuses easier. In case +// if we have requirement to make it more flexible, we can relax this restriction. +sealed trait DeploymentStatus extends EnumEntry with UpperSnakecase { + def name: DeploymentStatusName = DeploymentStatusName(entryName) +} + +sealed abstract class NoAttributesDeploymentStatus extends DeploymentStatus + +final case class ProblemDeploymentStatus(description: String) extends DeploymentStatus { + override def name: DeploymentStatusName = ProblemDeploymentStatus.name +} + +object DeploymentStatus extends Enum[DeploymentStatus] { + + override def values = findValues + + object Problem { + + private val DefaultDescription = "There are some problems with deployment." 
+ + val Failed: ProblemDeploymentStatus = ProblemDeploymentStatus(DefaultDescription) + + val FailureDuringDeploymentRequesting: ProblemDeploymentStatus = ProblemDeploymentStatus( + "There were some problems with deployment requesting" + ) + + } + + case object DuringDeploy extends NoAttributesDeploymentStatus + case object Running extends NoAttributesDeploymentStatus + case object Finished extends NoAttributesDeploymentStatus + case object Restarting extends NoAttributesDeploymentStatus + case object DuringCancel extends NoAttributesDeploymentStatus + case object Canceled extends NoAttributesDeploymentStatus + +} + +object ProblemDeploymentStatus { + def name: DeploymentStatusName = DeploymentStatusName("PROBLEM") + + def extractDescription(status: DeploymentStatus): Option[String] = + status match { + case problem: ProblemDeploymentStatus => + Some(problem.description) + case _: NoAttributesDeploymentStatus => + None + } + +} + +final case class DeploymentStatusName(value: String) { + override def toString: String = value +} + +object DeploymentStatusName { + + implicit val codec: Codec[DeploymentStatusName] = deriveUnwrappedCodec[DeploymentStatusName] + +} diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala deleted file mode 100644 index efc78650ca9..00000000000 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/DeploymentStatusName.scala +++ /dev/null @@ -1,37 +0,0 @@ -package pl.touk.nussknacker.engine.api.deployment - -import io.circe.Codec -import io.circe.generic.extras.semiauto.deriveUnwrappedCodec - -sealed trait DeploymentStatus { - def name: DeploymentStatusName -} - -final case class NoAttributesDeploymentStatus(override val name: DeploymentStatusName) extends DeploymentStatus - -final case class ProblemDeploymentStatus(description: String) extends DeploymentStatus { - override def name: DeploymentStatusName = ProblemDeploymentStatus.name -} - -object ProblemDeploymentStatus { - def name: DeploymentStatusName = DeploymentStatusName("PROBLEM") - - def extractDescription(status: DeploymentStatus): Option[String] = - status match { - case ProblemDeploymentStatus(description) => - Some(description) - case _ => - None - } - -} - -final case class DeploymentStatusName(value: String) { - override def toString: String = value -} - -object DeploymentStatusName { - - implicit val codec: Codec[DeploymentStatusName] = deriveUnwrappedCodec[DeploymentStatusName] - -} diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala deleted file mode 100644 index 2fc8a26d064..00000000000 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleDeploymentStatus.scala +++ /dev/null @@ -1,27 +0,0 @@ -package pl.touk.nussknacker.engine.api.deployment.simple - -import pl.touk.nussknacker.engine.api.deployment.{ - DeploymentStatus, - DeploymentStatusName, - NoAttributesDeploymentStatus, - ProblemDeploymentStatus -} - -object SimpleDeploymentStatus { - - object Problem { - - private val DefaultDescription = "There are some problems with deployment." 
- - val Failed: ProblemDeploymentStatus = ProblemDeploymentStatus(DefaultDescription) - - } - - val DuringDeploy: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("DURING_DEPLOY")) - val Running: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("RUNNING")) - val Finished: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("FINISHED")) - val Restarting: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("RESTARTING")) - val DuringCancel: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("DURING_CANCEL")) - val Canceled: DeploymentStatus = NoAttributesDeploymentStatus(DeploymentStatusName("CANCELED")) - -} diff --git a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala index 4a506d224d2..1ca7cd99450 100644 --- a/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala +++ b/designer/deployment-manager-api/src/main/scala/pl/touk/nussknacker/engine/api/deployment/simple/SimpleStateStatus.scala @@ -19,14 +19,10 @@ object SimpleStateStatus { def fromDeploymentStatus(deploymentStatus: DeploymentStatus): StateStatus = { deploymentStatus match { - case NoAttributesDeploymentStatus(name) => NoAttributesStateStatus(name.value) + case noAttributes: NoAttributesDeploymentStatus => NoAttributesStateStatus(noAttributes.name.value) // We assume that all deployment status have default allowedActions. Non-default allowedActions have only // statuses that are not deployment statuses but scenario statuses. - case ProblemDeploymentStatus(description) => ProblemStateStatus(description) - case other => - throw new IllegalArgumentException( - s"Problem during conversion of deployment status to scenario status. 
Not expected deployment status: $other" - ) + case problem: ProblemDeploymentStatus => ProblemStateStatus(problem.description) } } diff --git a/designer/server/src/main/resources/db/migration/common/V1_053__AddDeploymentStatusNameConstraint.sql b/designer/server/src/main/resources/db/migration/common/V1_053__AddDeploymentStatusNameConstraint.sql new file mode 100644 index 00000000000..5011bde0302 --- /dev/null +++ b/designer/server/src/main/resources/db/migration/common/V1_053__AddDeploymentStatusNameConstraint.sql @@ -0,0 +1,2 @@ +ALTER TABLE "deployments" ADD CONSTRAINT "deployments_status_name_check" CHECK ( + "status_name" in ('DURING_DEPLOY', 'RUNNING', 'FINISHED', 'RESTARTING', 'DURING_CANCEL', 'CANCELED', 'PROBLEM' )); diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala index 860ec1e55bb..7d0d3db4c9c 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpService.scala @@ -35,15 +35,21 @@ class DeploymentApiHttpService( request.comment ) .map(_.left.map { - case UnderlyingServiceError(DeploymentService.ConflictingDeploymentIdError(id)) => - ConflictingDeploymentIdError(id) - case UnderlyingServiceError(DeploymentService.ScenarioNotFoundError(scenarioName)) => - ScenarioNotFoundError(scenarioName) - case UnderlyingServiceError(DeploymentService.NoPermissionError) => NoPermissionError - case UnderlyingServiceError(DeploymentService.ScenarioGraphValidationError(errors)) => - ScenarioGraphValidationError(errors) - case UnderlyingServiceError(DeploymentService.DeployValidationError(message)) => - DeployValidationError(message) + case UnderlyingServiceError(err) => + err match { + case DeploymentService.ConflictingDeploymentIdError(id) => + ConflictingDeploymentIdError(id) + case DeploymentService + .ConcurrentDeploymentsForScenarioArePerformedError(scenarioName, concurrentDeploymentsIds) => + ConcurrentDeploymentsForScenarioArePerformedError(scenarioName, concurrentDeploymentsIds) + case DeploymentService.ScenarioNotFoundError(scenarioName) => + ScenarioNotFoundError(scenarioName) + case DeploymentService.NoPermissionError => NoPermissionError + case DeploymentService.ScenarioGraphValidationError(errors) => + ScenarioGraphValidationError(errors) + case DeploymentService.DeployValidationError(message) => + DeployValidationError(message) + } case ActivityService.CommentValidationError(message) => CommentValidationError(message) }) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala index 2b10e33fdc4..04ae62670f3 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/api/description/DeploymentApiEndpoints.scala @@ -1,5 +1,6 @@ package pl.touk.nussknacker.ui.api.description +import cats.data.NonEmptyList import derevo.circe.{decoder, encoder} import derevo.derive import pl.touk.nussknacker.engine.api.NodeId @@ -9,8 +10,7 @@ import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.{ ExpressionParserCompilationError, MissingRequiredProperty } -import pl.touk.nussknacker.engine.api.deployment.DeploymentStatusName -import 
pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, DeploymentStatusName} import pl.touk.nussknacker.engine.api.parameter.ParameterName import pl.touk.nussknacker.engine.api.process.ProcessName import pl.touk.nussknacker.engine.newdeployment.DeploymentId @@ -28,7 +28,7 @@ import sttp.tapir._ import sttp.tapir.derevo.schema import sttp.tapir.json.circe.jsonBody -import java.time.{Instant, LocalDateTime, ZoneId, ZoneOffset} +import java.time.{Instant, LocalDateTime, ZoneOffset} import java.util.UUID class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseEndpointDefinitions { @@ -58,13 +58,15 @@ class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseE .out(statusCode(StatusCode.Accepted)) .errorOut( oneOf[RunDeploymentError]( - oneOfVariant[ConflictingDeploymentIdError]( + oneOfVariant[ConflictRunDeploymentError]( StatusCode.Conflict, - plainBody[ConflictingDeploymentIdError] - .example( - Example.of( - summary = Some("Deployment with id {deploymentId} already exists"), - value = ConflictingDeploymentIdError(exampleDeploymentId) + plainBody[ConflictRunDeploymentError] + .examples( + List( + Example.of( + summary = Some("Deployment with id {deploymentId} already exists"), + value = ConflictingDeploymentIdError(exampleDeploymentId) + ) ) ) ), @@ -132,13 +134,13 @@ class DeploymentApiEndpoints(auth: EndpointInput[AuthCredentials]) extends BaseE jsonBody[GetDeploymentStatusResponse].examples( List( Example.of( - GetDeploymentStatusResponse(SimpleDeploymentStatus.Running.name, None, exampleInstant), + GetDeploymentStatusResponse(DeploymentStatus.Running.name, None, exampleInstant), Some("RUNNING status") ), Example.of( GetDeploymentStatusResponse( - SimpleDeploymentStatus.Problem.Failed.name, - Some(SimpleDeploymentStatus.Problem.Failed.description), + DeploymentStatus.Problem.Failed.name, + Some(DeploymentStatus.Problem.Failed.description), exampleInstant ), Some("PROBLEM status") @@ -220,7 +222,14 @@ object DeploymentApiEndpoints { sealed trait BadRequestRunDeploymentError extends RunDeploymentError - final case class ConflictingDeploymentIdError(id: DeploymentId) extends RunDeploymentError + sealed trait ConflictRunDeploymentError extends RunDeploymentError + + final case class ConflictingDeploymentIdError(id: DeploymentId) extends ConflictRunDeploymentError + + final case class ConcurrentDeploymentsForScenarioArePerformedError( + scenarioName: ProcessName, + concurrentDeploymentsIds: NonEmptyList[DeploymentId] + ) extends ConflictRunDeploymentError final case class ScenarioNotFoundError(scenarioName: ProcessName) extends BadRequestRunDeploymentError @@ -244,10 +253,14 @@ object DeploymentApiEndpoints { case DeployValidationError(message) => message } - implicit val conflictingDeploymentIdErrorCodec: Codec[String, ConflictingDeploymentIdError, CodecFormat.TextPlain] = - BaseEndpointDefinitions.toTextPlainCodecSerializationOnly[ConflictingDeploymentIdError](err => - s"Deployment with id ${err.id} already exists" - ) + implicit val conflictingDeploymentIdErrorCodec: Codec[String, ConflictRunDeploymentError, CodecFormat.TextPlain] = + BaseEndpointDefinitions.toTextPlainCodecSerializationOnly[ConflictRunDeploymentError] { + case ConflictingDeploymentIdError(id) => s"Deployment with id $id already exists" + case ConcurrentDeploymentsForScenarioArePerformedError(scenarioName, concurrentDeploymentsIds) => + s"Deployment can't be run because only a single deployment 
per scenario can be run at a time. " + + s"Currently the scenario [$scenarioName] has running deployments with ids: " + + s"${concurrentDeploymentsIds.toList.sortBy(_.value).mkString(",")}".stripMargin + } implicit val deploymentNotFoundErrorCodec: Codec[String, DeploymentNotFoundError, CodecFormat.TextPlain] = BaseEndpointDefinitions.toTextPlainCodecSerializationOnly[DeploymentNotFoundError](err => diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/SqlStates.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/SqlStates.scala new file mode 100644 index 00000000000..6863c5fa1bc --- /dev/null +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/db/SqlStates.scala @@ -0,0 +1,9 @@ +package pl.touk.nussknacker.ui.db + +// This is a copy of necessary sql states based od PSQLState class but without binding to psql classes +object SqlStates { + + val UniqueViolation = "23505" + val SerializationFailure = "40001" + +} diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newactivity/ActivityService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newactivity/ActivityService.scala index d5eea2a5fde..e436b4ef51b 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newactivity/ActivityService.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newactivity/ActivityService.scala @@ -2,7 +2,6 @@ package pl.touk.nussknacker.ui.process.newactivity import cats.data.EitherT import cats.implicits.toTraverseOps -import db.util.DBIOActionInstances._ import pl.touk.nussknacker.engine.api.deployment.ScenarioActionName import pl.touk.nussknacker.engine.api.process.{ProcessId, VersionId} import pl.touk.nussknacker.ui.api.DeploymentCommentSettings @@ -29,24 +28,22 @@ class ActivityService( ): Future[Either[ActivityError[ErrorType], Unit]] = { toActivityCommandConverter.convert(command) match { case RunDeploymentActivityCommand(command) => - dbioRunner.run( - (for { - validatedCommentOpt <- validateDeploymentCommentWhenPassed(comment) - keys <- runDeployment(command) - _ <- saveCommentWhenPassed[RunDeploymentError]( - validatedCommentOpt, - keys.scenarioId, - keys.scenarioGraphVersionId, - command.user - ) - } yield ()).value - ) + (for { + validatedCommentOpt <- validateDeploymentCommentWhenPassed(comment) + keys <- runDeployment(command) + _ <- saveCommentWhenPassed[RunDeploymentError]( + validatedCommentOpt, + keys.scenarioId, + keys.scenarioGraphVersionId, + command.user + ) + } yield ()).value } } private def validateDeploymentCommentWhenPassed(comment: Option[Comment]) = { EitherT - .fromEither[DB]( + .fromEither[Future]( DeploymentComment .createDeploymentComment(comment, deploymentCommentSettings) .toEither @@ -66,7 +63,11 @@ class ActivityService( user: LoggedUser ) = EitherT.right[ActivityError[ErrorType]]( - commentOpt.map(commentRepository.saveComment(scenarioId, scenarioGraphVersionId, user, _)).sequence + commentOpt + .map(comment => + dbioRunner.run(commentRepository.saveComment(scenarioId, scenarioGraphVersionId, user, comment)) + ) + .sequence ) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala index 789b7646ca7..70e72f4df03 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala +++ 
b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentEntityFactory.scala @@ -1,11 +1,6 @@ package pl.touk.nussknacker.ui.process.newdeployment -import pl.touk.nussknacker.engine.api.deployment.{ - DeploymentStatus, - DeploymentStatusName, - NoAttributesDeploymentStatus, - ProblemDeploymentStatus -} +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, DeploymentStatusName, ProblemDeploymentStatus} import pl.touk.nussknacker.engine.api.process.ProcessId import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.ui.db.entity.{BaseEntityFactory, ProcessEntityData, ProcessEntityFactory} @@ -72,7 +67,7 @@ trait DeploymentEntityFactory extends BaseEntityFactory { self: ProcessEntityFac statusProblemDescription.getOrElse(throw new IllegalStateException("Problem status without description")) ) } else { - NoAttributesDeploymentStatus(statusName) + DeploymentStatus.withName(statusName.value) } DeploymentEntityData(id, scenarioId, createdAt, createdBy, WithModifiedAt(status, statusModifiedAt)) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala index 33a97f8dc33..eb76a9100b4 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentRepository.scala @@ -2,11 +2,11 @@ package pl.touk.nussknacker.ui.process.newdeployment import cats.implicits.{toFoldableOps, toTraverseOps} import db.util.DBIOActionInstances._ -import org.postgresql.util.{PSQLException, PSQLState} -import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, ProblemDeploymentStatus} +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, DeploymentStatusName, ProblemDeploymentStatus} +import pl.touk.nussknacker.engine.api.process.ProcessId import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.ui.db.entity.ProcessEntityData -import pl.touk.nussknacker.ui.db.{DbRef, NuTables} +import pl.touk.nussknacker.ui.db.{DbRef, NuTables, SqlStates} import pl.touk.nussknacker.ui.process.newdeployment.DeploymentEntityFactory.DeploymentEntityData import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository.{ ConflictingDeploymentIdError, @@ -14,7 +14,7 @@ import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository.{ } import slick.jdbc.JdbcProfile -import java.sql.{SQLIntegrityConstraintViolationException, Timestamp} +import java.sql.{SQLException, Timestamp} import java.time.Clock import scala.concurrent.ExecutionContext @@ -24,15 +24,18 @@ class DeploymentRepository(dbRef: DbRef, clock: Clock)(implicit ec: ExecutionCon import profile.api._ + def getScenarioDeploymentsInNotMatchingStatus( + scenarioId: ProcessId, + statusNames: Set[DeploymentStatusName] + ): DB[Seq[DeploymentEntityData]] = { + toEffectAll(deploymentsTable.filter(d => d.scenarioId === scenarioId && !(d.statusName inSet statusNames)).result) + } + def saveDeployment(deployment: DeploymentEntityData): DB[Either[ConflictingDeploymentIdError, Unit]] = { toEffectAll(deploymentsTable += deployment).asTry.map( _.map(_ => Right(())) .recover { - // for postgres - case e: PSQLException if e.getSQLState == PSQLState.UNIQUE_VIOLATION.getState => - Left(ConflictingDeploymentIdError(deployment.id)) - // for other dbs, e.g. 
hsql - case _: SQLIntegrityConstraintViolationException => + case e: SQLException if e.getSQLState == SqlStates.UniqueViolation => Left(ConflictingDeploymentIdError(deployment.id)) } .get @@ -55,17 +58,8 @@ class DeploymentRepository(dbRef: DbRef, clock: Clock)(implicit ec: ExecutionCon def updateDeploymentStatuses(statusesToUpdate: Map[DeploymentId, DeploymentStatus]): DB[Set[DeploymentId]] = { statusesToUpdate.toList .map { case (id, status) => - val problemDescription = ProblemDeploymentStatus.extractDescription(status) toEffectAll( - deploymentsTable - .filter(d => - d.id === id && (d.statusName =!= status.name || d.statusProblemDescription =!= problemDescription) - ) - .map(d => (d.statusName, d.statusProblemDescription, d.statusModifiedAt)) - .update((status.name, problemDescription, Timestamp.from(clock.instant()))) - .map { result => - if (result > 0) Set(id) else Set.empty[DeploymentId] - } + updateDeploymentStatus(id, status).map(updated => if (updated) Set(id) else Set.empty[DeploymentId]) ) } .sequence @@ -74,6 +68,19 @@ class DeploymentRepository(dbRef: DbRef, clock: Clock)(implicit ec: ExecutionCon .transactionally } + def updateDeploymentStatus(id: DeploymentId, status: DeploymentStatus): DB[Boolean] = { + val problemDescription = ProblemDeploymentStatus.extractDescription(status) + toEffectAll( + deploymentsTable + .filter(d => d.id === id && (d.statusName =!= status.name || d.statusProblemDescription =!= problemDescription)) + .map(d => (d.statusName, d.statusProblemDescription, d.statusModifiedAt)) + .update((status.name, problemDescription, Timestamp.from(clock.instant()))) + .map { result => + if (result > 0) true else false + } + ) + } + } object DeploymentRepository { diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala index 8341dcb749c..6012bcd430d 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentService.scala @@ -1,11 +1,11 @@ package pl.touk.nussknacker.ui.process.newdeployment import cats.Applicative -import cats.data.EitherT +import cats.data.{EitherT, NonEmptyList} +import com.typesafe.scalalogging.LazyLogging import db.util.DBIOActionInstances._ import pl.touk.nussknacker.engine.api.component.NodesDeploymentData import pl.touk.nussknacker.engine.api.deployment._ -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId} import pl.touk.nussknacker.engine.api.{ProcessVersion => RuntimeVersionData} import pl.touk.nussknacker.engine.deployment.{DeploymentData, DeploymentId => LegacyDeploymentId, ExternalDeploymentId} @@ -32,7 +32,6 @@ import scala.util.control.NonFatal // it joins multiple responsibilities like activity log (currently called "actions") and deployments management. // Also, because of the fact that periodic mechanism is build as a plug-in (DeploymentManager), some deployment related // operations (run now operation) is modeled as a CustomAction. Eventually, we should: -// - Split activity log and deployments management // - Move periodic mechanism into to the designer's core // - Remove CustomAction // After we do this, we can remove legacy classes and fully switch to the new once. 
@@ -43,7 +42,8 @@ class DeploymentService( dmDispatcher: DeploymentManagerDispatcher, dbioRunner: DBIOActionRunner, clock: Clock -)(implicit ec: ExecutionContext) { +)(implicit ec: ExecutionContext) + extends LazyLogging { def getDeploymentStatus( id: DeploymentId @@ -57,28 +57,70 @@ class DeploymentService( ) } yield deploymentWithScenarioMetadata.deployment.statusWithModifiedAt).value - def runDeployment(command: RunDeploymentCommand): DB[Either[RunDeploymentError, DeploymentForeignKeys]] = + def runDeployment(command: RunDeploymentCommand): Future[Either[RunDeploymentError, DeploymentForeignKeys]] = (for { scenarioMetadata <- getScenarioMetadata(command) - _ <- checkPermission( + _ <- checkPermission[Future, RunDeploymentError]( user = command.user, category = scenarioMetadata.processCategory, permission = Permission.Deploy ) - _ <- saveDeployment(command, scenarioMetadata) scenarioGraphVersion <- EitherT( scenarioGraphVersionService.getValidResolvedLatestScenarioGraphVersion(scenarioMetadata, command.user) ).leftMap[RunDeploymentError](error => ScenarioGraphValidationError(error.errors)) _ <- validateUsingDeploymentManager(scenarioMetadata, scenarioGraphVersion, command.user) - _ <- runDeploymentUsingDeploymentManager(scenarioMetadata, scenarioGraphVersion, command) + // We keep deployments metrics (used by counts mechanism) keyed by scenario name. + // Because of that we can't run more than one deployment for scenario in a time. + // TODO: We should key metrics by deployment id and remove this limitation + // Saving of deployment is the final step before deployment request because we want to store only requested deployments + _ <- saveDeploymentEnsuringNoConcurrentDeploymentsForScenario(command, scenarioMetadata) + _ <- runDeploymentUsingDeploymentManagerAsync(scenarioMetadata, scenarioGraphVersion, command) } yield DeploymentForeignKeys(scenarioMetadata.id, scenarioGraphVersion.id)).value - private def getScenarioMetadata(command: RunDeploymentCommand): EitherT[DB, RunDeploymentError, ProcessEntityData] = + private def getScenarioMetadata( + command: RunDeploymentCommand + ): EitherT[Future, RunDeploymentError, ProcessEntityData] = EitherT.fromOptionF( - scenarioMetadataRepository.getScenarioMetadata(command.scenarioName), + dbioRunner.run(scenarioMetadataRepository.getScenarioMetadata(command.scenarioName)), ScenarioNotFoundError(command.scenarioName) ) + private def saveDeploymentEnsuringNoConcurrentDeploymentsForScenario( + command: RunDeploymentCommand, + scenarioMetadata: ProcessEntityData + ): EitherT[Future, RunDeploymentError, Unit] = { + EitherT(dbioRunner.runInSerializableTransactionWithRetry((for { + nonFinishedDeployments <- getConcurrentlyPerformedDeploymentsForScenario(scenarioMetadata) + _ <- checkNoConcurrentDeploymentsForScenario(nonFinishedDeployments, scenarioMetadata.name) + _ <- saveDeployment(command, scenarioMetadata) + } yield ()).value)) + } + + private def getConcurrentlyPerformedDeploymentsForScenario(scenarioMetadata: ProcessEntityData) = { + val nonPerformingDeploymentStatuses = + Set(DeploymentStatus.Canceled.name, DeploymentStatus.Finished.name, ProblemDeploymentStatus.name) + EitherT.right( + deploymentRepository.getScenarioDeploymentsInNotMatchingStatus( + scenarioMetadata.id, + nonPerformingDeploymentStatuses + ) + ) + } + + private def checkNoConcurrentDeploymentsForScenario( + nonFinishedDeployments: Seq[DeploymentEntityData], + scenarioName: ProcessName + ) = { + EitherT.fromEither( + NonEmptyList + .fromList(nonFinishedDeployments.toList) + 
.map(conflictingDeployments => + Left(ConcurrentDeploymentsForScenarioArePerformedError(scenarioName, conflictingDeployments.map(_.id))) + ) + .getOrElse(Right(())) + ) + } + private def saveDeployment( command: RunDeploymentCommand, scenarioMetadata: ProcessEntityData @@ -91,7 +133,7 @@ class DeploymentService( scenarioMetadata.id, now, command.user.id, - WithModifiedAt(SimpleDeploymentStatus.DuringDeploy, now) + WithModifiedAt(DeploymentStatus.DuringDeploy, now) ) ) ).leftMap(e => ConflictingDeploymentIdError(e.id)) @@ -101,7 +143,7 @@ class DeploymentService( scenarioMetadata: ProcessEntityData, scenarioGraphVersion: ProcessVersionEntityData, user: LoggedUser - ): EitherT[DB, RunDeploymentError, Unit] = { + ): EitherT[Future, RunDeploymentError, Unit] = { val runtimeVersionData = RuntimeVersionData( versionId = scenarioGraphVersion.id, processName = scenarioMetadata.name, @@ -117,35 +159,31 @@ class DeploymentService( NodesDeploymentData.empty ) for { - result <- EitherT[DB, RunDeploymentError, Unit]( - toEffectAll( - DB.from( - dmDispatcher - .deploymentManagerUnsafe(scenarioMetadata.processingType)(user) - .processCommand( - DMValidateScenarioCommand( - runtimeVersionData, - dumbDeploymentData, - scenarioGraphVersion.jsonUnsafe, - DeploymentUpdateStrategy.DontReplaceDeployment - ) - ) - .map(_ => Right(())) - // TODO: more explicit way to pass errors from DM - .recover { case NonFatal(ex) => - Left(DeployValidationError(ex.getMessage)) - } + result <- EitherT[Future, RunDeploymentError, Unit]( + dmDispatcher + .deploymentManagerUnsafe(scenarioMetadata.processingType)(user) + .processCommand( + DMValidateScenarioCommand( + runtimeVersionData, + dumbDeploymentData, + scenarioGraphVersion.jsonUnsafe, + DeploymentUpdateStrategy.DontReplaceDeployment + ) ) - ) + .map(_ => Right(())) + // TODO: more explicit way to pass errors from DM + .recover { case NonFatal(ex) => + Left(DeployValidationError(ex.getMessage)) + } ) } yield result } - private def runDeploymentUsingDeploymentManager( + private def runDeploymentUsingDeploymentManagerAsync( scenarioMetadata: ProcessEntityData, scenarioGraphVersion: ProcessVersionEntityData, command: RunDeploymentCommand - ): EitherT[DB, RunDeploymentError, Option[ExternalDeploymentId]] = { + ): EitherT[Future, RunDeploymentError, Unit] = { val runtimeVersionData = RuntimeVersionData( versionId = scenarioGraphVersion.id, processName = scenarioMetadata.name, @@ -159,22 +197,42 @@ class DeploymentService( additionalDeploymentData = Map.empty, command.nodesDeploymentData ) - EitherT.right( - toEffectAll( - DB.from( - dmDispatcher - .deploymentManagerUnsafe(scenarioMetadata.processingType)(command.user) - .processCommand( - DMRunDeploymentCommand( - runtimeVersionData, - deploymentData, - scenarioGraphVersion.jsonUnsafe, - DeploymentUpdateStrategy.DontReplaceDeployment - ) - ) + dmDispatcher + .deploymentManagerUnsafe(scenarioMetadata.processingType)(command.user) + .processCommand( + DMRunDeploymentCommand( + runtimeVersionData, + deploymentData, + scenarioGraphVersion.jsonUnsafe, + DeploymentUpdateStrategy.DontReplaceDeployment ) ) - ) + .map { externalDeploymentId => + logger.debug( + s"Deployment [${command.id}] successfully requested. 
External deployment id is: $externalDeploymentId" + ) + } + .failed + .foreach(handleFailureDuringDeploymentRequesting(command.id, _)) + EitherT.pure(()) + } + + private def handleFailureDuringDeploymentRequesting( + deploymentId: DeploymentId, + ex: Throwable + ): Unit = { + logger.warn(s"Deployment [$deploymentId] requesting finished with failure. Status will be marked as PROBLEM", ex) + dbioRunner + .run( + deploymentRepository.updateDeploymentStatus( + deploymentId, + DeploymentStatus.Problem.FailureDuringDeploymentRequesting + ) + ) + .failed + .foreach { ex => + logger.warn(s"Exception during marking deployment [$deploymentId] status as PROBLEM", ex) + } } private def toLegacyDeploymentId(id: DeploymentId) = { @@ -205,6 +263,11 @@ object DeploymentService { final case class ConflictingDeploymentIdError(id: DeploymentId) extends RunDeploymentError + final case class ConcurrentDeploymentsForScenarioArePerformedError( + scenarioName: ProcessName, + concurrentDeploymentsIds: NonEmptyList[DeploymentId] + ) extends RunDeploymentError + final case class ScenarioNotFoundError(scenarioName: ProcessName) extends RunDeploymentError final case class DeploymentNotFoundError(id: DeploymentId) extends GetDeploymentStatusError diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala index ae315e6ad65..a80b1bd965f 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizationScheduler.scala @@ -27,7 +27,7 @@ class DeploymentsStatusesSynchronizationScheduler( actorSystem.scheduler.scheduleAtFixedRate(0 seconds, config.delayBetweenSynchronizations) { () => Try(Await.result(synchronizer.synchronizeAll(), config.synchronizationTimeout)).failed.foreach { ex => logger.error( - s"Error while synchronizing deployments statuses. Synchronization will be retried in ${config.delayBetweenSynchronizations}", + s"Error during deployments statuses synchronization. 
Synchronization will be retried in ${config.delayBetweenSynchronizations}", ex ) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala index 9d132ecb689..6f883f8da6a 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/newdeployment/synchronize/DeploymentsStatusesSynchronizer.scala @@ -4,23 +4,27 @@ import cats.implicits.toTraverseOps import com.typesafe.scalalogging.LazyLogging import pl.touk.nussknacker.engine.api.deployment.{ DeploymentManager, + DeploymentStatus, DeploymentSynchronisationSupport, DeploymentSynchronisationSupported, NoDeploymentSynchronisationSupport } +import pl.touk.nussknacker.engine.newdeployment +import pl.touk.nussknacker.engine.util.logging.LazyLoggingWithTraces import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeDataProvider import pl.touk.nussknacker.ui.process.repository.DBIOActionRunner import pl.touk.nussknacker.ui.security.api.NussknackerInternalUser import scala.concurrent.{ExecutionContext, Future} +import scala.util.control.NonFatal class DeploymentsStatusesSynchronizer( repository: DeploymentRepository, synchronizationSupport: ProcessingTypeDataProvider[DeploymentSynchronisationSupport, _], dbioActionRunner: DBIOActionRunner )(implicit ec: ExecutionContext) - extends LazyLogging { + extends LazyLoggingWithTraces { def synchronizeAll(): Future[Unit] = { synchronizationSupport @@ -31,20 +35,29 @@ class DeploymentsStatusesSynchronizer( case synchronisationSupported: DeploymentSynchronisationSupported => logger.trace(s"Running synchronization of deployments statuses for processing type: $processingType") for { - statusesByDeploymentId <- synchronisationSupported.getDeploymentStatusesToUpdate + statusesByDeploymentId <- synchronisationSupported.getDeploymentStatusesToUpdate.recover { + case NonFatal(ex) => + logger.debugWithTraceStack( + s"Error during fetching of deployment statuses for processing type [$processingType]: ${ex.getMessage}. Synchronisation will be skipped", + ex + ) + Map.empty[newdeployment.DeploymentId, DeploymentStatus] + } updateResult <- dbioActionRunner.run(repository.updateDeploymentStatuses(statusesByDeploymentId)) _ = { Option(updateResult).filterNot(_.isEmpty) match { case None => - logger.trace( - s"Synchronization of deployments statuses for processing type: $processingType finished. No deployment status was changed" + // TODO: Change to trace + logger.info( + s"Synchronization for processing type [$processingType] finished. Fetched deployment statuses: $statusesByDeploymentId. No deployment status was changed" ) case Some(changes) => - logger.debug( + // TODO: Change to debug + logger.info( changes.mkString( - s"Synchronization of deployments statuses for processing type: $processingType finished. Deployments ", + s"Synchronization for processing type [$processingType] finished. Fetched deployment statuses: $statusesByDeploymentId. 
Statuses for deployments ", ", ", - " statuses were changed" + " were changed" ) ) } @@ -52,7 +65,7 @@ class DeploymentsStatusesSynchronizer( } yield () case NoDeploymentSynchronisationSupport => logger.trace( - s"Synchronization of deployments statuses for processing type: $processingType is not supported, skipping." + s"Synchronization for processing type [$processingType] is not supported. Skipping." ) Future.unit } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala index 4b6dc3ec814..d21bcbacbc3 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBFetchingProcessRepository.scala @@ -35,7 +35,8 @@ object DBFetchingProcessRepository { abstract class DBFetchingProcessRepository[F[_]: Monad]( protected val dbRef: DbRef, actionRepository: ProcessActionRepository -) extends FetchingProcessRepository[F] +)(protected implicit val ec: ExecutionContext) + extends FetchingProcessRepository[F] with LazyLogging { import api._ diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBIOActionRunner.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBIOActionRunner.scala index cb2e197ce52..5b36b4c227e 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBIOActionRunner.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DBIOActionRunner.scala @@ -1,13 +1,17 @@ package pl.touk.nussknacker.ui.process.repository import db.util.DBIOActionInstances._ -import pl.touk.nussknacker.ui.db.DbRef -import slick.jdbc.JdbcProfile +import pl.touk.nussknacker.ui.db.{DbRef, SqlStates} +import pl.touk.nussknacker.ui.process.repository.DBIOActionRunner.TransactionsRunAttemptsExceedException +import slick.jdbc.{JdbcProfile, TransactionIsolation} -import scala.concurrent.Future +import java.sql.SQLException +import scala.concurrent.duration.{DurationInt, FiniteDuration} +import scala.concurrent.{ExecutionContext, Future} import scala.language.higherKinds +import scala.util.{Failure, Success, Try} -class DBIOActionRunner(dbRef: DbRef) { +class DBIOActionRunner(dbRef: DbRef)(implicit ec: ExecutionContext) { protected lazy val profile: JdbcProfile = dbRef.profile protected lazy val api: profile.API = profile.api @@ -16,6 +20,23 @@ class DBIOActionRunner(dbRef: DbRef) { def runInTransaction[T](action: DB[T]): Future[T] = run(action.transactionally) + def runInSerializableTransactionWithRetry[T]( + action: DB[T], + maxRetries: Int = 10, + initialDelay: FiniteDuration = 10.millis + ): Future[T] = { + val transactionAction = action.transactionally.withTransactionIsolation(TransactionIsolation.Serializable) + def doRun(): Future[Try[T]] = { + run(transactionAction).map(Success(_)).recover { + case ex: SQLException if ex.getSQLState == SqlStates.SerializationFailure => Failure(ex) + } + } + retry + .JitterBackoff(maxRetries, initialDelay) + .apply(doRun()) + .map(_.fold[T](ex => throw new TransactionsRunAttemptsExceedException(ex, maxRetries), identity)) + } + def run[T](action: DB[T]): Future[T] = dbRef.db.run(action) @@ -23,6 +44,9 @@ class DBIOActionRunner(dbRef: DbRef) { object DBIOActionRunner { - def apply(db: DbRef): DBIOActionRunner = new DBIOActionRunner(db) + def apply(db: 
DbRef)(implicit ec: ExecutionContext): DBIOActionRunner = new DBIOActionRunner(db) + + class TransactionsRunAttemptsExceedException(cause: Throwable, limit: Int) + extends Exception(s"Transactions exceeded $limit attempts limit", cause) } diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DbProcessActivityRepository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DbProcessActivityRepository.scala index 7330d571aff..135a9d68591 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DbProcessActivityRepository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/DbProcessActivityRepository.scala @@ -34,8 +34,9 @@ trait ProcessActivityRepository { } -final case class DbProcessActivityRepository(protected val dbRef: DbRef, commentRepository: CommentRepository) - extends ProcessActivityRepository +final case class DbProcessActivityRepository(protected val dbRef: DbRef, commentRepository: CommentRepository)( + protected implicit val ec: ExecutionContext +) extends ProcessActivityRepository with LazyLogging with BasicRepository with NuTables { diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/Repository.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/Repository.scala index df42917a8d9..291e86c36b6 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/Repository.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/repository/Repository.scala @@ -4,7 +4,7 @@ import db.util.DBIOActionInstances.DB import pl.touk.nussknacker.ui.db.DbRef import slick.jdbc.JdbcProfile -import scala.concurrent.Future +import scala.concurrent.{ExecutionContext, Future} import scala.language.higherKinds trait Repository[F[_]] { @@ -30,6 +30,8 @@ trait BasicRepository extends Repository[Future] { import api._ + protected implicit def ec: ExecutionContext + private val dbioRunner = DBIOActionRunner(dbRef) override def run[R]: (DB[R]) => Future[R] = a => dbioRunner.run(a.transactionally) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/version/ScenarioGraphVersionService.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/version/ScenarioGraphVersionService.scala index 0317e1788e1..c8c71d4102f 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/version/ScenarioGraphVersionService.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/process/version/ScenarioGraphVersionService.scala @@ -5,29 +5,31 @@ import pl.touk.nussknacker.restmodel.validation.ValidationResults.ValidationErro import pl.touk.nussknacker.ui.db.entity.{ProcessEntityData, ProcessVersionEntityData} import pl.touk.nussknacker.ui.process.deployment.ScenarioResolver import pl.touk.nussknacker.ui.process.processingtype.ProcessingTypeDataProvider +import pl.touk.nussknacker.ui.process.repository.DBIOActionRunner import pl.touk.nussknacker.ui.process.version.ScenarioGraphVersionService.ScenarioGraphValidationError import pl.touk.nussknacker.ui.security.api.LoggedUser import pl.touk.nussknacker.ui.validation.UIProcessValidator -import slick.dbio.DBIO -import db.util.DBIOActionInstances._ import scala.concurrent.{ExecutionContext, Future} class ScenarioGraphVersionService( scenarioGraphVersionRepository: ScenarioGraphVersionRepository, scenarioValidator: ProcessingTypeDataProvider[UIProcessValidator, _], - scenarioResolver: 
ProcessingTypeDataProvider[ScenarioResolver, _] + scenarioResolver: ProcessingTypeDataProvider[ScenarioResolver, _], + dbioRunner: DBIOActionRunner )(implicit ec: ExecutionContext) { def getValidResolvedLatestScenarioGraphVersion( scenarioMetadata: ProcessEntityData, user: LoggedUser - ): DB[Either[ScenarioGraphValidationError, ProcessVersionEntityData]] = { + ): Future[Either[ScenarioGraphValidationError, ProcessVersionEntityData]] = { (for { scenarioGraphVersion <- EitherT.right[ScenarioGraphValidationError]( - scenarioGraphVersionRepository.getLatestScenarioGraphVersion(scenarioMetadata.id) + dbioRunner.run( + scenarioGraphVersionRepository.getLatestScenarioGraphVersion(scenarioMetadata.id) + ) ) - _ <- EitherT.fromEither[DB] { + _ <- EitherT.fromEither[Future] { val validationResult = scenarioValidator .forProcessingTypeUnsafe(scenarioMetadata.processingType)(user) .validateCanonicalProcess(scenarioGraphVersion.jsonUnsafe, scenarioMetadata.isFragment)(user) @@ -36,14 +38,10 @@ class ScenarioGraphVersionService( } // TODO: scenario was already resolved during validation - use it here resolvedCanonicalProcess <- EitherT.right[ScenarioGraphValidationError]( - toEffectAll( - DBIO.from( - Future.fromTry( - scenarioResolver - .forProcessingTypeUnsafe(scenarioMetadata.processingType)(user) - .resolveScenario(scenarioGraphVersion.jsonUnsafe)(user) - ) - ) + Future.fromTry( + scenarioResolver + .forProcessingTypeUnsafe(scenarioMetadata.processingType)(user) + .resolveScenario(scenarioGraphVersion.jsonUnsafe)(user) ) ) entityWithUpdateScenarioGraph = scenarioGraphVersion.copy(json = Some(resolvedCanonicalProcess)) diff --git a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala index 692a6b84749..4439cc00366 100644 --- a/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala +++ b/designer/server/src/main/scala/pl/touk/nussknacker/ui/server/AkkaHttpBasedRouteProvider.scala @@ -377,7 +377,12 @@ class AkkaHttpBasedRouteProvider( val scenarioMetadataRepository = new ScenarioMetadataRepository(dbRef) val scenarioGraphVersionRepository = new ScenarioGraphVersionRepository(dbRef) val scenarioGraphVersionService = - new ScenarioGraphVersionService(scenarioGraphVersionRepository, processValidator, scenarioResolver) + new ScenarioGraphVersionService( + scenarioGraphVersionRepository, + processValidator, + scenarioResolver, + dbioRunner + ) val deploymentService = new DeploymentService( scenarioMetadataRepository, diff --git a/designer/server/src/test/resources/config/access-control-checking/multiple-category-designer.conf b/designer/server/src/test/resources/config/access-control-checking/multiple-category-designer.conf index 85b897ec9e3..bbaa5a8ff8a 100644 --- a/designer/server/src/test/resources/config/access-control-checking/multiple-category-designer.conf +++ b/designer/server/src/test/resources/config/access-control-checking/multiple-category-designer.conf @@ -36,9 +36,7 @@ baseModelConfig { scenarioTypes { streaming1 { deploymentConfig { - restUrl: "http://localhost:8081" - jobManagerTimeout: 1m - type: "flinkStreaming" + type: "mockable" } modelConfig: ${baseModelConfig} { # All below I guess is needed for configuration parsing tests - they are runned on one scenario type @@ -114,9 +112,7 @@ scenarioTypes { } streaming2 { deploymentConfig { - restUrl: "http://localhost:8081" - jobManagerTimeout: 1m - type: "flinkStreaming" 
+ type: "mockable" } modelConfig: ${baseModelConfig} category: "Category2" diff --git a/designer/server/src/test/resources/config/business-cases/category-used-more-than-once-designer.conf b/designer/server/src/test/resources/config/business-cases/category-used-more-than-once-designer.conf index dccd5cf8d91..3742472e0d9 100644 --- a/designer/server/src/test/resources/config/business-cases/category-used-more-than-once-designer.conf +++ b/designer/server/src/test/resources/config/business-cases/category-used-more-than-once-designer.conf @@ -15,9 +15,8 @@ baseModelConfig { scenarioTypes { streaming1 { deploymentConfig { - restUrl: "http://localhost:8081" - jobManagerTimeout: 1m - type: "flinkStreaming" + type: "mockable" + id: "1" engineSetupName: "Flink 1" } modelConfig: ${baseModelConfig} @@ -25,9 +24,8 @@ scenarioTypes { } streaming2 { deploymentConfig { - restUrl: "http://localhost:8082" - jobManagerTimeout: 1m - type: "flinkStreaming" + type: "mockable" + id: "2" engineSetupName: "Flink 2" } modelConfig: ${baseModelConfig} diff --git a/designer/server/src/test/resources/config/business-cases/simple-streaming-use-case-designer.conf b/designer/server/src/test/resources/config/business-cases/simple-streaming-use-case-designer.conf index 087a1d85774..aaa24c0f8a8 100644 --- a/designer/server/src/test/resources/config/business-cases/simple-streaming-use-case-designer.conf +++ b/designer/server/src/test/resources/config/business-cases/simple-streaming-use-case-designer.conf @@ -35,9 +35,7 @@ baseModelConfig { scenarioTypes { streaming { deploymentConfig { - restUrl: "http://localhost:8081" - jobManagerTimeout: 1m - type: "flinkStreaming" + type: "mockable" } modelConfig: ${baseModelConfig} { # All below I guess is needed for configuration parsing tests - they are runned on one scenario type diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala index e5fefd2c1ef..b027c7317cf 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/mock/MockFetchingProcessRepository.scala @@ -39,7 +39,7 @@ object MockFetchingProcessRepository { class MockFetchingProcessRepository private ( override protected val dbRef: DbRef, processes: List[ScenarioWithDetailsEntity[CanonicalProcess]] -)(implicit ec: ExecutionContext) +)(protected implicit val ec: ExecutionContext) extends FetchingProcessRepository[Future] with BasicRepository { diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala index 0e28e89bdc6..bfa0d3d296a 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/ProcessTestData.scala @@ -389,11 +389,13 @@ object ProcessTestData { ProcessAdditionalFields( description = None, properties = Map( - "maxEvents" -> "", - "parallelism" -> "1", - "numberOfThreads" -> "1", - "spillStateToDisk" -> "true", - "environment" -> "test" + "maxEvents" -> "", + "parallelism" -> "1", + "numberOfThreads" -> "1", + "spillStateToDisk" -> "true", + "environment" -> "test", + "checkpointIntervalInSeconds" -> "", + "useAsyncInterpretation" -> "", ), metaDataType = "StreamMetaData" ) diff --git 
a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala index 49e2c261f51..77dff6398a5 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/test/utils/domain/TestFactory.scala @@ -28,6 +28,7 @@ import pl.touk.nussknacker.ui.definition.ScenarioPropertiesConfigFinalizer import pl.touk.nussknacker.ui.process.NewProcessPreparer import pl.touk.nussknacker.ui.process.deployment.ScenarioResolver import pl.touk.nussknacker.ui.process.fragment.{DefaultFragmentRepository, FragmentResolver} +import pl.touk.nussknacker.ui.process.newdeployment.DeploymentRepository import pl.touk.nussknacker.ui.process.processingtype.{ ProcessingTypeDataProvider, ScenarioParametersService, @@ -35,12 +36,13 @@ import pl.touk.nussknacker.ui.process.processingtype.{ ValueWithRestriction } import pl.touk.nussknacker.ui.process.repository._ -import pl.touk.nussknacker.ui.process.version.ScenarioGraphVersionRepository +import pl.touk.nussknacker.ui.process.version.{ScenarioGraphVersionRepository, ScenarioGraphVersionService} import pl.touk.nussknacker.ui.security.api.{LoggedUser, RealLoggedUser} import pl.touk.nussknacker.ui.uiresolving.UIProcessResolver import pl.touk.nussknacker.ui.validation.UIProcessValidator import sttp.client3.testing.SttpBackendStub +import java.time.Clock import scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.Future import scala.jdk.CollectionConverters._ @@ -159,6 +161,13 @@ object TestFactory { def newDummyWriteProcessRepository(): DBProcessRepository = newWriteProcessRepository(dummyDbRef) + def newScenarioGraphVersionService(dbRef: DbRef) = new ScenarioGraphVersionService( + newScenarioGraphVersionRepository(dbRef), + mapProcessingTypeDataProvider(Streaming.stringify -> processValidator), + scenarioResolverByProcessingType, + newDBIOActionRunner(dbRef) + ) + def newScenarioGraphVersionRepository(dbRef: DbRef) = new ScenarioGraphVersionRepository(dbRef) def newFragmentRepository(dbRef: DbRef): DefaultFragmentRepository = @@ -176,6 +185,10 @@ object TestFactory { def newProcessActivityRepository(dbRef: DbRef) = new DbProcessActivityRepository(dbRef, newCommentRepository(dbRef)) + def newScenarioMetadataRepository(dbRef: DbRef) = new ScenarioMetadataRepository(dbRef) + + def newDeploymentRepository(dbRef: DbRef, clock: Clock) = new DeploymentRepository(dbRef, clock) + def asAdmin(route: RouteWithUser): Route = route.securedRouteWithErrorHandling(adminUser()) diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala index 384f62677df..c949f482199 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceBusinessSpec.scala @@ -35,7 +35,7 @@ class AppApiHttpServiceBusinessSpec .applicationState { createDeployedExampleScenario(ProcessName("id1")) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map("id1" -> SimpleStateStatus.Running) ) } @@ -62,7 +62,7 @@ class AppApiHttpServiceBusinessSpec createDeployedExampleScenario(ProcessName("id2")) createDeployedExampleScenario(ProcessName("id3")) - MockableDeploymentManager.configure( + 
MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -89,7 +89,7 @@ class AppApiHttpServiceBusinessSpec createDeployedCanceledExampleScenario(ProcessName("id1")) createDeployedExampleScenario(ProcessName("id2")) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map("id2" -> ProblemStateStatus.shouldBeRunning(VersionId(1L), "admin")) ) } @@ -112,7 +112,7 @@ class AppApiHttpServiceBusinessSpec createDeployedExampleScenario(ProcessName("id1")) createDeployedExampleScenario(ProcessName("id2")) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> SimpleStateStatus.Running, "id2" -> SimpleStateStatus.Running, @@ -137,7 +137,7 @@ class AppApiHttpServiceBusinessSpec .applicationState { createDeployedExampleScenario(ProcessName("id1")) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map("id1" -> SimpleStateStatus.Running) ) } @@ -162,7 +162,7 @@ class AppApiHttpServiceBusinessSpec .applicationState { createDeployedExampleScenario(ProcessName("id1")) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map("id1" -> SimpleStateStatus.Running) ) } @@ -251,7 +251,7 @@ class AppApiHttpServiceBusinessSpec createDeployedExampleScenario(ProcessName("id2")) createDeployedExampleScenario(ProcessName("id3")) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceSecuritySpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceSecuritySpec.scala index 6462e7a3eac..d59d0388878 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceSecuritySpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/AppApiHttpServiceSecuritySpec.scala @@ -37,7 +37,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id1"), category = Category1) createDeployedExampleScenario(ProcessName("id2"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> SimpleStateStatus.Running, "id2" -> SimpleStateStatus.Running @@ -68,7 +68,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -98,7 +98,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -122,7 +122,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category2) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> 
SimpleStateStatus.ProblemStateStatus.Failed, "id2" -> SimpleStateStatus.ProblemStateStatus.Failed, @@ -154,7 +154,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id1"), category = Category1) createDeployedExampleScenario(ProcessName("id2"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> SimpleStateStatus.NotDeployed, "id2" -> SimpleStateStatus.NotDeployed @@ -183,7 +183,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -206,7 +206,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id1"), category = Category1) createDeployedExampleScenario(ProcessName("id2"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> SimpleStateStatus.NotDeployed, "id2" -> SimpleStateStatus.NotDeployed @@ -317,7 +317,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -345,7 +345,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -369,7 +369,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -400,7 +400,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -421,7 +421,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -445,7 +445,7 @@ class AppApiHttpServiceSecuritySpec createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, @@ -469,7 +469,7 @@ class AppApiHttpServiceSecuritySpec 
createDeployedExampleScenario(ProcessName("id2"), category = Category1) createDeployedExampleScenario(ProcessName("id3"), category = Category2) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( "id1" -> ProblemStateStatus.FailedToGet, "id2" -> SimpleStateStatus.Running, diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala index 4e39ac55d02..622bc34f517 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceBusinessSpec.scala @@ -1,5 +1,6 @@ package pl.touk.nussknacker.ui.api +import cats.implicits.toTraverseOps import com.typesafe.scalalogging.LazyLogging import io.restassured.RestAssured.`given` import io.restassured.module.scala.RestAssuredSupport.AddThenToResponse @@ -7,7 +8,7 @@ import org.apache.commons.io.FileUtils import org.scalatest.LoneElement import org.scalatest.freespec.AnyFreeSpecLike import org.scalatest.matchers.should.Matchers -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus +import pl.touk.nussknacker.engine.api.deployment.DeploymentStatus import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.test.base.it.{NuItTest, WithBatchConfigScenarioHelper} import pl.touk.nussknacker.test.config.{WithBatchDesignerConfig, WithBusinessCaseRestAssuredUsersExtensions} @@ -19,6 +20,8 @@ import pl.touk.nussknacker.test.{ import java.nio.charset.StandardCharsets import java.nio.file.Path +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.Future import scala.jdk.CollectionConverters._ class DeploymentApiHttpServiceBusinessSpec @@ -81,7 +84,7 @@ class DeploymentApiHttpServiceBusinessSpec .Then() .statusCode(202) .verifyApplicationState { - waitForDeploymentStatusNameMatches(requestedDeploymentId, SimpleDeploymentStatus.Finished.name) + waitForDeploymentStatusNameMatches(requestedDeploymentId, DeploymentStatus.Finished.name) } .verifyExternalState { val resultFile = getLoneFileFromLoneOutputTransactionsSummaryPartitionWithGivenName("date=2024-01-01") @@ -111,7 +114,31 @@ class DeploymentApiHttpServiceBusinessSpec } } - "when invoked twice with different deployment id should" - { + "when invoked twice with different deployment id, run concurrently" - { + "return conflict status code" in { + `given`() + .applicationState { + createSavedScenario(scenario) + } + + def requestDeployment(id: DeploymentId): Future[Int] = + Future { + `given`() + .when() + .basicAuthAdmin() + .jsonBody(correctDeploymentRequest) + .put(s"$nuDesignerHttpAddress/api/deployments/$id") + .statusCode() + } + + List( + requestDeployment(DeploymentId.generate), + requestDeployment(DeploymentId.generate) + ).sequence.futureValue.toSet shouldBe Set(202, 409) + } + } + + "when invoked twice with different deployment id, run one by one should" - { "return status of correct deployment" in { val firstDeploymentId = DeploymentId.generate val secondDeploymentId = DeploymentId.generate @@ -119,7 +146,7 @@ class DeploymentApiHttpServiceBusinessSpec .applicationState { createSavedScenario(scenario) runDeployment(firstDeploymentId) - waitForDeploymentStatusNameMatches(firstDeploymentId, SimpleDeploymentStatus.Finished.name) + waitForDeploymentStatusNameMatches(firstDeploymentId, 
DeploymentStatus.Finished.name) } .when() .basicAuthAdmin() @@ -130,11 +157,11 @@ class DeploymentApiHttpServiceBusinessSpec .verifyApplicationState { checkDeploymentStatusNameMatches( secondDeploymentId, - SimpleDeploymentStatus.DuringDeploy.name, - SimpleDeploymentStatus.Running.name, - SimpleDeploymentStatus.Finished.name + DeploymentStatus.DuringDeploy.name, + DeploymentStatus.Running.name, + DeploymentStatus.Finished.name ) - checkDeploymentStatusNameMatches(firstDeploymentId, SimpleDeploymentStatus.Finished.name) + checkDeploymentStatusNameMatches(firstDeploymentId, DeploymentStatus.Finished.name) } } } diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala index 80660b5fad1..aedab7354bb 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/DeploymentApiHttpServiceDeploymentCommentSpec.scala @@ -8,7 +8,7 @@ import org.apache.commons.io.FileUtils import org.scalatest.LoneElement import org.scalatest.freespec.AnyFreeSpecLike import org.scalatest.matchers.should.Matchers -import pl.touk.nussknacker.engine.api.deployment.simple.SimpleDeploymentStatus +import pl.touk.nussknacker.engine.api.deployment.DeploymentStatus import pl.touk.nussknacker.engine.newdeployment.DeploymentId import pl.touk.nussknacker.test.base.it.{NuItTest, WithBatchConfigScenarioHelper} import pl.touk.nussknacker.test.config.{WithBatchDesignerConfig, WithBusinessCaseRestAssuredUsersExtensions} @@ -129,7 +129,7 @@ class DeploymentApiHttpServiceDeploymentCommentSpec .Then() .statusCode(202) .verifyApplicationState { - waitForDeploymentStatusNameMatches(requestedDeploymentId, SimpleDeploymentStatus.Finished.name) + waitForDeploymentStatusNameMatches(requestedDeploymentId, DeploymentStatus.Finished.name) } .verifyExternalState { val resultFile = getLoneFileFromLoneOutputTransactionsSummaryPartitionWithGivenName("date=2024-01-01") diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceBusinessSpec.scala index 1950a53b795..d76084423ca 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceBusinessSpec.scala @@ -592,38 +592,6 @@ class NodesApiHttpServiceBusinessSpec | "fieldName": "numberOfThreads", | "errorType": "SaveAllowed", | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property parallelism", - | "description": "Property parallelism is not known", - | "fieldName": "parallelism", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property checkpointIntervalInSeconds", - | "description": "Property checkpointIntervalInSeconds is not known", - | "fieldName": "checkpointIntervalInSeconds", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property spillStateToDisk", - | "description": "Property spillStateToDisk is not known", - | "fieldName": "spillStateToDisk", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown 
property useAsyncInterpretation", - | "description": "Property useAsyncInterpretation is not known", - | "fieldName": "useAsyncInterpretation", - | "errorType": "SaveAllowed", - | "details": null | } | ], | "validationPerformed": true @@ -646,7 +614,7 @@ class NodesApiHttpServiceBusinessSpec | "numberOfThreads": "1", | "spillStateToDisk": "true", | "environment": "test", - | "useAsyncInterpretation": "1" + | "useAsyncInterpretation": "true" | }, | "metaDataType": "StreamMetaData" | }, @@ -669,38 +637,6 @@ class NodesApiHttpServiceBusinessSpec | "details": null | }, | { - | "typ": "UnknownProperty", - | "message": "Unknown property parallelism", - | "description": "Property parallelism is not known", - | "fieldName": "parallelism", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property checkpointIntervalInSeconds", - | "description": "Property checkpointIntervalInSeconds is not known", - | "fieldName": "checkpointIntervalInSeconds", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property spillStateToDisk", - | "description": "Property spillStateToDisk is not known", - | "fieldName": "spillStateToDisk", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property useAsyncInterpretation", - | "description": "Property useAsyncInterpretation is not known", - | "fieldName": "useAsyncInterpretation", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { | "typ": "ScenarioNameValidationError", | "message": "Invalid scenario name . Only digits, letters, underscore (_), hyphen (-) and space in the middle are allowed", | "description": "Provided scenario name is invalid for this category. 
Please enter valid name using only specified characters.", diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceSecuritySpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceSecuritySpec.scala index 961e2ece404..3c9f22a722c 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceSecuritySpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/NodesApiHttpServiceSecuritySpec.scala @@ -551,38 +551,6 @@ class NodesApiHttpServiceSecuritySpec | "fieldName": "numberOfThreads", | "errorType": "SaveAllowed", | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property parallelism", - | "description": "Property parallelism is not known", - | "fieldName": "parallelism", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property checkpointIntervalInSeconds", - | "description": "Property checkpointIntervalInSeconds is not known", - | "fieldName": "checkpointIntervalInSeconds", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property spillStateToDisk", - | "description": "Property spillStateToDisk is not known", - | "fieldName": "spillStateToDisk", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property useAsyncInterpretation", - | "description": "Property useAsyncInterpretation is not known", - | "fieldName": "useAsyncInterpretation", - | "errorType": "SaveAllowed", - | "details": null | } | ], | "validationPerformed": true @@ -677,38 +645,6 @@ class NodesApiHttpServiceSecuritySpec | "fieldName": "numberOfThreads", | "errorType": "SaveAllowed", | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property parallelism", - | "description": "Property parallelism is not known", - | "fieldName": "parallelism", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property checkpointIntervalInSeconds", - | "description": "Property checkpointIntervalInSeconds is not known", - | "fieldName": "checkpointIntervalInSeconds", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property spillStateToDisk", - | "description": "Property spillStateToDisk is not known", - | "fieldName": "spillStateToDisk", - | "errorType": "SaveAllowed", - | "details": null - | }, - | { - | "typ": "UnknownProperty", - | "message": "Unknown property useAsyncInterpretation", - | "description": "Property useAsyncInterpretation is not known", - | "fieldName": "useAsyncInterpretation", - | "errorType": "SaveAllowed", - | "details": null | } | ], | "validationPerformed": true diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala index ee2da78b5fb..54000bd4925 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ProcessesResourcesSpec.scala @@ -114,7 +114,7 @@ class ProcessesResourcesSpec test("return single process") { createDeployedExampleScenario(processName, category = Category1) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( 
Map(processName.value -> SimpleStateStatus.Running) ) @@ -181,7 +181,7 @@ class ProcessesResourcesSpec test("not allow to archive still running process") { createDeployedExampleScenario(processName, category = Category1) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map(processName.value -> SimpleStateStatus.Running) ) @@ -242,7 +242,7 @@ class ProcessesResourcesSpec test("should not allow to rename deployed process") { createDeployedExampleScenario(processName, category = Category1) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map(processName.value -> SimpleStateStatus.Running) ) @@ -268,7 +268,7 @@ class ProcessesResourcesSpec */ ignore("should not allow to rename process with running state") { createEmptyScenario(processName, category = Category1) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map(processName.value -> SimpleStateStatus.Running) ) @@ -500,7 +500,7 @@ class ProcessesResourcesSpec createDeployedCanceledExampleScenario(secondProcessor, category = Category1) createDeployedExampleScenario(thirdProcessor, category = Category1) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map( secondProcessor.value -> SimpleStateStatus.Canceled, thirdProcessor.value -> SimpleStateStatus.Running @@ -573,6 +573,7 @@ class ProcessesResourcesSpec createProcessRequest(processName, category = Category1, isFragment = false) { code => code shouldBe StatusCodes.Created + forScenarioReturned(processName)(_ => ()) doUpdateProcess(command, processName) { forScenarioReturned(processName) { process => process.history.map(_.size) shouldBe Some(1) @@ -1012,7 +1013,7 @@ class ProcessesResourcesSpec test("should return status for single deployed process") { createDeployedExampleScenario(processName, category = Category1) - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map(processName.value -> SimpleStateStatus.Running) ) @@ -1092,7 +1093,7 @@ class ProcessesResourcesSpec } private def verifyProcessWithStateOnList(expectedName: ProcessName, expectedStatus: Option[StateStatus]): Unit = { - MockableDeploymentManager.configure( + MockableDeploymentManager.configureScenarioStatuses( Map(processName.value -> SimpleStateStatus.Running) ) diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceBusinessSpec.scala index 9f4f42da545..17f3efe6747 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceBusinessSpec.scala @@ -30,7 +30,7 @@ class ScenarioParametersApiHttpServiceBusinessSpec | { | "processingMode": "Unbounded-Stream", | "category": "Category1", - | "engineSetupName": "Flink" + | "engineSetupName": "Mockable" | } | ], | "engineSetupErrors": {} diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceSecuritySpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceSecuritySpec.scala index 7a26f27e76b..7d4eeead531 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceSecuritySpec.scala +++ 
b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/ScenarioParametersApiHttpServiceSecuritySpec.scala @@ -34,12 +34,12 @@ class ScenarioParametersApiHttpServiceSecuritySpec | { | "processingMode": "Unbounded-Stream", | "category": "Category1", - | "engineSetupName": "Flink" + | "engineSetupName": "Mockable" | }, | { | "processingMode": "Unbounded-Stream", | "category": "Category2", - | "engineSetupName": "Flink" + | "engineSetupName": "Mockable" | } | ], | "engineSetupErrors": {} @@ -59,7 +59,7 @@ class ScenarioParametersApiHttpServiceSecuritySpec | { | "processingMode": "Unbounded-Stream", | "category": "Category1", - | "engineSetupName": "Flink" + | "engineSetupName": "Mockable" | } | ], | "engineSetupErrors": {} @@ -122,7 +122,7 @@ class ScenarioParametersApiHttpServiceSecuritySpec | { | "processingMode": "Unbounded-Stream", | "category": "Category1", - | "engineSetupName": "Flink" + | "engineSetupName": "Mockable" | } | ], | "engineSetupErrors": {} diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceBusinessSpec.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceBusinessSpec.scala index f927a527da6..6f6759f3c18 100644 --- a/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceBusinessSpec.scala +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/api/StatisticsApiHttpServiceBusinessSpec.scala @@ -90,9 +90,9 @@ class StatisticsApiHttpServiceBusinessSpec (NodesMin.name, equalTo("2")), (NodesAverage.name, equalTo("2")), (ActiveScenarioCount.name, equalTo("0")), - (UnknownDMCount.name, equalTo("0")), + (UnknownDMCount.name, equalTo("1")), (LiteEmbeddedDMCount.name, equalTo("0")), - (FlinkDMCount.name, equalTo("1")), + (FlinkDMCount.name, equalTo("0")), (LiteK8sDMCount.name, equalTo("0")), (FragmentCount.name, equalTo("0")), (BoundedStreamCount.name, equalTo("0")), diff --git a/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentServiceTest.scala b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentServiceTest.scala new file mode 100644 index 00000000000..060db18490c --- /dev/null +++ b/designer/server/src/test/scala/pl/touk/nussknacker/ui/process/newdeployment/DeploymentServiceTest.scala @@ -0,0 +1,118 @@ +package pl.touk.nussknacker.ui.process.newdeployment + +import org.scalatest.BeforeAndAfterEach +import org.scalatest.funsuite.AnyFunSuite +import org.scalatest.matchers.should.Matchers +import pl.touk.nussknacker.development.manager.MockableDeploymentManagerProvider.MockableDeploymentManager +import pl.touk.nussknacker.engine.api.component.NodesDeploymentData +import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, ProblemDeploymentStatus} +import pl.touk.nussknacker.engine.api.process.ProcessName +import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess +import pl.touk.nussknacker.engine.newdeployment.DeploymentId +import pl.touk.nussknacker.test.base.db.WithHsqlDbTesting +import pl.touk.nussknacker.test.config.WithSimplifiedDesignerConfig.TestProcessingType.Streaming +import pl.touk.nussknacker.test.utils.domain.{ProcessTestData, TestFactory} +import pl.touk.nussknacker.test.utils.scalas.DBIOActionValues +import pl.touk.nussknacker.test.{EitherValuesDetailedMessage, PatientScalaFutures} +import pl.touk.nussknacker.ui.process.deployment.DeploymentManagerDispatcher +import pl.touk.nussknacker.ui.process.processingtype.{ProcessingTypeDataProvider, ValueWithRestriction} 
+import pl.touk.nussknacker.ui.process.repository.DBIOActionRunner +import pl.touk.nussknacker.ui.process.repository.ProcessRepository.CreateProcessAction + +import java.time.{Clock, Instant, ZoneOffset} +import scala.concurrent.ExecutionContext.Implicits.global +import scala.util.Failure + +class DeploymentServiceTest + extends AnyFunSuite + with Matchers + with PatientScalaFutures + with WithHsqlDbTesting + with DBIOActionValues + with EitherValuesDetailedMessage + with BeforeAndAfterEach { + + override protected val dbioRunner: DBIOActionRunner = DBIOActionRunner(testDbRef) + + private val writeScenarioRepository = TestFactory.newWriteProcessRepository(testDbRef, modelVersions = None) + + private val service = { + val clock = Clock.fixed(Instant.ofEpochMilli(0), ZoneOffset.UTC) + val scenarioMetadataRepository = TestFactory.newScenarioMetadataRepository(testDbRef) + new DeploymentService( + scenarioMetadataRepository, + TestFactory.newScenarioGraphVersionService(testDbRef), + TestFactory.newDeploymentRepository(testDbRef, clock), + new DeploymentManagerDispatcher( + ProcessingTypeDataProvider.withEmptyCombinedData( + Map(Streaming.stringify -> ValueWithRestriction.anyUser(new MockableDeploymentManager(modelDataOpt = None))) + ), + TestFactory.newFutureFetchingScenarioRepository(testDbRef) + ), + dbioRunner, + clock + ) + } + + test("request deployment and provide status for it") { + val scenarioName = ProcessName("validScenario") + val scenario = ProcessTestData.validProcessWithName(scenarioName) + saveSampleScenario(scenario) + + val deploymentId = DeploymentId.generate + val user = TestFactory.adminUser() + service + .runDeployment( + RunDeploymentCommand(deploymentId, scenarioName, NodesDeploymentData.empty, user) + ) + .futureValue + .rightValue + + val status = service.getDeploymentStatus(deploymentId)(user).futureValue.rightValue + status.value shouldEqual DeploymentStatus.DuringDeploy + } + + test("deployment which ended up with failure during request should has problem status") { + val scenarioName = ProcessName("scenarioCausingFailure") + val scenario = ProcessTestData.validProcessWithName(scenarioName) + saveSampleScenario(scenario) + val deploymentId = DeploymentId.generate + MockableDeploymentManager.configureDeploymentResults( + Map(deploymentId -> Failure(new Exception("Some failure during deployment"))) + ) + + val user = TestFactory.adminUser() + service + .runDeployment( + RunDeploymentCommand(deploymentId, scenarioName, NodesDeploymentData.empty, user) + ) + .futureValue + .rightValue + + eventually { + val status = service.getDeploymentStatus(deploymentId)(user).futureValue.rightValue + status.value.name shouldEqual ProblemDeploymentStatus.name + } + } + + private def saveSampleScenario(scenario: CanonicalProcess) = { + writeScenarioRepository + .saveNewProcess( + CreateProcessAction( + processName = scenario.name, + category = "fooCategory", + canonicalProcess = scenario, + processingType = Streaming.stringify, + isFragment = false, + forwardedUserName = None + ) + )(TestFactory.adminUser()) + .dbioActionValues + } + + override def beforeEach(): Unit = { + MockableDeploymentManager.clean() + super.beforeEach() + } + +} diff --git a/docs/Changelog.md b/docs/Changelog.md index 92e3abc6c4c..7a1dece8b06 100644 --- a/docs/Changelog.md +++ b/docs/Changelog.md @@ -5,10 +5,11 @@ * [#6053](https://github.com/TouK/nussknacker/pull/6053) Added impersonation mechanism support in Nu API for BasicAuth security module. 
 * [#6008](https://github.com/TouK/nussknacker/pull/6008) Add embedded QuestDB as database for FE statistics.
-* [#5982](https://github.com/TouK/nussknacker/pull/5982) [#6155](https://github.com/TouK/nussknacker/pull/6155) Batch processing mode related improvements:
+* [#5982](https://github.com/TouK/nussknacker/pull/5982) [#6155](https://github.com/TouK/nussknacker/pull/6155) [#6172](https://github.com/TouK/nussknacker/pull/6172) Batch processing mode related improvements:
   * Deployments API returns correct status of deployment instead of returning always the last deployment's status
   * Deployments API returns more information about status of a deployment: problem description and status modification time
   * Status of a deployment is cached on the Designer side - in case of retention of finished job on Flink, status is still returned as FINISHED
+  * Overlapping deployment metrics/counts workaround: Ensure that only one deployment is performed for each scenario at a time
 * [#6121](https://github.com/TouK/nussknacker/pull/6121) Add functionality to reorder columns within the table editor.
 * [#6136](https://github.com/TouK/nussknacker/pull/6136) Add possibility to configure kafka exactly-once delivery for flink.
diff --git a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala
index a0e14ef1dfd..b66b95c7f6b 100644
--- a/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala
+++ b/engine/development/deploymentManager/src/main/scala/pl/touk/nussknacker/development/manager/MockableDeploymentManagerProvider.scala
@@ -5,59 +5,58 @@ import cats.data.ValidatedNel
 import com.typesafe.config.Config
 import io.circe.Json
 import pl.touk.nussknacker.development.manager.MockableDeploymentManagerProvider.MockableDeploymentManager
+import pl.touk.nussknacker.engine.ModelData.BaseModelDataExt
+import pl.touk.nussknacker.engine._
+import pl.touk.nussknacker.engine.api.component.ScenarioPropertyConfig
 import pl.touk.nussknacker.engine.api.definition.{NotBlankParameterValidator, StringParameterEditor}
 import pl.touk.nussknacker.engine.api.deployment._
 import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleProcessStateDefinitionManager, SimpleStateStatus}
 import pl.touk.nussknacker.engine.api.process.{ProcessIdWithName, ProcessName}
-import pl.touk.nussknacker.engine.deployment.{CustomActionDefinition, CustomActionParameter}
-import pl.touk.nussknacker.engine.management.FlinkStreamingPropertiesConfig
+import pl.touk.nussknacker.engine.deployment.{CustomActionDefinition, CustomActionParameter, ExternalDeploymentId}
+import pl.touk.nussknacker.engine.management.{FlinkProcessTestRunner, FlinkStreamingPropertiesConfig}
+import pl.touk.nussknacker.engine.newdeployment.DeploymentId
 import pl.touk.nussknacker.engine.testing.StubbingCommands
 import pl.touk.nussknacker.engine.testmode.TestProcess.TestResults
-import pl.touk.nussknacker.engine._
 
 import java.util.concurrent.atomic.AtomicReference
+import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.Future
 import scala.concurrent.duration.FiniteDuration
+import scala.util.Try
 
 class MockableDeploymentManagerProvider extends DeploymentManagerProvider {
+  import net.ceedubs.ficus.Ficus._
+  import net.ceedubs.ficus.readers.ArbitraryTypeReader._
+
   override def createDeploymentManager(
       modelData: BaseModelData,
       deploymentManagerDependencies: DeploymentManagerDependencies,
       config: Config,
       scenarioStateCacheTTL: Option[FiniteDuration]
   ): ValidatedNel[String, DeploymentManager] =
-    valid(MockableDeploymentManager)
+    valid(new MockableDeploymentManager(Some(modelData)))
 
   override def metaDataInitializer(config: Config): MetaDataInitializer =
     FlinkStreamingPropertiesConfig.metaDataInitializer
 
+  override def scenarioPropertiesConfig(config: Config): Map[String, ScenarioPropertyConfig] =
+    FlinkStreamingPropertiesConfig.properties
+
   override val name: String = "mockable"
+
+  override def engineSetupIdentity(config: Config): Any =
+    config.getAs[String]("id").getOrElse("")
 
 }
 
 object MockableDeploymentManagerProvider {
 
   type ScenarioName = String
 
-  // note: At the moment this manager cannot be used in tests which are executed in parallel. It can be obviously
-  // improved, but there is no need to do it ATM.
-  object MockableDeploymentManager extends DeploymentManager with StubbingCommands {
+  class MockableDeploymentManager(modelDataOpt: Option[BaseModelData]) extends DeploymentManager with StubbingCommands {
 
-    private val scenarioStatuses = new AtomicReference[Map[ScenarioName, StateStatus]](Map.empty)
-    private val testResults      = new AtomicReference[Map[ScenarioName, TestResults[Json]]](Map.empty)
-
-    def configure(scenarioStates: Map[ScenarioName, StateStatus]): Unit = {
-      scenarioStatuses.set(scenarioStates)
-    }
-
-    def configureTestResults(scenarioTestResults: Map[ScenarioName, TestResults[Json]]): Unit = {
-      testResults.set(scenarioTestResults)
-    }
-
-    def clean(): Unit = {
-      scenarioStatuses.set(Map.empty)
-      testResults.set(Map.empty)
-    }
+    private lazy val testRunnerOpt =
+      modelDataOpt.map(modelData => new FlinkProcessTestRunner(modelData.asInvokableModelData))
 
     override def resolve(
       idWithName: ProcessIdWithName,
@@ -102,21 +101,29 @@ object MockableDeploymentManagerProvider {
     override def getProcessStates(name: ProcessName)(
         implicit freshnessPolicy: DataFreshnessPolicy
     ): Future[WithDataFreshnessStatus[List[StatusDetails]]] = {
-      val status = scenarioStatuses.get().getOrElse(name.value, SimpleStateStatus.NotDeployed)
+      val status = MockableDeploymentManager.scenarioStatuses.get().getOrElse(name.value, SimpleStateStatus.NotDeployed)
       Future.successful(WithDataFreshnessStatus.fresh(List(StatusDetails(status, None))))
     }
 
     override def processCommand[Result](command: DMScenarioCommand[Result]): Future[Result] = {
       command match {
-        case DMTestScenarioCommand(scenarioName, _, _) =>
-          Future.successful {
-            testResults
-              .get()
-              .getOrElse(
-                scenarioName.value,
-                throw new IllegalArgumentException(s"Tests results not mocked for scenario [${scenarioName.value}]")
-              )
+        case DMRunDeploymentCommand(_, deploymentData, _, _) =>
+          Future {
+            deploymentData.deploymentId.toNewDeploymentIdOpt
+              .flatMap(MockableDeploymentManager.deploymentResults.get().get)
+              .flatMap(_.get)
           }
+        case DMTestScenarioCommand(scenarioName, scenario, testData) =>
+          MockableDeploymentManager.testResults
+            .get()
+            .get(scenarioName.value)
+            .map(Future.successful)
+            .orElse(testRunnerOpt.map(_.test(scenario, testData)))
+            .getOrElse(
+              throw new IllegalArgumentException(
+                s"Tests results not mocked for scenario [${scenarioName.value}] and no model data provided"
+              )
+            )
         case other =>
          super.processCommand(other)
       }
@@ -127,4 +134,33 @@ object MockableDeploymentManagerProvider {
     override def close(): Unit = {}
 
   }
+  // note: At the moment this manager cannot be used in tests which are executed in parallel. It can be obviously
+  // improved, but there is no need to do it ATM.
+  object MockableDeploymentManager {
+
+    private val scenarioStatuses = new AtomicReference[Map[ScenarioName, StateStatus]](Map.empty)
+    private val testResults = new AtomicReference[Map[ScenarioName, TestResults[Json]]](Map.empty)
+    private val deploymentResults =
+      new AtomicReference[Map[DeploymentId, Try[Option[ExternalDeploymentId]]]](Map.empty)
+
+    def configureScenarioStatuses(scenarioStates: Map[ScenarioName, StateStatus]): Unit = {
+      MockableDeploymentManager.scenarioStatuses.set(scenarioStates)
+    }
+
+    def configureDeploymentResults(deploymentResults: Map[DeploymentId, Try[Option[ExternalDeploymentId]]]): Unit = {
+      MockableDeploymentManager.deploymentResults.set(deploymentResults)
+    }
+
+    def configureTestResults(scenarioTestResults: Map[ScenarioName, TestResults[Json]]): Unit = {
+      MockableDeploymentManager.testResults.set(scenarioTestResults)
+    }
+
+    def clean(): Unit = {
+      MockableDeploymentManager.scenarioStatuses.set(Map.empty)
+      MockableDeploymentManager.deploymentResults.set(Map.empty)
+      MockableDeploymentManager.testResults.set(Map.empty)
+    }
+
+  }
+
 }
diff --git a/engine/flink/components-utils/src/main/scala/pl/touk/nussknacker/engine/flink/util/exception/DefaultExceptionConsumers.scala b/engine/flink/components-utils/src/main/scala/pl/touk/nussknacker/engine/flink/util/exception/DefaultExceptionConsumers.scala
index 01a35a14010..ac0f3298d85 100644
--- a/engine/flink/components-utils/src/main/scala/pl/touk/nussknacker/engine/flink/util/exception/DefaultExceptionConsumers.scala
+++ b/engine/flink/components-utils/src/main/scala/pl/touk/nussknacker/engine/flink/util/exception/DefaultExceptionConsumers.scala
@@ -26,7 +26,7 @@ case class BrieflyLoggingExceptionConsumer(processMetaData: MetaData, params: Ma
     with LazyLoggingWithTraces {
 
   override def consume(e: NuExceptionInfo[NonTransientException]): Unit = {
-    warnWithDebugStack(
+    logger.warnWithDebugStack(
      s"${processMetaData.name}: Exception: ${e.throwable.getMessage} (${e.throwable.getClass.getName}), params: $params",
      e.throwable
    )
diff --git a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala
index 1f6aa68ab4f..c050f1d5777 100644
--- a/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala
+++ b/engine/flink/management/src/main/scala/pl/touk/nussknacker/engine/management/FlinkRestManager.scala
@@ -5,7 +5,7 @@ import com.typesafe.scalalogging.LazyLogging
 import org.apache.flink.api.common.JobStatus
 import pl.touk.nussknacker.engine.api.ProcessVersion
 import pl.touk.nussknacker.engine.api.deployment._
-import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleDeploymentStatus, SimpleStateStatus}
+import pl.touk.nussknacker.engine.api.deployment.simple.SimpleStateStatus
 import pl.touk.nussknacker.engine.api.process.{ProcessId, ProcessName, VersionId}
 import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
 import pl.touk.nussknacker.engine.deployment.{DeploymentId, ExternalDeploymentId}
@@ -93,16 +93,16 @@ class FlinkRestManager(
   // NOTE: Flink <1.10 compatibility - protected to make it easier to work with Flink 1.9, JobStatus changed package, so we use String in case class
   protected def mapJobStatus(overview: JobOverview): DeploymentStatus = {
     toJobStatus(overview) match {
-      case JobStatus.RUNNING if ensureTasksRunning(overview) => SimpleDeploymentStatus.Running
-      case s if checkDuringDeployForNotRunningJob(s) => SimpleDeploymentStatus.DuringDeploy
-      case JobStatus.FINISHED => SimpleDeploymentStatus.Finished
-      case JobStatus.RESTARTING => SimpleDeploymentStatus.Restarting
-      case JobStatus.CANCELED => SimpleDeploymentStatus.Canceled
-      case JobStatus.CANCELLING => SimpleDeploymentStatus.DuringCancel
+      case JobStatus.RUNNING if ensureTasksRunning(overview) => DeploymentStatus.Running
+      case s if checkDuringDeployForNotRunningJob(s) => DeploymentStatus.DuringDeploy
+      case JobStatus.FINISHED => DeploymentStatus.Finished
+      case JobStatus.RESTARTING => DeploymentStatus.Restarting
+      case JobStatus.CANCELED => DeploymentStatus.Canceled
+      case JobStatus.CANCELLING => DeploymentStatus.DuringCancel
       // The job is not technically running, but should be in a moment
-      case JobStatus.RECONCILING | JobStatus.CREATED | JobStatus.SUSPENDED => SimpleDeploymentStatus.Running
+      case JobStatus.RECONCILING | JobStatus.CREATED | JobStatus.SUSPENDED => DeploymentStatus.Running
       case JobStatus.FAILING | JobStatus.FAILED =>
-        SimpleDeploymentStatus.Problem.Failed // redeploy allowed, handle with restartStrategy
+        DeploymentStatus.Problem.Failed // redeploy allowed, handle with restartStrategy
       case _ =>
         throw new IllegalStateException() // TODO: drop support for Flink 1.11 & inline `checkDuringDeployForNotRunningJob` so we could benefit from pattern matching exhaustive check
     }
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala
index 3721d4dbac1..05682c5ac9f 100644
--- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/requestresponse/RequestResponseDeploymentStrategy.scala
@@ -11,8 +11,7 @@ import com.typesafe.config.Config
 import com.typesafe.scalalogging.LazyLogging
 import pl.touk.nussknacker.engine.ModelData
 import pl.touk.nussknacker.engine.api.context.ProcessCompilationError.FatalUnknownError
-import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, StateStatus}
-import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleDeploymentStatus, SimpleStateStatus}
+import pl.touk.nussknacker.engine.api.deployment.DeploymentStatus
 import pl.touk.nussknacker.engine.api.process.ProcessName
 import pl.touk.nussknacker.engine.api.{JobData, MetaData, RequestResponseMetaData}
 import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
@@ -118,7 +117,7 @@ class RequestResponseDeploymentStrategy(httpConfig: HttpBindingConfig, config: R
 class RequestResponseDeployment(path: String, interpreter: RequestResponseRunnableScenarioInterpreter)
     extends Deployment {
 
-  override def status(): DeploymentStatus = SimpleDeploymentStatus.Running
+  override def status(): DeploymentStatus = DeploymentStatus.Running
 
   override def close(): Unit = {
     slugToScenarioRoute.remove(path)
diff --git a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala
index fd8282a033a..f762e6ea85d 100644
--- a/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala
+++ b/engine/lite/embeddedDeploymentManager/src/main/scala/pl/touk/nussknacker/engine/embedded/streaming/StreamingDeploymentStrategy.scala
@@ -1,8 +1,7 @@
 package pl.touk.nussknacker.engine.embedded.streaming
 
 import com.typesafe.scalalogging.LazyLogging
-import pl.touk.nussknacker.engine.api.deployment.{DeploymentStatus, StateStatus}
-import pl.touk.nussknacker.engine.api.deployment.simple.{SimpleDeploymentStatus, SimpleStateStatus}
+import pl.touk.nussknacker.engine.api.deployment.DeploymentStatus
 import pl.touk.nussknacker.engine.api.{JobData, LiteStreamMetaData, ProcessVersion}
 import pl.touk.nussknacker.engine.canonicalgraph.CanonicalProcess
 import pl.touk.nussknacker.engine.embedded.{Deployment, DeploymentStrategy}
@@ -57,9 +56,9 @@ class StreamingDeploymentStrategy extends DeploymentStrategy with LazyLogging {
 class StreamingDeployment(interpreter: KafkaTransactionalScenarioInterpreter) extends Deployment {
 
   override def status(): DeploymentStatus = interpreter.status() match {
-    case TaskStatus.Running => SimpleDeploymentStatus.Running
-    case TaskStatus.DuringDeploy => SimpleDeploymentStatus.DuringDeploy
-    case TaskStatus.Restarting => SimpleDeploymentStatus.Restarting
+    case TaskStatus.Running => DeploymentStatus.Running
+    case TaskStatus.DuringDeploy => DeploymentStatus.DuringDeploy
+    case TaskStatus.Restarting => DeploymentStatus.Restarting
     case other => throw new IllegalStateException(s"Not supporter task status: $other")
   }
 
diff --git a/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/logging/LazyLoggingWithTraces.scala b/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/logging/LazyLoggingWithTraces.scala
index 2e7dcfb131a..ced173d8f24 100644
--- a/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/logging/LazyLoggingWithTraces.scala
+++ b/utils/utils/src/main/scala/pl/touk/nussknacker/engine/util/logging/LazyLoggingWithTraces.scala
@@ -1,23 +1,28 @@
 package pl.touk.nussknacker.engine.util.logging
 
-import com.typesafe.scalalogging.LazyLogging
+import com.typesafe.scalalogging.{LazyLogging, Logger}
 
 //sometimes we want to log on high level (e.g. WARN/ERROR) to find them easily but show stacktraces only at DEBUG level
 trait LazyLoggingWithTraces extends LazyLogging {
 
-  def infoWithDebugStack(msg: => String, ex: Throwable): Unit = {
-    logger.debug(msg, ex)
-    logger.info(msg)
-  }
+  implicit class LoggerExtension(logger: Logger) {
 
-  def warnWithDebugStack(msg: => String, ex: Throwable): Unit = {
-    logger.debug(msg, ex)
-    logger.warn(msg)
-  }
+    def warnWithDebugStack(msg: => String, ex: Throwable): Unit = {
+      if (logger.underlying.isDebugEnabled()) {
+        logger.debug(msg, ex)
+      } else {
+        logger.warn(msg)
+      }
+    }
+
+    def debugWithTraceStack(msg: => String, ex: Throwable): Unit = {
+      if (logger.underlying.isTraceEnabled()) {
+        logger.trace(msg, ex)
+      } else {
+        logger.debug(msg)
+      }
+    }
 
-  def errorWithDebugStack(msg: => String, ex: Throwable): Unit = {
-    logger.debug(msg, ex)
-    logger.error(msg)
   }
 
 }
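
A minimal usage sketch of the reworked logging helpers (the caller class below is hypothetical and not part of this patch): after this change, warnWithDebugStack and debugWithTraceStack are extension methods on the underlying Logger rather than inherited methods of LazyLoggingWithTraces, so call sites go through `logger.`, as BrieflyLoggingExceptionConsumer does above.

    import pl.touk.nussknacker.engine.util.logging.LazyLoggingWithTraces

    // Hypothetical caller, only to illustrate the new call style (not part of this patch).
    class SampleFailureHandler extends LazyLoggingWithTraces {

      def handle(e: Throwable): Unit = {
        // With the reworked helper the stack trace is logged at DEBUG when that level is enabled;
        // otherwise a single WARN line is emitted without the stack.
        logger.warnWithDebugStack(s"Operation failed: ${e.getMessage}", e)
      }

    }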