Skip to content

Commit

Permalink
chore(dependencies): Upgrade Spring Boot to 2.2.1 (#3333)
Browse files Browse the repository at this point in the history
* fix(templated-pipelines-v1): Expected artifacts correctly inherited/merged

TemplateMerge.mergeDistinct() was merging two lists of different types into one. The upcoming Jackson library update would have broken compilation of TemplatedPipelineModelMutator if left unmodified.

* chore(dependencies): Upgrade Spring Boot to 2.2.1

Adjusting code for Jedis 3, Jackson 2.10 upgrades.

* fix(artifacts): Always persisting `pipeline.expectedArtifacts.boundArtifact`

Artifact resolution wasn't always persisting `pipeline.expectedArtifacts.boundArtifact` depending on the expectedArtifact's type when passed to `ArtifactResolver.resolveArtifacts()`.
Only `ExpectedArtifact` instances would get their `boundArtifact` updated; if a Map was used then a copy of that Map would be updated instead, and that copy was never returned to the caller.
The update of Jackson to 2.10 made it so that `boundArtifact` would always be updated on a copy of the passed `expectedArtifact` no matter its type, this commit is an attempt to fix both problems.
A better fix would be to make it clear what type is accepted and expected, but that's a much larger refactor.

* chore(dependencies): Upgrade Spring Boot to 2.2.1 cont'd

* jackson-module-kotlin changed the serialization behaviour for fields starting with 'is': FasterXML/jackson-module-kotlin#80
* jackson 2.10's objectMapper.convertValue() now converts everything even if the source is already of the target type. The fixed test expected a value from a not-fully-converted Map.

* chore(dependencies): bump kork and keiko
  • Loading branch information
Pierre Delagrave authored and mergify[bot] committed Dec 17, 2019
1 parent df82368 commit df71ed9
Show file tree
Hide file tree
Showing 21 changed files with 147 additions and 39 deletions.
4 changes: 2 additions & 2 deletions gradle.properties
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@
fiatVersion=1.9.2
enablePublishing=false
spinnakerGradleVersion=7.0.1
korkVersion=6.22.2
korkVersion=7.2.1
org.gradle.parallel=true
keikoVersion=2.14.2
keikoVersion=3.0.0
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ package com.netflix.spinnaker.orca.pipeline.model

import com.fasterxml.jackson.annotation.JsonAnyGetter
import com.fasterxml.jackson.annotation.JsonAnySetter
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.netflix.spinnaker.kork.artifacts.model.Artifact
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
Expand All @@ -31,8 +32,11 @@ interface Trigger {
val parameters: Map<String, Any>
val artifacts: List<Artifact>
val notifications: List<Map<String, Any>>
@get:JsonProperty("rebake")
var isRebake: Boolean
@get:JsonProperty("dryRun")
var isDryRun: Boolean
@get:JsonProperty("strategy")
var isStrategy: Boolean
var resolvedExpectedArtifacts: List<ExpectedArtifact>
@set:JsonAnySetter
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
import static java.util.stream.Collectors.toList;
import static org.apache.commons.lang3.StringUtils.isEmpty;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.kork.artifacts.model.Artifact;
Expand Down Expand Up @@ -228,14 +229,14 @@ public ArtifactResolver(

public void resolveArtifacts(@Nonnull Map pipeline) {
Map<String, Object> trigger = (Map<String, Object>) pipeline.get("trigger");

List<?> originalExpectedArtifacts =
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts")).orElse(emptyList());

List<ExpectedArtifact> expectedArtifacts =
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
.map(
list ->
list.stream()
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.collect(toList()))
.orElse(emptyList());
originalExpectedArtifacts.stream()
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.collect(toList());

List<Artifact> receivedArtifactsFromPipeline =
Optional.ofNullable((List<?>) pipeline.get("receivedArtifacts"))
Expand Down Expand Up @@ -288,12 +289,41 @@ public void resolveArtifacts(@Nonnull Map pipeline) {
objectMapper.readValue(
objectMapper.writeValueAsString(expectedArtifacts),
List.class)); // Add the actual expectedArtifacts we included in the ids.

updateExpectedArtifacts(originalExpectedArtifacts, expectedArtifacts);
} catch (IOException e) {
throw new ArtifactResolutionException(
"Failed to store artifacts in trigger: " + e.getMessage(), e);
}
}

private void updateExpectedArtifacts(
List<?> originalExpectedArtifacts, List<ExpectedArtifact> updatedExpectedArtifacts)
throws JsonProcessingException {

for (Object artifact : originalExpectedArtifacts) {
if (artifact instanceof ExpectedArtifact) {
ExpectedArtifact ea = (ExpectedArtifact) artifact;
ea.setBoundArtifact(
findExpectedArtifactById(updatedExpectedArtifacts, ea.getId()).getBoundArtifact());
} else {
Map<String, Object> ea = (Map<String, Object>) artifact;
ea.put(
"boundArtifact",
objectMapper.readValue(
objectMapper.writeValueAsString(
findExpectedArtifactById(updatedExpectedArtifacts, (String) ea.get("id"))
.getBoundArtifact()),
Map.class));
}
}
}

private ExpectedArtifact findExpectedArtifactById(
List<ExpectedArtifact> expectedArtifacts, String id) {
return expectedArtifacts.stream().filter(it -> id.equals(it.getId())).findFirst().get();
}

private List<Artifact> getPriorArtifacts(final Map<String, Object> pipeline) {
// set pageSize to a single record to avoid hydrating all of the stored Executions for
// the pipeline, since getArtifactsForPipelineId only uses the most recent Execution from the
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -462,10 +462,10 @@ class ArtifactResolverSpec extends Specification {
bound.findAll({ a -> expectedBound.contains(a) }).size() == bound.size()
}

def "resolveArtifacts sets the bound artifact on an expected artifact"() {
def "resolveArtifacts sets the bound artifact on an expected artifact when the expectedArtifact is ExpectedArtifact"() {
given:
def matchArtifact = Artifact.builder().type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def receivedArtifact = Artifact.builder().name("my-artifact").type("docker/image").build()
def pipeline = [
id: "abc",
Expand All @@ -483,10 +483,31 @@ class ArtifactResolverSpec extends Specification {
pipeline.expectedArtifacts[0].boundArtifact == receivedArtifact
}

  // Regression test for the Jackson 2.10 upgrade: when expectedArtifacts are supplied as plain
  // Maps (rather than ExpectedArtifact objects), resolveArtifacts() must still persist
  // boundArtifact onto the caller's original Map, not onto a converted copy.
  def "resolveArtifacts sets the bound artifact on an expected artifact when the expectedArtifact is Map<String, Object>"() {
    given:
    def matchArtifact = Artifact.builder().type("docker/.*").build()
    // toMap() round-trips through Jackson so the pipeline holds Map-typed artifacts,
    // mirroring how raw pipeline JSON arrives in production
    def expectedArtifact = toMap(ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build())
    def receivedArtifact = toMap(Artifact.builder().name("my-artifact").type("docker/image").build())
    def pipeline = [
      id: "abc",
      trigger: [:],
      expectedArtifacts: [expectedArtifact],
      receivedArtifacts: [receivedArtifact],
    ]
    def artifactResolver = makeArtifactResolver()

    when:
    artifactResolver.resolveArtifacts(pipeline)

    then:
    // the original Map in the pipeline must carry the bound artifact after resolution
    pipeline.expectedArtifacts.size() == 1
    pipeline.expectedArtifacts[0].boundArtifact == receivedArtifact
  }

def "resolveArtifacts adds received artifacts to the trigger, skipping duplicates"() {
given:
def matchArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def receivedArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/image").build()
def triggerArtifact = Artifact.builder().name("my-trigger-artifact").type("docker/image").build()
def bothArtifact = Artifact.builder().name("my-both-artifact").type("docker/image").build()
Expand All @@ -512,7 +533,7 @@ class ArtifactResolverSpec extends Specification {
def "resolveArtifacts is idempotent"() {
given:
def matchArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def receivedArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/image").build()
def triggerArtifact = Artifact.builder().name("my-trigger-artifact").type("docker/image").build()
def bothArtifact = Artifact.builder().name("my-both-artifact").type("docker/image").build()
Expand All @@ -539,4 +560,8 @@ class ArtifactResolverSpec extends Specification {
private List<Artifact> extractTriggerArtifacts(Map<String, Object> trigger) {
return objectMapper.convertValue(trigger.artifacts, new TypeReference<List<Artifact>>(){});
}

private Map<String, Object> toMap(Object value) {
return objectMapper.convertValue(value, Map.class)
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -161,7 +161,7 @@ class ContextParameterProcessorSpec extends Specification {
result.test == source.test
summary[escapedExpression].size() == 1
summary[escapedExpression][0].level as String == ExpressionEvaluationSummary.Result.Level.ERROR.name()
summary[escapedExpression][0].exceptionType == SpelEvaluationException
summary[escapedExpression][0].exceptionType == SpelEvaluationException.typeName

where:
testCase | desc
Expand All @@ -184,7 +184,7 @@ class ContextParameterProcessorSpec extends Specification {
result.test == source.test
summary[escapedExpression].size() == 1
summary[escapedExpression][0].level as String == ExpressionEvaluationSummary.Result.Level.ERROR.name()
summary[escapedExpression][0].exceptionType == SpelEvaluationException
summary[escapedExpression][0].exceptionType == SpelEvaluationException.typeName

where:
testCase | desc
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -175,6 +175,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
Expand Down Expand Up @@ -234,6 +235,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
Expand Down Expand Up @@ -303,6 +305,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,6 @@

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
import com.netflix.spinnaker.orca.front50.PipelineModelMutator;
import com.netflix.spinnaker.orca.pipelinetemplate.exceptions.TemplateLoaderException;
import com.netflix.spinnaker.orca.pipelinetemplate.loader.TemplateLoader;
Expand All @@ -29,6 +28,7 @@
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.RenderContext;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.RenderUtil;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.Renderer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
Expand Down Expand Up @@ -128,7 +128,6 @@ private void applyConfigurationsFromTemplate(
}
}

@SuppressWarnings("unchecked")
private void applyConfigurations(
PipelineConfiguration configuration, Map<String, Object> pipeline) {
if (configuration.getConcurrentExecutions() != null) {
Expand Down Expand Up @@ -165,7 +164,7 @@ private void applyConfigurations(
TemplateMerge.mergeDistinct(
pipelineTemplateObjectMapper.convertValue(
pipeline.get("expectedArtifacts"),
new TypeReference<List<ExpectedArtifact>>() {}),
new TypeReference<List<HashMap<String, Object>>>() {}),
configuration.getExpectedArtifacts()));
}
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -123,8 +123,7 @@ public List<V2StageDefinition> getStages() {
return Collections.emptyList();
}
ObjectMapper oj = new ObjectMapper();
TypeReference v2StageDefTypeRef = new TypeReference<List<V2StageDefinition>>() {};
return oj.convertValue(pipelineStages, v2StageDefTypeRef);
return oj.convertValue(pipelineStages, new TypeReference<List<V2StageDefinition>>() {});
}

public void setStages(List<V2StageDefinition> stages) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,44 @@ class TemplatedPipelineModelMutatorSpec extends Specification {
0 * subject.applyConfigurationsFromTemplate(_, _, pipeline)
}

  // Verifies the v1 templated-pipelines fix: when the configuration inherits
  // 'expectedArtifacts', the template's artifacts and the configuration's artifacts are
  // merged distinctly (both kept), now that both sides are Map-typed rather than
  // ExpectedArtifact-typed (see TemplateMerge.mergeDistinct change in this commit).
  def "should merge expectedArtifacts when configured to inherit them"() {
    given:
    def pipeline = [
      config: [
        schema: '1',
        pipeline: [
          template: [
            source: 'static-template'
          ]
        ],
        configuration: [
          inherit: ['expectedArtifacts'],
          expectedArtifacts: [
            [
              id: 'artifact1'
            ] as NamedHashMap
          ]
        ]
      ]
    ]

    when:
    subject.mutate(pipeline)

    then:
    // template contributes 'artifact2'; configuration contributes 'artifact1'
    1 * templateLoader.load(_) >> { [new PipelineTemplate(
      schema: '1',
      configuration: new Configuration(
        expectedArtifacts: [
          [
            id: 'artifact2'
          ] as NamedHashMap
        ]
      )
    )]}
    // distinct merge: both artifacts survive
    pipeline.expectedArtifacts.size() == 2
  }

def "should apply configurations from template if template is statically sourced"() {
given:
def pipeline = [
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisCluster
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool
import java.time.Clock
import java.util.Optional

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ import redis.clients.jedis.HostAndPort
import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisCluster
import redis.clients.jedis.Protocol
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool
import java.net.URI
import java.time.Clock
import java.time.Duration
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import com.fasterxml.jackson.module.kotlin.readValue
import com.netflix.spinnaker.orca.q.pending.PendingExecutionService
import com.netflix.spinnaker.q.Message
import redis.clients.jedis.Jedis
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool

class RedisPendingExecutionService(
private val pool: Pool<Jedis>,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,7 @@ import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.test.context.junit4.SpringRunner
import redis.clients.jedis.Jedis
import redis.clients.util.Pool
import redis.clients.jedis.util.Pool

@Configuration
class RedisTestConfig {
Expand Down
3 changes: 0 additions & 3 deletions orca-redis/orca-redis.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,6 @@ apply from: "$rootDir/gradle/kotlin.gradle"
apply from: "$rootDir/gradle/spock.gradle"

dependencies {
api("redis.clients:jedis:2.10.2") {
force = true
}
api("com.netflix.spinnaker.kork:kork-jedis")

implementation(project(":orca-core"))
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

import java.net.URI;
import redis.clients.jedis.Protocol;
import redis.clients.util.JedisURIHelper;
import redis.clients.jedis.util.JedisURIHelper;

public class RedisConnectionInfo {
public boolean hasPassword() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,10 @@

import org.jetbrains.annotations.NotNull;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.params.SetParams;

public class RedisClusterNotificationClusterLock implements NotificationClusterLock {

private final JedisCluster cluster;

public RedisClusterNotificationClusterLock(JedisCluster cluster) {
Expand All @@ -13,6 +15,10 @@ public RedisClusterNotificationClusterLock(JedisCluster cluster) {
@Override
public boolean tryAcquireLock(@NotNull String notificationType, long lockTimeoutSeconds) {
String key = "lock:" + notificationType;
return "OK".equals(cluster.set(key, "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds));
// assuming lockTimeoutSeconds will be < 2147483647
return "OK"
.equals(
cluster.set(
key, "\uD83D\uDD12", SetParams.setParams().nx().ex((int) lockTimeoutSeconds)));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import com.netflix.spinnaker.kork.jedis.RedisClientDelegate;
import com.netflix.spinnaker.kork.jedis.RedisClientSelector;
import javax.annotation.Nonnull;
import redis.clients.jedis.params.SetParams;

public class RedisNotificationClusterLock implements NotificationClusterLock {

Expand All @@ -32,7 +33,14 @@ public boolean tryAcquireLock(@Nonnull String notificationType, long lockTimeout
String key = "lock:" + notificationType;
return redisClientDelegate.withCommandsClient(
client -> {
return "OK".equals(client.set(key, "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds));
return "OK"
.equals(
client
// assuming lockTimeoutSeconds will be < 2147483647
.set(
key,
"\uD83D\uDD12",
SetParams.setParams().nx().ex((int) lockTimeoutSeconds)));
});
}
}
Loading

0 comments on commit df71ed9

Please sign in to comment.