
Commit

Revert "chore(dependencies): Upgrade Spring Boot to 2.2.1 (spinnaker#3333)"

This reverts commit df71ed9.
louisjimenez committed Dec 20, 2019
1 parent 53a3faa commit 783e7f9
Showing 21 changed files with 41 additions and 149 deletions.
8 changes: 4 additions & 4 deletions gradle.properties
@@ -1,7 +1,7 @@
-#Tue Dec 17 19:07:50 UTC 2019
-fiatVersion=1.11.0
+#Tue Dec 10 20:06:41 UTC 2019
+fiatVersion=1.9.2
enablePublishing=false
-korkVersion=7.3.0
spinnakerGradleVersion=7.0.1
-keikoVersion=3.0.0
+korkVersion=6.22.2
org.gradle.parallel=true
+keikoVersion=2.14.2
@@ -18,7 +18,6 @@ package com.netflix.spinnaker.orca.pipeline.model

import com.fasterxml.jackson.annotation.JsonAnyGetter
import com.fasterxml.jackson.annotation.JsonAnySetter
import com.fasterxml.jackson.annotation.JsonProperty
import com.fasterxml.jackson.databind.annotation.JsonDeserialize
import com.netflix.spinnaker.kork.artifacts.model.Artifact
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact
@@ -32,11 +31,8 @@ interface Trigger {
val parameters: Map<String, Any>
val artifacts: List<Artifact>
val notifications: List<Map<String, Any>>
@get:JsonProperty("rebake")
var isRebake: Boolean
@get:JsonProperty("dryRun")
var isDryRun: Boolean
@get:JsonProperty("strategy")
var isStrategy: Boolean
var resolvedExpectedArtifacts: List<ExpectedArtifact>
@set:JsonAnySetter
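
The upgrade being reverted had annotated these is-prefixed Kotlin properties with @get:JsonProperty to pin their serialized names; the revert drops the annotations along with the now-unused import. A minimal sketch, not code from this repository and shown in plain Java, of what such an annotation does for an is-prefixed boolean accessor:

    // Minimal sketch, not repository code: @JsonProperty fixes the JSON field name
    // for an "is"-prefixed boolean accessor, independent of whatever name the
    // active Jackson version would otherwise infer for it.
    import com.fasterxml.jackson.annotation.JsonProperty;
    import com.fasterxml.jackson.databind.ObjectMapper;

    class TriggerLike {
      private boolean rebake = true;

      @JsonProperty("rebake")
      public boolean isRebake() {
        return rebake;
      }
    }

    public class JsonPropertyDemo {
      public static void main(String[] args) throws Exception {
        // Prints {"rebake":true}
        System.out.println(new ObjectMapper().writeValueAsString(new TriggerLike()));
      }
    }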
@@ -21,7 +21,6 @@
import static java.util.stream.Collectors.toList;
import static org.apache.commons.lang3.StringUtils.isEmpty;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.kork.artifacts.model.Artifact;
@@ -229,14 +228,14 @@ public ArtifactResolver(

public void resolveArtifacts(@Nonnull Map pipeline) {
Map<String, Object> trigger = (Map<String, Object>) pipeline.get("trigger");

List<?> originalExpectedArtifacts =
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts")).orElse(emptyList());

List<ExpectedArtifact> expectedArtifacts =
originalExpectedArtifacts.stream()
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.collect(toList());
Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
.map(
list ->
list.stream()
.map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
.collect(toList()))
.orElse(emptyList());

List<Artifact> receivedArtifactsFromPipeline =
Optional.ofNullable((List<?>) pipeline.get("receivedArtifacts"))
@@ -289,41 +288,12 @@ public void resolveArtifacts(@Nonnull Map pipeline) {
objectMapper.readValue(
objectMapper.writeValueAsString(expectedArtifacts),
List.class)); // Add the actual expectedArtifacts we included in the ids.

updateExpectedArtifacts(originalExpectedArtifacts, expectedArtifacts);
} catch (IOException e) {
throw new ArtifactResolutionException(
"Failed to store artifacts in trigger: " + e.getMessage(), e);
}
}

private void updateExpectedArtifacts(
List<?> originalExpectedArtifacts, List<ExpectedArtifact> updatedExpectedArtifacts)
throws JsonProcessingException {

for (Object artifact : originalExpectedArtifacts) {
if (artifact instanceof ExpectedArtifact) {
ExpectedArtifact ea = (ExpectedArtifact) artifact;
ea.setBoundArtifact(
findExpectedArtifactById(updatedExpectedArtifacts, ea.getId()).getBoundArtifact());
} else {
Map<String, Object> ea = (Map<String, Object>) artifact;
ea.put(
"boundArtifact",
objectMapper.readValue(
objectMapper.writeValueAsString(
findExpectedArtifactById(updatedExpectedArtifacts, (String) ea.get("id"))
.getBoundArtifact()),
Map.class));
}
}
}

private ExpectedArtifact findExpectedArtifactById(
List<ExpectedArtifact> expectedArtifacts, String id) {
return expectedArtifacts.stream().filter(it -> id.equals(it.getId())).findFirst().get();
}

private List<Artifact> getPriorArtifacts(final Map<String, Object> pipeline) {
// set pageSize to a single record to avoid hydrating all of the stored Executions for
// the pipeline, since getArtifactsForPipelineId only uses the most recent Execution from the
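
The revert also restores the earlier shape of resolveArtifacts, which converts the raw expectedArtifacts list into typed objects inside a single Optional chain instead of keeping the original list and back-filling bound artifacts through the updateExpectedArtifacts helper this commit removes. A standalone sketch of that conversion pattern, with stand-in types rather than the real pipeline model:

    // Standalone sketch of the Optional-plus-stream conversion pattern; the
    // pipeline map and the ExpectedArtifact stand-in below are illustrative only.
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.HashMap;
    import java.util.List;
    import java.util.Map;
    import java.util.Optional;
    import static java.util.Collections.emptyList;
    import static java.util.stream.Collectors.toList;

    public class ConvertDemo {
      static class ExpectedArtifact {
        public String id;
      }

      public static void main(String[] args) {
        ObjectMapper objectMapper = new ObjectMapper();
        Map<String, Object> pipeline = new HashMap<>();
        pipeline.put("expectedArtifacts", List.of(Map.of("id", "artifact1")));

        // A missing or null "expectedArtifacts" entry falls through to an empty
        // list; otherwise every element is converted to the typed model.
        List<ExpectedArtifact> expectedArtifacts =
            Optional.ofNullable((List<?>) pipeline.get("expectedArtifacts"))
                .map(list -> list.stream()
                    .map(it -> objectMapper.convertValue(it, ExpectedArtifact.class))
                    .collect(toList()))
                .orElse(emptyList());

        System.out.println(expectedArtifacts.get(0).id); // prints artifact1
      }
    }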
@@ -462,10 +462,10 @@ class ArtifactResolverSpec extends Specification {
bound.findAll({ a -> expectedBound.contains(a) }).size() == bound.size()
}

def "resolveArtifacts sets the bound artifact on an expected artifact when the expectedArtifact is ExpectedArtifact"() {
def "resolveArtifacts sets the bound artifact on an expected artifact"() {
given:
def matchArtifact = Artifact.builder().type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def receivedArtifact = Artifact.builder().name("my-artifact").type("docker/image").build()
def pipeline = [
id: "abc",
@@ -483,31 +483,10 @@
pipeline.expectedArtifacts[0].boundArtifact == receivedArtifact
}

def "resolveArtifacts sets the bound artifact on an expected artifact when the expectedArtifact is Map<String, Object>"() {
given:
def matchArtifact = Artifact.builder().type("docker/.*").build()
def expectedArtifact = toMap(ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build())
def receivedArtifact = toMap(Artifact.builder().name("my-artifact").type("docker/image").build())
def pipeline = [
id: "abc",
trigger: [:],
expectedArtifacts: [expectedArtifact],
receivedArtifacts: [receivedArtifact],
]
def artifactResolver = makeArtifactResolver()

when:
artifactResolver.resolveArtifacts(pipeline)

then:
pipeline.expectedArtifacts.size() == 1
pipeline.expectedArtifacts[0].boundArtifact == receivedArtifact
}

def "resolveArtifacts adds received artifacts to the trigger, skipping duplicates"() {
given:
def matchArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def receivedArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/image").build()
def triggerArtifact = Artifact.builder().name("my-trigger-artifact").type("docker/image").build()
def bothArtifact = Artifact.builder().name("my-both-artifact").type("docker/image").build()
@@ -533,7 +512,7 @@
def "resolveArtifacts is idempotent"() {
given:
def matchArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/.*").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).id("id1").build()
def expectedArtifact = ExpectedArtifact.builder().matchArtifact(matchArtifact).build()
def receivedArtifact = Artifact.builder().name("my-pipeline-artifact").type("docker/image").build()
def triggerArtifact = Artifact.builder().name("my-trigger-artifact").type("docker/image").build()
def bothArtifact = Artifact.builder().name("my-both-artifact").type("docker/image").build()
@@ -560,8 +539,4 @@
private List<Artifact> extractTriggerArtifacts(Map<String, Object> trigger) {
return objectMapper.convertValue(trigger.artifacts, new TypeReference<List<Artifact>>(){});
}

private Map<String, Object> toMap(Object value) {
return objectMapper.convertValue(value, Map.class)
}
}
@@ -161,7 +161,7 @@ class ContextParameterProcessorSpec extends Specification {
result.test == source.test
summary[escapedExpression].size() == 1
summary[escapedExpression][0].level as String == ExpressionEvaluationSummary.Result.Level.ERROR.name()
summary[escapedExpression][0].exceptionType == SpelEvaluationException.typeName
summary[escapedExpression][0].exceptionType == SpelEvaluationException

where:
testCase | desc
@@ -184,7 +184,7 @@
result.test == source.test
summary[escapedExpression].size() == 1
summary[escapedExpression][0].level as String == ExpressionEvaluationSummary.Result.Level.ERROR.name()
summary[escapedExpression][0].exceptionType == SpelEvaluationException.typeName
summary[escapedExpression][0].exceptionType == SpelEvaluationException

where:
testCase | desc
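
The only change in these two assertions is what the recorded exceptionType is compared against: the SpelEvaluationException class itself rather than its fully-qualified type name. A tiny standalone sketch of that distinction, not repository code:

    // Tiny sketch, not repository code: comparing against the Class object versus
    // against the fully-qualified type name string.
    import org.springframework.expression.spel.SpelEvaluationException;

    public class ExceptionTypeDemo {
      public static void main(String[] args) {
        Class<?> recorded = SpelEvaluationException.class;
        // Class comparison, as in the restored assertion.
        System.out.println(recorded == SpelEvaluationException.class); // true
        // Type-name comparison, as in the reverted upgrade's assertion.
        System.out.println(
            "org.springframework.expression.spel.SpelEvaluationException"
                .equals(recorded.getTypeName())); // true
      }
    }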
@@ -175,7 +175,7 @@ class DependentPipelineStarterSpec extends Specification {
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
@@ -235,7 +234,6 @@
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
@@ -305,7 +303,6 @@
name : "triggered",
id : "triggered",
expectedArtifacts: [[
id: "id1",
matchArtifact: [
kind: "gcs",
name: "gs://test/file.yaml",
@@ -17,6 +17,7 @@

import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.netflix.spinnaker.kork.artifacts.model.ExpectedArtifact;
import com.netflix.spinnaker.orca.front50.PipelineModelMutator;
import com.netflix.spinnaker.orca.pipelinetemplate.exceptions.TemplateLoaderException;
import com.netflix.spinnaker.orca.pipelinetemplate.loader.TemplateLoader;
@@ -28,7 +29,6 @@
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.RenderContext;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.RenderUtil;
import com.netflix.spinnaker.orca.pipelinetemplate.v1schema.render.Renderer;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -128,6 +128,7 @@ private void applyConfigurationsFromTemplate(
}
}

@SuppressWarnings("unchecked")
private void applyConfigurations(
PipelineConfiguration configuration, Map<String, Object> pipeline) {
if (configuration.getConcurrentExecutions() != null) {
@@ -164,7 +165,7 @@ private void applyConfigurations(
TemplateMerge.mergeDistinct(
pipelineTemplateObjectMapper.convertValue(
pipeline.get("expectedArtifacts"),
new TypeReference<List<HashMap<String, Object>>>() {}),
new TypeReference<List<ExpectedArtifact>>() {}),
configuration.getExpectedArtifacts()));
}
}
@@ -123,7 +123,8 @@ public List<V2StageDefinition> getStages() {
return Collections.emptyList();
}
ObjectMapper oj = new ObjectMapper();
return oj.convertValue(pipelineStages, new TypeReference<List<V2StageDefinition>>() {});
TypeReference v2StageDefTypeRef = new TypeReference<List<V2StageDefinition>>() {};
return oj.convertValue(pipelineStages, v2StageDefTypeRef);
}

public void setStages(List<V2StageDefinition> stages) {
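
The restored getStages keeps the same Jackson conversion but assigns the TypeReference to a raw local variable first. Either way, the TypeReference is what tells Jackson to rebuild a fully typed List rather than a list of maps. A small sketch with a stand-in element type, not repository code:

    // Small sketch with a stand-in StageDef type: converting loosely typed stage
    // maps into a typed List via Jackson's TypeReference.
    import com.fasterxml.jackson.core.type.TypeReference;
    import com.fasterxml.jackson.databind.ObjectMapper;
    import java.util.List;
    import java.util.Map;

    public class TypeReferenceDemo {
      static class StageDef {
        public String name;
      }

      public static void main(String[] args) {
        List<Map<String, Object>> pipelineStages = List.of(Map.of("name", "deploy"));
        ObjectMapper oj = new ObjectMapper();
        // Without the TypeReference the element type is erased and each entry
        // would remain a LinkedHashMap instead of becoming a StageDef.
        List<StageDef> stages =
            oj.convertValue(pipelineStages, new TypeReference<List<StageDef>>() {});
        System.out.println(stages.get(0).name); // prints deploy
      }
    }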
@@ -67,44 +67,6 @@ class TemplatedPipelineModelMutatorSpec extends Specification {
0 * subject.applyConfigurationsFromTemplate(_, _, pipeline)
}

def "should merge expectedArtifacts when configured to inherit them"() {
given:
def pipeline = [
config: [
schema: '1',
pipeline: [
template: [
source: 'static-template'
]
],
configuration: [
inherit: ['expectedArtifacts'],
expectedArtifacts: [
[
id: 'artifact1'
] as NamedHashMap
]
]
]
]

when:
subject.mutate(pipeline)

then:
1 * templateLoader.load(_) >> { [new PipelineTemplate(
schema: '1',
configuration: new Configuration(
expectedArtifacts: [
[
id: 'artifact2'
] as NamedHashMap
]
)
)]}
pipeline.expectedArtifacts.size() == 2
}

def "should apply configurations from template if template is statically sourced"() {
given:
def pipeline = [
@@ -40,7 +40,7 @@ import org.springframework.context.annotation.Configuration
import org.springframework.context.annotation.Primary
import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisCluster
import redis.clients.jedis.util.Pool
import redis.clients.util.Pool
import java.time.Clock
import java.util.Optional

@@ -40,7 +40,7 @@ import redis.clients.jedis.HostAndPort
import redis.clients.jedis.Jedis
import redis.clients.jedis.JedisCluster
import redis.clients.jedis.Protocol
import redis.clients.jedis.util.Pool
import redis.clients.util.Pool
import java.net.URI
import java.time.Clock
import java.time.Duration
@@ -21,7 +21,7 @@ import com.fasterxml.jackson.module.kotlin.readValue
import com.netflix.spinnaker.orca.q.pending.PendingExecutionService
import com.netflix.spinnaker.q.Message
import redis.clients.jedis.Jedis
import redis.clients.jedis.util.Pool
import redis.clients.util.Pool

class RedisPendingExecutionService(
private val pool: Pool<Jedis>,
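
These import changes all track where Pool<Jedis> lives: redis.clients.util in the Jedis 2.x line this revert pins, versus redis.clients.jedis.util in Jedis 3.x. A minimal usage sketch against the 2.x package; the localhost address and key are made up for illustration:

    // Minimal sketch assuming Jedis 2.x on the classpath, as pinned by this revert;
    // under Jedis 3.x the Pool import becomes redis.clients.jedis.util.Pool.
    import redis.clients.jedis.Jedis;
    import redis.clients.jedis.JedisPool;
    import redis.clients.util.Pool;

    public class PoolDemo {
      public static void main(String[] args) {
        Pool<Jedis> pool = new JedisPool("localhost", 6379);
        try (Jedis jedis = pool.getResource()) {
          jedis.set("greeting", "hello");
          System.out.println(jedis.get("greeting")); // prints hello
        }
        pool.destroy();
      }
    }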
@@ -33,7 +33,7 @@ import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Configuration
import org.springframework.test.context.junit4.SpringRunner
import redis.clients.jedis.Jedis
import redis.clients.jedis.util.Pool
import redis.clients.util.Pool

@Configuration
class RedisTestConfig {
3 changes: 3 additions & 0 deletions orca-redis/orca-redis.gradle
@@ -2,6 +2,9 @@ apply from: "$rootDir/gradle/kotlin.gradle"
apply from: "$rootDir/gradle/spock.gradle"

dependencies {
api("redis.clients:jedis:2.10.2") {
force = true
}
api("com.netflix.spinnaker.kork:kork-jedis")

implementation(project(":orca-core"))
@@ -17,7 +17,7 @@

import java.net.URI;
import redis.clients.jedis.Protocol;
import redis.clients.jedis.util.JedisURIHelper;
import redis.clients.util.JedisURIHelper;

public class RedisConnectionInfo {
public boolean hasPassword() {
@@ -2,10 +2,8 @@

import org.jetbrains.annotations.NotNull;
import redis.clients.jedis.JedisCluster;
import redis.clients.jedis.params.SetParams;

public class RedisClusterNotificationClusterLock implements NotificationClusterLock {

private final JedisCluster cluster;

public RedisClusterNotificationClusterLock(JedisCluster cluster) {
@@ -15,10 +13,6 @@ public RedisClusterNotificationClusterLock(JedisCluster cluster) {
@Override
public boolean tryAcquireLock(@NotNull String notificationType, long lockTimeoutSeconds) {
String key = "lock:" + notificationType;
// assuming lockTimeoutSeconds will be < 2147483647
return "OK"
.equals(
cluster.set(
key, "\uD83D\uDD12", SetParams.setParams().nx().ex((int) lockTimeoutSeconds)));
return "OK".equals(cluster.set(key, "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds));
}
}
@@ -18,7 +18,6 @@
import com.netflix.spinnaker.kork.jedis.RedisClientDelegate;
import com.netflix.spinnaker.kork.jedis.RedisClientSelector;
import javax.annotation.Nonnull;
import redis.clients.jedis.params.SetParams;

public class RedisNotificationClusterLock implements NotificationClusterLock {

@@ -33,14 +32,7 @@ public boolean tryAcquireLock(@Nonnull String notificationType, long lockTimeout
String key = "lock:" + notificationType;
return redisClientDelegate.withCommandsClient(
client -> {
return "OK"
.equals(
client
// assuming lockTimeoutSeconds will be < 2147483647
.set(
key,
"\uD83D\uDD12",
SetParams.setParams().nx().ex((int) lockTimeoutSeconds)));
return "OK".equals(client.set(key, "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds));
});
}
}
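
Both notification-lock classes move back from the Jedis 3.x SetParams builder to the Jedis 2.x overload of SET that takes the NX and EX flags as strings. A hedged sketch of that 2.x call outside the Spinnaker wiring; the key name, address, and timeout are made up:

    // Sketch assuming Jedis 2.x, the version this revert forces: SET with NX and EX
    // passed as string flags. Under Jedis 3.x the equivalent call takes
    // SetParams.setParams().nx().ex((int) lockTimeoutSeconds) instead.
    import redis.clients.jedis.Jedis;

    public class LockDemo {
      public static void main(String[] args) {
        long lockTimeoutSeconds = 30;
        try (Jedis jedis = new Jedis("localhost", 6379)) {
          // "OK" is returned only when the key did not already exist (NX); EX sets
          // a time-to-live in seconds, so the lock expires on its own.
          String reply = jedis.set("lock:example", "\uD83D\uDD12", "NX", "EX", lockTimeoutSeconds);
          System.out.println("OK".equals(reply)); // true if the lock was acquired
        }
      }
    }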
