Merge branch 'master' into add_sysv_init_functions
elasticmachine authored Aug 5, 2020
2 parents 6e941f9 + 3fb77fb commit a890585
Showing 41,573 changed files with 2,917,487 additions and 1,370,459 deletions.
The diff you're trying to view is too large. We only load the first 3000 changed files.
32 changes: 30 additions & 2 deletions .backportrc.json
@@ -1,5 +1,33 @@
{
"upstream": "elastic/kibana",
"branches": [{ "name": "7.x", "checked": true }, "7.4", "7.3", "7.2", "7.1", "7.0", "6.8", "6.7", "6.6", "6.5", "6.4", "6.3", "6.2", "6.1", "6.0", "5.6"],
"labels": ["backport"]
"targetBranchChoices": [
{ "name": "master", "checked": true },
{ "name": "7.x", "checked": true },
"7.9",
"7.8",
"7.7",
"7.6",
"7.5",
"7.4",
"7.3",
"7.2",
"7.1",
"7.0",
"6.8",
"6.7",
"6.6",
"6.5",
"6.4",
"6.3",
"6.2",
"6.1",
"6.0",
"5.6"
],
"targetPRLabels": ["backport"],
"branchLabelMapping": {
"^v8.0.0$": "master",
"^v7.10.0$": "7.x",
"^v(\\d+).(\\d+).\\d+$": "$1.$2"
}
}
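
The new "branchLabelMapping" block lets backport tooling derive a target branch from a PR's version label: "v8.0.0" and "v7.10.0" are pinned to master and 7.x, and the catch-all pattern maps any other vX.Y.Z label to the X.Y branch through its capture groups. The plain-Groovy sketch below only illustrates that resolution rule; it is not how the backport tool itself is implemented, and resolveTargetBranch plus the sample labels are hypothetical.

// Illustrative only: how a label would resolve against branchLabelMapping.
def branchLabelMapping = [
  '^v8.0.0$'              : 'master',
  '^v7.10.0$'             : '7.x',
  '^v(\\d+).(\\d+).\\d+$' : '$1.$2',
]

def resolveTargetBranch(String label, Map mapping) {
  // First pattern that matches the whole label wins; apply its $1/$2 substitution.
  def entry = mapping.find { pattern, replacement -> label ==~ pattern }
  return entry ? label.replaceAll(entry.key, entry.value) : null
}

assert resolveTargetBranch('v8.0.0', branchLabelMapping) == 'master'
assert resolveTargetBranch('v7.10.0', branchLabelMapping) == '7.x'
assert resolveTargetBranch('v7.9.2', branchLabelMapping) == '7.9'
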
14 changes: 11 additions & 3 deletions .browserslistrc
@@ -1,3 +1,11 @@
last 2 versions
> 5%
Safari 7 # for PhantomJS support: https://github.com/elastic/kibana/issues/27136
[production]
last 2 Firefox versions
last 2 Chrome versions
last 2 Safari versions
> 0.25%
not ie 11

[dev]
last 1 chrome versions
last 1 firefox versions
last 1 safari versions
38 changes: 38 additions & 0 deletions .ci/Dockerfile
@@ -0,0 +1,38 @@
# NOTE: This Dockerfile is ONLY used to run certain tasks in CI. It is not used to run Kibana or as a distributable.
# If you're looking for the Kibana Docker image distributable, please see: src/dev/build/tasks/os_packages/docker_generator/templates/dockerfile.template.ts

ARG NODE_VERSION=10.21.0

FROM node:${NODE_VERSION} AS base

RUN apt-get update && \
apt-get -y install xvfb gconf-service libasound2 libatk1.0-0 libc6 libcairo2 libcups2 \
libdbus-1-3 libexpat1 libfontconfig1 libgcc1 libgconf-2-4 libgdk-pixbuf2.0-0 libglib2.0-0 \
libgtk-3-0 libnspr4 libpango-1.0-0 libpangocairo-1.0-0 libstdc++6 libx11-6 libx11-xcb1 libxcb1 \
libxcomposite1 libxcursor1 libxdamage1 libxext6 libxfixes3 libxi6 libxrandr2 libxrender1 libxss1 \
libxtst6 ca-certificates fonts-liberation libappindicator1 libnss3 lsb-release xdg-utils wget openjdk-8-jre && \
rm -rf /var/lib/apt/lists/*

RUN curl -sSL https://dl.google.com/linux/linux_signing_key.pub | apt-key add - \
&& sh -c 'echo "deb [arch=amd64] http://dl.google.com/linux/chrome/deb/ stable main" >> /etc/apt/sources.list.d/google.list' \
&& apt-get update \
&& apt-get install -y rsync jq bsdtar google-chrome-stable \
--no-install-recommends \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

RUN LATEST_VAULT_RELEASE=$(curl -s https://api.github.com/repos/hashicorp/vault/tags | jq --raw-output .[0].name[1:]) \
&& curl -L https://releases.hashicorp.com/vault/${LATEST_VAULT_RELEASE}/vault_${LATEST_VAULT_RELEASE}_linux_amd64.zip -o vault.zip \
&& unzip vault.zip \
&& rm vault.zip \
&& chmod +x vault \
&& mv vault /usr/local/bin/vault

RUN groupadd -r kibana && useradd -r -g kibana kibana && mkdir /home/kibana && chown kibana:kibana /home/kibana

COPY ./bash_standard_lib.sh /usr/local/bin/bash_standard_lib.sh
RUN chmod +x /usr/local/bin/bash_standard_lib.sh

COPY ./runbld /usr/local/bin/runbld
RUN chmod +x /usr/local/bin/runbld

USER kibana
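
One slightly cryptic step above: the Vault install queries the GitHub tags API and uses jq's .[0].name[1:] slice to take the first tag in the response and strip its leading 'v' before assembling the download URL. A tiny Groovy sketch of just that slicing step (the response shape and tag values are made up):

// Illustrative only: what .[0].name[1:] pulls out of the tags response.
def tags = [[name: 'v1.5.0'], [name: 'v1.4.3']]   // hypothetical API payload
def latestRelease = tags[0].name[1..-1]           // drop the leading 'v'
assert latestRelease == '1.5.0'
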
32 changes: 32 additions & 0 deletions .ci/Jenkinsfile_baseline_capture
@@ -0,0 +1,32 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load()

kibanaPipeline(timeoutMinutes: 120) {
  githubCommitStatus.trackBuild(params.commit, 'kibana-ci-baseline') {
    ciStats.trackBuild {
      catchError {
        withEnv([
          'CI_PARALLEL_PROCESS_NUMBER=1'
        ]) {
          parallel([
            'oss-visualRegression': {
              workers.ci(name: 'oss-visualRegression', size: 's-highmem', ramDisk: true) {
                kibanaPipeline.functionalTestProcess('oss-visualRegression', './test/scripts/jenkins_visual_regression.sh')()
              }
            },
            'xpack-visualRegression': {
              workers.ci(name: 'xpack-visualRegression', size: 's-highmem', ramDisk: true) {
                kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')()
              }
            },
          ])
        }
      }

      kibanaPipeline.sendMail()
      slackNotifications.onFailure()
    }
  }
}
70 changes: 70 additions & 0 deletions .ci/Jenkinsfile_baseline_trigger
@@ -0,0 +1,70 @@
#!/bin/groovy

def MAXIMUM_COMMITS_TO_CHECK = 10
def MAXIMUM_COMMITS_TO_BUILD = 5

if (!params.branches_yaml) {
error "'branches_yaml' parameter must be specified"
}

def additionalBranches = []

def branches = readYaml(text: params.branches_yaml) + additionalBranches

library 'kibana-pipeline-library'
kibanaLibrary.load()

withGithubCredentials {
  branches.each { branch ->
    if (branch == '6.8') {
      // skip 6.8, it is tracked but we don't need snapshots for it and haven't backported
      // the baseline capture scripts to it.
      return;
    }

    stage(branch) {
      def commits = getCommits(branch, MAXIMUM_COMMITS_TO_CHECK, MAXIMUM_COMMITS_TO_BUILD)

      commits.take(MAXIMUM_COMMITS_TO_BUILD).each { commit ->
        catchErrors {
          githubCommitStatus.create(commit, 'pending', 'Baseline started.', 'kibana-ci-baseline')

          build(
            propagate: false,
            wait: false,
            job: 'elastic+kibana+baseline-capture',
            parameters: [
              string(name: 'branch_specifier', value: branch),
              string(name: 'commit', value: commit),
            ]
          )
        }
      }
    }
  }
}

def getCommits(String branch, maximumCommitsToCheck, maximumCommitsToBuild) {
print "Getting latest commits for ${branch}..."
def commits = githubApi.get("repos/elastic/kibana/commits?sha=${branch}").take(maximumCommitsToCheck).collect { it.sha }
def commitsToBuild = []

for (commit in commits) {
print "Getting statuses for ${commit}"
def status = githubApi.get("repos/elastic/kibana/statuses/${commit}").find { it.context == 'kibana-ci-baseline' }
print "Commit '${commit}' already built? ${status ? 'Yes' : 'No'}"

if (!status) {
commitsToBuild << commit
} else {
// Stop at the first commit we find that's already been triggered
break
}

if (commitsToBuild.size() >= maximumCommitsToBuild) {
break
}
}

return commitsToBuild.reverse() // We want the builds to trigger oldest-to-newest
}
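
getCommits() above scans a branch newest-to-oldest, keeps commits that have no 'kibana-ci-baseline' status yet, stops as soon as it hits one that was already triggered, and reverses the result so builds get scheduled oldest-to-newest. A minimal plain-Groovy sketch of that selection rule, using made-up commit ids rather than real GitHub API responses:

// Illustrative only: the selection rule from getCommits(), with fake data.
def newestFirst  = ['c5', 'c4', 'c3', 'c2', 'c1']    // as returned by the commits API
def alreadyBuilt = ['c2', 'c1'] as Set                // commits that already carry a baseline status
def maximumCommitsToBuild = 5

def commitsToBuild = []
for (commit in newestFirst) {
  if (alreadyBuilt.contains(commit)) {
    break                                             // first already-built commit ends the scan
  }
  commitsToBuild << commit
  if (commitsToBuild.size() >= maximumCommitsToBuild) {
    break
  }
}

assert commitsToBuild.reverse() == ['c3', 'c4', 'c5'] // oldest-to-newest, ready to trigger
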
52 changes: 52 additions & 0 deletions .ci/Jenkinsfile_coverage
@@ -0,0 +1,52 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load() // load from the Jenkins instance

kibanaPipeline(timeoutMinutes: 240) {
  catchErrors {
    def timestamp = new Date(currentBuild.startTimeInMillis).format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone("UTC"))
    withEnv([
      "TIME_STAMP=${timestamp}",
      'CODE_COVERAGE=1', // Enables coverage. Needed for multiple ci scripts, such as remote.ts, test/scripts/*.sh, schema.js, etc.
    ]) {
      workers.base(name: 'coverage-worker', size: 'l', ramDisk: false, bootstrapped: false) {
        catchError {
          kibanaCoverage.runTests()
          kibanaTeamAssign.load('team_assignment', "### Upload Team Assignment JSON")
          handleIngestion(TIME_STAMP)
        }
        handleFail()
      }
    }
    kibanaPipeline.sendMail()
  }
}

def handleIngestion(timestamp) {
  def previousSha = handlePreviousSha()
  kibanaPipeline.downloadCoverageArtifacts()
  kibanaCoverage.prokLinks("### Process HTML Links")
  kibanaCoverage.collectVcsInfo("### Collect VCS Info")
  kibanaCoverage.generateReports("### Merge coverage reports")
  kibanaCoverage.uploadCombinedReports()
  kibanaCoverage.ingest(env.JOB_NAME, BUILD_NUMBER, BUILD_URL, timestamp, previousSha, '### Ingest && Upload')
  kibanaCoverage.uploadCoverageStaticSite(timestamp)
}

def handlePreviousSha() {
  def previous = kibanaCoverage.downloadPrevious('### Download OLD Previous')
  kibanaCoverage.uploadPrevious('### Upload NEW Previous')
  return previous
}

def handleFail() {
  def buildStatus = buildUtils.getBuildStatus()
  if(params.NOTIFY_ON_FAILURE && buildStatus != 'SUCCESS' && buildStatus != 'ABORTED' && buildStatus != 'UNSTABLE') {
    slackNotifications.sendFailedBuild(
      channel: '#kibana-qa',
      username: 'Kibana QA'
    )
  }
}
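
For reference, the TIME_STAMP value exported near the top of this pipeline is the build's start time rendered as a second-resolution UTC timestamp. A standalone sketch of the same formatting call, with an arbitrary epoch value standing in for currentBuild.startTimeInMillis:

// Illustrative only: the TIME_STAMP format, reproduced outside Jenkins.
def startTimeInMillis = 1596585600000L   // arbitrary example value
def timestamp = new Date(startTimeInMillis).format("yyyy-MM-dd'T'HH:mm:ss'Z'", TimeZone.getTimeZone("UTC"))
assert timestamp == '2020-08-05T00:00:00Z'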

143 changes: 143 additions & 0 deletions .ci/Jenkinsfile_flaky
@@ -0,0 +1,143 @@
#!/bin/groovy

library 'kibana-pipeline-library'
kibanaLibrary.load()

def CI_GROUP_PARAM = params.CI_GROUP

// Looks like 'oss:ciGroup:1', 'oss:firefoxSmoke', or 'all:serverMocha'
def JOB_PARTS = CI_GROUP_PARAM.split(':')
def IS_XPACK = JOB_PARTS[0] == 'xpack'
def JOB = JOB_PARTS[1]
def NEED_BUILD = JOB != 'serverMocha'
def CI_GROUP = JOB_PARTS.size() > 2 ? JOB_PARTS[2] : ''
def EXECUTIONS = params.NUMBER_EXECUTIONS.toInteger()
def AGENT_COUNT = getAgentCount(EXECUTIONS)

def worker = getWorkerFromParams(IS_XPACK, JOB, CI_GROUP)

def workerFailures = []

currentBuild.displayName += trunc(" ${params.GITHUB_OWNER}:${params.branch_specifier}", 24)
currentBuild.description = "${params.CI_GROUP}<br />Agents: ${AGENT_COUNT}<br />Executions: ${params.NUMBER_EXECUTIONS}"

kibanaPipeline(timeoutMinutes: 180) {
  def agents = [:]
  for(def agentNumber = 1; agentNumber <= AGENT_COUNT; agentNumber++) {
    def agentNumberInside = agentNumber
    def agentExecutions = floor(EXECUTIONS/AGENT_COUNT) + (agentNumber <= EXECUTIONS%AGENT_COUNT ? 1 : 0)
    agents["agent-${agentNumber}"] = {
      catchErrors {
        print "Agent ${agentNumberInside} - ${agentExecutions} executions"

        workers.functional('flaky-test-runner', {
          if (NEED_BUILD) {
            if (!IS_XPACK) {
              kibanaPipeline.buildOss()
              if (CI_GROUP == '1') {
                runbld("./test/scripts/jenkins_build_kbn_sample_panel_action.sh", "Build kbn tp sample panel action for ciGroup1")
              }
            } else {
              kibanaPipeline.buildXpack()
            }
          }
        }, getWorkerMap(agentNumberInside, agentExecutions, worker, workerFailures))()
      }
    }
  }

  parallel(agents)

  currentBuild.description += ", Failures: ${workerFailures.size()}"

  if (workerFailures.size() > 0) {
    print "There were ${workerFailures.size()} test suite failures."
    print "The executions that failed were:"
    print workerFailures.join("\n")
    print "Please check 'Test Result' and 'Pipeline Steps' pages for more info"
  }
}

def getWorkerFromParams(isXpack, job, ciGroup) {
  if (!isXpack) {
    if (job == 'serverMocha') {
      return kibanaPipeline.functionalTestProcess('serverMocha', {
        kibanaPipeline.bash(
          """
            source src/dev/ci_setup/setup_env.sh
            node scripts/mocha
          """,
          "run `node scripts/mocha`"
        )
      })
    } else if (job == 'accessibility') {
      return kibanaPipeline.functionalTestProcess('kibana-accessibility', './test/scripts/jenkins_accessibility.sh')
    } else if (job == 'firefoxSmoke') {
      return kibanaPipeline.functionalTestProcess('firefoxSmoke', './test/scripts/jenkins_firefox_smoke.sh')
    } else if(job == 'visualRegression') {
      return kibanaPipeline.functionalTestProcess('visualRegression', './test/scripts/jenkins_visual_regression.sh')
    } else {
      return kibanaPipeline.ossCiGroupProcess(ciGroup)
    }
  }

  if (job == 'accessibility') {
    return kibanaPipeline.functionalTestProcess('xpack-accessibility', './test/scripts/jenkins_xpack_accessibility.sh')
  } else if (job == 'firefoxSmoke') {
    return kibanaPipeline.functionalTestProcess('xpack-firefoxSmoke', './test/scripts/jenkins_xpack_firefox_smoke.sh')
  } else if(job == 'visualRegression') {
    return kibanaPipeline.functionalTestProcess('xpack-visualRegression', './test/scripts/jenkins_xpack_visual_regression.sh')
  } else {
    return kibanaPipeline.xpackCiGroupProcess(ciGroup)
  }
}

def getWorkerMap(agentNumber, numberOfExecutions, worker, workerFailures, maxWorkerProcesses = 12) {
  def workerMap = [:]
  def numberOfWorkers = Math.min(numberOfExecutions, maxWorkerProcesses)

  for(def i = 1; i <= numberOfWorkers; i++) {
    def workerExecutions = floor(numberOfExecutions/numberOfWorkers + (i <= numberOfExecutions%numberOfWorkers ? 1 : 0))

    workerMap["agent-${agentNumber}-worker-${i}"] = { workerNumber ->
      for(def j = 0; j < workerExecutions; j++) {
        print "Execute agent-${agentNumber} worker-${workerNumber}: ${j}"
        withEnv([
          "REMOVE_KIBANA_INSTALL_DIR=1",
        ]) {
          catchErrors {
            try {
              worker(workerNumber)
            } catch (ex) {
              workerFailures << "agent-${agentNumber} worker-${workerNumber}-${j}"
              throw ex
            }
          }
        }
      }
    }
  }

  return workerMap
}

def getAgentCount(executions) {
  // Increase agent count every 24 worker processes, up to 3 agents maximum
  return Math.min(3, 1 + floor(executions/24))
}

def trunc(str, length) {
  if (str.size() >= length) {
    return str.take(length) + "..."
  }

  return str;
}

// All of the real rounding/truncating methods are sandboxed
def floor(num) {
  return num
    .toString()
    .split('\\.')[0]
    .toInteger()
}
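
Worked example of the scheduling arithmetic above: getAgentCount() adds an agent for roughly every 24 requested executions (capped at three), and each agent then receives floor(EXECUTIONS/AGENT_COUNT) executions plus one extra while the remainder lasts. The standalone sketch below reproduces that math outside the pipeline (assumption: plain Groovy, no Jenkins steps); for instance, 50 requested executions land on 3 agents as 17/17/16.

// Illustrative only: reproduces the agent/execution split for EXECUTIONS = 50.
def floor(num) {
  return num.toString().split('\\.')[0].toInteger()
}

def getAgentCount(executions) {
  return Math.min(3, 1 + floor(executions/24))
}

def executions = 50
def agentCount = getAgentCount(executions)    // min(3, 1 + floor(50/24)) == 3
def perAgent = (1..agentCount).collect { agentNumber ->
  floor(executions/agentCount) + (agentNumber <= executions%agentCount ? 1 : 0)
}

assert agentCount == 3
assert perAgent == [17, 17, 16]               // remainder of 2 spread over the first two agents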