Merge pull request #321 from nilo19/fix-hollow-nodes-count
Fix calculating ready hollow nodes and add more logs.
feiskyer authored Mar 24, 2020
2 parents 91d52c7 + 42b0cc7 commit 30d3e37
Showing 1 changed file with 6 additions and 2 deletions.

tests/kubemark/build-kubemark.sh

@@ -220,8 +220,8 @@ while :
 do
     total_retry+=1
     none_count=$(kubectl get no | awk '{print $3}' | grep -c "<none>")
-    node_count=$(kubectl get no | grep -c "hollow")
-    if [ "${node_count}" -eq "${KUBEMARK_SIZE}" ] && [ "${none_count}" -eq 0 ]; then
+    node_count=$(kubectl get no | grep "hollow" | awk '{print $2}' | grep -c "^Ready$")
+    if [ "${node_count}" -eq "${KUBEMARK_SIZE}" ] && [ "${none_count}" -eq 0 ]; then
         break
     else
         echo "there're ${node_count} ready hollow nodes, ${none_count} <none> nodes, will retry after 10 seconds"
@@ -234,6 +234,8 @@ do
     fi
 done
 
+echo "all hollow nodes are ready, starting test with clusterloader2"
+
 export KUBE_CONFIG="${WORKING_DIR}/_output/${KUBEMARK_CLUSTER_DNS_PREFIX}/kubeconfig/kubeconfig.${LOCATION}.json"
 
 # Test by clusterloader2
@@ -257,6 +259,7 @@ echo "fetching all test configs"
 git clone https://github.com/kubernetes-sigs/cloud-provider-azure.git
 cp -r cloud-provider-azure/tests/kubemark/configs "${WORKING_DIR}"
 
+echo "configuring clusterloader2"
 # Clusterloader2 testing strategy config paths
 # It supports setting up multiple test strategies. Each testing strategy is individual and serial.
 TEST_CONFIG="${TEST_CONFIG:-${WORKING_DIR}/configs/density/config.yaml}"
@@ -275,6 +278,7 @@ if [ ! -d "${REPORT_DIR}" ]; then
     echo "report directory created"
 fi
 
+echo "downloading clusterloader2"
 curl -o clusterloader2 "${CLUSTERLOADER2_BIN_URL}"
 CLUSTERLOADER2="${WORKING_DIR}/clusterloader2"
 chmod +x "${CLUSTERLOADER2}"
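
For context on the fix itself: the old pipeline counted every node whose line contains "hollow", whether or not it was Ready, while the new pipeline keeps only the hollow nodes, extracts the STATUS column, and counts exact "Ready" matches (so NotReady nodes are excluded). A minimal, self-contained sketch of the difference; the node names, roles, and statuses below are invented for illustration:

    #!/usr/bin/env bash
    # Hypothetical `kubectl get no` listing: two hollow nodes, only one of them Ready.
    sample='NAME                    STATUS     ROLES    AGE   VERSION
    hollow-node-abcde       Ready      agent    5m    v1.18.0
    hollow-node-fghij       NotReady   agent    5m    v1.18.0
    k8s-master-10000000-0   Ready      master   30m   v1.18.0'

    # Old count: any line containing "hollow", ready or not -> 2
    old_count=$(echo "${sample}" | grep -c "hollow")

    # New count: hollow nodes only, STATUS column, exact "Ready" -> 1
    new_count=$(echo "${sample}" | grep "hollow" | awk '{print $2}' | grep -c "^Ready$")

    echo "old=${old_count}, new=${new_count}"   # prints: old=2, new=1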

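The remaining hunks only add progress logs around fetching the test configs and the clusterloader2 binary; the actual test invocation sits in the collapsed part of the script. For orientation, a minimal sketch of how a downloaded clusterloader2 binary is commonly run with the variables this script prepares (flag names are clusterloader2's standard ones; the exact command in build-kubemark.sh may differ):

    # Run the configured test strategy against the kubemark cluster (illustrative only).
    "${CLUSTERLOADER2}" \
        --provider=kubemark \
        --kubeconfig="${KUBE_CONFIG}" \
        --testconfig="${TEST_CONFIG}" \
        --report-dir="${REPORT_DIR}"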