Revert "Allow pytest to return success when no tests are collected fo…
Browse files Browse the repository at this point in the history
…r flaky test suite (DataDog#17990)"

This reverts commit dd5dd64.
vivek-datadog authored and ravindrasojitra-crest committed Aug 5, 2024
1 parent 941a8c3 commit 7d34fe4
Showing 2 changed files with 7 additions and 108 deletions.
87 changes: 6 additions & 81 deletions .github/workflows/test-target.yml
@@ -212,79 +212,23 @@ jobs:
if: inputs.standard && !inputs.minimum-base-package
env:
DDEV_TEST_ENABLE_TRACING: "${{ inputs.repo == 'core' && '1' || '0' }}"
-run: |
-if [ "${{ inputs.pytest-args }}" = "-m flaky" ]; then
-set +e # Disable immediate exit
-ddev test --cov --junit ${{ inputs.target }} -- ${{ inputs.pytest-args }}
-exit_code=$?
-if [ $exit_code -eq 5 ]; then
-# Flaky test count can be zero, this is done to avoid pipeline failure
-echo "No tests were collected."
-exit 0
-else
-exit $exit_code
-fi
-else
-ddev test --cov --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}
-fi
+run: ddev test --cov --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}

- name: Run Unit & Integration tests with minimum version of base package
if: inputs.standard && inputs.minimum-base-package
-run: |
-if [ "${{ inputs.pytest-args }}" = "-m flaky" ]; then
-set +e # Disable immediate exit
-ddev test --compat --recreate --junit ${{ inputs.target }} -- ${{ inputs.pytest-args }}
-exit_code=$?
-if [ $exit_code -eq 5 ]; then
-# Flaky test count can be zero, this is done to avoid pipeline failure
-echo "No tests were collected."
-exit 0
-else
-exit $exit_code
-fi
-else
-ddev test --compat --recreate --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}
-fi
+run: ddev test --compat --recreate --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}

- name: Run E2E tests with latest base package
if: inputs.standard && inputs.repo == 'core' && !inputs.minimum-base-package
env:
DD_API_KEY: "${{ secrets.DD_API_KEY }}"
-run: |
-if [ "${{ inputs.pytest-args }}" = "-m flaky" ]; then
-set +e # Disable immediate exit
-ddev env test --base --new-env --junit ${{ inputs.target }} -- ${{ inputs.pytest-args }}
-exit_code=$?
-if [ $exit_code -eq 5 ]; then
-# Flaky test count can be zero, this is done to avoid pipeline failure
-echo "No tests were collected."
-exit 0
-else
-exit $exit_code
-fi
-else
-ddev env test --base --new-env --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}
-fi
+run: ddev env test --base --new-env --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- all {0}', inputs.pytest-args) || '' }}

- name: Run E2E tests
if: inputs.standard && inputs.repo != 'core'
env:
DD_API_KEY: "${{ secrets.DD_API_KEY }}"
-run: |
-if [ "${{ inputs.pytest-args }}" = "-m flaky" ]; then
-set +e # Disable immediate exit
-ddev env test --new-env --junit ${{ inputs.target }} -- ${{ inputs.pytest-args }}
-exit_code=$?
-if [ $exit_code -eq 5 ]; then
-# Flaky test count can be zero, this is done to avoid pipeline failure
-echo "No tests were collected."
-exit 0
-else
-exit $exit_code
-fi
-else
-ddev env test --new-env --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}
-fi
+run: ddev env test --new-env --junit ${{ inputs.target }} ${{ inputs.pytest-args != '' && format('-- all {0}', inputs.pytest-args) || '' }}

- name: Run benchmarks
if: inputs.benchmark
@@ -299,21 +243,7 @@ jobs:
env:
DD_API_KEY: "${{ secrets.DD_API_KEY }}"
DDEV_TEST_ENABLE_TRACING: "${{ inputs.repo == 'core' && '1' || '0' }}"
-run: |
-if [ "${{ inputs.pytest-args }}" = "-m flaky" ]; then
-set +e # Disable immediate exit
-ddev env test --base --new-env --junit ${{ inputs.target }}:latest -- ${{ inputs.pytest-args }}
-exit_code=$?
-if [ $exit_code -eq 5 ]; then
-# Flaky test count can be zero, this is done to avoid pipeline failure
-echo "No tests were collected."
-exit 0
-else
-exit $exit_code
-fi
-else
-ddev env test --base --new-env --junit ${{ inputs.target }}:latest ${{ inputs.pytest-args != '' && format('-- {0}', inputs.pytest-args) || '' }}
-fi
+run: ddev env test --base --new-env --junit ${{ inputs.target }}:latest ${{ inputs.pytest-args != '' && format('-- all {0}', inputs.pytest-args) || '' }}

- name: View trace log
if: inputs.repo == 'core' && always()
@@ -342,12 +272,7 @@ jobs:
path: "${{ env.TEST_RESULTS_BASE_DIR }}"

- name: Upload coverage data
-if: >
-inputs.standard &&
-!github.event.repository.private &&
-always() &&
-inputs.pytest-args != '-m flaky'
-# Flaky tests will have low coverage, don't upload it to avoid pipeline failure
+if: inputs.standard && !github.event.repository.private && always()
uses: codecov/codecov-action@v4
with:
token: ${{ secrets.CODECOV_TOKEN }}
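For context, pytest exits with status code 5 when it collects no tests. The shell wrappers removed above (lines prefixed with "-") treated that status as success so that a '-m flaky' run matching zero tests would not fail the pipeline; after this revert, such a run fails the step again. The following standalone Python sketch illustrates the same idea. It is an illustration only, not code from this repository, and the run_flaky_suite helper name is hypothetical.

import subprocess
import sys

import pytest  # provides the ExitCode enum; ExitCode.NO_TESTS_COLLECTED == 5


def run_flaky_suite(pytest_args: list[str]) -> int:
    """Run pytest, treating the 'no tests collected' status as success."""
    result = subprocess.run([sys.executable, "-m", "pytest", *pytest_args])
    if result.returncode == pytest.ExitCode.NO_TESTS_COLLECTED:
        # Zero flaky tests is a legitimate outcome, so do not fail the job.
        print("No tests were collected.")
        return 0
    return result.returncode


if __name__ == "__main__":
    sys.exit(run_flaky_suite(["-m", "flaky"]))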
28 changes: 1 addition & 27 deletions ddev/src/ddev/cli/test/__init__.py
@@ -199,33 +199,7 @@ def test(
base_command.append('--memray')

if e2e:
-# Convert pytest_args to a list if it's a tuple
-pytest_args_list = list(pytest_args) if isinstance(pytest_args, tuple) else pytest_args
-
-# Initialize a list to hold indices of '-m' options and their values to be removed
-indices_to_remove = []
-marker_values = []
-
-# Iterate over pytest_args_list to find '-m' or '--markers' options and their values
-for i, arg in enumerate(pytest_args_list):
-if arg in ('-m', '--markers') and i + 1 < len(pytest_args_list):
-indices_to_remove.extend([i, i + 1])
-marker_values.append(pytest_args_list[i + 1])
-
-# Reverse sort indices_to_remove to avoid index shifting issues during removal
-indices_to_remove.sort(reverse=True)
-
-# Remove the '-m' options and their values from pytest_args_list
-for index in indices_to_remove:
-pytest_args_list.pop(index)
-
-# After removing the '-m' options and their values
-# Convert the modified pytest_args_list back to a tuple
-pytest_args = tuple(pytest_args_list)
-
-# Construct the combined marker expression with extracted marker values and 'e2e'
-combined_marker = " and ".join(marker_values) + " and e2e" if marker_values else "e2e"
-base_command.extend(('-m', combined_marker))
+base_command.extend(('-m', 'e2e'))
global_env_vars[EndToEndEnvVars.PARENT_PYTHON] = sys.executable

app.display_debug(f'Targets: {", ".join(targets)}')
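For context, the block removed above stripped any user-supplied '-m' / '--markers' options from the pytest arguments and folded their values into the 'e2e' marker expression, so '-m flaky' became '-m "flaky and e2e"'; after the revert, the E2E command always appends '-m e2e' and leaves the user's arguments untouched. The following standalone sketch shows the removed combination logic in isolation. It is an illustration only; combine_markers is a hypothetical name, not a ddev function.

def combine_markers(pytest_args: tuple[str, ...]) -> tuple[tuple[str, ...], str]:
    """Drop -m/--markers options from pytest_args and fold their values into 'e2e'."""
    args = list(pytest_args)
    marker_values = []
    remaining = []
    skip_next = False
    for i, arg in enumerate(args):
        if skip_next:
            # Skip the value that belonged to the previous -m/--markers option.
            skip_next = False
            continue
        if arg in ('-m', '--markers') and i + 1 < len(args):
            marker_values.append(args[i + 1])
            skip_next = True
        else:
            remaining.append(arg)
    combined = ' and '.join(marker_values + ['e2e']) if marker_values else 'e2e'
    return tuple(remaining), combined


# Before the revert: the marker expression passed to pytest was 'flaky and e2e'.
print(combine_markers(('-m', 'flaky', '-k', 'test_foo')))
# -> (('-k', 'test_foo'), 'flaky and e2e')
# After the revert: the command always receives ('-m', 'e2e') regardless of input.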
