# This is a work in progress: the local/CI testing workflow is gradually being transferred to tox.
# Usage instructions:
# `tox` runs all "envlist" tests sequentially; `tox --parallel` runs the same tests in parallel (much faster).
# The "othertests" label should be run sequentially, as running these tests in parallel can lead to segmentation faults
# and other unexpected errors, hence their separation. `tox -m othertests` runs them sequentially.
# Run a specific selection of tests with `tox -e pretest,<list-of-tests>,posttest,kill-servers`, e.g., `tox -e pretest,test-api,test-launcher,posttest,kill-servers`.
# The `--parallel` flag can also be passed when running specific selections, as shown below.
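# For example, the same selection run in parallel: `tox -e pretest,test-api,test-launcher,posttest,kill-servers --parallel`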
# For packaging, build wheels for a specific platform with `tox -e build-wheel -- <platform_name>`.
# If `tox -e build-wheel` is run without passing a platform, tox automatically builds the wheel for the operating system
# it is executing on: windows -> "win_amd64", linux -> "manylinux_2_17_x86_64", mac -> "any".
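# For example, to explicitly build a Windows wheel: `tox -e build-wheel -- win_amd64`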
# For HTML documentation generation, run `tox -e doc-html`. To remove previously generated documentation before building
# new documentation, run `tox -e doc-clean,doc-html`.
# The current tox configuration can automatically detect a DPF server installation in these cases:
# - Unified install
# - ANSYS_DPF_PATH is set and points to a valid DPF server installation
# This means that invoking tox with the previous commands requires a server available via one of the above methods.
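# For example (hypothetical path, adjust to your installation), on Linux: `export ANSYS_DPF_PATH=/path/to/standalone/dpf/server`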
# For development purposes, more flexibility may be desired, and there are various ways of invoking tox to achieve it.
# For example, to use a standalone DPF server (present in the ansys_dpf_server_win_v2025.1.pre0/ directory) in editable mode
# in each tox environment, you can run something like `tox -m othertests -x testenv.deps+="-e ansys_dpf_server_win_v2025.1.pre0"`.
# Consult the tox documentation for a quick overview of the different CLI flags that can be used to customize invocations.
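# For instance, with tox 4, `tox list` prints the available environments with their descriptions.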
[tox]
description = Default tox environment list and core configurations
envlist = pretest,test-{api,launcher,server,local_server,multi_server,api_entry,custom_type_field,operators},posttest,kill-servers
labels =
othertests = pretest,test-{workflow,remote_workflow,remote_operator,service},posttest,kill-servers
ciparalleltests = test-{api,launcher,local_server,multi_server,api_entry,custom_type_field,operators},kill-servers
isolated_build_env = build
[testenv]
description = Default configuration for test environments, unless overridden
uv_seed = true
pass_env =
PACKAGE_NAME
MODULE
ANSYS_DPF_ACCEPT_LA
ANSYSLMD_LICENSE_FILE
AWP_ROOT*
ANSYS_DPF_PATH
deps =
-r requirements/requirements_test.txt
[testenv:build-wheel]
description = Environment for custom build of package wheels
skip_install = True
deps =
-r requirements/requirements_build.txt
commands =
python .ci/build_wheel.py -p {posargs:{on_platform}} -w
[testenv:kill-servers]
description = Environment for clearing running servers
depends = test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}
deps =
psutil
commands_pre =
skip_install = True
commands =
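# Kill any lingering Ans.Dpf.Grpc server processes and print how many were terminated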
python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
print(f'Killed \{nb_procs} \{proc_name} processes.')"
[testenv:pretest]
description = Environment to organize test files prior to testing
skip_install = True
deps =
commands_pre =
commands =
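# For each test module, create a dedicated directory, copy tests/conftest.py and the test file into it, then remove the original file from tests/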
python -c "\
import os, shutil; \
test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service','test_custom_type_field']; \
[(os.makedirs(d, exist_ok=True), shutil.copy('tests/conftest.py', d), shutil.copy(f'tests/\{d}.py', d) if os.path.exists(f'tests/\{d}.py') else None) for d in test_data]; \
[os.remove(f'tests/\{d}.py') for d in test_data if os.path.exists(f'tests/\{d}.py')]"
[testenv:posttest]
description = Environment to revert test files to original state after testing
depends = pretest, test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}
skip_install = True
deps =
commands_pre =
commands =
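# Move each test file back into tests/ and delete the per-test directories created by pretest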
python -c "\
import os, shutil; \
test_data=['test_launcher','test_server','test_local_server','test_multi_server','test_workflow','test_remote_workflow','test_remote_operator','test_service', 'test_custom_type_field']; \
[shutil.move(f'\{d}/\{d}.py', f'tests/\{d}.py') for d in test_data if os.path.exists(f'\{d}/\{d}.py')]; \
[shutil.rmtree(d) for d in test_data if os.path.exists(d)]"
[testenv:test-{api,launcher,server,local_server,multi_server,remote_workflow,remote_operator,workflow,service,api_entry,custom_type_field,operators}]
description = Environment where project testing configuration is defined
depends = pretest
setenv =
# Pytest extra arguments
COVERAGE = --cov=ansys.dpf.core --log-level=ERROR --cov-report=
RERUNS = --reruns=2 --reruns-delay=1
DEBUG = -v -s --durations=10 --durations-min=1.0
COVERAGE_FILE = {work_dir}/.cov/.coverage.{env_name}
api: JUNITXML = --junitxml=tests/junit/test-results.xml
launcher: JUNITXML = --junitxml=tests/junit/test-results2.xml
server: JUNITXML = --junitxml=tests/junit/test-results3.xml
local_server: JUNITXML = --junitxml=tests/junit/test-results4.xml
multi_server: JUNITXML = --junitxml=tests/junit/test-results5.xml
remote_workflow: JUNITXML = --junitxml=tests/junit/test-results6.xml
remote_operator: JUNITXML = --junitxml=tests/junit/test-results7.xml
workflow: JUNITXML = --junitxml=tests/junit/test-results8.xml
service: JUNITXML = --junitxml=tests/junit/test-results9.xml
api_entry: JUNITXML = --junitxml=tests/junit/test-results10.xml
custom_type_field: JUNITXML = --junitxml=tests/junit/test-results11.xml
operators: JUNITXML = --junitxml=tests/junit/test-results12.xml
# Test sets
api: PYTEST_PYTHON_FILES = tests
launcher: PYTEST_PYTHON_FILES = test_launcher
server: PYTEST_PYTHON_FILES = test_server
local_server: PYTEST_PYTHON_FILES = test_local_server
multi_server: PYTEST_PYTHON_FILES = test_multi_server
remote_workflow: PYTEST_PYTHON_FILES = test_remote_workflow
remote_operator: PYTEST_PYTHON_FILES = test_remote_operator
workflow: PYTEST_PYTHON_FILES = test_workflow
service: PYTEST_PYTHON_FILES = test_service
api_entry: PYTEST_PYTHON_FILES = tests/entry
custom_type_field: PYTEST_PYTHON_FILES = test_custom_type_field
operators: PYTEST_PYTHON_FILES = tests/operators
TEMP = {env_tmp_dir}
TMP = {env_tmp_dir}
commands =
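# Arguments passed after `--` on the tox command line are forwarded to pytest via posargs,
# e.g. (hypothetical filter): `tox -e pretest,test-api,posttest,kill-servers -- -k launcher`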
python -m pytest {env:PYTEST_PYTHON_FILES} {env:DEBUG} {env:RERUNS} {env:JUNITXML} {env:COVERAGE} {posargs}
[testenv:covreport]
skip_install = true
deps = coverage
change_dir = {work_dir}/.cov
commands =
coverage combine
coverage xml
coverage erase # deletes only the .coverage data file; otherwise the codecov action would regenerate the coverage.xml report
[testenv:doc-{clean,links,html}]
description = Environment for documentation generation
setenv =
SOURCE_DIR = doc/source
BUILD_DIR = doc/build
BUILDER_OPTS = --color -j auto
links: BUILDER = linkcheck
html: BUILDER = html
skip_install =
clean: True
extras =
html: plotting
deps =
clean:
links,html: -r requirements/requirements_docs.txt
commands_pre =
# Clear any running servers that may be locking resources
html,links: python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
html,links: print(f'Killed \{nb_procs} \{proc_name} processes.')"
commands =
# Remove previously rendered documentation
clean: python -c "import shutil, sys; shutil.rmtree(sys.argv[1], ignore_errors=True)" "{toxinidir}/{env:BUILD_DIR}"
# Ensure vtk compatibility
html: python -m pip uninstall --yes vtk
html: python -m pip install --extra-index-url https://wheels.vtk.org vtk-osmesa==9.2.20230527.dev0
# Clean files from previous build
html: python -c "\
html: from os.path import exists; import shutil; \
html: [(shutil.rmtree(p) if exists(p) else None) for p in ['{env:SOURCE_DIR}/images/auto-generated']]; \
html: [(shutil.move(src, dst) if exists(src) else None) for src, dst in \
html: [('{env:SOURCE_DIR}/examples/07-python-operators/plugins', '{env:SOURCE_DIR}/_temp/plugins'), \
html: ('{env:SOURCE_DIR}/examples/04-advanced/02-volume_averaged_stress', '{env:SOURCE_DIR}/_temp/04_advanced'), \
html: ('{env:SOURCE_DIR}/examples/12-fluids/02-fluids_results', '{env:SOURCE_DIR}/_temp/12_fluids')]]; \
html: [shutil.rmtree(p) for p in ['{env:SOURCE_DIR}/examples'] if exists(p)]; \
html: [(shutil.move(src, dst) if exists(src) else None) for src, dst in \
html: [('{env:SOURCE_DIR}/_temp/plugins', '{env:SOURCE_DIR}/examples/07-python-operators/plugins'), \
html: ('{env:SOURCE_DIR}/_temp/04_advanced', '{env:SOURCE_DIR}/examples/04-advanced/02-volume_averaged_stress'), \
html: ('{env:SOURCE_DIR}/_temp/12_fluids', '{env:SOURCE_DIR}/examples/12-fluids/02-fluids_results')]]; \
html: [shutil.rmtree(p) for p in ['{env:SOURCE_DIR}/_temp'] if exists(p)]"
# Build documentation
html,links: {env_bin_dir}/sphinx-build -b {env:BUILDER} {env:SOURCE_DIR} {env:BUILD_DIR}/{env:BUILDER} {env:BUILDER_OPTS}
# Patch PyVista issue with elemental plots by copying the necessary images
html: python -c "\
html: import os, shutil, glob; os.makedirs('build/html/_images', exist_ok=True); \
html: [(shutil.copy(src, 'build/html/_images') if os.path.exists(src) else print(f'Source not found: {src}')) for src in \
html: glob.glob('{env:SOURCE_DIR}/examples/04-advanced/02-volume_averaged_stress/*') + glob.glob('{env:SOURCE_DIR}/examples/12-fluids/02-fluids_results/*')]"
commands_post =
# Clear any running servers that may be locking resources
html,links: python -c "import psutil; proc_name = 'Ans.Dpf.Grpc'; nb_procs = len([proc.kill() for proc in psutil.process_iter() if proc_name in proc.name()]); \
html,links: print(f'Killed \{nb_procs} \{proc_name} processes.')"