From 8d4ae01499ed76f7d7c91e0468ac92bcdfd17767 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 13:16:50 +0530 Subject: [PATCH 01/36] MAINT: Bump pandas from 1.3.5 to 2.2.1 (#4345) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 00b5a02d078..b413582bb00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -50,7 +50,7 @@ tests = [ "osmnx", "pandas==1.3.5; python_version == '3.7'", "pandas==2.0.3; python_version == '3.8'", - "pandas==2.1.1; python_version > '3.9'", + "pandas==2.2.1; python_version > '3.9'", "pytest==8.0.2", "pytest-cov==4.1.0", "pytest-xdist==3.5.0", @@ -119,7 +119,7 @@ full = [ "numpy==1.26.4; python_version > '3.9'", "pandas==1.3.5; python_version == '3.7'", "pandas==2.0.3; python_version == '3.8'", - "pandas==2.1.1; python_version > '3.9'", + "pandas==2.2.1; python_version > '3.9'", "osmnx", "vtk==9.2.6", "pyvista==0.43.3; python_version > '3.7'", @@ -139,7 +139,7 @@ all = [ "pandas==1.3.5; python_version == '3.7'", "pandas==2.0.3; python_version == '3.9'", "pandas==2.0.3; python_version == '3.8'", - "pandas==2.1.1; python_version > '3.9'", + "pandas==2.2.1; python_version > '3.9'", "osmnx", "vtk==9.2.6", "pyvista==0.43.3; python_version > '3.7'", From e3d98ca779fd55a79178b597281a2695910161df Mon Sep 17 00:00:00 2001 From: Lorenzo Vecchietti <58366962+lorenzovecchietti@users.noreply.github.com> Date: Mon, 11 Mar 2024 09:20:45 +0100 Subject: [PATCH 02/36] Enhancement/temp dep&transient bc (#4062) * add tests * add classes that ease the creation of boundary dictionaries * add classes that ease the creation of boundary dictionaries * fix check * enhance assign_stationary_wall to support new classes * enhance assign_stationary_wall to support new classes * enhance assign_source to 
support new classes * enhance assign_*_block to support new classes * enhance assign*_free_opening to support new classes * enhance assign*_resistance to support new classes * enhance assign_recirculation_opening to support new classes * enhance assign_conducting_plate to support new classes * enhance assign_conducting_plate* to support new classes * fix * Update Boundary.py * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert 
<84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/modules/Boundary.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci * Update pyaedt/icepak.py Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * fix docstring style * improve coverage * fix input sanitizing * fix tests * Apply suggestions from code review Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> * update docstring * update docstring --------- Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] 
<66853113+pre-commit-ci[bot]@users.noreply.github.com> --- _unittest/test_98_Icepak.py | 67 ++- pyaedt/icepak.py | 845 ++++++++++++++++++++---------------- pyaedt/modules/Boundary.py | 232 +++++++++- 3 files changed, 757 insertions(+), 387 deletions(-) diff --git a/_unittest/test_98_Icepak.py b/_unittest/test_98_Icepak.py index 98a5bbec313..771b0a22ce0 100644 --- a/_unittest/test_98_Icepak.py +++ b/_unittest/test_98_Icepak.py @@ -1484,6 +1484,71 @@ def test_73_conducting_plate(self): with pytest.raises(AttributeError): self.aedtapp.assign_conducting_plate_with_conductance([box_face.id, "surfPlateTest"]) - def test_74_native_component_load(self, add_app): + def test_74_boundary_conditions_dictionaries(self): + box1 = self.aedtapp.modeler.create_box([5, 5, 5], [1, 2, 3]) + ds_temp = self.aedtapp.create_dataset( + "ds_temp3", [1, 2, 3], [3, 2, 1], is_project_dataset=False, xunit="cel", yunit="W" + ) + bc1 = self.aedtapp.create_temp_dep_assignment(ds_temp.name) + assert bc1 + assert bc1.dataset_name == "ds_temp3" + assert self.aedtapp.assign_solid_block(box1.name, bc1) + + self.aedtapp.solution_type = "Transient" + + ds_time = self.aedtapp.create_dataset( + "ds_time3", [1, 2, 3], [3, 2, 1], is_project_dataset=False, xunit="s", yunit="W" + ) + bc2 = self.aedtapp.create_dataset_transient_assignment(ds_time.name) + rect = self.aedtapp.modeler.create_rectangle(self.aedtapp.PLANE.XY, [0, 0, 0], [20, 10]) + assert bc2 + assert self.aedtapp.assign_conducting_plate_with_resistance(rect.name, total_power=bc2) + + cylinder = self.aedtapp.modeler.create_cylinder(0, [-10, -10, -10], 1, 50) + bc3 = self.aedtapp.create_sinusoidal_transient_assignment("1W", "3", "2", "0.5s") + assert bc3 + assert self.aedtapp.assign_solid_block(cylinder.name, bc3) + + bc4 = self.aedtapp.create_square_wave_transient_assignment("3m_per_sec", "0.5s", "3s", "1s", "0.5m_per_sec") + assert bc4 + assert self.aedtapp.assign_free_opening( + self.aedtapp.modeler["Region"].faces[0].id, flow_type="Velocity", 
velocity=[bc4, 0, 0] + ) + + bondwire = self.aedtapp.modeler.create_bondwire([0, 0, 0], [1, 2, 3]) + bc5 = self.aedtapp.create_linear_transient_assignment("0.01W", "5") + assert bc5 + assert self.aedtapp.assign_solid_block(bondwire.name, bc5) + + box2 = self.aedtapp.modeler.create_box([15, 15, 15], [1, 2, 3]) + bc6 = self.aedtapp.create_exponential_transient_assignment("0W", "4", "2") + assert bc6 + assert self.aedtapp.assign_power_law_resistance( + box2.name, + total_power=bc6, + power_law_constant=1.5, + power_law_exponent="3", + ) + + box = self.aedtapp.modeler.create_box([25, 25, 25], [1, 2, 3]) + box.solve_inside = False + bc7 = self.aedtapp.create_powerlaw_transient_assignment("0.5kg_per_s", "10", "0.3") + assert bc7 + assert self.aedtapp.assign_recirculation_opening( + [box.top_face_x.id, box.bottom_face_x.id], + box.top_face_x.id, + assignment_value=bc6, + flow_assignment=bc7, + start_time="0s", + end_time="10s", + ) + + ds1_temp = self.aedtapp.create_dataset( + "ds_temp3", [1, 2, 3], [3, 2, 1], is_project_dataset=True, xunit="cel", yunit="W" + ) + assert not self.aedtapp.create_temp_dep_assignment(ds1_temp.name) + assert not self.aedtapp.create_temp_dep_assignment("nods") + + def test_75_native_component_load(self, add_app): app = add_app(application=Icepak, project_name=native_import, subfolder=test_subfolder) assert len(app.native_components) == 1 diff --git a/pyaedt/icepak.py b/pyaedt/icepak.py index ca1f661140b..f8608ea8c7f 100644 --- a/pyaedt/icepak.py +++ b/pyaedt/icepak.py @@ -31,9 +31,16 @@ from pyaedt.generic.settings import settings from pyaedt.modeler.cad.components_3d import UserDefinedComponent from pyaedt.modeler.geometry_operators import GeometryOperators +from pyaedt.modules.Boundary import BoundaryDictionary from pyaedt.modules.Boundary import BoundaryObject +from pyaedt.modules.Boundary import ExponentialDictionary +from pyaedt.modules.Boundary import LinearDictionary from pyaedt.modules.Boundary import NativeComponentObject from 
pyaedt.modules.Boundary import NetworkObject +from pyaedt.modules.Boundary import PieceWiseLinearDictionary +from pyaedt.modules.Boundary import PowerLawDictionary +from pyaedt.modules.Boundary import SinusoidalDictionary +from pyaedt.modules.Boundary import SquareWaveDictionary from pyaedt.modules.Boundary import _create_boundary from pyaedt.modules.monitor_icepak import Monitor @@ -654,7 +661,7 @@ def create_source_power( """Create a source power for a face. .. deprecated:: 0.6.71 - This method is replaced by `assign_source`. + This method is replaced by :obj:`~Icepak.assign_source`. Parameters ---------- @@ -745,7 +752,7 @@ def create_network_block( """Create a network block. .. deprecated:: 0.6.27 - This method will be replaced by `create_two_resistor_network_block`. + This method is replaced by :obj:`~Icepak.create_two_resistor_network_block`. Parameters ---------- @@ -3538,50 +3545,30 @@ def assign_stationary_wall( or ``"Heat Transfer Coefficient"``. name : str, optional Name of the boundary condition. The default is ``None``. - temperature : str or float or dict, optional + temperature : str or float or dict or BoundaryDictionary, optional Temperature to assign to the wall. This parameter is relevant if ``ext_condition="Temperature"``. If a float value is specified, the - unit is degrees Celsius. A dictionary can be used for transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. - - - The value for the ``"Type"`` key must be ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"``, and ``"Piecewise Linear"``. - The ``"Values"`` key contains a list of strings containing the parameters - required by the ``"Function"`` key selection. For example, ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line. 
The parameters required by - each ``Function`` key selection is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"0cel"``. - heat_flux : str or float or dict, optional + unit is degrees Celsius. Assign a transient condition using the + result of a function with the ``create_*_transient_assignment`` pattern. + The default is ``"0cel"``. + heat_flux : str or float or dict or BoundaryDictionary, optional Heat flux to assign to the wall. This parameter is relevant if ``ext_condition="Temperature"``. If a float value is specified, - the unit is irrad_W_per_m2. A dictionary can be used for temperature-dependent or transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. - - - The value for the ``"Type"`` key must be ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. - ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"0irrad_W_per_m2"``. - htc : str or float or dict, optional + the unit is irrad_W_per_m2. Assign a transient condition using the + result of a function with the ``create_*_transient_assignment`` pattern. + the unit is ``irrad_W_per_m2``. Assign a transient condition using the + result of a function with the ``create_*_transient_assignment`` pattern. + The default is ``"0irrad_W_per_m2"``. + htc : str or float or dict or BoundaryDictionary, optional Heat transfer coefficient to assign to the wall. This parameter is relevant if ``ext_condition="Heat Transfer Coefficient"``. 
If a - float value is specified, the unit is w_per_m2kel. - A dictionary can be used for temperature dependent or transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. Accepted ``"Type"`` values are: ``"Temp Dep"`` and ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. ``"Temp Dep"`` only - support the latter. ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"0w_per_m2kel"``. + float value is specified, the unit is ``w_per_m2kel``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + the ``create_*_transient_assignment`` pattern. + Assign a temperature-dependent condition using the result of a + function with the pattern ``create_temp_dep_assignment``. + The default is ``"0w_per_m2kel"``. thickness : str or float, optional Thickness of the wall. If a float value is specified, the unit is the current unit system set in Icepak. The default is ``"0mm"``. @@ -3643,21 +3630,13 @@ def assign_stationary_wall( ext_surf_rad_material : str, optional Surface material for the external surface radiation option. This parameter is relevant if ``ext_surf_rad=True``. The default is ``"Stainless-steel-cleaned"``. - ext_surf_rad_ref_temp : str or float or dict, optional + ext_surf_rad_ref_temp : str or float or dict or BoundaryDictionary, optional Reference temperature for the external surface radiation option. This parameter - is relevant if ``ext_surf_rad=True``. 
If a float value is specified, the default - unit is degrees Celsius. A dictionary can be used for transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. - - - The value for the ``"Type"`` key must be ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. - ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"AmbientTemp"``. + is relevant if ``ext_surf_rad=True``. If a float value is specified, the default + unit is degrees Celsius. + Assign a transient condition using the result of a function with + the pattern ``create_*_transient_assignment``. + The default is ``"AmbientTemp"``. ext_surf_rad_view_factor : str or float, optional View factor for the external surface radiation option. The default is ``"1"``. @@ -3717,7 +3696,7 @@ def assign_stationary_wall( ("Heat Flux", heat_flux, boundary_condition == "Heat Flux") ]: if to_add: - if isinstance(assignment_value, dict): + if isinstance(assignment_value, (dict, BoundaryDictionary)): assignment_value = self._parse_variation_data( quantity, assignment_value["Type"], @@ -3785,20 +3764,12 @@ def assign_stationary_wall_with_heat_flux( Name of the surface object or ID of the face. name : str, optional Name of the boundary condition. The default is ``None``. - heat_flux : str or float or dict, optional - Heat flux to assign to the wall. If a float value is - specified, the unit is ``irrad_W_per_m2``. A dictionary can be used for transient - assignment. 
The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. - - - The value for the ``"Type"`` key must be ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. - ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"0irrad_W_per_m2"``. + heat_flux : str or float or dict or BoundaryDictionary, optional + Heat flux to assign to the wall. This parameter is relevant if + ``ext_condition="Temperature"``. If a float value is specified, + the unit is ``irrad_W_per_m2``. Assign a transient condition using the + result of a function with the ``create_*_transient_assignment`` pattern. + The default is ``"0irrad_W_per_m2"``. thickness : str or float, optional Thickness of the wall. If a float value is specified, the unit is the current unit system set in Icepak. The default is ``"0mm"``. @@ -3855,20 +3826,12 @@ def assign_stationary_wall_with_temperature( Name of the surface object or ID of the face. name : str, optional Name of the boundary condition. The default is ``None``. - temperature : str or float or dict, optional - Temperature to assign to the wall. If a float value is specified, - the unit is degrees Celsius. A dictionary can be used for transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. - - - The value for the ``"Type"`` key must be ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"``, and ``"Piecewise Linear"``. 
- The ``"Values"`` key contains a list of strings containing the parameters - required by the ``"Function"`` key selection. For example, ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line. The parameters required by - each ``Function`` key selection is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"0cel"``. + temperature : str or float or dict or BoundaryDictionary, optional + Temperature to assign to the wall. This parameter is relevant if + ``ext_condition="Temperature"``. If a float value is specified, the + unit is degrees Celsius. Assign a transient condition using the + result of a function with the ``create_*_transient_assignment`` pattern. + The default is ``"0cel"``. thickness : str or float, optional Thickness of the wall. If a float value is specified used, the unit is the current unit system set in Icepak. The default is ``"0mm"``. @@ -3941,18 +3904,15 @@ def assign_stationary_wall_with_htc( Name of the surface object or id of the face. name : str, optional Name of the boundary condition. The default is ``None``. - htc : str or float or dict, optional - Heat transfer coefficient to assign to the wall. If a float value - is specified, the unit is ``w_per_m2kel``. A dictionary can be used for temperature dependent or transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. Accepted ``"Type"`` values are: ``"Temp Dep"`` and ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. ``"Temp Dep"`` only - support the latter. ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). 
The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"0w_per_m2kel"``. + htc : str or float or dict or BoundaryDictionary, optional + Heat transfer coefficient to assign to the wall. This parameter + is relevant if ``ext_condition="Heat Transfer Coefficient"``. If a + float value is specified, the unit is ``w_per_m2kel``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + Assign a temperature-dependent condition using the result of a + function with the pattern ``create_temp_dep_assignment``. + The default is ``"0w_per_m2kel"``. thickness : str or float, optional Thickness of the wall. If a float value is specified, the unit is the current unit system set in Icepak. The default is ``"0mm"``. @@ -3986,15 +3946,15 @@ def assign_stationary_wall_with_htc( Flow direction for the correlation option. This parameter is relevant if ``ht_correlation_type="Forced Convection"``. The default is ``"X"``. ht_correlation_value_type : str, optional - Value type for the forced convection correlation option. This - parameter is relevant if ``ht_correlation_type="Forced Convection"``. - Options are "Average Values" and "Local Values". The default - is ``"Average Values"``. + Value type for the forced convection correlation option. This + parameter is relevant if ``ht_correlation_type="Forced Convection"``. + Options are ``"Average Values"`` and ``"Local Values"``. The default + is ``"Average Values"``. ht_correlation_free_stream_velocity : str or float, optional - Free stream flow velocity. This parameter is relevant if - ``ht_correlation_type="Forced Convection"``. If a float - value is specified, ``m_per_sec`` is the unit. The default - is ``"1m_per_sec"``. + Free stream flow velocity. This parameter is relevant if + ``ht_correlation_type="Forced Convection"``. 
If a float + value is specified, ``m_per_sec`` is the unit. The default + is ``"1m_per_sec"``. ht_correlation_surface : str, optional Surface for the natural convection correlation option. This parameter is relevant if ``ht_correlation_type="Natural Convection"``. Options are "Top", @@ -4013,20 +3973,13 @@ def assign_stationary_wall_with_htc( ext_surf_rad_material : str, optional Surface material for the external surface radiation option. This parameter is relevant if ``ext_surf_rad=True``. The default is ``"Stainless-steel-cleaned"``. - ext_surf_rad_ref_temp : str or float or dict, optional - Reference temperature for the external surface radiation option. This - parameter is relevant if ``ext_surf_rad=True``. If a float value is - specified, the default unit is degrees Celsius. - A dictionary can be used for temperature dependent or transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. Accepted ``"Type"`` values are: ``"Temp Dep"`` and ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. ``"Temp Dep"`` only - support the latter. ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. The default is ``"AmbientTemp"``. + ext_surf_rad_ref_temp : str or float or dict or BoundaryDictionary, optional + Reference temperature for the external surface radiation option. This parameter + is relevant if ``ext_surf_rad=True``. If a float value is specified, the default + unit is degrees Celsius. 
+ Assign a transient condition using the result of a function with + the pattern ``create_*_transient_assignment``. + The default is ``"AmbientTemp"``. ext_surf_rad_view_factor : str or float, optional View factor for the external surface radiation option. The default is ``"1"``. @@ -4193,30 +4146,26 @@ def assign_source( thermal_condition : str Thermal condition. Accepted values are ``"Total Power"``, ``"Surface Heat"``, ``"Temperature"``. - assignment_value : str or dict - Value and units of the input power, surface heat or temperature (depending on - ``thermal_condition``). A dictionary can be used for temperature dependent or transient - assignment. The dictionary should contain three keys: ``"Type"``, ``"Function"``, and - ``"Values"``. Accepted ``"Type"`` values are: ``"Temp Dep"`` and ``"Transient"``. - - Accepted values for the ``"Function"`` key are: ``"Linear"``, ``"Power Law"``, ``"Exponential"``, - ``"Sinusoidal"``, ``"Square Wave"`` and ``"Piecewise Linear"``. ``"Temp Dep"`` only - support the latter. ``"Values"`` contains a list of strings containing the parameters - required by the ``"Function"`` selection (e.g. ``"Linear"`` requires two parameters: - the value of the variable at t=0 and the slope of the line). The parameters required by - each ``Function`` option is in Icepak documentation. The parameters must contain the - units where needed. + assignment_value : str or dict or BoundaryDictionary + Value and units of the input power, surface heat, or temperature (depending on + ``thermal_condition``). + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + Assign a temperature-dependent condition using the result of a + function with the ``create_temp_dep_assignment`` pattern. boundary_name : str, optional Name of the source boundary. The default is ``None``, in which case the boundary name is generated automatically. radiate : bool, optional Whether to enable radiation. 
The default is ``False``. voltage_current_choice : str or bool, optional - Whether to assign ``"Voltage"`` or ``"Current"`` or none of them. The default is - ``False`` (none of them is assigned). - voltage_current_value : str or dict, optional - Value and units of current or voltage assignment. A dictionary can be used for - transient assignment. The dictionary must be structured as described for the - ``assignment_value`` argument. The default is ``None``. + Whether to assign the ``"Voltage"`` or ``"Current"`` option. The default is + ``False``, in which case neither option is assigned. + voltage_current_value : str or dict or BoundaryDictionary, optional + Value and units of current or voltage assignment. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``None``. Returns ------- @@ -4253,7 +4202,7 @@ def assign_source( props["Thermal Condition"] = thermal_condition for quantity, value in default_values.items(): if quantity == thermal_condition: - if isinstance(assignment_value, dict): + if isinstance(assignment_value, (dict, BoundaryDictionary)): assignment_value = self._parse_variation_data( quantity, assignment_value["Type"], @@ -4273,7 +4222,7 @@ def assign_source( props["Voltage/Current Option"] = voltage_current_choice for quantity, value in default_values.items(): if voltage_current_choice == quantity: - if isinstance(voltage_current_value, dict): + if isinstance(voltage_current_value, (dict, BoundaryDictionary)): if voltage_current_value["Type"] == "Temp Dep": self.logger.error("Voltage or Current assignment does not support temperature dependence.") return None @@ -4419,39 +4368,31 @@ def assign_solid_block( ---------- object_name : str or list Object name or a list of object names. - power_assignment : str or dict + power_assignment : str or dict or BoundaryDictionary String with the value and units of the power assignment or with - ``"Joule Heating"``. 
For a temperature-dependent or transient - assignment, a dictionary can be used. The dictionary should contain three keys: - ``"Type"``, ``"Function"``, and ``"Values"``. - - For the ``"Type"`` key, accepted values are ``"Temp Dep"`` and ``"Transient"``. - - For the ``"Function"`` key, acceptable values depend on the ``"Type"`` key - selection. When the ``"Type"`` key is set to ``"Temp Dep"``, the only - accepted value is ``"Piecewise Linear"``. When the ``"Type"`` key is - set to ``"Transient"``, acceptable values are `"Exponential"``, `"Linear"``, - ``"Piecewise Linear"``, ``"Power Law"``, ``"Sinusoidal"``, and ``"SquareWave"``. - - For the ``"Values"`` key, a list of strings contain the parameters required by - the ``"Function"`` key selection. For example, when``"Linear"`` is set as the - ``"Function"`` key, two parameters are required: the value of the variable - at t=0 and the slope of the line. For the parameters required by each - ``"Function"`` key selection, see the Icepak documentation. The parameters - must contain the units where needed. + ``" If you don't want to assign a specific power but set a joule heating + dissipation, use ``power_assignment="Joule Heating"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + Assign a temperature-dependent condition using the result of a + function with the ``create_temp_dep_assignment`` pattern. boundary_name : str, optional Name of the source boundary. The default is ``None``, in which case the boundary name is automatically generated. - htc : float, str, or dict, optional + htc : float, str, or dict or BoundaryDictionary, optional String with the value and units of the heat transfer coefficient for the - external conditions. If a float is provided, the ``"w_per_m2kel"`` unit is used. - For a temperature-dependent or transient - assignment, a dictionary can be used. 
For more information, see the - description for the preceding ``power_assignment`` parameter. The - default is ``None``, in which case no external condition is applied. - ext_temperature : float, str or dict, optional + external conditions. If a float is provided, the unit is ``"w_per_m2kel"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern . + Assign a temperature-dependent condition using the result of a + function with the pattern ``create_temp_dep_assignment``. + The default is ``None``, in which case no external condition is applied. + ext_temperature : float, str or dict or BoundaryDictionary, optional String with the value and units of temperature for the external conditions. If a float is provided, the ``"cel"`` unit is used. - For a transient assignment, a dictionary can be used. For more information, - see the description for the preceding ``power_assignment`` parameter. The - default is ``"AmbientTemp"``, which is used if the ``htc`` parameter is not + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"AmbientTemp"``, which is used if the ``htc`` parameter is not set to ``None``. Returns @@ -4477,7 +4418,7 @@ def assign_solid_block( if ext_temperature != "AmbientTemp" and ext_temperature is not None and not htc: self.logger.add_error_message("Set an argument for ``htc`` or remove the ``ext_temperature`` argument.") return None - if isinstance(ext_temperature, dict) and ext_temperature["Type"] == "Temp Dep": + if isinstance(ext_temperature, (dict, BoundaryDictionary)) and ext_temperature["Type"] == "Temp Dep": self.logger.add_error_message( 'It is not possible to use a "Temp Dep" assignment for ' "temperature assignment." 
) @@ -4491,7 +4432,7 @@ def assign_solid_block( ) return None props = {"Block Type": "Solid", "Objects": object_name} - if isinstance(power_assignment, dict): + if isinstance(power_assignment, (dict, BoundaryDictionary)): assignment_value = self._parse_variation_data( "Total Power", power_assignment["Type"], @@ -4512,7 +4453,7 @@ def assign_solid_block( if htc: props["Use External Conditions"] = True for quantity, assignment in [("Temperature", ext_temperature), ("Heat Transfer Coefficient", htc)]: - if isinstance(assignment, dict): + if isinstance(assignment, (dict, BoundaryDictionary)): assignment_value = self._parse_variation_data( quantity, assignment["Type"], @@ -4546,30 +4487,22 @@ def assign_hollow_block( assignment_type : str Type of the boundary assignment. Options are ``"Heat Transfer Coefficient"``, ``"Heat Flux"``, ``"Temperature"``, and ``"Total Power"``. - assignment_value : str or dict - String with value and units of the assignment. If ``"Total Power"`` is + assignment_value : str or dict or BoundaryDictionary + String with a value and units of the assignment. If ``"Total Power"`` is the assignment type, ``"Joule Heating"`` can be used. - For a temperature-dependent or transient assignment, a dictionary can be used. - The dictionary should contain three keys: ``"Type"``, ``"Function"``, and ``"Values"``. - - For the ``"Type"`` key, accepted values are ``"Temp Dep"`` and ``"Transient"``. - - For the ``"Function"`` key, acceptable values depend on the ``"Type"`` key selection. When the ``"Type"`` - key is set to ``"Temp Dep"``, the only accepted value is ``"Piecewise Linear"``. - When the ``"Type"`` key is set to ``"Transient"``, acceptable values are `"Exponential"``, `"Linear"``, - ``"Piecewise Linear"``, ``"Power Law"``, ``"Sinusoidal"``, and ``"Square Wave"``. - - For the ``"Values"`` key, a list of strings contain the parameters required by the ``"Function"`` - key selection. 
For example, when``"Linear"`` is set as the ``"Function"`` key, two parameters are required: - the value of the variable at t=0 and the slope of the line. - For the parameters required by each ``"Function"`` key selection, see the Icepak documentation. - The parameters must contain the units where needed. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + Assign a temperature-dependent condition using the result of a + function with the pattern ``create_temp_dep_assignment``. boundary_name : str, optional Name of the source boundary. The default is ``None``, in which case the boundary is automatically generated. - external_temperature : str, dict or float, optional + external_temperature : str, dict or float or BoundaryDictionary, optional String with the value and unit of the temperature for the heat transfer coefficient. If a float value is specified, the ``"cel"`` unit is automatically added. - For a transient assignment, a dictionary can be used as described for the - ``assignment_value`` argument. Temperature dependent assignment is not supported. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. The default is ``"AmbientTemp"``. 
Returns @@ -4625,7 +4558,7 @@ def assign_hollow_block( props = {"Block Type": "Hollow", "Objects": object_name, "Thermal Condition": thermal_condition[0]} if thermal_condition[0] == "Fixed Heat": props["Use Total Power"] = thermal_condition[1] == "Total Power" - if isinstance(assignment_value, dict): + if isinstance(assignment_value, (dict, BoundaryDictionary)): assignment_value_dict = self._parse_variation_data( thermal_condition[1], assignment_value["Type"], @@ -4641,7 +4574,7 @@ def assign_hollow_block( else: props[thermal_condition[1]] = assignment_value if thermal_condition[0] == "Internal Conditions": - if isinstance(external_temperature, dict): + if isinstance(external_temperature, (dict, BoundaryDictionary)): if external_temperature["Type"] == "Temp Dep": self.logger.add_error_message('It is not possible to use "Temp Dep" for a temperature assignment.') return None @@ -4731,44 +4664,42 @@ def assign_free_opening( IDs or object names is also accepted. boundary_name : str, optional Boundary name. Default is ``None``, in which case the name is generated automatically. - temperature : str or float or dict, optional + temperature : str or float or dict or BoundaryDictionary, optional Prescribed temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and ``'Values'`` - can be passed to set a transient behaviour. The acceptable values associated with those - keys can be found in the Icepak documentation. Default is ``"AmbientTemp"``. - radiation_temperature : str or float, optional + automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + radiation_temperature : str or float, optional Prescribed radiation temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. 
If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and - ``'Values'`` can be passed to set a transient behaviour. - The acceptable values associated with those keys can be found in the Icepak documentation. + automatically added. Default is ``"AmbientRadTemp"``. flow_type : int or str, optional Prescribed radiation flow type at the boundary. Available options are ``"Pressure"``, ``"Velocity"``, and ``"Mass Flow"``. The default is ``"Pressure"``. - pressure : float or str or dict, optional + pressure : float or str or dict or BoundaryDictionary, optional Prescribed pressure (static or total coherently with flow type) at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, - the unit ``'pascal'`` is automatically added. Also, a dictionary containing the keys - ``'Function'`` and ``'Values'`` can be passed to set a transient behavior. The acceptable - values associated with those keys can be found in the Icepak documentation. + the unit ``'pascal'`` is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. The default is ``"AmbientPressure"``. no_reverse_flow : bool, optional Option to block reverse flow at the boundary. Default is ``False``. velocity : list, optional Prescribed velocity at the boundary. If a list of strings is set, a variable name or a number with the unit is expected for each element. If list of floats is set, the unit ``'m_per_sec'`` - is automatically added. Also, a dictionary containing the keys ``'Function'`` and - ``'Values'`` can be passed in one or more vector element to set a transient behaviour. - The acceptable values associated with those keys can be found in the Icepak documentation. + is automatically added. 
+ Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern as an element of the list. Default is ``["0m_per_sec", "0m_per_sec", "0m_per_sec"]``. - mass_flow_rate : float or str or dict, optional + mass_flow_rate : float or str or dict or BoundaryDictionary, optional Prescribed pressure (static or total coherently with flow type) at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, - the unit ``'kg_per_s'`` is automatically added. Also, a dictionary containing the keys - ``'Function'`` and ``'Values'`` can be passed to set a transient behaviour. The acceptable - values associated with those keys can be found in the Icepak documentation. + the unit ``'kg_per_s'`` is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. Default is ``"0kg_per_s"``. inflow : bool, optional Prescribe if the imposed mass flow is an inflow or an outflow. 
Default is ``"True"``, @@ -4796,15 +4727,16 @@ def assign_free_opening( """ # Sanitize input for i in range(len(velocity)): - if not isinstance(velocity[i], str) and not isinstance(velocity[i], dict): + if not isinstance(velocity[i], str) and not isinstance(velocity[i], (dict, BoundaryDictionary)): velocity[i] = str(velocity[i]) + "m_per_sec" - if not isinstance(mass_flow_rate, str) and not isinstance(mass_flow_rate, dict): + if not isinstance(mass_flow_rate, str) and not isinstance(mass_flow_rate, (dict, BoundaryDictionary)): mass_flow_rate = str(mass_flow_rate) + "kg_per_s" - if not isinstance(temperature, str) and not isinstance(temperature, dict): + if not isinstance(temperature, str) and not isinstance(temperature, (dict, BoundaryDictionary)): temperature = str(temperature) + "cel" - if not isinstance(radiation_temperature, str) and not isinstance(radiation_temperature, dict): + if not isinstance(radiation_temperature, str) and not isinstance(radiation_temperature, (dict, + BoundaryDictionary)): radiation_temperature = str(radiation_temperature) + "cel" - if not isinstance(pressure, str) and not isinstance(pressure, dict): + if not isinstance(pressure, str) and not isinstance(pressure, (dict, BoundaryDictionary)): pressure = str(pressure) + "pascal" # Dict creation props = {} @@ -4846,7 +4778,7 @@ def assign_free_opening( ("Z Velocity", velocity[2]), ] for quantity, assignment in possible_transient_properties: - if isinstance(assignment, dict): + if isinstance(assignment, (dict, BoundaryDictionary)): if not self.solution_type == "Transient": self.logger.error("Transient assignment is supported only in transient designs.") return None @@ -4888,30 +4820,28 @@ def assign_pressure_free_opening( Parameters ---------- assignment : int or str or list - Integer indicating a face ID or a string indicating an object name. A list of face - IDs or object names is also accepted. + Integer indicating a face ID or a string indicating an object name. 
A list of face + IDs or object names is also accepted. boundary_name : str, optional Boundary name. Default is ``None``, in which case the name is generated automatically. - temperature : str or float or dict, optional + temperature : str or float or dict or BoundaryDictionary, optional Prescribed temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and ``'Values'`` - can be passed to set a transient behaviour. The acceptable values associated with those - keys can be found in the Icepak documentation. Default is ``"AmbientTemp"``. - radiation_temperature : str or float, optional + automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + radiation_temperature : str or float, optional Prescribed radiation temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and - ``'Values'`` can be passed to set a transient behaviour. - The acceptable values associated with those keys can be found in the Icepak documentation. + automatically added. Default is ``"AmbientRadTemp"``. - pressure : float or str or dict, optional + pressure : float or str or dict or BoundaryDictionary, optional Prescribed pressure (static or total coherently with flow type) at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, - the unit ``'pascal'`` is automatically added. Also, a dictionary containing the keys - ``'Function'`` and ``'Values'`` can be passed to set a transient behavior. The - acceptable values associated with those keys can be found in the Icepak - documentation. The default is ``"AmbientPressure"``. 
+ the unit ``'pascal'`` is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"AmbientPressure"``. no_reverse_flow : bool, optional Option to block reverse flow at the boundary. Default is ``False``. @@ -4962,32 +4892,30 @@ def assign_velocity_free_opening( IDs or object names is also accepted. boundary_name : str, optional Boundary name. Default is ``None``, in which case the name is generated automatically. - temperature : str or float or dict, optional + temperature : str or float or dict or BoundaryDictionary, optional Prescribed temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and ``'Values'`` - can be passed to set a transient behaviour. The acceptable values associated with those - keys can be found in the Icepak documentation. Default is ``"AmbientTemp"``. - radiation_temperature : str or float, optional + automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + radiation_temperature : str or float, optional Prescribed radiation temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and - ``'Values'`` can be passed to set a transient behaviour. - The acceptable values associated with those keys can be found in the Icepak documentation. + automatically added. Default is ``"AmbientRadTemp"``. - pressure : float or str or dict, optional + pressure : float or str or dict or BoundaryDictionary, optional Prescribed pressure (static or total coherently with flow type) at the boundary. 
If a string is set, a variable name or a number with the unit is expected. If a float is set, - the unit ``'pascal'`` is automatically added. Also, a dictionary containing the keys - ``'Function'`` and ``'Values'`` can be passed to set a transient behavior. The - acceptable values associated with those keys can be found in the Icepak - documentation. The default is ``"AmbientPressure"``. + the unit ``'pascal'`` is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"AmbientPressure"``. velocity : list, optional Prescribed velocity at the boundary. If a list of strings is set, a variable name or a number with the unit is expected for each element. If list of floats is set, the unit ``'m_per_sec'`` - is automatically added. Also, a dictionary containing the keys ``'Function'`` and - ``'Values'`` can be passed in one or more vector element to set a transient behaviour. - The acceptable values associated with those keys can be found in the Icepak documentation. + is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern as an element of the list. Default is ``["0m_per_sec", "0m_per_sec", "0m_per_sec"]``. @@ -5035,37 +4963,35 @@ def assign_mass_flow_free_opening( Parameters ---------- assignment : int or str or list - Integer indicating a face ID or a string indicating an object name. A list of face - IDs or object names is also accepted. + Integer indicating a face ID or a string indicating an object name. A list of face + IDs or object names is also accepted. boundary_name : str, optional Boundary name. The default is ``None``, in which case the name is generated automatically. - temperature : str or float or dict, optional + temperature : str or float or dict or BoundaryDictionary, optional Prescribed temperature at the boundary. 
If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and ``'Values'`` - can be passed to set a transient behaviour. The acceptable values associated with those - keys can be found in the Icepak documentation. Default is ``"AmbientTemp"``. - radiation_temperature : str or float, optional + automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + radiation_temperature : str or float, optional Prescribed radiation temperature at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, the unit ``'cel'`` is - automatically added. Also, a dictionary containing the keys ``'Function'`` and - ``'Values'`` can be passed to set a transient behaviour. - The acceptable values associated with those keys can be found in the Icepak documentation. + automatically added. Default is ``"AmbientRadTemp"``. - pressure : float or str or dict, optional + pressure : float or str or dict or BoundaryDictionary, optional Prescribed pressure (static or total coherently with flow type) at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, - the unit ``'pascal'`` is automatically added. Also, a dictionary containing the keys - ``'Function'`` and ``'Values'`` can be passed to set a transient behavior. The - acceptable values associated with those keys can be found in the Icepak - documentation. The default is ``"AmbientPressure"``. - mass_flow_rate : float or str or dict, optional + the unit ``'pascal'`` is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"AmbientPressure"``. 
+ mass_flow_rate : float or str or dict or BoundaryDictionary, optional Prescribed pressure (static or total coherently with flow type) at the boundary. If a string is set, a variable name or a number with the unit is expected. If a float is set, - the unit ``'kg_per_s'`` is automatically added. Also, a dictionary containing the keys - ``'Function'`` and ``'Values'`` can be passed to set a transient behaviour. The acceptable - values associated with those keys can be found in the Icepak documentation. - Default is ``"0kg_per_s"``. + the unit ``'kg_per_s'`` is automatically added. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"0kg_per_s"``. inflow : bool, optional Prescribe if the imposed mass flow is an inflow or an outflow. Default is ``"True"``, in which case an inflow is prescribed. @@ -5235,24 +5161,13 @@ def assign_resistance(self, objects, boundary_name=None, total_power="0W", fluid objects specified as a list. boundary_name : str, optional The name of the boundary object that will be created. If not - provided, a unique name will be generated. Default is ``None``. - total_power : str, float, or dict, optional + provided, a unique name is generated. The default is ``None``. + total_power : str, float, or dict or BoundaryDictionary, optional The total power transferred to the fluid through the resistance - volume. It is specified as a string with value and unit, a float - where the default unit "W" will be used, or a dictionary for - transient assignment. The dictionary should contain two keys: - ``"Function"`` and ``"Values"``. - - - For the ``"Function"`` key, options are ``"Exponential"``, - ``"Linear"``, ``"Piecewise Linear"``, ``"Power Law"``, - ``"Sinusoidal"``, and ``"Square Wave"``. - - For the ``"Values"`` key, provide a list of strings containing - the parameters required by the ``"Function"`` key selection. 
For - example, when ``"Linear"`` is set as the ``"Function"`` key, two - parameters are required: the value of the variable at t=0 and the - slope of the line. For the parameters required by each - ``"Function"`` key selection, see the Icepak documentation. - + volume. It is specified as a string with a value and unit, a float + where the default unit "W" is used. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. Default is ``"0W"``. fluid : str, optional The material of the volume to which the resistance is being @@ -5356,7 +5271,7 @@ def assign_resistance(self, objects, boundary_name=None, total_power="0W", fluid "Y": [str(i) for i in values[1]] } - if isinstance(total_power, dict): + if isinstance(total_power, (dict, BoundaryDictionary)): if not self.solution_type == "Transient": self.logger.error("Transient assignment is supported only in transient designs.") return None @@ -5397,28 +5312,17 @@ def assign_power_law_resistance(self, objects, boundary_name=None, total_power=" objects specified as a list. boundary_name : str, optional The name of the boundary object that will be created. If not - provided, a unique name will be generated. Default is ``None``. - total_power : str, float, or dict, optional + provided, a unique name is generated. The default is ``None``. + total_power : str, float, or dict or BoundaryDictionary, optional The total power transferred to the fluid through the resistance - volume. It is specified as a string with value and unit, a float - where the default unit "W" will be used, or a dictionary for - transient assignment. The dictionary should contain two keys: - ``"Function"`` and ``"Values"``. - - - For the ``"Function"`` key, options are ``"Exponential"``, - ``"Linear"``, ``"Piecewise Linear"``, ``"Power Law"``, - ``"Sinusoidal"``, and ``"Square Wave"``. 
- - For the ``"Values"`` key, provide a list of strings containing - the parameters required by the ``"Function"`` key selection. For - example, when ``"Linear"`` is set as the ``"Function"`` key, two - parameters are required: the value of the variable at t=0 and the - slope of the line. For the parameters required by each - ``"Function"`` key selection, see the Icepak documentation. - - Default is ``"0W"``. + volume. It is specified as a string with a value and unit or a float + where the default unit ``"W"`` is used. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"0W"``. fluid : str, optional - The material of the volume to which the resistance is being - assigned. Default is ``"air"``. + Material of the volume to assign the resistance to. The + default is ``"air"``. laminar : bool, optional Whether the flow inside the volume must be treated as laminar or not. Default is ``False``. @@ -5462,25 +5366,14 @@ def assign_loss_curve_resistance(self, objects, boundary_name=None, total_power= assigned. It can be a single object (a string) or multiple objects specified as a list. boundary_name : str, optional - The name of the boundary object that will be created. If not - provided, a unique name will be generated. Default is ``None``. - total_power : str, float, or dict, optional - The total power transferred to the fluid through the resistance - volume. It is specified as a string with value and unit, a float - where the default unit "W" will be used, or a dictionary for - transient assignment. The dictionary should contain two keys: - ``"Function"`` and ``"Values"``. - - - For the ``"Function"`` key, options are ``"Exponential"``, - ``"Linear"``, ``"Piecewise Linear"``, ``"Power Law"``, - ``"Sinusoidal"``, and ``"Square Wave"``. - - For the ``"Values"`` key, provide a list of strings containing - the parameters required by the ``"Function"`` key selection. 
For - example, when ``"Linear"`` is set as the ``"Function"`` key, two - parameters are required: the value of the variable at t=0 and the - slope of the line. For the parameters required by each - ``"Function"`` key selection, see the Icepak documentation. - + Name of the boundary object to create. If a name is not + provided, a unique name is generated. The default is ``None``. + total_power : str, float, or dict or BoundaryDictionary, optional + Total power transferred to the fluid through the resistance + volume. It is specified as a string with a value and unit or a float + where the default unit ``"W"`` is used. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. Default is ``"0W"``. fluid : str, optional The material of the volume to which the resistance is being @@ -5547,27 +5440,16 @@ def assign_device_resistance(self, objects, boundary_name=None, total_power="0W" objects specified as a list. boundary_name : str, optional The name of the boundary object that will be created. If not - provided, a unique name will be generated. Default is ``None``. - total_power : str, float, or dict, optional + provided, a unique name is generated. The default is ``None``. + total_power : str, float, or dict or BoundaryDictionary, optional The total power transferred to the fluid through the resistance - volume. It is specified as a string with value and unit, a float - where the default unit "W" will be used, or a dictionary for - transient assignment. The dictionary should contain two keys: - ``"Function"`` and ``"Values"``. - - - For the ``"Function"`` key, options are ``"Exponential"``, - ``"Linear"``, ``"Piecewise Linear"``, ``"Power Law"``, - ``"Sinusoidal"``, and ``"Square Wave"``. - - For the ``"Values"`` key, provide a list of strings containing - the parameters required by the ``"Function"`` key selection. 
For - example, when ``"Linear"`` is set as the ``"Function"`` key, two - parameters are required: the value of the variable at t=0 and the - slope of the line. For the parameters required by each - ``"Function"`` key selection, see the Icepak documentation. - - Default is ``"0W"``. + volume. It is specified as a string with a value and unit or a float + where the default unit ``"W"`` is used. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. + The default is ``"0W"``. fluid : str, optional - The material of the volume to which the resistance is being + Material of the volume to which the resistance is being assigned. Default is ``"air"``. laminar : bool, optional Whether the flow inside the volume must be treated as laminar or @@ -5628,20 +5510,10 @@ def assign_recirculation_opening(self, face_list, extract_face, thermal_specific Type of the thermal assignment across the two recirculation faces. The default is ``"Temperature"``. Options are ``"Conductance"``, ``"Heat Input"``, and ``"Temperature"``. - assignment_value : str or dict, optional - String with value and units of the thermal assignment. For a - transient assignment, a dictionary can be used. The dictionary - should contain two keys: ``"Function"`` and ``"Values"``. - - For the ``"Function"`` key, options are - ``"Exponential"``, ``"Linear"``, ``"Piecewise Linear"``, - ``"Power Law"``, ``"Sinusoidal"``, and ``"Square Wave"``. - - For the ``"Values"`` key, provide a list of strings containing the - parameters required by the ``"Function"`` key selection. For - example, when ``"Linear"`` is set as the ``"Function"`` key, two - parameters are required: the value of the variable at t=0 and the - slope of the line. For the parameters required by each ``"Function"`` - key selection, see the Icepak documentation. - The parameters must contain the units where needed. 
+ assignment_value : str or dict or BoundaryDictionary, optional + String with a value and units of the thermal assignment. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. The default value is ``"0cel"``. conductance_external_temperature : str, optional External temperature value, which is needed if @@ -5651,20 +5523,10 @@ def assign_recirculation_opening(self, face_list, extract_face, thermal_specific Flow specification for the recirculation zone. The default is ``"Mass Flow"``. Options are: ``"Mass Flow"``, ``"Mass Flux"``, and ``"Volume Flow"``. - flow_assignment : str or dict, optional - String with the value and units of the flow assignment. For a - transient assignment, a dictionary can be used. The dictionary - should contain two keys: ``"Function"`` and ``"Values"``. - - For the ``"Function"`` key, options are - ``"Exponential"``, ``"Linear"``, ``"Piecewise Linear"``, - ``"Power Law"``, ``"Sinusoidal"``, and ``"Square Wave"``. - - For the ``"Values"`` key, provide a list of strings containing the - parameters required by the ``"Function"`` key selection. For - example, when``"Linear"`` is set as the ``"Function"`` key, two - parameters are required: the value of the variable at t=0 and the - slope of the line. For the parameters required by each - ``"Function"`` key selection, see the Icepak documentation. - The parameters must contain the units where needed. + flow_assignment : str or dict or BoundaryDictionary, optional + String with the value and units of the flow assignment. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. The default value is ``"0kg_per_s_m2"``. flow_direction : list, optional Flow direction enforced at the recirculation zone. 
The default value @@ -5714,7 +5576,7 @@ def assign_recirculation_opening(self, face_list, extract_face, thermal_specific if (start_time is not None or end_time is not None) and not self.solution_type == "Transient": self.logger.warning( '``start_time`` and ``end_time`` only effect steady-state simulations.') - elif self.solution_type == "Transient" and not (start_time and end_time): + elif self.solution_type == "Transient" and not (start_time is not None and end_time is not None): self.logger.warning( '``start_time`` and ``end_time`` should be declared for transient simulations. Setting them to "0s".') start_time = "0s" @@ -5734,7 +5596,7 @@ def assign_recirculation_opening(self, face_list, extract_face, thermal_specific extract_face = [extract_face.id] props["ExtractFace"] = extract_face props["Thermal Condition"] = thermal_specification - if isinstance(assignment_value, dict): + if isinstance(assignment_value, (dict, BoundaryDictionary)): if not self.solution_type == "Transient": self.logger.error("Transient assignment is supported only in transient designs.") return None @@ -5749,7 +5611,7 @@ def assign_recirculation_opening(self, face_list, extract_face, thermal_specific props[assignment_dict[thermal_specification]] = assignment_value if thermal_specification == "Conductance": props["External Temp"] = conductance_external_temperature - if isinstance(flow_assignment, dict): + if isinstance(flow_assignment, (dict, BoundaryDictionary)): if not self.solution_type == "Transient": self.logger.error("Transient assignment is supported only in transient designs.") return None @@ -5952,12 +5814,13 @@ def assign_conducting_plate(self, obj_plate, boundary_name=None, total_power="0W boundary_name : str, optional Boundary name. The default is ``None``, in which case a name is generated automatically. - total_power : str or float or dict, optional + total_power : str or float or dict or BoundaryDictionary, optional Power dissipated by the plate. The default is ``"0W"``. 
If a float, - the default unit is ``"W"``. A transient or temperature-dependent power - can be assigned with a dictionary. + the default unit is ``"W"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. thermal_specification : str, optional - Type of condition to apply. The default is `"Thickness"``. + Type of condition to apply. The default is ``"Thickness"``. Options are ``"Conductance"``, ``"Thermal Impedance"``, ``"Thermal Resistance"``, and ``"Thickness"``. thickness : str or float, optional @@ -6004,7 +5867,7 @@ def assign_conducting_plate(self, obj_plate, boundary_name=None, total_power="0W else: raise AttributeError("Invalid ``obj_plate`` argument.") - if isinstance(total_power, dict): + if isinstance(total_power, (dict, BoundaryDictionary)): assignment = self._parse_variation_data( "Total Power", total_power["Type"], @@ -6059,10 +5922,11 @@ def assign_conducting_plate_with_thickness(self, obj_plate, boundary_name=None, boundary_name : str, optional Boundary name. The default is ``None``, in which case a name is generated automatically. - total_power : str or float or dict, optional + total_power : str or float or dict or BoundaryDictionary, optional Power dissipated by the plate. The default is ``"0W"``. If a float, - the default unit is ``"W"``. A transient or temperature-dependent power - can be assigned with a dictionary. + the default unit is ``"W"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. thickness : str or float, optional If ``thermal_specification="Thickness"``, this parameter represents the thickness to model with the plate. The default is ``"1mm"``. If a float, @@ -6110,10 +5974,11 @@ def assign_conducting_plate_with_resistance(self, obj_plate, boundary_name=None, boundary_name : str, optional Boundary name. The default is ``None``, in which case a name is generated automatically. 
- total_power : str or float or dict, optional + total_power : str or float or dict or BoundaryDictionary, optional Power dissipated by the plate. The default is ``"0W"``. If a float, - the default unit is ``"W"``. A transient or temperature-dependent power - can be assigned with a dictionary. + the default unit is ``"W"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. thermal_resistance : str or float, optional If ``thermal_specification="Thermal Resistance"``, this parameter represents the thermal resistance of the plate. The default is ``"0Kel_per_W"``. If a float, the @@ -6157,10 +6022,11 @@ def assign_conducting_plate_with_impedance(self, obj_plate, boundary_name=None, boundary_name : str, optional Boundary name. The default is ``None``, in which case a name is generated automatically. - total_power : str or float or dict, optional + total_power : str or float or dict or BoundaryDictionary, optional Power dissipated by the plate. The default is ``"0W"``. If a float, - the default unit is ``"W"``. A transient or temperature-dependent power - can be assigned with a dictionary. + the default unit is ``"W"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. thermal_impedance : str or float, optional If ``thermal_specification="Thermal Impedance"``, this parameter represents the thermal impedance of the plate. The default is ``"0Cel_m2_per_W"``. If a float, the @@ -6204,10 +6070,11 @@ def assign_conducting_plate_with_conductance(self, obj_plate, boundary_name=None boundary_name : str, optional Boundary name. The default is ``None``, in which case a name is generated automatically. - total_power : str or float or dict, optional + total_power : str or float or dict or BoundaryDictionary, optional Power dissipated by the plate. The default is ``"0W"``. If a float, - the default unit is ``"W"``. 
A transient or temperature-dependent power - can be assigned with a dictionary. + the default unit is ``"W"``. + Assign a transient condition using the result of a function with + the ``create_*_transient_assignment`` pattern. conductance : str or float, optional If ``thermal_specification="Conductance"``, this parameter represents the conductance of the plate. The default is ``"0W_per_Cel"``. If a float, the default @@ -6235,3 +6102,211 @@ def assign_conducting_plate_with_conductance(self, obj_plate, boundary_name=None shell_conduction=shell_conduction, low_side_rad_material=low_side_rad_material, high_side_rad_material=high_side_rad_material) + + @pyaedt_function_handler + def __create_dataset_assignment(self, type_assignment, ds_name, scale): + """Create dataset condition assignments. + + Parameters + ---------- + type_assignment : str + Type of assignment represented by the class. + Options are ``"Temp Dep"`` and ``"Transient"``. + ds_name : str + Dataset name to assign. + scale : str + Scaling factor for the y values of the dataset. + + Returns + ------- + bool or :class:`PieceWiseLinearDictionary` + Created dataset condition assignments when successful, ``False`` when failed. + """ + ds = None + try: + if ds_name.startswith("$"): + self.logger.error("Only design datasets are supported.") + return False + else: + ds = self.design_datasets[ds_name] + except KeyError: + self.logger.error("Dataset {} not found.".format({ds_name})) + return False + if not isinstance(scale, str): + scale = str(scale) + return PieceWiseLinearDictionary(type_assignment, ds, scale) + + @pyaedt_function_handler + def create_temp_dep_assignment(self, ds_name, scale=1): + """ + Create a temperature-dependent assignment from a dataset. + + Parameters + ---------- + ds_name : str + Name of the dataset. + scale : float or str, optional + Value for scaling the y value of the dataset. The default is ``1``. 
+ + Returns + ------- + :class:`pyaedt.modules.Boundary.PieceWiseLinearDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. + + """ + return self.__create_dataset_assignment("Temp Dep", ds_name, scale) + + @pyaedt_function_handler + def create_dataset_transient_assignment(self, ds_name, scale=1): + """ + Create a transient assignment from a dataset. + + Parameters + ---------- + ds_name : str + Name of the dataset. + scale : float or str, optional + Value for scaling the y value of the dataset. The default is ``1``. + + Returns + ------- + :class:`pyaedt.modules.Boundary.PieceWiseLinearDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. + + """ + return self.__create_dataset_assignment("Transient", ds_name, scale) + + @pyaedt_function_handler + def create_linear_transient_assignment(self, intercept, slope): + """ + Create an object to assign the linear transient condition to. + + This method applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*t^c`` + + Parameters + ---------- + intercept : str + Value of the assignment condition at the initial time, which + corresponds to the coefficient ``a`` in the formula. + coefficient : str + Coefficient that multiplies the power term, which + corresponds to the coefficient ``b`` in the formula. + scaling_exponent : str + Exponent of the power term, which. + corresponds to the coefficient ``c`` in the formula. + + Returns + ------- + :class:`pyaedt.modules.Boundary.LinearDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. + """ + return LinearDictionary(intercept, slope) + + @pyaedt_function_handler + def create_powerlaw_transient_assignment(self, intercept, coefficient, scaling_exponent): + """ + Create an object to assign the power law transient condition to. 
+ + This method applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*t^c`` + + Parameters + ---------- + intercept : str + Value of the assignment condition at the initial time, which + corresponds to the coefficient ``a`` in the formula. + coefficient : str + Coefficient that multiplies the power term, which + corresponds to the coefficient ``b`` in the formula. + scaling_exponent : str + Exponent of the power term, which + corresponds to the coefficient ``c`` in the formula. + + Returns + ------- + :class:`pyaedt.modules.Boundary.PowerLawDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. + """ + return PowerLawDictionary(intercept, coefficient, scaling_exponent) + + @pyaedt_function_handler + def create_exponential_transient_assignment(self, vertical_offset, coefficient, exponent_coefficient): + """ + Create an object to assign the exponential transient condition to. + + This method applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*exp(c*t)`` + + Parameters + ---------- + vertical_offset : str + Vertical offset summed to the exponential law, which + corresponds to the coefficient ``a`` in the formula. + coefficient : str + Coefficient that multiplies the exponential term, which + corresponds to the coefficient ``b`` in the formula. + exponent_coefficient : str + Coefficient in the exponential term, which + corresponds to the coefficient ``c`` in the formula. + + Returns + ------- + :class:`pyaedt.modules.Boundary.ExponentialDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. + """ + return ExponentialDictionary(vertical_offset, coefficient, exponent_coefficient) + + @pyaedt_function_handler + def create_sinusoidal_transient_assignment(self, vertical_offset, vertical_scaling, period, period_offset): + """ + Create an object to assign the sinusoidal transient condition to. 
+ + This method applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*sin(2*pi(t-t0)/T)`` + + Parameters + ---------- + vertical_offset : str + Vertical offset summed to the sinusoidal law, which + corresponds to the coefficient ``a`` in the formula. + vertical_scaling : str + Coefficient that multiplies the sinusoidal term, which + corresponds to the coefficient ``b`` in the formula. + period : str + Period of the sinusoid, which + corresponds to the coefficient ``T`` in the formula. + period_offset : str + Offset of the sinusoid, which corresponds to the coefficient ``t0`` in the formula. + + Returns + ------- + :class:`pyaedt.modules.Boundary.SinusoidalDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. + """ + return SinusoidalDictionary(vertical_offset, vertical_scaling, period, period_offset) + + @pyaedt_function_handler + def create_square_wave_transient_assignment(self, on_value, initial_time_off, on_time, off_time, off_value): + """ + Create an object to assign the square wave transient condition to. + + Parameters + ---------- + on_value : str + Maximum value of the square wave. + initial_time_off : str + Time after which the square wave assignment starts. + on_time : str + Time for which the square wave keeps the maximum value during one period. + off_time : str + Time for which the square wave keeps the minimum value during one period. + off_value : str + Minimum value of the square wave. + + Returns + ------- + :class:`pyaedt.modules.Boundary.SquareWaveDictionary` + Boundary dictionary object that can be passed to boundary condition assignment functions. 
+ """ + return SquareWaveDictionary(on_value, initial_time_off, on_time, off_time, off_value) diff --git a/pyaedt/modules/Boundary.py b/pyaedt/modules/Boundary.py index e740638b7cb..f3c911199a3 100644 --- a/pyaedt/modules/Boundary.py +++ b/pyaedt/modules/Boundary.py @@ -1,7 +1,8 @@ """ -This module contains these classes: `BoundaryCommon` and `BoundaryObject`. +This module contains these classes: ``BoundaryCommon`` and ``BoundaryObject``. """ +from abc import abstractmethod from collections import OrderedDict import copy import re @@ -4520,3 +4521,232 @@ def _create_boundary(bound): raise Exception except Exception: # pragma: no cover return None + + +class BoundaryDictionary: + """ + Handles Icepak transient and temperature-dependent boundary condition assignments. + + Parameters + ---------- + assignment_type : str + Type of assignment represented by the class. Options are `"Temp Dep"`` + and ``"Transient"``. + function_type : str + Variation function to assign. If ``assignment_type=="Temp Dep"``, + the function can only be ``"Piecewise Linear"``. Otherwise, the function can be + ``"Exponential"``, ``"Linear"``, ``"Piecewise Linear"``, ``"Power Law"``, + ``"Sinusoidal"``, and ``"Square Wave"``. 
+ """ + + def __init__(self, assignment_type, function_type): + if assignment_type not in ["Temp Dep", "Transient"]: # pragma : no cover + raise AttributeError("The argument {} for ``assignment_type`` is not valid.".format(assignment_type)) + if assignment_type == "Temp Dep" and function_type != "Piecewise Linear": # pragma : no cover + raise AttributeError( + "Temperature dependent assignments only support" + ' ``"Piecewise Linear"`` as ``function_type`` argument.'.format(assignment_type) + ) + self.assignment_type = assignment_type + self.function_type = function_type + + @property + def props(self): + return { + "Type": self.assignment_type, + "Function": self.function_type, + "Values": self._parse_value(), + } + + @abstractmethod + def _parse_value(self): + pass # pragma : no cover + + @pyaedt_function_handler + def __getitem__(self, k): + return self.props.get(k) + + +class LinearDictionary(BoundaryDictionary): + """ + Manages linear conditions assignments, which are children of the ``BoundaryDictionary`` class. + + This class applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*t`` + + Parameters + ---------- + intercept : str + Value of the assignment condition at the initial time, which + corresponds to the coefficient ``a`` in the formula. + slope : str + Slope of the assignment condition, which + corresponds to the coefficient ``b`` in the formula. + """ + + def __init__(self, intercept, slope): + super().__init__("Transient", "Linear") + self.intercept = intercept + self.slope = slope + + @pyaedt_function_handler + def _parse_value(self): + return [self.slope, self.intercept] + + +class PowerLawDictionary(BoundaryDictionary): + """ + Manages power law condition assignments, which are children of the ``BoundaryDictionary`` class. 
+ + This class applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*t^c`` + + Parameters + ---------- + intercept : str + Value of the assignment condition at the initial time, which + corresponds to the coefficient ``a`` in the formula. + coefficient : str + Coefficient that multiplies the power term, which + corresponds to the coefficient ``b`` in the formula. + scaling_exponent : str + Exponent of the power term, which + corresponds to the coefficient ``c`` in the formula. + """ + + def __init__(self, intercept, coefficient, scaling_exponent): + super().__init__("Transient", "Power Law") + self.intercept = intercept + self.coefficient = coefficient + self.scaling_exponent = scaling_exponent + + @pyaedt_function_handler + def _parse_value(self): + return [self.intercept, self.coefficient, self.scaling_exponent] + + +class ExponentialDictionary(BoundaryDictionary): + """ + Manages exponential condition assignments, which are children of the ``BoundaryDictionary`` class. + + This class applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*exp(c*t)`` + + Parameters + ---------- + vertical_offset : str + Vertical offset summed to the exponential law, which + corresponds to the coefficient ``a`` in the formula. + coefficient : str + Coefficient that multiplies the exponential term, which + corresponds to the coefficient ``b`` in the formula. + exponent_coefficient : str + Coefficient in the exponential term, which + corresponds to the coefficient ``c`` in the formula. 
+ """ + + def __init__(self, vertical_offset, coefficient, exponent_coefficient): + super().__init__("Transient", "Exponential") + self.vertical_offset = vertical_offset + self.coefficient = coefficient + self.exponent_coefficient = exponent_coefficient + + @pyaedt_function_handler + def _parse_value(self): + return [self.vertical_offset, self.coefficient, self.exponent_coefficient] + + +class SinusoidalDictionary(BoundaryDictionary): + """ + Manages sinusoidal condition assignments, which are children of the ``BoundaryDictionary`` class. + + This class applies a condition ``y`` dependent on the time ``t``: + ``y=a+b*sin(2*pi(t-t0)/T)`` + + Parameters + ---------- + vertical_offset : str + Vertical offset summed to the sinusoidal law, which + corresponds to the coefficient ``a`` in the formula. + vertical_scaling : str + Coefficient that multiplies the sinusoidal term, which + corresponds to the coefficient ``b`` in the formula. + period : str + Period of the sinusoid, which + corresponds to the coefficient ``T`` in the formula. + period_offset : str + Offset of the sinusoid, which + corresponds to the coefficient ``t0`` in the formula. + """ + + def __init__(self, vertical_offset, vertical_scaling, period, period_offset): + super().__init__("Transient", "Sinusoidal") + self.vertical_offset = vertical_offset + self.vertical_scaling = vertical_scaling + self.period = period + self.period_offset = period_offset + + @pyaedt_function_handler + def _parse_value(self): + return [self.vertical_offset, self.vertical_scaling, self.period, self.period_offset] + + +class SquareWaveDictionary(BoundaryDictionary): + """ + Manages square wave condition assignments, which are children of the ``BoundaryDictionary`` class. + + Parameters + ---------- + on_value : str + Maximum value of the square wave. + initial_time_off : str + Time after which the square wave assignment starts. + on_time : str + Time for which the square wave keeps the maximum value during one period. 
+ off_time : str + Time for which the square wave keeps the minimum value during one period. + off_value : str + Minimum value of the square wave. + """ + + def __init__(self, on_value, initial_time_off, on_time, off_time, off_value): + super().__init__("Transient", "Square Wave") + self.on_value = on_value + self.initial_time_off = initial_time_off + self.on_time = on_time + self.off_time = off_time + self.off_value = off_value + + @pyaedt_function_handler + def _parse_value(self): + return [self.on_value, self.initial_time_off, self.on_time, self.off_time, self.off_value] + + +class PieceWiseLinearDictionary(BoundaryDictionary): + """ + Manages dataset condition assignments, which are children of the ``BoundaryDictionary`` class. + + Parameters + ---------- + assignment_type : str + Type of assignment represented by the class. + Options are ``"Temp Dep"`` and ``"Transient"``. + ds : str + Dataset name to assign. + scale : str + Scaling factor for the y values of the dataset. + """ + + def __init__(self, assignment_type, ds, scale): + super().__init__(assignment_type, "Piecewise Linear") + self.scale = scale + self._assignment_type = assignment_type + self.dataset = ds + + @pyaedt_function_handler + def _parse_value(self): + return [self.scale, self.dataset.name] + + @property + def dataset_name(self): + return self.dataset.name From 70abfd2f0d1a64981ab36e7b452f788fd3b3de73 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 11 Mar 2024 10:36:26 +0100 Subject: [PATCH 03/36] MAINT: Update pyedb dependecy In order to have both pyaedt and pyedb in pyansys metapackage, we need more flexibility. 
The proprosed changes consist in: - freezing pyedb version for python 3.7; - freezing pyedb version to the latest compatible when testing; - allowing flexibility when installing pyaedt to retrieve any patch version in between 0.5 (included) and 0.6 (excluded) Note: This should be working well with dependabot and we might need to update pyedb to remove python version 3.7 compatibility to avoid unwanted version upgrades. --- pyproject.toml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index b413582bb00..08a456f8d62 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,8 @@ dependencies = [ "fpdf2", "jsonschema", "pytomlpp; python_version < '3.12'", - "pyedb==0.5.2" + "pyedb==0.4.2; python_version == '3.7'", + "pyedb>=0.5.0,<0.6; python_version > '3.7'", ] [project.optional-dependencies] @@ -51,6 +52,7 @@ tests = [ "pandas==1.3.5; python_version == '3.7'", "pandas==2.0.3; python_version == '3.8'", "pandas==2.2.1; python_version > '3.9'", + "pyedb==0.5.2; python_version > '3.7'", "pytest==8.0.2", "pytest-cov==4.1.0", "pytest-xdist==3.5.0", From 9a07e09f364d2382a699c538f71641f6f259c02b Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 11 Mar 2024 10:42:09 +0100 Subject: [PATCH 04/36] MAINT: Add latest version when testing --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 08a456f8d62..242f65fe4be 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -53,6 +53,7 @@ tests = [ "pandas==2.0.3; python_version == '3.8'", "pandas==2.2.1; python_version > '3.9'", "pyedb==0.5.2; python_version > '3.7'", + "pyedb==0.4.2; python_version == '3.7'", "pytest==8.0.2", "pytest-cov==4.1.0", "pytest-xdist==3.5.0", From 2ce904df6cb24abc4e7e846b0729ccbdd3e3dcc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 10:05:39 +0000 Subject: [PATCH 05/36] MAINT: Bump softprops/action-gh-release from 1 
to 2 (#4347) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Maxime Rey <87315832+MaxJPRey@users.noreply.github.com> --- .github/workflows/wheelhouse.yml | 2 +- .github/workflows/wheelhouse_linux.yml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/wheelhouse.yml b/.github/workflows/wheelhouse.yml index 702ec02b7e3..600b1c73e9c 100644 --- a/.github/workflows/wheelhouse.yml +++ b/.github/workflows/wheelhouse.yml @@ -82,7 +82,7 @@ jobs: retention-days: 7 - name: Release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') with: generate_release_notes: true diff --git a/.github/workflows/wheelhouse_linux.yml b/.github/workflows/wheelhouse_linux.yml index 497e2e84ee4..67458e53f95 100644 --- a/.github/workflows/wheelhouse_linux.yml +++ b/.github/workflows/wheelhouse_linux.yml @@ -81,7 +81,7 @@ jobs: retention-days: 7 - name: Release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') with: generate_release_notes: true From 1202ce8f08c98b0cfefe458f019effcc2f477713 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Morais?= <146729917+SMoraisAnsys@users.noreply.github.com> Date: Mon, 11 Mar 2024 14:14:52 +0100 Subject: [PATCH 06/36] MAINT: Leverage flexibility in test dependency --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 242f65fe4be..ebc0bd97dac 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -52,8 +52,8 @@ tests = [ "pandas==1.3.5; python_version == '3.7'", "pandas==2.0.3; python_version == '3.8'", "pandas==2.2.1; python_version > '3.9'", - "pyedb==0.5.2; python_version > '3.7'", "pyedb==0.4.2; python_version == '3.7'", + "pyedb>=0.5.0,<0.6; python_version > '3.7'", 
"pytest==8.0.2", "pytest-cov==4.1.0", "pytest-xdist==3.5.0", From 76c08f17183f4cc4936fc5848cd74e3da88635a2 Mon Sep 17 00:00:00 2001 From: gmalinve <103059376+gmalinve@users.noreply.github.com> Date: Mon, 11 Mar 2024 14:44:34 +0100 Subject: [PATCH 07/36] delete unclassified (#4349) --- _unittest/test_07_Object3D.py | 6 ++++++ pyaedt/modeler/cad/Primitives.py | 6 +++++- 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/_unittest/test_07_Object3D.py b/_unittest/test_07_Object3D.py index 439aa15bfe5..22ec8213312 100644 --- a/_unittest/test_07_Object3D.py +++ b/_unittest/test_07_Object3D.py @@ -537,6 +537,12 @@ def test_26_unclassified_object(self): self.aedtapp.modeler.oeditor.Intersect(vArg1, vArg2) assert box1 in self.aedtapp.modeler.unclassified_objects + def test_26a_delete_unclassified_object(self): + unclassified = self.aedtapp.modeler.unclassified_objects + assert self.aedtapp.modeler.delete(unclassified) + assert len(self.aedtapp.modeler.unclassified_objects) != unclassified + assert len(self.aedtapp.modeler.unclassified_objects) == 0 + def test_27_get_object_history_properties(self): box = self.aedtapp.modeler.create_box([10, 10, 10], [15, 15, 15], "box_history", matname="Copper") cylinder = self.aedtapp.modeler.create_cylinder( diff --git a/pyaedt/modeler/cad/Primitives.py b/pyaedt/modeler/cad/Primitives.py index eaffc3493ce..55e7a820748 100644 --- a/pyaedt/modeler/cad/Primitives.py +++ b/pyaedt/modeler/cad/Primitives.py @@ -6490,7 +6490,11 @@ def delete(self, objects=None): objects = self.object_names objects = self._modeler.convert_to_selections(objects, return_list=True) for el in objects: - if el not in self.object_names and not list(self.oeditor.GetObjectsInGroup(el)): + if ( + el not in self.object_names + and not list(self.oeditor.GetObjectsInGroup(el)) + and not self.oeditor.GetObjectsInGroup("Unclassified") + ): objects.remove(el) if not objects: self.logger.warning("No objects to delete") From e6b88c824a765bdfa3bf5e8dc0287bb583ba67de Mon 
Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 06:55:40 +0100 Subject: [PATCH 08/36] MAINT: Bump ipython from 8.16.1 to 8.22.2 (#4353) Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index ebc0bd97dac..58535231b94 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -38,7 +38,7 @@ dependencies = [ [project.optional-dependencies] tests = [ "ipython==8.13.0; python_version < '3.9'", - "ipython==8.16.1; python_version >= '3.9'", + "ipython==8.22.2; python_version >= '3.9'", "imageio==2.31.5", "joblib==1.3.2", "matplotlib==3.5.3; python_version == '3.7'", @@ -78,7 +78,7 @@ doc = [ "imageio==2.31.5", "imageio-ffmpeg==0.4.9", "ipython==8.13.0; python_version < '3.9'", - "ipython==8.16.1; python_version >= '3.9'", + "ipython==8.22.2; python_version >= '3.9'", "ipywidgets==8.1.1", "joblib==1.3.2", "jupyterlab==4.0.6", From 1dcf4431561b97e1a518d806c80189faaf371543 Mon Sep 17 00:00:00 2001 From: Massimo Capodiferro <77293250+maxcapodi78@users.noreply.github.com> Date: Tue, 12 Mar 2024 14:02:24 +0100 Subject: [PATCH 09/36] refactor error handler: (#4354) Co-authored-by: maxcapodi78 Co-authored-by: Samuel Lopez <85613111+Samuelopez-ansys@users.noreply.github.com> --- pyaedt/generic/general_methods.py | 65 ++++++++++++++----------------- 1 file changed, 29 insertions(+), 36 deletions(-) diff --git a/pyaedt/generic/general_methods.py b/pyaedt/generic/general_methods.py index 547b118c354..9bcd6fb1ea6 100644 --- a/pyaedt/generic/general_methods.py +++ b/pyaedt/generic/general_methods.py @@ -93,23 +93,39 @@ def _exception(ex_info, func, args, kwargs, message="Type Error"): ------- """ - + header = "**************************************************************" + _write_mes(header) tb_data = ex_info[2] tb_trace = 
traceback.format_tb(tb_data) - _write_mes("{} on {}".format(message.upper(), func.__name__)) - try: - _write_mes(ex_info[1].args[0]) - except (IndexError, AttributeError): - pass + for trace in traceback.format_stack(): - if func.__name__ in trace: - for el in trace.split("\n"): - _write_mes(el) + exceptions = [ + "_exception", + "pydev", + "traceback", + "user_function", + "__Invoke", + "interactiveshell", + "async_helpers", + ] + if any(exc in trace for exc in exceptions): + continue + # if func.__name__ in trace: + for el in trace.split("\n"): + _write_mes(el) for trace in tb_trace: + if "user_function" in trace or "async_helpers" in trace: + continue tblist = trace.split("\n") for el in tblist: - if func.__name__ in el: - _write_mes(el) + # if func.__name__ in el: + _write_mes(el) + + _write_mes("{} on {}".format(message, func.__name__)) + # try: + # _write_mes(ex_info[1].args[0]) + # except (IndexError, AttributeError): + # pass message_to_print = "" messages = "" @@ -144,6 +160,7 @@ def _exception(ex_info, func, args, kwargs, message="Type Error"): "+".join(args) ) ) + _write_mes(header) def normalize_path(path_in, sep=None): @@ -190,30 +207,6 @@ def wrapper(*args, **kwargs): if settings.enable_debug_logger or settings.enable_debug_edb_logger: _log_method(user_function, args, kwargs) return out - except TypeError: - _exception(sys.exc_info(), user_function, args, kwargs, "Type Error") - return False - except ValueError: - _exception(sys.exc_info(), user_function, args, kwargs, "Value Error") - return False - except AttributeError: - _exception(sys.exc_info(), user_function, args, kwargs, "Attribute Error") - return False - except KeyError: - _exception(sys.exc_info(), user_function, args, kwargs, "Key Error") - return False - except IndexError: - _exception(sys.exc_info(), user_function, args, kwargs, "Index Error") - return False - except AssertionError: - _exception(sys.exc_info(), user_function, args, kwargs, "Assertion Error") - return False - except 
NameError: - _exception(sys.exc_info(), user_function, args, kwargs, "Name Error") - return False - except IOError: - _exception(sys.exc_info(), user_function, args, kwargs, "IO Error") - return False except MethodNotSupportedError: message = "This Method is not supported in current AEDT Design Type." if settings.enable_screen_logs: @@ -228,7 +221,7 @@ def wrapper(*args, **kwargs): _exception(sys.exc_info(), user_function, args, kwargs, "AEDT grpc API call Error") return False except BaseException: - _exception(sys.exc_info(), user_function, args, kwargs, "General or AEDT Error") + _exception(sys.exc_info(), user_function, args, kwargs, str(sys.exc_info()[1]).capitalize()) return False return wrapper From 1b4105d4932e353f217238f415290f25b5366e72 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Morais?= <146729917+SMoraisAnsys@users.noreply.github.com> Date: Tue, 12 Mar 2024 14:20:18 +0100 Subject: [PATCH 10/36] MAINT: Fix wrong version of pyedb For some reasons, pyedb version 0.4.2 was not published in pypi. Therefore, we must use 0.4.0 instead. 
--- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 58535231b94..a436e44504c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,7 +31,7 @@ dependencies = [ "fpdf2", "jsonschema", "pytomlpp; python_version < '3.12'", - "pyedb==0.4.2; python_version == '3.7'", + "pyedb==0.4.0; python_version == '3.7'", "pyedb>=0.5.0,<0.6; python_version > '3.7'", ] @@ -52,7 +52,7 @@ tests = [ "pandas==1.3.5; python_version == '3.7'", "pandas==2.0.3; python_version == '3.8'", "pandas==2.2.1; python_version > '3.9'", - "pyedb==0.4.2; python_version == '3.7'", + "pyedb==0.4.0; python_version == '3.7'", "pyedb>=0.5.0,<0.6; python_version > '3.7'", "pytest==8.0.2", "pytest-cov==4.1.0", From f8ecd84d3c64de0534f23542a591b4018cf67547 Mon Sep 17 00:00:00 2001 From: Maxime Rey <87315832+MaxJPRey@users.noreply.github.com> Date: Thu, 14 Mar 2024 16:04:22 +0530 Subject: [PATCH 11/36] Remove the exception details -as e- when not needed. (#4361) --- pyaedt/icepak.py | 2 +- pyaedt/modeler/cad/Primitives.py | 2 +- pyaedt/modeler/cad/elements3d.py | 2 +- pyaedt/modeler/cad/polylines.py | 2 +- pyaedt/modules/Boundary.py | 2 +- pyaedt/modules/MaterialLib.py | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/pyaedt/icepak.py b/pyaedt/icepak.py index f8608ea8c7f..75362d3937b 100644 --- a/pyaedt/icepak.py +++ b/pyaedt/icepak.py @@ -2080,7 +2080,7 @@ def export_summary( ) arg.append("Calculation:=") arg.append([type, geometry_type, el, quantity, "", "Default"]) - except Exception as e: + except Exception: self.logger.warning("Object " + el + " not added.") if not output_dir: output_dir = self.working_directory diff --git a/pyaedt/modeler/cad/Primitives.py b/pyaedt/modeler/cad/Primitives.py index 55e7a820748..c964df95bae 100644 --- a/pyaedt/modeler/cad/Primitives.py +++ b/pyaedt/modeler/cad/Primitives.py @@ -549,7 +549,7 @@ def user_defined_component_names(self): if value not in new_obs3d: 
new_obs3d.append(value) - except Exception as e: + except Exception: new_obs3d = [] return new_obs3d diff --git a/pyaedt/modeler/cad/elements3d.py b/pyaedt/modeler/cad/elements3d.py index ed48ed9c3ce..82282569e4b 100644 --- a/pyaedt/modeler/cad/elements3d.py +++ b/pyaedt/modeler/cad/elements3d.py @@ -229,7 +229,7 @@ def position(self): vertex_data = list(self.oeditor.GetVertexPosition(self.id)) self._position = [float(i) for i in vertex_data] return self._position - except Exception as e: + except Exception: return None def __str__(self): diff --git a/pyaedt/modeler/cad/polylines.py b/pyaedt/modeler/cad/polylines.py index 69e4395fcc6..4dead96685e 100644 --- a/pyaedt/modeler/cad/polylines.py +++ b/pyaedt/modeler/cad/polylines.py @@ -533,7 +533,7 @@ def _point_segment_string_array(self): break else: current_segment = segment_types[vertex_count] - except Exception as e: + except Exception: raise IndexError("Number of segments inconsistent with the number of points!") if current_segment: diff --git a/pyaedt/modules/Boundary.py b/pyaedt/modules/Boundary.py index f3c911199a3..0d9dd231e7f 100644 --- a/pyaedt/modules/Boundary.py +++ b/pyaedt/modules/Boundary.py @@ -274,7 +274,7 @@ def create(self): try: a = [i for i in self._app.excitations if i not in names] self.excitation_name = a[0].split(":")[0] - except Exception as e: + except Exception: self.excitation_name = self.name return True diff --git a/pyaedt/modules/MaterialLib.py b/pyaedt/modules/MaterialLib.py index 95a10098e90..262d4644310 100644 --- a/pyaedt/modules/MaterialLib.py +++ b/pyaedt/modules/MaterialLib.py @@ -650,7 +650,7 @@ def _load_from_project(self): if el not in list(self.material_keys.keys()): try: self._aedmattolibrary(el) - except Exception as e: + except Exception: self.logger.info("aedmattolibrary failed for material %s", el) @pyaedt_function_handler() From d9d67ab6970d73e72f28a53fba885cdefe6f4f66 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 18 Mar 2024 11:15:39 +0100 Subject: [PATCH 
12/36] MAINT: Update dependencies with ranges --- pyproject.toml | 194 ++++++++++++++++++++++++------------------------- 1 file changed, 94 insertions(+), 100 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index a436e44504c..0a6f55e819f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -26,131 +26,125 @@ classifiers = [ ] dependencies = [ - "rpyc==6.0.0", - "psutil", "fpdf2", "jsonschema", - "pytomlpp; python_version < '3.12'", - "pyedb==0.4.0; python_version == '3.7'", + "psutil", + "pyedb>=0.4.0,<0.5; python_version == '3.7'", "pyedb>=0.5.0,<0.6; python_version > '3.7'", + "pytomlpp; python_version < '3.12'", + "rpyc>=6.0.0,<6.1", ] [project.optional-dependencies] tests = [ - "ipython==8.13.0; python_version < '3.9'", - "ipython==8.22.2; python_version >= '3.9'", - "imageio==2.31.5", - "joblib==1.3.2", - "matplotlib==3.5.3; python_version == '3.7'", - "matplotlib==3.7.3; python_version == '3.8'", - "matplotlib==3.8.3; python_version > '3.8'", - "numpy==1.21.6; python_version <= '3.9'", - "numpy==1.26.4; python_version > '3.9'", - "mock", - "openpyxl==3.1.2", - "osmnx", - "pandas==1.3.5; python_version == '3.7'", - "pandas==2.0.3; python_version == '3.8'", - "pandas==2.2.1; python_version > '3.9'", - "pyedb==0.4.0; python_version == '3.7'", + "imageio>=2.30.0,<2.34", + "ipython>=7.30.0,<8.23", + "joblib>=1.0.0,<1.4", + "matplotlib>=3.5.0,<3.9", + "mock>=5.1.0,<5.2", + "numpy>=1.20.0,<2", + "openpyxl>=3.1.0,<3.3", + "osmnx>=1.1.0,<1.10", + "pandas>=1.1.0,<2.3", + "pytest>=7.4.0,<8.2", + "pytest-cov>=4.0.0,<4.2", + "pytest-xdist>=3.5.0,<3.6", + "pyedb>=0.4.0,<0.5; python_version == '3.7'", "pyedb>=0.5.0,<0.6; python_version > '3.7'", - "pytest==8.0.2", - "pytest-cov==4.1.0", - "pytest-xdist==3.5.0", - "vtk==9.2.6", - "pyvista==0.38.0; python_version <= '3.7'", - "pyvista==0.43.3; python_version > '3.7'", - "scikit-learn==1.3.0; python_version == '3.7'", - "scikit-learn==1.3.1; python_version > '3.7'", + "pyvista>=0.38.0,<0.44", + 
"scikit-learn>=1.0.0,<1.5", + "scikit-rf>=0.30.0,<0.33", "SRTM.py", "utm", - "scikit-rf==0.31.0", + "vtk==9.2.6", ] dotnet = [ - "cffi == 1.15.1;platform_system=='Linux' and python_version == '3.7'", - "cffi == 1.16.0;platform_system=='Linux' and python_version > '3.7'", - "pywin32 >= 303;platform_system=='Windows'", "ansys-pythonnet>=3.1.0rc3", - "dotnetcore2 ==3.1.23;platform_system=='Linux'", + "cffi==1.15.1; platform_system=='Linux' and python_version == '3.7'", + "cffi>=1.16.0,<1.17; platform_system=='Linux' and python_version > '3.7'", + "dotnetcore2==3.1.23; platform_system=='Linux'", + "pywin32>=303; platform_system=='Windows'", ] doc = [ - "ansys-sphinx-theme==0.14.0", - "imageio==2.31.5", - "imageio-ffmpeg==0.4.9", - "ipython==8.13.0; python_version < '3.9'", - "ipython==8.22.2; python_version >= '3.9'", - "ipywidgets==8.1.1", - "joblib==1.3.2", - "jupyterlab==4.0.6", - "matplotlib==3.5.3; python_version == '3.7'", - "matplotlib==3.7.3; python_version == '3.8'", - "matplotlib==3.8.3; python_version > '3.8'", - "nbsphinx==0.9.3", - "numpydoc==1.5.0; python_version == '3.7'", - "numpydoc==1.6.0; python_version > '3.7'", - "osmnx", - "pypandoc==1.13", - "pytest-sphinx==0.5.0", - "vtk==9.2.6", - "pyvista==0.43.3; python_version > '3.7'", - "pyvista==0.38.0; python_version <= '3.7'", - "recommonmark==0.7.1", - "scikit-learn==1.3.0; python_version == '3.7'", - "scikit-learn==1.3.1; python_version > '3.7'", - "Sphinx==7.1.2; python_version <= '3.9'", - "Sphinx==7.2.6; python_version >= '3.9'", - "sphinx-autobuild==2024.2.4", - "sphinx-autodoc-typehints==1.24.0", - "sphinx-copybutton==0.5.2", - "sphinx-gallery==0.14.0", - "sphinx-notfound-page==1.0.0", - "sphinxcontrib-websupport==1.2.4; python_version <= '3.9'", - "sphinxcontrib-websupport==1.2.5; python_version <= '3.7'", + "ansys-sphinx-theme>=0.10.0,<0.15", + "imageio>=2.30.0,<2.35", + #"imageio-ffmpeg>=0.4.0,<0.5", + "ipython>=7.34.0; python_version == '3.7'", + "ipython>=8.13.0<8.23; python_version > 
'3.7'", + #"ipywidgets>=8.0.0,<8.2", + "joblib>=1.3.0,<1.4", + "jupyterlab>=4.0.0,<4.3", + "matplotlib>=3.5.0,<3.9", + "nbsphinx>=0.9.0,<0.10", + "numpydoc>=1.5.0,<1.7", + "openpyxl>=3.0.0,<3.2", + "osmnx>=1.1.0,<1.10", + "pypandoc>=1.10.0,<1.14", + #"pytest-sphinx", + "pyvista>=0.38.0,<0.44", + #"recommonmark", + #"scikit-learn", + "scikit-rf>=0.30.0,<0.33", + "Sphinx==5.3.0; python_version == '3.7'", + "Sphinx>=7.1.0,<7.3; python_version > '3.7'", + "sphinx-autobuild==2021.3.14; python_version == '3.7'", + "sphinx-autobuild==2021.3.14; python_version == '3.8'", + "sphinx-autobuild==2024.2.4; python_version > '3.8'", + #"sphinx-autodoc-typehints", + "sphinx-copybutton>=0.5.0,<0.6", + "sphinx-gallery>=0.14.0,<0.16", + "sphinx-jinja>=2.0,<2.1", + #"sphinx-notfound-page", + "sphinx_design>=0.4.0,<0.6", + #"sphinxcontrib-websupport", "SRTM.py", "utm", - "scikit-rf==0.31.0", - "openpyxl==3.1.2", - "sphinx_design", - "sphinx_jinja", + "vtk==9.2.6", +] +doc-noexamples = [ + "ansys-sphinx-theme>=0.10.0,<0.15", + "imageio>=2.30.0,<2.35", + #"imageio-ffmpeg", + "numpydoc>=1.5.0,<1.7", + # "recommonmark", + "Sphinx==5.3.0; python_version == '3.7'", + "Sphinx>=7.1.0,<7.3; python_version > '3.7'", + "sphinx-autobuild==2021.3.14; python_version == '3.7'", + "sphinx-autobuild==2021.3.14; python_version == '3.8'", + "sphinx-autobuild==2024.2.4; python_version > '3.8'", + #"sphinx-autodoc-typehints", + "sphinx-copybutton>=0.5.0,<0.6", + "sphinx-gallery>=0.14.0,<0.16", + #"sphinx-notfound-page", + #"sphinxcontrib-websupport", + "sphinx_design>=0.4.0,<0.6", + "sphinx-jinja>=2.0,<2.1", ] full = [ - "imageio", - "matplotlib==3.5.3; python_version == '3.7'", - "matplotlib==3.7.3; python_version == '3.8'", - "matplotlib==3.8.3; python_version > '3.8'", - "numpy==1.21.6; python_version <= '3.9'", - "numpy==1.26.4; python_version > '3.9'", - "pandas==1.3.5; python_version == '3.7'", - "pandas==2.0.3; python_version == '3.8'", - "pandas==2.2.1; python_version > '3.9'", - "osmnx", - 
"vtk==9.2.6", - "pyvista==0.43.3; python_version > '3.7'", - "pyvista==0.38.0; python_version <= '3.7'", + "imageio>=2.30.0,<2.35", + "matplotlib>=3.5.0,<3.9", + "numpy>=1.20.0,<2", + "openpyxl>=3.1.0,<3.3", + "osmnx>=1.1.0,<1.10", + "pandas>=1.1.0,<2.3", + "pyvista>=0.38.0,<0.44", + "scikit-rf>=0.30.0,<0.33", "SRTM.py", "utm", - "scikit-rf==0.31.0", - "openpyxl==3.1.2", + "vtk==9.2.6", ] all = [ - "imageio", - "matplotlib==3.5.3; python_version == '3.7'", - "matplotlib==3.7.3; python_version == '3.8'", - "matplotlib==3.8.3; python_version > '3.8'", - "numpy==1.21.6; python_version <= '3.9'", - "numpy==1.26.4; python_version > '3.9'", - "pandas==1.3.5; python_version == '3.7'", - "pandas==2.0.3; python_version == '3.9'", - "pandas==2.0.3; python_version == '3.8'", - "pandas==2.2.1; python_version > '3.9'", - "osmnx", - "vtk==9.2.6", - "pyvista==0.43.3; python_version > '3.7'", - "pyvista==0.38.0; python_version <= '3.7'", + "imageio>=2.30.0,<2.35", + "matplotlib>=3.5.0,<3.9", + "numpy>=1.20.0,<2", + "openpyxl>=3.1.0,<3.3", + "osmnx>=1.1.0,<1.10", + "pandas>=1.1.0,<2.3", + "pyvista>=0.38.0,<0.44", + "scikit-rf>=0.30.0,<0.33", "SRTM.py", "utm", - "scikit-rf==0.31.0", - "openpyxl==3.1.2", + "vtk==9.2.6", ] [tool.flit.module] From 1ff4eefb6f2976a9addf5aee49313e8afe96e395 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 18 Mar 2024 14:46:37 +0100 Subject: [PATCH 13/36] CI: Update ansys license --- .github/workflows/cpython_linux.yml | 1 + .github/workflows/full_documentation.yml | 1 + .github/workflows/ironpython.yml | 2 ++ .github/workflows/nightly-docs.yml | 1 + .github/workflows/unit_tests.yml | 1 + 5 files changed, 6 insertions(+) diff --git a/.github/workflows/cpython_linux.yml b/.github/workflows/cpython_linux.yml index 7e8a316deae..5d9313e5765 100644 --- a/.github/workflows/cpython_linux.yml +++ b/.github/workflows/cpython_linux.yml @@ -1,6 +1,7 @@ name: Linux_CPython_UnitTests env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) 
}} python.version: '3.10' python.venv: 'testvenv' # Following env vars when changed will "reset" the mentioned cache, diff --git a/.github/workflows/full_documentation.yml b/.github/workflows/full_documentation.yml index afb1575f756..0a114fa3635 100644 --- a/.github/workflows/full_documentation.yml +++ b/.github/workflows/full_documentation.yml @@ -3,6 +3,7 @@ name: FullDocumentation env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} python.version: '3.10' python.venv: 'testvenv' DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' diff --git a/.github/workflows/ironpython.yml b/.github/workflows/ironpython.yml index c55245a41d7..0fb334beaaa 100644 --- a/.github/workflows/ironpython.yml +++ b/.github/workflows/ironpython.yml @@ -2,6 +2,8 @@ name: CI_Ironpython +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} # Controls when the workflow will run on: diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml index e3ce3a34e56..68031a4bc57 100644 --- a/.github/workflows/nightly-docs.yml +++ b/.github/workflows/nightly-docs.yml @@ -6,6 +6,7 @@ on: - cron: '0 4 * * *' env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml index 3601105a7ad..dd16afa66bf 100644 --- a/.github/workflows/unit_tests.yml +++ b/.github/workflows/unit_tests.yml @@ -1,6 +1,7 @@ name: CI env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} python.version: '3.10' python.venv: 'testvenv' # Following env vars when changed will "reset" the mentioned cache, From 1da8ed3de9c1d3220cf4a4a1799465f634750e0d Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 18 Mar 2024 11:23:25 +0100 Subject: [PATCH 14/36] DOC: remove 
deprecated extension recommonmark --- doc/source/conf.py | 1 - 1 file changed, 1 deletion(-) diff --git a/doc/source/conf.py b/doc/source/conf.py index a072c1cf724..93a99524d57 100644 --- a/doc/source/conf.py +++ b/doc/source/conf.py @@ -129,7 +129,6 @@ def setup(app): "sphinx_copybutton", "sphinx_design", "sphinx_jinja", - "recommonmark", "sphinx.ext.graphviz", "sphinx.ext.mathjax", "sphinx.ext.inheritance_diagram", From 26d2dcb2dd0f764bb5048fd7329d3aae1c6cc08e Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 18 Mar 2024 12:04:59 +0100 Subject: [PATCH 15/36] WIP: Test all python version --- .github/workflows/build_documentation.yml | 99 ------------ .github/workflows/cpython_linux.yml | 92 ----------- .github/workflows/full_documentation.yml | 155 ------------------ .github/workflows/ironpython.yml | 42 ----- .github/workflows/label.yml | 91 ----------- .github/workflows/nightly-docs.yml | 117 -------------- .github/workflows/test_python_version.yml | 54 +++++++ .github/workflows/unit_test_prerelease.yml | 89 ----------- .github/workflows/unit_tests.yml | 173 --------------------- .github/workflows/unit_tests_solvers.bkp | 103 ------------ .github/workflows/wheelhouse.yml | 90 ----------- .github/workflows/wheelhouse_linux.yml | 89 ----------- 12 files changed, 54 insertions(+), 1140 deletions(-) delete mode 100644 .github/workflows/build_documentation.yml delete mode 100644 .github/workflows/cpython_linux.yml delete mode 100644 .github/workflows/full_documentation.yml delete mode 100644 .github/workflows/ironpython.yml delete mode 100644 .github/workflows/label.yml delete mode 100644 .github/workflows/nightly-docs.yml create mode 100644 .github/workflows/test_python_version.yml delete mode 100644 .github/workflows/unit_test_prerelease.yml delete mode 100644 .github/workflows/unit_tests.yml delete mode 100644 .github/workflows/unit_tests_solvers.bkp delete mode 100644 .github/workflows/wheelhouse.yml delete mode 100644 
.github/workflows/wheelhouse_linux.yml diff --git a/.github/workflows/build_documentation.yml b/.github/workflows/build_documentation.yml deleted file mode 100644 index 5dfbb3d0d84..00000000000 --- a/.github/workflows/build_documentation.yml +++ /dev/null @@ -1,99 +0,0 @@ -name: Documentation Build - -on: [pull_request, workflow_dispatch] - -env: - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. - RESET_EXAMPLES_CACHE: 3 - RESET_DOC_BUILD_CACHE: 3 - RESET_AUTOSUMMARY_CACHE: 3 - - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - docs-style: - name: "Check documentation style" - runs-on: ubuntu-latest - steps: - - name: "Check documentation style" - uses: ansys/actions/doc-style@v5 - with: - token: ${{ secrets.GITHUB_TOKEN }} - vale-config: "doc/.vale.ini" - vale-version: "2.29.6" - - docs_build: - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v4 - - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Update pip - run: | - pip install --upgrade pip - - - name: Install pyaedt - run: | - pip install .[doc] - - - name: Verify pyaedt can be imported - run: python -c "import pyaedt" - - - name: Retrieve PyAEDT version - id: version - run: | - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - - # - name: Cache docs build directory - # uses: actions/cache@v3 - # with: - # path: doc/build - # key: doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }} - # 
restore-keys: | - # doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }} - # - name: Cache autosummary - # uses: actions/cache@v3 - # with: - # path: doc/source/**/_autosummary/*.rst - # key: autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }} - # restore-keys: | - # autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }} - - - name: Install doc build requirements - run: | - sudo apt install graphviz - - # run doc build, without creating the examples directory - # note that we have to add the examples file here since it won't - # be created as gallery is disabled on linux. - - name: Documentation Build - run: | - make -C doc clean - mkdir doc/source/examples -p - echo $'Examples\n========' > doc/source/examples/index.rst - make -C doc html SPHINXOPTS="-j auto -w build_errors.txt -N" - - # Verify that sphinx generates no warnings - - name: Check for warnings - run: | - python doc/print_errors.py - -# - name: Upload Documentation -# uses: actions/upload-artifact@v4 -# with: -# name: Documentation -# path: doc/_build/html -# retention-days: 7 diff --git a/.github/workflows/cpython_linux.yml b/.github/workflows/cpython_linux.yml deleted file mode 100644 index 5d9313e5765..00000000000 --- a/.github/workflows/cpython_linux.yml +++ /dev/null @@ -1,92 +0,0 @@ -name: Linux_CPython_UnitTests - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - python.version: '3.10' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT - - -on: - workflow_dispatch: - inputs: - logLevel: - description: 'Log level' - required: true - default: 'warning' - tags: - description: 'Linux CPython daily' - schedule: # UTC at 0100 - - cron: '0 1 * * *' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - test: - runs-on: [Linux, pyaedt] - strategy: - matrix: - python-version: [ '3.10' ] - steps: - - uses: actions/checkout@v3 - - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - architecture: 'x86' - - - name: 'Install pyaedt' - run: | - python -m venv .pyaedt_test_env - export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 - export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH - source .pyaedt_test_env/bin/activate - python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pip -U - python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org wheel setuptools -U - python -c "import sys; print(sys.executable)" - pip install .[tests] - pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pytest-azurepipelines - python -c "import pyaedt; print('Imported pyaedt')" - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 60 - command: | - export ANS_NODEPCHECK=1 - export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 - export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH - source .pyaedt_test_env/bin/activate - pytest --tx 6*popen --durations=50 --dist loadfile -v _unittest - - - name: 'Unit testing Solvers' - continue-on-error: true - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 60 - command: | - export 
ANS_NODEPCHECK=1 - export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 - export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH - source .pyaedt_test_env/bin/activate - pytest --tx 2*popen --durations=50 --dist loadfile -v _unittest_solvers - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results - path: junit/test-results.xml - if: ${{ always() }} diff --git a/.github/workflows/full_documentation.yml b/.github/workflows/full_documentation.yml deleted file mode 100644 index 0a114fa3635..00000000000 --- a/.github/workflows/full_documentation.yml +++ /dev/null @@ -1,155 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: FullDocumentation - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - python.version: '3.10' - python.venv: 'testvenv' - DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' - MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} - MEILISEARCH_HOST_URL: https://backend.search.pyansys.com - MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - v* - workflow_dispatch: - inputs: - logLevel: - description: 'Log level' - required: true - default: 'warning' - tags: - description: 'Test scenario tags' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - full_documentation: - # The type of runner that the job will run on - name: full_documentation - runs-on: [Windows, self-hosted, pyaedt] - timeout-minutes: 720 - strategy: - matrix: - python-version: ['3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - - name: Set up Python ${{ matrix.python-version 
}} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv\Scripts\Activate.ps1 - pip install .[doc] - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force - - - name: Retrieve PyAEDT version - id: version - run: | - testenv\Scripts\Activate.ps1 - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - - - name: Create HTML Documentations - run: | - testenv\Scripts\Activate.ps1 - sphinx-build -j auto --color -b html -a doc/source doc/_build/html - -# - name: Create PDF Documentations -# run: | -# testenv\Scripts\Activate.ps1 -# .\doc\make.bat pdf - - - name: Upload HTML documentation artifact - uses: actions/upload-artifact@v3 - with: - name: documentation-html - path: doc/_build/html - retention-days: 7 - -# - name: Upload PDF documentation artifact -# uses: actions/upload-artifact@v4 -# with: -# name: documentation-pdf -# path: doc/_build/pdf -# retention-days: 7 - -# - name: Release -# uses: softprops/action-gh-release@v1 -# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') -# with: -# generate_release_notes: true -# files: | -# doc/_build/pdf - - doc-deploy-stable: - name: Deploy stable documentation - runs-on: ubuntu-latest - needs: full_documentation - if: github.event_name == 'push' && contains(github.ref, 'refs/tags') - steps: - - name: Deploy the stable documentation - uses: ansys/actions/doc-deploy-stable@v4 - with: - cname: ${{ env.DOCUMENTATION_CNAME }} - token: ${{ secrets.GITHUB_TOKEN }} - python-version: ${{ matrix.python-version }} - - - 
doc-index-stable: - name: "Deploy stable docs index" - if: github.event_name == 'push' && contains(github.ref, 'refs/tags') - runs-on: ubuntu-latest - needs: doc-deploy-stable - - steps: - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: ${{ env.MAIN_PYTHON_VERSION }} - - - uses: actions/checkout@v4 - - - uses: actions/download-artifact@v3 - - - name: Display structure of downloaded files - run: ls -R - - - name: Install the package requirements - run: pip install -e . - - - name: Get the version to PyMeilisearch - run: | - VERSION=$(python -c "from pyaedt import __version__; print('.'.join(__version__.split('.')[:2]))") - VERSION_MEILI=$(python -c "from pyaedt import __version__; print('-'.join(__version__.split('.')[:2]))") - echo "Calculated VERSION: $VERSION" - echo "Calculated VERSION_MEILI: $VERSION_MEILI" - echo "VERSION=$VERSION" >> $GITHUB_ENV - echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV - - - name: "Deploy the stable documentation index for PyAEDT API" - uses: ansys/actions/doc-deploy-index@v4 - with: - cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }} - index-name: pyaedt-v${{ env.VERSION_MEILI }} - host-url: ${{ vars.MEILISEARCH_HOST_URL }} - api-key: ${{ env.MEILISEARCH_API_KEY }} diff --git a/.github/workflows/ironpython.yml b/.github/workflows/ironpython.yml deleted file mode 100644 index 0fb334beaaa..00000000000 --- a/.github/workflows/ironpython.yml +++ /dev/null @@ -1,42 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: CI_Ironpython - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - pull_request: - branches: [ main ] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel 
-jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - - name: 'Run Unit Tests in Ironpython' - timeout-minutes: 60 - run: | - $processA = start-process 'cmd' -ArgumentList '/c .\_unittest_ironpython\run_unittests_batchmode.cmd' -PassThru - $processA.WaitForExit() - get-content .\_unittest_ironpython\pyaedt_unit_test_ironpython.log - $test_errors_failures = Select-String -Path .\_unittest_ironpython\pyaedt_unit_test_ironpython.log -Pattern "TextTestResult errors=" - if ($test_errors_failures -ne $null) - { - exit 1 - } - else - { - exit 0 - } diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml deleted file mode 100644 index 596cace4c8f..00000000000 --- a/.github/workflows/label.yml +++ /dev/null @@ -1,91 +0,0 @@ -name: Labeler -on: - pull_request: - push: - branches: [ main ] - paths: - - '../labels.yml' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - - label-syncer: - name: Syncer - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - uses: micnncim/action-label-syncer@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - - labeler: - name: Set labels - needs: [label-syncer] - permissions: - contents: read - pull-requests: write - runs-on: ubuntu-latest - steps: - - # Label based on modified files - - name: Label based on changed files - uses: actions/labeler@v5 - with: - repo-token: "${{ secrets.GITHUB_TOKEN }}" - - # Label based on branch name - - uses: actions-ecosystem/action-add-labels@v1 - if: | - startsWith(github.event.pull_request.head.ref, 'doc') || - startsWith(github.event.pull_request.head.ref, 'docs') - with: - labels: documentation - - - uses: actions-ecosystem/action-add-labels@v1 - if: | - 
startsWith(github.event.pull_request.head.ref, 'maint') || - startsWith(github.event.pull_request.head.ref, 'no-ci') || - startsWith(github.event.pull_request.head.ref, 'ci') - with: - labels: maintenance - - - uses: actions-ecosystem/action-add-labels@v1 - if: startsWith(github.event.pull_request.head.ref, 'feat') - with: - labels: | - enhancement - - - uses: actions-ecosystem/action-add-labels@v1 - if: | - startsWith(github.event.pull_request.head.ref, 'fix') || - startsWith(github.event.pull_request.head.ref, 'patch') - with: - labels: bug - - - uses: actions-ecosystem/action-add-labels@v1 - if: | - startsWith(github.event.pull_request.head.ref, 'test') - with: - labels: testing - - commenter: - runs-on: ubuntu-latest - steps: - - name: Suggest to add labels - uses: peter-evans/create-or-update-comment@v4 - # Execute only when no labels have been applied to the pull request - if: toJSON(github.event.pull_request.labels.*.name) == '{}' - with: - issue-number: ${{ github.event.pull_request.number }} - body: | - Please add one of the following labels to add this contribution to the Release Notes :point_down: - - [bug](https://github.com/ansys/pyaedt/pulls?q=label%3Abug+) - - [documentation](https://github.com/ansys/pyaedt/pulls?q=label%3Adocumentation+) - - [enhancement](https://github.com/ansys/pyaedt/pulls?q=label%3Aenhancement+) - - [good first issue](https://github.com/ansys/pyaedt/pulls?q=label%3Agood+first+issue) - - [maintenance](https://github.com/ansys/pyaedt/pulls?q=label%3Amaintenance+) - - [release](https://github.com/ansys/pyaedt/pulls?q=label%3Arelease+) - - [testing](https://github.com/ansys/pyaedt/pulls?q=label%Atesting+) diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml deleted file mode 100644 index 68031a4bc57..00000000000 --- a/.github/workflows/nightly-docs.yml +++ /dev/null @@ -1,117 +0,0 @@ -name: Nightly Documentation Build - -on: - workflow_dispatch: - 
schedule: # UTC at 0400 - - cron: '0 4 * * *' - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' - MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} - MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - docs_build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Setup Python - uses: actions/setup-python@v5 - with: - python-version: "3.10" - - - name: Install pyaedt - run: | - pip install . - - - name: Install doc build requirements - run: | - pip install .[doc] - - - name: Full Documentation Build - run: | - make -C doc phtml - - - name: Upload documentation HTML artifact - uses: actions/upload-artifact@v4 - with: - name: documentation-html - path: doc/_build/html - retention-days: 7 - - - docs_upload: - needs: docs_build - runs-on: ubuntu-latest - steps: - - - name: Deploy development documentation - uses: ansys/actions/doc-deploy-dev@v4 - with: - cname: ${{ env.DOCUMENTATION_CNAME }} - token: ${{ secrets.GITHUB_TOKEN }} - - doc-index-dev: - name: "Deploy dev docs index" - runs-on: ubuntu-latest - needs: docs_upload - steps: - - uses: actions/checkout@v4 - - - uses: actions/download-artifact@v3 - - - name: Display structure of downloaded files - run: ls -R - - - name: "Deploy the dev documentation index for PyAEDT API" - uses: ansys/actions/doc-deploy-index@v4 - with: - cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev - index-name: pyaedt-vdev - host-url: ${{ vars.MEILISEARCH_HOST_URL }} - api-key: ${{ env.MEILISEARCH_API_KEY }} - - # docstring_testing: - # runs-on: Windows - - # steps: - # - uses: actions/checkout@v4 - - # - name: Setup Python - # uses: actions/setup-python@v2 - # with: - # python-version: 3.8 - - # - name: 'Create virtual env' - # run: | - # python -m venv testenv - # testenv\Scripts\Activate.ps1 - # python -m 
pip install pip -U - # python -m pip install wheel setuptools -U - # python -c "import sys; print(sys.executable)" - - # - name: 'Install pyaedt' - # run: | - # testenv\Scripts\Activate.ps1 - # pip install . --use-feature=in-tree-build - # cd _unittest - # python -c "import pyaedt; print('Imported pyaedt')" - - # - name: Install testing requirements - # run: | - # testenv\Scripts\Activate.ps1 - # pip install -r requirements/requirements_test.txt - # pip install pytest-azurepipelines - - # - name: Docstring testing - # run: | - # testenv\Scripts\Activate.ps1 - # pytest -v pyaedt/desktop.py pyaedt/icepak.py - # pytest -v pyaedt/desktop.py pyaedt/hfss.py diff --git a/.github/workflows/test_python_version.yml b/.github/workflows/test_python_version.yml new file mode 100644 index 00000000000..54bb88cfe02 --- /dev/null +++ b/.github/workflows/test_python_version.yml @@ -0,0 +1,54 @@ +name: Documentation Build + +on: [pull_request, workflow_dispatch] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + + smoke-tests-with-install-target: + name: Tests | Python ${{ matrix.python-version }} | Target ${{ matrix.install_target}}) + runs-on: [Windows, self-hosted, pyaedt] + strategy: + fail-fast: false + matrix: + python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] + steps: + - name: "Install Git and clone project" + uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + python -m venv .venv + .venv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: 'Install pyaedt' + run: | + .venv\Scripts\Activate.ps1 + pip install . 
+ pip install .[tests] + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force + python -c "import pyaedt; print('Imported pyaedt')" + + - name: 'Unit testing' + uses: nick-fields/retry@v3 + with: + max_attempts: 3 + retry_on: error + timeout_minutes: 40 + command: | + .venv\Scripts\Activate.ps1 + Set-Item -Path env:PYTHONMALLOC -Value "malloc" + pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers + diff --git a/.github/workflows/unit_test_prerelease.yml b/.github/workflows/unit_test_prerelease.yml deleted file mode 100644 index a15f6c2091c..00000000000 --- a/.github/workflows/unit_test_prerelease.yml +++ /dev/null @@ -1,89 +0,0 @@ -name: CI_PreRelease - -env: - python.version: '3.8' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - workflow_dispatch: - inputs: - logLevel: - description: 'Log level' - required: true - default: 'warning' - tags: - description: 'Linux CPython daily' - schedule: # UTC at 0300 - - cron: '0 3 * * *' - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [pre_release] - strategy: - matrix: - python-version: ['3.8'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv\Scripts\Activate.ps1 - pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force - Copy-Item -Path "C:\actions-runner\local_config.json" -Destination "_unittest" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - timeout-minutes: 60 - run: | - testenv\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest --tx 6*popen --durations=50 --dist loadfile -v --cov=pyaedt --cov-report=xml --junitxml=junit/test-results.xml --cov-report=html _unittest - - - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ 
secrets.CODECOV_TOKEN }} - if: matrix.python-version == '3.8' - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml deleted file mode 100644 index dd16afa66bf..00000000000 --- a/.github/workflows/unit_tests.yml +++ /dev/null @@ -1,173 +0,0 @@ -name: CI - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - python.version: '3.10' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it hasn't been deleted already. - # It applies 7 days retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - branches: - - main - pull_request: - branches: [ main ] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build_solvers: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - strategy: - matrix: - python-version: [ '3.10' ] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue - python -m venv testenv_s - testenv_s\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv_s\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 40 - command: | - testenv_s\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers - - - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-solver-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - - - build: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - strategy: - matrix: - python-version: ['3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 50 - command: | - testenv\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest -n 6 --dist loadfile --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest - - - uses: codecov/codecov-action@v4 - env: - CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - with: - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v4 - with: - name: pytest-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - - - name: 'Build and validate source distribution' - run: | - testenv\Scripts\Activate.ps1 - python -m pip install build twine - python -m build - python -m twine check dist/* - - - name: "Builds and uploads to PyPI" - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - run: | - testenv\Scripts\Activate.ps1 - python setup.py sdist - python -m pip install twine - python -m twine upload --skip-existing dist/* - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/unit_tests_solvers.bkp b/.github/workflows/unit_tests_solvers.bkp deleted file mode 100644 index 4d0691a5dab..00000000000 --- a/.github/workflows/unit_tests_solvers.bkp +++ /dev/null @@ -1,103 +0,0 @@ -name: CI_Solvers - -env: - python.version: '3.10' - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the 
cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it hasn't been deleted already. - # It applies 7 days retention policy by default. - RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - branches: - - main - pull_request: - branches: [ main ] - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [Windows, self-hosted, pyaedt] - strategy: - matrix: - python-version: ['3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - Remove-Item D:\Temp\* -Recurse -Force - python -m venv testenv_s - testenv_s\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - testenv_s\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - pip install pytest-azurepipelines - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force - mkdir tmp - cd tmp - python -c "import pyaedt; print('Imported pyaedt')" - - # - name: "Check licences of packages" - # uses: pyansys/pydpf-actions/check-licenses@v2.0 - - - name: 'Unit testing' - timeout-minutes: 40 - run: | - testenv_s\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers - - - uses: codecov/codecov-action@v3 - if: matrix.python-version == '3.10' - name: 'Upload coverage to Codecov' - - - name: Upload pytest test results - uses: actions/upload-artifact@v3 - with: - name: pytest-results - path: junit/test-results.xml - # Use always() to always run this step to publish test results when there are test failures - if: ${{ always() }} - - - name: 'Build and validate source distribution' - run: | - testenv_s\Scripts\Activate.ps1 - python -m pip install build twine - python -m build - python -m twine check dist/* - - - name: "Builds and uploads to PyPI" - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - run: | - testenv_s\Scripts\Activate.ps1 - python setup.py sdist - python -m pip install twine - python -m twine upload --skip-existing dist/* - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/wheelhouse.yml b/.github/workflows/wheelhouse.yml deleted file mode 100644 index 600b1c73e9c..00000000000 --- a/.github/workflows/wheelhouse.yml +++ /dev/null @@ -1,90 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: WheelHouse - -env: - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache, - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... 
- # You should go up in number, if you go down (or repeat a previous value) - # you might end up reusing a previous cache if it haven't been deleted already. - # It applies 7 days retention policy by default. - RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - - v* - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: [windows-latest] - strategy: - matrix: - python-version: [ 3.7, 3.8, 3.9, '3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - python -m venv testenv - testenv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - pip install .[all,dotnet] - pip install jupyterlab - - - - name: Retrieve PyAEDT version - run: | - testenv\Scripts\Activate.ps1 - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - id: version - - - name: Generate wheelhouse - run: | - testenv\Scripts\Activate.ps1 - $packages=$(pip freeze) - # Iterate over the packages and generate wheels - foreach ($package in $packages) { - echo "Generating wheel for $package" - pip wheel "$package" -w wheelhouse - } - - - name: Zip wheelhouse - uses: vimtor/action-zip@v1 - with: - files: wheelhouse - dest: ${{ 
env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip - - - name: Upload Wheelhouse - uses: actions/upload-artifact@v4 - with: - name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }} - path: '*.zip' - retention-days: 7 - - - name: Release - uses: softprops/action-gh-release@v2 - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - with: - generate_release_notes: true - files: | - ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip diff --git a/.github/workflows/wheelhouse_linux.yml b/.github/workflows/wheelhouse_linux.yml deleted file mode 100644 index 67458e53f95..00000000000 --- a/.github/workflows/wheelhouse_linux.yml +++ /dev/null @@ -1,89 +0,0 @@ -# This is a basic workflow to help you get started with Actions - -name: WheelHouse Linux - -env: - python.venv: 'testvenv' - # Following env vars when changed will "reset" the mentioned cache - # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... - # You should go up in number. If you go down (or repeat a previous value), - # you might end up reusing a previous cache if it hasn't been deleted already. - # It applies a 7-day retention policy by default. 
- RESET_PIP_CACHE: 0 - PACKAGE_NAME: PyAEDT -# Controls when the workflow will run -on: - # Triggers the workflow on push or pull request events but only for the main branch - push: - tags: - - 'v*' - - v* - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - # This workflow contains a single job called "build" - build: - # The type of runner that the job will run on - runs-on: ubuntu-20.04 - strategy: - matrix: - python-version: [ 3.7, 3.8, 3.9, '3.10'] - # Steps represent a sequence of tasks that will be executed as part of the job - steps: - - uses: actions/checkout@v4 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v5 - with: - python-version: ${{ matrix.python-version }} - - - name: Install pyaedt - run: | - pip install .[all,dotnet] - pip install jupyterlab - - - name: Verify pyaedt can be imported - run: python -c "import pyaedt" - - - name: Retrieve PyAEDT version - run: | - echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT - echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" - id: version - - - name: Generate wheelhouse - run: | - pip install wheel setuptools -U - pip install --upgrade pip - pip wheel . 
-w wheelhouse - export wheellist=$(pip freeze) - for file in $wheellist; do - if [[ $file != *"@"* ]] && [[ $file != *"pyaedt"* ]]; then - pip wheel $file -w wheelhouse - fi - done - continue-on-error: true - - - name: Zip wheelhouse - uses: vimtor/action-zip@v1 - with: - files: wheelhouse - dest: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip - - - name: Upload Wheelhouse - uses: actions/upload-artifact@v4 - with: - name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }} - path: '*.zip' - retention-days: 7 - - - name: Release - uses: softprops/action-gh-release@v2 - if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') - with: - generate_release_notes: true - files: | - ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip \ No newline at end of file From c885d0a0b7bfe6204a5cde6ec9723aa27259659a Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 18 Mar 2024 13:44:15 +0100 Subject: [PATCH 16/36] FIX: Wrong virtual env path --- .github/workflows/test_python_version.yml | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/test_python_version.yml b/.github/workflows/test_python_version.yml index 54bb88cfe02..e8605b0beae 100644 --- a/.github/workflows/test_python_version.yml +++ b/.github/workflows/test_python_version.yml @@ -1,4 +1,4 @@ -name: Documentation Build +name: Use test target with multiple python version on: [pull_request, workflow_dispatch] @@ -10,7 +10,7 @@ concurrency: jobs: smoke-tests-with-install-target: - name: Tests | Python ${{ matrix.python-version }} | Target ${{ matrix.install_target}}) + name: Tests | Python ${{ matrix.python-version }} runs-on: [Windows, self-hosted, pyaedt] strategy: fail-fast: false @@ -38,7 +38,7 @@ jobs: .venv\Scripts\Activate.ps1 
pip install . pip install .[tests] - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination ".venv\Lib\site-packages\vtkmodules" -Force python -c "import pyaedt; print('Imported pyaedt')" - name: 'Unit testing' From 982c8e513c725258b6cf5921c63220892e486e19 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?S=C3=A9bastien=20Morais?= <146729917+SMoraisAnsys@users.noreply.github.com> Date: Mon, 18 Mar 2024 16:09:54 +0000 Subject: [PATCH 17/36] MAINT: Remove install target full (#4368) --- README.md | 2 +- README_CN.md | 2 +- .../Resources/PyAEDTInstallerFromDesktop.py | 4 ++-- pyproject.toml | 19 ------------------- 4 files changed, 4 insertions(+), 23 deletions(-) diff --git a/README.md b/README.md index f34bff9d954..a4b5fbc0fed 100644 --- a/README.md +++ b/README.md @@ -45,7 +45,7 @@ You can install PyAEDT on CPython 3.7 through 3.10 from PyPI with this command: Install PyAEDT with all extra packages (matplotlib, numpy, pandas, pyvista): ```sh - pip install pyaedt[full] + pip install pyaedt[all] ``` You can also install PyAEDT from Conda-Forge with this command: diff --git a/README_CN.md b/README_CN.md index 3e620f70a8c..995e81647e1 100644 --- a/README_CN.md +++ b/README_CN.md @@ -52,7 +52,7 @@ ``` 4. 
如果你需要其他库来做后期处理,可以使用以下方法来安装它们: ```sh -pip install pyaedt[full] +pip install pyaedt[all] ``` ## 关于 PyAEDT diff --git a/doc/source/Resources/PyAEDTInstallerFromDesktop.py b/doc/source/Resources/PyAEDTInstallerFromDesktop.py index fa6157cb4c2..8513222d0d9 100644 --- a/doc/source/Resources/PyAEDTInstallerFromDesktop.py +++ b/doc/source/Resources/PyAEDTInstallerFromDesktop.py @@ -146,7 +146,7 @@ def install_pyaedt(): else: run_command('"{}" -m pip install --upgrade pip'.format(python_exe)) run_command('"{}" --default-timeout=1000 install wheel'.format(pip_exe)) - run_command('"{}" --default-timeout=1000 install pyaedt[full]'.format(pip_exe)) + run_command('"{}" --default-timeout=1000 install pyaedt[all]'.format(pip_exe)) # run_command('"{}" --default-timeout=1000 install git+https://github.com/ansys/pyaedt.git@main'.format(pip_exe)) run_command('"{}" --default-timeout=1000 install jupyterlab'.format(pip_exe)) run_command('"{}" --default-timeout=1000 install ipython -U'.format(pip_exe)) @@ -175,7 +175,7 @@ def install_pyaedt(): run_command('"{}" install --no-cache-dir --no-index --find-links={} pyaedt'.format(pip_exe, unzipped_path)) else: - run_command('"{}" --default-timeout=1000 install pyaedt[full]'.format(pip_exe)) + run_command('"{}" --default-timeout=1000 install pyaedt[all]'.format(pip_exe)) # if is_windows: # pyaedt_setup_script = "{}/Lib/site-packages/pyaedt/misc/aedtlib_personalib_install.py".format(venv_dir) diff --git a/pyproject.toml b/pyproject.toml index a436e44504c..9c9255480b9 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -113,25 +113,6 @@ doc = [ "sphinx_design", "sphinx_jinja", ] -full = [ - "imageio", - "matplotlib==3.5.3; python_version == '3.7'", - "matplotlib==3.7.3; python_version == '3.8'", - "matplotlib==3.8.3; python_version > '3.8'", - "numpy==1.21.6; python_version <= '3.9'", - "numpy==1.26.4; python_version > '3.9'", - "pandas==1.3.5; python_version == '3.7'", - "pandas==2.0.3; python_version == '3.8'", - "pandas==2.2.1; 
python_version > '3.9'", - "osmnx", - "vtk==9.2.6", - "pyvista==0.43.3; python_version > '3.7'", - "pyvista==0.38.0; python_version <= '3.7'", - "SRTM.py", - "utm", - "scikit-rf==0.31.0", - "openpyxl==3.1.2", -] all = [ "imageio", "matplotlib==3.5.3; python_version == '3.7'", From f5f21d1f805e33d28048970ff10b22e8a067b67c Mon Sep 17 00:00:00 2001 From: Samuel Lopez <85613111+Samuelopez-ansys@users.noreply.github.com> Date: Mon, 18 Mar 2024 17:10:42 +0100 Subject: [PATCH 18/36] Call API only when Polyline (#4370) --- pyaedt/modeler/cad/Primitives.py | 12 ++++++++---- pyaedt/modeler/cad/polylines.py | 2 +- 2 files changed, 9 insertions(+), 5 deletions(-) diff --git a/pyaedt/modeler/cad/Primitives.py b/pyaedt/modeler/cad/Primitives.py index c964df95bae..3c9d8e23075 100644 --- a/pyaedt/modeler/cad/Primitives.py +++ b/pyaedt/modeler/cad/Primitives.py @@ -735,7 +735,12 @@ def _refresh_all_ids_from_aedt_file(self): pid = operations["Operation"][0]["ParentPartID"] except: pass - o = self._create_object(name=attribs["Name"], pid=pid, use_cached=True) + + is_polyline = False + if operations and "PolylineParameters" in operations.get("Operation", {}): + is_polyline = True + + o = self._create_object(name=attribs["Name"], pid=pid, use_cached=True, is_polyline=is_polyline) o._part_coordinate_system = attribs["PartCoordinateSystem"] if "NonModel" in attribs["Flags"]: o._model = False @@ -7943,7 +7948,7 @@ def _refresh_object_types(self): self._all_object_names = self._solids + self._sheets + self._lines + self._points + self._unclassified @pyaedt_function_handler() - def _create_object(self, name, pid=0, use_cached=False, **kwargs): + def _create_object(self, name, pid=0, use_cached=False, is_polyline=False, **kwargs): if use_cached: line_names = self._lines else: @@ -7965,8 +7970,7 @@ def _create_object(self, name, pid=0, use_cached=False, **kwargs): self._object_names_to_ids[o.name] = new_id else: o = Object3d(self, name) - commands = self._get_commands(name) - if commands and 
commands[0].startswith("CreatePolyline"): + if is_polyline: o = self.get_existing_polyline(o) if pid: new_id = pid diff --git a/pyaedt/modeler/cad/polylines.py b/pyaedt/modeler/cad/polylines.py index 4dead96685e..0254e40bee0 100644 --- a/pyaedt/modeler/cad/polylines.py +++ b/pyaedt/modeler/cad/polylines.py @@ -325,7 +325,7 @@ def __init__( new_object_name = self._oeditor.CreatePolyline(varg1, varg2) Object3d.__init__(self, primitives, name=new_object_name) - self._primitives._create_object(self.name) + self._primitives._create_object(self.name, is_polyline=True) @property def start_point(self): From 224d99353b2084935ade9cf91a3626afc6bc255d Mon Sep 17 00:00:00 2001 From: Samuel Lopez <85613111+Samuelopez-ansys@users.noreply.github.com> Date: Tue, 19 Mar 2024 10:00:37 +0100 Subject: [PATCH 19/36] Lazy load of parametrics, setups and optimizations (#4366) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: Sébastien Morais <146729917+SMoraisAnsys@users.noreply.github.com> --- _unittest/test_11_Setup.py | 8 ++- pyaedt/application/Analysis.py | 64 +++++++++++++++++++---- pyaedt/application/Analysis3DLayout.py | 5 +- pyaedt/application/AnalysisNexxim.py | 3 +- pyaedt/application/AnalysisTwinBuilder.py | 3 +- pyaedt/application/Design.py | 4 +- pyaedt/modules/SolveSetup.py | 7 ++- 7 files changed, 75 insertions(+), 19 deletions(-) diff --git a/_unittest/test_11_Setup.py b/_unittest/test_11_Setup.py index 0b1911a6e3b..608f3e6b527 100644 --- a/_unittest/test_11_Setup.py +++ b/_unittest/test_11_Setup.py @@ -27,6 +27,7 @@ def init(self, aedtapp, local_scratch): def test_01_create_hfss_setup(self): setup1 = self.aedtapp.create_setup("My_HFSS_Setup", self.aedtapp.SETUPS.HFSSDrivenDefault) assert setup1.name == "My_HFSS_Setup" + assert self.aedtapp.setups[0].name == setup1.name assert "SaveRadFieldsOnly" in setup1.props assert "SaveRadFieldsOnly" in setup1.available_properties setup1["SaveRadFieldsonly"] = True @@ -67,6 +68,8 
@@ def test_01c_create_hfss_setup_auto_open(self): self.aedtapp.duplicate_design("auto_open") for setup in self.aedtapp.get_setups(): self.aedtapp.delete_setup(setup) + assert setup not in self.aedtapp.setups + assert not self.aedtapp.setups self.aedtapp.set_auto_open() setup1 = self.aedtapp.get_setup("Auto1") setup1.enable_adaptive_setup_multifrequency([1.9, 2.4], 0.02) @@ -79,8 +82,9 @@ def test_02_create_circuit_setup(self): assert setup1.name == "circuit" setup1.props["SweepDefinition"]["Data"] = "LINC 0GHz 4GHz 501" setup1["SaveRadFieldsonly"] = True - setup1["SweepDefinition/Data"] = "LINC 0GHz 4GHz 301" - assert setup1.props["SweepDefinition"]["Data"] == "LINC 0GHz 4GHz 301" + setup1["SweepDefinition/Data"] = "LINC 0GHz 4GHz 302" + assert setup1.props["SweepDefinition"]["Data"] == "LINC 0GHz 4GHz 302" + assert circuit.setups[0].props["SweepDefinition"]["Data"] == "LINC 0GHz 4GHz 302" assert "SweepDefinition" in setup1.available_properties setup1.update() setup1.disable() diff --git a/pyaedt/application/Analysis.py b/pyaedt/application/Analysis.py index 2d79299c98c..86175299a86 100644 --- a/pyaedt/application/Analysis.py +++ b/pyaedt/application/Analysis.py @@ -107,7 +107,6 @@ def __init__( port=0, aedt_process_id=None, ): - self.setups = [] Design.__init__( self, application, @@ -128,11 +127,9 @@ def __init__( self.active_setup = setup_name self._materials = None self._available_variations = self.AvailableVariations(self) - if self.design_type != "Maxwell Circuit": - self.setups = [self.get_setup(setup_name) for setup_name in self.setup_names] - - self.parametrics = ParametricSetups(self) - self.optimizations = OptimizationSetups(self) + self._setups = [] + self._parametrics = [] + self._optimizations = [] self._native_components = [] self.SOLUTIONS = SOLUTIONS() self.SETUPS = SETUPS() @@ -143,6 +140,9 @@ def __init__( if not settings.lazy_load: self._materials = self.materials + self._setups = self.setups + self._parametrics = self.parametrics + 
self._optimizations = self.optimizations @property def native_components(self): @@ -192,6 +192,49 @@ def materials(self): return self._materials + @property + def setups(self): + """Setups in the project. + + Returns + ------- + :class:`pyaedt.modules.SolveSetup.Setup` + Setups in the project. + + """ + if not self._setups: + if self.design_type != "Maxwell Circuit": + self._setups = [self.get_setup(setup_name) for setup_name in self.setup_names] + return self._setups + + @property + def parametrics(self): + """Setups in the project. + + Returns + ------- + :class:`pyaedt.modules.DesignXPloration.ParametricSetups` + Parametric setups in the project. + + """ + if not self._parametrics: + self._parametrics = ParametricSetups(self) + return self._parametrics + + @property + def optimizations(self): + """Optimizations in the project. + + Returns + ------- + :class:`pyaedt.modules.DesignXPloration.OptimizationSetups` + Parametric setups in the project. + + """ + if not self._optimizations: + self._optimizations = OptimizationSetups(self) + return self._optimizations + @property def Position(self): """Position of the object. 
@@ -1265,6 +1308,7 @@ def _create_setup(self, setupname="MySetupAuto", setuptype=None, props=None): setup.props = SetupProps(setup, new_dict) setup.auto_update = True + tmp_setups = self.setups setup.create() if props: for el in props: @@ -1272,7 +1316,9 @@ def _create_setup(self, setupname="MySetupAuto", setuptype=None, props=None): setup.update() self.active_setup = name - self.setups.append(setup) + + self._setups = tmp_setups + [setup] + return setup @pyaedt_function_handler() @@ -1307,9 +1353,9 @@ def delete_setup(self, setupname): """ if setupname in self.existing_analysis_setups: self.oanalysis.DeleteSetups([setupname]) - for s in self.setups: + for s in self._setups: if s.name == setupname: - self.setups.remove(s) + self._setups.remove(s) return True return False diff --git a/pyaedt/application/Analysis3DLayout.py b/pyaedt/application/Analysis3DLayout.py index a0c4bfdc7c6..932296c4ae9 100644 --- a/pyaedt/application/Analysis3DLayout.py +++ b/pyaedt/application/Analysis3DLayout.py @@ -465,6 +465,7 @@ def create_setup(self, setupname="MySetupAuto", setuptype=None, **kwargs): setuptype = SetupKeys.SetupNames.index(setuptype) name = self.generate_unique_setup_name(setupname) setup = Setup3DLayout(self, setuptype, name) + tmp_setups = self.setups setup.create() setup.auto_update = False @@ -478,7 +479,7 @@ def create_setup(self, setupname="MySetupAuto", setuptype=None, **kwargs): setup[arg_name] = arg_value setup.auto_update = True setup.update() - self.setups.append(setup) + self._setups = tmp_setups + [setup] return setup @pyaedt_function_handler() @@ -501,7 +502,7 @@ def get_setup(self, setupname, setuptype=None): """ if setuptype is None: setuptype = self.design_solutions.default_setup - for setup in self.setups: + for setup in self._setups: if setupname == setup.name: return setup setup = Setup3DLayout(self, setuptype, setupname, isnewsetup=False) diff --git a/pyaedt/application/AnalysisNexxim.py b/pyaedt/application/AnalysisNexxim.py index 
dca1a7301c3..716fb0fc605 100644 --- a/pyaedt/application/AnalysisNexxim.py +++ b/pyaedt/application/AnalysisNexxim.py @@ -609,6 +609,7 @@ def create_setup(self, setupname="MySetupAuto", setuptype=None, **kwargs): setuptype = SetupKeys.SetupNames.index(setuptype) name = self.generate_unique_setup_name(setupname) setup = SetupCircuit(self, setuptype, name) + tmp_setups = self.setups setup.create() setup.auto_update = False @@ -622,5 +623,5 @@ def create_setup(self, setupname="MySetupAuto", setuptype=None, **kwargs): setup[arg_name] = arg_value setup.auto_update = True setup.update() - self.setups.append(setup) + self._setups = tmp_setups + [setup] return setup diff --git a/pyaedt/application/AnalysisTwinBuilder.py b/pyaedt/application/AnalysisTwinBuilder.py index a056af8d77f..3472c40047b 100644 --- a/pyaedt/application/AnalysisTwinBuilder.py +++ b/pyaedt/application/AnalysisTwinBuilder.py @@ -148,6 +148,7 @@ def create_setup(self, setupname="MySetupAuto", setuptype=None, **kwargs): setuptype = SetupKeys.SetupNames.index(setuptype) name = self.generate_unique_setup_name(setupname) setup = SetupCircuit(self, setuptype, name) + tmp_setups = self.setups setup.create() setup.auto_update = False @@ -161,5 +162,5 @@ def create_setup(self, setupname="MySetupAuto", setuptype=None, **kwargs): setup[arg_name] = arg_value setup.auto_update = True setup.update() - self.setups.append(setup) + self._setups = tmp_setups + [setup] return setup diff --git a/pyaedt/application/Design.py b/pyaedt/application/Design.py index a277dc5225c..13ecbc789f1 100644 --- a/pyaedt/application/Design.py +++ b/pyaedt/application/Design.py @@ -493,8 +493,8 @@ def _init_variables(self): self._post = None self._materials = None self._variable_manager = None - self.parametrics = None - self.optimizations = None + self._parametrics = None + self._optimizations = None self._native_components = None self._mesh = None diff --git a/pyaedt/modules/SolveSetup.py b/pyaedt/modules/SolveSetup.py index 
6865793ae46..286eeea07b9 100644 --- a/pyaedt/modules/SolveSetup.py +++ b/pyaedt/modules/SolveSetup.py @@ -538,8 +538,7 @@ def delete(self): ``True`` if setup is deleted. ``False`` if it failed. """ - self.omodule.DeleteSetups([self.name]) - self._app.setups.remove(self) + self._app.delete_setup(self.name) return True @pyaedt_function_handler() @@ -3627,6 +3626,10 @@ def add_sweep(self, sweepname=None, sweeptype="Interpolating"): return False sweep_n.create() self.sweeps.append(sweep_n) + for setup in self.p_app.setups: + if self.name == setup.name: + setup.sweeps.append(sweep_n) + break return sweep_n @pyaedt_function_handler() From 50edb03412f15a540fc24fe4c317af6c53ad59e7 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Tue, 19 Mar 2024 10:18:25 +0100 Subject: [PATCH 20/36] CI: Fix nightly documentation build Note: There was an incompatibility between the actions to download and upload artifacts. The versions being different, the artifact generated was not compatible. --- .github/workflows/nightly-docs.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml index 68031a4bc57..648055fd444 100644 --- a/.github/workflows/nightly-docs.yml +++ b/.github/workflows/nightly-docs.yml @@ -40,7 +40,7 @@ jobs: make -C doc phtml - name: Upload documentation HTML artifact - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v3 with: name: documentation-html path: doc/_build/html From 265b76adf7214be8aa4ed03b27146fbda3d908ed Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Tue, 19 Mar 2024 11:11:04 +0100 Subject: [PATCH 21/36] FIX: License server --- .github/workflows/test_python_version.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/test_python_version.yml b/.github/workflows/test_python_version.yml index e8605b0beae..a622364992b 100644 --- a/.github/workflows/test_python_version.yml +++ b/.github/workflows/test_python_version.yml @@ -2,6 
+2,9 @@ name: Use test target with multiple python version on: [pull_request, workflow_dispatch] +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + concurrency: group: ${{ github.workflow }}-${{ github.ref }} cancel-in-progress: true From aed68a537d7b565c35c458070e17afdf0943a948 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Tue, 19 Mar 2024 15:43:31 +0100 Subject: [PATCH 22/36] TESTS: Skip emit tests for python 3.7 --- _unittest_solvers/test_26_emit.py | 1 + 1 file changed, 1 insertion(+) diff --git a/_unittest_solvers/test_26_emit.py b/_unittest_solvers/test_26_emit.py index a9355da4b4e..b47a1c132ef 100644 --- a/_unittest_solvers/test_26_emit.py +++ b/_unittest_solvers/test_26_emit.py @@ -28,6 +28,7 @@ def aedtapp(add_app): @pytest.mark.skipif(is_linux, reason="Emit API fails on linux.") +@pytest.mark.skipif(sys.version_info < (3,8), reason="Emit API is only available for Python 3.8+.") class TestClass: @pytest.fixture(autouse=True) From f0b25a7bcfd43b4e0c99c2ef9e37d55d0ab16564 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 19 Mar 2024 15:57:55 +0000 Subject: [PATCH 23/36] [pre-commit.ci] pre-commit autoupdate (#4376) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Maxime Rey <87315832+MaxJPRey@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index bfac3f66f85..b75dc54d20e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ exclude: | repos: - repo: https://github.com/psf/black - rev: 24.2.0 # IF VERSION CHANGES --> MODIFY "blacken-docs" MANUALLY AS WELL!! + rev: 24.3.0 # IF VERSION CHANGES --> MODIFY "blacken-docs" MANUALLY AS WELL!! 
hooks: - id: black args: @@ -56,7 +56,7 @@ repos: rev: 1.16.0 hooks: - id: blacken-docs - additional_dependencies: [black==24.2.0] + additional_dependencies: [black==24.3.0] # - repo: https://github.com/numpy/numpydoc From be9342914d69e12713a4c45a8a666a115207f68d Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Mon, 18 Mar 2024 12:04:59 +0100 Subject: [PATCH 24/36] WIP: Put back all workflows --- .github/workflows/test_python_version.yml | 57 ----------------------- 1 file changed, 57 deletions(-) delete mode 100644 .github/workflows/test_python_version.yml diff --git a/.github/workflows/test_python_version.yml b/.github/workflows/test_python_version.yml deleted file mode 100644 index a622364992b..00000000000 --- a/.github/workflows/test_python_version.yml +++ /dev/null @@ -1,57 +0,0 @@ -name: Use test target with multiple python version - -on: [pull_request, workflow_dispatch] - -env: - ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -# A workflow run is made up of one or more jobs that can run sequentially or in parallel -jobs: - - smoke-tests-with-install-target: - name: Tests | Python ${{ matrix.python-version }} - runs-on: [Windows, self-hosted, pyaedt] - strategy: - fail-fast: false - matrix: - python-version: ['3.7', '3.8', '3.9', '3.10', '3.11'] - steps: - - name: "Install Git and clone project" - uses: actions/checkout@v4 - - - name: Setup Python - uses: actions/setup-python@v4 - with: - python-version: ${{ matrix.python-version }} - - - name: 'Create virtual env' - run: | - python -m venv .venv - .venv\Scripts\Activate.ps1 - python -m pip install pip -U - python -m pip install wheel setuptools -U - python -c "import sys; print(sys.executable)" - - - name: 'Install pyaedt' - run: | - .venv\Scripts\Activate.ps1 - pip install . 
- pip install .[tests] - Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination ".venv\Lib\site-packages\vtkmodules" -Force - python -c "import pyaedt; print('Imported pyaedt')" - - - name: 'Unit testing' - uses: nick-fields/retry@v3 - with: - max_attempts: 3 - retry_on: error - timeout_minutes: 40 - command: | - .venv\Scripts\Activate.ps1 - Set-Item -Path env:PYTHONMALLOC -Value "malloc" - pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers - From 7452ce81bfc9baa0cab5d9af7ac362de3fc5a663 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Wed, 20 Mar 2024 09:32:53 +0100 Subject: [PATCH 25/36] Revert "WIP: Test all python version" This reverts commit 26d2dcb2dd0f764bb5048fd7329d3aae1c6cc08e. --- .github/workflows/build_documentation.yml | 99 ++++++++++++ .github/workflows/cpython_linux.yml | 92 +++++++++++ .github/workflows/full_documentation.yml | 155 ++++++++++++++++++ .github/workflows/ironpython.yml | 42 +++++ .github/workflows/label.yml | 91 +++++++++++ .github/workflows/nightly-docs.yml | 117 ++++++++++++++ .github/workflows/unit_test_prerelease.yml | 89 +++++++++++ .github/workflows/unit_tests.yml | 173 +++++++++++++++++++++ .github/workflows/unit_tests_solvers.bkp | 103 ++++++++++++ .github/workflows/wheelhouse.yml | 90 +++++++++++ .github/workflows/wheelhouse_linux.yml | 89 +++++++++++ 11 files changed, 1140 insertions(+) create mode 100644 .github/workflows/build_documentation.yml create mode 100644 .github/workflows/cpython_linux.yml create mode 100644 .github/workflows/full_documentation.yml create mode 100644 .github/workflows/ironpython.yml create mode 100644 .github/workflows/label.yml create mode 100644 .github/workflows/nightly-docs.yml create mode 100644 .github/workflows/unit_test_prerelease.yml create mode 100644 .github/workflows/unit_tests.yml create mode 100644 .github/workflows/unit_tests_solvers.bkp create mode 100644 .github/workflows/wheelhouse.yml create 
mode 100644 .github/workflows/wheelhouse_linux.yml diff --git a/.github/workflows/build_documentation.yml b/.github/workflows/build_documentation.yml new file mode 100644 index 00000000000..5dfbb3d0d84 --- /dev/null +++ b/.github/workflows/build_documentation.yml @@ -0,0 +1,99 @@ +name: Documentation Build + +on: [pull_request, workflow_dispatch] + +env: + # Following env vars when changed will "reset" the mentioned cache, + # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... + # You should go up in number, if you go down (or repeat a previous value) + # you might end up reusing a previous cache if it haven't been deleted already. + # It applies 7 days retention policy by default. + RESET_EXAMPLES_CACHE: 3 + RESET_DOC_BUILD_CACHE: 3 + RESET_AUTOSUMMARY_CACHE: 3 + + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + docs-style: + name: "Check documentation style" + runs-on: ubuntu-latest + steps: + - name: "Check documentation style" + uses: ansys/actions/doc-style@v5 + with: + token: ${{ secrets.GITHUB_TOKEN }} + vale-config: "doc/.vale.ini" + vale-version: "2.29.6" + + docs_build: + runs-on: ubuntu-20.04 + + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Update pip + run: | + pip install --upgrade pip + + - name: Install pyaedt + run: | + pip install .[doc] + + - name: Verify pyaedt can be imported + run: python -c "import pyaedt" + + - name: Retrieve PyAEDT version + id: version + run: | + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" + + # - name: Cache docs build directory + # uses: actions/cache@v3 + # with: + # path: doc/build + # key: doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }} + # 
restore-keys: | + # doc-build-v${{ env.RESET_DOC_BUILD_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }} + # - name: Cache autosummary + # uses: actions/cache@v3 + # with: + # path: doc/source/**/_autosummary/*.rst + # key: autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }}-${{ github.sha }} + # restore-keys: | + # autosummary-v${{ env.RESET_AUTOSUMMARY_CACHE }}-${{ steps.version.outputs.PYAEDT_VERSION }} + + - name: Install doc build requirements + run: | + sudo apt install graphviz + + # run doc build, without creating the examples directory + # note that we have to add the examples file here since it won't + # be created as gallery is disabled on linux. + - name: Documentation Build + run: | + make -C doc clean + mkdir doc/source/examples -p + echo $'Examples\n========' > doc/source/examples/index.rst + make -C doc html SPHINXOPTS="-j auto -w build_errors.txt -N" + + # Verify that sphinx generates no warnings + - name: Check for warnings + run: | + python doc/print_errors.py + +# - name: Upload Documentation +# uses: actions/upload-artifact@v4 +# with: +# name: Documentation +# path: doc/_build/html +# retention-days: 7 diff --git a/.github/workflows/cpython_linux.yml b/.github/workflows/cpython_linux.yml new file mode 100644 index 00000000000..5d9313e5765 --- /dev/null +++ b/.github/workflows/cpython_linux.yml @@ -0,0 +1,92 @@ +name: Linux_CPython_UnitTests + +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + python.version: '3.10' + python.venv: 'testvenv' + # Following env vars when changed will "reset" the mentioned cache, + # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... + # You should go up in number, if you go down (or repeat a previous value) + # you might end up reusing a previous cache if it haven't been deleted already. + # It applies 7 days retention policy by default. 
+ RESET_PIP_CACHE: 0 + PACKAGE_NAME: PyAEDT + + +on: + workflow_dispatch: + inputs: + logLevel: + description: 'Log level' + required: true + default: 'warning' + tags: + description: 'Linux CPython daily' + schedule: # UTC at 0100 + - cron: '0 1 * * *' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + test: + runs-on: [Linux, pyaedt] + strategy: + matrix: + python-version: [ '3.10' ] + steps: + - uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + architecture: 'x86' + + - name: 'Install pyaedt' + run: | + python -m venv .pyaedt_test_env + export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 + export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH + source .pyaedt_test_env/bin/activate + python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pip -U + python -m pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org wheel setuptools -U + python -c "import sys; print(sys.executable)" + pip install .[tests] + pip install --trusted-host pypi.org --trusted-host pypi.python.org --trusted-host files.pythonhosted.org pytest-azurepipelines + python -c "import pyaedt; print('Imported pyaedt')" + + - name: 'Unit testing' + uses: nick-fields/retry@v3 + with: + max_attempts: 3 + retry_on: error + timeout_minutes: 60 + command: | + export ANS_NODEPCHECK=1 + export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 + export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH + source .pyaedt_test_env/bin/activate + pytest --tx 6*popen --durations=50 --dist loadfile -v _unittest + + - name: 'Unit testing Solvers' + continue-on-error: true + uses: nick-fields/retry@v3 + with: + max_attempts: 3 + retry_on: error + timeout_minutes: 60 + command: | + export 
ANS_NODEPCHECK=1 + export ANSYSEM_ROOT241=/apps/AnsysEM/v241/Linux64 + export LD_LIBRARY_PATH=$ANSYSEM_ROOT241/common/mono/Linux64/lib64:$LD_LIBRARY_PATH + source .pyaedt_test_env/bin/activate + pytest --tx 2*popen --durations=50 --dist loadfile -v _unittest_solvers + + - name: Upload pytest test results + uses: actions/upload-artifact@v4 + with: + name: pytest-results + path: junit/test-results.xml + if: ${{ always() }} diff --git a/.github/workflows/full_documentation.yml b/.github/workflows/full_documentation.yml new file mode 100644 index 00000000000..0a114fa3635 --- /dev/null +++ b/.github/workflows/full_documentation.yml @@ -0,0 +1,155 @@ +# This is a basic workflow to help you get started with Actions + +name: FullDocumentation + +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + python.version: '3.10' + python.venv: 'testvenv' + DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' + MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} + MEILISEARCH_HOST_URL: https://backend.search.pyansys.com + MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + tags: + - v* + workflow_dispatch: + inputs: + logLevel: + description: 'Log level' + required: true + default: 'warning' + tags: + description: 'Test scenario tags' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + full_documentation: + # The type of runner that the job will run on + name: full_documentation + runs-on: [Windows, self-hosted, pyaedt] + timeout-minutes: 720 + strategy: + matrix: + python-version: ['3.10'] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + + - name: Set up Python ${{ matrix.python-version }} + 
uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + python -m venv testenv + testenv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: 'Install pyaedt' + run: | + testenv\Scripts\Activate.ps1 + pip install .[doc] + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force + + - name: Retrieve PyAEDT version + id: version + run: | + testenv\Scripts\Activate.ps1 + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" + + - name: Create HTML Documentations + run: | + testenv\Scripts\Activate.ps1 + sphinx-build -j auto --color -b html -a doc/source doc/_build/html + +# - name: Create PDF Documentations +# run: | +# testenv\Scripts\Activate.ps1 +# .\doc\make.bat pdf + + - name: Upload HTML documentation artifact + uses: actions/upload-artifact@v3 + with: + name: documentation-html + path: doc/_build/html + retention-days: 7 + +# - name: Upload PDF documentation artifact +# uses: actions/upload-artifact@v4 +# with: +# name: documentation-pdf +# path: doc/_build/pdf +# retention-days: 7 + +# - name: Release +# uses: softprops/action-gh-release@v1 +# if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') +# with: +# generate_release_notes: true +# files: | +# doc/_build/pdf + + doc-deploy-stable: + name: Deploy stable documentation + runs-on: ubuntu-latest + needs: full_documentation + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + steps: + - name: Deploy the stable documentation + uses: ansys/actions/doc-deploy-stable@v4 + with: + cname: ${{ env.DOCUMENTATION_CNAME }} + token: ${{ secrets.GITHUB_TOKEN }} + python-version: ${{ matrix.python-version }} + + + 
doc-index-stable: + name: "Deploy stable docs index" + if: github.event_name == 'push' && contains(github.ref, 'refs/tags') + runs-on: ubuntu-latest + needs: doc-deploy-stable + + steps: + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: ${{ env.MAIN_PYTHON_VERSION }} + + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v3 + + - name: Display structure of downloaded files + run: ls -R + + - name: Install the package requirements + run: pip install -e . + + - name: Get the version to PyMeilisearch + run: | + VERSION=$(python -c "from pyaedt import __version__; print('.'.join(__version__.split('.')[:2]))") + VERSION_MEILI=$(python -c "from pyaedt import __version__; print('-'.join(__version__.split('.')[:2]))") + echo "Calculated VERSION: $VERSION" + echo "Calculated VERSION_MEILI: $VERSION_MEILI" + echo "VERSION=$VERSION" >> $GITHUB_ENV + echo "VERSION_MEILI=$VERSION_MEILI" >> $GITHUB_ENV + + - name: "Deploy the stable documentation index for PyAEDT API" + uses: ansys/actions/doc-deploy-index@v4 + with: + cname: ${{ env.DOCUMENTATION_CNAME }}/version/${{ env.VERSION }} + index-name: pyaedt-v${{ env.VERSION_MEILI }} + host-url: ${{ vars.MEILISEARCH_HOST_URL }} + api-key: ${{ env.MEILISEARCH_API_KEY }} diff --git a/.github/workflows/ironpython.yml b/.github/workflows/ironpython.yml new file mode 100644 index 00000000000..0fb334beaaa --- /dev/null +++ b/.github/workflows/ironpython.yml @@ -0,0 +1,42 @@ +# This is a basic workflow to help you get started with Actions + +name: CI_Ironpython + +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the main branch + pull_request: + branches: [ main ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel 
+jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: [Windows, self-hosted, pyaedt] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + + - name: 'Run Unit Tests in Ironpython' + timeout-minutes: 60 + run: | + $processA = start-process 'cmd' -ArgumentList '/c .\_unittest_ironpython\run_unittests_batchmode.cmd' -PassThru + $processA.WaitForExit() + get-content .\_unittest_ironpython\pyaedt_unit_test_ironpython.log + $test_errors_failures = Select-String -Path .\_unittest_ironpython\pyaedt_unit_test_ironpython.log -Pattern "TextTestResult errors=" + if ($test_errors_failures -ne $null) + { + exit 1 + } + else + { + exit 0 + } diff --git a/.github/workflows/label.yml b/.github/workflows/label.yml new file mode 100644 index 00000000000..596cace4c8f --- /dev/null +++ b/.github/workflows/label.yml @@ -0,0 +1,91 @@ +name: Labeler +on: + pull_request: + push: + branches: [ main ] + paths: + - '../labels.yml' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + + label-syncer: + name: Syncer + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: micnncim/action-label-syncer@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + labeler: + name: Set labels + needs: [label-syncer] + permissions: + contents: read + pull-requests: write + runs-on: ubuntu-latest + steps: + + # Label based on modified files + - name: Label based on changed files + uses: actions/labeler@v5 + with: + repo-token: "${{ secrets.GITHUB_TOKEN }}" + + # Label based on branch name + - uses: actions-ecosystem/action-add-labels@v1 + if: | + startsWith(github.event.pull_request.head.ref, 'doc') || + startsWith(github.event.pull_request.head.ref, 'docs') + with: + labels: documentation + + - uses: actions-ecosystem/action-add-labels@v1 + if: | + startsWith(github.event.pull_request.head.ref, 
'maint') || + startsWith(github.event.pull_request.head.ref, 'no-ci') || + startsWith(github.event.pull_request.head.ref, 'ci') + with: + labels: maintenance + + - uses: actions-ecosystem/action-add-labels@v1 + if: startsWith(github.event.pull_request.head.ref, 'feat') + with: + labels: | + enhancement + + - uses: actions-ecosystem/action-add-labels@v1 + if: | + startsWith(github.event.pull_request.head.ref, 'fix') || + startsWith(github.event.pull_request.head.ref, 'patch') + with: + labels: bug + + - uses: actions-ecosystem/action-add-labels@v1 + if: | + startsWith(github.event.pull_request.head.ref, 'test') + with: + labels: testing + + commenter: + runs-on: ubuntu-latest + steps: + - name: Suggest to add labels + uses: peter-evans/create-or-update-comment@v4 + # Execute only when no labels have been applied to the pull request + if: toJSON(github.event.pull_request.labels.*.name) == '{}' + with: + issue-number: ${{ github.event.pull_request.number }} + body: | + Please add one of the following labels to add this contribution to the Release Notes :point_down: + - [bug](https://github.com/ansys/pyaedt/pulls?q=label%3Abug+) + - [documentation](https://github.com/ansys/pyaedt/pulls?q=label%3Adocumentation+) + - [enhancement](https://github.com/ansys/pyaedt/pulls?q=label%3Aenhancement+) + - [good first issue](https://github.com/ansys/pyaedt/pulls?q=label%3Agood+first+issue) + - [maintenance](https://github.com/ansys/pyaedt/pulls?q=label%3Amaintenance+) + - [release](https://github.com/ansys/pyaedt/pulls?q=label%3Arelease+) + - [testing](https://github.com/ansys/pyaedt/pulls?q=label%Atesting+) diff --git a/.github/workflows/nightly-docs.yml b/.github/workflows/nightly-docs.yml new file mode 100644 index 00000000000..68031a4bc57 --- /dev/null +++ b/.github/workflows/nightly-docs.yml @@ -0,0 +1,117 @@ +name: Nightly Documentation Build + +on: + workflow_dispatch: + schedule: # UTC at 0400 + - cron: '0 4 * * *' + +env: + 
ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + DOCUMENTATION_CNAME: 'aedt.docs.pyansys.com' + MEILISEARCH_API_KEY: ${{ secrets.MEILISEARCH_API_KEY }} + MEILISEARCH_PUBLIC_API_KEY: ${{ secrets.MEILISEARCH_PUBLIC_API_KEY }} + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + docs_build: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Setup Python + uses: actions/setup-python@v5 + with: + python-version: "3.10" + + - name: Install pyaedt + run: | + pip install . + + - name: Install doc build requirements + run: | + pip install .[doc] + + - name: Full Documentation Build + run: | + make -C doc phtml + + - name: Upload documentation HTML artifact + uses: actions/upload-artifact@v4 + with: + name: documentation-html + path: doc/_build/html + retention-days: 7 + + + docs_upload: + needs: docs_build + runs-on: ubuntu-latest + steps: + + - name: Deploy development documentation + uses: ansys/actions/doc-deploy-dev@v4 + with: + cname: ${{ env.DOCUMENTATION_CNAME }} + token: ${{ secrets.GITHUB_TOKEN }} + + doc-index-dev: + name: "Deploy dev docs index" + runs-on: ubuntu-latest + needs: docs_upload + steps: + - uses: actions/checkout@v4 + + - uses: actions/download-artifact@v3 + + - name: Display structure of downloaded files + run: ls -R + + - name: "Deploy the dev documentation index for PyAEDT API" + uses: ansys/actions/doc-deploy-index@v4 + with: + cname: ${{ env.DOCUMENTATION_CNAME }}/version/dev + index-name: pyaedt-vdev + host-url: ${{ vars.MEILISEARCH_HOST_URL }} + api-key: ${{ env.MEILISEARCH_API_KEY }} + + # docstring_testing: + # runs-on: Windows + + # steps: + # - uses: actions/checkout@v4 + + # - name: Setup Python + # uses: actions/setup-python@v2 + # with: + # python-version: 3.8 + + # - name: 'Create virtual env' + # run: | + # python -m venv testenv + # testenv\Scripts\Activate.ps1 + # python -m pip install pip -U + # python -m pip install wheel 
setuptools -U + # python -c "import sys; print(sys.executable)" + + # - name: 'Install pyaedt' + # run: | + # testenv\Scripts\Activate.ps1 + # pip install . --use-feature=in-tree-build + # cd _unittest + # python -c "import pyaedt; print('Imported pyaedt')" + + # - name: Install testing requirements + # run: | + # testenv\Scripts\Activate.ps1 + # pip install -r requirements/requirements_test.txt + # pip install pytest-azurepipelines + + # - name: Docstring testing + # run: | + # testenv\Scripts\Activate.ps1 + # pytest -v pyaedt/desktop.py pyaedt/icepak.py + # pytest -v pyaedt/desktop.py pyaedt/hfss.py diff --git a/.github/workflows/unit_test_prerelease.yml b/.github/workflows/unit_test_prerelease.yml new file mode 100644 index 00000000000..a15f6c2091c --- /dev/null +++ b/.github/workflows/unit_test_prerelease.yml @@ -0,0 +1,89 @@ +name: CI_PreRelease + +env: + python.version: '3.8' + python.venv: 'testvenv' + # Following env vars when changed will "reset" the mentioned cache, + # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... + # You should go up in number, if you go down (or repeat a previous value) + # you might end up reusing a previous cache if it haven't been deleted already. + # It applies 7 days retention policy by default. 
+ RESET_PIP_CACHE: 0 + PACKAGE_NAME: PyAEDT +# Controls when the workflow will run +on: + workflow_dispatch: + inputs: + logLevel: + description: 'Log level' + required: true + default: 'warning' + tags: + description: 'Linux CPython daily' + schedule: # UTC at 0300 + - cron: '0 3 * * *' + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: [pre_release] + strategy: + matrix: + python-version: ['3.8'] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + python -m venv testenv + testenv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: 'Install pyaedt' + run: | + testenv\Scripts\Activate.ps1 + pip install .[tests] + pip install pytest-azurepipelines + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force + Copy-Item -Path "C:\actions-runner\local_config.json" -Destination "_unittest" -Force + mkdir tmp + cd tmp + python -c "import pyaedt; print('Imported pyaedt')" + + # - name: "Check licences of packages" + # uses: pyansys/pydpf-actions/check-licenses@v2.0 + + - name: 'Unit testing' + timeout-minutes: 60 + run: | + testenv\Scripts\Activate.ps1 + Set-Item -Path env:PYTHONMALLOC -Value "malloc" + pytest --tx 6*popen --durations=50 --dist loadfile -v --cov=pyaedt --cov-report=xml --junitxml=junit/test-results.xml --cov-report=html _unittest + + - uses: codecov/codecov-action@v4 + env: + CODECOV_TOKEN: ${{ 
secrets.CODECOV_TOKEN }} + if: matrix.python-version == '3.8' + name: 'Upload coverage to Codecov' + + - name: Upload pytest test results + uses: actions/upload-artifact@v4 + with: + name: pytest-results + path: junit/test-results.xml + # Use always() to always run this step to publish test results when there are test failures + if: ${{ always() }} + diff --git a/.github/workflows/unit_tests.yml b/.github/workflows/unit_tests.yml new file mode 100644 index 00000000000..dd16afa66bf --- /dev/null +++ b/.github/workflows/unit_tests.yml @@ -0,0 +1,173 @@ +name: CI + +env: + ANSYSLMD_LICENSE_FILE: ${{ format('1055@{0}', secrets.LICENSE_SERVER) }} + python.version: '3.10' + python.venv: 'testvenv' + # Following env vars when changed will "reset" the mentioned cache, + # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... + # You should go up in number, if you go down (or repeat a previous value) + # you might end up reusing a previous cache if it hasn't been deleted already. + # It applies 7 days retention policy by default. 
+ RESET_PIP_CACHE: 0 + PACKAGE_NAME: PyAEDT +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + tags: + - 'v*' + branches: + - main + pull_request: + branches: [ main ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build_solvers: + # The type of runner that the job will run on + runs-on: [Windows, self-hosted, pyaedt] + strategy: + matrix: + python-version: [ '3.10' ] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue + python -m venv testenv_s + testenv_s\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: 'Install pyaedt' + run: | + testenv_s\Scripts\Activate.ps1 + pip install . 
+ pip install .[tests] + pip install pytest-azurepipelines + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force + mkdir tmp + cd tmp + python -c "import pyaedt; print('Imported pyaedt')" + + # - name: "Check licences of packages" + # uses: pyansys/pydpf-actions/check-licenses@v2.0 + + - name: 'Unit testing' + uses: nick-fields/retry@v3 + with: + max_attempts: 3 + retry_on: error + timeout_minutes: 40 + command: | + testenv_s\Scripts\Activate.ps1 + Set-Item -Path env:PYTHONMALLOC -Value "malloc" + pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers + + - uses: codecov/codecov-action@v4 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + name: 'Upload coverage to Codecov' + + - name: Upload pytest test results + uses: actions/upload-artifact@v4 + with: + name: pytest-solver-results + path: junit/test-results.xml + # Use always() to always run this step to publish test results when there are test failures + if: ${{ always() }} + + + build: + # The type of runner that the job will run on + runs-on: [Windows, self-hosted, pyaedt] + strategy: + matrix: + python-version: ['3.10'] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + Remove-Item D:\Temp\* -Recurse -Force -ErrorAction SilentlyContinue + python -m venv testenv + testenv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: 'Install pyaedt' + run: | + testenv\Scripts\Activate.ps1 + pip install . 
+ pip install .[tests] + pip install pytest-azurepipelines + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv\Lib\site-packages\vtkmodules" -Force + mkdir tmp + cd tmp + python -c "import pyaedt; print('Imported pyaedt')" + + # - name: "Check licences of packages" + # uses: pyansys/pydpf-actions/check-licenses@v2.0 + + - name: 'Unit testing' + uses: nick-fields/retry@v3 + with: + max_attempts: 3 + retry_on: error + timeout_minutes: 50 + command: | + testenv\Scripts\Activate.ps1 + Set-Item -Path env:PYTHONMALLOC -Value "malloc" + pytest -n 6 --dist loadfile --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest + + - uses: codecov/codecov-action@v4 + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} + with: + name: 'Upload coverage to Codecov' + + - name: Upload pytest test results + uses: actions/upload-artifact@v4 + with: + name: pytest-results + path: junit/test-results.xml + # Use always() to always run this step to publish test results when there are test failures + if: ${{ always() }} + + - name: 'Build and validate source distribution' + run: | + testenv\Scripts\Activate.ps1 + python -m pip install build twine + python -m build + python -m twine check dist/* + + - name: "Builds and uploads to PyPI" + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + run: | + testenv\Scripts\Activate.ps1 + python setup.py sdist + python -m pip install twine + python -m twine upload --skip-existing dist/* + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/unit_tests_solvers.bkp b/.github/workflows/unit_tests_solvers.bkp new file mode 100644 index 00000000000..4d0691a5dab --- /dev/null +++ b/.github/workflows/unit_tests_solvers.bkp @@ -0,0 +1,103 @@ +name: CI_Solvers + +env: + python.version: '3.10' + python.venv: 'testvenv' + # Following env vars when changed will "reset" the mentioned cache, + # by changing the cache 
file name. It is rendered as ...-v%RESET_XXX%-... + # You should go up in number, if you go down (or repeat a previous value) + # you might end up reusing a previous cache if it hasn't been deleted already. + # It applies 7 days retention policy by default. + RESET_PIP_CACHE: 0 + PACKAGE_NAME: PyAEDT +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + tags: + - 'v*' + branches: + - main + pull_request: + branches: [ main ] + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: [Windows, self-hosted, pyaedt] + strategy: + matrix: + python-version: ['3.10'] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + Remove-Item D:\Temp\* -Recurse -Force + python -m venv testenv_s + testenv_s\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + + - name: 'Install pyaedt' + run: | + testenv_s\Scripts\Activate.ps1 + pip install . 
+ pip install .[tests] + pip install pytest-azurepipelines + Copy-Item -Path "C:\actions-runner\opengl32.dll" -Destination "testenv_s\Lib\site-packages\vtkmodules" -Force + mkdir tmp + cd tmp + python -c "import pyaedt; print('Imported pyaedt')" + + # - name: "Check licences of packages" + # uses: pyansys/pydpf-actions/check-licenses@v2.0 + + - name: 'Unit testing' + timeout-minutes: 40 + run: | + testenv_s\Scripts\Activate.ps1 + Set-Item -Path env:PYTHONMALLOC -Value "malloc" + pytest --durations=50 -v --cov=pyaedt --cov-report=xml --cov-report=html --junitxml=junit/test-results.xml _unittest_solvers + + - uses: codecov/codecov-action@v3 + if: matrix.python-version == '3.10' + name: 'Upload coverage to Codecov' + + - name: Upload pytest test results + uses: actions/upload-artifact@v3 + with: + name: pytest-results + path: junit/test-results.xml + # Use always() to always run this step to publish test results when there are test failures + if: ${{ always() }} + + - name: 'Build and validate source distribution' + run: | + testenv_s\Scripts\Activate.ps1 + python -m pip install build twine + python -m build + python -m twine check dist/* + + - name: "Builds and uploads to PyPI" + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + run: | + testenv_s\Scripts\Activate.ps1 + python setup.py sdist + python -m pip install twine + python -m twine upload --skip-existing dist/* + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }} diff --git a/.github/workflows/wheelhouse.yml b/.github/workflows/wheelhouse.yml new file mode 100644 index 00000000000..600b1c73e9c --- /dev/null +++ b/.github/workflows/wheelhouse.yml @@ -0,0 +1,90 @@ +# This is a basic workflow to help you get started with Actions + +name: WheelHouse + +env: + python.venv: 'testvenv' + # Following env vars when changed will "reset" the mentioned cache, + # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... 
+ # You should go up in number, if you go down (or repeat a previous value) + # you might end up reusing a previous cache if it haven't been deleted already. + # It applies 7 days retention policy by default. + RESET_PIP_CACHE: 0 + PACKAGE_NAME: PyAEDT +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + tags: + - 'v*' + - v* + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: [windows-latest] + strategy: + matrix: + python-version: [ 3.7, 3.8, 3.9, '3.10'] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: 'Create virtual env' + run: | + python -m venv testenv + testenv\Scripts\Activate.ps1 + python -m pip install pip -U + python -m pip install wheel setuptools -U + python -c "import sys; print(sys.executable)" + pip install .[all,dotnet] + pip install jupyterlab + + + - name: Retrieve PyAEDT version + run: | + testenv\Scripts\Activate.ps1 + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" + id: version + + - name: Generate wheelhouse + run: | + testenv\Scripts\Activate.ps1 + $packages=$(pip freeze) + # Iterate over the packages and generate wheels + foreach ($package in $packages) { + echo "Generating wheel for $package" + pip wheel "$package" -w wheelhouse + } + + - name: Zip wheelhouse + uses: vimtor/action-zip@v1 + with: + files: wheelhouse + dest: ${{ 
env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip + + - name: Upload Wheelhouse + uses: actions/upload-artifact@v4 + with: + name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }} + path: '*.zip' + retention-days: 7 + + - name: Release + uses: softprops/action-gh-release@v2 + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + with: + generate_release_notes: true + files: | + ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-${{ runner.os }}-${{ matrix.python-version }}.zip diff --git a/.github/workflows/wheelhouse_linux.yml b/.github/workflows/wheelhouse_linux.yml new file mode 100644 index 00000000000..67458e53f95 --- /dev/null +++ b/.github/workflows/wheelhouse_linux.yml @@ -0,0 +1,89 @@ +# This is a basic workflow to help you get started with Actions + +name: WheelHouse Linux + +env: + python.venv: 'testvenv' + # Following env vars when changed will "reset" the mentioned cache + # by changing the cache file name. It is rendered as ...-v%RESET_XXX%-... + # You should go up in number. If you go down (or repeat a previous value), + # you might end up reusing a previous cache if it hasn't been deleted already. + # It applies a 7-day retention policy by default. 
+ RESET_PIP_CACHE: 0 + PACKAGE_NAME: PyAEDT +# Controls when the workflow will run +on: + # Triggers the workflow on push or pull request events but only for the main branch + push: + tags: + - 'v*' + - v* + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +# A workflow run is made up of one or more jobs that can run sequentially or in parallel +jobs: + # This workflow contains a single job called "build" + build: + # The type of runner that the job will run on + runs-on: ubuntu-20.04 + strategy: + matrix: + python-version: [ 3.7, 3.8, 3.9, '3.10'] + # Steps represent a sequence of tasks that will be executed as part of the job + steps: + - uses: actions/checkout@v4 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ matrix.python-version }} + + - name: Install pyaedt + run: | + pip install .[all,dotnet] + pip install jupyterlab + + - name: Verify pyaedt can be imported + run: python -c "import pyaedt" + + - name: Retrieve PyAEDT version + run: | + echo "PYAEDT_VERSION=$(python -c 'from pyaedt import __version__; print(__version__)')" >> $GITHUB_OUTPUT + echo "PyAEDT version is: $(python -c "from pyaedt import __version__; print(__version__)")" + id: version + + - name: Generate wheelhouse + run: | + pip install wheel setuptools -U + pip install --upgrade pip + pip wheel . 
-w wheelhouse + export wheellist=$(pip freeze) + for file in $wheellist; do + if [[ $file != *"@"* ]] && [[ $file != *"pyaedt"* ]]; then + pip wheel $file -w wheelhouse + fi + done + continue-on-error: true + + - name: Zip wheelhouse + uses: vimtor/action-zip@v1 + with: + files: wheelhouse + dest: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip + + - name: Upload Wheelhouse + uses: actions/upload-artifact@v4 + with: + name: ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }} + path: '*.zip' + retention-days: 7 + + - name: Release + uses: softprops/action-gh-release@v2 + if: github.event_name == 'push' && startsWith(github.ref, 'refs/tags') + with: + generate_release_notes: true + files: | + ${{ env.PACKAGE_NAME }}-v${{ steps.version.outputs.PYAEDT_VERSION }}-wheelhouse-${{ runner.os }}-${{ matrix.python-version }}.zip \ No newline at end of file From a6328430e44af9c7fb089e13d9894e83a34c0607 Mon Sep 17 00:00:00 2001 From: Hui Zhou Date: Wed, 20 Mar 2024 12:11:22 +0100 Subject: [PATCH 26/36] spisim_com_enhancement (#4312) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: ring630 <@gmail.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Sébastien Morais <146729917+SMoraisAnsys@users.noreply.github.com> --- _unittest_solvers/test_00_analyze.py | 61 ++- pyaedt/generic/com_parameters.py | 83 ---- pyaedt/generic/spisim.py | 90 +++-- .../__init__.py | 3 + .../com_120d_8.cfg | 333 ---------------- .../com_120d_8.json | 192 +++++++++ .../com_93_8.cfg | 333 ---------------- .../com_93_8.json | 192 +++++++++ .../com_94_17.json | 192 +++++++++ .../com_parameters.py | 374 ++++++++++++++++++ .../com_settings_mapping.py | 233 +++++++++++ 11 files changed, 1280 insertions(+), 806 deletions(-) delete mode 100644 
pyaedt/generic/com_parameters.py create mode 100644 pyaedt/misc/spisim_com_configuration_files/__init__.py delete mode 100644 pyaedt/misc/spisim_com_configuration_files/com_120d_8.cfg create mode 100644 pyaedt/misc/spisim_com_configuration_files/com_120d_8.json delete mode 100644 pyaedt/misc/spisim_com_configuration_files/com_93_8.cfg create mode 100644 pyaedt/misc/spisim_com_configuration_files/com_93_8.json create mode 100644 pyaedt/misc/spisim_com_configuration_files/com_94_17.json create mode 100644 pyaedt/misc/spisim_com_configuration_files/com_parameters.py create mode 100644 pyaedt/misc/spisim_com_configuration_files/com_settings_mapping.py diff --git a/_unittest_solvers/test_00_analyze.py b/_unittest_solvers/test_00_analyze.py index 126eed52f7d..f507030ee2b 100644 --- a/_unittest_solvers/test_00_analyze.py +++ b/_unittest_solvers/test_00_analyze.py @@ -8,6 +8,8 @@ from _unittest_solvers.conftest import desktop_version from _unittest_solvers.conftest import local_path +from pathlib import Path + from pyaedt import is_linux from pyaedt import Icepak from pyaedt import Hfss3dLayout @@ -436,7 +438,6 @@ def test_06_m3d_harmonic_forces(self, m3dtransient): ) assert m3dtransient.export_element_based_harmonic_force(number_of_frequency=5) - def test_07_export_maxwell_fields(self, m3dtransient): m3dtransient.analyze(m3dtransient.active_setup, num_cores=2) fld_file_3 = os.path.join(self.local_scratch.path, "test_fld_3.fld") @@ -493,26 +494,14 @@ def test_08_compute_erl(self, circuit_erl): def test_09a_compute_com(self, local_scratch, circuit_com): touchstone_file = circuit_com.export_touchstone() spisim = SpiSim(touchstone_file) - assert spisim.com_standards - assert spisim.com_parameters() report_dir = os.path.join(spisim.working_directory, "50GAUI-1_C2C") os.mkdir(report_dir) - com_0, com_1 = spisim.compute_com( - standard="50GAUI-1_C2C", - out_folder=report_dir, - ) - assert com_0 and com_1 - - report_dir = os.path.join(spisim.working_directory, "100GBASE-KR4") - 
os.mkdir(report_dir) - com_0, com_1 = spisim.compute_com( - standard="100GBASE-KR4", - fext_s4p=[touchstone_file, touchstone_file], - next_s4p=touchstone_file, + com = spisim.compute_com( + standard="50GAUI-1-C2C", out_folder=report_dir, ) - assert com_0 and com_1 + assert com def test_09b_compute_com(self, local_scratch): com_example_file_folder = os.path.join(local_path, "example_models", test_subfolder, "com_unit_test_sparam") @@ -523,18 +512,48 @@ def test_09b_compute_com(self, local_scratch): next_s4p = local_scratch.copyfile( os.path.join(com_example_file_folder, "FCI_CC_Long_Link_Pair_11_to_Pair_9_NEXT.s4p") ) - report_dir = os.path.join(local_scratch.path, "custom") os.mkdir(report_dir) - spisim = SpiSim(thru_s4p) - spisim.export_com_configure_file(os.path.join(spisim.working_directory, "custom.cfg")) + spisim.working_directory = local_scratch.path + com_0, com_1 = spisim.compute_com( - standard="custom", - config_file=os.path.join(spisim.working_directory, "custom.cfg"), + standard="50GAUI-1-C2C", port_order="EvenOdd", fext_s4p=fext_s4p, next_s4p=next_s4p, out_folder=report_dir, ) assert com_0 and com_1 + com_0, com_1 = spisim.compute_com( + standard="100GBASE-KR4", + port_order="EvenOdd", + fext_s4p=fext_s4p, + next_s4p=next_s4p, + out_folder=report_dir, + ) + assert com_0 and com_1 + com_0, com_1 = spisim.compute_com( + standard="100GBASE-KP4", + port_order="EvenOdd", + fext_s4p=fext_s4p, + next_s4p=next_s4p, + out_folder=report_dir, + ) + assert com_0 and com_1 + + def test_09c_compute_com(self, local_scratch): + com_example_file_folder = Path(local_path) / "example_models" / test_subfolder / "com_unit_test_sparam" + thru_s4p = local_scratch.copyfile(com_example_file_folder / "SerDes_Demo_02_Thru.s4p") + spisim = SpiSim(thru_s4p) + spisim.export_com_configure_file(Path(local_scratch.path) / "test.cfg") + com_0, com_1 = spisim.compute_com("custom", Path(local_scratch.path) / "test.cfg") + assert com_0 and com_1 + + 
spisim.export_com_configure_file(os.path.join(spisim.working_directory, "custom.json")) + com_0, com_1 = spisim.compute_com( + standard="custom", + config_file=os.path.join(spisim.working_directory, "custom.json"), + port_order="EvenOdd", + ) + assert com_0 and com_1 diff --git a/pyaedt/generic/com_parameters.py b/pyaedt/generic/com_parameters.py deleted file mode 100644 index 91d65d2601c..00000000000 --- a/pyaedt/generic/com_parameters.py +++ /dev/null @@ -1,83 +0,0 @@ -from pathlib import Path - -from pyaedt import pyaedt_function_handler -from pyaedt import settings - -logger = settings.logger - - -class COMParameters: - _CFG_DIR = Path(__file__).parent.parent / "misc" / "spisim_com_configuration_files" - _STD_TABLE_MAPPING = {"50GAUI-1_C2C": "com_120d_8.cfg", "100GBASE-KR4": "com_93_8.cfg"} - - def __init__(self, standard="50GAUI-1_C2C"): - self._standard = standard - self.standard = standard - - @property - def standard(self): - """Standard name. - - Returns - ------- - str - """ - return self._standard - - @standard.setter - def standard(self, value): - std_table = self._STD_TABLE_MAPPING[value] - cfg_path = self._CFG_DIR / std_table - self.load(cfg_path) - self._standard = value - - @property - def parameters(self): - """Parameters of the standard with value. - - Returns - ------- - dict - """ - return {i: j for i, j in self.__dict__.items() if not i.startswith("_")} - - @pyaedt_function_handler - def load(self, file_path): - """Load configuration file. - - Returns - ------- - bool - """ - self._standard = "custom" - with open(file_path, "r") as fp: - lines = fp.readlines() - for line in lines: - if not line.startswith("#") and "=" in line: - split_line = [i.strip() for i in line.split("=")] - name, value = split_line - self.__setattr__(name, str(value)) - return True - - @pyaedt_function_handler - def export(self, file_path): - """Generate a configuration file for SpiSim. 
- - Parameters - ---------- - file_path : str - Full path to configuration file to create. - - Returns - ------- - bool - """ - with open(file_path, "w") as fp: - fp.write("################################################################################\n") - fp.write("# MODULE: COM\n") - fp.write("# GENERATED ON\n") - fp.write("################################################################################\n") - for k, v in self.parameters.items(): - fp.write("# {0}: {0}\n".format(k.upper())) - fp.write("{} = {}\n".format(k.upper(), v)) - return True diff --git a/pyaedt/generic/spisim.py b/pyaedt/generic/spisim.py index 38c1fca2a43..ad393b087a8 100644 --- a/pyaedt/generic/spisim.py +++ b/pyaedt/generic/spisim.py @@ -9,13 +9,14 @@ from numpy import float64 from numpy import zeros +from pyaedt import generate_unique_folder_name from pyaedt import generate_unique_name from pyaedt import is_linux from pyaedt import pyaedt_function_handler from pyaedt import settings -from pyaedt.generic.com_parameters import COMParameters from pyaedt.generic.general_methods import env_value from pyaedt.misc import current_version +from pyaedt.misc.spisim_com_configuration_files.com_parameters import COMParametersVer3p4 class SpiSim: @@ -63,7 +64,11 @@ def _compute_spisim(self, parameter, out_file="", touchstone_file="", config_fil command = [spisimExe, parameter, cfgCmmd] # Debug('%s %s' % (cmdList[0], ' '.join(arguments))) # try up to three times to be sure - out_processing = os.path.join(out_file, generate_unique_name("spsim_out") + ".txt") + if out_file: + out_processing = os.path.join(out_file, generate_unique_name("spsim_out") + ".txt") + else: + out_processing = os.path.join(generate_unique_folder_name(), generate_unique_name("spsim_out") + ".txt") + my_env = os.environ.copy() my_env.update(settings.aedt_environment_variables) if is_linux: # pragma: no cover @@ -97,9 +102,19 @@ def _get_output_parameter_from_result(self, out_file, parameter_name): try: with open(out_file, "r") 
as infile: txt = infile.read() - com_case_0 = re.search(r"Case 0: Calculated COM = (.*?),", txt).groups()[0] - com_case_1 = re.search(r"Case 1: Calculated COM = (.*?),", txt).groups()[0] - return float(com_case_0), float(com_case_1) + i = 0 + com_results = [] + while True: + m = re.search(r"Case {}: Calculated COM = (.*?),".format(i), txt) + if m: + com_results.append(float(m.groups()[0])) + i = i + 1 + else: + if i == 0: + self.logger.error("Failed to find results from SPISim log file. \n{txt}") + break + + return com_results except IndexError: # pragma: no cover self.logger.error("Failed to compute {}. Check input parameters and retry".format(parameter_name)) @@ -255,13 +270,14 @@ def compute_com( next_s4p="", out_folder="", ): - """Compute Channel Operating Margin. + """Compute Channel Operating Margin. Only COM ver3.4 is supported. Parameters ---------- standard : str - Name of the standard to apply. - config_file : str, optional + Name of the standard to apply. Options are ``"Custom"`, ``"50GAUI-1-C2C"`, ``"100GBASE-KR4"`` and + ``"100GBASE-KP4"``. + config_file : str, Path, optional Config file to use. port_order : str, optional Whether to use "``EvenOdd``" or "``Incremental``" numbering for S4P files. The default is ``EvenOdd``. 
@@ -277,19 +293,24 @@ def compute_com( ------- """ - if standard == "custom": - com_param = COMParameters() - com_param.load(config_file) + + com_param = COMParametersVer3p4() + if standard.lower() == "custom": + + if os.path.splitext(config_file)[-1] == ".cfg": + com_param.load_spisim_cfg(config_file) + else: + com_param.load(config_file) else: - com_param = COMParameters(standard) + com_param.standard = standard - com_param.THRUSNP = self.touchstone_file - com_param.FEXTARY = fext_s4p if not isinstance(fext_s4p, list) else ";".join(fext_s4p) - com_param.NEXTARY = next_s4p if not isinstance(next_s4p, list) else ";".join(next_s4p) + com_param.set_parameter("THRUSNP", self.touchstone_file) + com_param.set_parameter("FEXTARY", fext_s4p if not isinstance(fext_s4p, list) else ";".join(fext_s4p)) + com_param.set_parameter("NEXTARY", next_s4p if not isinstance(next_s4p, list) else ";".join(next_s4p)) - com_param.PORT_ORDER = "[1 3 2 4]" if port_order == "EvenOdd" else "[1 2 3 4]" + com_param.set_parameter("Port Order", "[1 3 2 4]" if port_order == "EvenOdd" else "[1 2 3 4]") - com_param.RESULT_DIR = out_folder if out_folder else self.working_directory + com_param.set_parameter("RESULT_DIR", out_folder if out_folder else self.working_directory) return self._compute_com(com_param) @pyaedt_function_handler @@ -308,42 +329,39 @@ def _compute_com( ------- """ + thru_snp = com_parameter.parameters["THRUSNP"].replace("\\", "/") + fext_snp = com_parameter.parameters["FEXTARY"].replace("\\", "/") + next_snp = com_parameter.parameters["NEXTARY"].replace("\\", "/") + result_dir = com_parameter.parameters["RESULT_DIR"].replace("\\", "/") - com_parameter.THRUSNP = com_parameter.THRUSNP.replace("\\", "/") - com_parameter.FEXTARY = com_parameter.FEXTARY.replace("\\", "/") - com_parameter.NEXTARY = com_parameter.NEXTARY.replace("\\", "/") - com_parameter.RESULT_DIR = com_parameter.RESULT_DIR.replace("\\", "/") + com_parameter.set_parameter("THRUSNP", thru_snp) + 
com_parameter.set_parameter("FEXTARY", fext_snp) + com_parameter.set_parameter("NEXTARY", next_snp) + com_parameter.set_parameter("RESULT_DIR", result_dir) - cfg_file = os.path.join(com_parameter.RESULT_DIR, "com_parameters.cfg") - com_parameter.export(cfg_file) + cfg_file = os.path.join(com_parameter.parameters["RESULT_DIR"], "com_parameters.cfg") + com_parameter.export_spisim_cfg(cfg_file) out_processing = self._compute_spisim(parameter="COM", config_file=cfg_file) return self._get_output_parameter_from_result(out_processing, "COM") - @property - def com_standards(self): - """List of supported COM standards.""" - return list(COMParameters._STD_TABLE_MAPPING.keys()) - - @staticmethod @pyaedt_function_handler - def com_parameters(standard="50GAUI-1_C2C"): - return COMParameters(standard).parameters - - @pyaedt_function_handler - def export_com_configure_file(self, file_path, standard="50GAUI-1_C2C"): + def export_com_configure_file(self, file_path, standard="50GAUI-1-C2C"): """Generate a configuration file for SpiSim. Parameters ---------- - file_path : str + file_path : str, Path Full path to configuration file to create. 
Returns ------- bool """ - return COMParameters(standard).export(file_path) + if os.path.splitext(file_path)[-1] == ".cfg": + COMParametersVer3p4(standard).export_spisim_cfg(file_path) + else: + return COMParametersVer3p4(standard).export(file_path) def detect_encoding(file_path, expected_pattern="", re_flags=0): diff --git a/pyaedt/misc/spisim_com_configuration_files/__init__.py b/pyaedt/misc/spisim_com_configuration_files/__init__.py new file mode 100644 index 00000000000..c23e620fcaf --- /dev/null +++ b/pyaedt/misc/spisim_com_configuration_files/__init__.py @@ -0,0 +1,3 @@ +from pathlib import Path + +workdir = Path(__file__).parent diff --git a/pyaedt/misc/spisim_com_configuration_files/com_120d_8.cfg b/pyaedt/misc/spisim_com_configuration_files/com_120d_8.cfg deleted file mode 100644 index 7f2992b5c26..00000000000 --- a/pyaedt/misc/spisim_com_configuration_files/com_120d_8.cfg +++ /dev/null @@ -1,333 +0,0 @@ -################################################################################ -# MODULE: COM -# GENERATED ON 20240215170006 -################################################################################ - -# VERSION: VERSION -VERSION = 3.40 -# THRUSNP: THRUSNP -THRUSNP = -# FEXTARY: FEXTARY -FEXTARY = -# NEXTARY: NEXTARY -NEXTARY = -# SPECTAG: SPECTAG -SPECTAG = Customized -# FSTTHRU: FSTTHRU -FSTTHRU = -1 -# NUMPORT: NUMPORT -NUMPORT = -1 -# GENHTML: GENHTML -GENHTML = T -# A_DD: A_DD -A_DD = 0.02 -# A_FE: A_FE -A_FE = 0.45 -# A_NE: A_NE -A_NE = 0.65 -# A_FT: A_FT -A_FT = -# A_NT: A_NT -A_NT = -# A_V: A_V -A_V = 0.45 -# B_MAX1: B_MAX1 -B_MAX1 = 0.5 -# B_MIN1: B_MIN1 -B_MIN1 = -# B_MAX2_N_B: B_MAX2_N_B -B_MAX2_N_B = -# B_MIN2_N_B: B_MIN2_N_B -B_MIN2_N_B = -# C_1: C_1 -C_1 = [-0.15:0.05:0] -# C_2: C_2 -C_2 = -# C_3: C_3 -C_3 = -# C_4: C_4 -C_4 = -# C0: C0 -C0 = 0.6 -# C1: C1 -C1 = [-0.25:0.05:0] -# C2: C2 -C2 = -# C3: C3 -C3 = -# C_D: C_D -C_D = [2.8e-4 2.8e-4] -# C_P: C_P -C_P = [1.1e-4 1.1e-4] -# C_V: C_V -C_V = 0.0 -# L_S: L_S -L_S = -# C_B: C_B 
-C_B = -# LOCAL_SEARCH: LOCAL_SEARCH -LOCAL_SEARCH = -# DELTA_F: DELTA_F -DELTA_F = 0.01 -# FFE_BACKOFF: FFE_BACKOFF -FFE_BACKOFF = 4 -# DER_0: DER_0 -DER_0 = 1.0E-5 -# ETA_0: ETA_0 -ETA_0 = 2.6E-8 -# F_B: F_B -F_B = 26.5625 -# F_1: F_1 -F_1 = 0.05 -# F_2: F_2 -F_2 = 26.5625 -# F_MIN: F_MIN -F_MIN = 0.05 -# F_P1: F_P1 -F_P1 = 10.625 -# F_P2: F_P2 -F_P2 = 1.0E99 -# F_V: F_V -F_V = 4.0 -# F_F: F_F -F_F = 4.0 -# F_N: F_N -F_N = 4.0 -# F_R: F_R -F_R = 0.75 -# F_Z: F_Z -F_Z = 10.625 -# G_DC: G_DC -G_DC = [-15:1:0] -# GDC_MIN: GDC_MIN -GDC_MIN = -# G_QUAL: G_QUAL -G_QUAL = -# G2_QUAL: G2_QUAL -G2_QUAL = -# L: L -L = 4.0 -# M: M -M = 32.0 -# N_B: N_B -N_B = 10.0 -# N_BX: N_BX -N_BX = 10.0 -# N_B_STEP: N_B_STEP -N_B_STEP = -# R_0: R_0 -R_0 = 50.0 -# R_D: R_D -R_D = [55 55] -# R_LM: R_LM -R_LM = 0.95 -# SIGMA_RJ: SIGMA_RJ -SIGMA_RJ = 0.01 -# SNR_TX: SNR_TX -SNR_TX = 31.0 -# Z_PFEXT: Z_PFEXT -Z_PFEXT = [12 30] -# Z_PNEXT: Z_PNEXT -Z_PNEXT = [12 12] -# Z_PRX: Z_PRX -Z_PRX = [12 30] -# Z_PSELECT: Z_PSELECT -Z_PSELECT = [1 2 ] -# Z_PTX: Z_PTX -Z_PTX = [12 30] -# N_BG: N_BG -N_BG = 0 -# N_BF: N_BF -N_BF = 6 -# N_F: N_F -N_F = -# BMAXG: BMAXG -BMAXG = 0.2 -# B_FLOAT_RSS_MAX: B_FLOAT_RSS_MAX -B_FLOAT_RSS_MAX = -# N_TAIL_START: N_TAIL_START -N_TAIL_START = -# SAMPLES_FOR_C2M: SAMPLES_FOR_C2M -SAMPLES_FOR_C2M = 100 -# T_O: T_O -T_O = -# AC_CM_RMS: AC_CM_RMS -AC_CM_RMS = 0.0 -# ACCM_MAX_FREQ: ACCM_MAX_FREQ -ACCM_MAX_FREQ = 26.5625 -# ENFORCE_CAUSALITY: ENFORCE_CAUSALITY -ENFORCE_CAUSALITY = -# EC_PULSE_TOL: EC_PULSE_TOL -EC_PULSE_TOL = 0.01 -# EC_DIFF_TOL: EC_DIFF_TOL -EC_DIFF_TOL = 1e-3 -# EC_REL_TOL: EC_REL_TOL -EC_REL_TOL = 1e-2 -# FORCE_PDF_BIN_SIZE: FORCE_PDF_BIN_SIZE -FORCE_PDF_BIN_SIZE = -# PDF_BIN_SIZE: PDF_BIN_SIZE -PDF_BIN_SIZE = 1e-5 -# IMPRSP_TRUNC_THRESHOLD: IMPRSP_TRUNC_THRESHOLD -IMPRSP_TRUNC_THRESHOLD = 1E-3 -# N_V: N_V -N_V = -# VEC_PASS_THRESHOLD: VEC_PASS_THRESHOLD -VEC_PASS_THRESHOLD = -# COM_PASS_THRESHOLD: COM_PASS_THRESHOLD -COM_PASS_THRESHOLD = 3.0 -# 
ERL_PASS_THRESHOLD: ERL_PASS_THRESHOLD -ERL_PASS_THRESHOLD = 8 -# EH_MAX: EH_MAX -EH_MAX = -# EH_MIN: EH_MIN -EH_MIN = -# CTLE_TYPE: CTLE_TYPE -CTLE_TYPE = CL93 -# F_HP_P: F_HP_P -F_HP_P = -# F_HP_PZ: F_HP_PZ -F_HP_PZ = 0.6640625 -# F_HP_Z: F_HP_Z -F_HP_Z = -# G_DC_HP: G_DC_HP -G_DC_HP = [-4:1:0] -# INCLUDE_PCB: INCLUDE_PCB -INCLUDE_PCB = 0.0 -# NOISE_CREST_FACTOR: NOISE_CREST_FACTOR -NOISE_CREST_FACTOR = -# PMD_TYPE: PMD_TYPE -PMD_TYPE = C2C -# MAX_BURST_LEN: MAX_BURST_LEN -MAX_BURST_LEN = -# ERR_PROPAGATION_COM_MARGIN: ERR_PROPAGATION_COM_MARGIN -ERR_PROPAGATION_COM_MARGIN = -# CDR: CDR -CDR = MM -# USE_ETA0_PSD: USE_ETA0_PSD -USE_ETA0_PSD = -# TDR_W_TXPKG: TDR_W_TXPKG -TDR_W_TXPKG = -# TDR_BUTTERWORTH: TDR_BUTTERWORTH -TDR_BUTTERWORTH = 1.0 -# BUTTERWORTH: BUTTERWORTH -BUTTERWORTH = TRUE -# BESSEL_THOMSON: BESSEL_THOMSON -BESSEL_THOMSON = FALSE -# BT_ORDER: BT_ORDER -BT_ORDER = 4 -# RUNTAG: RUNTAG -RUNTAG = V164 -# SIGMA_R: SIGMA_R -SIGMA_R = 0.02 -# HISTOGRAM_WINDOW_WEIGHT: HISTOGRAM_WINDOW_WEIGHT -HISTOGRAM_WINDOW_WEIGHT = RECTANGLE -# OPTIMIZE_LOOP_SPEED_UP: OPTIMIZE_LOOP_SPEED_UP -OPTIMIZE_LOOP_SPEED_UP = 1 -# TDECQ: TDECQ -TDECQ = -# PORT_ORDER: PORT_ORDER -PORT_ORDER = [1 3 2 4] -# RESULT_DIR: RESULT_DIR -RESULT_DIR = -# MIN_VEO_TEST: MIN_VEO_TEST -MIN_VEO_TEST = -# FORCE_TR: FORCE_TR -FORCE_TR = FALSE -# IDEAL_TX_TERM: IDEAL_TX_TERM -IDEAL_TX_TERM = 0.0 -# RX_CALIBRATION: RX_CALIBRATION -RX_CALIBRATION = 0.0 -# SIGMA_BBN_STEP: SIGMA_BBN_STEP -SIGMA_BBN_STEP = 0.005 -# T_R: T_R -T_R = 0.013 -# T_R_FILTER_TYPE: T_R_FILTER_TYPE -T_R_FILTER_TYPE = 1.0 -# T_R_MEAS_POINT: T_R_MEAS_POINT -T_R_MEAS_POINT = 0.0 -# IDEAL_RX_TERM: IDEAL_RX_TERM -IDEAL_RX_TERM = 0.0 -# INCLUDE_CTLE: INCLUDE_CTLE -INCLUDE_CTLE = 1.0 -# INCLUDE_TX_RX_FILTER: INCLUDE_TX_RX_FILTER -INCLUDE_TX_RX_FILTER = 1.0 -# INC_PACKAGE: INC_PACKAGE -INC_PACKAGE = 1.0 -# PACKAGE_TL_GAMMA0_A1_A2: PACKAGE_TL_GAMMA0_A1_A2 -PACKAGE_TL_GAMMA0_A1_A2 = [0 1.734e-3 1.455e-4] -# PACKAGE_TL_TAU: PACKAGE_TL_TAU 
-PACKAGE_TL_TAU = 0.006141 -# PACKAGE_Z_C: PACKAGE_Z_C -PACKAGE_Z_C = 85.0 -# BOARD_TL_GAMMA0_A1_A2: BOARD_TL_GAMMA0_A1_A2 -BOARD_TL_GAMMA0_A1_A2 = [0 4.114e-4 2.547e-4] -# BOARD_TL_TAU: BOARD_TL_TAU -BOARD_TL_TAU = 0.006191 -# BOARD_Z_C: BOARD_Z_C -BOARD_Z_C = 110.0 -# Z_BPFEXT: Z_BPFEXT -Z_BPFEXT = 72.0 -# Z_BPNEXT: Z_BPNEXT -Z_BPNEXT = 72.0 -# Z_BPRX: Z_BPRX -Z_BPRX = 151.0 -# Z_BPTX: Z_BPTX -Z_BPTX = 151.0 -# CB0: CB0 -CB0 = -# CB1: CB1 -CB1 = -# TDR: TDR -TDR = 1 -# ERL: ERL -ERL = 1.0 -# Z_T: Z_T -Z_T = 50.0 -# ERL_ONLY: ERL_ONLY -ERL_ONLY = 0.0 -# TR_TDR: TR_TDR -TR_TDR = 0.0189 -# TDR_DURATION: TDR_DURATION -TDR_DURATION = 10.0 -# N: N -N = -# KAPPA1: KAPPA1 -KAPPA1 = 1.0 -# KAPPA2: KAPPA2 -KAPPA2 = 1.0 -# TUKEY_WINDOW: TUKEY_WINDOW -TUKEY_WINDOW = -# BETA_X: BETA_X -BETA_X = 1.07E10 -# RHO_X: RHO_X -RHO_X = 0.44 -# FIXTURE_DELAY_TIME: FIXTURE_DELAY_TIME -FIXTURE_DELAY_TIME = 0.0 -# AUTO_TFX: AUTO_TFX -AUTO_TFX = -# GRR_LIMIT: GRR_LIMIT -GRR_LIMIT = 1 -# GRR: GRR -GRR = 1 -# GX: GX -GX = -# FFE_PRE_TAP_LEN: FFE_PRE_TAP_LEN -FFE_PRE_TAP_LEN = -# FFE_POST_TAP_LEN: FFE_POST_TAP_LEN -FFE_POST_TAP_LEN = -# FFE_TAP_STEP_SIZE: FFE_TAP_STEP_SIZE -FFE_TAP_STEP_SIZE = 0 -# FFE_MAIN_CUR_MIN: FFE_MAIN_CUR_MIN -FFE_MAIN_CUR_MIN = 0.7 -# FFE_PRE_TAP1_MAX: FFE_PRE_TAP1_MAX -FFE_PRE_TAP1_MAX = 0.7 -# FFE_POST_TAP1_MAX: FFE_POST_TAP1_MAX -FFE_POST_TAP1_MAX = 0.7 -# FFE_TAPN_MAX: FFE_TAPN_MAX -FFE_TAPN_MAX = 0.7 -# CURSOR_GAIN: CURSOR_GAIN -CURSOR_GAIN = -# SBR_GEN_METHOD: SBR_GEN_METHOD -SBR_GEN_METHOD = DEFAULT -# FIXTURE_BUILTIN_DELAY: FIXTURE_BUILTIN_DELAY -FIXTURE_BUILTIN_DELAY = 500p diff --git a/pyaedt/misc/spisim_com_configuration_files/com_120d_8.json b/pyaedt/misc/spisim_com_configuration_files/com_120d_8.json new file mode 100644 index 00000000000..599a2aebad2 --- /dev/null +++ b/pyaedt/misc/spisim_com_configuration_files/com_120d_8.json @@ -0,0 +1,192 @@ +{ + "table_93a1": { + "f_b": "26.5625", + "f_min": "0.05", + "Delta_f": "0.01", + "C_d": "[2.8e-4 2.8e-4]", + 
"L_s": "", + "C_b": "", + "z_p select": "[1 2 ]", + "z_p (TX)": "[12 30]", + "z_p (NEXT)": "[12 12]", + "z_p (FEXT)": "[12 30]", + "z_p (RX)": "[12 30]", + "C_p": "[1.1e-4 1.1e-4]", + "R_0": "50.0", + "R_d": "[55 55]", + "A_v": "0.45", + "A_fe": "0.45", + "A_ne": "0.65", + "AC_CM_RMS": "0.0", + "L": "4.0", + "M": "32.0" + }, + "filter_and_eq": { + "f_r": "0.75", + "c(0)": "0.6", + "c(-1)": "[-0.15:0.05:0]", + "c(-2)": "", + "c(-3)": "", + "c(1)": "[-0.25:0.05:0]", + "N_b": "10.0", + "b_max(1)": "0.5", + "b_max(2..N_b)": "", + "b_min(1)": "", + "b_min(2..N_b)": "", + "g_DC": "[-15:1:0]", + "f_z": "10.625", + "f_p1": "10.625", + "f_p2": "1.0E99", + "g_DC_HP": "[-4:1:0]", + "f_HP_PZ": "0.6640625" + }, + "io_control": { + "RESULT_DIR": "", + "Port Order": "[1 3 2 4]", + "RUNTAG": "V164" + }, + "operational": { + "COM Pass threshold": "3.0", + "ERL Pass threshold": "8", + "DER_0": "1.0E-5", + "T_r": "0.013", + "FORCE_TR": "FALSE", + "Local Search": "" + }, + "tdr_and_erl_options": { + "TDR": "1", + "ERL": "1.0", + "ERL_ONLY": "0.0", + "TR_TDR": "0.0189", + "N": "", + "beta_x": "1.07E10", + "rho_x": "0.44", + "fixture delay time": "0.0", + "TDR_W_TXPKG": "", + "N_bx": "10.0", + "Tukey_Window": "" + }, + "noise_jitter": { + "sigma_RJ": "0.01", + "A_DD": "0.02", + "eta_0": "2.6E-8", + "SNR_TX": "31.0", + "R_LM": "0.95" + }, + "table_93a3": { + "package_tl_gamma0_a1_a2": "[0 1.734e-3 1.455e-4]", + "package_tl_tau": "0.006141", + "package_Z_c": "85.0" + }, + "table_92_12": { + "board_tl_gamma0_a1_a2": "[0 4.114e-4 2.547e-4]", + "board_tl_tau": "0.006191", + "board_Z_c": "110.0", + "z_bp (TX)": "151.0", + "z_bp (NEXT)": "72.0", + "z_bp (FEXT)": "72.0", + "z_bp (RX)": "151.0", + "C_0": "", + "C_1": "", + "Include PCB": "0.0" + }, + "floating_tap_control": { + "N_bg": "0", + "N_bf": "6", + "N_f": "", + "bmaxg": "0.2", + "B_float_RSS_MAX": "", + "N_tail_start": "" + }, + "icn_fom_ild_parameters": { + "f_v": "4.0", + "f_f": "4.0", + "f_n": "4.0", + "f_2": "26.5625", + "A_ft": "", 
+ "A_nt": "" + }, + "receiver_testing": { + "RX_CALIBRATION": "0.0", + "Sigma BBN step": "0.005" + }, + "spisim_control": { + "VERSION": "3.40", + "THRUSNP": "", + "FEXTARY": "", + "NEXTARY": "", + "SPECTAG": "Customized", + "FSTTHRU": "-1", + "NUMPORT": "-1", + "GENHTML": "T" + }, + "other_parameters": { + "C_4": "", + "C2": "", + "C3": "", + "C_V": "0.0", + "FFE_BACKOFF": "4", + "F_1": "0.05", + "GDC_MIN": "", + "G_QUAL": "", + "G2_QUAL": "", + "N_B_STEP": "", + "SAMPLES_FOR_C2M": "100", + "T_O": "", + "ACCM_MAX_FREQ": "26.5625", + "ENFORCE_CAUSALITY": "", + "EC_PULSE_TOL": "0.01", + "EC_DIFF_TOL": "1e-3", + "EC_REL_TOL": "1e-2", + "Force PDF bin size": "", + "PDF_BIN_SIZE": "1e-5", + "IMPRSP_TRUNC_THRESHOLD": "1E-3", + "N_V": "", + "VEC_PASS_THRESHOLD": "", + "EH_MAX": "", + "EH_MIN": "", + "CTLE_TYPE": "CL93", + "F_HP_P": "", + "F_HP_Z": "", + "NOISE_CREST_FACTOR": "", + "PMD_TYPE": "C2C", + "MAX_BURST_LEN": "", + "ERR_PROPAGATION_COM_MARGIN": "", + "CDR": "MM", + "USE_ETA0_PSD": "", + "TDR_BUTTERWORTH": "1.0", + "BUTTERWORTH": "TRUE", + "BESSEL_THOMSON": "FALSE", + "BT_ORDER": "4", + "SIGMA_R": "0.02", + "HISTOGRAM_WINDOW_WEIGHT": "RECTANGLE", + "OPTIMIZE_LOOP_SPEED_UP": "1", + "TDECQ": "", + "MIN_VEO_TEST": "", + "IDEAL_TX_TERM": "0.0", + "T_R_FILTER_TYPE": "1.0", + "T_R_MEAS_POINT": "0.0", + "IDEAL_RX_TERM": "0.0", + "INCLUDE_CTLE": "1.0", + "INCLUDE_TX_RX_FILTER": "1.0", + "INC_PACKAGE": "1.0", + "Z_T": "50.0", + "TDR_DURATION": "10.0", + "KAPPA1": "1.0", + "kappa2": "1.0", + "AUTO_TFX": "", + "GRR_LIMIT": "1", + "GRR": "1", + "GX": "", + "FFE_PRE_TAP_LEN": "", + "FFE_POST_TAP_LEN": "", + "FFE_TAP_STEP_SIZE": "0", + "FFE_MAIN_CUR_MIN": "0.7", + "FFE_PRE_TAP1_MAX": "0.7", + "FFE_POST_TAP1_MAX": "0.7", + "FFE_TAPN_MAX": "0.7", + "CURSOR_GAIN": "", + "SBR_GEN_METHOD": "DEFAULT", + "FIXTURE_BUILTIN_DELAY": "500p" + } +} \ No newline at end of file diff --git a/pyaedt/misc/spisim_com_configuration_files/com_93_8.cfg 
b/pyaedt/misc/spisim_com_configuration_files/com_93_8.cfg deleted file mode 100644 index 089a50c86de..00000000000 --- a/pyaedt/misc/spisim_com_configuration_files/com_93_8.cfg +++ /dev/null @@ -1,333 +0,0 @@ -################################################################################ -# MODULE: COM -# GENERATED ON 20240215153946 -################################################################################ - -# VERSION: VERSION -VERSION = 3.40 -# THRUSNP: THRUSNP -THRUSNP = -# FEXTARY: FEXTARY -FEXTARY = -# NEXTARY: NEXTARY -NEXTARY = -# SPECTAG: SPECTAG -SPECTAG = Customized -# FSTTHRU: FSTTHRU -FSTTHRU = -1 -# NUMPORT: NUMPORT -NUMPORT = -1 -# GENHTML: GENHTML -GENHTML = T -# A_DD: A_DD -A_DD = 0.05 -# A_FE: A_FE -A_FE = 0.4 -# A_NE: A_NE -A_NE = 0.6 -# A_FT: A_FT -A_FT = -# A_NT: A_NT -A_NT = -# A_V: A_V -A_V = 0.4 -# B_MAX1: B_MAX1 -B_MAX1 = 1.0 -# B_MIN1: B_MIN1 -B_MIN1 = -# B_MAX2_N_B: B_MAX2_N_B -B_MAX2_N_B = -# B_MIN2_N_B: B_MIN2_N_B -B_MIN2_N_B = -# C_1: C_1 -C_1 = [-0.18:0.02:0] -# C_2: C_2 -C_2 = -# C_3: C_3 -C_3 = -# C_4: C_4 -C_4 = -# C0: C0 -C0 = 0.62 -# C1: C1 -C1 = [-0.38:0.02:0] -# C2: C2 -C2 = -# C3: C3 -C3 = -# C_D: C_D -C_D = [2.5e-4 2.5e-4] -# C_P: C_P -C_P = [1.8e-4 1.8e-4] -# C_V: C_V -C_V = 0.0 -# L_S: L_S -L_S = -# C_B: C_B -C_B = -# LOCAL_SEARCH: LOCAL_SEARCH -LOCAL_SEARCH = -# DELTA_F: DELTA_F -DELTA_F = 0.005 -# FFE_BACKOFF: FFE_BACKOFF -FFE_BACKOFF = 4 -# DER_0: DER_0 -DER_0 = 1.0E-5 -# ETA_0: ETA_0 -ETA_0 = 5.2E-8 -# F_B: F_B -F_B = 25.78125 -# F_1: F_1 -F_1 = 0.05 -# F_2: F_2 -F_2 = 25.78125 -# F_MIN: F_MIN -F_MIN = 0.05 -# F_P1: F_P1 -F_P1 = 6.4453125 -# F_P2: F_P2 -F_P2 = 25.78125 -# F_V: F_V -F_V = 4.0 -# F_F: F_F -F_F = 4.0 -# F_N: F_N -F_N = 4.0 -# F_R: F_R -F_R = 0.75 -# F_Z: F_Z -F_Z = 6.4453125 -# G_DC: G_DC -G_DC = [-12:1:0] -# GDC_MIN: GDC_MIN -GDC_MIN = -# G_QUAL: G_QUAL -G_QUAL = -# G2_QUAL: G2_QUAL -G2_QUAL = -# L: L -L = 2.0 -# M: M -M = 32.0 -# N_B: N_B -N_B = 14.0 -# N_BX: N_BX -N_BX = 14.0 -# N_B_STEP: 
N_B_STEP -N_B_STEP = -# R_0: R_0 -R_0 = 50.0 -# R_D: R_D -R_D = [55 55] -# R_LM: R_LM -R_LM = 1.0 -# SIGMA_RJ: SIGMA_RJ -SIGMA_RJ = 0.01 -# SNR_TX: SNR_TX -SNR_TX = 27.0 -# Z_PFEXT: Z_PFEXT -Z_PFEXT = [12 30] -# Z_PNEXT: Z_PNEXT -Z_PNEXT = [12 12] -# Z_PRX: Z_PRX -Z_PRX = [12 30] -# Z_PSELECT: Z_PSELECT -Z_PSELECT = [1 2] -# Z_PTX: Z_PTX -Z_PTX = [12 30] -# N_BG: N_BG -N_BG = 0 -# N_BF: N_BF -N_BF = 6 -# N_F: N_F -N_F = -# BMAXG: BMAXG -BMAXG = 0.2 -# B_FLOAT_RSS_MAX: B_FLOAT_RSS_MAX -B_FLOAT_RSS_MAX = -# N_TAIL_START: N_TAIL_START -N_TAIL_START = -# SAMPLES_FOR_C2M: SAMPLES_FOR_C2M -SAMPLES_FOR_C2M = 100 -# T_O: T_O -T_O = -# AC_CM_RMS: AC_CM_RMS -AC_CM_RMS = 0.0 -# ACCM_MAX_FREQ: ACCM_MAX_FREQ -ACCM_MAX_FREQ = 25.78125 -# ENFORCE_CAUSALITY: ENFORCE_CAUSALITY -ENFORCE_CAUSALITY = -# EC_PULSE_TOL: EC_PULSE_TOL -EC_PULSE_TOL = 0.01 -# EC_DIFF_TOL: EC_DIFF_TOL -EC_DIFF_TOL = 1e-3 -# EC_REL_TOL: EC_REL_TOL -EC_REL_TOL = 1e-2 -# FORCE_PDF_BIN_SIZE: FORCE_PDF_BIN_SIZE -FORCE_PDF_BIN_SIZE = -# PDF_BIN_SIZE: PDF_BIN_SIZE -PDF_BIN_SIZE = 1e-5 -# IMPRSP_TRUNC_THRESHOLD: IMPRSP_TRUNC_THRESHOLD -IMPRSP_TRUNC_THRESHOLD = 1E-3 -# N_V: N_V -N_V = -# VEC_PASS_THRESHOLD: VEC_PASS_THRESHOLD -VEC_PASS_THRESHOLD = -# COM_PASS_THRESHOLD: COM_PASS_THRESHOLD -COM_PASS_THRESHOLD = 3.0 -# ERL_PASS_THRESHOLD: ERL_PASS_THRESHOLD -ERL_PASS_THRESHOLD = -# EH_MAX: EH_MAX -EH_MAX = -# EH_MIN: EH_MIN -EH_MIN = -# CTLE_TYPE: CTLE_TYPE -CTLE_TYPE = CL93 -# F_HP_P: F_HP_P -F_HP_P = -# F_HP_PZ: F_HP_PZ -F_HP_PZ = -# F_HP_Z: F_HP_Z -F_HP_Z = -# G_DC_HP: G_DC_HP -G_DC_HP = -# INCLUDE_PCB: INCLUDE_PCB -INCLUDE_PCB = 1.0 -# NOISE_CREST_FACTOR: NOISE_CREST_FACTOR -NOISE_CREST_FACTOR = -# PMD_TYPE: PMD_TYPE -PMD_TYPE = C2C -# MAX_BURST_LEN: MAX_BURST_LEN -MAX_BURST_LEN = -# ERR_PROPAGATION_COM_MARGIN: ERR_PROPAGATION_COM_MARGIN -ERR_PROPAGATION_COM_MARGIN = -# CDR: CDR -CDR = MM -# USE_ETA0_PSD: USE_ETA0_PSD -USE_ETA0_PSD = -# TDR_W_TXPKG: TDR_W_TXPKG -TDR_W_TXPKG = -# TDR_BUTTERWORTH: TDR_BUTTERWORTH 
-TDR_BUTTERWORTH = -# BUTTERWORTH: BUTTERWORTH -BUTTERWORTH = TRUE -# BESSEL_THOMSON: BESSEL_THOMSON -BESSEL_THOMSON = FALSE -# BT_ORDER: BT_ORDER -BT_ORDER = 4 -# RUNTAG: RUNTAG -RUNTAG = -# SIGMA_R: SIGMA_R -SIGMA_R = 0.02 -# HISTOGRAM_WINDOW_WEIGHT: HISTOGRAM_WINDOW_WEIGHT -HISTOGRAM_WINDOW_WEIGHT = RECTANGLE -# OPTIMIZE_LOOP_SPEED_UP: OPTIMIZE_LOOP_SPEED_UP -OPTIMIZE_LOOP_SPEED_UP = 1 -# TDECQ: TDECQ -TDECQ = -# PORT_ORDER: PORT_ORDER -PORT_ORDER = [1 3 2 4] -# RESULT_DIR: RESULT_DIR -RESULT_DIR = -# MIN_VEO_TEST: MIN_VEO_TEST -MIN_VEO_TEST = -# FORCE_TR: FORCE_TR -FORCE_TR = FALSE -# IDEAL_TX_TERM: IDEAL_TX_TERM -IDEAL_TX_TERM = 0.0 -# RX_CALIBRATION: RX_CALIBRATION -RX_CALIBRATION = 0.0 -# SIGMA_BBN_STEP: SIGMA_BBN_STEP -SIGMA_BBN_STEP = 0.005 -# T_R: T_R -T_R = 0.008 -# T_R_FILTER_TYPE: T_R_FILTER_TYPE -T_R_FILTER_TYPE = FALSE -# T_R_MEAS_POINT: T_R_MEAS_POINT -T_R_MEAS_POINT = FALSE -# IDEAL_RX_TERM: IDEAL_RX_TERM -IDEAL_RX_TERM = 0.0 -# INCLUDE_CTLE: INCLUDE_CTLE -INCLUDE_CTLE = 1.0 -# INCLUDE_TX_RX_FILTER: INCLUDE_TX_RX_FILTER -INCLUDE_TX_RX_FILTER = 1.0 -# INC_PACKAGE: INC_PACKAGE -INC_PACKAGE = 1.0 -# PACKAGE_TL_GAMMA0_A1_A2: PACKAGE_TL_GAMMA0_A1_A2 -PACKAGE_TL_GAMMA0_A1_A2 = [0 1.734e-3 1.455e-4] -# PACKAGE_TL_TAU: PACKAGE_TL_TAU -PACKAGE_TL_TAU = 0.006141 -# PACKAGE_Z_C: PACKAGE_Z_C -PACKAGE_Z_C = 78.2 -# BOARD_TL_GAMMA0_A1_A2: BOARD_TL_GAMMA0_A1_A2 -BOARD_TL_GAMMA0_A1_A2 = [0 4.114e-4 2.547e-4] -# BOARD_TL_TAU: BOARD_TL_TAU -BOARD_TL_TAU = 0.006191 -# BOARD_Z_C: BOARD_Z_C -BOARD_Z_C = 109.8 -# Z_BPFEXT: Z_BPFEXT -Z_BPFEXT = 72.0 -# Z_BPNEXT: Z_BPNEXT -Z_BPNEXT = 72.0 -# Z_BPRX: Z_BPRX -Z_BPRX = 151.0 -# Z_BPTX: Z_BPTX -Z_BPTX = 151.0 -# CB0: CB0 -CB0 = -# CB1: CB1 -CB1 = -# TDR: TDR -TDR = 0 -# ERL: ERL -ERL = -# Z_T: Z_T -Z_T = 50 -# ERL_ONLY: ERL_ONLY -ERL_ONLY = -# TR_TDR: TR_TDR -TR_TDR = 8.0E-3 -# TDR_DURATION: TDR_DURATION -TDR_DURATION = 5 -# N: N -N = -# KAPPA1: KAPPA1 -KAPPA1 = 1.0 -# KAPPA2: KAPPA2 -KAPPA2 = 1.0 -# TUKEY_WINDOW: 
TUKEY_WINDOW -TUKEY_WINDOW = -# BETA_X: BETA_X -BETA_X = -# RHO_X: RHO_X -RHO_X = -# FIXTURE_DELAY_TIME: FIXTURE_DELAY_TIME -FIXTURE_DELAY_TIME = -# AUTO_TFX: AUTO_TFX -AUTO_TFX = -# GRR_LIMIT: GRR_LIMIT -GRR_LIMIT = 1 -# GRR: GRR -GRR = 1 -# GX: GX -GX = -# FFE_PRE_TAP_LEN: FFE_PRE_TAP_LEN -FFE_PRE_TAP_LEN = -# FFE_POST_TAP_LEN: FFE_POST_TAP_LEN -FFE_POST_TAP_LEN = -# FFE_TAP_STEP_SIZE: FFE_TAP_STEP_SIZE -FFE_TAP_STEP_SIZE = 0 -# FFE_MAIN_CUR_MIN: FFE_MAIN_CUR_MIN -FFE_MAIN_CUR_MIN = 0.7 -# FFE_PRE_TAP1_MAX: FFE_PRE_TAP1_MAX -FFE_PRE_TAP1_MAX = 0.7 -# FFE_POST_TAP1_MAX: FFE_POST_TAP1_MAX -FFE_POST_TAP1_MAX = 0.7 -# FFE_TAPN_MAX: FFE_TAPN_MAX -FFE_TAPN_MAX = 0.7 -# CURSOR_GAIN: CURSOR_GAIN -CURSOR_GAIN = -# SBR_GEN_METHOD: SBR_GEN_METHOD -SBR_GEN_METHOD = DEFAULT -# FIXTURE_BUILTIN_DELAY: FIXTURE_BUILTIN_DELAY -FIXTURE_BUILTIN_DELAY = 500p diff --git a/pyaedt/misc/spisim_com_configuration_files/com_93_8.json b/pyaedt/misc/spisim_com_configuration_files/com_93_8.json new file mode 100644 index 00000000000..10f2092bb7e --- /dev/null +++ b/pyaedt/misc/spisim_com_configuration_files/com_93_8.json @@ -0,0 +1,192 @@ +{ + "table_93a1": { + "f_b": "25.78125", + "f_min": "0.05", + "Delta_f": "0.005", + "C_d": "[2.5e-4 2.5e-4]", + "L_s": "", + "C_b": "", + "z_p select": "[1 2]", + "z_p (TX)": "[12 30]", + "z_p (NEXT)": "[12 12]", + "z_p (FEXT)": "[12 30]", + "z_p (RX)": "[12 30]", + "C_p": "[1.8e-4 1.8e-4]", + "R_0": "50.0", + "R_d": "[55 55]", + "A_v": "0.4", + "A_fe": "0.4", + "A_ne": "0.6", + "AC_CM_RMS": "0.0", + "L": "2.0", + "M": "32.0" + }, + "filter_and_eq": { + "f_r": "0.75", + "c(0)": "0.62", + "c(-1)": "[-0.18:0.02:0]", + "c(-2)": "", + "c(-3)": "", + "c(1)": "[-0.38:0.02:0]", + "N_b": "14.0", + "b_max(1)": "1.0", + "b_max(2..N_b)": "", + "b_min(1)": "", + "b_min(2..N_b)": "", + "g_DC": "[-12:1:0]", + "f_z": "6.4453125", + "f_p1": "6.4453125", + "f_p2": "25.78125", + "g_DC_HP": "", + "f_HP_PZ": "" + }, + "io_control": { + "RESULT_DIR": "", + "Port Order": "[1 3 2 
4]", + "RUNTAG": "" + }, + "operational": { + "COM Pass threshold": "3.0", + "ERL Pass threshold": "", + "DER_0": "1.0E-5", + "T_r": "0.008", + "FORCE_TR": "FALSE", + "Local Search": "" + }, + "tdr_and_erl_options": { + "TDR": "0", + "ERL": "", + "ERL_ONLY": "", + "TR_TDR": "8.0E-3", + "N": "", + "beta_x": "", + "rho_x": "", + "fixture delay time": "", + "TDR_W_TXPKG": "", + "N_bx": "14.0", + "Tukey_Window": "" + }, + "noise_jitter": { + "sigma_RJ": "0.01", + "A_DD": "0.05", + "eta_0": "5.2E-8", + "SNR_TX": "27.0", + "R_LM": "1.0" + }, + "table_93a3": { + "package_tl_gamma0_a1_a2": "[0 1.734e-3 1.455e-4]", + "package_tl_tau": "0.006141", + "package_Z_c": "78.2" + }, + "table_92_12": { + "board_tl_gamma0_a1_a2": "[0 4.114e-4 2.547e-4]", + "board_tl_tau": "0.006191", + "board_Z_c": "109.8", + "z_bp (TX)": "151.0", + "z_bp (NEXT)": "72.0", + "z_bp (FEXT)": "72.0", + "z_bp (RX)": "151.0", + "C_0": "", + "C_1": "", + "Include PCB": "1.0" + }, + "floating_tap_control": { + "N_bg": "0", + "N_bf": "6", + "N_f": "", + "bmaxg": "0.2", + "B_float_RSS_MAX": "", + "N_tail_start": "" + }, + "icn_fom_ild_parameters": { + "f_v": "4.0", + "f_f": "4.0", + "f_n": "4.0", + "f_2": "25.78125", + "A_ft": "", + "A_nt": "" + }, + "receiver_testing": { + "RX_CALIBRATION": "0.0", + "Sigma BBN step": "0.005" + }, + "spisim_control": { + "VERSION": "3.40", + "THRUSNP": "", + "FEXTARY": "", + "NEXTARY": "", + "SPECTAG": "Customized", + "FSTTHRU": "-1", + "NUMPORT": "-1", + "GENHTML": "T" + }, + "other_parameters": { + "C_4": "", + "C2": "", + "C3": "", + "C_V": "0.0", + "FFE_BACKOFF": "4", + "F_1": "0.05", + "GDC_MIN": "", + "G_QUAL": "", + "G2_QUAL": "", + "N_B_STEP": "", + "SAMPLES_FOR_C2M": "100", + "T_O": "", + "ACCM_MAX_FREQ": "25.78125", + "ENFORCE_CAUSALITY": "", + "EC_PULSE_TOL": "0.01", + "EC_DIFF_TOL": "1e-3", + "EC_REL_TOL": "1e-2", + "Force PDF bin size": "", + "PDF_BIN_SIZE": "1e-5", + "IMPRSP_TRUNC_THRESHOLD": "1E-3", + "N_V": "", + "VEC_PASS_THRESHOLD": "", + "EH_MAX": "", + 
"EH_MIN": "", + "CTLE_TYPE": "CL93", + "F_HP_P": "", + "F_HP_Z": "", + "NOISE_CREST_FACTOR": "", + "PMD_TYPE": "C2C", + "MAX_BURST_LEN": "", + "ERR_PROPAGATION_COM_MARGIN": "", + "CDR": "MM", + "USE_ETA0_PSD": "", + "TDR_BUTTERWORTH": "", + "BUTTERWORTH": "TRUE", + "BESSEL_THOMSON": "FALSE", + "BT_ORDER": "4", + "SIGMA_R": "0.02", + "HISTOGRAM_WINDOW_WEIGHT": "RECTANGLE", + "OPTIMIZE_LOOP_SPEED_UP": "1", + "TDECQ": "", + "MIN_VEO_TEST": "", + "IDEAL_TX_TERM": "0.0", + "T_R_FILTER_TYPE": "FALSE", + "T_R_MEAS_POINT": "FALSE", + "IDEAL_RX_TERM": "0.0", + "INCLUDE_CTLE": "1.0", + "INCLUDE_TX_RX_FILTER": "1.0", + "INC_PACKAGE": "1.0", + "Z_T": "50", + "TDR_DURATION": "5", + "KAPPA1": "1.0", + "kappa2": "1.0", + "AUTO_TFX": "", + "GRR_LIMIT": "1", + "GRR": "1", + "GX": "", + "FFE_PRE_TAP_LEN": "", + "FFE_POST_TAP_LEN": "", + "FFE_TAP_STEP_SIZE": "0", + "FFE_MAIN_CUR_MIN": "0.7", + "FFE_PRE_TAP1_MAX": "0.7", + "FFE_POST_TAP1_MAX": "0.7", + "FFE_TAPN_MAX": "0.7", + "CURSOR_GAIN": "", + "SBR_GEN_METHOD": "DEFAULT", + "FIXTURE_BUILTIN_DELAY": "500p" + } +} \ No newline at end of file diff --git a/pyaedt/misc/spisim_com_configuration_files/com_94_17.json b/pyaedt/misc/spisim_com_configuration_files/com_94_17.json new file mode 100644 index 00000000000..88fd749a2be --- /dev/null +++ b/pyaedt/misc/spisim_com_configuration_files/com_94_17.json @@ -0,0 +1,192 @@ +{ + "table_93a1": { + "f_b": "13.59375", + "f_min": "0.05", + "Delta_f": "0.01", + "C_d": "[2.5e-4 2.5e-4]", + "L_s": "", + "C_b": "", + "z_p select": "[1 2]", + "z_p (TX)": "[12 30]", + "z_p (NEXT)": "[12 12]", + "z_p (FEXT)": "[12 30]", + "z_p (RX)": "[12 30]", + "C_p": "[1.8e-4 1.8e-4]", + "R_0": "50.0", + "R_d": "[55 55]", + "A_v": "0.4", + "A_fe": "0.4", + "A_ne": "0.6", + "AC_CM_RMS": "0.0", + "L": "4.0", + "M": "32.0" + }, + "filter_and_eq": { + "f_r": "0.75", + "c(0)": "0.62", + "c(-1)": "[-0.18:0.02:0]", + "c(-2)": "", + "c(-3)": "", + "c(1)": "[-0.38:0.02:0]", + "N_b": "16.0", + "b_max(1)": "1.0", + 
"b_max(2..N_b)": "", + "b_min(1)": "", + "b_min(2..N_b)": "", + "g_DC": "[-12:1:0]", + "f_z": "3.3984375", + "f_p1": "3.3984375", + "f_p2": "13.59375", + "g_DC_HP": "", + "f_HP_PZ": "" + }, + "io_control": { + "RESULT_DIR": "", + "Port Order": "[1 3 2 4]", + "RUNTAG": "" + }, + "operational": { + "COM Pass threshold": "3.0", + "ERL Pass threshold": "", + "DER_0": "3.0E-4", + "T_r": "0.008", + "FORCE_TR": "FALSE", + "Local Search": "" + }, + "tdr_and_erl_options": { + "TDR": "0", + "ERL": "", + "ERL_ONLY": "", + "TR_TDR": "8.0E-3", + "N": "", + "beta_x": "", + "rho_x": "", + "fixture delay time": "", + "TDR_W_TXPKG": "", + "N_bx": "16.0", + "Tukey_Window": "" + }, + "noise_jitter": { + "sigma_RJ": "0.005", + "A_DD": "0.025", + "eta_0": "5.2E-8", + "SNR_TX": "31.0", + "R_LM": "0.92" + }, + "table_93a3": { + "package_tl_gamma0_a1_a2": "[0 1.734e-3 1.455e-4]", + "package_tl_tau": "0.006141", + "package_Z_c": "78.2" + }, + "table_92_12": { + "board_tl_gamma0_a1_a2": "[0 4.114e-4 2.547e-4]", + "board_tl_tau": "0.006191", + "board_Z_c": "109.8", + "z_bp (TX)": "151.0", + "z_bp (NEXT)": "72.0", + "z_bp (FEXT)": "72.0", + "z_bp (RX)": "151.0", + "C_0": "", + "C_1": "", + "Include PCB": "0.0" + }, + "floating_tap_control": { + "N_bg": "0", + "N_bf": "6", + "N_f": "", + "bmaxg": "0.2", + "B_float_RSS_MAX": "", + "N_tail_start": "" + }, + "icn_fom_ild_parameters": { + "f_v": "4.0", + "f_f": "4.0", + "f_n": "4.0", + "f_2": "13.59375", + "A_ft": "", + "A_nt": "" + }, + "receiver_testing": { + "RX_CALIBRATION": "0.0", + "Sigma BBN step": "0.005" + }, + "spisim_control": { + "VERSION": "3.40", + "THRUSNP": "", + "FEXTARY": "", + "NEXTARY": "", + "SPECTAG": "Customized", + "FSTTHRU": "-1", + "NUMPORT": "-1", + "GENHTML": "T" + }, + "other_parameters": { + "C_4": "", + "C2": "", + "C3": "", + "C_V": "0.0", + "FFE_BACKOFF": "4", + "F_1": "0.05", + "GDC_MIN": "", + "G_QUAL": "", + "G2_QUAL": "", + "N_B_STEP": "", + "SAMPLES_FOR_C2M": "100", + "T_O": "", + "ACCM_MAX_FREQ": "13.59375", 
+ "ENFORCE_CAUSALITY": "", + "EC_PULSE_TOL": "0.01", + "EC_DIFF_TOL": "1e-3", + "EC_REL_TOL": "1e-2", + "Force PDF bin size": "", + "PDF_BIN_SIZE": "1e-5", + "IMPRSP_TRUNC_THRESHOLD": "1E-3", + "N_V": "", + "VEC_PASS_THRESHOLD": "", + "EH_MAX": "", + "EH_MIN": "", + "CTLE_TYPE": "CL93", + "F_HP_P": "", + "F_HP_Z": "", + "NOISE_CREST_FACTOR": "", + "PMD_TYPE": "C2C", + "MAX_BURST_LEN": "", + "ERR_PROPAGATION_COM_MARGIN": "", + "CDR": "MM", + "USE_ETA0_PSD": "", + "TDR_BUTTERWORTH": "", + "BUTTERWORTH": "TRUE", + "BESSEL_THOMSON": "FALSE", + "BT_ORDER": "4", + "SIGMA_R": "0.02", + "HISTOGRAM_WINDOW_WEIGHT": "RECTANGLE", + "OPTIMIZE_LOOP_SPEED_UP": "1", + "TDECQ": "", + "MIN_VEO_TEST": "", + "IDEAL_TX_TERM": "0.0", + "T_R_FILTER_TYPE": "FALSE", + "T_R_MEAS_POINT": "FALSE", + "IDEAL_RX_TERM": "0.0", + "INCLUDE_CTLE": "1.0", + "INCLUDE_TX_RX_FILTER": "1.0", + "INC_PACKAGE": "1.0", + "Z_T": "50", + "TDR_DURATION": "5", + "KAPPA1": "1.0", + "kappa2": "1.0", + "AUTO_TFX": "", + "GRR_LIMIT": "1", + "GRR": "1", + "GX": "", + "FFE_PRE_TAP_LEN": "", + "FFE_POST_TAP_LEN": "", + "FFE_TAP_STEP_SIZE": "0", + "FFE_MAIN_CUR_MIN": "0.7", + "FFE_PRE_TAP1_MAX": "0.7", + "FFE_POST_TAP1_MAX": "0.7", + "FFE_TAPN_MAX": "0.7", + "CURSOR_GAIN": "", + "SBR_GEN_METHOD": "DEFAULT", + "FIXTURE_BUILTIN_DELAY": "500p" + } +} \ No newline at end of file diff --git a/pyaedt/misc/spisim_com_configuration_files/com_parameters.py b/pyaedt/misc/spisim_com_configuration_files/com_parameters.py new file mode 100644 index 00000000000..5c705cc9598 --- /dev/null +++ b/pyaedt/misc/spisim_com_configuration_files/com_parameters.py @@ -0,0 +1,374 @@ +import json +from pathlib import Path + +from pyaedt import pyaedt_function_handler +from pyaedt import settings +from pyaedt.misc.spisim_com_configuration_files.com_settings_mapping import spimsim_matlab_keywords_mapping + +logger = settings.logger + + +class COMParameters: + """Base class to manage COM parameters.""" + + _CFG_DIR = Path(__file__).parent.parent / 
"spisim_com_configuration_files" + _STD_TABLE_MAPPING = { + "50GAUI-1-C2C": "com_120d_8.json", + "100GAUI-2-C2C": "com_120d_8.json", + "200GAUI-4": "com_120d_8.json", + "400GAUI-8": "com_120d_8.json", + "100GBASE-KR4": "com_93_8.json", + "100GBASE-KP4": "com_94_17.json", + } + + def __init__(self, standard): + self.table_93a1 = {} + self.filter_and_eq = {} + self.io_control = {} + self.operational = {} + self.tdr_and_erl_options = {} + self.noise_jitter = {} + self.table_93a3 = {} + self.table_92_12 = {} + self.floating_tap_control = {} + self.icn_fom_ild_parameters = {} + self.receiver_testing = {} + self.spisim_control = {} + self.other_parameters = {} + + self._init() + self.standard = standard + + @pyaedt_function_handler + def _init(self): + pass # pragma: no cover + + @property + def parameters(self): + """All parameters.""" + temp = { + **self.spisim_control, + **self.table_93a1, + **self.filter_and_eq, + **self.io_control, + **self.operational, + **self.tdr_and_erl_options, + **self.noise_jitter, + **self.table_93a3, + **self.table_92_12, + **self.floating_tap_control, + **self.icn_fom_ild_parameters, + **self.receiver_testing, + **self.other_parameters, + } + return temp + + @property + def standard(self): + """Standard name. + + Returns + ------- + str + """ + return self._standard # pragma: no cover + + @standard.setter + def standard(self, value): + std_table = self._STD_TABLE_MAPPING[value] + cfg_path = self._CFG_DIR / std_table + self.load(cfg_path) + self._standard = value + + @pyaedt_function_handler + def set_parameter(self, keyword, value): + """Set a COM parameter. + + Parameters + ---------- + keyword : str, + Keyword of the COM parameter. + value : str, + Value of the COM parameter. 
+ """ + if keyword in self.table_93a1: + self.table_93a1[keyword] = value + elif keyword in self.filter_and_eq: + self.filter_and_eq[keyword] = value + elif keyword in self.io_control: + self.io_control[keyword] = value + elif keyword in self.operational: + self.operational[keyword] = value + elif keyword in self.tdr_and_erl_options: + self.tdr_and_erl_options[keyword] = value + elif keyword in self.noise_jitter: + self.noise_jitter[keyword] = value + elif keyword in self.table_93a3: + self.table_93a3[keyword] = value + elif keyword in self.table_92_12: + self.table_92_12[keyword] = value + elif keyword in self.floating_tap_control: + self.floating_tap_control[keyword] = value + elif keyword in self.icn_fom_ild_parameters: + self.icn_fom_ild_parameters[keyword] = value + elif keyword in self.receiver_testing: + self.receiver_testing[keyword] = value + elif keyword in self.spisim_control: + self.spisim_control[keyword] = value + else: + self.other_parameters[keyword] = value + + @pyaedt_function_handler + def export(self, file_path): + """Export COM parameter to a JSON file. + + Parameters + ---------- + file_path : str + Path of file. 
+ """ + temp = dict() + temp["table_93a1"] = self.table_93a1 + temp["filter_and_eq"] = self.filter_and_eq + temp["io_control"] = self.io_control + temp["operational"] = self.operational + temp["tdr_and_erl_options"] = self.tdr_and_erl_options + temp["noise_jitter"] = self.noise_jitter + temp["table_93a3"] = self.table_93a3 + temp["table_92_12"] = self.table_92_12 + temp["floating_tap_control"] = self.floating_tap_control + temp["icn_fom_ild_parameters"] = self.icn_fom_ild_parameters + temp["receiver_testing"] = self.receiver_testing + temp["spisim_control"] = self.spisim_control + temp["other_parameters"] = self.other_parameters + + with open(file_path, "w", encoding="utf-8") as f: + f.write(json.dumps(temp, indent=4, ensure_ascii=False)) + + @pyaedt_function_handler + def load(self, file_path): + """Load COM parameters from a JSON file. + + Parameters + ---------- + file_path : str, + Path of file. + """ + with open(file_path) as f: # pragma: no cover + temp = json.load(f) + + for k, v in temp.items(): # pragma: no cover + for k2, v2 in v.items(): + self.__getattribute__(k)[k2] = v2 + + @pyaedt_function_handler + def export_spisim_cfg(self, file_path): + """Export COM parameter to a SPISim cfg file. + + Parameters + ---------- + file_path : str + Path of file. + """ + with open(file_path, "w") as fp: + fp.write("################################################################################\n") + fp.write("# MODULE: COM\n") + fp.write("# GENERATED ON\n") + fp.write("################################################################################\n") + for kw, v in self.parameters.items(): + if kw in spimsim_matlab_keywords_mapping: + kw = spimsim_matlab_keywords_mapping[kw] + fp.write("# {0}: {0}\n".format(kw.upper())) + fp.write("{} = {}\n".format(kw.upper(), v)) + return True + + @pyaedt_function_handler + def load_spisim_cfg(self, file_path): + """Load a SPIsim configuration file. + + Parameters + ---------- + file_path: str + Path of the configuration file. 
+ + Returns + ------- + bool + ``True`` when successful, ``False`` when failed. + """ + reverse_map = {j: i for i, j in spimsim_matlab_keywords_mapping.items()} + + with open(file_path, "r") as fp: + lines = fp.readlines() + for line in lines: + if not line.startswith("#") and "=" in line: + split_line = [i.strip() for i in line.split("=")] + kw, value = split_line + if kw in reverse_map: # Get Matlab keyword + kw = reverse_map[kw] + self.set_parameter(kw, value) + return True + + +class COMParametersVer3p4(COMParameters): + """Manages COM parameters of version 3.4.""" + + def __init__(self, standard="50GAUI-1-C2C"): + super().__init__(standard) + + @pyaedt_function_handler + def _init(self): + """Initialize COM parameters.""" + self.table_93a1.update( + { + "f_b": "", + "f_min": "", + "Delta_f": "", + "C_d": "", + "L_s": "", + "C_b": "", + "z_p select": "", + "z_p (TX)": "", + "z_p (NEXT)": "", + "z_p (FEXT)": "", + "z_p (RX)": "", + "C_p": "", + "R_0": "", + "R_d": "", + "A_v": "", + "A_fe": "", + "A_ne": "", + "AC_CM_RMS": "", + "L": "", + "M": "", + } + ) + self.filter_and_eq.update( + { + "f_r": "", + "c(0)": "", + "c(-1)": "", + "c(-2)": "", + "c(-3)": "", + "c(1)": "", + "N_b": "", + "b_max(1)": "", + "b_max(2..N_b)": "", + "b_min(1)": "", + "b_min(2..N_b)": "", + "g_DC": "", + "f_z": "", + "f_p1": "", + "f_p2": "", + "g_DC_HP": "", + "f_HP_PZ": "", + } + ) + self.io_control.update( + { + # "DIAGNOSTICS": "", + # "DISPLAY_WINDOW": "", + # "CSV_REPORT": "", + "RESULT_DIR": "", + # "SAVE_FIGURES": "", + "Port Order": "", + "RUNTAG": "", + # "COM_CONTRIBUTION": "", + } + ) + self.operational.update( + { + "COM Pass threshold": "", + "ERL Pass threshold": "", + "DER_0": "", + "T_r": "", + "FORCE_TR": "", + "Local Search": "", + # "BREAD_CRUMBS": "", + # "SAVE_CONFIG2MAT": "", + # "PLOT_CM": "", + } + ) + self.tdr_and_erl_options.update( + { + "TDR": "", + "ERL": "", + "ERL_ONLY": "", + "TR_TDR": "", + "N": "", + "beta_x": "", + "rho_x": "", + "fixture delay 
time": "", + "TDR_W_TXPKG": "", + "N_bx": "", + "Tukey_Window": "", + } + ) + self.noise_jitter.update( + { + "sigma_RJ": "", + "A_DD": "", + "eta_0": "", + "SNR_TX": "", + "R_LM": "", + } + ) + self.table_93a3.update( + { + "package_tl_gamma0_a1_a2": "", + "package_tl_tau": "", + "package_Z_c": "", + } + ) + self.table_92_12.update( + { + "board_tl_gamma0_a1_a2": "", + "board_tl_tau": "", + "board_Z_c": "", + "z_bp (TX)": "", + "z_bp (NEXT)": "", + "z_bp (FEXT)": "", + "z_bp (RX)": "", + "C_0": "", + "C_1": "", + "Include PCB": "", + } + ) + self.floating_tap_control.update( + { + "N_bg": "", + "N_bf": "", + "N_f": "", + "bmaxg": "", + "B_float_RSS_MAX": "", + "N_tail_start": "", + } + ) + self.icn_fom_ild_parameters.update( + { + "f_v": "", + "f_f": "", + "f_n": "", + "f_2": "", + "A_ft": "", + "A_nt": "", + } + ) + self.receiver_testing.update( + { + "RX_CALIBRATION": "", + "Sigma BBN step": "", + } + ) + self.spisim_control.update( + { + "VERSION": "", + "THRUSNP": "", + "FEXTARY": "", + "NEXTARY": "", + "SPECTAG": "", + "FSTTHRU": "", + "NUMPORT": "", + "GENHTML": "", + } + ) diff --git a/pyaedt/misc/spisim_com_configuration_files/com_settings_mapping.py b/pyaedt/misc/spisim_com_configuration_files/com_settings_mapping.py new file mode 100644 index 00000000000..942f327492c --- /dev/null +++ b/pyaedt/misc/spisim_com_configuration_files/com_settings_mapping.py @@ -0,0 +1,233 @@ +spimsim_matlab_keywords_mapping = { + # Matlab keyword -> SPIsim keyword + # OP_IO_CTRL + "BESSEL_THOMSON": "BESSEL_THOMSON", # enable Bessel Thomsen filter for COM + "Butterworth": "BUTTERWORTH", # enable Butterworth filter for TDR, PTDR, and ERL + "CDR": "CDR", # CDR method, default is 'MM' (Mueller-Muller) + "COM Pass threshold": "COM_PASS_THRESHOLD", # the pass fail threshold for COM in dB + "Enforce Causality DIFF_TOL": "EC_DIFF_TOL", + # Difference Tolerance parameter for causality, Hard enforcement, 1e-4,Soft enforcement, 1e-3 + "Enforce Causality pulse start tolerance": 
"EC_PULSE_TOL", + # Tolerance parameter for causality, Hard enforcement, 0.05, Soft enforcement, .01 + "Enforce Causality REL_TOL": "EC_REL_TOL", + # Difference Tolerance parameter for causality, Hard enforcement, 1e-4,Soft enforcement, 1e-3 + "EH_MAX": "EH_MAX", # used when PMD_type is C2M and is not really computed per spec + "EH_MIN": "EH_MIN", # used when PMD_type is C2M + "ENFORCE_CAUSALITY": "ENFORCE_CAUSALITY", # default is 0. Not recommended to use + "ERL Pass threshold": "ERL_PASS_THRESHOLD", # the pass fail threshold for ERL in dB + "Error propagation COM margin": "ERR_PROPAGATION_COM_MARGIN", + # Use to calculate error propagation (not normally used) + "Force PDF bin size": "FORCE_PDF_BIN_SIZE", # do not use + "Histogram_Window_Weight": "HISTOGRAM_WINDOW_WEIGHT", + # Weighting for VEC and VEO are histogram processing. Type are Gaussian,Dual Rayleigh,Triangle, and Rectangle + # (default) + "Impulse response truncation threshold": "IMPRSP_TRUNC_THRESHOLD", # Impulse response truncation threshold + "Include PCB": "INCLUDE_PCB", # Used to add a PCB one each side of the passed s-parameters. + "nburst": "MAX_BURST_LEN", # Use to calculate burst error rate (not normally used) + "Min_VEO_Test": "MIN_VEO_TEST", + # used when PMD_type is C2M. This allow EH to go blow EH_min. If set to Zero it is ignored + "N_v": "N_V", # number of UI used to compute Vf + "Optimize_loop_speed_up": "OPTIMIZE_LOOP_SPEED_UP", + # If set to 0 (or default) normal looping, If set to 1 loop speedup by slightly reducing PD Fbin and FIR_threshold + # for optimize looping only + "Port Order": "PORT_ORDER", # s parameter port order [ tx+ tx- rx+ rx-] + "RUNTAG": "RUNTAG", # This string is appended to the beginning of results files + "SBR_GEN_METHOD": "SBR_GEN_METHOD", # Pulse generation method: use IBIS's slew rate or filtered rectangular pause + "sigma_r": "SIGMA_R", # sigma_r for 0.3ck Gaussian histogram window. Unit are UI. Preferred usage. 
+ "TDECQ": "TDECQ", # Experimental, for only option is none (0) or vma. Default is 0. + "TDR_W_TXPKG": "TDR_W_TXPKG", # adds tx package for TDR, PTDR, and ERL. Default is 0. + "USE_ETA0_PSD": "USE_ETA0_PSD", # Used eta_0 PSD equation for sigma_n. Default is 0. Do not use. + "VEC Pass threshold": "VEC_PASS_THRESHOLD", # the pass fail threshold for VEC in dB only used when PMD_type is C2M + # TABLE_93A1 + "A_fe": "A_FE", # FEXT aggressor differential peak source output voltage (half of peak to peak) + "A_ne": "A_NE", # NEXT aggressor differential peak source output voltage (half of peak to peak) + "A_v": "A_V", # Victim differential peak source output voltage (half of peak to peak) + "AC_CM_RMS": "AC_CM_RMS", + # AC_CM_RMS is the CM BBN AWGN RMS at COM source point. Default is 0. Adds common mode noise source to the COM + # signal path for the through channel + "ACCM_MAX_Freq": "ACCM_MAX_FREQ", # F max for integrating ACCM voltage in Hz. Default is fb + "C_b": "C_B", # C_b in nF (single sided) + "C_d": "C_D", # C_d in nF (single sided) + "C_p": "C_P", # C_p in nF (single sided) + "C_V": "C_V", # C_v in nF (via cap) (single sided) + "Delta_f": "DELTA_F", # frequency step + "DER_0": "DER_0", # Target detector error ratio + "f_b": "F_B", # Baud (Signaling) rate in Gbaud + "f_min": "F_MIN", # minimum required frequency start for s parameters + "f_v": "F_V", # For FOM_ILD: Transition rate cutoff frequency for ICN/ILD calc in terms of fb + "L": "L", # number of symbols levels (PAM-4 is 4, NRZ is 2) + "L_s": "L_S", # L_s in nH (single sided) + "Local Search": "LOCAL_SEARCH", # Decreases COM compute time. 
Setting to 2 seems ok ,if 0 search is full grid + "M": "M", # Samples per UI + "N_B_STEP": "N_B_STEP", # Discretization of DFE, 0 disables and is not normally used + "N_bx": "N_BX", # Used for ERL to Compensate for a number of Ui associated with the DFE + "R_0": "R_0", # reference impedance + "R_d": "R_D", # Die source termination resistance (single sided) + "samples_for_C2M": "SAMPLES_FOR_C2M", # Finer sampling in terms of samples per UI for c2m histogram analysis. + "T_h": "T_O", # superseded with T_O but is the internal values that is used. Do not use. + "z_p (FEXT)": "Z_PFEXT", # List of FEXT transmitter package trace lengths in mm, one per case + "z_p (NEXT)": "Z_PNEXT", # List of NEXT transmitter package trace lengths in mm, one per case + "z_p (RX)": "Z_PRX", # List of FEXT receiver package trace lengths in mm, one per case + "z_p select": "Z_PSELECT", # List of package length indexes used to run COM + "z_p (TX)": "Z_PTX", # List of victim transmitter package trace lengths in mm, one per case + # TABLE_93A3 + "package_tl_gamma0_a1_a2": "PACKAGE_TL_GAMMA0_A1_A2", + # Fitting parameters for package model per unit length. First element is in 1/mm and affects DC loss of package + # model . Second element is in ns1/2/mm and affects loss proportional to sqrt(f). Third element is in ns/mm and + # affects loss proportional to f. + "package_tl_tau": "PACKAGE_TL_TAU", # Package model transmission line delay ns/mm + "package_Z_c": "PACKAGE_Z_C", # Package model transmission line characteristic impedance [ Tx , Rx ] + # TABLE_9212 + "board_tl_gamma0_a1_a2": "BOARD_TL_GAMMA0_A1_A2", + # Fitting parameters for package model per unit length. First element is in 1/mm and affects DC loss of package + # model . Second element is in ns1/2/mm and affects loss proportional to sqrt(f). Third element is in ns/mm and + # affects loss proportional to f. 
+ "board_tl_tau": "BOARD_TL_TAU", # Board model transmission line delay ns/mm + "board_Z_c": "BOARD_Z_C", # Board model transmission line characteristic impedance [ Tx , Rx ] + "C_0": "CB0", # If Include PCB is set to 1, near device single ended capacitance C0 in nF is added + "C_1": "CB1", # if Include PCB is set to 1, connector side single ended capacitance C1 in nF is added + "z_bp (FEXT)": "Z_BPFEXT", # Fext Assessor transmitter board trace lengths in mm + "z_bp (NEXT)": "Z_BPNEXT", # Next Assessor transmitter board trace lengths in mm + "z_bp (RX)": "Z_BPRX", # Victim receiver board trace lengths in mm + "z_bp (TX)": "Z_BPTX", # Victim transmitter board trace lengths in mm + # TDR_ERL + "AUTO_TFX": "AUTO_TFX", # Mostly used for device ERL. If sent to 1 the fixture tfx will be estimated. + "beta_x": "BETA_X", # (for ERL) use default 0 + "BTorder": "BT_ORDER", # Bessel function order + "ERL": "ERL", # Enables ERL. Needs TDR to be set as well. + "ERL_ONLY": "ERL_ONLY", # Compute ERL only + "FIXTURE_BUILTIN_DELAY": "FIXTURE_BUILTIN_DELAY", # built-in fixture delay + "fixture delay time": "FIXTURE_DELAY_TIME", # fixture delay time (for ERL) + "N": "N", # duration time in UI which is used for ERL (PTDR) + "rho_x": "RHO_X", # (for ERL) use default 0.618 + "TDR_DURATION": "TDR_DURATION", + # only used if N*UI is longer than the TDR duration time. Default is 5 times the raw s-parameter transit time. + "TR_TDR": "TR_TDR", # Gaussian shaped transition time for TDR source in ns + "Turkey_Window": "TUKEY_WINDOW", # required for ERL. Set to 1. Default is 0. + "Z_T": "Z_T", # single sided source termination reference resistance for TDR and ERL + # ICN_PARAM + "A_ft": "A_FT", # FEXT aggressor amplitude for ICN. Defaults to A_fe if not specified + "A_nt": "A_NT", # NEXT aggressor amplitude for ICN. 
Defaults to A_ne if not specified + "f_1": "F_1", # start frequency for ICN and ILD calculations in GHz + "f_2": "F_2", # frequency in GHz for integration computation of ICN or FOM_Ild in GHz + "f_f": "F_F", # For ICN: Fext transition rate cut off frequency for ICN calc in terms of fb + "f_n": "F_N", # For ICN: Next transition rate cut off frequency for ICN calc in terms of fb + # FILTER_EQ + "b_max(1)": "B_MAX1", # DFE magnitude limit, first coefficient(ignored if Nb=0) + "b_max(2..N_b)": "B_MAX2_N_B", + # DFE magnitude limit, second coefficient and on (ignored if Nb<2). Can be a regular expression + "b_min(1)": "B_MIN1", # DFE negative magnitude limit. If not specified it defaults to -bmax. + "b_min(2..N_b)": "B_MIN2_N_B", # DFE negative magnitude limit, if not specified it defaults to -b_max(2..N_b) + "c(-1)": "C_1", # TX equalizer pre cursor tap -1 + "c(-2)": "C_2", # TX equalizer pre cursor tap -2 + "c(-3)": "C_3", # TX equalizer pre cursor tap -3 + "c(-4)": "C_4", # TX equalizer pre cursor tap -4 + "c(0)": "C0", + # TX equalizer cursor minimum value (actual value is calculated as 1-sum(abs(tap)), Grid seat ignored for when C(0) + # is below this value + "c(1)": "C1", # TX equalizer post cursor tap 1 + "c(2)": "C2", # TX equalizer post cursor tap 2 + "c(3)": "C3", # TX equalizer post cursor tap 3 + "CTLE_TYPE": "CTLE_TYPE", # Sets the CTLE type default is poles and zeros (i.e. not a list of poles as in 120e) + "F_HP_P": "F_HP_P", # CFT pole fp2 is in GHz. Normally a list for 120e. Not normally use elsewhere. 
+ "f_HP_PZ": "F_HP_PZ", # CFT pole zero pair in GHz for low frequency CTF + "F_HP_Z": "F_HP_Z", # CFT pole zero pair in GHz for low frequency CTF + "f_p1": "F_P1", # CTLE pole 1 in GHz + "f_p2": "F_P2", # CTLE pole 2 in GHz + "f_r": "F_R", # Receiver filter in COM and in ICN/FOM_ILD calcs in terms of fb + "f_z": "F_Z", # CTLE zero in GHz + "g_DC": "G_DC", # AC-DC gain list + "g_DC_HP": "G_DC_HP", # CTF AC-DC gain list (GDC2) + "G_Qual": "G_QUAL", # G_Qual are the dB ranges of g_DC g DC )which correspond tog_DC_HP (g DC2) + "G2_Qual": "G2_QUAL", + # G2_Qual limit values of g_DC_HP (g DC2 ) which corresponds to ranges of g_DC g DC specified with G_QUAL + "GDC_MIN": "GDC_MIN", # max ACDC gain, if 0 ignore + "N_b": "N_B", # Decision feedback fixed equalizer (DFE) length + # NOISE_JITTER + "A_DD": "A_DD", # Normalized peak dual-Dirac noise, this is half of the total bound uncorrelated jitter (BUJ) in UI + "eta_0": "ETA_0", + # One-sided noise spectral density (V^2/GHz).Input referred noise at TP5. Input referred noise at TP5 + "R_LM": "R_LM", # Ratio of level separation mismatch. Relevant when not PAM-2 (NRZ). + "sigma_RJ": "SIGMA_RJ", # rms of random jitter + "SNR_TX": "SNR_TX", # Transmitter SNDR noise in dB + # RCV_NONSTD + "FORCE_TR": "FORCE_TR", # Included for earlier version support but should be set to 1 in most later config sheets. 
+ "Grr": "GRR", # either do no use or set to 1 (for ERL) + "GRR_LIMIT": "GRR_LIMIT", # either do no use or set to 1 (for ERL) + "Gx": "GX", # ERL parameter param.Grr, This is used is the COM code + "IDEAL_RX_TERM": "IDEAL_RX_TERM", + # IDEAL_RX_TERM not supported, instead, set Zp,Cd, and Cp parameters to zero and Zp select to 1 + "IDEAL_TX_TERM": "IDEAL_TX_TERM", # not supported, instead, set Zp,Cd, and Cp parameters to zero and Zp select to 1 + "INC_PACKAGE": "INC_PACKAGE", + # warning: INC_PACKAGE=0 not fully supported, instead, set Zp,Cd, and Cp parameters to zero and Zp select to 1 + "INCLUDE_CTLE": "INCLUDE_CTLE", # do not use + "INCLUDE_TX_RX_FILTER": "INCLUDE_TX_RX_FILTER", # do not use + "kappa1": "KAPPA1", # if set 0 reflection at tp0 are omitted from COM + "kappa2": "KAPPA2", # if set 0 reflection at tp5 are omitted from COM + "RX_CALIBRATION": "RX_CALIBRATION", # Turn on RX_Calibration loop + "Sigma BBN step": "SIGMA_BBN_STEP", # BBN step for Rx Calibration in volts. Defaults is 0.5e-3 + "T_r": "T_R", # 20% to 80% transition time used for the Gaussian shaped source + "T_R_FILTER_TYPE": "T_R_FILTER_TYPE", # included for earlier version support. Not recommended to use. + "T_R_MEAS_POINT": "T_R_MEAS_POINT", # included for earlier version support. Not recommended to use. + # FLOATING_TAPS + "B_float_RSS_MAX": "B_FLOAT_RSS_MAX", # floating DFE tap start for RSS floating tap limit + "bmaxg": "BMAXG", # max DFE value for floating taps + "N_bf": "N_BF", # number of taps in group + "N_bg": "N_BG", # number of group of floating tap. Used as a switch, 0 means no float + "N_f": "N_F", # UI span for floating taps. 
replaced by N_bmax + "N_tail_start": "N_TAIL_START", # start range for max RSS limit for DFE taps + # RX_FFE + "CURSOR_GAIN": "CURSOR_GAIN", # only FFE and not supported + "FFE_BACKOFF": "FFE_BACKOFF", + # see if better zero forced solution is better by backing off the number specified FFE taps one at at time + "FFE_MAIN_CUR_MIN": "FFE_MAIN_CUR_MIN", # Rx FFE main cursor minimum + "ffe_post_tap_len": "FFE_POST_TAP_LEN", # Rx FFE post cursor tap length + "FFE_POST_TAP1_MAX": "FFE_POST_TAP1_MAX", # Rx FFE post cursor tap1 limit + "ffe_pre_tap_len": "FFE_PRE_TAP_LEN", # Rx FFE pre-cursor tap length + "FFE_PRE_TAP1_MAX": "FFE_PRE_TAP1_MAX", # Rx FFE pre cursor tap1 limit + "FFE_TAP_STEP_SIZE": "FFE_TAP_STEP_SIZE", # Rx FFE tap step size + "FFE_TAPN_MAX": "FFE_TAPN_MAX", # Rx FFE precursor tap N limit + "Tukey_Window": "TUKEY_WINDOW", + "TDR": "TDR", + "NEXTARY": "NEXTARY", + "HISTOGRAM_WINDOW_WEIGHT": "HISTOGRAM_WINDOW_WEIGHT", + "OPTIMIZE_LOOP_SPEED_UP": "OPTIMIZE_LOOP_SPEED_UP", + "EC_PULSE_TOL": "EC_PULSE_TOL", + "VEC_PASS_THRESHOLD": "VEC_PASS_THRESHOLD", + "KAPPA1": "KAPPA1", + "VERSION": "VERSION", + "PDF_BIN_SIZE": "PDF_BIN_SIZE", + "RESULT_DIR": "RESULT_DIR", + "G_QUAL": "G_QUAL", + "T_O": "T_O", + "TDR_BUTTERWORTH": "TDR_BUTTERWORTH", + "GRR": "GRR", + "SPECTAG": "SPECTAG", + "FFE_PRE_TAP_LEN": "FFE_PRE_TAP_LEN", + "FEXTARY": "FEXTARY", + "GENHTML": "GENHTML", + "SIGMA_R": "SIGMA_R", + "FFE_POST_TAP_LEN": "FFE_POST_TAP_LEN", + "MIN_VEO_TEST": "MIN_VEO_TEST", + "ACCM_MAX_FREQ": "ACCM_MAX_FREQ", + "N_V": "N_V", + "BUTTERWORTH": "BUTTERWORTH", + "IMPRSP_TRUNC_THRESHOLD": "IMPRSP_TRUNC_THRESHOLD", + "EC_DIFF_TOL": "EC_DIFF_TOL", + "C_4": "C_4", + "ERR_PROPAGATION_COM_MARGIN": "ERR_PROPAGATION_COM_MARGIN", + "EC_REL_TOL": "EC_REL_TOL", + "MAX_BURST_LEN": "MAX_BURST_LEN", + "FSTTHRU": "FSTTHRU", + "BT_ORDER": "BT_ORDER", + "PMD_TYPE": "PMD_TYPE", + "C3": "C3", + "C2": "C2", + "F_1": "F_1", + "G2_QUAL": "G2_QUAL", + "NUMPORT": "NUMPORT", + "NOISE_CREST_FACTOR": 
"NOISE_CREST_FACTOR", + "THRUSNP": "THRUSNP", + "SAMPLES_FOR_C2M": "SAMPLES_FOR_C2M", + "GX": "GX", +} From 0cc6511eb6746d134c7a207c9ca98dc8ab3d82c6 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Wed, 20 Mar 2024 15:15:19 +0100 Subject: [PATCH 27/36] WIP --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0d16acb9062..a26fdc59f8e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dependencies = [ "jsonschema", "psutil", "pyedb>=0.4.0,<0.5; python_version == '3.7'", - "pyedb>=0.5.0,<0.6; python_version > '3.7'", + "pyedb>=0.5.0,<0.6 || ==0.6.dev0; python_version > '3.7'", "pytomlpp; python_version < '3.12'", "rpyc>=6.0.0,<6.1", ] From 02a5c6c272367b3dfec06513a710205be6e249c4 Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Wed, 20 Mar 2024 15:38:40 +0100 Subject: [PATCH 28/36] WIP --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index a26fdc59f8e..76d034ea4b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dependencies = [ "jsonschema", "psutil", "pyedb>=0.4.0,<0.5; python_version == '3.7'", - "pyedb>=0.5.0,<0.6 || ==0.6.dev0; python_version > '3.7'", + "pyedb>=0.5.0,<0.6||==0.6.dev0; python_version > '3.7'", "pytomlpp; python_version < '3.12'", "rpyc>=6.0.0,<6.1", ] From 0a6442000647ded721b0141c6cc2c014af5db8de Mon Sep 17 00:00:00 2001 From: Irene Woyna <98172186+IreneWoyna@users.noreply.github.com> Date: Thu, 21 Mar 2024 09:01:44 +0100 Subject: [PATCH 29/36] fixed region definition for RZ designs (#4360) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: Maxime Rey <87315832+MaxJPRey@users.noreply.github.com> Co-authored-by: Sébastien Morais <146729917+SMoraisAnsys@users.noreply.github.com> Co-authored-by: Samuel Lopez 
<85613111+Samuelopez-ansys@users.noreply.github.com> Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> Co-authored-by: Giulia Malinverno Co-authored-by: gmalinve <103059376+gmalinve@users.noreply.github.com> --- _unittest/test_09_Primitives2D.py | 29 ++++++++++++++++++++++++++++- pyaedt/modeler/cad/Primitives2D.py | 15 +++++++++++---- 2 files changed, 39 insertions(+), 5 deletions(-) diff --git a/_unittest/test_09_Primitives2D.py b/_unittest/test_09_Primitives2D.py index 4e45415d93b..166d379f34b 100644 --- a/_unittest/test_09_Primitives2D.py +++ b/_unittest/test_09_Primitives2D.py @@ -11,10 +11,17 @@ def aedtapp(add_app): return app +@pytest.fixture(scope="class") +def axisymmetrical(add_app): + app = add_app(design_name="2D_Primitives_3", solution_type="TransientZ", application=Maxwell2d) + return app + + class TestClass: @pytest.fixture(autouse=True) - def init(self, aedtapp, local_scratch): + def init(self, aedtapp, axisymmetrical, local_scratch): self.aedtapp = aedtapp + self.axisymmetrical = axisymmetrical self.local_scratch = local_scratch def create_rectangle(self, name=None): @@ -74,6 +81,26 @@ def test_06_create_region(self): region = self.aedtapp.modeler.create_region([100, 100, 100, 100, 100, 100]) assert not region + def test_06_a_create_region_Z(self): + if self.axisymmetrical.modeler["Region"]: + self.axisymmetrical.modeler.delete("Region") + assert "Region" not in self.axisymmetrical.modeler.object_names + assert not self.axisymmetrical.modeler.create_region(["100%", "50%", "20%"]) + assert self.axisymmetrical.modeler.create_region([100, 50, 20]) + self.axisymmetrical.modeler["Region"].delete() + assert self.axisymmetrical.modeler.create_region(100) + self.axisymmetrical.modeler["Region"].delete() + assert self.axisymmetrical.modeler.create_region("200") + self.axisymmetrical.modeler["Region"].delete() + assert self.axisymmetrical.modeler.create_region([100, "50mm", 20], False) + self.axisymmetrical.modeler["Region"].delete() + 
assert self.axisymmetrical.modeler.create_region([100, "50mm", "100"], False) + self.axisymmetrical.modeler["Region"].delete() + assert self.axisymmetrical.modeler.create_region(["50mm", "50mm", "50mm"], False) + self.axisymmetrical.modeler["Region"].delete() + assert self.axisymmetrical.modeler.create_region("10mm", False) + self.axisymmetrical.modeler["Region"].delete() + def test_07_assign_material_ceramic(self, material="Ceramic_material"): self.aedtapp.assign_material(["Rectangle1"], material) assert self.aedtapp.modeler["Rectangle1"].material_name == material diff --git a/pyaedt/modeler/cad/Primitives2D.py b/pyaedt/modeler/cad/Primitives2D.py index d97dfc7454e..6ff9c63bd84 100644 --- a/pyaedt/modeler/cad/Primitives2D.py +++ b/pyaedt/modeler/cad/Primitives2D.py @@ -288,9 +288,10 @@ def create_region(self, pad_percent=300, is_percentage=True): ---------- pad_percent : float, str, list of floats or list of str, optional Same padding is applied if not a list. The default is ``300``. - If a list of floats or str, interpret as adding for ``["+X", "+Y", "-X", "-Y"]``. + If a list of floats or strings, interpret as adding ``["+X", "+Y", "-X", "-Y"]`` for XY geometry mode, + and ``["+R", "+Z", "-Z"]`` for RZ geometry mode. is_percentage : bool, optional - Region definition in percentage or absolute value. The default is `True``. + Whether the region definition is a percentage or absolute value. The default is `True``. 
Returns ------- @@ -304,9 +305,15 @@ def create_region(self, pad_percent=300, is_percentage=True): """ if not isinstance(pad_percent, list): if self._app.design_type == "2D Extractor" or self._app.design_type == "Maxwell 2D": - pad_percent = [pad_percent, pad_percent, 0, pad_percent, pad_percent, 0] + if hasattr(self._app.SOLUTIONS, self._app.solution_type): + pad_percent = [pad_percent, pad_percent, 0, pad_percent, pad_percent, 0] + else: + pad_percent = [pad_percent, 0, pad_percent, 0, 0, pad_percent] else: if self._app.design_type == "2D Extractor" or self._app.design_type == "Maxwell 2D": - pad_percent = [pad_percent[0], pad_percent[1], 0, pad_percent[2], pad_percent[3], 0] + if hasattr(self._app.SOLUTIONS, self._app.solution_type): + pad_percent = [pad_percent[0], pad_percent[1], 0, pad_percent[2], pad_percent[3], 0] + else: + pad_percent = [pad_percent[0], 0, pad_percent[1], 0, 0, pad_percent[2]] return self._create_region(pad_percent, is_percentage) From 66af817f9f32b61a01a6b5ca3392d8f72be82c6c Mon Sep 17 00:00:00 2001 From: gmalinve <103059376+gmalinve@users.noreply.github.com> Date: Thu, 21 Mar 2024 11:16:28 +0100 Subject: [PATCH 30/36] improve create_component (#4389) --- pyaedt/modeler/circuits/PrimitivesCircuit.py | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/pyaedt/modeler/circuits/PrimitivesCircuit.py b/pyaedt/modeler/circuits/PrimitivesCircuit.py index c7ee5434b8d..6548ebeae57 100644 --- a/pyaedt/modeler/circuits/PrimitivesCircuit.py +++ b/pyaedt/modeler/circuits/PrimitivesCircuit.py @@ -688,10 +688,10 @@ def create_component( inst_name=None, component_library="Resistors", component_name="RES_", - location=[], + location=None, angle=0, use_instance_id_netlist=False, - global_netlist_list=[], + global_netlist_list=None, ): """Create a component from a library. @@ -705,13 +705,14 @@ def create_component( Name of component in the library. The default is ``"RES"``. 
location : list of float, optional Position on the X axis and Y axis. + The default is ``None``, in which case the component is placed in [0, 0]. angle : optional Angle rotation in degrees. The default is ``0``. use_instance_id_netlist : bool, optional Whether to enable the instance ID in the net list. The default is ``False``. global_netlist_list : list, optional - The default is``[]``. + The default is ``None``, in which case an empty list is passed. Returns ------- @@ -722,6 +723,15 @@ def create_component( ---------- >>> oEditor.CreateComponent + + Examples + -------- + + >>> from pyaedt import TwinBuilder + >>> aedtapp = TwinBuilder() + >>> cmp = aedtapp.modeler.schematic.create_component(component_library="", component_name="ExcitationComponent") + >>> cmp.set_property("ShowPin", True) + >>> aedtapp.release_desktop(True, True) """ id = self.create_unique_id() if component_library: From ddb5cf07fc60da3130361c13f35ec3ccd564d150 Mon Sep 17 00:00:00 2001 From: Samuel Lopez <85613111+Samuelopez-ansys@users.noreply.github.com> Date: Thu, 21 Mar 2024 12:38:16 +0100 Subject: [PATCH 31/36] Add export option to export_file and export_on_grid (#4390) --- _unittest_solvers/test_00_analyze.py | 14 +++++ pyaedt/modules/PostProcessor.py | 84 +++++++++++++++++++++++++--- 2 files changed, 91 insertions(+), 7 deletions(-) diff --git a/_unittest_solvers/test_00_analyze.py b/_unittest_solvers/test_00_analyze.py index f507030ee2b..f94ec347b83 100644 --- a/_unittest_solvers/test_00_analyze.py +++ b/_unittest_solvers/test_00_analyze.py @@ -331,6 +331,20 @@ def test_03e_icepak_ExportFLDFil(self): sample_points_lists=[[0, 0, 0], [3, 6, 8], [4, 7, 9]], ) assert os.path.exists(fld_file_2) + cs = self.icepak_app.modeler.create_coordinate_system() + fld_file_3 = os.path.join(self.local_scratch.path, "test_fld_3.fld") + self.icepak_app.post.export_field_file( + quantity_name="Temp", + solution=self.icepak_app.nominal_sweep, + 
variation_dict=self.icepak_app.available_variations.nominal_w_values_dict, + filename=fld_file_3, + obj_list="box", + sample_points_lists=[[0, 0, 0], [3, 6, 8], [4, 7, 9]], + reference_coordinate_system=cs.name, + export_in_si_system=False, + export_field_in_reference=False, + ) + assert os.path.exists(fld_file_3) def test_04a_3dl_generate_mesh(self): assert self.hfss3dl_solve.mesh.generate_mesh("Setup1") diff --git a/pyaedt/modules/PostProcessor.py b/pyaedt/modules/PostProcessor.py index 713942b49b4..45cb207971c 100644 --- a/pyaedt/modules/PostProcessor.py +++ b/pyaedt/modules/PostProcessor.py @@ -2498,13 +2498,17 @@ def export_field_file_on_grid( variation_dict=None, filename=None, gridtype="Cartesian", - grid_center=[0, 0, 0], - grid_start=[0, 0, 0], - grid_stop=[0, 0, 0], - grid_step=[0, 0, 0], + grid_center=None, + grid_start=None, + grid_stop=None, + grid_step=None, isvector=False, intrinsics=None, phase=None, + export_with_sample_points=True, + reference_coordinate_system="Global", + export_in_si_system=True, + export_field_in_reference=True, ): """Use the field calculator to create a field file on a grid based on a solution and variation. @@ -2542,6 +2546,18 @@ def export_field_file_on_grid( calculation. The default is ``None``. phase : str, optional Field phase. The default is ``None``. + export_with_sample_points : bool, optional + Whether to include the sample points in the file to export. + The default is ``True``. + reference_coordinate_system : str, optional + Reference coordinate system in the file to export. + The default is ``"Global"``. + export_in_si_system : bool, optional + Whether the provided sample points are defined in the SI system or model units. + The default is ``True``. + export_field_in_reference : bool, optional + Whether to export the field in reference coordinate system. + The default is ``True``. 
Returns ------- @@ -2567,6 +2583,14 @@ def export_field_file_on_grid( >>> path = "Field.fld" >>> hfss.post.export_field_file_on_grid("E", setup, var, path, 'Cartesian', [0, 0, 0], intrinsics="8GHz") """ + if grid_step is None: + grid_step = [0, 0, 0] + if grid_start is None: + grid_start = [0, 0, 0] + if grid_stop is None: + grid_stop = [0, 0, 0] + if grid_center is None: + grid_center = [0, 0, 0] self.logger.info("Exporting %s field. Be patient", quantity_name) if not solution: solution = self._app.existing_analysis_sweeps[0] @@ -2629,6 +2653,18 @@ def export_field_file_on_grid( else: variation.append("0deg") + export_options = [ + "NAME:ExportOption", + "IncludePtInOutput:=", + export_with_sample_points, + "RefCSName:=", + reference_coordinate_system, + "PtInSI:=", + export_in_si_system, + "FieldInRefCS:=", + export_field_in_reference, + ] + self.ofieldsreporter.ExportOnGrid( filename, grid_start_wu, @@ -2636,7 +2672,7 @@ def export_field_file_on_grid( grid_step_wu, solution, variation, - True, + export_options, gridtype, grid_center, False, @@ -2659,6 +2695,9 @@ def export_field_file( sample_points_file=None, sample_points_lists=None, export_with_sample_points=True, + reference_coordinate_system="Global", + export_in_si_system=True, + export_field_in_reference=True, ): """Use the field calculator to create a field file based on a solution and variation. @@ -2692,6 +2731,15 @@ def export_field_file( export_with_sample_points : bool, optional Whether to include the sample points in the file to export. The default is ``True``. + reference_coordinate_system : str, optional + Reference coordinate system in the file to export. + The default is ``"Global"``. + export_in_si_system : bool, optional + Whether the provided sample points are defined in the SI system or model units. + The default is ``True``. + export_field_in_reference : bool, optional + Whether to export the field in reference coordinate system. + The default is ``True``. 
Returns ------- @@ -2758,12 +2806,23 @@ def export_field_file( self.ofieldsreporter.CalcOp("Value") self.ofieldsreporter.CalculatorWrite(filename, ["Solution:=", solution], variation) elif sample_points_file: + export_options = [ + "NAME:ExportOption", + "IncludePtInOutput:=", + export_with_sample_points, + "RefCSName:=", + reference_coordinate_system, + "PtInSI:=", + export_in_si_system, + "FieldInRefCS:=", + export_field_in_reference, + ] self.ofieldsreporter.ExportToFile( filename, sample_points_file, solution, variation, - export_with_sample_points, + export_options, ) else: sample_points_file = os.path.join(self._app.working_directory, "temp_points.pts") @@ -2771,12 +2830,23 @@ def export_field_file( f.write("Unit={}\n".format(self.model_units)) for point in sample_points_lists: f.write(" ".join([str(i) for i in point]) + "\n") + export_options = [ + "NAME:ExportOption", + "IncludePtInOutput:=", + export_with_sample_points, + "RefCSName:=", + reference_coordinate_system, + "PtInSI:=", + export_in_si_system, + "FieldInRefCS:=", + export_field_in_reference, + ] self.ofieldsreporter.ExportToFile( filename, sample_points_file, solution, variation, - export_with_sample_points, + export_options, ) if os.path.exists(filename): From c3326260863d8589ba6ec61acf111fec934315e4 Mon Sep 17 00:00:00 2001 From: gmalinve <103059376+gmalinve@users.noreply.github.com> Date: Thu, 21 Mar 2024 13:59:58 +0100 Subject: [PATCH 32/36] improve create_polyline for angular arc (#4392) --- pyaedt/modeler/cad/polylines.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pyaedt/modeler/cad/polylines.py b/pyaedt/modeler/cad/polylines.py index 0254e40bee0..7018f40dcc5 100644 --- a/pyaedt/modeler/cad/polylines.py +++ b/pyaedt/modeler/cad/polylines.py @@ -257,6 +257,8 @@ def __init__( ) self._positions = [list(i) for i in position_list[: self._segment_types[-1].num_points]] else: # AngularArc + if not all(isinstance(x, list) for x in position_list): + position_list = [position_list] 
self._positions = [position_list[0]] self._evaluate_arc_angle_extra_points(segment_type, start_point=position_list[0]) self._positions.extend(segment_type.extra_points[:]) From 154de59c43257ceb3f2fce806019c084d47d8d36 Mon Sep 17 00:00:00 2001 From: Hui Zhou Date: Thu, 21 Mar 2024 14:11:33 +0100 Subject: [PATCH 33/36] Com enhancement (#4393) Co-authored-by: ring630 <@gmail.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- _unittest_solvers/test_00_analyze.py | 20 ++++++---- pyaedt/generic/spisim.py | 25 ++++++------ .../com_parameters.py | 38 ++++++++++++++----- 3 files changed, 54 insertions(+), 29 deletions(-) diff --git a/_unittest_solvers/test_00_analyze.py b/_unittest_solvers/test_00_analyze.py index f94ec347b83..4a5e93d7120 100644 --- a/_unittest_solvers/test_00_analyze.py +++ b/_unittest_solvers/test_00_analyze.py @@ -512,7 +512,7 @@ def test_09a_compute_com(self, local_scratch, circuit_com): report_dir = os.path.join(spisim.working_directory, "50GAUI-1_C2C") os.mkdir(report_dir) com = spisim.compute_com( - standard="50GAUI-1-C2C", + standard=1, out_folder=report_dir, ) assert com @@ -532,7 +532,7 @@ def test_09b_compute_com(self, local_scratch): spisim.working_directory = local_scratch.path com_0, com_1 = spisim.compute_com( - standard="50GAUI-1-C2C", + standard=1, port_order="EvenOdd", fext_s4p=fext_s4p, next_s4p=next_s4p, @@ -540,7 +540,7 @@ def test_09b_compute_com(self, local_scratch): ) assert com_0 and com_1 com_0, com_1 = spisim.compute_com( - standard="100GBASE-KR4", + standard=2, port_order="EvenOdd", fext_s4p=fext_s4p, next_s4p=next_s4p, @@ -548,7 +548,7 @@ def test_09b_compute_com(self, local_scratch): ) assert com_0 and com_1 com_0, com_1 = spisim.compute_com( - standard="100GBASE-KP4", + standard=3, port_order="EvenOdd", fext_s4p=fext_s4p, next_s4p=next_s4p, @@ -560,14 +560,18 @@ def test_09c_compute_com(self, local_scratch): com_example_file_folder = Path(local_path) / "example_models" / 
test_subfolder / "com_unit_test_sparam" thru_s4p = local_scratch.copyfile(com_example_file_folder / "SerDes_Demo_02_Thru.s4p") spisim = SpiSim(thru_s4p) - spisim.export_com_configure_file(Path(local_scratch.path) / "test.cfg") - com_0, com_1 = spisim.compute_com("custom", Path(local_scratch.path) / "test.cfg") - assert com_0 and com_1 spisim.export_com_configure_file(os.path.join(spisim.working_directory, "custom.json")) com_0, com_1 = spisim.compute_com( - standard="custom", + standard=0, config_file=os.path.join(spisim.working_directory, "custom.json"), port_order="EvenOdd", ) assert com_0 and com_1 + + from pyaedt.misc.spisim_com_configuration_files.com_parameters import COMParametersVer3p4 + com_param = COMParametersVer3p4() + com_param.load(os.path.join(spisim.working_directory, "custom.json"),) + com_param.export_spisim_cfg(str(Path(local_scratch.path) / "test.cfg")) + com_0, com_1 = spisim.compute_com(0, Path(local_scratch.path) / "test.cfg") + assert com_0 and com_1 diff --git a/pyaedt/generic/spisim.py b/pyaedt/generic/spisim.py index ad393b087a8..9509ec770e9 100644 --- a/pyaedt/generic/spisim.py +++ b/pyaedt/generic/spisim.py @@ -274,9 +274,15 @@ def compute_com( Parameters ---------- - standard : str - Name of the standard to apply. Options are ``"Custom"`, ``"50GAUI-1-C2C"`, ``"100GBASE-KR4"`` and - ``"100GBASE-KP4"``. + standard : int + Name of the standard to apply. Supported stdnards are as below. + COM_CUSTOM = 0 + COM_50GAUI_1_C2C = 1 + COM_100GAUI_2_C2C = 2 + COM_200GAUI_4 = 3 + COM_400GAUI_8 = 4 + COM_100GBASE_KR4 = 5 + COM_100GBASE_KP4 = 6 config_file : str, Path, optional Config file to use. 
port_order : str, optional @@ -295,8 +301,7 @@ def compute_com( """ com_param = COMParametersVer3p4() - if standard.lower() == "custom": - + if standard == 0: if os.path.splitext(config_file)[-1] == ".cfg": com_param.load_spisim_cfg(config_file) else: @@ -346,22 +351,20 @@ def _compute_com( return self._get_output_parameter_from_result(out_processing, "COM") @pyaedt_function_handler - def export_com_configure_file(self, file_path, standard="50GAUI-1-C2C"): + def export_com_configure_file(self, file_path, standard=1): """Generate a configuration file for SpiSim. Parameters ---------- file_path : str, Path Full path to configuration file to create. - + standard : int + Index of the standard. Returns ------- bool """ - if os.path.splitext(file_path)[-1] == ".cfg": - COMParametersVer3p4(standard).export_spisim_cfg(file_path) - else: - return COMParametersVer3p4(standard).export(file_path) + return COMParametersVer3p4(standard).export(file_path) def detect_encoding(file_path, expected_pattern="", re_flags=0): diff --git a/pyaedt/misc/spisim_com_configuration_files/com_parameters.py b/pyaedt/misc/spisim_com_configuration_files/com_parameters.py index 5c705cc9598..7aaeddda5c7 100644 --- a/pyaedt/misc/spisim_com_configuration_files/com_parameters.py +++ b/pyaedt/misc/spisim_com_configuration_files/com_parameters.py @@ -1,3 +1,4 @@ +from enum import Enum import json from pathlib import Path @@ -8,20 +9,36 @@ logger = settings.logger +class COMStandards(Enum): + COM_CUSTOM = 0 + COM_50GAUI_1_C2C = 1 # com_120d_8 + COM_100GAUI_2_C2C = 2 # com_120d_8 + COM_200GAUI_4 = 3 # com_120d_8 + COM_400GAUI_8 = 4 # com_120d_8 + COM_100GBASE_KR4 = 5 # com_93_8 + COM_100GBASE_KP4 = 6 # com_94_17 + + class COMParameters: """Base class to manage COM parameters.""" _CFG_DIR = Path(__file__).parent.parent / "spisim_com_configuration_files" _STD_TABLE_MAPPING = { - "50GAUI-1-C2C": "com_120d_8.json", - "100GAUI-2-C2C": "com_120d_8.json", - "200GAUI-4": "com_120d_8.json", - "400GAUI-8": 
"com_120d_8.json", - "100GBASE-KR4": "com_93_8.json", - "100GBASE-KP4": "com_94_17.json", + "COM_50GAUI_1_C2C": "com_120d_8.json", + "COM_100GAUI_2_C2C": "com_120d_8.json", + "COM_200GAUI_4": "com_120d_8.json", + "COM_400GAUI_8": "com_120d_8.json", + "COM_100GBASE_KR4": "com_93_8.json", + "COM_100GBASE_KP4": "com_94_17.json", } def __init__(self, standard): + """ + + Parameters + ---------- + standard : int + """ self.table_93a1 = {} self.filter_and_eq = {} self.io_control = {} @@ -75,7 +92,7 @@ def standard(self): @standard.setter def standard(self, value): - std_table = self._STD_TABLE_MAPPING[value] + std_table = self._STD_TABLE_MAPPING[COMStandards(value).name] cfg_path = self._CFG_DIR / std_table self.load(cfg_path) self._standard = value @@ -154,6 +171,7 @@ def load(self, file_path): file_path : str, Path of file. """ + self._init() with open(file_path) as f: # pragma: no cover temp = json.load(f) @@ -167,8 +185,8 @@ def export_spisim_cfg(self, file_path): Parameters ---------- - file_path : str - Path of file. + file_path : str, Path + Full path of file. 
""" with open(file_path, "w") as fp: fp.write("################################################################################\n") @@ -213,7 +231,7 @@ def load_spisim_cfg(self, file_path): class COMParametersVer3p4(COMParameters): """Manages COM parameters of version 3.4.""" - def __init__(self, standard="50GAUI-1-C2C"): + def __init__(self, standard=1): super().__init__(standard) @pyaedt_function_handler From dfa94d75092d855f6191d3b0a8c6c64ff0c168ff Mon Sep 17 00:00:00 2001 From: Sebastien Morais Date: Thu, 21 Mar 2024 15:24:36 +0100 Subject: [PATCH 34/36] MAINT: Remove pyedb upper bound Closes #4385 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 76d034ea4b7..51dae03e060 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -30,7 +30,7 @@ dependencies = [ "jsonschema", "psutil", "pyedb>=0.4.0,<0.5; python_version == '3.7'", - "pyedb>=0.5.0,<0.6||==0.6.dev0; python_version > '3.7'", + "pyedb>=0.5.0; python_version > '3.7'", "pytomlpp; python_version < '3.12'", "rpyc>=6.0.0,<6.1", ] From 8f3ededd31c969be7a9df9640153e704a1dc65bc Mon Sep 17 00:00:00 2001 From: Massimo Capodiferro <77293250+maxcapodi78@users.noreply.github.com> Date: Fri, 22 Mar 2024 10:20:19 +0100 Subject: [PATCH 35/36] Refactored Desktop class to support multiple desktop instances at the same time (#4371) Co-authored-by: maxcapodi78 Co-authored-by: Kathy Pippert <84872299+PipKat@users.noreply.github.com> Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> Co-authored-by: samuel Co-authored-by: Samuel Lopez <85613111+Samuelopez-ansys@users.noreply.github.com> --- _unittest/test_01_Design.py | 2 +- pyaedt/aedt_logger.py | 10 +- pyaedt/application/Design.py | 27 +-- pyaedt/application/aedt_objects.py | 17 +- pyaedt/desktop.py | 266 +++++++++++++++-------------- pyaedt/generic/design_types.py | 82 +++++---- pyaedt/generic/general_methods.py | 25 +-- pyaedt/generic/settings.py | 27 +++ 8 files 
changed, 255 insertions(+), 201 deletions(-) diff --git a/_unittest/test_01_Design.py b/_unittest/test_01_Design.py index bf741cc21b9..4fb2ebf52f0 100644 --- a/_unittest/test_01_Design.py +++ b/_unittest/test_01_Design.py @@ -328,7 +328,7 @@ def test_33_aedt_object(self): aedt_obj = AedtObjects() assert aedt_obj.odesign assert aedt_obj.oproject - aedt_obj = AedtObjects(self.aedtapp.oproject, self.aedtapp.odesign) + aedt_obj = AedtObjects(self.aedtapp._desktop_class, self.aedtapp.oproject, self.aedtapp.odesign) assert aedt_obj.odesign == self.aedtapp.odesign def test_34_force_project_path_disable(self): diff --git a/pyaedt/aedt_logger.py b/pyaedt/aedt_logger.py index 35d43a63c28..5fdbac374d9 100644 --- a/pyaedt/aedt_logger.py +++ b/pyaedt/aedt_logger.py @@ -133,7 +133,8 @@ class AedtLogger(object): Whether to write log messages to stdout. The default is ``False``. """ - def __init__(self, level=logging.DEBUG, filename=None, to_stdout=False): + def __init__(self, level=logging.DEBUG, filename=None, to_stdout=False, desktop=None): + self._desktop_class = desktop self._oproject = None self._odesign = None self._project_name = "" @@ -240,9 +241,8 @@ def remove_all_project_file_logger(self): @property def _desktop(self): - if "oDesktop" in dir(sys.modules["__main__"]): - MainModule = sys.modules["__main__"] - return MainModule.oDesktop + if self._desktop_class: + return self._desktop_class.odesktop return None # pragma: no cover @property @@ -591,7 +591,7 @@ def _log_on_dekstop(self, message_type, message_text, level=None, proj_name=None try: self._desktop.AddMessage(proj_name, des_name, message_type, message_text) except: - print("PyAEDT INFO: Failed in Adding Desktop Message") + pass def _log_on_handler(self, message_type, message_text, *args, **kwargs): message_text = str(message_text) diff --git a/pyaedt/application/Design.py b/pyaedt/application/Design.py index 13ecbc789f1..f0dcba75b2c 100644 --- a/pyaedt/application/Design.py +++ b/pyaedt/application/Design.py @@ 
-220,10 +220,7 @@ def load_aedt_thread(path): self._boundaries = {} self._project_datasets = {} self._design_datasets = {} - main_module = sys.modules["__main__"] self.close_on_exit = close_on_exit - self._global_logger = pyaedt_logger - self._logger = pyaedt_logger self._desktop_class = None self._desktop_class = _init_desktop_from_design( specified_version, @@ -235,13 +232,16 @@ def load_aedt_thread(path): port, aedt_process_id, ) + self._global_logger = self._desktop_class.logger + self._logger = self._desktop_class.logger + self.student_version = self._desktop_class.student_version if self.student_version: settings.disable_bounding_box_sat = True self._mttime = None - self._desktop = main_module.oDesktop + self._desktop = self._desktop_class.odesktop - self._desktop_install_dir = main_module.sDesktopinstallDirectory + self._desktop_install_dir = settings.aedt_install_dir self._odesign = None self._oproject = None if design_type == "HFSS": @@ -260,7 +260,7 @@ def load_aedt_thread(path): self.odesign = design_name self._logger.oproject = self.oproject self._logger.odesign = self.odesign - AedtObjects.__init__(self, is_inherithed=True) + AedtObjects.__init__(self, self._desktop_class, self.oproject, self.odesign, is_inherithed=True) self.logger.info("Aedt Objects correctly read") if t: t.join() @@ -2430,17 +2430,6 @@ def release_desktop(self, close_projects=True, close_desktop=True): for a in props: self.__dict__.pop(a, None) - dicts = [self, sys.modules["__main__"]] - for dict_to_clean in dicts: - props = [ - a - for a in dir(dict_to_clean) - if "win32com" in str(type(dict_to_clean.__dict__.get(a, None))) - or "pyaedt" in str(type(dict_to_clean.__dict__.get(a, None))) - ] - for a in props: - dict_to_clean.__dict__[a] = None - self._desktop_class = None gc.collect() return True @@ -3145,7 +3134,7 @@ def close_project(self, name=None, save_project=True): self._odesign = None else: self.odesktop.SetActiveProject(legacy_name) - AedtObjects.__init__(self, 
is_inherithed=True) + AedtObjects.__init__(self, self._desktop_class, is_inherithed=True) i = 0 timeout = 10 @@ -3501,7 +3490,7 @@ def duplicate_design(self, label, save_after_duplicate=True): self.odesign = actual_name[0] self.design_name = newname self._close_edb() - AedtObjects.__init__(self, is_inherithed=True) + AedtObjects.__init__(self, self._desktop_class, self.oproject, self.odesign, is_inherithed=True) if save_after_duplicate: self.oproject.Save() self._project_dictionary = None diff --git a/pyaedt/application/aedt_objects.py b/pyaedt/application/aedt_objects.py index 70ca429b6d6..252bba97ea5 100644 --- a/pyaedt/application/aedt_objects.py +++ b/pyaedt/application/aedt_objects.py @@ -1,11 +1,24 @@ import sys from pyaedt import pyaedt_function_handler +from pyaedt.generic.desktop_sessions import _desktop_sessions class AedtObjects(object): - def __init__(self, project=None, design=None, is_inherithed=False): - self._odesktop = sys.modules["__main__"].oDesktop + def __init__(self, desktop=None, project=None, design=None, is_inherithed=False): + if desktop: + self._odesktop = desktop.odesktop + elif _desktop_sessions and project: + project_name = project.GetName() + for desktop in list(_desktop_sessions.values()): + if project_name in list(desktop.project_list): + self._odesktop = desktop.odesktop + break + elif _desktop_sessions: + self._odesktop = list(_desktop_sessions.values())[-1].odesktop + elif "oDesktop" in dir(sys.modules["__main__"]): # ironpython + self._odesktop = sys.modules["__main__"].oDesktop # ironpython + if not is_inherithed: if project: self.oproject = project diff --git a/pyaedt/desktop.py b/pyaedt/desktop.py index 00bcd34a048..b4949e87125 100644 --- a/pyaedt/desktop.py +++ b/pyaedt/desktop.py @@ -21,9 +21,11 @@ import traceback import warnings +from pyaedt import __version__ as pyaedt_version from pyaedt import is_ironpython from pyaedt import is_linux from pyaedt import is_windows +from pyaedt.aedt_logger import AedtLogger from 
pyaedt.aedt_logger import pyaedt_logger from pyaedt.generic.general_methods import generate_unique_name @@ -56,6 +58,8 @@ modules = [tup[1] for tup in pkgutil.iter_modules()] +python_grpc_wrapper = None + @pyaedt_function_handler() def launch_aedt(full_path, non_graphical, port, student_version, first_run=True): @@ -205,42 +209,8 @@ def exception_to_desktop(ex_value, tb_data): # pragma: no cover def _delete_objects(): - settings._aedt_version = None settings.remote_api = False - module = sys.modules["__main__"] - try: - del module.COMUtil - except AttributeError: - pass pyaedt_logger.remove_all_project_file_logger() - try: - del module.oDesktop - except AttributeError: - pass - try: - del module.pyaedt_initialized - except AttributeError: - pass - try: - del module.oAnsoftApplication - except AttributeError: - pass - try: - del module.desktop - except AttributeError: - pass - try: - del module.sDesktopinstallDirectory - except AttributeError: - pass - try: - del module.isoutsideDesktop - except AttributeError: - pass - try: - del module.AEDTVersion - except AttributeError: - pass try: del sys.modules["glob"] except: @@ -249,11 +219,13 @@ def _delete_objects(): @pyaedt_function_handler() -def _close_aedt_application(close_desktop, pid, is_grpc_api): +def _close_aedt_application(desktop_class, close_desktop, pid, is_grpc_api): """Release the AEDT API. Parameters ---------- + desktop_class : :class:pyaedt.desktop.Desktop + Desktop class. close_desktop : bool Whether to close the active AEDT session. pid : int @@ -267,23 +239,47 @@ def _close_aedt_application(close_desktop, pid, is_grpc_api): ``True`` when successful, ``False`` when failed. 
""" - _main = sys.modules["__main__"] + global python_grpc_wrapper if settings.remote_rpc_session or (settings.aedt_version >= "2022.2" and is_grpc_api and not is_ironpython): - if close_desktop: + if close_desktop and desktop_class.parent_desktop_id: + pyaedt_logger.error("A child desktop session is linked to this session.") + pyaedt_logger.error("Multiple desktop sessions must be released in reverse order.") + return False + elif close_desktop: try: - _main.oDesktop.QuitApplication() + os.kill(pid, 9) + if _desktop_sessions: + for v in _desktop_sessions.values(): + if pid in v.parent_desktop_id: + del v.parent_desktop_id[v.parent_desktop_id.index(pid)] + return True except: # pragma: no cover warnings.warn("Something went wrong closing AEDT. Exception in `_main.oDesktop.QuitApplication()`.") pass + elif _desktop_sessions and len(_desktop_sessions) > 1 and not desktop_class.parent_desktop_id: + pyaedt_logger.error("Release is not allowed when multiple desktop sessions are available.") + pyaedt_logger.error("Closing Desktop session.") try: - _main.oDesktop.QuitApplication() + os.kill(pid, 9) + if _desktop_sessions: + for v in _desktop_sessions.values(): + if pid in v.parent_desktop_id: + del v.parent_desktop_id[v.parent_desktop_id.index(pid)] + return True except: # pragma: no cover + warnings.warn("Something went wrong closing AEDT. 
Exception in `_main.oDesktop.QuitApplication()`.") pass + elif _desktop_sessions and len(_desktop_sessions) > 1: + pyaedt_logger.error("A child desktop session is linked to this session.") + pyaedt_logger.error("Multiple desktop sessions must be released in reverse order.") + return False else: try: - import pyaedt.generic.grpc_plugin as StandalonePyScriptWrapper - - StandalonePyScriptWrapper.Release() + if not python_grpc_wrapper: + python_grpc_wrapper = __import__("pyaedt.generic.grpc_plugin") + # import pyaedt.generic.grpc_plugin as StandalonePyScriptWrapper + python_grpc_wrapper.AedtAPI.ReleaseAll() + return True except: # pragma: no cover warnings.warn( "Something went wrong releasing AEDT. Exception in `StandalonePyScriptWrapper.Release()`." @@ -300,7 +296,7 @@ def _close_aedt_application(close_desktop, pid, is_grpc_api): try: scopeID = 0 while scopeID <= 5: - _main.COMUtil.ReleaseCOMObjectScope(_main.COMUtil.PInvokeProxyAPI, scopeID) + desktop_class.COMUtil.ReleaseCOMObjectScope(desktop_class.COMUtil.PInvokeProxyAPI, scopeID) scopeID += 1 except: pyaedt_logger.warning( @@ -315,10 +311,12 @@ def _close_aedt_application(close_desktop, pid, is_grpc_api): if timeout == 0: try: os.kill(pid, 9) + return True except: # pragma: no cover warnings.warn("Something went wrong closing AEDT. Exception in `os.kill(pid, 9)` after timeout.") return False break + return True @@ -451,26 +449,33 @@ class Desktop(object): def __new__(cls, *args, **kwargs): # The following commented lines will be useful when we will need to search among multiple saved desktop. 
# specified_version = kwargs.get("specified_version") or None if not args else args[0] - # new_desktop_session = kwargs.get("new_desktop_session") or True if (not args or len(args)<3) else args[2] + new_desktop_session = kwargs.get("new_desktop_session") or False if (not args or len(args) < 3) else args[2] # student_version = kwargs.get("student_version") or False if (not args or len(args)<5) else args[4] # machine = kwargs.get("machine") or "" if (not args or len(args)<6) else args[5] - # port = kwargs.get("port") or 0 if (not args or len(args)<7) else args[6] - # aedt_process_id = kwargs.get("aedt_process_id") or None if (not args or len(args)<8) else args[7] - - if len(_desktop_sessions.keys()) > 0: + port = kwargs.get("port") or 0 if (not args or len(args) < 7) else args[6] + aedt_process_id = kwargs.get("aedt_process_id") or None if (not args or len(args) < 8) else args[7] + if settings.use_multi_desktop and is_windows and not inside_desktop and new_desktop_session: + pyaedt_logger.info("Initializing new Desktop session.") + return object.__new__(cls) + elif len(_desktop_sessions.keys()) > 0: + if settings.use_multi_desktop and is_windows and (port or aedt_process_id): + for el in list(_desktop_sessions.values()): + if (el.port != 0 and el.port == port) or ( + el.aedt_process_id and el.aedt_process_id == aedt_process_id + ): + return el sessions = list(_desktop_sessions.keys()) try: - # for the moment aedt supports only one desktop, which is saved in sessions[0] process_id = _desktop_sessions[sessions[0]].odesktop.GetProcessID() - print("Returning found desktop with PID {}!".format(process_id)) + pyaedt_logger.info("Returning found Desktop session with PID {}!".format(process_id)) cls._invoked_from_design = False return _desktop_sessions[sessions[0]] except: del _desktop_sessions[sessions[0]] - print("Initializing new desktop!") + pyaedt_logger.info("Initializing new Desktop session.") return object.__new__(cls) else: - print("Initializing new desktop!") + 
pyaedt_logger.info("Initializing new Desktop session.") return object.__new__(cls) def __init__( @@ -484,6 +489,8 @@ def __init__( port=0, aedt_process_id=None, ): + if _desktop_sessions and (specified_version is None or not settings.use_grpc_api): + specified_version = list(_desktop_sessions.values())[-1].aedt_version_id if aedt_process_id: # pragma no cover aedt_process_id = int(aedt_process_id) if getattr(self, "_initialized", None) is not None and self._initialized: @@ -492,6 +499,7 @@ def __init__( self._initialized = True self._initialized_from_design = True if Desktop._invoked_from_design else False Desktop._invoked_from_design = False + self.parent_desktop_id = [] self._connected_app_instances = 0 @@ -514,7 +522,6 @@ def __init__( if os.getenv("PYAEDT_SCRIPT_VERSION", None): specified_version = str(os.getenv("PYAEDT_SCRIPT_VERSION")) - self._main = sys.modules["__main__"] self.close_on_exit = close_on_exit self.machine = machine self.port = port @@ -524,6 +531,7 @@ def __init__( self.logfile = None self._logger = pyaedt_logger + if settings.enable_screen_logs: self._logger.enable_stdout_log() else: @@ -544,12 +552,10 @@ def __init__( # start the AEDT opening decision tree # starting_mode can be one of these: "grpc", "com", "ironpython", "console_in", "console_out" - if "oDesktop" in dir(): # pragma: no cover + if "oDesktop" in dir(sys.modules["__main__"]): # pragma: no cover # we are inside the AEDT Ironpython console + pyaedt_logger.logger.info("Iropnpython session with embedded oDesktop") starting_mode = "console_in" - elif "oDesktop" in dir(self._main) and self._main.oDesktop is not None: # pragma: no cover - # we are inside a python console outside AEDT (toolkit) - starting_mode = "console_out" elif is_linux: starting_mode = "grpc" elif is_windows and "pythonnet" not in modules: @@ -597,23 +603,19 @@ def __init__( else: # pragma: no cover # it should not arrive here, it means that there is a starting case not covered by the decision tree raise 
Exception("Unsupported AEDT starting mode") - # Starting AEDT if "console" in starting_mode: # technically not a startup mode, we have just to load oDesktop + self.odesktop = sys.modules["__main__"].oDesktop self.close_on_exit = False try: - self.non_graphical = oDesktop.GetIsNonGraphical() + self.non_graphical = self.odesktop.GetIsNonGraphical() except: # pragma: no cover self.non_graphical = non_graphical self.is_grpc_api = False - settings.aedt_version = self._main.oDesktop.GetVersion()[0:6] - if starting_mode == "console_in": - self._main.oDesktop = oDesktop + else: settings.aedt_version = version_key - if "oDesktop" in dir(self._main): - del self._main.oDesktop if starting_mode == "ironpython": self._logger.info("Launching PyAEDT outside AEDT with IronPython.") self._init_ironpython(non_graphical, new_desktop_session, version) @@ -634,10 +636,9 @@ def __init__( self._set_logger_file() settings.enable_desktop_logs = not self.non_graphical self._init_desktop() - self._logger.info("pyaedt v%s", self._main.pyaedt_version) + self._logger.info("pyaedt v%s", pyaedt_version) if not settings.remote_api: self._logger.info("Python version %s", sys.version) - self.odesktop = self._main.oDesktop current_pid = int(self.odesktop.GetProcessID()) if aedt_process_id and not new_desktop_session and aedt_process_id != current_pid: @@ -646,7 +647,7 @@ def __init__( ) self.aedt_process_id = current_pid - current_is_student = is_student_version(self._main.oDesktop) + current_is_student = is_student_version(self.odesktop) if student_version ^ current_is_student: self._logger.warning( "AEDT started as {} version, but requested as {} version.".format( @@ -655,14 +656,12 @@ def __init__( ) self.student_version = current_is_student - self.aedt_version_id = self._main.oDesktop.GetVersion()[0:6] + self.aedt_version_id = self.odesktop.GetVersion()[0:6] self._logger.info("AEDT %s Build Date %s", self.odesktop.GetVersion(), self.odesktop.GetBuildDateTimeString()) if is_ironpython: - 
sys.path.append( - os.path.join(self._main.sDesktopinstallDirectory, "common", "commonfiles", "IronPython", "DLLs") - ) + sys.path.append(os.path.join(settings.aedt_install_dir, "common", "commonfiles", "IronPython", "DLLs")) if "GetGrpcServerPort" in dir(self.odesktop): self.port = self.odesktop.GetGrpcServerPort() # save the current desktop session in the database @@ -715,12 +714,12 @@ def __getitem__(self, project_design_name): else: return None - return get_pyaedt_app(projectname, designname) + return get_pyaedt_app(projectname, designname, self) @property def install_path(self): """Installation path for AEDT.""" - version_key = self._main.AEDTVersion + version_key = settings.aedt_version try: return installed_versions()[version_key] except: # pragma: no cover @@ -743,11 +742,10 @@ def installed_versions(self): def _init_desktop(self): # run it after the settings.non_graphical is set - self._main.pyaedt_version = pyaedtversion - self._main.AEDTVersion = self._main.oDesktop.GetVersion()[0:6] - self._main.oDesktop.RestoreWindow() - self._main.sDesktopinstallDirectory = self._main.oDesktop.GetExeDir() - self._main.pyaedt_initialized = True + self.pyaedt_version = pyaedtversion + settings.aedt_version = self.odesktop.GetVersion()[0:6] + self.odesktop.RestoreWindow() + settings.aedt_install_dir = self.odesktop.GetExeDir() def _assert_version(self, specified_version, student_version): # avoid evaluating the env variables multiple times @@ -789,9 +787,9 @@ def _assert_version(self, specified_version, student_version): ) version = "Ansoft.ElectronicsDesktop." + specified_version[0:6] - self._main.sDesktopinstallDirectory = None + settings.aedt_install_dir = None if specified_version in self.installed_versions: - self._main.sDesktopinstallDirectory = self.installed_versions[specified_version] + settings.aedt_install_dir = self.installed_versions[specified_version] if settings.remote_rpc_session: try: version = "Ansoft.ElectronicsDesktop." 
+ settings.remote_rpc_session.aedt_version[0:6] @@ -804,21 +802,20 @@ def _assert_version(self, specified_version, student_version): def _init_ironpython(self, non_graphical, new_aedt_session, version): from pyaedt.generic.clr_module import _clr - base_path = self._main.sDesktopinstallDirectory + base_path = settings.aedt_install_dir sys.path.append(base_path) sys.path.append(os.path.join(base_path, "PythonFiles", "DesktopPlugin")) _clr.AddReference("Ansys.Ansoft.CoreCOMScripting") AnsoftCOMUtil = __import__("Ansys.Ansoft.CoreCOMScripting") self.COMUtil = AnsoftCOMUtil.Ansoft.CoreCOMScripting.Util.COMUtil - self._main.COMUtil = self.COMUtil StandalonePyScriptWrapper = AnsoftCOMUtil.Ansoft.CoreCOMScripting.COM.StandalonePyScriptWrapper if non_graphical or new_aedt_session: # forcing new thread to start in non-graphical oAnsoftApp = StandalonePyScriptWrapper.CreateObjectNew(non_graphical) else: oAnsoftApp = StandalonePyScriptWrapper.CreateObject(version) - self._main.oDesktop = oAnsoftApp.GetAppDesktop() - self._main.isoutsideDesktop = True + self.odesktop = oAnsoftApp.GetAppDesktop() + self.isoutsideDesktop = True sys.path.append(os.path.join(base_path, "common", "commonfiles", "IronPython", "DLLs")) self.is_grpc_api = False @@ -827,7 +824,7 @@ def _init_ironpython(self, non_graphical, new_aedt_session, version): def _run_student(self): DETACHED_PROCESS = 0x00000008 pid = subprocess.Popen( - [os.path.join(self._main.sDesktopinstallDirectory, "ansysedtsv.exe")], creationflags=DETACHED_PROCESS + [os.path.join(settings.aedt_install_dir, "ansysedtsv.exe")], creationflags=DETACHED_PROCESS ).pid time.sleep(5) @@ -835,8 +832,8 @@ def _dispatch_win32(self, version): from pyaedt.generic.clr_module import win32_client o_ansoft_app = win32_client.Dispatch(version) - self._main.oDesktop = o_ansoft_app.GetAppDesktop() - self._main.isoutsideDesktop = True + self.odesktop = o_ansoft_app.GetAppDesktop() + self.isoutsideDesktop = True def _init_dotnet( self, @@ -855,12 +852,11 @@ 
def _init_dotnet( raise Exception( "PyAEDT supports COM initialization in Windows only. To use in Linux, upgrade to AEDT 2022 R2 or later." ) - base_path = self._main.sDesktopinstallDirectory + base_path = settings.aedt_install_dir sys.path.insert(0, base_path) sys.path.insert(0, os.path.join(base_path, "PythonFiles", "DesktopPlugin")) launch_msg = "AEDT installation Path {}.".format(base_path) self.logger.info(launch_msg) - self.logger.info("Launching AEDT with COM plugin using PythonNET.") processID = [] if is_windows: processID = com_active_sessions(version, student_version, non_graphical) @@ -897,10 +893,10 @@ def _init_dotnet( m = re.search(version[10:] + r"\.\d:" + str(proc[0]), monikier.GetDisplayName(context, monikier)) if m: obj = running_coms.GetObject(monikier) - self._main.isoutsideDesktop = True + self.isoutsideDesktop = True from pyaedt.generic.clr_module import win32_client - self._main.oDesktop = win32_client.Dispatch(obj.QueryInterface(pythoncom.IID_IDispatch)) + self.odesktop = win32_client.Dispatch(obj.QueryInterface(pythoncom.IID_IDispatch)) break else: self.logger.warning( @@ -919,39 +915,56 @@ def _initialize( version=None, is_grpc=True, ): + global python_grpc_wrapper if not is_grpc: from pyaedt.generic.clr_module import _clr _clr.AddReference("Ansys.Ansoft.CoreCOMScripting") AnsoftCOMUtil = __import__("Ansys.Ansoft.CoreCOMScripting") self.COMUtil = AnsoftCOMUtil.Ansoft.CoreCOMScripting.Util.COMUtil - self._main.COMUtil = self.COMUtil StandalonePyScriptWrapper = AnsoftCOMUtil.Ansoft.CoreCOMScripting.COM.StandalonePyScriptWrapper if non_graphical or new_session: self.launched_by_pyaedt = True - return StandalonePyScriptWrapper.CreateObjectNew(non_graphical) + oapp = StandalonePyScriptWrapper.CreateObjectNew(non_graphical) else: - return StandalonePyScriptWrapper.CreateObject(version) + oapp = StandalonePyScriptWrapper.CreateObject(version) else: - base_path = self._main.sDesktopinstallDirectory + base_path = settings.aedt_install_dir 
sys.path.insert(0, base_path) sys.path.insert(0, os.path.join(base_path, "PythonFiles", "DesktopPlugin")) if is_linux: pyaedt_path = os.path.realpath(os.path.join(os.path.dirname(os.path.realpath(__file__)), "..")) os.environ["PATH"] = pyaedt_path + os.pathsep + os.environ["PATH"] - os.environ["DesktopPluginPyAEDT"] = os.path.join( - self._main.sDesktopinstallDirectory, "PythonFiles", "DesktopPlugin" - ) + os.environ["DesktopPluginPyAEDT"] = os.path.join(settings.aedt_install_dir, "PythonFiles", "DesktopPlugin") launch_msg = "AEDT installation Path {}".format(base_path) self.logger.info(launch_msg) - import pyaedt.generic.grpc_plugin as StandalonePyScriptWrapper - + if not python_grpc_wrapper: + python_grpc_wrapper = __import__("pyaedt.generic.grpc_plugin") + python_grpc_wrapper = python_grpc_wrapper.generic.grpc_plugin + # import pyaedt.generic.grpc_plugin as StandalonePyScriptWrapper + if _desktop_sessions: + last_session = list(_desktop_sessions.values())[-1] + all_desktop = [i for i in last_session.odesktop.GetRunningInstancesMgr().GetAllRunningInstances()] + for desktop in all_desktop: + if port and desktop.GetGrpcServerPort() == port: + self.isoutsideDesktop = True + self.odesktop = desktop + self.aedt_process_id = self.odesktop.GetProcessID() + self.is_grpc_api = True + last_session.parent_desktop_id.append(self.aedt_process_id) + return True if new_session: self.launched_by_pyaedt = new_session - return StandalonePyScriptWrapper.CreateAedtApplication(machine, port, non_graphical, new_session) + oapp = python_grpc_wrapper.CreateAedtApplication(machine, port, non_graphical, new_session) + if oapp: + + self.isoutsideDesktop = True + self.odesktop = oapp.GetAppDesktop() + self.aedt_process_id = self.odesktop.GetProcessID() + self.is_grpc_api = True + return True def _init_grpc(self, non_graphical, new_aedt_session, version, student_version, version_key): - self.logger.info("Launching AEDT using the gRPC plugin.") if settings.remote_rpc_session: # pragma: no 
cover settings.remote_api = True if not self.machine: @@ -1030,14 +1043,14 @@ def _init_grpc(self, non_graphical, new_aedt_session, version, student_version, self.logger.error("Failed to start LSF job on machine: %s.", self.machine) return elif new_aedt_session: - installer = os.path.join(self._main.sDesktopinstallDirectory, "ansysedt") + installer = os.path.join(settings.aedt_install_dir, "ansysedt") if student_version: # pragma: no cover - installer = os.path.join(self._main.sDesktopinstallDirectory, "ansysedtsv") + installer = os.path.join(settings.aedt_install_dir, "ansysedtsv") if not is_linux: if student_version: # pragma: no cover - installer = os.path.join(self._main.sDesktopinstallDirectory, "ansysedtsv.exe") + installer = os.path.join(settings.aedt_install_dir, "ansysedtsv.exe") else: - installer = os.path.join(self._main.sDesktopinstallDirectory, "ansysedt.exe") + installer = os.path.join(settings.aedt_install_dir, "ansysedt.exe") out, self.port = launch_aedt(installer, non_graphical, self.port, student_version) self.launched_by_pyaedt = True @@ -1059,13 +1072,9 @@ def _init_grpc(self, non_graphical, new_aedt_session, version, student_version, version=version_key, ) if oApp: - self._main.isoutsideDesktop = True - self._main.oDesktop = oApp.GetAppDesktop() - _proc = self._main.oDesktop.GetProcessID() - self.is_grpc_api = True if new_aedt_session: message = "{}{} version started with process ID {}.".format( - version, " Student" if student_version else "", _proc + version, " Student" if student_version else "", self.aedt_process_id ) self.logger.info(message) @@ -1080,14 +1089,14 @@ def _set_logger_file(self): else: if settings.remote_api or settings.remote_rpc_session: project_dir = tempfile.gettempdir() - elif "oDesktop" in dir(self._main): - project_dir = self._main.oDesktop.GetProjectDirectory() + elif self.odesktop: + project_dir = self.odesktop.GetProjectDirectory() else: project_dir = tempfile.gettempdir() self.logfile = os.path.join( project_dir, 
"pyaedt{}.log".format(datetime.datetime.now().strftime("%Y%m%d_%H%M%S")) ) - + self._logger = AedtLogger(desktop=self) return True @property @@ -1469,24 +1478,17 @@ def release_desktop(self, close_projects=True, close_on_exit=True): self.odesktop.CloseProject(project) except: # pragma: no cover self.logger.warning("Failed to close Project {}".format(project)) - result = _close_aedt_application(close_on_exit, self.aedt_process_id, self.is_grpc_api) - if result: - self.logger.info("Desktop has been released") + result = _close_aedt_application(self, close_on_exit, self.aedt_process_id, self.is_grpc_api) + if not result: + self.logger.error("Error releasing desktop.") + return False + self.logger.info("Desktop has been released") del _desktop_sessions[self.aedt_process_id] props = [a for a in dir(self) if not a.startswith("__")] for a in props: self.__dict__.pop(a, None) - dicts = [self, sys.modules["__main__"]] - for dict_to_clean in dicts: - props = [a for a in dir(dict_to_clean) if "win32com" in str(type(dict_to_clean.__dict__.get(a, None)))] - for a in props: - dict_to_clean.__dict__[a] = None self.odesktop = None - try: - del sys.modules["__main__"].oDesktop - except AttributeError: - pass gc.collect() return result @@ -1521,7 +1523,7 @@ def enable_autosave(self): >>> desktop.enable_autosave() """ - self._main.oDesktop.EnableAutoSave(True) + self.odesktop.EnableAutoSave(True) def disable_autosave(self): """Disable the autosave option. @@ -1535,7 +1537,7 @@ def disable_autosave(self): >>> desktop.disable_autosave() """ - self._main.oDesktop.EnableAutoSave(False) + self.odesktop.EnableAutoSave(False) def change_license_type(self, license_type="Pool"): """Change the license type. 
@@ -1556,7 +1558,7 @@ def change_license_type(self, license_type="Pool"): """ try: - self._main.oDesktop.SetRegistryString("Desktop/Settings/ProjectOptions/HPCLicenseType", license_type) + self.odesktop.SetRegistryString("Desktop/Settings/ProjectOptions/HPCLicenseType", license_type) return True except: return False @@ -1578,7 +1580,7 @@ def change_registry_key(self, key_full_name, key_value): """ if isinstance(key_value, str): try: - self._main.oDesktop.SetRegistryString(key_full_name, key_value) + self.odesktop.SetRegistryString(key_full_name, key_value) self.logger.info("Key %s correctly changed.", key_full_name) return True except: @@ -1586,7 +1588,7 @@ def change_registry_key(self, key_full_name, key_value): return False elif isinstance(key_value, int): try: - self._main.oDesktop.SetRegistryInt(key_full_name, key_value) + self.odesktop.SetRegistryInt(key_full_name, key_value) self.logger.info("Key %s correctly changed.", key_full_name) return True except: @@ -1639,7 +1641,7 @@ def change_registry_from_file(self, registry_file, make_active=True): ``True`` when successful, ``False`` when failed. """ try: - self._main.oDesktop.SetRegistryFromFile(registry_file) + self.odesktop.SetRegistryFromFile(registry_file) if make_active: with open(registry_file, "r") as f: for line in f: diff --git a/pyaedt/generic/design_types.py b/pyaedt/generic/design_types.py index 52146db27c6..30bca970151 100644 --- a/pyaedt/generic/design_types.py +++ b/pyaedt/generic/design_types.py @@ -1750,45 +1750,63 @@ def Siwave( } -def get_pyaedt_app(project_name=None, design_name=None): - """Returns the Pyaedt Object of specific project_name and design_name. +def get_pyaedt_app(project_name=None, design_name=None, desktop=None): + """Gets the PyAEDT object with a given project name and design name. Parameters ---------- - project_name - design_name + project_name : str, optional + Project name. + design_name : str, optional + Design name. 
+ desktop : :class:`pyaedt.desktop.Desktop`, optional + Desktop class. The default is ``None``. Returns ------- :def :`pyaedt.Hfss` Any of the Pyaedt App initialized. """ - main = sys.modules["__main__"] - if "oDesktop" in dir(main): - if project_name and project_name not in main.oDesktop.GetProjectList(): - raise AttributeError("Project {} doesn't exist in current Desktop.".format(project_name)) - if not project_name: - oProject = main.oDesktop.GetActiveProject() - else: - oProject = main.oDesktop.SetActiveProject(project_name) - if not oProject: - raise AttributeError("No Project Present.") - design_names = [] - deslist = list(oProject.GetTopDesignList()) - for el in deslist: - m = re.search(r"[^;]+$", el) - design_names.append(m.group(0)) - if design_name and design_name not in design_names: - raise AttributeError("Design {} doesn't exists in current Project.".format(design_name)) - if not design_name: - oDesign = oProject.GetActiveDesign() - else: - oDesign = oProject.SetActiveDesign(design_name) - if not oDesign: - raise AttributeError("No Design Present.") - design_type = oDesign.GetDesignType() - if design_type in list(app_map.keys()): - version = main.oDesktop.GetVersion().split(".") - v = ".".join([version[0], version[1]]) - return app_map[design_type](project_name, design_name, specified_version=v) + from pyaedt.generic.desktop_sessions import _desktop_sessions + + odesktop = None + if desktop: + odesktop = desktop.odesktop + elif _desktop_sessions and project_name: + for desktop in list(_desktop_sessions.values()): + if project_name in list(desktop.project_list()): + odesktop = desktop.odesktop + break + elif _desktop_sessions: + odesktop = list(_desktop_sessions.values())[-1] + elif "oDesktop" in dir(sys.modules["__main__"]): # ironpython + odesktop = sys.modules["__main__"].oDesktop # ironpython + else: + raise AttributeError("No Desktop Present.") + if project_name and project_name not in odesktop.GetProjectList(): + raise AttributeError("Project {} 
doesn't exist in current desktop.".format(project_name)) + if not project_name: + oProject = odesktop.GetActiveProject() + else: + oProject = odesktop.SetActiveProject(project_name) + if not oProject: + raise AttributeError("No project is present.") + design_names = [] + deslist = list(oProject.GetTopDesignList()) + for el in deslist: + m = re.search(r"[^;]+$", el) + design_names.append(m.group(0)) + if design_name and design_name not in design_names: + raise AttributeError("Design {} doesn't exist in current project.".format(design_name)) + if not design_name: + oDesign = oProject.GetActiveDesign() + else: + oDesign = oProject.SetActiveDesign(design_name) + if not oDesign: + raise AttributeError("No design is present.") + design_type = oDesign.GetDesignType() + if design_type in list(app_map.keys()): + version = odesktop.GetVersion().split(".") + v = ".".join([version[0], version[1]]) + return app_map[design_type](project_name, design_name, specified_version=v) return None diff --git a/pyaedt/generic/general_methods.py b/pyaedt/generic/general_methods.py index 9bcd6fb1ea6..c01df453d4d 100644 --- a/pyaedt/generic/general_methods.py +++ b/pyaedt/generic/general_methods.py @@ -129,10 +129,13 @@ def _exception(ex_info, func, args, kwargs, message="Type Error"): message_to_print = "" messages = "" - try: - messages = list(sys.modules["__main__"].oDesktop.GetMessages("", "", 2))[-1].lower() - except (GrpcApiError, AttributeError, TypeError, IndexError): - pass + from pyaedt.generic.desktop_sessions import _desktop_sessions + + if len(list(_desktop_sessions.values())) == 1: + try: + messages = list(list(_desktop_sessions.values())[0].odesktop.GetMessages("", "", 2))[-1].lower() + except (GrpcApiError, AttributeError, TypeError, IndexError): + pass if "error" in messages: message_to_print = messages[messages.index("[error]") :] # _write_mes("{} - {} - {}.".format(ex_info[1], func.__name__, message.upper())) @@ -208,14 +211,16 @@ def wrapper(*args, **kwargs): 
_log_method(user_function, args, kwargs) return out except MethodNotSupportedError: - message = "This Method is not supported in current AEDT Design Type." + message = "This method is not supported in current AEDT design type." if settings.enable_screen_logs: - print("**************************************************************") - print("pyaedt error on Method {}: {}. Please Check again".format(user_function.__name__, message)) - print("**************************************************************") - print("") + pyaedt_logger.error("**************************************************************") + pyaedt_logger.error( + "PyAEDT error on method {}: {}. Check again".format(user_function.__name__, message) + ) + pyaedt_logger.error("**************************************************************") + pyaedt_logger.error("") if settings.enable_file_logs: - settings.logger.error(message) + settings.error(message) return False except GrpcApiError: _exception(sys.exc_info(), user_function, args, kwargs, "AEDT grpc API call Error") diff --git a/pyaedt/generic/settings.py b/pyaedt/generic/settings.py index 59c3cd9d8c2..72c21e556e6 100644 --- a/pyaedt/generic/settings.py +++ b/pyaedt/generic/settings.py @@ -25,6 +25,8 @@ def __init__(self): self._enable_debug_logger = False self._enable_error_handler = True self._aedt_version = None + self._aedt_install_dir = None + self._use_multi_desktop = False self.remote_api = False self._use_grpc_api = None self.formatter = None @@ -217,6 +219,31 @@ def aedt_version(self, value): if self._aedt_version >= "2023.1": self.disable_bounding_box_sat = True + @property + def aedt_install_dir(self): + """AEDT installation path.""" + return self._aedt_install_dir + + @aedt_install_dir.setter + def aedt_install_dir(self, value): + self._aedt_install_dir = value + + @property + def use_multi_desktop(self): + """Flag indicating if multiple desktop sessions are enabled in the same Python script. 
+ Current limitations follow: + + - Release without closing the desktop is not possible, + - The first desktop created must be the last to be closed. + + Enabling multiple desktop sessions is a beta feature.""" + + return self._use_multi_desktop + + @use_multi_desktop.setter + def use_multi_desktop(self, value): + self._use_multi_desktop = value + @property def edb_dll_path(self): """Optional path for the EDB DLL file.""" From 8e54f8125e2796847c17155e9484c33cc79181c6 Mon Sep 17 00:00:00 2001 From: Massimo Capodiferro <77293250+maxcapodi78@users.noreply.github.com> Date: Fri, 22 Mar 2024 10:22:55 +0100 Subject: [PATCH 36/36] Lazy load on objects (#4384) Co-authored-by: maxcapodi78 --- _unittest/conftest.py | 2 +- _unittest/test_98_Icepak.py | 1 - pyaedt/application/Design.py | 32 ++- pyaedt/generic/LoadAEDTFile.py | 25 +- pyaedt/generic/settings.py | 16 ++ pyaedt/modeler/cad/Modeler.py | 2 +- pyaedt/modeler/cad/Primitives.py | 371 +++++++++++++++++++++------- pyaedt/modeler/cad/Primitives3D.py | 4 +- pyaedt/modeler/cad/components_3d.py | 4 +- pyaedt/modules/PostProcessor.py | 4 +- pyaedt/modules/SolveSetup.py | 8 +- pyaedt/modules/solutions.py | 2 +- 12 files changed, 345 insertions(+), 126 deletions(-) diff --git a/_unittest/conftest.py b/_unittest/conftest.py index eefb4baa7b8..5daf7321ad9 100644 --- a/_unittest/conftest.py +++ b/_unittest/conftest.py @@ -94,7 +94,7 @@ desktop_version = config["desktopVersion"] new_thread = config["NewThread"] settings.use_grpc_api = config["use_grpc"] - +settings.objects_lazy_load = False logger = pyaedt_logger diff --git a/_unittest/test_98_Icepak.py b/_unittest/test_98_Icepak.py index 771b0a22ce0..b84085e149e 100644 --- a/_unittest/test_98_Icepak.py +++ b/_unittest/test_98_Icepak.py @@ -11,7 +11,6 @@ from pyaedt.modules.SetupTemplates import SetupKeys test_subfolder = "T98" - if config["desktopVersion"] > "2022.2": test_project_name = "Filter_Board_Icepak_231" src_project_name = "USB_Connector_IPK_231" diff --git 
a/pyaedt/application/Design.py b/pyaedt/application/Design.py index f0dcba75b2c..3384fcede50 100644 --- a/pyaedt/application/Design.py +++ b/pyaedt/application/Design.py @@ -60,6 +60,12 @@ import base64 +def load_aedt_thread(project_path): + pp = load_entire_aedt_file(project_path) + settings._project_properties[os.path.normpath(project_path)] = pp + settings._project_time_stamp = os.path.getmtime(project_path) + + class Design(AedtObjects): """Contains all functions and objects connected to the active project and design. @@ -195,21 +201,16 @@ def __init__( port=0, aedt_process_id=None, ): - def load_aedt_thread(path): - start = time.time() - settings._project_properties[path] = load_entire_aedt_file(path) - settings._project_time_stamp = os.path.getmtime(project_name) - pyaedt_logger.info("AEDT file load (threaded) time: {}".format(time.time() - start)) - t = None + self.__t = None if ( not is_ironpython and project_name and os.path.exists(project_name) and (os.path.splitext(project_name)[1] == ".aedt" or os.path.splitext(project_name)[1] == ".a3dcomp") ): - t = threading.Thread(target=load_aedt_thread, args=(project_name,)) - t.start() + self.__t = threading.Thread(target=load_aedt_thread, args=(project_name,), daemon=True) + self.__t.start() self._init_variables() self._design_type = design_type self.last_run_log = "" @@ -262,8 +263,11 @@ def load_aedt_thread(path): self._logger.odesign = self.odesign AedtObjects.__init__(self, self._desktop_class, self.oproject, self.odesign, is_inherithed=True) self.logger.info("Aedt Objects correctly read") - if t: - t.join() + # if t: + # t.join() + if not self.__t and not settings.lazy_load and not is_ironpython and os.path.exists(self.project_file): + self.__t = threading.Thread(target=load_aedt_thread, args=(self.project_file,), daemon=True) + self.__t.start() self._variable_manager = VariableManager(self) self._project_datasets = [] self._design_datasets = [] @@ -342,8 +346,8 @@ def boundaries(self): bb.append(thermal) 
bb.append(self.get_oo_property_value(othermal, thermal, "Type")) - if self.modeler.user_defined_components: - for component in self.modeler.user_defined_components: + if self.modeler.user_defined_components.items(): + for component in self.modeler.user_defined_components.keys(): thermal_properties = self.get_oo_properties(self.oeditor, component) if thermal_properties and "Type" not in thermal_properties and thermal_properties[-1] != "Icepak": thermal_boundaries = self.design_properties["BoundarySetup"]["Boundaries"] @@ -522,6 +526,9 @@ def project_properties(self): dict Dictionary of the project properties. """ + if self.__t: + self.__t.join() + self.__t = None start = time.time() if self.project_timestamp_changed or ( os.path.exists(self.project_file) @@ -555,7 +562,6 @@ def design_properties(self): Dictionary of the design properties. """ - try: if model_names[self._design_type] in self.project_properties["AnsoftProject"]: designs = self.project_properties["AnsoftProject"][model_names[self._design_type]] diff --git a/pyaedt/generic/LoadAEDTFile.py b/pyaedt/generic/LoadAEDTFile.py index 29bc94d2e78..c9cbc828e24 100644 --- a/pyaedt/generic/LoadAEDTFile.py +++ b/pyaedt/generic/LoadAEDTFile.py @@ -23,10 +23,14 @@ def load_entire_aedt_file(filename): dictionary containing the decoded AEDT file """ - return _load_entire_aedt_file(os.path.normpath(filename)) + settings.logger.reset_timer() + settings.logger.info("Parsing {}.".format(filename)) + f_d = _load_entire_aedt_file(os.path.normpath(filename)) + settings.logger.info_timer("File {} correctly loaded.".format(filename)) + return f_d -def load_keyword_in_aedt_file(filename, keyword): +def load_keyword_in_aedt_file(filename, keyword, design_name=None): """Load s specific keyword in the AEDT file and return the dictionary Parameters @@ -42,7 +46,7 @@ def load_keyword_in_aedt_file(filename, keyword): dictionary containing the decoded AEDT file """ - return _load_keyword_in_aedt_file(filename, keyword) + return 
_load_keyword_in_aedt_file(filename, keyword, design_name) # -------------------------------------------------------------------- @@ -358,7 +362,7 @@ def _decode_subkey(line, d): d[k] = None -def _walk_through_structure(keyword, save_dict): +def _walk_through_structure(keyword, save_dict, design_name=None): """ Parameters @@ -375,12 +379,19 @@ def _walk_through_structure(keyword, save_dict): global _count begin_key = "$begin '{}'".format(keyword) end_key = "$end '{}'".format(keyword) + design_key = None + design_found = True + if design_name: + design_key = "Name='{}'".format(design_name) + design_found = False found = False saved_value = None while _count < _len_all_lines: line = _all_lines[_count] + if design_key and design_key in line: + design_found = True # begin_key is found - if begin_key == line: + if begin_key == line and design_found: found = True saved_value = save_dict.get(keyword) # if the keyword is already present, save it save_dict[keyword] = {} @@ -476,7 +487,7 @@ def _load_entire_aedt_file(filename): return main_dict -def _load_keyword_in_aedt_file(filename, keyword): +def _load_keyword_in_aedt_file(filename, keyword, design_name=None): """Load a specific keyword in the AEDT file and return the dictionary Parameters @@ -495,5 +506,5 @@ def _load_keyword_in_aedt_file(filename, keyword): _read_aedt_file(filename) # load the aedt file main_dict = {} - _walk_through_structure(keyword, main_dict) + _walk_through_structure(keyword, main_dict, design_name) return main_dict diff --git a/pyaedt/generic/settings.py b/pyaedt/generic/settings.py index 72c21e556e6..e7bc5d02a93 100644 --- a/pyaedt/generic/settings.py +++ b/pyaedt/generic/settings.py @@ -70,6 +70,22 @@ def __init__(self): self._retry_n_times_time_interval = 0.1 self._wait_for_license = False self.__lazy_load = True + self.__objects_lazy_load = False + + @property + def objects_lazy_load(self): + """Flag for enabling and disabling the lazy load. + The default is ``True``. 
+ + Returns + ------- + bool + """ + return self.__objects_lazy_load + + @objects_lazy_load.setter + def objects_lazy_load(self, value): + self.__objects_lazy_load = value @property def lazy_load(self): diff --git a/pyaedt/modeler/cad/Modeler.py b/pyaedt/modeler/cad/Modeler.py index b8feba49549..8883b46e315 100644 --- a/pyaedt/modeler/cad/Modeler.py +++ b/pyaedt/modeler/cad/Modeler.py @@ -1823,7 +1823,7 @@ def _list_verification(self, object_list, list_type): object_list_new.append(int(element)) else: if element in self._modeler.object_names: - obj_id = self._modeler._object_names_to_ids[element] + obj_id = self._modeler.objects[element].id for sel in self._modeler.object_list: if sel.id == obj_id: for f in sel.faces: diff --git a/pyaedt/modeler/cad/Primitives.py b/pyaedt/modeler/cad/Primitives.py index 3c9d8e23075..023c94a5ef5 100644 --- a/pyaedt/modeler/cad/Primitives.py +++ b/pyaedt/modeler/cad/Primitives.py @@ -55,6 +55,129 @@ aedt_wait_time = 0.1 +class Objects(dict): + """AEDT object dictionary.""" + + def _parse_objs(self): + if self.__refreshed is False and dict.__len__(self) != len(self.__parent.object_names): + self.__refreshed = True + if self.__obj_type == "o": + self.__parent.logger.info("Parsing design objects. This operation can take time") + self.__parent.logger.reset_timer() + self.__parent._refresh_all_ids_from_aedt_file() + self.__parent.add_new_solids() + self.__parent.cleanup_solids() + self.__parent.logger.info_timer("3D Modeler objects parsed.") + elif self.__obj_type == "p": + self.__parent.logger.info("Parsing design points. 
This operation can take time") + self.__parent.logger.reset_timer() + self.__parent.add_new_points() + self.__parent.cleanup_points() + self.__parent.logger.info_timer("3D Modeler objects parsed.") + elif self.__obj_type == "u": + self.__parent.add_new_user_defined_component() + + def __len__(self): + if self.__refreshed: + return dict.__len__(self) + elif self.__obj_type == "o": + return len(self.__parent.object_names) + elif self.__obj_type == "p": + return len(self.__parent.point_names) + else: + return len(self.__parent.user_defined_component_names) + + def __contains__(self, item): + if self.__refreshed: + return True if (item in dict.keys(self) or item in self.__obj_names) else False + elif isinstance(item, str): + if self.__obj_type == "o": + return True if item in self.__parent.object_names else False + elif self.__obj_type == "p": + return True if item in self.__parent.point_names else False + else: + return True if item in self.__parent.user_defined_component_names else False + self._parse_objs() + return True if (item in dict.keys(self) or item in self.__obj_names) else False + + def keys(self): + self._parse_objs() + + return dict.keys(self) + + def values(self): + self._parse_objs() + return dict.values(self) + + def items(self): + self._parse_objs() + return dict.items(self) + + def __iter__(self): + self._parse_objs() + return dict.__iter__(self) + + def __setitem__(self, key, value): + dict.__setitem__(self, key, value) + self.__obj_names[value.name] = value + if self.__obj_type == "o": + self.__parent._object_names_to_ids[value.name] = key + + @pyaedt_function_handler() + def __getitem__(self, item): + if item in dict.keys(self): + return dict.__getitem__(self, item) + elif item in self.__obj_names: + return self.__obj_names[item] + if self.__obj_type == "o": + if isinstance(item, int): + try: + id = item + name = self.__parent.oeditor.GetObjectNameByID(id) + o = self.__parent._create_object(name, id) + self.__setitem__(id, o) + return o + except: 
+ raise KeyError(item) + + elif isinstance(item, str): + try: + name = item + id = self.__parent.oeditor.GetObjectIDByName(name) + o = self.__parent._create_object(name, id) + self.__setitem__(id, o) + return o + except: + raise KeyError(item) + + elif isinstance(item, (Object3d, Polyline)): + self.__setitem__(item.id, item) + return item + else: + raise TypeError(item) + self._parse_objs() + if item in dict.keys(self): + return dict.__getitem__(self, item) + elif item in self.__obj_names: + return self.__obj_names[item] + raise KeyError(item) + + def __init__(self, parent, obj_type="o", props=None): + dict.__init__(self) + self.__obj_names = {} + self.__parent = parent + self.__obj_type = obj_type + if props: + for key, value in props.items(): + dict.__setitem__(self, key, value) + self.__obj_names[value.name] = value + if self.__obj_type == "o": + self.__parent._object_names_to_ids[value.name] = key + self.__refreshed = True + else: + self.__refreshed = False + + class GeometryModeler(Modeler): """Manages the main AEDT Modeler functionalities for geometry-based designs. @@ -81,26 +204,20 @@ def __getitem__(self, partId): Returns ``None`` if the part ID or the object name is not found. 
""" - if isinstance(partId, (int, str)) and not ( - partId in self.objects or partId in self._object_names_to_ids or partId in self.user_defined_components - ): - self.refresh_all_ids() - if isinstance(partId, int): - if partId in self.objects: - return self.objects[partId] - elif partId in self._object_names_to_ids: - return self.objects[self._object_names_to_ids[partId]] - elif partId in self.user_defined_components: - return self.user_defined_components[partId] - elif isinstance(partId, Object3d) or isinstance(partId, UserDefinedComponent): + if isinstance(partId, (Object3d, UserDefinedComponent, Point)): return partId + try: + return self.objects[partId] + except: + if partId in self.user_defined_components.keys(): + return self.user_defined_components[partId] self.logger.error("Object '{}' not found.".format(partId)) return None def __init__(self, app, is3d=True): self._app = app + self._model_data = {} Modeler.__init__(self, app) - # TODO Refactor this as a dictionary with names as key self._coordinate_systems = [] self._user_lists = [] self._planes = [] @@ -111,10 +228,10 @@ def __init__(self, app, is3d=True): self._points = [] self._unclassified = [] self._all_object_names = [] - self.objects = {} - self.user_defined_components = {} self._object_names_to_ids = {} - self.points = {} + self.objects = Objects(self, "o") + self.user_defined_components = Objects(self, "u") + self.points = Objects(self, "p") self.refresh() class Position: @@ -402,8 +519,8 @@ def sheet_objects(self): list of :class:`pyaedt.modeler.cad.object3d.Object3d` 3D object. """ - # self._refresh_sheets() - return [self[name] for name in self.sheet_names if self[name]] + self._refresh_sheets() + return [v for k, v in self.objects_by_name.items() if k in self._sheets] @property def line_objects(self): @@ -414,8 +531,8 @@ def line_objects(self): list of :class:`pyaedt.modeler.cad.object3d.Object3d` 3D object. 
""" - # self._refresh_lines() - return [self[name] for name in self.line_names if self[name]] + self._refresh_lines() + return [v for k, v in self.objects_by_name.items() if k in self._lines] @property def point_objects(self): @@ -426,8 +543,8 @@ def point_objects(self): list of :class:`pyaedt.modeler.cad.object3d.Object3d` 3D object. """ - # self._refresh_points() - return [self.points[name] for name in self.point_names] + self._refresh_points() + return [v for k, v in self.points.items() if k in self._points] @property def unclassified_objects(self): @@ -438,8 +555,8 @@ def unclassified_objects(self): list of :class:`pyaedt.modeler.cad.object3d.Object3d` 3D object. """ - # self._refresh_unclassified() - return [self[name] for name in self.unclassified_names if name is not None] + self._refresh_unclassified() + return [v for k, v in self.objects_by_name.items() if k in self._unclassified] @property def object_list(self): @@ -451,7 +568,7 @@ def object_list(self): 3D object. """ self._refresh_object_types() - return [self[name] for name in self._all_object_names if name is not None and name not in self.point_names] + return [v for name, v in self.objects_by_name.items() if name is not None and name not in self.point_names] @property def solid_names(self): @@ -542,7 +659,7 @@ def user_defined_component_names(self): udm = [] obs3d = list(set(udm + obs3d)) new_obs3d = copy.deepcopy(obs3d) - if self.user_defined_components: + if self.user_defined_components.keys(): existing_components = list(self.user_defined_components.keys()) new_obs3d = [i for i in obs3d if i] for _, value in enumerate(existing_components): @@ -563,7 +680,7 @@ def layout_component_names(self): Layout component names. 
""" lc_names = [] - if self.user_defined_components: + if self.user_defined_components.keys(): for name, value in self.user_defined_components.items(): if value.layout_component: lc_names.append(name) @@ -658,13 +775,13 @@ def refresh(self): self._points = [] self._unclassified = [] self._all_object_names = [] - self.objects = {} - self.user_defined_components = {} self._object_names_to_ids = {} - self._currentId = 0 + self.objects = Objects(self, "o") + self.user_defined_components = Objects(self, "u") self._refresh_object_types() - self._refresh_all_ids_from_aedt_file() - self.refresh_all_ids() + if not settings.objects_lazy_load: + self._refresh_all_ids_from_aedt_file() + self.refresh_all_ids() @pyaedt_function_handler() def _get_commands(self, name): @@ -696,32 +813,33 @@ def _create_point(self, name): @pyaedt_function_handler() def _refresh_all_ids_from_aedt_file(self): - if not self._design_properties or "ModelSetup" not in self._design_properties: + + dp = copy.deepcopy(self._app.design_properties) + if not dp or "ModelSetup" not in dp: return False try: - groups = self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"]["Groups"]["Group"] + groups = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["Groups"]["Group"] except KeyError: groups = [] if not isinstance(groups, list): groups = [groups] try: - self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"]["GeometryPart"] + dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"]["GeometryPart"] except KeyError: return 0 - for el in self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"][ - "GeometryPart" - ]: + + for el in dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"]["GeometryPart"]: if isinstance(el, (OrderedDict, dict)): attribs = el["Attributes"] operations = el.get("Operations", None) else: - attribs = 
self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"][ - "GeometryPart" - ]["Attributes"] - operations = self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"][ - "ToplevelParts" - ]["GeometryPart"]["Operations"] + attribs = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"]["GeometryPart"][ + "Attributes" + ] + operations = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"]["GeometryPart"][ + "Operations" + ] if attribs["Name"] in self._all_object_names: pid = 0 @@ -778,6 +896,24 @@ def cleanup_objects(self): a modeler operation such as :func:`pyaedt.modeler.Model3D.Modeler3D.unite` or :func:`pyaedt.modeler.Model2D.Modeler2D.unite`. + Returns + ------- + dict + Dictionary of updated object IDs. + + """ + self.cleanup_solids() + self.cleanup_points() + + @pyaedt_function_handler() + def cleanup_solids(self): + """Clean up solids that no longer exist in the modeler because + they were removed by previous operations. + + This method also updates object IDs that may have changed via + a modeler operation such as :func:`pyaedt.modeler.Model3D.Modeler3D.unite` + or :func:`pyaedt.modeler.Model2D.Modeler2D.unite`. 
+ Returns ------- dict @@ -786,23 +922,42 @@ def cleanup_objects(self): """ new_object_dict = {} new_object_id_dict = {} - new_points_dict = {} all_objects = self.object_names all_unclassified = self.unclassified_names all_objs = all_objects + all_unclassified - for old_id, obj in self.objects.items(): - if obj.name in all_objs: - # Check if ID can change in boolean operations - # updated_id = obj.id # By calling the object property we get the new id - new_object_id_dict[obj.name] = old_id - new_object_dict[old_id] = obj + if len(all_objs) != len(self._object_names_to_ids): + for old_id, obj in self.objects.items(): + if obj.name in all_objs: + # Check if ID can change in boolean operations + # updated_id = obj.id # By calling the object property we get the new id + new_object_id_dict[obj.name] = old_id + new_object_dict[old_id] = obj + self._object_names_to_ids = {} + self.objects = Objects(self, "o", new_object_dict) + + @pyaedt_function_handler() + def cleanup_points(self): + """Clean up points that no longer exist in the modeler because + they were removed by previous operations. + + This method also updates object IDs that may have changed via + a modeler operation such as :func:`pyaedt.modeler.Model3D.Modeler3D.unite` + or :func:`pyaedt.modeler.Model2D.Modeler2D.unite`. + + Returns + ------- + dict + Dictionary of updated object IDs. + + """ + new_points_dict = {} + for old_id, obj in self.points.items(): if obj.name in self._points: new_points_dict[obj.name] = obj - self.objects = new_object_dict - self._object_names_to_ids = new_object_id_dict - self.points = new_points_dict + + self.points = Objects(self, "p", new_points_dict) @pyaedt_function_handler() def find_new_objects(self): @@ -832,29 +987,59 @@ def add_new_objects(self): List of added objects. """ - # TODO: Need to improve documentation for this method. 
added_objects = [] objs_ids = {} - if not self._object_names_to_ids: - for obj in self._all_object_names: - try: - objs_ids[obj] = self.oeditor.GetObjectIDByName(obj) - except: - pass + added_objects = self.add_new_solids() + added_objects += self.add_new_points() + return added_objects + + @pyaedt_function_handler() + def add_new_solids(self): + """Add objects that have been created in the modeler by + previous operations. + + Returns + ------- + list + List of added objects. + + """ + added_objects = [] + for obj_name in self.object_names: if obj_name not in self._object_names_to_ids: - pid = objs_ids[obj_name] if obj_name in objs_ids else 0 + try: + pid = self.oeditor.GetObjectIDByName(obj) + except: + pid = 0 self._create_object(obj_name, pid=pid, use_cached=True) added_objects.append(obj_name) for obj_name in self.unclassified_names: if obj_name not in self._object_names_to_ids: - pid = objs_ids[obj_name] if obj_name in objs_ids else 0 + try: + pid = self.oeditor.GetObjectIDByName(obj) + except: + pid = 0 self._create_object(obj_name, pid=pid, use_cached=True) added_objects.append(obj_name) + + return added_objects + + @pyaedt_function_handler() + def add_new_points(self): + """Add objects that have been created in the modeler by + previous operations. + + Returns + ------- + list + List of added objects. 
+ + """ + added_objects = [] for obj_name in self.point_names: if obj_name not in self.points.keys(): - pid = objs_ids[obj_name] if obj_name in objs_ids else 0 - self._create_object(obj_name, pid=pid, use_cached=True) + self._create_object(obj_name, pid=0, use_cached=True) added_objects.append(obj_name) return added_objects @@ -871,7 +1056,7 @@ def add_new_user_defined_component(self): """ added_component = [] for comp_name in self.user_defined_component_names: - if comp_name not in self.user_defined_components: + if comp_name not in self.user_defined_components.keys(): self._create_user_defined_component(comp_name) added_component.append(comp_name) return added_component @@ -882,7 +1067,8 @@ def add_new_user_defined_component(self): def refresh_all_ids(self): """Refresh all IDs.""" - self.add_new_objects() + self.add_new_solids() + self.add_new_points() self.add_new_user_defined_component() self.cleanup_objects() @@ -941,8 +1127,9 @@ def _get_coordinates_data(self): # pragma: no cover coord = [] id2name = {1: "Global"} name2refid = {} - if self._design_properties and "ModelSetup" in self._design_properties: - cs = self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"]["CoordinateSystems"] + dp = copy.deepcopy(self._design_properties) + if dp and "ModelSetup" in dp: + cs = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["CoordinateSystems"] for ds in cs: try: if isinstance(cs[ds], (OrderedDict, dict)): @@ -964,9 +1151,9 @@ def _get_coordinates_data(self): # pragma: no cover cs_id = cs[ds]["ID"] id2name[cs_id] = name op_id = cs[ds]["PlaceHolderOperationID"] - geometry_part = self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"][ - "ToplevelParts" - ]["GeometryPart"] + geometry_part = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"][ + "GeometryPart" + ] if isinstance(geometry_part, (OrderedDict, dict)): op = geometry_part["Operations"]["FaceCSHolderOperation"] if isinstance(op, (OrderedDict, 
dict)): @@ -1012,9 +1199,9 @@ def _get_coordinates_data(self): # pragma: no cover cs_id = el["ID"] id2name[cs_id] = name op_id = el["PlaceHolderOperationID"] - geometry_part = self._design_properties["ModelSetup"]["GeometryCore"][ - "GeometryOperations" - ]["ToplevelParts"]["GeometryPart"] + geometry_part = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["ToplevelParts"][ + "GeometryPart" + ] if isinstance(geometry_part, (OrderedDict, dict)): op = geometry_part["Operations"]["FaceCSHolderOperation"] if isinstance(op, (OrderedDict, dict)): @@ -1062,12 +1249,13 @@ def _get_lists_data(self): [Dict with List information] """ design_lists = [] - if self._design_properties and self._design_properties.get("ModelSetup", None): + dp = copy.deepcopy(self._design_properties) + if dp and dp.get("ModelSetup", None): key1 = "GeometryOperations" key2 = "GeometryEntityLists" key3 = "GeometryEntityListOperation" try: - entity_list = self._design_properties["ModelSetup"]["GeometryCore"][key1][key2] + entity_list = dp["ModelSetup"]["GeometryCore"][key1][key2] if entity_list: geom_entry = copy.deepcopy(entity_list[key3]) if isinstance(geom_entry, (dict, OrderedDict)): @@ -4270,7 +4458,7 @@ def get_object_name_from_edge_id(self, edge_id): >>> oEditor.GetEdgeIDsFromObject """ - for object in list(self._object_names_to_ids.keys()): + for object in self.solid_names + self.sheet_names + self.line_names: try: oEdgeIDs = self.oeditor.GetEdgeIDsFromObject(object) if str(edge_id) in oEdgeIDs: @@ -5780,7 +5968,7 @@ def update_object(self, obj): o = self._resolve_object(obj) name = o.name - del self.objects[self._object_names_to_ids[name]] + del self.objects[self.objects_by_name[name].id] del self._object_names_to_ids[name] o = self._create_object(name) return o @@ -6548,7 +6736,7 @@ def delete_objects_containing(self, contained_string, case_sensitive=True): >>> oEditor.Delete """ - objnames = self._object_names_to_ids + objnames = self.object_names num_del = 0 for el in objnames: if 
case_sensitive: @@ -6577,8 +6765,8 @@ def get_obj_id(self, objname): Object ID. """ - if objname in self._object_names_to_ids: - return self._object_names_to_ids[objname] + if objname in self.objects_by_name: + return self.objects_by_name[objname].id return None @pyaedt_function_handler() @@ -6596,9 +6784,9 @@ def get_object_from_name(self, objname): 3D object returned. """ - if objname in self._object_names_to_ids: - object_id = self.get_obj_id(objname) - return self.objects[object_id] + if objname in self.object_names: + # object_id = self.get_obj_id(objname) + return self.objects[objname] @pyaedt_function_handler() def get_objects_w_string(self, stringname, case_sensitive=True): @@ -6898,7 +7086,7 @@ def get_object_faces(self, partId): """ oFaceIDs = [] - if isinstance(partId, str) and partId in self._object_names_to_ids: + if isinstance(partId, str) and partId in self.objects_by_name: oFaceIDs = self.oeditor.GetFaceIDs(partId) oFaceIDs = [int(i) for i in oFaceIDs] elif partId in self.objects: @@ -7967,7 +8155,6 @@ def _create_object(self, name, pid=0, use_cached=False, is_polyline=False, **kwa new_id = o.id o = self.get_existing_polyline(o) self.objects[new_id] = o - self._object_names_to_ids[o.name] = new_id else: o = Object3d(self, name) if is_polyline: @@ -7977,7 +8164,6 @@ def _create_object(self, name, pid=0, use_cached=False, is_polyline=False, **kwa else: new_id = o.id self.objects[new_id] = o - self._object_names_to_ids[o.name] = new_id # Set properties from kwargs. 
if len(kwargs) > 0: @@ -8182,11 +8368,12 @@ def _get_native_component_properties(self, name): if name in self.oeditor.Get3DComponentInstanceNames(comp3d): component_name = comp3d break - if self._design_properties and self._design_properties.get("ModelSetup", None) and component_name: + dp = copy.deepcopy(self._app.design_properties) + if dp and dp.get("ModelSetup", None) and component_name: try: - native_comp_entry = self._design_properties["ModelSetup"]["GeometryCore"]["GeometryOperations"][ - "SubModelDefinitions" - ]["NativeComponentDefinition"] + native_comp_entry = dp["ModelSetup"]["GeometryCore"]["GeometryOperations"]["SubModelDefinitions"][ + "NativeComponentDefinition" + ] if native_comp_entry: if isinstance(native_comp_entry, (dict, OrderedDict)): native_comp_entry = [native_comp_entry] diff --git a/pyaedt/modeler/cad/Primitives3D.py b/pyaedt/modeler/cad/Primitives3D.py index e5a1b782ab0..1e56c708362 100644 --- a/pyaedt/modeler/cad/Primitives3D.py +++ b/pyaedt/modeler/cad/Primitives3D.py @@ -1148,8 +1148,8 @@ def create_helix(self, polyline_name, position, x_start_dir, y_start_dir, z_star vArg2.append(self._arg_with_dim(thread)) self.oeditor.CreateHelix(vArg1, vArg2) - if polyline_name in self._object_names_to_ids: - del self.objects[self._object_names_to_ids[polyline_name]] + if polyline_name in self.objects_by_name: + del self.objects[self.objects_by_name[polyline_name].id] return self._create_object(polyline_name, **kwargs) @pyaedt_function_handler() diff --git a/pyaedt/modeler/cad/components_3d.py b/pyaedt/modeler/cad/components_3d.py index 612029767f8..7df24b8429b 100644 --- a/pyaedt/modeler/cad/components_3d.py +++ b/pyaedt/modeler/cad/components_3d.py @@ -414,8 +414,8 @@ def parts(self): component_parts = list(self._primitives.oeditor.GetChildObject(self.name).GetChildNames()) parts_id = [ - self._primitives._object_names_to_ids[part] - for part in self._primitives._object_names_to_ids + self._primitives.objects_by_name[part].id + for part in 
self._primitives.objects_by_name.keys() if part in component_parts ] parts_dict = {part_id: self._primitives.objects[part_id] for part_id in parts_id} diff --git a/pyaedt/modules/PostProcessor.py b/pyaedt/modules/PostProcessor.py index 45cb207971c..892304de2c7 100644 --- a/pyaedt/modules/PostProcessor.py +++ b/pyaedt/modules/PostProcessor.py @@ -3899,7 +3899,7 @@ def power_budget(self, units="W", temperature=22, output_type="component"): group_hierarchy = {} groups = self._app.oeditor.GetChildNames("Groups") - + self._app.modeler.add_new_user_defined_component() for g in groups: g1 = self._app.oeditor.GetChildObject(g) if g1: @@ -4186,7 +4186,7 @@ def extract_dataset_info(boundary_obj, units_input="W", boundary="Power"): power_dict[bc_obj.name] = power_value - for native_comps in self.modeler.user_defined_components: + for native_comps in self.modeler.user_defined_components.keys(): if hasattr(self.modeler.user_defined_components[native_comps], "native_properties"): native_key = "NativeComponentDefinitionProvider" if native_key in self.modeler.user_defined_components[native_comps].native_properties: diff --git a/pyaedt/modules/SolveSetup.py b/pyaedt/modules/SolveSetup.py index 286eeea07b9..da7e9f80ce6 100644 --- a/pyaedt/modules/SolveSetup.py +++ b/pyaedt/modules/SolveSetup.py @@ -1951,14 +1951,14 @@ def _get_net_names(self, app, file_fullname): if aedtapp_objs: for p in aedtapp.modeler.get_bodynames_from_position(position, None, False): if p in metal_object: - obj_ind = aedtapp.modeler._object_names_to_ids[p] + obj_ind = aedtapp.modeler.objects[p].id if obj_ind not in obj_dict: obj_dict[obj_ind] = aedtapp.modeler.objects[obj_ind] if net in via_per_nets: for via_pos in via_per_nets[net]: for p in aedtapp.modeler.get_bodynames_from_position(via_pos, None, False): if p in metal_object: - obj_ind = aedtapp.modeler._object_names_to_ids[p] + obj_ind = aedtapp.modeler.objects[p].id if obj_ind not in obj_dict: obj_dict[obj_ind] = aedtapp.modeler.objects[obj_ind] for 
lay_el in list(layers_elevation.values()): @@ -1967,7 +1967,7 @@ def _get_net_names(self, app, file_fullname): pad_objs = aedtapp.modeler.get_bodynames_from_position(pad_pos, None, False) for pad_obj in pad_objs: if pad_obj in metal_object: - pad_ind = aedtapp.modeler._object_names_to_ids[pad_obj] + pad_ind = aedtapp.modeler.objects[pad_obj].id if pad_ind not in obj_dict: obj_dict[pad_ind] = aedtapp.modeler.objects[pad_ind] obj_list = list(obj_dict.values()) @@ -1979,7 +1979,7 @@ def _get_net_names(self, app, file_fullname): obj_list[0].color = [randrange(255), randrange(255), randrange(255)] elif len(obj_list) > 1: united_object = aedtapp.modeler.unite(obj_list, purge=True) - obj_ind = aedtapp.modeler._object_names_to_ids[united_object] + obj_ind = aedtapp.modeler.objects[united_object].id try: net = net.replace("-", "m") net = net.replace("+", "p") diff --git a/pyaedt/modules/solutions.py b/pyaedt/modules/solutions.py index 051788ce994..37844785cfd 100644 --- a/pyaedt/modules/solutions.py +++ b/pyaedt/modules/solutions.py @@ -3453,7 +3453,7 @@ def _create_args(self): if isinstance(self.ray_box, int): box_id = self.ray_box elif isinstance(self.ray_box, str): - box_id = self._postprocessor._primitives._object_names_to_ids[self.ray_box] + box_id = self._postprocessor._primitives.objects[self.ray_box].id else: box_id = self.ray_box.id args.extend("FilterBoxID:=", box_id)