diff --git a/.gitignore b/.gitignore index a47b528f..5f08477d 100644 --- a/.gitignore +++ b/.gitignore @@ -162,4 +162,7 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ \ No newline at end of file +#.idea/ + +# VSCode +.vscode \ No newline at end of file diff --git a/README.md b/README.md index df4efee7..991e45cd 100644 --- a/README.md +++ b/README.md @@ -128,7 +128,11 @@ Installation python -m pytest ./ ``` -**NOTE: geos-pv package cannot be build alone, but together with Paraview ([see Paraview compilation guide](https://gitlab.kitware.com/paraview/paraview/-/blob/master/Documentation/dev/build.md)). It is recommended to use Paraview v5.12+, which is based on python 3.10+. Alternatively, plugins from geos-pv/PVplugins can be manually loaded into Paraview ([see documentation](https://docs.paraview.org/en/latest/ReferenceManual/pythonProgrammableFilter.html#python-algorithm)).** +> [!WARNING] +> Due to local package conflicts with `pip install`, it is recommended either to build the packages one by one, or to include only top-level packages (see dependency tree above) in the build list. + +> [!NOTE] +> geos-pv package cannot be built alone, but together with Paraview ([see Paraview compilation guide](https://gitlab.kitware.com/paraview/paraview/-/blob/master/Documentation/dev/build.md)). It is recommended to use Paraview v5.12+, which is based on python 3.10+. Alternatively, plugins from geos-pv/PVplugins can be manually loaded into Paraview ([see documentation](https://docs.paraview.org/en/latest/ReferenceManual/pythonProgrammableFilter.html#python-algorithm)). 
Contributions @@ -141,8 +145,8 @@ If you would like to report a bug, please submit an [issue](https://github.com/G If you would like to contribute to GEOS Python packages, please respect the following guidelines: 1. Create a new branch named from this template: `[CONTRIBUTOR]/[TYPE]/[TITLE]` where CONTRIBUTOR is the name of the contributor, TYPE is the type of contribution among 'feature', 'refactor', 'doc', 'ci', TITLE is a short title for the branch. -1. Add your code trying to integrate into the current code architecture. -1. Push the branch, open a new PR, and add reviewers +2. Add your code trying to integrate into the current code architecture. +3. Push the branch, open a new PR respecting naming [semantics](https://gist.github.com/joshbuchea/6f47e86d2510bce28f8e7f42ae84c716), and add reviewers If you do not have the rights to push the code and open new PRs, consider opening a new issue to explain what you want to do and ask for the dev rights. @@ -170,6 +174,9 @@ dependencies = [ ] ``` +> [!IMPORTANT] +> geos-pv dependencies are managed using a requirements.txt (together with the setup.py) file where all internal (and external if needed) dependencies are present. It ensures that internal dependency paths are correctly set when plugins are manually loaded into Paraview. + Release ------- diff --git a/docs/conf.py b/docs/conf.py index 91d1fb2e..aaa025ec 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -17,7 +17,8 @@ # Add python modules to be documented python_root = '..' 
-python_modules = ( 'geos-ats', 'geos-geomechanics', 'geos-mesh', 'geos-posp', 'geos-timehistory', 'geos-utils', 'geos-xml-tools', 'geos-xml-viewer', 'hdf5-wrapper', 'pygeos-tools' ) +python_modules = ( 'geos-ats', 'geos-geomechanics', 'geos-mesh', 'geos-posp', 'geos-pv', 'geos-timehistory', 'geos-utils', 'geos-xml-tools', 'geos-xml-viewer', 'hdf5-wrapper', 'pygeos-tools' ) + for m in python_modules: sys.path.insert( 0, os.path.abspath( os.path.join( python_root, m, 'src' ) ) ) @@ -48,7 +49,7 @@ ] autoclass_content = 'both' -autodoc_mock_imports = [ "ats", "colorcet", "h5py", "lxml", "meshio", "mpi4py", "scipy", "paraview", "pygeosx", "pylvarray", "vtk", "xmlschema", "xsdata" ] +autodoc_mock_imports = [ "ats", "colorcet", "h5py", "lxml", "matplotlib", "meshio", "mpi4py", "scipy", "pandas", "paraview", "pygeosx", "pylvarray", "vtk", "xmlschema", "xsdata", ] autodoc_typehints = 'none' autodoc_typehints_format = 'short' suppress_warnings = [ "autodoc.mocked_object" ] diff --git a/docs/geos-pv.rst b/docs/geos-pv.rst new file mode 100644 index 00000000..fc153b70 --- /dev/null +++ b/docs/geos-pv.rst @@ -0,0 +1,12 @@ +GEOS Paraview plugins +===================== + +.. toctree:: + :maxdepth: 5 + :caption: Contents: + + ./geos_pv_docs/home.rst + + ./geos_pv_docs/modules.rst + + ./geos_pv_docs/readers.rst \ No newline at end of file diff --git a/docs/geos_posp_docs/PVplugins.rst b/docs/geos_posp_docs/PVplugins.rst index 4987b058..91c859b6 100644 --- a/docs/geos_posp_docs/PVplugins.rst +++ b/docs/geos_posp_docs/PVplugins.rst @@ -75,14 +75,7 @@ PVGeomechanicsWorkflowVolumeWell plugin .. automodule:: PVplugins.PVGeomechanicsWorkflowVolumeWell - -PVGeosLogReader plugin --------------------------------- - -.. automodule:: PVplugins.PVGeosLogReader - - -PVMergeBlocksEnhanced plugin +PVplugins.PVMergeBlocksEnhanced module -------------------------------------- .. automodule:: PVplugins.PVMergeBlocksEnhanced @@ -93,14 +86,7 @@ PVMohrCirclePlot plugin .. 
automodule:: PVplugins.PVMohrCirclePlot - -PVPythonViewConfigurator plugin ------------------------------------------ - -.. automodule:: PVplugins.PVPythonViewConfigurator - - -PVSurfaceGeomechanics plugin +PVplugins.PVSurfaceGeomechanics module -------------------------------------- .. automodule:: PVplugins.PVSurfaceGeomechanics diff --git a/docs/geos_posp_docs/modules.rst b/docs/geos_posp_docs/modules.rst index 476a7c57..99fcad60 100644 --- a/docs/geos_posp_docs/modules.rst +++ b/docs/geos_posp_docs/modules.rst @@ -6,8 +6,6 @@ Processing filters - readers - processing pyvistaTools diff --git a/docs/geos_posp_docs/processing.rst b/docs/geos_posp_docs/processing.rst index a05fb0bd..d82e7361 100644 --- a/docs/geos_posp_docs/processing.rst +++ b/docs/geos_posp_docs/processing.rst @@ -3,14 +3,6 @@ Processing functions This package define functions to process data. -geos_posp.processing.geosLogReaderFunctions module ------------------------------------------------------- - -.. automodule:: geos_posp.processing.geosLogReaderFunctions - :members: - :undoc-members: - :show-inheritance: - geos_posp.processing.multiblockInpectorTreeFunctions module --------------------------------------------------------------- diff --git a/docs/geos_posp_docs/readers.rst b/docs/geos_posp_docs/readers.rst deleted file mode 100644 index 588a5910..00000000 --- a/docs/geos_posp_docs/readers.rst +++ /dev/null @@ -1,36 +0,0 @@ -vtk Readers -=========== - -This package defines vtk readers that allows to load Geos output files. - -geos_posp.readers.GeosLogReaderAquifers module --------------------------------------------------- - -.. automodule:: geos_posp.readers.GeosLogReaderAquifers - :members: - :undoc-members: - :show-inheritance: - -geos_posp.readers.GeosLogReaderConvergence module ------------------------------------------------------ - -.. 
automodule:: geos_posp.readers.GeosLogReaderConvergence - :members: - :undoc-members: - :show-inheritance: - -geos_posp.readers.GeosLogReaderFlow module ----------------------------------------------- - -.. automodule:: geos_posp.readers.GeosLogReaderFlow - :members: - :undoc-members: - :show-inheritance: - -geos_posp.readers.GeosLogReaderWells module ------------------------------------------------ - -.. automodule:: geos_posp.readers.GeosLogReaderWells - :members: - :undoc-members: - :show-inheritance: diff --git a/docs/geos_posp_docs/visu.pythonViewUtils.rst b/docs/geos_posp_docs/visu.pythonViewUtils.rst deleted file mode 100644 index 68047cfb..00000000 --- a/docs/geos_posp_docs/visu.pythonViewUtils.rst +++ /dev/null @@ -1,23 +0,0 @@ -PythonViewUtils Package -============================ - -This package includes utilities to display cross-plot using the Python View from Paraview. - - -geos_posp.visu.pythonViewUtils.Figure2DGenerator module ----------------------------------------------------------- - -.. automodule:: geos_posp.visu.pythonViewUtils.Figure2DGenerator - :members: - :undoc-members: - :show-inheritance: - -geos_posp.visu.pythonViewUtils.functionsFigure2DGenerator module -------------------------------------------------------------------- - -.. automodule:: geos_posp.visu.pythonViewUtils.functionsFigure2DGenerator - :members: - :undoc-members: - :show-inheritance: - - diff --git a/docs/geos_posp_docs/visu.rst b/docs/geos_posp_docs/visu.rst index c6e782b7..66ec9bf8 100644 --- a/docs/geos_posp_docs/visu.rst +++ b/docs/geos_posp_docs/visu.rst @@ -9,6 +9,4 @@ This package includes visualization tools dedicated to Paraview software. 
visu.PVUtils - visu.mohrCircles - - visu.pythonViewUtils \ No newline at end of file + visu.mohrCircles \ No newline at end of file diff --git a/docs/geos_pv_docs/geosLogReaderUtils.rst b/docs/geos_pv_docs/geosLogReaderUtils.rst new file mode 100644 index 00000000..1b2e2018 --- /dev/null +++ b/docs/geos_pv_docs/geosLogReaderUtils.rst @@ -0,0 +1,45 @@ +GeosLogReaderUtils functions +============================ + +This package define functions dedicated to the GeosLogReader. + + +geos.pv.geosLogReaderUtils.GeosLogReaderAquifers module +------------------------------------------------------------------- + +.. automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderAquifers + :members: + :undoc-members: + :show-inheritance: + +geos.pv.geosLogReaderUtils.geosLogReaderConvergence module +--------------------------------------------------------------------- + +.. automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderConvergence + :members: + :undoc-members: + :show-inheritance: + +geos.pv.geosLogReaderUtils.GeosLogReaderFlow module +--------------------------------------------------------------- + +.. automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderFlow + :members: + :undoc-members: + :show-inheritance: + +geos.pv.geosLogReaderUtils.GeosLogReaderFunctions module +-------------------------------------------------------------------- + +.. automodule:: geos.pv.geosLogReaderUtils.geosLogReaderFunctions + :members: + :undoc-members: + :show-inheritance: + +geos.pv.geosLogReaderUtils.GeosLogReaderWells module +-------------------------------------------------------------------- + +.. 
automodule:: geos.pv.geosLogReaderUtils.GeosLogReaderWells + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/home.rst b/docs/geos_pv_docs/home.rst new file mode 100644 index 00000000..68c9fff1 --- /dev/null +++ b/docs/geos_pv_docs/home.rst @@ -0,0 +1,18 @@ +Home +==== + +**geos-pv** is a Python package that gathers `Paraview `_ plugins of GEOS python tools. + +It includes: + +* Paraview readers allowing to load data; +* generic tools to process meshes; +* GEOS pre-processing tools to clean and check GEOS input mesh; +* GEOS post-processing tools to clean GEOS output mesh, compute additional properties, or create specific plots such as Mohr's circle plot. + +The packages can be loaded into Paraview using the Plugin Manager from `Tools > Plugin Manager`. On success, you will +see the selected plugin in the `Filters` menu (see `Paraview documentation `). + +Alternatively, geos-pv package can be built together with Paraview ([see Paraview compilation guide](https://gitlab.kitware.com/paraview/paraview/-/blob/master/Documentation/dev/build.md)). +It is recommended to use Paraview v5.12+, which is based on python 3.10+. If you need to build geos-pv package with the paraview dependency, use the command: +`pip install Path/To/geosPythonPackages/geos-pv[paraview]` diff --git a/docs/geos_pv_docs/modules.rst b/docs/geos_pv_docs/modules.rst new file mode 100644 index 00000000..2a9bdca1 --- /dev/null +++ b/docs/geos_pv_docs/modules.rst @@ -0,0 +1,11 @@ +Processing +========== + +.. toctree:: + :maxdepth: 5 + + geosLogReaderUtils + + pyplotUtils + + utils \ No newline at end of file diff --git a/docs/geos_pv_docs/pyplotUtils.rst b/docs/geos_pv_docs/pyplotUtils.rst new file mode 100644 index 00000000..6b2b36c4 --- /dev/null +++ b/docs/geos_pv_docs/pyplotUtils.rst @@ -0,0 +1,13 @@ +pyplotUtils functions +===================== + +This package defines options for matplotlib. 
+ + +geos.pv.pyplotUtils.matplotlibOptions module +--------------------------------------------- + +.. automodule:: geos.pv.pyplotUtils.matplotlibOptions + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/docs/geos_pv_docs/readers.rst b/docs/geos_pv_docs/readers.rst new file mode 100644 index 00000000..c7fb7100 --- /dev/null +++ b/docs/geos_pv_docs/readers.rst @@ -0,0 +1,7 @@ +Paraview readers +================ + +readers.PVGeosLogReader module +---------------------------------- + +.. automodule:: readers.PVGeosLogReader \ No newline at end of file diff --git a/docs/geos_pv_docs/utils.rst b/docs/geos_pv_docs/utils.rst new file mode 100644 index 00000000..3a868de5 --- /dev/null +++ b/docs/geos_pv_docs/utils.rst @@ -0,0 +1,29 @@ +Utilities +========= + +This package defines utilities for Paraview plugins. + + +geos.pv.utils.checkboxFunction module +--------------------------------------------- + +.. automodule:: geos.pv.utils.checkboxFunction + :members: + :undoc-members: + :show-inheritance: + +geos.pv.utils.DisplayOrganizationParaview module +------------------------------------------------- + +.. automodule:: geos.pv.utils.DisplayOrganizationParaview + :members: + :undoc-members: + :show-inheritance: + +geos.pv.utils.paraviewTreatments module +--------------------------------------------- + +.. 
automodule:: geos.pv.utils.paraviewTreatments + :members: + :undoc-members: + :show-inheritance: \ No newline at end of file diff --git a/docs/index.rst b/docs/index.rst index c14a1db8..1738c55e 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -80,11 +80,13 @@ Packages geos-ats + geos-geomechanics + geos-mesh geos-posp - - geos-geomechanics + + geos-pv geos-timehistory diff --git a/docs/requirements.txt b/docs/requirements.txt index 773420cf..f911d8f8 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,19 +1,19 @@ -sphinx >= 7.4.7 -sphinx_rtd_theme -sphinx-argparse >= 0.5.2 -sphinx-autodoc-typehints -sphinx-design +sphinx >= 8.2 +sphinx_rtd_theme >= 3.0 +sphinx-argparse >= 0.5 +sphinx-autodoc-typehints >= 3.1 +sphinx-design >= 0.6 # Running CLI programs and capture outputs -sphinxcontrib-programoutput>=0.17 +sphinxcontrib-programoutput >= 0.17 # Installing all package requirements to be able to load all the modules and run the help. -vtk >= 9.1 +vtk >= 9.3 networkx >= 2.4 -tqdm -numpy -pandas -typing_extensions +tqdm >= 4.67 +numpy >= 2.2 +pandas >= 2.2 +typing_extensions > 4.12 matplotlib>=3.9.4 -h5py -lxml>=4.5.0 -parameterized -pyvista +h5py >= 3.12 +lxml >= 4.5.0 +parameterized >= 0.9 +pyvista >= 0.44 diff --git a/geos-geomechanics/pyproject.toml b/geos-geomechanics/pyproject.toml index d49048c3..16439ad6 100644 --- a/geos-geomechanics/pyproject.toml +++ b/geos-geomechanics/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" [tool.setuptools] @@ -7,7 +7,7 @@ include-package-data = true [tool.setuptools.packages.find] where = ["src"] -include = ["geos_geomechanics*"] +include = ["geos.geomechanics*"] exclude = ['tests*'] [project] @@ -22,11 +22,11 @@ classifiers = [ "Programming Language :: Python" ] dependencies=[ + "geos-utils @ file:./geos-utils", "vtk >= 9.3", - "numpy >= 1.26", + "numpy >= 2.2", "pandas >= 2.2", 
"typing_extensions >= 4.12", - "geos-utils @ file:./geos-utils", ] requires-python = ">= 3.10" diff --git a/geos-mesh/pyproject.toml b/geos-mesh/pyproject.toml index 03708863..2317c68b 100644 --- a/geos-mesh/pyproject.toml +++ b/geos-mesh/pyproject.toml @@ -1,29 +1,35 @@ [build-system] -requires = ["setuptools>=42", "wheel"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] +include = ["geos.mesh*"] +exclude = ['tests*'] + [project] name = "geos-mesh" version = "0.0.1" description = "GEOS mesh tools" authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Christopher Sherman", email = "sherman27@llnl.gov"} -] +maintainers = [{name = "Christopher Sherman", email = "sherman27@llnl.gov"}] license = {text = "LGPL-2.1"} classifiers = [ "Development Status :: 4 - Beta", "Programming Language :: Python" ] -requires-python = ">=3.8" +requires-python = ">=3.10" dependencies = [ - "vtk >= 9.1", + "vtk >= 9.3", "networkx >= 2.4", - "tqdm", - "numpy", - "meshio>=5.3.2", + "tqdm >= 4.67", + "numpy >= 2.2", + "meshio >= 5.3", ] [project.scripts] @@ -36,6 +42,19 @@ Documentation = "https://geosx-geosx.readthedocs-hosted.com/projects/geosx-geosp Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" "Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" +[project.optional-dependencies] +build = [ + "build ~= 1.2" +] +dev = [ + "mypy", + "yapf", +] +test = [ + "pytest-cov", + "pytest" +] + [tool.pytest.ini_options] addopts = [ "--import-mode=importlib", diff --git a/geos-posp/pyproject.toml b/geos-posp/pyproject.toml index d9e9d26a..b51c1ca7 100644 --- a/geos-posp/pyproject.toml +++ b/geos-posp/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] build-backend = "setuptools.build_meta" [tool.setuptools] @@ -14,7 
+14,8 @@ exclude = ['tests*'] name = "geos-posp" version = "1.0.0" description = "The Python package geos-posp is dedicated to post-process data from the geos simulation tool." -authors = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] +authors = [{name = "GEOS Contributors" }] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ "Intended Audience :: Developers", @@ -33,12 +34,12 @@ keywords = [ requires-python = ">= 3.10" dependencies = [ + "geos-geomechanics @ file:./geos-geomechanics", + "geos-utils @ file:./geos-utils", "vtk >= 9.3", - "numpy >= 1.26", + "numpy >= 2.2", "pandas >= 2.2", "typing_extensions >= 4.12", - "geos-utils @ file:./geos-utils", - "geos-geomechanics @ file:./geos-geomechanics", ] @@ -49,6 +50,9 @@ Repository = "https://github.com/GEOS-DEV/geosPythonPackages.git" "Bug Tracker" = "https://github.com/GEOS-DEV/geosPythonPackages/issues" [project.optional-dependencies] +build = [ + "build ~= 1.2" +] dev = [ "mypy", "yapf", diff --git a/geos-posp/src/PVplugins/PVAttributeMapping.py b/geos-posp/src/PVplugins/PVAttributeMapping.py index e4532d30..989d6394 100644 --- a/geos-posp/src/PVplugins/PVAttributeMapping.py +++ b/geos-posp/src/PVplugins/PVAttributeMapping.py @@ -13,6 +13,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos_posp.filters.AttributeMappingFromCellCoords import ( AttributeMappingFromCellCoords, ) @@ -211,6 +213,7 @@ def RequestData( "Use either vtkUnstructuredGrid or vtkMultiBlockDataSet" ) outData.Modified() + mess: str = "Attributes were successfully transferred ." 
self.m_logger.info( mess ) except AssertionError as e: diff --git a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py index e49c61e9..7aba2271 100644 --- a/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py +++ b/geos-posp/src/PVplugins/PVCreateConstantAttributePerRegion.py @@ -15,6 +15,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + import vtkmodules.util.numpy_support as vnp from geos.utils.Logger import Logger, getLogger from geos_posp.processing.multiblockInpectorTreeFunctions import ( diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py index 817d7762..cd0814f0 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolume.py @@ -4,7 +4,6 @@ # ruff: noqa: E402 # disable Module level import not at top of file import os import sys - import numpy as np import numpy.typing as npt from typing_extensions import Self @@ -16,6 +15,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py index 22477abf..6233809b 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurface.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py 
b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py index 7aaabc5a..3de64962 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeSurfaceWell.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py index 43882eac..2519d41c 100644 --- a/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py +++ b/geos-posp/src/PVplugins/PVExtractMergeBlocksVolumeWell.py @@ -19,6 +19,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.GeosOutputsConstants import ( GeosMeshOutputsEnum, getAttributeToTransferFromInitialTime, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py index ad78eb0e..d1e44c26 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsAnalysis.py @@ -19,6 +19,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py index 2807152c..c50e5c22 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolume.py @@ -16,6 +16,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( 
DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py index b384ba06..0bfae2ee 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurface.py @@ -16,6 +16,12 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + +from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] + VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, +) + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, @@ -24,10 +30,6 @@ DEFAULT_ROCK_COHESION, WATER_DENSITY, ) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, -) - from PVplugins.PVExtractMergeBlocksVolumeSurface import ( PVExtractMergeBlocksVolumeSurface, ) from PVplugins.PVGeomechanicsAnalysis import PVGeomechanicsAnalysis diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py index 2cdbba33..fff90856 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeSurfaceWell.py @@ -16,6 +16,12 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + +from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] + VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, +) + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, @@ -24,10 +30,6 @@ DEFAULT_ROCK_COHESION, WATER_DENSITY, ) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, smdomain, smhint, smproperty, 
smproxy, -) - from PVplugins.PVExtractMergeBlocksVolumeSurfaceWell import ( PVExtractMergeBlocksVolumeSurfaceWell, ) from PVplugins.PVGeomechanicsAnalysis import PVGeomechanicsAnalysis diff --git a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py index dbbbab49..210fd933 100644 --- a/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py +++ b/geos-posp/src/PVplugins/PVGeomechanicsWorkflowVolumeWell.py @@ -16,6 +16,12 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + +from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] + VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, +) + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, @@ -24,9 +30,6 @@ DEFAULT_ROCK_COHESION, WATER_DENSITY, ) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, -) from PVplugins.PVExtractMergeBlocksVolumeWell import ( PVExtractMergeBlocksVolumeWell, ) diff --git a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py index bbbd9696..1bdc9666 100644 --- a/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py +++ b/geos-posp/src/PVplugins/PVMergeBlocksEnhanced.py @@ -13,6 +13,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos_posp.processing.vtkUtils import mergeBlocks from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] diff --git a/geos-posp/src/PVplugins/PVMohrCirclePlot.py b/geos-posp/src/PVplugins/PVMohrCirclePlot.py index 90693c3d..be2b5e7d 100644 --- a/geos-posp/src/PVplugins/PVMohrCirclePlot.py +++ b/geos-posp/src/PVplugins/PVMohrCirclePlot.py @@ -27,6 +27,8 @@ if parent_dir_path not in 
sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + import geos_posp.visu.mohrCircles.functionsMohrCircle as mcf import geos_posp.visu.PVUtils.paraviewTreatments as pvt from geos.geomechanics.model.MohrCircle import MohrCircle diff --git a/geos-posp/src/PVplugins/PVPythonViewConfigurator.py b/geos-posp/src/PVplugins/PVPythonViewConfigurator.py deleted file mode 100644 index 8d52d412..00000000 --- a/geos-posp/src/PVplugins/PVPythonViewConfigurator.py +++ /dev/null @@ -1,859 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto, Martin Lemay -# ruff: noqa: E402 # disable Module level import not at top of file -import os -import sys -from typing import Any, Union, cast - -import pandas as pd # type: ignore[import-untyped] -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.dirname( dir_path ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -import geos_posp.visu.PVUtils.paraviewTreatments as pvt -from geos_posp.visu.PVUtils.checkboxFunction import ( # type: ignore[attr-defined] - createModifiedCallback, ) -from geos_posp.visu.PVUtils.DisplayOrganizationParaview import ( - DisplayOrganizationParaview, ) -from geos_posp.visu.PVUtils.matplotlibOptions import ( - FontStyleEnum, - FontWeightEnum, - LegendLocationEnum, - LineStyleEnum, - MarkerStyleEnum, - OptionSelectionEnum, - optionEnumToXml, -) -from paraview.simple import ( # type: ignore[import-not-found] - GetActiveSource, GetActiveView, Render, Show, servermanager, -) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, -) -from vtkmodules.vtkCommonCore import ( - vtkDataArraySelection, - vtkInformation, - vtkInformationVector, -) - -__doc__ = """ -PVPythonViewConfigurator is a Paraview plugin that allows to 
create cross-plots -from input data using the PythonView. - -Input type is vtkDataObject. - -This filter results in opening a new Python View window and displaying cross-plot. - -To use it: - -* Load the module in Paraview: Tools>Manage Plugins...>Load new>PVPythonViewConfigurator. -* Select the vtkDataObject containing the data to plot. -* Search and Apply PVPythonViewConfigurator Filter. - -""" - - -@smproxy.filter( name="PVPythonViewConfigurator", label="Python View Configurator" ) -@smhint.xml( '' ) -@smproperty.input( name="Input" ) -@smdomain.datatype( dataTypes=[ "vtkDataObject" ], composite_data_supported=True ) -class PVPythonViewConfigurator( VTKPythonAlgorithmBase ): - - def __init__( self: Self ) -> None: - """Paraview plugin to create cross-plots in a Python View. - - Input is a vtkDataObject. - """ - super().__init__( nInputPorts=1, nOutputPorts=1 ) - # python view layout and object - self.m_layoutName: str = "" - self.m_pythonView: Any - self.m_organizationDisplay = DisplayOrganizationParaview() - self.buildNewLayoutWithPythonView() - - # input source and curve names - inputSource = GetActiveSource() - dataset = servermanager.Fetch( inputSource ) - dataframe: pd.DataFrame = pvt.vtkToDataframe( dataset ) - self.m_pathPythonViewScript: str = os.path.join( parent_dir_path, "visu/pythonViewUtils/mainPythonView.py" ) - - # checkboxes - self.m_modifyInputs: int = 1 - self.m_modifyCurves: int = 1 - self.m_multiplyCurves: int = 0 - - # checkboxes curves available from the data of pipeline - self.m_validSources = vtkDataArraySelection() - self.m_curvesToPlot = vtkDataArraySelection() - self.m_curvesMinus1 = vtkDataArraySelection() - self.m_validSources.AddObserver( "ModifiedEvent", createModifiedCallback( self ) ) # type: ignore[arg-type] - self.m_curvesToPlot.AddObserver( "ModifiedEvent", createModifiedCallback( self ) ) # type: ignore[arg-type] - self.m_curvesMinus1.AddObserver( "ModifiedEvent", createModifiedCallback( self ) ) # type: ignore[arg-type] - 
validSourceNames: set[ str ] = pvt.getPossibleSourceNames() - for sourceName in validSourceNames: - self.m_validSources.AddArray( sourceName ) - validColumnsDataframe: list[ str ] = list( dataframe.columns ) - for name in list( dataframe.columns ): - for axis in [ "X", "Y", "Z" ]: - if "Points" + axis in name and "Points" + axis + "__" in name: - positionDoublon: int = validColumnsDataframe.index( "Points" + axis ) - validColumnsDataframe.pop( positionDoublon ) - break - self.m_validColumnsDataframe: list[ str ] = sorted( validColumnsDataframe, key=lambda x: x.lower() ) - for curveName in validColumnsDataframe: - self.m_curvesToPlot.AddArray( curveName ) - self.m_curvesMinus1.AddArray( curveName ) - self.m_validSources.DisableAllArrays() - self.m_curvesToPlot.DisableAllArrays() - self.m_curvesMinus1.DisableAllArrays() - self.m_curveToUse: str = "" - # to change the aspects of curves - self.m_curvesToModify: set[ str ] = pvt.integrateSourceNames( validSourceNames, set( validColumnsDataframe ) ) - self.m_color: tuple[ float, float, float ] = ( 0.0, 0.0, 0.0 ) - self.m_lineStyle: str = LineStyleEnum.SOLID.optionValue - self.m_lineWidth: float = 1.0 - self.m_markerStyle: str = MarkerStyleEnum.NONE.optionValue - self.m_markerSize: float = 1.0 - - # user choices - self.m_userChoices: dict[ str, Any ] = { - "variableName": "", - "curveNames": [], - "curveConvention": [], - "inputNames": [], - "plotRegions": False, - "reverseXY": False, - "logScaleX": False, - "logScaleY": False, - "minorticks": False, - "displayTitle": True, - "title": "title1", - "titleStyle": FontStyleEnum.NORMAL.optionValue, - "titleWeight": FontWeightEnum.BOLD.optionValue, - "titleSize": 12, - "legendDisplay": True, - "legendPosition": LegendLocationEnum.BEST.optionValue, - "legendSize": 10, - "removeJobName": True, - "removeRegions": False, - "curvesAspect": {}, - } - - def getUserChoices( self: Self ) -> dict[ str, Any ]: - """Access the m_userChoices attribute. 
- - Returns: - dict[str] : the user choices for the figure. - """ - return self.m_userChoices - - def getInputNames( self: Self ) -> set[ str ]: - """Get source names from user selection. - - Returns: - set[str] : source names from ParaView pipeline. - """ - inputAvailables = self.a01GetInputSources() - inputNames: set[ str ] = set( pvt.getArrayChoices( inputAvailables ) ) - return inputNames - - def defineInputNames( self: Self ) -> None: - """Adds the input names to the userChoices.""" - inputNames: set[ str ] = self.getInputNames() - self.m_userChoices[ "inputNames" ] = inputNames - - def defineUserChoicesCurves( self: Self ) -> None: - """Define user choices for curves to plot.""" - sourceNames: set[ str ] = self.getInputNames() - dasPlot = self.b02GetCurvesToPlot() - dasMinus1 = self.b07GetCurveConvention() - curveNames: set[ str ] = set( pvt.getArrayChoices( dasPlot ) ) - minus1Names: set[ str ] = set( pvt.getArrayChoices( dasMinus1 ) ) - toUse1: set[ str ] = pvt.integrateSourceNames( sourceNames, curveNames ) - toUse2: set[ str ] = pvt.integrateSourceNames( sourceNames, minus1Names ) - self.m_userChoices[ "curveNames" ] = tuple( toUse1 ) - self.m_userChoices[ "curveConvention" ] = tuple( toUse2 ) - - def defineCurvesAspect( self: Self ) -> None: - """Define user choices for curve aspect properties.""" - curveAspect: tuple[ tuple[ float, float, float ], str, float, str, float ] = ( self.getCurveAspect() ) - curveName: str = self.getCurveToUse() - self.m_userChoices[ "curvesAspect" ][ curveName ] = curveAspect - - def buildPythonViewScript( self: Self ) -> str: - """Builds the Python script used to launch the Python View. - - The script is returned as a string to be then injected in the Python - View. - - Returns: - str: Complete Python View script. 
- """ - sourceNames: set[ str ] = self.getInputNames() - userChoices: dict[ str, Any ] = self.getUserChoices() - script: str = f"timestep = '{str(GetActiveView().ViewTime)}'\n" - script += f"sourceNames = {sourceNames}\n" - script += f"variableName = '{userChoices['variableName']}'\n" - script += f"dir_path = '{dir_path}'\n" - script += f"userChoices = {userChoices}\n\n\n" - with open( self.m_pathPythonViewScript ) as file: - fileContents = file.read() - script += fileContents - return script - - def buildNewLayoutWithPythonView( self: Self ) -> None: - """Create a new Python View layout.""" - # we first built the new layout - layout_names: list[ str ] = self.m_organizationDisplay.getLayoutsNames() - nb_layouts: int = len( layout_names ) - # imagine two layouts already exists, the new one will be named "Layout #3" - layoutName: str = "Layout #" + str( nb_layouts + 1 ) - # check that we that the layoutName is new and does not belong to the list of layout_names, - # if not we modify the layoutName until it is a new one - if layoutName in layout_names: - cpt: int = 2 - while layoutName in layout_names: - layoutName = "Layout #" + str( nb_layouts + cpt ) - cpt += 1 - self.m_organizationDisplay.addLayout( layoutName ) - self.m_layoutName = layoutName - - # we then build the new python view - self.m_organizationDisplay.addViewToLayout( "PythonView", layoutName, 0 ) - self.m_pythonView = self.m_organizationDisplay.getLayoutViews()[ layoutName ][ 0 ] - Show( GetActiveSource(), self.m_pythonView, "PythonRepresentation" ) - - # widgets definition - """The names of the @smproperty methods command names below have a letter in lower case in - front because PARAVIEW displays properties in the alphabetical order. 
- See https://gitlab.kitware.com/paraview/paraview/-/issues/21493 for possible improvements on - this issue""" - - @smproperty.dataarrayselection( name="InputSources" ) - def a01GetInputSources( self: Self ) -> vtkDataArraySelection: - """Get all valid sources for the filter. - - Returns: - vtkDataArraySelection: valid data sources. - """ - return self.m_validSources - - @smproperty.xml( """ - - """ ) - def a02GroupFlow( self: Self ) -> None: - """Organize groups.""" - self.Modified() - - @smproperty.stringvector( name="CurvesAvailable", information_only="1" ) - def b00GetCurvesAvailable( self: Self ) -> list[ str ]: - """Get the available curves. - - Returns: - list[str]: list of curves. - """ - return self.m_validColumnsDataframe - - @smproperty.stringvector( name="Abscissa", number_of_elements="1" ) - @smdomain.xml( """ - - """ ) - def b01SetVariableName( self: Self, name: str ) -> None: - """Set the name of X axis variable. - - Args: - name: name of the variable. - """ - self.m_userChoices[ "variableName" ] = name - self.Modified() - - @smproperty.dataarrayselection( name="Ordinate" ) - def b02GetCurvesToPlot( self: Self ) -> vtkDataArraySelection: - """Get the curves to plot. - - Returns: - vtkDataArraySelection: data to plot. - """ - return self.m_curvesToPlot - - @smproperty.intvector( name="PlotsPerRegion", label="PlotsPerRegion", default_values=0 ) - @smdomain.xml( """""" ) - def b03SetPlotsPerRegion( self: Self, boolean: bool ) -> None: - """Set plot per region option. - - Args: - boolean: user choice. - """ - self.m_userChoices[ "plotRegions" ] = boolean - self.Modified() - - @smproperty.xml( """ - - - - """ ) - def b04GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( - name="CurveConvention", - label="Select Curves To Change Convention", - default_values=0, - ) - @smdomain.xml( """""" ) - def b05SetCurveConvention( self: Self, boolean: bool ) -> None: - """Select Curves To Change Convention. 
- - Args: - boolean: user choice. - """ - self.m_multiplyCurves = boolean - - @smproperty.xml( """ - - """ ) - def b06GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.dataarrayselection( name="CurveConventionSelection" ) - def b07GetCurveConvention( self: Self ) -> vtkDataArraySelection: - """Get the curves to change convention. - - Returns: - vtkDataArraySelection: selected curves to change convention. - """ - return self.m_curvesMinus1 - - @smproperty.xml( """ - - - """ ) - def b08GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( name="EditAxisProperties", label="Edit Axis Properties", default_values=0 ) - @smdomain.xml( """""" ) - def c01SetEditAxisProperties( self: Self, boolean: bool ) -> None: - """Set option to edit axis properties. - - Args: - boolean (bool): user choice. - """ - self.Modified() - - @smproperty.xml( """ - - """ ) - def c02GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( name="ReverseXY", label="Reverse XY Axes", default_values=0 ) - @smdomain.xml( """""" ) - def c02SetReverseXY( self: Self, boolean: bool ) -> None: - """Set option to reverse X and Y axes. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "reverseXY" ] = boolean - self.Modified() - - @smproperty.intvector( name="LogScaleX", label="X Axis Log Scale", default_values=0 ) - @smdomain.xml( """""" ) - def c03SetReverseXY( self: Self, boolean: bool ) -> None: - """Set option to log scale for X axis. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "logScaleX" ] = boolean - self.Modified() - - @smproperty.intvector( name="LogScaleY", label="Y Axis Log Scale", default_values=0 ) - @smdomain.xml( """""" ) - def c04SetReverseXY( self: Self, boolean: bool ) -> None: - """Set option to log scale for Y axis. - - Args: - boolean (bool): user choice. 
- """ - self.m_userChoices[ "logScaleY" ] = boolean - self.Modified() - - @smproperty.intvector( name="Minorticks", label="Display Minor ticks", default_values=0 ) - @smdomain.xml( """""" ) - def c05SetMinorticks( self: Self, boolean: bool ) -> None: - """Set option to display minor ticks. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "minorticks" ] = boolean - self.Modified() - - @smproperty.intvector( name="CustomAxisLim", label="Use Custom Axis Limits", default_values=0 ) - @smdomain.xml( """""" ) - def c06SetCustomAxisLim( self: Self, boolean: bool ) -> None: - """Set option to define axis limits. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "customAxisLim" ] = boolean - self.Modified() - - @smproperty.doublevector( name="LimMinX", label="X min", default_values=-1e36 ) - def c07LimMinX( self: Self, value: float ) -> None: - """Set X axis min. - - Args: - value (float): X axis min. - """ - value2: Union[ float, None ] = value - if value2 == -1e36: - value2 = None - self.m_userChoices[ "limMinX" ] = value2 - self.Modified() - - @smproperty.doublevector( name="LimMaxX", label="X max", default_values=1e36 ) - def c08LimMaxX( self: Self, value: float ) -> None: - """Set X axis max. - - Args: - value (float): X axis max. - """ - value2: Union[ float, None ] = value - if value2 == 1e36: - value2 = None - self.m_userChoices[ "limMaxX" ] = value2 - self.Modified() - - @smproperty.doublevector( name="LimMinY", label="Y min", default_values=-1e36 ) - def c09LimMinY( self: Self, value: float ) -> None: - """Set Y axis min. - - Args: - value (float): Y axis min. - """ - value2: Union[ float, None ] = value - if value2 == -1e36: - value2 = None - self.m_userChoices[ "limMinY" ] = value2 - self.Modified() - - @smproperty.doublevector( name="LimMaxY", label="Y max", default_values=1e36 ) - def c10LimMaxY( self: Self, value: float ) -> None: - """Set Y axis max. - - Args: - value (float): Y axis max. 
- """ - value2: Union[ float, None ] = value - if value2 == 1e36: - value2 = None - self.m_userChoices[ "limMaxY" ] = value2 - self.Modified() - - @smproperty.xml( """ - - - - - - """ ) - def c11GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.xml( """ - - - - - - - """ ) - def c12GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( name="DisplayTitle", label="Display Title", default_values=1 ) - @smdomain.xml( """""" ) - def d01SetDisplayTitle( self: Self, boolean: bool ) -> None: - """Set option to display title. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "displayTitle" ] = boolean - self.Modified() - - @smproperty.xml( """ - - """ ) - def d02GroupFlow( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.stringvector( name="Title", default_values="title1" ) - def d03SetTitlePlot( self: Self, title: str ) -> None: - """Set title. - - Args: - title (str): title. - """ - self.m_userChoices[ "title" ] = title - self.Modified() - - @smproperty.intvector( name="TitleStyle", label="Title Style", default_values=0 ) - @smdomain.xml( optionEnumToXml( cast( OptionSelectionEnum, FontStyleEnum ) ) ) - def d04SetTitleStyle( self: Self, value: int ) -> None: - """Set title font style. - - Args: - value (int): title font style index in FontStyleEnum. - """ - choice = list( FontStyleEnum )[ value ] - self.m_userChoices[ "titleStyle" ] = choice.optionValue - self.Modified() - - @smproperty.intvector( name="TitleWeight", label="Title Weight", default_values=1 ) - @smdomain.xml( optionEnumToXml( cast( OptionSelectionEnum, FontWeightEnum ) ) ) - def d05SetTitleWeight( self: Self, value: int ) -> None: - """Set title font weight. - - Args: - value (int): title font weight index in FontWeightEnum. 
- """ - choice = list( FontWeightEnum )[ value ] - self.m_userChoices[ "titleWeight" ] = choice.optionValue - self.Modified() - - @smproperty.intvector( name="TitleSize", label="Title Size", default_values=12 ) - @smdomain.xml( """""" ) - def d06SetTitleSize( self: Self, size: float ) -> None: - """Set title font size. - - Args: - size (float): title font size between 1 and 50. - """ - self.m_userChoices[ "titleSize" ] = size - self.Modified() - - @smproperty.xml( """ - panel_visibility="advanced"> - - - - - - """ ) - def d07PropertyGroup( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( name="DisplayLegend", label="Display Legend", default_values=1 ) - @smdomain.xml( """""" ) - def e00SetDisplayLegend( self: Self, boolean: bool ) -> None: - """Set option to display legend. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "displayLegend" ] = boolean - self.Modified() - - @smproperty.xml( """ - - """ ) - def e01PropertyGroup( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( name="LegendPosition", label="Legend Position", default_values=0 ) - @smdomain.xml( optionEnumToXml( cast( OptionSelectionEnum, LegendLocationEnum ) ) ) - def e02SetLegendPosition( self: Self, value: int ) -> None: - """Set legend position. - - Args: - value (int): legend position index in LegendLocationEnum. - """ - choice = list( LegendLocationEnum )[ value ] - self.m_userChoices[ "legendPosition" ] = choice.optionValue - self.Modified() - - @smproperty.intvector( name="LegendSize", label="Legend Size", default_values=10 ) - @smdomain.xml( """""" ) - def e03SetLegendSize( self: Self, size: float ) -> None: - """Set legend font size. - - Args: - size (float): legend font size between 1 and 50. 
- """ - self.m_userChoices[ "legendSize" ] = size - self.Modified() - - @smproperty.intvector( name="RemoveJobName", label="Remove Job Name in legend", default_values=1 ) - @smdomain.xml( """""" ) - def e04SetRemoveJobName( self: Self, boolean: bool ) -> None: - """Set option to remove job names from legend. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "removeJobName" ] = boolean - self.Modified() - - @smproperty.intvector( - name="RemoveRegionsName", - label="Remove Regions Name in legend", - default_values=0, - ) - @smdomain.xml( """""" ) - def e05SetRemoveRegionsName( self: Self, boolean: bool ) -> None: - """Set option to remove region names from legend. - - Args: - boolean (bool): user choice. - """ - self.m_userChoices[ "removeRegions" ] = boolean - self.Modified() - - @smproperty.xml( """ - - - - - - """ ) - def e06PropertyGroup( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.intvector( name="ModifyCurvesAspect", label="Edit Curve Graphics", default_values=1 ) - @smdomain.xml( """""" ) - def f01SetModifyCurvesAspect( self: Self, boolean: bool ) -> None: - """Set option to change curve aspects. - - Args: - boolean (bool): user choice. - """ - self.m_modifyCurvesAspect = boolean - - @smproperty.xml( """ - - """ ) - def f02PropertyGroup( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.stringvector( name="CurvesInfo", information_only="1" ) - def f03GetCurveNames( self: Self ) -> list[ str ]: - """Get curves to modify aspects. - - Returns: - set[str]: curves to modify aspects. - """ - return list( self.m_curvesToModify ) - - # TODO: still usefull? - @smproperty.stringvector( name="CurveToModify", number_of_elements="1" ) - @smdomain.xml( """ - - """ ) - def f04SetCircleID( self: Self, value: str ) -> None: - """Set m_curveToUse. 
- - Args: - value (float): value of m_curveToUse - """ - self.m_curveToUse = value - self.Modified() - - def getCurveToUse( self: Self ) -> str: - """Get m_curveToUse.""" - return self.m_curveToUse - - @smproperty.intvector( name="LineStyle", label="Line Style", default_values=1 ) - @smdomain.xml( optionEnumToXml( cast( OptionSelectionEnum, LineStyleEnum ) ) ) - def f05SetLineStyle( self: Self, value: int ) -> None: - """Set line style. - - Args: - value (int): line style index in LineStyleEnum - """ - choice = list( LineStyleEnum )[ value ] - self.m_lineStyle = choice.optionValue - self.Modified() - - @smproperty.doublevector( name="LineWidth", default_values=1.0 ) - @smdomain.xml( """""" ) - def f06SetLineWidth( self: Self, value: float ) -> None: - """Set line width. - - Args: - value (float): line width between 1 and 10. - """ - self.m_lineWidth = value - self.Modified() - - @smproperty.intvector( name="MarkerStyle", label="Marker Style", default_values=0 ) - @smdomain.xml( optionEnumToXml( cast( LegendLocationEnum, MarkerStyleEnum ) ) ) - def f07SetMarkerStyle( self: Self, value: int ) -> None: - """Set marker style. - - Args: - value (int): Marker style index in MarkerStyleEnum - """ - choice = list( MarkerStyleEnum )[ value ] - self.m_markerStyle = choice.optionValue - self.Modified() - - @smproperty.doublevector( name="MarkerSize", default_values=1.0 ) - @smdomain.xml( """""" ) - def f08SetMarkerSize( self: Self, value: float ) -> None: - """Set marker size. - - Args: - value (float): size of markers between 1 and 30. - """ - self.m_markerSize = value - self.Modified() - - @smproperty.xml( """ - - - - - - - - """ ) - def f09PropertyGroup( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - @smproperty.doublevector( name="ColorEnvelop", default_values=[ 0, 0, 0 ], number_of_elements=3 ) - @smdomain.xml( """""" ) - def f10SetColor( self: Self, value0: float, value1: float, value2: float ) -> None: - """Set envelope color. 
- - Args: - value0 (float): Red color between 0 and 1. - - value1 (float): Green color between 0 and 1. - - value2 (float): Blue color between 0 and 1. - """ - self.m_color = ( value0, value1, value2 ) - self.Modified() - - @smproperty.xml( """ - - - """ ) - def f11PropertyGroup( self: Self ) -> None: - """Organized groups.""" - self.Modified() - - def getCurveAspect( self: Self, ) -> tuple[ tuple[ float, float, float ], str, float, str, float ]: - """Get curve aspect properties according to user choices. - - Returns: - tuple: (color, linestyle, linewidth, marker, markersize) - """ - return ( - self.m_color, - self.m_lineStyle, - self.m_lineWidth, - self.m_markerStyle, - self.m_markerSize, - ) - - def FillInputPortInformation( self: Self, port: int, info: vtkInformation ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestInformation. - - Args: - port (int): input port - info (vtkInformationVector): info - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. - """ - if port == 0: - info.Set( self.INPUT_REQUIRED_DATA_TYPE(), "vtkDataObject" ) - else: - info.Set( self.INPUT_REQUIRED_DATA_TYPE(), "vtkDataObject" ) - return 1 - - def RequestDataObject( - self: Self, - request: vtkInformation, - inInfoVec: list[ vtkInformationVector ], - outInfoVec: vtkInformationVector, - ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestDataObject. - - Args: - request (vtkInformation): request - inInfoVec (list[vtkInformationVector]): input objects - outInfoVec (vtkInformationVector): output objects - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. 
- """ - inData = self.GetInputData( inInfoVec, 0, 0 ) - outData = self.GetOutputData( outInfoVec, 0 ) - assert inData is not None - if outData is None or ( not outData.IsA( inData.GetClassName() ) ): - outData = inData.NewInstance() - outInfoVec.GetInformationObject( 0 ).Set( outData.DATA_OBJECT(), outData ) - return super().RequestDataObject( request, inInfoVec, outInfoVec ) # type: ignore[no-any-return] - - def RequestData( - self: Self, - request: vtkInformation, # noqa: F841 - inInfoVec: list[ vtkInformationVector ], # noqa: F841 - outInfoVec: vtkInformationVector, # noqa: F841 - ) -> int: - """Inherited from VTKPythonAlgorithmBase::RequestData. - - Args: - request (vtkInformation): request - inInfoVec (list[vtkInformationVector]): input objects - outInfoVec (vtkInformationVector): output objects - - Returns: - int: 1 if calculation successfully ended, 0 otherwise. - """ - # pythonViewGeneration - assert self.m_pythonView is not None, "No Python View was found." - viewSize = GetActiveView().ViewSize - self.m_userChoices[ "ratio" ] = viewSize[ 0 ] / viewSize[ 1 ] - self.defineInputNames() - self.defineUserChoicesCurves() - self.defineCurvesAspect() - self.m_pythonView.Script = self.buildPythonViewScript() - Render() - return 1 diff --git a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py index 85aa088f..4857c477 100644 --- a/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py +++ b/geos-posp/src/PVplugins/PVSurfaceGeomechanics.py @@ -13,6 +13,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos.utils.PhysicalConstants import ( DEFAULT_FRICTION_ANGLE_DEG, diff --git a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py index 1783ddbd..046ba939 100644 --- a/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py +++ 
b/geos-posp/src/PVplugins/PVTransferAttributesVolumeSurface.py @@ -12,6 +12,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) +import PVplugins # noqa: F401 + from geos.utils.Logger import Logger, getLogger from geos_posp.filters.TransferAttributesVolumeSurface import ( TransferAttributesVolumeSurface, ) diff --git a/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py b/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py index 65cf4672..aa205f16 100644 --- a/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py +++ b/geos-posp/src/geos_posp/filters/AttributeMappingFromCellId.py @@ -102,7 +102,7 @@ def RequestDataObject( if outData is None or ( not outData.IsA( inData.GetClassName() ) ): outData = inData.NewInstance() outInfoVec.GetInformationObject( 0 ).Set( outData.DATA_OBJECT(), outData ) - return super().RequestDataObject( request, inInfoVec, outInfoVec ) # type: ignore + return super().RequestDataObject( request, inInfoVec, outInfoVec ) # type: ignore[no-any-return] def RequestData( self: Self, diff --git a/geos-posp/src/geos_posp/processing/multiblockInpectorTreeFunctions.py b/geos-posp/src/geos_posp/processing/multiblockInpectorTreeFunctions.py index 8b7e71c5..189a9a85 100644 --- a/geos-posp/src/geos_posp/processing/multiblockInpectorTreeFunctions.py +++ b/geos-posp/src/geos_posp/processing/multiblockInpectorTreeFunctions.py @@ -1,7 +1,7 @@ # SPDX-License-Identifier: Apache-2.0 # SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
# SPDX-FileContributor: Martin Lemay -from typing import Union +from typing import Union, cast from vtkmodules.vtkCommonDataModel import ( vtkCompositeDataSet, @@ -36,7 +36,7 @@ def getBlockName( input: Union[ vtkMultiBlockDataSet, vtkCompositeDataSet ] ) -> block: vtkDataObject = iter.GetCurrentDataObject() nbBlocks: int = 99 if isinstance( block, vtkMultiBlockDataSet ): - block1: vtkMultiBlockDataSet = block + block1: vtkMultiBlockDataSet = cast( vtkMultiBlockDataSet, block ) nbBlocks = block1.GetNumberOfBlocks() # stop if multiple children diff --git a/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py b/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py index 18d30d83..ca36a4b9 100644 --- a/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py +++ b/geos-posp/src/geos_posp/visu/PVUtils/paraviewTreatments.py @@ -468,7 +468,7 @@ def getVtkOriginalCellIds( mesh: Union[ vtkMultiBlockDataSet, vtkCompositeDataSe list[str]: ids of the cells. """ # merge blocks for vtkCompositeDataSet - mesh2: vtkUnstructuredGrid = mergeFilterPV( mesh, True ) + mesh2: vtkUnstructuredGrid = mergeFilterPV( mesh ) name: str = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName assert isAttributeInObject( mesh2, name, False ), f"Attribute {name} is not in the mesh." return [ str( int( ide ) ) for ide in getArrayInObject( mesh2, name, False ) ] @@ -563,18 +563,12 @@ def getTimeStepIndex( time: float, timeSteps: npt.NDArray[ np.float64 ] ) -> int return int( indexes[ 0 ] ) -def mergeFilterPV( - input: vtkDataObject, - keepPartialAttributes: bool = False, -) -> vtkUnstructuredGrid: +def mergeFilterPV( input: vtkDataObject, ) -> vtkUnstructuredGrid: """Apply Paraview merge block filter. Args: input (vtkMultiBlockDataSet | vtkCompositeDataSet | vtkDataObject): composite object to merge blocks - keepPartialAttributes (bool): if True, keep partial attributes after merge. - - Defaults to False. 
Returns: vtkUnstructuredGrid: merged block object diff --git a/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py b/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py index 4e4c7b57..2aa543df 100644 --- a/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py +++ b/geos-posp/src/geos_posp/visu/mohrCircles/__init__.py @@ -1,3 +1,2 @@ -MOHR_CIRCLE_PATH: str = "visu/mohrCircles/" +MOHR_CIRCLE_PATH: str = "geos_posp/visu/mohrCircles/" MOHR_CIRCLE_ANALYSIS_MAIN = "mainMohrCircles.py" -MOHR_CIRCLE_EVOLUTION_MAIN = "mainMohrCircleEvolution.py" diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py b/geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py deleted file mode 100644 index c8dd4860..00000000 --- a/geos-posp/src/geos_posp/visu/pythonViewUtils/Figure2DGenerator.py +++ /dev/null @@ -1,137 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto - -from typing import Any - -import pandas as pd # type: ignore[import-untyped] -from geos.utils.Logger import Logger, getLogger -from matplotlib import axes, figure, lines # type: ignore[import-untyped] -from matplotlib.font_manager import ( # type: ignore[import-untyped] - FontProperties, # type: ignore[import-untyped] -) -from typing_extensions import Self - -import geos_posp.visu.pythonViewUtils.functionsFigure2DGenerator as fcts - - -class Figure2DGenerator: - - def __init__( self: Self, dataframe: pd.DataFrame, userChoices: dict[ str, list[ str ] ] ) -> None: - """Utility to create cross plots using Python View. - - We want to plot f(X) = Y where in this class, - "X" will be called "variable", "Y" will be called "curves". - - Args: - dataframe (pd.DataFrame): data to plot - userChoices (dict[str, list[str]]): user choices. 
- """ - self.m_dataframe: pd.DataFrame = dataframe - self.m_userChoices: dict[ str, Any ] = userChoices - self.m_fig: figure.Figure - self.m_axes: list[ axes._axes.Axes ] = [] - self.m_lines: list[ lines.Line2D ] = [] - self.m_labels: list[ str ] = [] - self.m_logger: Logger = getLogger( "Python View Configurator" ) - - try: - # apply minus 1 multiplication on certain columns - self.initMinus1Multiplication() - # defines m_fig, m_axes, m_lines and m_lables - self.plotInitialFigure() - # then to edit and customize the figure - self.enhanceFigure() - self.m_logger.info( "Data were successfully plotted." ) - - except Exception as e: - mess: str = "Plot creation failed due to:" - self.m_logger.critical( mess ) - self.m_logger.critical( e, exc_info=True ) - - def initMinus1Multiplication( self: Self ) -> None: - """Multiply by -1 certain columns of the input dataframe.""" - df: pd.DataFrame = self.m_dataframe.copy( deep=True ) - minus1CurveNames: list[ str ] = self.m_userChoices[ "curveConvention" ] - for name in minus1CurveNames: - df[ name ] = df[ name ] * ( -1 ) - self.m_dataframe = df - - def enhanceFigure( self: Self ) -> None: - """Apply all the enhancement features to the initial figure.""" - self.changeTitle() - self.changeMinorticks() - self.changeAxisScale() - self.changeAxisLimits() - - def plotInitialFigure( self: Self ) -> None: - """Generates a figure and axes objects from matplotlib. - - The figure plots all the curves along the X or Y axis, with legend and - label for X and Y. 
- """ - if self.m_userChoices[ "plotRegions" ]: - if not self.m_userChoices[ "reverseXY" ]: - ( fig, ax_all, lines, labels ) = fcts.multipleSubplots( self.m_dataframe, self.m_userChoices ) - else: - ( fig, ax_all, lines, labels ) = fcts.multipleSubplotsInverted( self.m_dataframe, self.m_userChoices ) - else: - if not self.m_userChoices[ "reverseXY" ]: - ( fig, ax_all, lines, labels ) = fcts.oneSubplot( self.m_dataframe, self.m_userChoices ) - else: - ( fig, ax_all, lines, labels ) = fcts.oneSubplotInverted( self.m_dataframe, self.m_userChoices ) - self.m_fig = fig - self.m_axes = ax_all - self.m_lines = lines - self.m_labels = labels - - def changeTitle( self: Self ) -> None: - """Update title of the first axis of the figure based on user choices.""" - if self.m_userChoices[ "displayTitle" ]: - title: str = self.m_userChoices[ "title" ] - fontTitle: FontProperties = fcts.buildFontTitle( self.m_userChoices ) - self.m_fig.suptitle( title, fontproperties=fontTitle ) - - def changeMinorticks( self: Self ) -> None: - """Set the minorticks on or off for every axes.""" - choice: bool = self.m_userChoices[ "minorticks" ] - if choice: - for ax in self.m_axes: - ax.minorticks_on() - else: - for ax in self.m_axes: - ax.minorticks_off() - - def changeAxisScale( self: Self ) -> None: - """Set the minorticks on or off for every axes.""" - for ax in self.m_axes: - if self.m_userChoices[ "logScaleX" ]: - ax.set_xscale( "log" ) - if self.m_userChoices[ "logScaleY" ]: - ax.set_yscale( "log" ) - - def changeAxisLimits( self: Self ) -> None: - """Update axis limits.""" - if self.m_userChoices[ "customAxisLim" ]: - for ax in self.m_axes: - xmin, xmax = ax.get_xlim() - if self.m_userChoices[ "limMinX" ] is not None: - xmin = self.m_userChoices[ "limMinX" ] - if self.m_userChoices[ "limMaxX" ] is not None: - xmax = self.m_userChoices[ "limMaxX" ] - ax.set_xlim( xmin, xmax ) - - ymin, ymax = ax.get_ylim() - if self.m_userChoices[ "limMinY" ] is not None: - ymin = self.m_userChoices[ 
"limMinY" ] - if self.m_userChoices[ "limMaxY" ] is not None: - ymax = self.m_userChoices[ "limMaxY" ] - ax.set_ylim( ymin, ymax ) - - def getFigure( self: Self ) -> figure.Figure: - """Acces the m_fig attribute. - - Returns: - figure.Figure: Figure containing all the plots. - """ - return self.m_fig diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py b/geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py deleted file mode 100644 index 84b2a6d8..00000000 --- a/geos-posp/src/geos_posp/visu/pythonViewUtils/functionsFigure2DGenerator.py +++ /dev/null @@ -1,1375 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -import math -from typing import Any - -import matplotlib.pyplot as plt # type: ignore[import-untyped] -import numpy as np -import numpy.typing as npt -import pandas as pd # type: ignore[import-untyped] -from matplotlib import axes, figure, lines # type: ignore[import-untyped] -from matplotlib.font_manager import ( # type: ignore[import-untyped] - FontProperties, # type: ignore[import-untyped] -) - -import geos_posp.processing.geosLogReaderFunctions as fcts -""" -Plotting tools for 2D figure and axes generation. -""" - - -def oneSubplot( - df: pd.DataFrame, - userChoices: dict[ str, Any ] ) -> tuple[ figure.Figure, list[ axes.Axes ], list[ lines.Line2D ], list[ str ] ]: - """Created a single subplot. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. 
- """ - curveNames: list[ str ] = userChoices[ "curveNames" ] - variableName: str = userChoices[ "variableName" ] - curvesAspect: dict[ str, tuple[ tuple[ float, float, float ], str, float, str, - float ] ] = userChoices[ "curvesAspect" ] - associatedProperties: dict[ str, list[ str ] ] = associatePropertyToAxeType( curveNames ) - fig, ax = plt.subplots( constrained_layout=True ) - all_ax: list[ axes.Axes ] = setupAllAxes( ax, variableName, associatedProperties, True ) - lineList: list[ lines.Line2D ] = [] - labels: list[ str ] = [] - cpt_cmap: int = 0 - x: npt.NDArray[ np.float64 ] = df[ variableName ].to_numpy() - for cpt_ax, ( ax_name, propertyNames ) in enumerate( associatedProperties.items() ): - ax_to_use: axes.Axes = setupAxeToUse( all_ax, cpt_ax, ax_name, False ) - for propName in propertyNames: - y: npt.NDArray[ np.float64 ] = df[ propName ].to_numpy() - plotAxe( ax_to_use, x, y, propName, cpt_cmap, curvesAspect ) - cpt_cmap += 1 - new_lines, new_labels = ax_to_use.get_legend_handles_labels() - lineList += new_lines # type: ignore[arg-type] - labels += new_labels - labels, lineList = smartLabelsSorted( labels, lineList, userChoices ) - if userChoices[ "displayLegend" ]: - ax.legend( - lineList, - labels, - loc=userChoices[ "legendPosition" ], - fontsize=userChoices[ "legendSize" ], - ) - ax.grid() - return ( fig, all_ax, lineList, labels ) - - -def oneSubplotInverted( - df: pd.DataFrame, - userChoices: dict[ str, Any ] ) -> tuple[ figure.Figure, list[ axes.Axes ], list[ lines.Line2D ], list[ str ] ]: - """Created a single subplot with inverted X Y axes. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. 
- - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. - """ - curveNames: list[ str ] = userChoices[ "curveNames" ] - variableName: str = userChoices[ "variableName" ] - curvesAspect: dict[ str, tuple[ tuple[ float, float, float ], str, float, str, - float ] ] = userChoices[ "curvesAspect" ] - associatedProperties: dict[ str, list[ str ] ] = associatePropertyToAxeType( curveNames ) - fig, ax = plt.subplots( constrained_layout=True ) - all_ax: list[ axes.Axes ] = setupAllAxes( ax, variableName, associatedProperties, False ) - linesList: list[ lines.Line2D ] = [] - labels: list[ str ] = [] - cpt_cmap: int = 0 - y: npt.NDArray[ np.float64 ] = df[ variableName ].to_numpy() - for cpt_ax, ( ax_name, propertyNames ) in enumerate( associatedProperties.items() ): - ax_to_use: axes.Axes = setupAxeToUse( all_ax, cpt_ax, ax_name, True ) - for propName in propertyNames: - x: npt.NDArray[ np.float64 ] = df[ propName ].to_numpy() - plotAxe( ax_to_use, x, y, propName, cpt_cmap, curvesAspect ) - cpt_cmap += 1 - new_lines, new_labels = ax_to_use.get_legend_handles_labels() - linesList += new_lines # type: ignore[arg-type] - labels += new_labels - labels, linesList = smartLabelsSorted( labels, linesList, userChoices ) - if userChoices[ "displayLegend" ]: - ax.legend( - linesList, - labels, - loc=userChoices[ "legendPosition" ], - fontsize=userChoices[ "legendSize" ], - ) - ax.grid() - return ( fig, all_ax, linesList, labels ) - - -def multipleSubplots( - df: pd.DataFrame, - userChoices: dict[ str, Any ] ) -> tuple[ figure.Figure, list[ axes.Axes ], list[ lines.Line2D ], list[ str ] ]: - """Created multiple subplots. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName". 
- userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. - """ - curveNames: list[ str ] = userChoices[ "curveNames" ] - variableName: str = userChoices[ "variableName" ] - curvesAspect: dict[ str, tuple[ tuple[ float, float, float ], str, float, str, - float ] ] = userChoices[ "curvesAspect" ] - ratio: float = userChoices[ "ratio" ] - assosIdentifiers: dict[ str, dict[ str, list[ str ] ] ] = associationIdentifiers( curveNames ) - nbr_suplots: int = len( assosIdentifiers.keys() ) - # if only one subplots needs to be created - if nbr_suplots == 1: - return oneSubplot( df, userChoices ) - - layout: tuple[ int, int, int ] = smartLayout( nbr_suplots, ratio ) - fig, axs0 = plt.subplots( layout[ 0 ], layout[ 1 ], constrained_layout=True ) - axs: list[ axes.Axes ] = axs0.flatten().tolist() # type: ignore[union-attr] - for i in range( layout[ 2 ] ): - fig.delaxes( axs[ -( i + 1 ) ] ) - all_lines: list[ lines.Line2D ] = [] - all_labels: list[ str ] = [] - # first loop for subplots - propertiesExtremas: dict[ str, tuple[ float, float ] ] = ( findExtremasPropertiesForAssociatedIdentifiers( - df, assosIdentifiers, True ) ) - for j, identifier in enumerate( assosIdentifiers.keys() ): - first_ax: axes.Axes = axs[ j ] - associatedProperties: dict[ str, list[ str ] ] = assosIdentifiers[ identifier ] - all_ax: list[ axes.Axes ] = setupAllAxes( first_ax, variableName, associatedProperties, True ) - axs += all_ax[ 1: ] - linesList: list[ lines.Line2D ] = [] - labels: list[ str ] = [] - cpt_cmap: int = 0 - x: npt.NDArray[ np.float64 ] = df[ variableName ].to_numpy() - # second loop for axes per subplot - for cpt_ax, ( ax_name, propertyNames ) in enumerate( associatedProperties.items() ): - ax_to_use: axes.Axes = setupAxeToUse( all_ax, cpt_ax, ax_name, False ) - for propName in propertyNames: - y: npt.NDArray[ np.float64 ] = 
df[ propName ].to_numpy() - plotAxe( ax_to_use, x, y, propName, cpt_cmap, curvesAspect ) - ax_to_use.set_ylim( *propertiesExtremas[ ax_name ] ) - cpt_cmap += 1 - new_lines, new_labels = ax_to_use.get_legend_handles_labels() - linesList += new_lines # type: ignore[arg-type] - all_lines += new_lines # type: ignore[arg-type] - labels += new_labels - all_labels += new_labels - labels, linesList = smartLabelsSorted( labels, linesList, userChoices ) - if userChoices[ "displayLegend" ]: - first_ax.legend( - linesList, - labels, - loc=userChoices[ "legendPosition" ], - fontsize=userChoices[ "legendSize" ], - ) - if userChoices[ "displayTitle" ]: - first_ax.set_title( identifier, fontsize=10 ) - first_ax.grid() - return ( fig, axs, all_lines, all_labels ) - - -def multipleSubplotsInverted( - df: pd.DataFrame, - userChoices: dict[ str, Any ] ) -> tuple[ figure.Figure, list[ axes.Axes ], list[ lines.Line2D ], list[ str ] ]: - """Created multiple subplots with inverted X Y axes. - - From a dataframe, knowing which curves to plot along which variable, - generates a fig and its list of axes with the data plotted. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName". - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[figure.Figure, list[axes.Axes], - list[lines.Line2D] , list[str]]: the fig and its list of axes. 
- """ - curveNames: list[ str ] = userChoices[ "curveNames" ] - variableName: str = userChoices[ "variableName" ] - curvesAspect: dict[ str, tuple[ tuple[ float, float, float ], str, float, str, - float ] ] = userChoices[ "curvesAspect" ] - ratio: float = userChoices[ "ratio" ] - assosIdentifiers: dict[ str, dict[ str, list[ str ] ] ] = associationIdentifiers( curveNames ) - nbr_suplots: int = len( assosIdentifiers.keys() ) - # if only one subplots needs to be created - if nbr_suplots == 1: - return oneSubplotInverted( df, userChoices ) - - layout: tuple[ int, int, int ] = smartLayout( nbr_suplots, ratio ) - fig, axs0 = plt.subplots( layout[ 0 ], layout[ 1 ], constrained_layout=True ) - axs: list[ axes.Axes ] = axs0.flatten().tolist() # type: ignore[union-attr] - for i in range( layout[ 2 ] ): - fig.delaxes( axs[ -( i + 1 ) ] ) - all_lines: list[ lines.Line2D ] = [] - all_labels: list[ str ] = [] - # first loop for subplots - propertiesExtremas: dict[ str, tuple[ float, float ] ] = ( findExtremasPropertiesForAssociatedIdentifiers( - df, assosIdentifiers, True ) ) - for j, identifier in enumerate( assosIdentifiers.keys() ): - first_ax: axes.Axes = axs[ j ] - associatedProperties: dict[ str, list[ str ] ] = assosIdentifiers[ identifier ] - all_ax: list[ axes.Axes ] = setupAllAxes( first_ax, variableName, associatedProperties, False ) - axs += all_ax[ 1: ] - linesList: list[ lines.Line2D ] = [] - labels: list[ str ] = [] - cpt_cmap: int = 0 - y: npt.NDArray[ np.float64 ] = df[ variableName ].to_numpy() - # second loop for axes per subplot - for cpt_ax, ( ax_name, propertyNames ) in enumerate( associatedProperties.items() ): - ax_to_use: axes.Axes = setupAxeToUse( all_ax, cpt_ax, ax_name, True ) - for propName in propertyNames: - x: npt.NDArray[ np.float64 ] = df[ propName ].to_numpy() - plotAxe( ax_to_use, x, y, propName, cpt_cmap, curvesAspect ) - ax_to_use.set_xlim( propertiesExtremas[ ax_name ] ) - cpt_cmap += 1 - new_lines, new_labels = 
ax_to_use.get_legend_handles_labels() - linesList += new_lines # type: ignore[arg-type] - all_lines += new_lines # type: ignore[arg-type] - labels += new_labels - all_labels += new_labels - labels, linesList = smartLabelsSorted( labels, linesList, userChoices ) - if userChoices[ "displayLegend" ]: - first_ax.legend( - linesList, - labels, - loc=userChoices[ "legendPosition" ], - fontsize=userChoices[ "legendSize" ], - ) - if userChoices[ "displayTitle" ]: - first_ax.set_title( identifier, fontsize=10 ) - first_ax.grid() - return ( fig, axs, all_lines, all_labels ) - - -def setupAllAxes( - first_ax: axes.Axes, - variableName: str, - associatedProperties: dict[ str, list[ str ] ], - axisX: bool, -) -> list[ axes.Axes ]: - """Modify axis name and ticks avec X or Y axis of all subplots. - - Args: - first_ax (axes.Axes): subplot id. - variableName (str): name of the axis. - associatedProperties (dict[str, list[str]]): Name of the properties - axisX (bool): X (True) or Y (False) axis to modify. 
- - Returns: - list[axes.Axes]: modified subplots - """ - all_ax: list[ axes.Axes ] = [ first_ax ] - if axisX: - first_ax.set_xlabel( variableName ) - first_ax.ticklabel_format( style="sci", axis="x", scilimits=( 0, 0 ), useMathText=True ) - for i in range( 1, len( associatedProperties.keys() ) ): - second_ax = first_ax.twinx() - assert isinstance( second_ax, axes.Axes ) - all_ax.append( second_ax ) - all_ax[ i ].spines[ "right" ].set_position( ( "axes", 1 + 0.07 * ( i - 1 ) ) ) - all_ax[ i ].tick_params( axis="y", which="both", left=False, right=True ) - all_ax[ i ].yaxis.set_ticks_position( "right" ) - all_ax[ i ].yaxis.offsetText.set_position( ( 1.04 + 0.07 * ( i - 1 ), 0 ) ) - first_ax.yaxis.offsetText.set_position( ( -0.04, 0 ) ) - else: - first_ax.set_ylabel( variableName ) - first_ax.ticklabel_format( style="sci", axis="y", scilimits=( 0, 0 ), useMathText=True ) - for i in range( 1, len( associatedProperties.keys() ) ): - second_ax = first_ax.twiny() - assert isinstance( second_ax, axes.Axes ) - all_ax.append( second_ax ) - all_ax[ i ].spines[ "bottom" ].set_position( ( "axes", -0.08 * i ) ) - all_ax[ i ].xaxis.set_label_position( "bottom" ) - all_ax[ i ].tick_params( axis="x", which="both", bottom=True, top=False ) - all_ax[ i ].xaxis.set_ticks_position( "bottom" ) - return all_ax - - -def setupAxeToUse( all_ax: list[ axes.Axes ], axeId: int, ax_name: str, axisX: bool ) -> axes.Axes: - """Modify axis name and ticks avec X or Y axis of subplot axeId in all_ax. - - Args: - all_ax (list[axes.Axes]): list of all subplots - axeId (int): id of the subplot - ax_name (str): name of the X or Y axis - axisX (bool): X (True) or Y (False) axis to modify. 
- - Returns: - axes.Axes: modified subplot - """ - ax_to_use: axes.Axes = all_ax[ axeId ] - if axisX: - ax_to_use.set_xlabel( ax_name ) - ax_to_use.ticklabel_format( style="sci", axis="x", scilimits=( 0, 0 ), useMathText=True ) - else: - ax_to_use.set_ylabel( ax_name ) - ax_to_use.ticklabel_format( style="sci", axis="y", scilimits=( 0, 0 ), useMathText=True ) - return ax_to_use - - -def plotAxe( - ax_to_use: axes.Axes, - x: npt.NDArray[ np.float64 ], - y: npt.NDArray[ np.float64 ], - propertyName: str, - cpt_cmap: int, - curvesAspect: dict[ str, tuple[ tuple[ float, float, float ], str, float, str, float ] ], -) -> None: - """Plot x, y data using input ax_to_use according to curvesAspect. - - Args: - ax_to_use (axes.Axes): subplot to use - x (npt.NDArray[np.float64]): abscissa data - y (npt.NDArray[np.float64]): ordinate data - propertyName (str): name of the property - cpt_cmap (int): colormap to use - curvesAspect (dict[str, tuple[tuple[float, float, float],str, float, str, float]]): - user choices on curve aspect - """ - cmap = plt.rcParams[ "axes.prop_cycle" ].by_key()[ "color" ][ cpt_cmap % 10 ] - mask = np.logical_and( np.isnan( x ), np.isnan( y ) ) - not_mask = ~mask - # Plot only when x and y values are not nan values - if propertyName in curvesAspect: - asp: tuple[ tuple[ float, float, float ], str, float, str, float ] = curvesAspect[ propertyName ] - ax_to_use.plot( - x[ not_mask ], - y[ not_mask ], - label=propertyName, - color=asp[ 0 ], - linestyle=asp[ 1 ], - linewidth=asp[ 2 ], - marker=asp[ 3 ], - markersize=asp[ 4 ], - ) - else: - ax_to_use.plot( x[ not_mask ], y[ not_mask ], label=propertyName, color=cmap ) - - -def getExtremaAllAxes( axes: list[ axes.Axes ], ) -> tuple[ tuple[ float, float ], tuple[ float, float ] ]: - """Gets the limits of both X and Y axis as a 2x2 element tuple. - - Args: - axes (list[axes.Axes]): list of subplots to get limits. 
- - Returns: - tuple[tuple[float, float], tuple[float, float]]:: ((xMin, xMax), (yMin, yMax)) - """ - assert len( axes ) > 0 - xMin, xMax, yMin, yMax = getAxeLimits( axes[ 0 ] ) - if len( axes ) > 1: - for i in range( 1, len( axes ) ): - x1, x2, y1, y2 = getAxeLimits( axes[ i ] ) - if x1 < xMin: - xMin = x1 - if x2 > xMax: - xMax = x2 - if y1 < yMin: - yMin = y1 - if y2 > yMax: - yMax = y2 - return ( ( xMin, xMax ), ( yMin, yMax ) ) - - -def getAxeLimits( ax: axes.Axes ) -> tuple[ float, float, float, float ]: - """Gets the limits of both X and Y axis as a 4 element tuple. - - Args: - ax (axes.Axes): subplot to get limits. - - Returns: - tuple[float, float, float, float]: (xMin, xMax, yMin, yMax) - """ - xMin, xMax = ax.get_xlim() - yMin, yMax = ax.get_ylim() - return ( xMin, xMax, yMin, yMax ) - - -def findExtremasPropertiesForAssociatedIdentifiers( - df: pd.DataFrame, - associatedIdentifiers: dict[ str, dict[ str, list[ str ] ] ], - offsetPlotting: bool = False, - offsetPercentage: int = 5, -) -> dict[ str, tuple[ float, float ] ]: - """Find min and max of all properties linked to a same identifier. - - Using an associatedIdentifiers dict containing associatedProperties dict, - we can find the extremas for each property of each identifier. Once we have them all, - we compare for each identifier what are the most extreme values and only the biggest and - lowest are kept in the end. - - - Args: - df (pd.DataFrame): Pandas dataframe - associatedIdentifiers (dict[str, dict[str, list[str]]]): property identifiers. - offsetPlotting (bool, optional): When using the values being returned, - we might want to add an offset to these values. If set to True, - the offsetPercentage is taken into account. Defaults to False. - offsetPercentage (int, optional): Value by which we will offset - the min and max values of each tuple of floats. Defaults to 5. 
- - Returns: - dict[str, tuple[float, float]]: { - "BHP (Pa)": (minAllWells, maxAllWells), - "TotalMassRate (kg)": (minAllWells, maxAllWells), - "TotalSurfaceVolumetricRate (m3/s)": (minAllWells, maxAllWells), - "SurfaceVolumetricRateCO2 (m3/s)": (minAllWells, maxAllWells), - "SurfaceVolumetricRateWater (m3/s)": (minAllWells, maxAllWells) - } - """ - extremasProperties: dict[ str, tuple[ float, float ] ] = {} - # first we need to find the extrema for each property type per region - propertyTypesExtremas: dict[ str, list[ tuple[ float, float ] ] ] = {} - for associatedProperties in associatedIdentifiers.values(): - extremasPerProperty: dict[ str, - tuple[ float, - float ] ] = ( findExtremasAssociatedProperties( df, associatedProperties ) ) - for propertyType, extremaFound in extremasPerProperty.items(): - if propertyType not in propertyTypesExtremas: - propertyTypesExtremas[ propertyType ] = [ extremaFound ] - else: - propertyTypesExtremas[ propertyType ].append( extremaFound ) - # then, once all extrema have been found for all regions, we need to figure out - # which extrema per property type is the most extreme one - for propertyType in propertyTypesExtremas: - values: list[ tuple[ float, float ] ] = propertyTypesExtremas[ propertyType ] - minValues: list[ float ] = [ values[ i ][ 0 ] for i in range( len( values ) ) ] - maxValues: list[ float ] = [ values[ i ][ 1 ] for i in range( len( values ) ) ] - lowest, highest = ( min( minValues ), max( maxValues ) ) - if offsetPlotting: - offset: float = ( highest - lowest ) / 100 * offsetPercentage - lowest, highest = ( lowest - offset, highest + offset ) - extremasProperties[ propertyType ] = ( lowest, highest ) - return extremasProperties - - -def findExtremasAssociatedProperties( - df: pd.DataFrame, associatedProperties: dict[ str, list[ str ] ] ) -> dict[ str, tuple[ float, float ] ]: - """Find the min and max of properties. 
- - Using an associatedProperties dict containing property types - as keys and a list of property names as values, - and a pandas dataframe whose column names are composed of those same - property names, you can find the min and max values of each property - type and return it as a tuple. - - Args: - df (pd.DataFrame): Pandas dataframe - associatedProperties (dict[str, list[str]]): { - "Pressure (Pa)": ["Reservoir__Pressure__Pa__Source1"], - "Mass (kg)": ["CO2__Mass__kg__Source1", - "Water__Mass__kg__Source1"] - } - - Returns: - dict[str, tuple[float, float]]: { - "Pressure (Pa)": (minPressure, maxPressure), - "Mass (kg)": (minMass, maxMass) - } - """ - extremasProperties: dict[ str, tuple[ float, float ] ] = {} - for propertyType, propertyNames in associatedProperties.items(): - minValues = np.empty( len( propertyNames ) ) - maxValues = np.empty( len( propertyNames ) ) - for i, propertyName in enumerate( propertyNames ): - values: npt.NDArray[ np.float64 ] = df[ propertyName ].to_numpy() - minValues[ i ] = np.nanmin( values ) - maxValues[ i ] = np.nanmax( values ) - extrema: tuple[ float, float ] = ( - float( np.min( minValues ) ), - float( np.max( maxValues ) ), - ) - extremasProperties[ propertyType ] = extrema - return extremasProperties - - -""" -Utils for treatment of the data -""" - - -def associatePropertyToAxeType( propertyNames: list[ str ] ) -> dict[ str, list[ str ] ]: - """Identify property types. - - From a list of property names, identify if each of this property - corresponds to a certain property type like "Pressure", "Mass", - "Temperature" etc ... and returns a dict where the keys are the property - type and the value the list of property names associated to it. 
- - Args: - propertyNames (list[str]): ["Reservoir__Pressure__Pa__Source1", - "CO2__Mass__kg__Source1", "Water__Mass__kg__Source1"] - - Returns: - dict[str, list[str]]: { "Pressure (Pa)": ["Reservoir__Pressure__Pa__Source1"], - "Mass (kg)": ["CO2__Mass__kg__Source1", - "Water__Mass__kg__Source1"] } - """ - propertyIds: list[ str ] = fcts.identifyProperties( propertyNames ) - associationTable: dict[ str, str ] = { - "0": "Pressure", - "1": "Pressure", - "2": "Temperature", - "3": "PoreVolume", - "4": "PoreVolume", - "5": "Mass", - "6": "Mass", - "7": "Mass", - "8": "Mass", - "9": "Mass", - "10": "Mass", - "11": "BHP", - "12": "MassRate", - "13": "VolumetricRate", - "14": "VolumetricRate", - "15": "BHP", - "16": "MassRate", - "17": "VolumetricRate", - "18": "VolumetricRate", - "19": "VolumetricRate", - "20": "Volume", - "21": "VolumetricRate", - "22": "Volume", - "23": "Iterations", - "24": "Iterations", - "25": "Stress", - "26": "Displacement", - "27": "Permeability", - "28": "Porosity", - "29": "Ratio", - "30": "Fraction", - "31": "BulkModulus", - "32": "ShearModulus", - "33": "OedometricModulus", - "34": "Points", - "35": "Density", - "36": "Mass", - "37": "Mass", - "38": "Time", - "39": "Time", - } - associatedPropertyToAxeType: dict[ str, list[ str ] ] = {} - noUnitProperties: list[ str ] = [ - "Iterations", - "Porosity", - "Ratio", - "Fraction", - "OedometricModulus", - ] - for i, propId in enumerate( propertyIds ): - idProp: str = propId.split( ":" )[ 0 ] - propNoId: str = propId.split( ":" )[ 1 ] - associatedType: str = associationTable[ idProp ] - if associatedType in noUnitProperties: - axeName: str = associatedType - else: - propIdElts: list[ str ] = propNoId.split( "__" ) - # no unit was found - if len( propIdElts ) <= 2: - axeName = associatedType - # there is a unit - else: - unit: str = propIdElts[ -2 ] - axeName = associatedType + " (" + unit + ")" - if axeName not in associatedPropertyToAxeType: - associatedPropertyToAxeType[ axeName ] = [] - 
associatedPropertyToAxeType[ axeName ].append( propertyNames[ i ] ) - return associatedPropertyToAxeType - - -def propertiesPerIdentifier( propertyNames: list[ str ] ) -> dict[ str, list[ str ] ]: - """Extract identifiers with associatied properties. - - From a list of property names, extracts the identifier (name of the - region for flow property or name of a well for well property) and creates - a dictionnary with identifiers as keys and the properties containing them - for value in a list. - - Args: - propertyNames (list[str]): property names - Example - - .. code-block:: python - - [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1" - ] - - Returns: - dict[str, list[str]]: property identifiers - Example - - .. code-block:: python - - { - "WellControls1": [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1" - ], - "WellControls2": [ - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1" - ] - } - """ - propsPerIdentfier: dict[ str, list[ str ] ] = {} - for propertyName in propertyNames: - elements: list[ str ] = propertyName.split( "__" ) - identifier: str = elements[ 0 ] - if identifier not in propsPerIdentfier: - propsPerIdentfier[ identifier ] = [] - propsPerIdentfier[ identifier ].append( propertyName ) - return propsPerIdentfier - - -def associationIdentifiers( propertyNames: list[ str ] ) -> dict[ str, dict[ str, list[ str ] ] ]: - """Extract identifiers with associatied curves. - - From a list of property names, extracts the identifier (name of the - region for flow property or name of a well for well property) and creates - a dictionnary with identifiers as keys and the properties containing them - for value in a list. - - Args: - propertyNames (list[str]): property names - Example - - .. 
code-block:: python - - [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls3__BHP__Pa__Source1", - "WellControls3__TotalMassRate__tons/day__Source1", - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean__BHP__Pa__Source1", - "Mean__TotalMassRate__tons/day__Source1", - "Mean__TotalSurfaceVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1" - ] - - Returns: - dict[str, dict[str, list[str]]]: property identifiers - Example - - .. 
code-block:: python - - { - "WellControls1": { - 'BHP (Pa)': [ - 'WellControls1__BHP__Pa__Source1' - ], - 'MassRate (kg/s)': [ - 'WellControls1__TotalMassRate__kg/s__Source1' - ], - 'VolumetricRate (m3/s)': [ - 'WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1', - 'WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1', - 'WellControls1__SurfaceVolumetricRateWater__m3/s__Source1' - ] - }, - "WellControls2": { - 'BHP (Pa)': [ - 'WellControls2__BHP__Pa__Source1' - ], - 'MassRate (kg/s)': [ - 'WellControls2__TotalMassRate__kg/s__Source1' - ], - 'VolumetricRate (m3/s)': [ - 'WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1', - 'WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1', - 'WellControls2__SurfaceVolumetricRateWater__m3/s__Source1' - ] - }, - "WellControls3": { - 'BHP (Pa)': [ - 'WellControls3__BHP__Pa__Source1' - ], - 'MassRate (tons/day)': [ - 'WellControls3__TotalMassRate__tons/day__Source1' - ], - 'VolumetricRate (bbl/day)': [ - 'WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1', - 'WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1', - 'WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1' - ] - }, - "Mean": { - 'BHP (Pa)': [ - 'Mean__BHP__Pa__Source1' - ], - 'MassRate (tons/day)': [ - 'Mean__TotalMassRate__tons/day__Source1' - ], - 'VolumetricRate (bbl/day)': [ - 'Mean__TotalSurfaceVolumetricRate__bbl/day__Source1', - 'Mean__SurfaceVolumetricRateCO2__bbl/day__Source1', - 'Mean__SurfaceVolumetricRateWater__bbl/day__Source1' - ] - } - } - """ - propsPerIdentfier: dict[ str, list[ str ] ] = propertiesPerIdentifier( propertyNames ) - assosIdentifier: dict[ str, dict[ str, list[ str ] ] ] = {} - for ident, propNames in propsPerIdentfier.items(): - assosPropsToAxeType: dict[ str, list[ str ] ] = associatePropertyToAxeType( propNames ) - assosIdentifier[ ident ] = assosPropsToAxeType - return assosIdentifier - - -def buildFontTitle( userChoices: dict[ str, Any ] ) -> FontProperties: - """Builds a Fontproperties 
object according to user choices on title. - - Args: - userChoices (dict[str, Any]): customization parameters. - - Returns: - FontProperties: FontProperties object for the title. - """ - fontTitle: FontProperties = FontProperties() - if "titleStyle" in userChoices: - fontTitle.set_style( userChoices[ "titleStyle" ] ) - if "titleWeight" in userChoices: - fontTitle.set_weight( userChoices[ "titleWeight" ] ) - if "titleSize" in userChoices: - fontTitle.set_size( userChoices[ "titleSize" ] ) - return fontTitle - - -def buildFontVariable( userChoices: dict[ str, Any ] ) -> FontProperties: - """Builds a Fontproperties object according to user choices on variables. - - Args: - userChoices (dict[str, Any]): customization parameters. - - Returns: - FontProperties: FontProperties object for the variable axes. - """ - fontVariable: FontProperties = FontProperties() - if "variableStyle" in userChoices: - fontVariable.set_style( userChoices[ "variableStyle" ] ) - if "variableWeight" in userChoices: - fontVariable.set_weight( userChoices[ "variableWeight" ] ) - if "variableSize" in userChoices: - fontVariable.set_size( userChoices[ "variableSize" ] ) - return fontVariable - - -def buildFontCurves( userChoices: dict[ str, Any ] ) -> FontProperties: - """Builds a Fontproperties object according to user choices on curves. - - Args: - userChoices (dict[str, str]): customization parameters. - - Returns: - FontProperties: FontProperties object for the curves axes. - """ - fontCurves: FontProperties = FontProperties() - if "curvesStyle" in userChoices: - fontCurves.set_style( userChoices[ "curvesStyle" ] ) - if "curvesWeight" in userChoices: - fontCurves.set_weight( userChoices[ "curvesWeight" ] ) - if "curvesSize" in userChoices: - fontCurves.set_size( userChoices[ "curvesSize" ] ) - return fontCurves - - -def customizeLines( userChoices: dict[ str, Any ], labels: list[ str ], - linesList: list[ lines.Line2D ] ) -> list[ lines.Line2D ]: - """Customize lines according to user choices. 
- - By applying the user choices, we modify or not the list of lines - and return it with the same number of lines in the same order. - - Args: - userChoices (dict[str, Any]): customization parameters. - labels (list[str]): labels of lines. - linesList (list[lines.Line2D]): list of lines object. - - Returns: - list[lines.Line2D]: list of lines object modified. - """ - if "linesModified" in userChoices: - linesModifs: dict[ str, dict[ str, Any ] ] = userChoices[ "linesModified" ] - linesChanged: list[ lines.Line2D ] = [] - for i, label in enumerate( labels ): - if label in linesModifs: - lineChanged: lines.Line2D = applyCustomizationOnLine( linesList[ i ], linesModifs[ label ] ) - linesChanged.append( lineChanged ) - else: - linesChanged.append( linesList[ i ] ) - return linesChanged - else: - return linesList - - -def applyCustomizationOnLine( line: lines.Line2D, parameters: dict[ str, Any ] ) -> lines.Line2D: - """Apply modification methods on a line from parameters. - - Args: - line (lines.Line2D): Matplotlib Line2D - parameters (dict[str, Any]): dictionary of { - "linestyle": one of ["-","--","-.",":"] - "linewidth": positive int - "color": color code - "marker": one of ["",".","o","^","s","*","D","+","x"] - "markersize":positive int - } - - Returns: - lines.Line2D: Line2D object modified. - """ - if "linestyle" in parameters: - line.set_linestyle( parameters[ "linestyle" ] ) - if "linewidth" in parameters: - line.set_linewidth( parameters[ "linewidth" ] ) - if "color" in parameters: - line.set_color( parameters[ "color" ] ) - if "marker" in parameters: - line.set_marker( parameters[ "marker" ] ) - if "markersize" in parameters: - line.set_markersize( parameters[ "markersize" ] ) - return line - - -""" -Layout tools for layering subplots in a figure -""" - - -def isprime( x: int ) -> bool: - """Checks if a number is primer or not. - - Args: - x (int): Positive number to test. - - Returns: - bool: True if prime, False if not. 
- """ - if x < 0: - print( "Invalid number entry, needs to be positive int" ) - return False - - return all( x % n != 0 for n in range( 2, int( x**0.5 ) + 1 ) ) - - -def findClosestPairIntegers( x: int ) -> tuple[ int, int ]: - """Get the pair of integers that multiply the closest to input value. - - Finds the closest pair of integers that when multiplied together, - gives a number the closest to the input number (always above or equal). - - Args: - x (int): Positive number. - - Returns: - tuple[int, int]: (highest int, lowest int) - """ - if x < 4: - return ( x, 1 ) - while isprime( x ): - x += 1 - N: int = round( math.sqrt( x ) ) - while x > N: - if x % N == 0: - M = x // N - highest = max( M, N ) - lowest = min( M, N ) - return ( highest, lowest ) - else: - N += 1 - return ( x, 1 ) - - -def smartLayout( x: int, ratio: float ) -> tuple[ int, int, int ]: - """Return the best layout according to the number of subplots. - - For multiple subplots, we need to have a layout that can adapt to - the number of subplots automatically. This function figures out the - best layout possible knowing the number of suplots and the figure ratio. - - Args: - x (int): Positive number. - ratio (float): width to height ratio of a figure. 
- - Returns: - tuple[int]: (nbr_rows, nbr_columns, number of axes to remove) - """ - pair: tuple[ int, int ] = findClosestPairIntegers( x ) - nbrAxesToRemove: int = pair[ 0 ] * pair[ 1 ] - x - if ratio < 1: - return ( pair[ 0 ], pair[ 1 ], nbrAxesToRemove ) - else: - return ( pair[ 1 ], pair[ 0 ], nbrAxesToRemove ) - - -""" -Legend tools -""" - -commonAssociations: dict[ str, str ] = { - "pressuremin": "Pmin", - "pressureMax": "Pmax", - "pressureaverage": "Pavg", - "deltapressuremin": "DPmin", - "deltapressuremax": "DPmax", - "temperaturemin": "Tmin", - "temperaturemax": "Tmax", - "temperatureaverage": "Tavg", - "effectivestressxx": "ESxx", - "effectivestresszz": "ESzz", - "effectivestressratio": "ESratio", - "totaldisplacementx": "TDx", - "totaldisplacementy": "TDy", - "totaldisplacementz": "TDz", - "totalstressXX": "TSxx", - "totalstressZZ": "TSzz", - "stressxx": "Sxx", - "stressyy": "Syy", - "stresszz": "Szz", - "stressxy": "Sxy", - "stressxz": "Sxz", - "stressyz": "Syz", - "poissonratio": "PR", - "porosity": "PORO", - "specificgravity": "SG", - "theoreticalverticalstress": "TVS", - "density": "DNST", - "pressure": "P", - "permeabilityx": "PERMX", - "permeabilityy": "PERMY", - "permeabilityz": "PERMZ", - "oedometric": "OEDO", - "young": "YOUNG", - "shear": "SHEAR", - "bulk": "BULK", - "totaldynamicporevolume": "TDPORV", - "time": "TIME", - "dt": "DT", - "meanbhp": "MBHP", - "meantotalmassrate": "MTMR", - "meantotalvolumetricrate": "MTSVR", - "bhp": "BHP", - "totalmassrate": "TMR", - "cumulatedlineariter": "CLI", - "cumulatednewtoniter": "CNI", - "lineariter": "LI", - "newtoniter": "NI", -} - -phasesAssociations: dict[ str, str ] = { - "dissolvedmass": " IN ", - "immobile": "IMOB ", - "mobile": "MOB ", - "nontrapped": "NTRP ", - "dynamicporevolume": "DPORV ", - "meansurfacevolumetricrate": "MSVR ", - "surfacevolumetricrate": "SVR ", -} - - -def smartLabelsSorted( labels: list[ str ], lines: list[ lines.Line2D ], - userChoices: dict[ str, Any ] ) -> tuple[ list[ 
str ], list[ lines.Line2D ] ]: - """Shorten all legend labels and sort them. - - To improve readability of the legend for an axe in ParaView, we can apply the - smartLegendLabel functionnality to reduce the size of each label. Plus we sort them - alphabetically and therefore, we also sort the lines the same way. - - Args: - labels (list[str]): Labels to use ax.legend() like - ["Region1__TemperatureAvg__K__job_123456", "Region1__PressureMin__Pa__job_123456"] - lines (list[lines.Line2D]): Lines plotted on axes of matplotlib figure like [line1, line2] - userChoices (dict[str, Any]): Choices made by widget selection - in PythonViewConfigurator filter. - - Returns: - tuple[list[str], list[lines.Line2D]]: Improved labels and sorted labels / lines like - (["Region1 Pmin", "Region1 Tavg"], [line2, line1]) - """ - smartLabels: list[ str ] = [ smartLabel( label, userChoices ) for label in labels ] - # I need the labels to be ordered alphabetically for better readability of the legend - # Therefore, if I sort smartLabels, I need to also sort lines with the same order. - # But this can only be done if there are no duplicates of labels in smartLabels. - # If a duplicate is found, "sorted" will try to sort with line which has no comparison built in - # which will throw an error. - if len( set( smartLabels ) ) == len( smartLabels ): - sortedBothLists = sorted( zip( smartLabels, lines, strict=False ) ) - sortedLabels, sortedLines = zip( *sortedBothLists, strict=False ) - return ( list( sortedLabels ), list( sortedLines ) ) - else: - return ( smartLabels, lines ) - - -def smartLabel( label: str, userChoices: dict[ str, Any ] ) -> str: - """Shorten label according to user choices. - - Labels name can tend to be too long. Therefore, we need to reduce the size of the label. - Depending on the choices made by the user, the identifier and the job name can disappear. - - Args: - label (str): A label to be plotted. 
- Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - userChoices (dict[str, Any]): user choices. - - Returns: - str: "phaseName0 in phaseName1" or "Reservoir phaseName0 in phaseName1" - or "phaseName0 in phaseName1 job123456.out" or - "Reservoir phaseName0 in phaseName1 job123456.out" - """ - # first step is to abbreviate the label to reduce its size - smartLabel: str = abbreviateLabel( label ) - # When only one source is used as input, there is no need to precise which one is used - # in the label so the job name is useless. Same when removeJobName option is selected by user. - inputNames: list[ str ] = userChoices[ "inputNames" ] - removeJobName: bool = userChoices[ "removeJobName" ] - if len( inputNames ) > 1 and not removeJobName: - jobName: str = findJobName( label ) - smartLabel += " " + jobName - # When the user chooses to split the plot into subplots to plot by region or well, - # this identifier name will appear as a title of the subplot so no need to use it. - # Same applies when user decides to remove regions. - plotRegions: bool = userChoices[ "plotRegions" ] - removeRegions: bool = userChoices[ "removeRegions" ] - if not plotRegions and not removeRegions: - smartLabel = findIdentifier( label ) + " " + smartLabel - return smartLabel - - -def abbreviateLabel( label: str ) -> str: - """Get the abbreviation of the label according to reservoir nomenclature. - - When using labels to plot, the name can tend to be too long. Therefore, to respect - the logic of reservoir engineering vocabulary, abbreviations for common property names - can be used to shorten the name. The goal is therefore to generate the right abbreviation - for the label input. - - Args: - label (str): A label to be plotted. 
- Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - str: "phaseName0 in phaseName1" - """ - for commonAsso in commonAssociations: - if commonAsso in label.lower(): - return commonAssociations[ commonAsso ] - for phaseAsso in phasesAssociations: - if phaseAsso in label.lower(): - phases: list[ str ] = findPhasesLabel( label ) - phase0: str = "" if len( phases ) < 1 else phases[ 0 ] - phase1: str = "" if len( phases ) < 2 else phases[ 1 ] - if phaseAsso == "dissolvedmass": - return phase0 + phasesAssociations[ phaseAsso ] + phase1 - else: - return phasesAssociations[ phaseAsso ] + phase0 - return label - - -def findIdentifier( label: str ) -> str: - """Find identifier inside the label. - - When looking at a label, it may contain or not an identifier at the beginning of it. - An identifier is either a regionName or a wellName. - The goal is to find it and extract it if present. - - Args: - label (str): A label to be plotted. - Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - str: "Reservoir" - """ - identifier: str = "" - if "__" not in label: - print( "Invalid label, cannot search identifier when no '__' in label." ) - return identifier - subParts: list[ str ] = label.split( "__" ) - if len( subParts ) == 4: - identifier = subParts[ 0 ] - return identifier - - -def findJobName( label: str ) -> str: - """Find the Geos job name at the end of the label. - - When looking at a label, it may contain or not a job name at the end of it. - The goal is to find it and extract it if present. - - Args: - label (str): A label to be plotted. - Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - str: "job123456.out" - """ - jobName: str = "" - if "__" not in label: - print( "Invalid label, cannot search jobName when no '__' in label." 
) - return jobName - subParts: list[ str ] = label.split( "__" ) - if len( subParts ) == 4: - jobName = subParts[ 3 ] - return jobName - - -def findPhasesLabel( label: str ) -> list[ str ]: - """Find phase name inside label. - - When looking at a label, it may contain or not patterns that indicates - the presence of a phase name within it. Therefore, if one of these patterns - is present, one or multiple phase names can be found and be extracted. - - Args: - label (str): A label to be plotted. - Example- Reservoir__DissolvedMassphaseName0InphaseName1__kg__job123456.out - - Returns: - list[str]: [phaseName0, phaseName1] - """ - phases: list[ str ] = [] - lowLabel: str = label.lower() - indexStart: int = 0 - indexEnd: int = 0 - if "__" not in label: - print( "Invalid label, cannot search phases when no '__' in label." ) - return phases - if "dissolvedmass" in lowLabel: - indexStart = lowLabel.index( "dissolvedmass" ) + len( "dissolvedmass" ) - indexEnd = lowLabel.rfind( "__" ) - phasesSubstring: str = lowLabel[ indexStart:indexEnd ] - phases = phasesSubstring.split( "in" ) - phases = [ phase.capitalize() for phase in phases ] - else: - if "dynamicporevolume" in lowLabel: - indexStart = lowLabel.index( "__" ) + 2 - indexEnd = lowLabel.index( "dynamicporevolume" ) - else: - for pattern in [ "nontrapped", "trapped", "immobile", "mobile", "rate" ]: - if pattern in lowLabel: - indexStart = lowLabel.index( pattern ) + len( pattern ) - indexEnd = lowLabel.rfind( "mass" ) - if indexEnd < 0: - indexEnd = indexStart + lowLabel[ indexStart: ].find( "__" ) - break - if indexStart < indexEnd: - phases = [ lowLabel[ indexStart:indexEnd ].capitalize() ] - return phases - - -""" -Under this is the first version of smartLabels without abbreviations. 
-""" - -# def smartLegendLabelsAndLines( -# labelNames: list[str], lines: list[Any], userChoices: dict[str, Any], regionName="" -# ) -> tuple[list[str], list[Any]]: -# """To improve readability of the legend for an axe in ParaView, we can apply the -# smartLegendLabel functionnality to reduce the size of each label. Plus we sort them -# alphabetically and therefore, we also sort the lines the same way. - -# Args: -# labelNames (list[str]): Labels to use ax.legend() like -# ["Region1__PressureMin__Pa__job_123456", "Region1__Temperature__K__job_123456"] -# lines (list[Any]): Lines plotted on axes of matplotlib figure like [line1, line2] -# userChoices (dict[str, Any]): Choices made by widget selection -# in PythonViewConfigurator filter. -# regionName (str, optional): name of the region. Defaults to "". - -# Returns: -# tuple[list[str], list[Any]]: Improved labels and sorted labels / lines like -# (["Temperature K", "PressureMin Pa"], [line2, line1]) -# """ -# smartLabels: list[str] = [ -# smartLegendLabel(labelName, userChoices, regionName) for labelName in labelNames -# ] -# # I need the labels to be ordered alphabetically for better readability of the legend -# # Therefore, if I sort smartLabels, I need to also sort lines with the same order -# sortedBothLists = sorted(zip(smartLabels, lines) -# sortedLabels, sortedLines = zip(*sortedBothLists) -# return (sortedLabels, sortedLines) - -# def smartLegendLabel(labelName: str, userChoices: dict[str, Any], regionName="") -> str: -# """When plotting legend label, the label format can be improved by removing some -# overwhelming / repetitive prefixe / suffixe and have a shorter label. - -# Args: -# labelName (str): Label to use ax.legend() like -# Region1__PressureMin__Pa__job_123456 -# userChoices (dict[str, Any]): Choices made by widget selection -# in PythonViewConfigurator filter. -# regionName (str, optional): name of the region. Defaults to "". - -# Returns: -# str: Improved label name like PressureMin Pa. 
-# """ -# smartLabel: str = "" -# # When only one source is used as input, there is no need to precise which one -# # is used in the label. Same when removeJobName option is selected by user. -# inputNames: list[str] = userChoices["inputNames"] -# removeJobName: bool = userChoices["removeJobName"] -# if len(inputNames) <= 1 or removeJobName: -# smartLabel = removeJobNameInLegendLabel(labelName, inputNames) -# # When the user chooses to split the plot into subplots to plot by region, -# # the region name will appear as a title of the subplot so no need to use it. -# # Same applies when user decides to remove regions. -# plotRegions: bool = userChoices["plotRegions"] -# removeRegions: bool = userChoices["removeRegions"] -# if plotRegions or removeRegions: -# smartLabel = removeIdentifierInLegendLabel(smartLabel, regionName) -# smartLabel = smartLabel.replace("__", " ") -# return smartLabel - -# def removeJobNameInLegendLabel(legendLabel: str, inputNames: list[str]) -> str: -# """When plotting legends, the name of the job is by default at the end of -# the label. Therefore, it can increase tremendously the size of the legend -# and we can avoid that by removing the job name from it. - -# Args: -# legendLabel (str): Label to use ax.legend() like -# Region1__PressureMin__Pa__job_123456 -# inputNames (list[str]): names of the sources use to plot. - -# Returns: -# str: Label without the job name like Region1__PressureMin__Pa. -# """ -# for inputName in inputNames: -# pattern: str = "__" + inputName -# if legendLabel.endswith(pattern): -# jobIndex: int = legendLabel.index(pattern) -# return legendLabel[:jobIndex] -# return legendLabel - -# def removeIdentifierInLegendLabel(legendLabel: str, regionName="") -> str: -# """When plotting legends, the name of the region is by default at the -# beginning of the label. Here we remove the region name from the legend label. 
- -# Args: -# legendLabel (str): Label to use ax.legend() like -# Region1__PressureMin__Pa__job_123456 -# regionName (str): name of the region. Defaults to "". - -# Returns: -# str: Label without the job name like PressureMin__Pa__job_123456 -# """ -# if "__" not in legendLabel: -# return legendLabel -# if regionName == "": -# firstRegionIndex: int = legendLabel.index("__") -# return legendLabel[firstRegionIndex + 2:] -# pattern: str = regionName + "__" -# if legendLabel.startswith(pattern): -# return legendLabel[len(pattern):] -# return legendLabel -""" -Other 2D tools for simplest figures -""" - - -def basicFigure( df: pd.DataFrame, variableName: str, curveName: str ) -> tuple[ figure.Figure, axes.Axes ]: - """Creates a plot. - - Generates a figure and axes objects from matplotlib that plots - one curve along the X axis, with legend and label for X and Y. - - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - variableName (str): Name of the variable column - curveName (str): Name of the column to display along that variable. - - Returns: - tuple[figure.Figure, axes.Axes]: the fig and the ax. - """ - fig, ax = plt.subplots() - x: npt.NDArray[ np.float64 ] = df[ variableName ].to_numpy() - y: npt.NDArray[ np.float64 ] = df[ curveName ].to_numpy() - ax.plot( x, y, label=curveName ) - ax.set_xlabel( variableName ) - ax.set_ylabel( curveName ) - ax.legend( loc="best" ) - return ( fig, ax ) - - -def invertedBasicFigure( df: pd.DataFrame, variableName: str, curveName: str ) -> tuple[ figure.Figure, axes.Axes ]: - """Creates a plot with inverted XY axis. - - Generates a figure and axes objects from matplotlib that plots - one curve along the Y axis, with legend and label for X and Y. 
- - Args: - df (pd.DataFrame): dataframe containing at least two columns, - one named "variableName" and the other "curveName" - variableName (str): Name of the variable column - curveName (str): Name of the column to display along that variable. - - Returns: - tuple[figure.Figure, axes.Axes]: the fig and the ax. - """ - fig, ax = plt.subplots() - x: npt.NDArray[ np.float64 ] = df[ curveName ].to_numpy() - y: npt.NDArray[ np.float64 ] = df[ variableName ].to_numpy() - ax.plot( x, y, label=variableName ) - ax.set_xlabel( curveName ) - ax.set_ylabel( variableName ) - ax.legend( loc="best" ) - return ( fig, ax ) - - -def adjust_subplots( fig: figure.Figure, invertXY: bool ) -> figure.Figure: - """Adjust the size of the subplot in the fig. - - Args: - fig (figure.Figure): Matplotlib figure - invertXY (bool): Choice to either intervert or not the X and Y axes - - Returns: - figure.Figure: Matplotlib figure with adjustements - """ - if invertXY: - fig.subplots_adjust( left=0.05, right=0.98, top=0.9, bottom=0.2 ) - else: - fig.subplots_adjust( left=0.06, right=0.94, top=0.95, bottom=0.08 ) - return fig diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py b/geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py deleted file mode 100644 index e45e6df4..00000000 --- a/geos-posp/src/geos_posp/visu/pythonViewUtils/mainPythonView.py +++ /dev/null @@ -1,38 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# type: ignore -# ruff: noqa -try: - import matplotlib.pyplot as plt - from paraview import python_view - - import geos_posp.visu.PVUtils.paraviewTreatments as pvt - from geos_posp.visu.pythonViewUtils.Figure2DGenerator import ( - Figure2DGenerator, ) - - plt.close() - if len( sourceNames ) == 0: # noqa: F821 - raise ValueError( "No source name was found. 
Please check at least" + " one source in <>" ) - - dataframes = pvt.getDataframesFromMultipleVTKSources( - sourceNames, - variableName # noqa: F821 - ) - dataframe = pvt.mergeDataframes( dataframes, variableName ) # noqa: F821 - obj_figure = Figure2DGenerator( dataframe, userChoices ) # noqa: F821 - fig = obj_figure.getFigure() - - def setup_data( view ) -> None: # noqa - pass - - def render( view, width: int, height: int ): # noqa - fig.set_size_inches( float( width ) / 100.0, float( height ) / 100.0 ) - imageToReturn = python_view.figure_to_image( fig ) - return imageToReturn - -except Exception as e: - from geos.utils.Logger import getLogger - - logger = getLogger( "Python View Configurator" ) - logger.critical( e, exc_info=True ) diff --git a/geos-posp/tests/mainTests.py b/geos-posp/tests/mainTests.py deleted file mode 100644 index 46ec2bd9..00000000 --- a/geos-posp/tests/mainTests.py +++ /dev/null @@ -1,26 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
-# SPDX-FileContributor: Alexandre Benedicto -import os -import sys -import unittest - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - - -def main() -> None: - """Run all tests.""" - # Load all test cases in the current folder - test_loader = unittest.TestLoader() - test_suite = test_loader.discover( ".", pattern="tests*.py" ) - - # Run the test suite - runner = unittest.TextTestRunner() - runner.run( test_suite ) - - -if __name__ == "__main__": - main() diff --git a/geos-posp/tests/testsFunctionsFigure2DGenerator.py b/geos-posp/tests/testsFunctionsFigure2DGenerator.py deleted file mode 100644 index 119b5630..00000000 --- a/geos-posp/tests/testsFunctionsFigure2DGenerator.py +++ /dev/null @@ -1,184 +0,0 @@ -# SPDX-License-Identifier: Apache-2.0 -# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. -# SPDX-FileContributor: Alexandre Benedicto -# ruff: noqa: E402 # disable Module level import not at top of file -import os -import sys -import unittest - -from typing_extensions import Self - -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.join( os.path.dirname( dir_path ), "src" ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) - -from geos_posp.visu.pythonViewUtils import functionsFigure2DGenerator as utils - - -class TestsFunctionsFigure2DGenerator( unittest.TestCase ): - - def test_associatePropertyToAxeType( self: Self ) -> None: - """Test of associatePropertyToAxeType function.""" - example: list[ str ] = [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__BHP__Pa__Source1", - 
"WellControls2__TotalMassRate__kg/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls3__BHP__Pa__Source1", - "WellControls3__TotalMassRate__tons/day__Source1", - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean__BHP__Pa__Source1", - "Mean__TotalMassRate__tons/day__Source1", - "Mean_TotalVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ] - expected: dict[ str, list[ str ] ] = { - "BHP (Pa)": [ - "WellControls1__BHP__Pa__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls3__BHP__Pa__Source1", - "Mean__BHP__Pa__Source1", - ], - "MassRate (kg/s)": [ - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - ], - "VolumetricRate (m3/s)": [ - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - ], - "MassRate (tons/day)": [ - "WellControls3__TotalMassRate__tons/day__Source1", - "Mean__TotalMassRate__tons/day__Source1", - ], - "VolumetricRate (bbl/day)": [ - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean_TotalVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ], - } - 
obtained: dict[ str, list[ str ] ] = utils.associatePropertyToAxeType( example ) - self.assertEqual( expected, obtained ) - - def test_propertiesPerIdentifier( self: Self ) -> None: - """Test of propertiesPerIdentifier function.""" - propertyNames: list[ str ] = [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - ] - expected: dict[ str, list[ str ] ] = { - "WellControls1": [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - ], - "WellControls2": [ - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - ], - } - obtained = utils.propertiesPerIdentifier( propertyNames ) - self.assertEqual( expected, obtained ) - - def test_associationIdentifers( self: Self ) -> None: - """Test of associationIdentifiers function.""" - propertyNames: list[ str ] = [ - "WellControls1__BHP__Pa__Source1", - "WellControls1__TotalMassRate__kg/s__Source1", - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls2__BHP__Pa__Source1", - "WellControls2__TotalMassRate__kg/s__Source1", - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - "WellControls3__BHP__Pa__Source1", - "WellControls3__TotalMassRate__tons/day__Source1", - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - "Mean__BHP__Pa__Source1", - "Mean__TotalMassRate__tons/day__Source1", - "Mean__TotalSurfaceVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - 
"Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ] - expected: dict[ str, dict[ str, list[ str ] ] ] = { - "WellControls1": { - "BHP (Pa)": [ - "WellControls1__BHP__Pa__Source1", - ], - "MassRate (kg/s)": [ - "WellControls1__TotalMassRate__kg/s__Source1", - ], - "VolumetricRate (m3/s)": [ - "WellControls1__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls1__SurfaceVolumetricRateWater__m3/s__Source1", - ], - }, - "WellControls2": { - "BHP (Pa)": [ - "WellControls2__BHP__Pa__Source1", - ], - "MassRate (kg/s)": [ - "WellControls2__TotalMassRate__kg/s__Source1", - ], - "VolumetricRate (m3/s)": [ - "WellControls2__TotalSurfaceVolumetricRate__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateCO2__m3/s__Source1", - "WellControls2__SurfaceVolumetricRateWater__m3/s__Source1", - ], - }, - "WellControls3": { - "BHP (Pa)": [ - "WellControls3__BHP__Pa__Source1", - ], - "MassRate (tons/day)": [ - "WellControls3__TotalMassRate__tons/day__Source1", - ], - "VolumetricRate (bbl/day)": [ - "WellControls3__TotalSurfaceVolumetricRate__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateCO2__bbl/day__Source1", - "WellControls3__SurfaceVolumetricRateWater__bbl/day__Source1", - ], - }, - "Mean": { - "BHP (Pa)": [ - "Mean__BHP__Pa__Source1", - ], - "MassRate (tons/day)": [ - "Mean__TotalMassRate__tons/day__Source1", - ], - "VolumetricRate (bbl/day)": [ - "Mean__TotalSurfaceVolumetricRate__bbl/day__Source1", - "Mean__SurfaceVolumetricRateCO2__bbl/day__Source1", - "Mean__SurfaceVolumetricRateWater__bbl/day__Source1", - ], - }, - } - obtained = utils.associationIdentifiers( propertyNames ) - self.assertEqual( expected, obtained ) - - -if __name__ == "__main__": - unittest.main() diff --git a/geos-prep/pyproject.toml b/geos-prep/pyproject.toml index fb0100c4..1f0789ae 100644 --- a/geos-prep/pyproject.toml +++ b/geos-prep/pyproject.toml @@ -31,7 +31,7 @@ keywords = [ ] dependencies = [ "vtk >= 9.3", - "numpy 
>= 1.26", + "numpy >= 2.2", "pandas >= 2.2", "typing_extensions >= 4.12", "geos_posp >=1.0", diff --git a/geos-pv/pyproject.toml b/geos-pv/pyproject.toml new file mode 100644 index 00000000..0444641c --- /dev/null +++ b/geos-pv/pyproject.toml @@ -0,0 +1,34 @@ +[build-system] +requires = ["setuptools>=61.2", "wheel >= 0.37.1"] +build-backend = "setuptools.build_meta" + +[project] +name = "geos-pv" +version = "0.1.0" +description = "geos-pv is a Python package that gathers Paraview plugins and dedicated utils to process and visualize GEOS inputs and outputs." +authors = [{name = "GEOS Contributors" }] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] +license = {text = "Apache-2.0"} +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python" +] + +requires-python = ">=3.10" + +dynamic = ["dependencies"] + +[project.optional-dependencies] +# dependency to use if install together with paraview +paraview = ["paraview"] + +[tool.pytest.ini_options] +addopts = "--import-mode=importlib" +console_output_style = "count" +pythonpath = [".", "src"] +python_classes = "Test" +python_files = "test*.py" +python_functions = "test*" +testpaths = ["tests"] +norecursedirs = "bin" +filterwarnings = [] \ No newline at end of file diff --git a/geos-pv/requirements.txt b/geos-pv/requirements.txt new file mode 100644 index 00000000..edb1046c --- /dev/null +++ b/geos-pv/requirements.txt @@ -0,0 +1,4 @@ +geos-geomechanics +geos-mesh +geos-posp +geos-utils \ No newline at end of file diff --git a/geos-pv/setup.py b/geos-pv/setup.py new file mode 100644 index 00000000..b9ebf3b8 --- /dev/null +++ b/geos-pv/setup.py @@ -0,0 +1,20 @@ +from pathlib import Path +from setuptools import setup + +# geos python package dependencies are read from requirements.txt +# WARNING: only local dependencies must be included in the requirements.txt + +geos_pv_path: Path = Path( __file__ ).parent +geos_python_packages_path: Path = 
geos_pv_path.parent +local_package_names = [] +with open( str( geos_pv_path / "requirements.txt" ) ) as f: + local_package_names = f.read().splitlines() + +install_requires = [] +for name in local_package_names: + if ( geos_python_packages_path / name ).exists(): + install_requires += [ f"{name} @ {(geos_python_packages_path / name).as_uri()}" ] + else: + install_requires += [ name ] + +setup( install_requires=install_requires ) diff --git a/geos-posp/src/geos_posp/readers/__init__.py b/geos-pv/src/geos/pv/__init__.py similarity index 100% rename from geos-posp/src/geos_posp/readers/__init__.py rename to geos-pv/src/geos/pv/__init__.py diff --git a/geos-posp/src/geos_posp/readers/GeosLogReaderAquifers.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderAquifers.py similarity index 99% rename from geos-posp/src/geos_posp/readers/GeosLogReaderAquifers.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderAquifers.py index d80973bd..f77c439e 100644 --- a/geos-posp/src/geos_posp/readers/GeosLogReaderAquifers.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderAquifers.py @@ -4,10 +4,10 @@ from io import TextIOBase import pandas as pd # type: ignore[import-untyped] -from geos.utils.enumUnits import Unit from typing_extensions import Self -import geos_posp.processing.geosLogReaderFunctions as fcts +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderAquifers: diff --git a/geos-posp/src/geos_posp/readers/GeosLogReaderConvergence.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderConvergence.py similarity index 98% rename from geos-posp/src/geos_posp/readers/GeosLogReaderConvergence.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderConvergence.py index c67804b1..f0457b2c 100644 --- a/geos-posp/src/geos_posp/readers/GeosLogReaderConvergence.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderConvergence.py @@ -4,10 +4,10 @@ from io import TextIOBase 
import pandas as pd # type: ignore[import-untyped] -from geos.utils.enumUnits import Unit from typing_extensions import Self -import geos_posp.processing.geosLogReaderFunctions as fcts +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderConvergence: diff --git a/geos-posp/src/geos_posp/readers/GeosLogReaderFlow.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py similarity index 97% rename from geos-posp/src/geos_posp/readers/GeosLogReaderFlow.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py index 4c79fa1c..fae8625a 100644 --- a/geos-posp/src/geos_posp/readers/GeosLogReaderFlow.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderFlow.py @@ -8,7 +8,7 @@ from geos.utils.enumUnits import Unit from typing_extensions import Self -import geos_posp.processing.geosLogReaderFunctions as fcts +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts class GeosLogReaderFlow: diff --git a/geos-posp/src/geos_posp/readers/GeosLogReaderWells.py b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderWells.py similarity index 99% rename from geos-posp/src/geos_posp/readers/GeosLogReaderWells.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderWells.py index 3d14142e..eddf5fc3 100644 --- a/geos-posp/src/geos_posp/readers/GeosLogReaderWells.py +++ b/geos-pv/src/geos/pv/geosLogReaderUtils/GeosLogReaderWells.py @@ -5,10 +5,10 @@ from typing import Union import pandas as pd # type: ignore[import-untyped] -from geos.utils.enumUnits import Unit from typing_extensions import Self -import geos_posp.processing.geosLogReaderFunctions as fcts +import geos.pv.geosLogReaderUtils.geosLogReaderFunctions as fcts +from geos.utils.enumUnits import Unit class GeosLogReaderWells: @@ -234,7 +234,7 @@ def calculateMeanValues( self: Self ) -> None: for meanName, columns in differentMeanColumns.items(): if len( columns ) > 0: values: list[ list[ float ] ] = [ wpv[ c ] 
for c in columns ] - meanValues: list[ float ] = [ sum( item ) / nbr for item in zip( *values, strict=False ) ] + meanValues: list[ float ] = [ sum( item ) / nbr for item in zip( *values ) ] meanNameWithId: str = fcts.identifyProperties( [ meanName ] )[ 0 ] self.m_wellsPropertiesValues[ meanNameWithId ] = meanValues diff --git a/geos-posp/src/geos_posp/visu/PVUtils/__init__.py b/geos-pv/src/geos/pv/geosLogReaderUtils/__init__.py similarity index 100% rename from geos-posp/src/geos_posp/visu/PVUtils/__init__.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/__init__.py diff --git a/geos-posp/src/geos_posp/processing/geosLogReaderFunctions.py b/geos-pv/src/geos/pv/geosLogReaderUtils/geosLogReaderFunctions.py similarity index 100% rename from geos-posp/src/geos_posp/processing/geosLogReaderFunctions.py rename to geos-pv/src/geos/pv/geosLogReaderUtils/geosLogReaderFunctions.py diff --git a/geos-posp/src/geos_posp/visu/pythonViewUtils/__init__.py b/geos-pv/src/geos/pv/pyplotUtils/__init__.py similarity index 100% rename from geos-posp/src/geos_posp/visu/pythonViewUtils/__init__.py rename to geos-pv/src/geos/pv/pyplotUtils/__init__.py diff --git a/geos-pv/src/geos/pv/pyplotUtils/matplotlibOptions.py b/geos-pv/src/geos/pv/pyplotUtils/matplotlibOptions.py new file mode 100644 index 00000000..311678ce --- /dev/null +++ b/geos-pv/src/geos/pv/pyplotUtils/matplotlibOptions.py @@ -0,0 +1,88 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. +# SPDX-FileContributor: Martin Lemay +# ruff: noqa: E402 # disable Module level import not at top of file + +from enum import Enum + +from typing_extensions import Self + + +class OptionSelectionEnum( Enum ): + + def __init__( self: Self, displayName: str, optionValue: str ) -> None: + """Define the enumeration to options for Paraview selectors. + + Args: + displayName (str): name displayed in the selector + optionValue (str): value used by matplotlib. 
+ + Defaults to None (same optionName as displayName) + """ + self.displayName: str = displayName + self.optionValue: str = optionValue + + +class LegendLocationEnum( OptionSelectionEnum ): + BEST = ( "best", "best" ) + UPPER_LEFT = ( "upper left", "upper left" ) + UPPER_CENTER = ( "upper center", "upper center" ) + UPPER_RIGHT = ( "upper right", "upper right" ) + CENTER_LEFT = ( "center left", "center left" ) + CENTER = ( "center", "center" ) + CENTER_RIGHT = ( "center right", "center right" ) + LOWER_LEFT = ( "lower left", "lower left" ) + LOWER_CENTER = ( "lower center", "lower center" ) + LOWER_RIGHT = ( "lower right", "lower right" ) + + +class FontStyleEnum( OptionSelectionEnum ): + NORMAL = ( "normal", "normal" ) + ITALIC = ( "italic", "italic" ) + OBLIQUE = ( "oblique", "oblique" ) + + +class FontWeightEnum( OptionSelectionEnum ): + NORMAL = ( "normal", "normal" ) + BOLD = ( "bold", "bold" ) + HEAVY = ( "heavy", "heavy" ) + LIGHT = ( "light", "light" ) + + +class LineStyleEnum( OptionSelectionEnum ): + NONE = ( "None", "None" ) + SOLID = ( "solid", "-" ) + DASHED = ( "dashed", "--" ) + DASHDOT = ( "dashdot", "-." ) + DOTTED = ( "dotted", ":" ) + + +class MarkerStyleEnum( OptionSelectionEnum ): + NONE = ( "None", "" ) + POINT = ( "point", "." ) + CIRCLE = ( "circle", "o" ) + TRIANGLE = ( "triangle", "^" ) + SQUARE = ( "square", "s" ) + STAR = ( "star", "*" ) + DIAMOND = ( "diamond", "D" ) + PLUS = ( "plus", "+" ) + X = ( "x", "x" ) + + +def optionEnumToXml( enumObj: OptionSelectionEnum ) -> str: + """Creates an enumeration domain from an OptionSelectionEnum object. + + Dedicated to the dropdown widgets of paraview plugin. + + Args: + enumObj (OptionSelectionEnum): Enumeration values to put in the dropdown + widget. + + Returns: + str: the XML string. 
+ """ + xml: str = """""" + for i, unitObj in enumerate( list( enumObj ) ): # type: ignore[call-overload] + xml += f"""""" + xml += """""" + return xml diff --git a/geos-pv/src/geos/pv/utils/DisplayOrganizationParaview.py b/geos-pv/src/geos/pv/utils/DisplayOrganizationParaview.py new file mode 100644 index 00000000..249ec4b8 --- /dev/null +++ b/geos-pv/src/geos/pv/utils/DisplayOrganizationParaview.py @@ -0,0 +1,192 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. +# SPDX-FileContributor: Alexandre Benedicto +# ruff: noqa: E402 # disable Module level import not at top of file +from typing import Any + +from paraview.simple import ( # type: ignore[import-not-found] + AssignViewToLayout, CreateLayout, CreateView, Delete, GetLayoutByName, GetLayouts, GetViews, GetViewsInLayout, + RemoveLayout, SetActiveView, +) +from typing_extensions import Self + + +def buildNewLayoutWithPythonView() -> Any: # noqa: ANN401 + """Create a new PythonView layout.""" + # create a new layout + organization: DisplayOrganizationParaview = DisplayOrganizationParaview() + layout_names: list[ str ] = organization.getLayoutsNames() + nb_layouts: int = len( layout_names ) + layoutName: str = "Layout #" + str( nb_layouts + 1 ) + # increment layout index until the layout name is a new one + cpt: int = 1 + while layoutName in layout_names: + layoutName = "Layout #" + str( nb_layouts + cpt ) + cpt += 1 + organization.addLayout( layoutName ) + + # add a new python view to the layout + organization.addViewToLayout( "PythonView", layoutName, 0 ) + return organization.getLayoutViews()[ layoutName ][ 0 ] + + +class DisplayOrganizationParaview: + """Object to manage Paraview layouts.""" + + def __init__( self: Self ) -> None: + """Keeps track of Paraview layouts and views when created or removed.""" + self._layouts_keys: list[ Any ] = [] + self._layout_names: list[ str ] = [] + self._views_cpt: int = 0 + self._layout_views: dict[ str, Any ] = {} + 
self._views_name: dict[ str, Any ] = {} + self.initLayouts() + self.initLayoutViews() + + def initLayouts( self: Self ) -> None: + """Initialize layouts.""" + self._layouts_keys = list( GetLayouts().keys() ) + self._layouts_names = [] + for layout_tuple in self._layouts_keys: + self._layouts_names.append( layout_tuple[ 0 ] ) + + def getLayoutsKeys( self: Self ) -> list[ Any ]: + """Get layout keys. + + Returns: + list[Any]: list of layout keys. + """ + return self._layouts_keys + + def getLayoutsNames( self: Self ) -> list[ str ]: + """Get layout names. + + Returns: + list[str]: list of layout names. + """ + return self._layouts_names + + def getNumberLayouts( self: Self ) -> int: + """Get the number of layouts. + + Returns: + int: number of layouts. + """ + return len( self._layouts_keys ) + + def getViewsCpt( self: Self ) -> int: + """Get the number of views. + + Returns: + int: number of views. + """ + return self._views_cpt + + def addOneToCpt( self: Self ) -> None: + """Increment number of views.""" + self._views_cpt += 1 + + def initLayoutViews( self: Self ) -> None: + """Initialize layout views.""" + self._views_name = {} + self._layout_views = {} + all_views: list[ Any ] = GetViews() + layouts_keys: list[ Any ] = self.getLayoutsKeys() + layout_names: list[ str ] = self.getLayoutsNames() + for i in range( self.getNumberLayouts() ): + self._layout_views[ layout_names[ i ] ] = [] + views_in_layout = GetViewsInLayout( GetLayouts()[ layouts_keys[ i ] ] ) + for view in all_views: + if view in views_in_layout: + self._layout_views[ layout_names[ i ] ].append( view ) + name_view: str = "view" + str( self.getViewsCpt() ) + self._views_name[ name_view ] = view + self.addOneToCpt() + + def getLayoutViews( self: Self ) -> dict[ str, Any ]: + """Get layout views. + + Returns: + dict[Any:Any]: dictionnary of layout views. + """ + return self._layout_views + + def getViewsName( self: Self ) -> dict[ str, Any ]: + """Get view names. 
+
+        Returns:
+            list[str]: list of view names.
+        """
+        return self._views_name
+
+    def updateOrganization( self: Self ) -> None:
+        """Update layouts."""
+        self._views_cpt = 0
+        self.initLayouts()
+        self.initLayoutViews()
+
+    def addLayout( self: Self, new_layout_name: str ) -> None:
+        """Add a layout.
+
+        Args:
+            new_layout_name (str): name of the new layout.
+        """
+        if new_layout_name not in self.getLayoutsNames():
+            CreateLayout( new_layout_name )
+        else:
+            print( f'This layout name "{new_layout_name}" is already used, please pick a new one.\n' )
+        self.updateOrganization()
+
+    def removeLayout( self: Self, layout_name: str ) -> None:
+        """Remove a layout.
+
+        Args:
+            layout_name (str): name of the layout to remove.
+        """
+        if layout_name in self.getLayoutsNames():
+            RemoveLayout( GetLayoutByName( layout_name ) )
+        else:
+            print( f'This layout name "{layout_name}" does not exist.' )
+        self.updateOrganization()
+
+    def addViewToLayout( self: Self, viewType: str, layout_name: str, position: int ) -> None:
+        """Add a view to a layout.
+
+        Args:
+            viewType (str): type of view.
+            layout_name (str): name of the layout.
+            position (int): position of the view.
+        """
+        SetActiveView( None )
+        layout_to_use = GetLayoutByName( layout_name )
+        new_view = CreateView( viewType )
+        AssignViewToLayout( view=new_view, layout=layout_to_use, hint=position )
+        self.updateOrganization()
+
+    def RemoveViewFromLayout( self: Self, view_name: str, layout_name: str, position: int ) -> None:
+        """Remove a view from a layout.
+
+        Args:
+            view_name (str): name of view.
+            layout_name (str): name of the layout.
+            position (int): position of the view.
+ """ + views_name: dict[ str, Any ] = self.getViewsName() + view_to_delete = views_name[ view_name ] + SetActiveView( view_to_delete ) + Delete( view_to_delete ) + del view_to_delete + layout_to_use = GetLayoutByName( layout_name ) + layout_to_use.Collapse( position ) + self.updateOrganization() + + def SwapCellsInLayout( self: Self, layout_name: str, position1: int, position2: int ) -> None: + """Swap views in a layout. + + Args: + layout_name (str): name of the layout. + position1 (int): first position of the view. + position2 (int): second position of the view. + """ + layout_to_use = GetLayoutByName( layout_name ) + layout_to_use.SwapCells( position1, position2 ) diff --git a/geos-posp/tests/__init__.py b/geos-pv/src/geos/pv/utils/__init__.py similarity index 100% rename from geos-posp/tests/__init__.py rename to geos-pv/src/geos/pv/utils/__init__.py diff --git a/geos-pv/src/geos/pv/utils/checkboxFunction.py b/geos-pv/src/geos/pv/utils/checkboxFunction.py new file mode 100644 index 00000000..6e0a250e --- /dev/null +++ b/geos-pv/src/geos/pv/utils/checkboxFunction.py @@ -0,0 +1,22 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. +# SPDX-FileContributor: Alexandre Benedicto +# ruff: noqa +# type: ignore +def createModifiedCallback( anobject ): + """Helper for the creation and use of vtkDataArraySelection in ParaView. + + Args: + anobject: any object. 
+ """ + import weakref + + weakref_obj = weakref.ref( anobject ) + anobject = None + + def _markmodified( *args, **kwars ): + o = weakref_obj() + if o is not None: + o.Modified() + + return _markmodified diff --git a/geos-pv/src/geos/pv/utils/config.py b/geos-pv/src/geos/pv/utils/config.py new file mode 100644 index 00000000..96fae896 --- /dev/null +++ b/geos-pv/src/geos/pv/utils/config.py @@ -0,0 +1,20 @@ +import sys +from pathlib import Path + + +def update_paths() -> None: + """Update sys path to load GEOS Python packages.""" + # Add other packages path to sys path + geos_pv_path: Path = Path( __file__ ).parent.parent.parent.parent.parent + geos_python_packages_path: Path = geos_pv_path.parent + + python_modules: list[ str ] = [ "geos-pv" ] + with open( str( geos_pv_path / "requirements.txt" ) ) as f: + python_modules += f.read().splitlines() + + for m in python_modules: + if not ( geos_python_packages_path / m ).exists(): + continue + m_path: str = str( geos_python_packages_path / m / "src" ) + if m_path not in sys.path: + sys.path.insert( 0, m_path ) diff --git a/geos-pv/src/geos/pv/utils/paraviewTreatments.py b/geos-pv/src/geos/pv/utils/paraviewTreatments.py new file mode 100644 index 00000000..42258ef7 --- /dev/null +++ b/geos-pv/src/geos/pv/utils/paraviewTreatments.py @@ -0,0 +1,583 @@ +# SPDX-License-Identifier: Apache-2.0 +# SPDX-FileCopyrightText: Copyright 2023-2024 TotalEnergies. 
+# SPDX-FileContributor: Alexandre Benedicto, Martin Lemay +# ruff: noqa: E402 # disable Module level import not at top of file +from enum import Enum +from typing import Any, Union + +import numpy as np +import numpy.typing as npt +import pandas as pd # type: ignore[import-untyped] +from paraview.modules.vtkPVVTKExtensionsMisc import ( # type: ignore[import-not-found] + vtkMergeBlocks, ) +from paraview.simple import ( # type: ignore[import-not-found] + FindSource, GetActiveView, GetAnimationScene, GetDisplayProperties, GetSources, servermanager, +) +import vtkmodules.util.numpy_support as vnp +from vtkmodules.vtkCommonCore import ( + vtkDataArray, + vtkDataArraySelection, + vtkDoubleArray, + vtkPoints, +) +from vtkmodules.vtkCommonDataModel import ( + vtkCellData, + vtkCompositeDataSet, + vtkDataObject, + vtkMultiBlockDataSet, + vtkPolyData, + vtkTable, + vtkUnstructuredGrid, +) + +from geos.utils.GeosOutputsConstants import ( + ComponentNameEnum, + GeosMeshOutputsEnum, +) + +# valid sources for Python view configurator +# TODO: need to be consolidated +HARD_CODED_VALID_PVC_TYPE: set[ str ] = { "GeosLogReader", "RenameArrays" } + + +def vtkTableToDataframe( table: vtkTable ) -> pd.DataFrame: + """From a vtkTable, creates and returns a pandas dataframe. + + Args: + table (vtkTable): vtkTable object. + + Returns: + pd.DataFrame: Pandas dataframe. 
+ """ + data: list[ dict[ str, Any ] ] = [] + for rowIndex in range( table.GetNumberOfRows() ): + rowData: dict[ str, Any ] = {} + for colIndex in range( table.GetNumberOfColumns() ): + colName: str = table.GetColumnName( colIndex ) + cellValue: Any = table.GetValue( rowIndex, colIndex ) + # we have a vtkVariant value, we need a float + cellValueF: float = cellValue.ToFloat() + rowData[ colName ] = cellValueF + data.append( rowData ) + df: pd.DataFrame = pd.DataFrame( data ) + return df + + +def vtkPolyDataToPointsDataframe( polydata: vtkPolyData ) -> pd.DataFrame: + """Creates a pandas dataframe containing points data from vtkPolyData. + + Args: + polydata (vtkPolyData): vtkPolyData object. + + Returns: + pd.DataFrame: Pandas dataframe containing the points data. + """ + points: vtkPoints = polydata.GetPoints() + assert points is not None, "Points is undefined." + nbrPoints: int = points.GetNumberOfPoints() + data: dict[ str, Any ] = { + "Point ID": np.empty( nbrPoints ), + "PointsX": np.empty( nbrPoints ), + "PointsY": np.empty( nbrPoints ), + "PointsZ": np.empty( nbrPoints ), + } + for pointID in range( nbrPoints ): + point: tuple[ float, float, float ] = points.GetPoint( pointID ) + data[ "Point ID" ][ pointID ] = pointID + data[ "PointsX" ][ pointID ] = point[ 0 ] + data[ "PointsY" ][ pointID ] = point[ 1 ] + data[ "PointsZ" ][ pointID ] = point[ 2 ] + pointData = polydata.GetPointData() + nbrArrays: int = pointData.GetNumberOfArrays() + for i in range( nbrArrays ): + arrayToUse = pointData.GetArray( i ) + arrayName: str = pointData.GetArrayName( i ) + subArrayNames: list[ str ] = findSubArrayNames( arrayToUse, arrayName ) + # Collect the data for each sub array + for ind, name in enumerate( subArrayNames ): + data[ name ] = np.empty( nbrPoints ) + for k in range( nbrPoints ): + # Every element of the tuple correspond to one distinct + # sub array so we only need one value at a time + value: float = arrayToUse.GetTuple( k )[ ind ] + data[ name ][ k ] = value + 
df: pd.DataFrame = pd.DataFrame( data ).set_index( "Point ID" ) + return df + + +def vtkUnstructuredGridCellsToDataframe( grid: vtkUnstructuredGrid ) -> pd.DataFrame: + """Creates a pandas dataframe containing points data from vtkUnstructuredGrid. + + Args: + grid (vtkUnstructuredGrid): vtkUnstructuredGrid object. + + Returns: + pd.DataFrame: Pandas dataframe. + """ + cellIdAttributeName = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName + cellData = grid.GetCellData() + numberCells: int = grid.GetNumberOfCells() + data: dict[ str, Any ] = {} + for i in range( cellData.GetNumberOfArrays() ): + arrayToUse = cellData.GetArray( i ) + arrayName: str = cellData.GetArrayName( i ) + subArrayNames: list[ str ] = findSubArrayNames( arrayToUse, arrayName ) + # Collect the data for each sub array + for ind, name in enumerate( subArrayNames ): + data[ name ] = np.empty( numberCells ) + for k in range( numberCells ): + # Every element of the tuple correspond to one distinct + # sub array so we only need one value at a time + value: float = arrayToUse.GetTuple( k )[ ind ] + data[ name ][ k ] = value + df: pd.DataFrame = pd.DataFrame( data ).astype( { cellIdAttributeName: int } ) + + # set cell ids as index + + # df = df.astype({cellIdAttributeName: int}) + return df.set_index( cellIdAttributeName ) + + +def vtkToDataframe( dataset: vtkDataObject ) -> pd.DataFrame: + """Creates a dataframe containing points data from vtkTable or vtkPolyData. + + Args: + dataset (Any): dataset to convert if possible. + + Returns: + pd.DataFrame: if the dataset is in the right format. + """ + if isinstance( dataset, vtkTable ): + return vtkTableToDataframe( dataset ) + elif isinstance( dataset, vtkPolyData ): + return vtkPolyDataToPointsDataframe( dataset ) + elif isinstance( dataset, vtkUnstructuredGrid ): + return vtkUnstructuredGridCellsToDataframe( dataset ) + else: + raise AssertionError( f"Invalid dataset format {type(dataset)}. 
" + + "Supported formats are: vtkTable, vtkpolyData and vtkUnstructuredGrid" ) + + +def findSubArrayNames( vtkArray: vtkDataArray, arrayName: str ) -> list[ str ]: + """Get sub array names from multi array attributes. + + Because arrays in ParaView can be of multiple dimensions, + it can be difficult to convert these arrays to numpy arrays. + Therefore, we can split the original array into multiple sub + one dimensional arrays. In that case, new sub names need to be + derived from the original array to be used. + + Args: + vtkArray (vtkDataArray): Array from vtk library. + arrayName (str): Name of the array. + + Returns: + list[str]: Sub array names from original array name. + """ + # The ordering of six elements can seem odd but is adapted to + # Geos output format of stress as : + # sigma11, sigma22, sigma33, sigma23, sigma13, sigma12 + sixComponents: tuple[ str, str, str, str, str, str ] = ComponentNameEnum.XYZ.value + nbrComponents: int = vtkArray.GetNumberOfComponents() + subArrayNames: list[ str ] = [] + if nbrComponents == 1: + subArrayNames.append( arrayName ) + elif nbrComponents < 6: + for j in range( nbrComponents ): + subArrayNames.append( arrayName + "_" + sixComponents[ j ] ) + else: + for j in range( nbrComponents ): + subArrayNames.append( arrayName + "_" + str( j ) ) + return subArrayNames + + +def getDataframesFromMultipleVTKSources( sourceNames: set[ str ], commonColumn: str ) -> list[ pd.DataFrame ]: + """Creates the dataframe from each source if they have the commonColumn. + + Args: + sourceNames (set[str]): list of sources. + commonColumn (str): common column name. + + Returns: + list[pd.DataFrame]: output dataframe. + """ + # indexSource: int = commonColumn.rfind("__") + # commonColumnNoSource: str = commonColumn[:indexSource] + validDataframes: list[ pd.DataFrame ] = [] + for name in sourceNames: + source = FindSource( name ) + assert source is not None, "Source is undefined." 
+ dataset = servermanager.Fetch( source ) + assert dataset is not None, "Dataset is undefined." + currentDF: pd.DataFrame = vtkToDataframe( dataset ) + if commonColumn in currentDF.columns: + dfModified = currentDF.rename( + columns={ col: col + "__" + name + for col in currentDF.columns if col != commonColumn } ) + validDataframes.append( dfModified ) + else: + print( f"The source <<{name}>> could not be used" + " to plot because the variable named <<" + + f"{commonColumn}>> could not be found." ) + return validDataframes + + +def mergeDataframes( dataframes: list[ pd.DataFrame ], commonColumn: str ) -> pd.DataFrame: + """Merge all dataframes into a single one by using the common column. + + Args: + dataframes (list[pd.DataFrame]): List of dataframes from + getDataframesFromMultipleVTKSources. + commonColumn (str): Name of the only common column between + all of the dataframes. + + Returns: + pd.DataFrame: Merged dataframes into a single one by 'outer' + on the commonColumn. + """ + assert len( dataframes ) > 0 + if len( dataframes ) == 1: + return dataframes[ 0 ] + else: + df0: pd.DataFrame = dataframes[ 0 ] + df1: pd.DataFrame = dataframes[ 1 ] + merged: pd.DataFrame = df0.merge( df1, on=commonColumn, how="outer" ) + if len( dataframes ) > 2: + for df in dataframes[ 2: ]: + merged = merged.merge( df, on=commonColumn, how="outer" ) + return merged + + +def addDataframeColumnsToVtkPolyData( polyData: vtkPolyData, df: pd.DataFrame ) -> vtkPolyData: + """Add columns from a dataframe to a vtkPolyData. + + Args: + polyData (vtkPolyData): vtkPolyData before modifcation. + df (pd.DataFrame): Pandas dataframe. + + Returns: + vtkPolyData: vtkPolyData with new arrays. 
+ """ + for column_name in df.columns: + column = df[ column_name ].values + array = vtkDoubleArray() + array.SetName( column_name ) + array.SetNumberOfValues( polyData.GetNumberOfPoints() ) + for i in range( polyData.GetNumberOfPoints() ): + array.SetValue( i, column[ i ] ) + polyData.GetPointData().AddArray( array ) + + # Update vtkPolyData object + polyData.GetPointData().Modified() + polyData.Modified() + return polyData + + +# Functions to help the processing of PythonViewConfigurator + + +def getPossibleSourceNames() -> set[ str ]: + """Get the list of valid source names for PythonViewConfigurator. + + In PythonViewConfigurator, multiple sources can be considered as + valid inputs. We want the user to know the names of every of these + sources that can be used to plot data. This function therefore identifies + which source names are valid to be used later as sources. + + Returns: + set[str]: Source names in the paraview pipeline. + """ + # get all sources different from PythonViewConfigurator + validNames: set[ str ] = set() + for k in GetSources(): + sourceName: str = k[ 0 ] + source = FindSource( sourceName ) + if ( source is not None ) and ( "PythonViewConfigurator" not in source.__str__() ): + dataset = servermanager.Fetch( source ) + if dataset.IsA( "vtkPolyData" ) or dataset.IsA( "vtkTable" ): + validNames.add( sourceName ) + return validNames + + +def usefulSourceNamesPipeline() -> set[ str ]: + """Get the list of valid pipelines for PythonViewConfigurator. + + When using the PythonViewConfigurator, we want to check if the sources + in the ParaView pipeline are compatible with what the filter can take as + input. So this function scans every sources of the pipeline and if it + corresponds to one of the hardcoded valid types, we keep the name. 
+ They are right now : ["GeosLogReader", "RenameArrays"] + + Returns: + set[str]: [sourceName1, ..., sourceNameN] + """ + usefulSourceNames: set[ str ] = set() + allSourceNames: set[ str ] = { n[ 0 ] for n, s in GetSources().items() } + for name in allSourceNames: + source = FindSource( name ) + if type( source ).__name__ in HARD_CODED_VALID_PVC_TYPE: + usefulSourceNames.add( name ) + return usefulSourceNames + + +def getDatasFromSources( sourceNames: set[ str ] ) -> dict[ str, pd.DataFrame ]: + """Get the data from input sources. + + Args: + sourceNames (set[str]): [sourceName1, ..., sourceNameN] + + Returns: + dict[[str, pd.DataFrame]]: dictionary where source names are keys and + dataframe are values. + { sourceName1: servermanager.Fetch(FindSource(sourceName1)), + ... + sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } + """ + usefulDatas: dict[ str, Any ] = {} + for name in sourceNames: + dataset = servermanager.Fetch( FindSource( name ) ) + usefulDatas[ name ] = dataset + return usefulDatas + + +def usefulVisibleDatasPipeline() -> dict[ str, Any ]: + """Get the list of visible pipelines. + + When using the PythonViewConfigurator, we want to collect the data of + each source that is visible in the paraview pipeline and that is + compatible as input data for the filter. Therefore, only certain types of + sources will be considered as valid. They are right now : + ["GeosLogReader", "RenameArrays"] + + Finally, if the sources are visible and valid, we access their data and + return the names of the source and their respective data. + + Returns: + dict[str, 'vtkInformation']: dictionary of source names and data from + pipeline. + { sourceName1: servermanager.Fetch(FindSource(sourceName1)), + ... 
+ sourceNameN: servermanager.Fetch(FindSource(sourceNameN)) } + """ + usefulDatas: dict[ str, Any ] = {} + sourceNamesVisible: set[ str ] = set() + for n, s in GetSources().items(): + if servermanager.GetRepresentation( s, GetActiveView() ) is not None: + displayProperties = GetDisplayProperties( s, view=GetActiveView() ) + if ( displayProperties is not None ) and ( displayProperties.Visibility == 1 ): + sourceNamesVisible.add( n[ 0 ] ) + + for name in sourceNamesVisible: + source = FindSource( name ) + if type( source ).__name__ in HARD_CODED_VALID_PVC_TYPE: + usefulDatas[ name ] = servermanager.Fetch( FindSource( name ) ) + return usefulDatas + + +def isFilter( sourceName: str ) -> bool: + """Identify if a source name can link to a filter in the ParaView pipeline. + + Args: + sourceName (str): name of a source object in the pipeline + + Returns: + bool: True if filter, False instead. + """ + source: Any = FindSource( sourceName ) + if source is None: + print( f"sourceName <<{sourceName}>> does not exist in the pipeline" ) + return False + else: + try: + test: Any = source.GetClientSideObject().GetInputAlgorithm() # noqa: F841 + return True + except Exception: + return False + + +def getFilterInput( sourceName: str ) -> vtkDataObject: + """Access the vtk dataset that is used as input for a filter. + + Args: + sourceName (str): name of a source object in the pipeline. + + Returns: + Any: The vtk dataset that serves as input for the filter. + """ + filtre = FindSource( sourceName ) + assert filtre is not None, "Source is undefined." + clientSideObject = filtre.GetClientSideObject() + assert clientSideObject is not None, "Client Side Object is undefined." + inputAlgo = clientSideObject.GetInputAlgorithm() + assert inputAlgo is not None, "Input Algorithm is undefined." 
+ inputValues = inputAlgo.GetInput() + if isinstance( inputValues, vtkDataObject ): + return inputValues + return vtkDataObject() + + +def getArrayChoices( array: vtkDataArraySelection ) -> list[ str ]: + """Extracts the column names of input array when they are enabled. + + Args: + array (vtkDataArraySelection): input data + + Returns: + set[str]: [columnName1, ..., columnNameN] + """ + checkedColumns: list[ str ] = [] + for i in range( array.GetNumberOfArrays() ): + columnName: str = array.GetArrayName( i ) + if array.ArrayIsEnabled( columnName ): + checkedColumns.append( columnName ) + return checkedColumns + + +def integrateSourceNames( sourceNames: set[ str ], arrayChoices: set[ str ] ) -> set[ str ]: + """Aggregate source and arrayChoices names. + + When creating the user choices in PythonViewConfigurator, you need + to take into account both the source names and the choices of curves + to have user choices corresponding to the column names of the dataframe + with the data to be plot. + + Args: + sourceNames (set[str]): Name of sources found in ParaView pipeline. + arrayChoices (set[str]): Column names of the vtkdataarrayselection. + + Returns: + set[str]: [sourceName1__choice1, sourceName1__choice2, + ..., sourceNameN__choiceN] + """ + completeNames: set[ str ] = set() + for sourceName in sourceNames: + for choice in arrayChoices: + completeName: str = choice + "__" + sourceName + completeNames.add( completeName ) + return completeNames + + +def getVtkOriginalCellIds( mesh: Union[ vtkMultiBlockDataSet, vtkCompositeDataSet, vtkDataObject ] ) -> list[ str ]: + """Get vtkOriginalCellIds from a vtkUnstructuredGrid object. + + Args: + mesh (vtkMultiBlockDataSet|vtkCompositeDataSet|vtkDataObject): input mesh. + + Returns: + list[str]: ids of the cells. 
+ """ + # merge blocks for vtkCompositeDataSet + mesh2: vtkUnstructuredGrid = mergeFilterPV( mesh ) + attributeName: str = GeosMeshOutputsEnum.VTK_ORIGINAL_CELL_ID.attributeName + data: vtkCellData = mesh2.GetCellData() + assert data is not None, "Cell Data are undefined." + assert bool( data.HasArray( attributeName ) ), f"Attribute {attributeName} is not in the mesh" + + array: vtkDoubleArray = data.GetArray( attributeName ) + nparray: npt.NDArray[ np.float64 ] = vnp.vtk_to_numpy( array ) # type: ignore[no-untyped-call] + return [ str( int( ide ) ) for ide in nparray ] + + +def strEnumToEnumerationDomainXml( enumObj: Enum ) -> str: + """Creates an enumeration domain from an Enum objec. + + Creates an enumeration domain from an Enum objec + for the dropdown widgets of paraview plugin. + + Args: + enumObj (Enum): Enumeration values to put in the dropdown widget. + + Returns: + str: the XML string. + """ + xml: str = """""" + for i, unitObj in enumerate( list( enumObj ) ): # type: ignore[call-overload] + xml += f"""""" + xml += """""" + return xml + + +def strListToEnumerationDomainXml( properties: Union[ list[ str ], set[ str ] ] ) -> str: + """Creates an enumeration domain from a list of strings. + + Creates an enumeration domain from a list of strings + for the dropdown widgets of paraview plugin. + + Args: + properties (set[str] | list[str]): Properties to put in the dropdown widget. + + Returns: + str: the XML string. + """ + xml: str = """""" + for i, prop in enumerate( list( properties ) ): + xml += f"""""" + xml += """""" + return xml + + +def dataframeForEachTimestep( sourceName: str ) -> dict[ str, pd.DataFrame ]: + """Get the data from source at each time step. + + In ParaView, a source object can contain data for multiple + timesteps. If so, knowing the source name, we can access its data + for each timestep and store it in a dict where the keys are the + timesteps and the values the data at each one of them. 
+ + Args: + sourceName (str): Name of the source in ParaView pipeline. + + Returns: + dict[str, pd.DataFrame]: dictionary where time is the key and dataframe + is the value. + """ + animationScene = GetAnimationScene() + assert animationScene is not None, "animationScene is undefined." + # we set the animation to the initial timestep + animationScene.GoToFirst() + source = FindSource( sourceName ) + dataset: vtkDataObject = servermanager.Fetch( source ) + assert dataset is not None, "Dataset is undefined." + dataset2: vtkUnstructuredGrid = mergeFilterPV( dataset ) + time: str = str( animationScene.TimeKeeper.Time ) + dfPerTimestep: dict[ str, pd.DataFrame ] = { time: vtkToDataframe( dataset2 ) } + # then we iterate on the other timesteps of the source + for _ in range( animationScene.NumberOfFrames ): # type: ignore + animationScene.GoToNext() + source = FindSource( sourceName ) + dataset = servermanager.Fetch( source ) + dataset2 = mergeFilterPV( dataset ) + time = str( animationScene.TimeKeeper.Time ) + dfPerTimestep[ time ] = vtkToDataframe( dataset2 ) + return dfPerTimestep + + +def getTimeStepIndex( time: float, timeSteps: npt.NDArray[ np.float64 ] ) -> int: + """Get the time step index of input time from the list of time steps. + + Args: + time (float): time + timeSteps (npt.NDArray[np.float64]): Array of time steps + + Returns: + int: time step index + """ + indexes: npt.NDArray[ np.int64 ] = np.where( np.isclose( timeSteps, time ) )[ 0 ] + assert ( indexes.size > 0 ), f"Current time {time} does not exist in the selected object." + return int( indexes[ 0 ] ) + + +def mergeFilterPV( input: vtkDataObject, ) -> vtkUnstructuredGrid: + """Apply Paraview merge block filter. 
+ + Args: + input (vtkMultiBlockDataSet | vtkCompositeDataSet | vtkDataObject): composite + object to merge blocks + + Returns: + vtkUnstructuredGrid: merged block object + + """ + mergeFilter: vtkMergeBlocks = vtkMergeBlocks() + mergeFilter.SetInputData( input ) + mergeFilter.Update() + return mergeFilter.GetOutputDataObject( 0 ) diff --git a/geos-posp/src/PVplugins/PVGeosLogReader.py b/geos-pv/src/readers/PVGeosLogReader.py similarity index 95% rename from geos-posp/src/PVplugins/PVGeosLogReader.py rename to geos-pv/src/readers/PVGeosLogReader.py index c218659b..75163b6b 100644 --- a/geos-posp/src/PVplugins/PVGeosLogReader.py +++ b/geos-pv/src/readers/PVGeosLogReader.py @@ -4,6 +4,7 @@ # ruff: noqa: E402 # disable Module level import not at top of file import os import sys +from pathlib import Path from enum import Enum from typing import Union, cast @@ -12,12 +13,35 @@ import pandas as pd # type: ignore[import-untyped] from typing_extensions import Self -dir_path = os.path.dirname( os.path.realpath( __file__ ) ) -parent_dir_path = os.path.dirname( dir_path ) -if parent_dir_path not in sys.path: - sys.path.append( parent_dir_path ) +# update sys.path to load all GEOS Python Package dependencies +geos_pv_path: Path = Path( __file__ ).parent.parent.parent +sys.path.insert( 0, str( geos_pv_path / "src" ) ) +from geos.pv.utils.config import update_paths + +update_paths() import vtkmodules.util.numpy_support as vnp +from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] + VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, +) +from vtk import VTK_DOUBLE # type: ignore[import-untyped] +from vtkmodules.vtkCommonCore import vtkDataArraySelection as vtkDAS +from vtkmodules.vtkCommonCore import ( + vtkDoubleArray, + vtkInformation, + vtkInformationVector, +) +from vtkmodules.vtkCommonDataModel import vtkTable + +from geos.pv.geosLogReaderUtils.geosLogReaderFunctions import ( + identifyProperties, + transformUserChoiceToListPhases, +) + +from 
geos.pv.geosLogReaderUtils.GeosLogReaderAquifers import GeosLogReaderAquifers +from geos.pv.geosLogReaderUtils.GeosLogReaderConvergence import GeosLogReaderConvergence +from geos.pv.geosLogReaderUtils.GeosLogReaderFlow import GeosLogReaderFlow +from geos.pv.geosLogReaderUtils.GeosLogReaderWells import GeosLogReaderWells from geos.utils.enumUnits import ( Mass, MassRate, @@ -28,30 +52,12 @@ VolumetricRate, enumerationDomainUnit, ) + from geos.utils.UnitRepository import UnitRepository -from geos_posp.processing.geosLogReaderFunctions import ( - identifyProperties, - transformUserChoiceToListPhases, -) -from geos_posp.readers.GeosLogReaderAquifers import GeosLogReaderAquifers -from geos_posp.readers.GeosLogReaderConvergence import GeosLogReaderConvergence -from geos_posp.readers.GeosLogReaderFlow import GeosLogReaderFlow -from geos_posp.readers.GeosLogReaderWells import GeosLogReaderWells -from geos_posp.visu.PVUtils.checkboxFunction import ( # type: ignore[attr-defined] +from geos.pv.utils.checkboxFunction import ( # type: ignore[attr-defined] createModifiedCallback, ) -from geos_posp.visu.PVUtils.paraviewTreatments import ( +from geos.pv.utils.paraviewTreatments import ( strListToEnumerationDomainXml, ) -from paraview.util.vtkAlgorithm import ( # type: ignore[import-not-found] - VTKPythonAlgorithmBase, smdomain, smhint, smproperty, smproxy, -) -from vtk import VTK_DOUBLE # type: ignore[import-untyped] -from vtkmodules.vtkCommonCore import vtkDataArraySelection as vtkDAS -from vtkmodules.vtkCommonCore import ( - vtkDoubleArray, - vtkInformation, - vtkInformationVector, -) -from vtkmodules.vtkCommonDataModel import vtkTable __doc__ = """ PVGeosLogReader is a Paraview plugin that allows to read Geos output log. @@ -193,7 +199,7 @@ def a02SetPhaseNames( self: Self, value: str ) -> None: """Set phase names. Args: - value (str): list of phase names seprated by space. + value (str): list of phase names separated by space. 
""" self.m_phasesUserChoice = transformUserChoiceToListPhases( value ) self.Modified() @@ -227,9 +233,9 @@ def getDataframeChoice( self: Self ) -> int: Returns: int: The value corresponding to a certain dataframe. - "Flow" has value "0", "Wells" has value "1", - "Aquifers" has value "2", "Convergence" has - value "3". + "Flow" has value "0", "Wells" has value "1", + "Aquifers" has value "2", "Convergence" has + value "3". """ return self.m_dataframeChoice diff --git a/geos-posp/tests/Data/depleted_gas_reservoir_newwell_report.out b/geos-pv/tests/Data/depleted_gas_reservoir_newwell_report.out similarity index 100% rename from geos-posp/tests/Data/depleted_gas_reservoir_newwell_report.out rename to geos-pv/tests/Data/depleted_gas_reservoir_newwell_report.out diff --git a/geos-posp/tests/Data/empty.txt b/geos-pv/tests/Data/empty.txt similarity index 100% rename from geos-posp/tests/Data/empty.txt rename to geos-pv/tests/Data/empty.txt diff --git a/geos-posp/tests/Data/job_GEOS_246861.out b/geos-pv/tests/Data/job_GEOS_246861.out similarity index 100% rename from geos-posp/tests/Data/job_GEOS_246861.out rename to geos-pv/tests/Data/job_GEOS_246861.out diff --git a/geos-posp/tests/Data/job_GEOS_825200.out b/geos-pv/tests/Data/job_GEOS_825200.out similarity index 100% rename from geos-posp/tests/Data/job_GEOS_825200.out rename to geos-pv/tests/Data/job_GEOS_825200.out diff --git a/geos-posp/tests/Data/job_GEOS_891567.out b/geos-pv/tests/Data/job_GEOS_891567.out similarity index 100% rename from geos-posp/tests/Data/job_GEOS_891567.out rename to geos-pv/tests/Data/job_GEOS_891567.out diff --git a/geos-posp/tests/Data/job_GEOS_935933.out b/geos-pv/tests/Data/job_GEOS_935933.out similarity index 100% rename from geos-posp/tests/Data/job_GEOS_935933.out rename to geos-pv/tests/Data/job_GEOS_935933.out diff --git a/geos-posp/tests/Data/small_job_GEOS_246861.out b/geos-pv/tests/Data/small_job_GEOS_246861.out similarity index 100% rename from 
geos-posp/tests/Data/small_job_GEOS_246861.out rename to geos-pv/tests/Data/small_job_GEOS_246861.out diff --git a/geos-posp/tests/Data/small_job_GEOS_642571.out b/geos-pv/tests/Data/small_job_GEOS_642571.out similarity index 100% rename from geos-posp/tests/Data/small_job_GEOS_642571.out rename to geos-pv/tests/Data/small_job_GEOS_642571.out diff --git a/geos-posp/tests/Data/small_job_GEOS_825200.out b/geos-pv/tests/Data/small_job_GEOS_825200.out similarity index 100% rename from geos-posp/tests/Data/small_job_GEOS_825200.out rename to geos-pv/tests/Data/small_job_GEOS_825200.out diff --git a/geos-posp/tests/Data/small_job_GEOS_825200_wells.out b/geos-pv/tests/Data/small_job_GEOS_825200_wells.out similarity index 100% rename from geos-posp/tests/Data/small_job_GEOS_825200_wells.out rename to geos-pv/tests/Data/small_job_GEOS_825200_wells.out diff --git a/geos-posp/tests/Data/small_job_GEOS_891567.out b/geos-pv/tests/Data/small_job_GEOS_891567.out similarity index 100% rename from geos-posp/tests/Data/small_job_GEOS_891567.out rename to geos-pv/tests/Data/small_job_GEOS_891567.out diff --git a/geos-posp/tests/testsFunctionsGeosLogReader.py b/geos-pv/tests/testsFunctionsGeosLogReader.py similarity index 97% rename from geos-posp/tests/testsFunctionsGeosLogReader.py rename to geos-pv/tests/testsFunctionsGeosLogReader.py index d68b82a9..24f9f8d2 100644 --- a/geos-posp/tests/testsFunctionsGeosLogReader.py +++ b/geos-pv/tests/testsFunctionsGeosLogReader.py @@ -15,8 +15,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -from geos.utils.enumUnits import Unit, getSIUnits from geos_posp.processing import geosLogReaderFunctions as utils +from geos.utils.enumUnits import Unit, getSIUnits class TestsFunctionsGeosLogReader( unittest.TestCase ): diff --git a/geos-posp/tests/testsGeosLogReaderConvergence.py b/geos-pv/tests/testsGeosLogReaderConvergence.py similarity index 97% rename from geos-posp/tests/testsGeosLogReaderConvergence.py rename to 
geos-pv/tests/testsGeosLogReaderConvergence.py index 75981264..1c2e2897 100644 --- a/geos-posp/tests/testsGeosLogReaderConvergence.py +++ b/geos-pv/tests/testsGeosLogReaderConvergence.py @@ -14,8 +14,8 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -from geos.utils.UnitRepository import Unit, UnitRepository from geos_posp.readers.GeosLogReaderConvergence import GeosLogReaderConvergence +from geos.utils.UnitRepository import Unit, UnitRepository unitsObjSI: UnitRepository = UnitRepository() conversionFactors: dict[ str, Unit ] = unitsObjSI.getPropertiesUnit() @@ -59,7 +59,7 @@ def test1_readAllSimulation( self: Self ) -> None: [ 0.0, 8600.0, 25724.3 ], [ 8600.0, 17124.3, 34165.3 ], ] - for column_name, value in zip( columns_name, values, strict=False ): + for column_name, value in zip( columns_name, values ): expectedDF[ column_name ] = value obtainedDF: pd.DataFrame = obj.createDataframe() self.assertEqual( list( obtainedDF.columns ), columns_name ) diff --git a/geos-posp/tests/testsGeosLogReaderFlow.py b/geos-pv/tests/testsGeosLogReaderFlow.py similarity index 100% rename from geos-posp/tests/testsGeosLogReaderFlow.py rename to geos-pv/tests/testsGeosLogReaderFlow.py diff --git a/geos-posp/tests/testsGeosLogReaderWells.py b/geos-pv/tests/testsGeosLogReaderWells.py similarity index 98% rename from geos-posp/tests/testsGeosLogReaderWells.py rename to geos-pv/tests/testsGeosLogReaderWells.py index cc1920de..ee2b21d7 100644 --- a/geos-posp/tests/testsGeosLogReaderWells.py +++ b/geos-pv/tests/testsGeosLogReaderWells.py @@ -16,8 +16,9 @@ sys.path.append( parent_dir_path ) import pandas as pd # type: ignore[import-untyped] -from geos.utils.UnitRepository import Unit, UnitRepository + from geos_posp.readers.GeosLogReaderWells import GeosLogReaderWells +from geos.utils.UnitRepository import Unit, UnitRepository unitsObjSI = UnitRepository() conversionFactors: dict[ str, Unit ] = unitsObjSI.getPropertiesUnit() @@ -131,7 +132,7 @@ def 
test1_readAllSimulation2( self: Self ) -> None: [ 0.00027756801176732497, 0.00027731846270264457 ], [ 0.0, 3.1536e07 ], ] - for column_name, value in zip( columns_name, values, strict=False ): + for column_name, value in zip( columns_name, values ): expectedDF[ column_name ] = value obtainedDF: pd.DataFrame = obj.createDataframe() self.assertEqual( list( obtainedDF.columns ), columns_name ) @@ -206,7 +207,7 @@ def test3_readAllSimulation4( self: Self ) -> None: [ 1.2681312543855673e-17, 1.2681312543888312e-17 ], [ 0.0, 100.0 ], ] - for column_name, value in zip( columns_name, values, strict=False ): + for column_name, value in zip( columns_name, values ): expectedDF[ column_name ] = value obtainedDF: pd.DataFrame = obj.createDataframe() self.assertEqual( list( obtainedDF.columns ), columns_name ) diff --git a/geos-posp/tests/testsInvalidLogs.py b/geos-pv/tests/testsInvalidLogs.py similarity index 97% rename from geos-posp/tests/testsInvalidLogs.py rename to geos-pv/tests/testsInvalidLogs.py index 9d707975..fb9cce2c 100644 --- a/geos-posp/tests/testsInvalidLogs.py +++ b/geos-pv/tests/testsInvalidLogs.py @@ -15,11 +15,11 @@ if parent_dir_path not in sys.path: sys.path.append( parent_dir_path ) -from geos.utils.UnitRepository import Unit, UnitRepository from geos_posp.readers.GeosLogReaderAquifers import GeosLogReaderAquifers from geos_posp.readers.GeosLogReaderConvergence import GeosLogReaderConvergence from geos_posp.readers.GeosLogReaderFlow import GeosLogReaderFlow from geos_posp.readers.GeosLogReaderWells import GeosLogReaderWells +from geos.utils.UnitRepository import Unit, UnitRepository unitsObjSI: UnitRepository = UnitRepository() conversionFactors: dict[ str, Unit ] = unitsObjSI.getPropertiesUnit() diff --git a/geos-utils/pyproject.toml b/geos-utils/pyproject.toml index b655fd74..4a3cc054 100644 --- a/geos-utils/pyproject.toml +++ b/geos-utils/pyproject.toml @@ -1,26 +1,32 @@ [build-system] -requires = ["setuptools>=61.2"] +requires = ["setuptools>=61.2", 
"wheel >= 0.37.1"] build-backend = "setuptools.build_meta" +[tool.setuptools] +include-package-data = true + +[tool.setuptools.packages.find] +where = ["src"] +include = ["geos.utils*"] +exclude = ['tests*'] + [project] name = "geos-utils" version = "0.1.0" description = "geos-utils is a Python package that gathers utilities common to all GEOS python packages." authors = [{name = "GEOS Contributors" }] -maintainers = [ - {name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"} -] +maintainers = [{name = "Martin Lemay", email = "martin.lemay@external.totalenergies.com"}] license = {text = "Apache-2.0"} classifiers = [ "Development Status :: 4 - Beta", "Programming Language :: Python" ] -requires-python = ">=3.9" +requires-python = ">=3.10" dependencies = [ - "typing_extensions", - "numpy", + "numpy >= 2.2", + "typing_extensions >= 4.12", ] [project.optional-dependencies] @@ -28,8 +34,8 @@ build = [ "build ~= 1.2" ] dev = [ - "yapf", "mypy", + "yapf", ] test = [ "pytest",