From 81f4bae73b0eceb8557200dc502cde68352b9332 Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Thu, 3 Feb 2022 17:10:14 -0500
Subject: [PATCH 1/8] Obtain path of conda from environment

Enables sphinx to execute "conf.py" on Windows.
Requires conda >= 4.5 for conda PR 6923.
---
 doc/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/conf.py b/doc/conf.py
index 93174c6aaec..d41d8aa4987 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -30,7 +30,7 @@

 if "conda" in sys.executable:
     print("conda environment:")
-    subprocess.run(["conda", "list"])
+    subprocess.run([os.environ["CONDA_EXE"], "list"])
 else:
     print("pip environment:")
     subprocess.run([sys.executable, "-m", "pip", "list"])

From f514806ded3226c68396e5aebdf9a6c775674e38 Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Thu, 3 Feb 2022 17:44:45 -0500
Subject: [PATCH 2/8] Ignore more temp doc files and deletion errors

Without the "okexcept" option, builds on Windows may fail at an
`os.remove` call with messages such as, "PermissionError: [WinError 32]
The process cannot access the file because it is being used by another
process: 'example.nc'".
---
 .gitignore                                   | 5 +++++
 doc/getting-started-guide/quick-overview.rst | 1 +
 doc/user-guide/dask.rst                      | 1 +
 doc/user-guide/io.rst                        | 2 ++
 doc/user-guide/weather-climate.rst           | 1 +
 5 files changed, 10 insertions(+)

diff --git a/.gitignore b/.gitignore
index 90f4a10ed5f..00847878191 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,7 +6,12 @@ __pycache__

 # temp files from docs build
 doc/auto_gallery
+doc/complex.nc
+doc/example-data.nc
+doc/example-no-leap.nc
 doc/example.nc
+doc/manipulated-example-data.nc
+doc/saved_on_disk.nc
 doc/savefig

 # C extensions
diff --git a/doc/getting-started-guide/quick-overview.rst b/doc/getting-started-guide/quick-overview.rst
index 5bb5bb88ad3..723b92899ce 100644
--- a/doc/getting-started-guide/quick-overview.rst
+++ b/doc/getting-started-guide/quick-overview.rst
@@ -218,6 +218,7 @@ You can directly read and write xarray objects to disk using :py:meth:`~xarray.D
     xr.open_dataset("example.nc")

 .. ipython:: python
+    :okexcept:
     :suppress:

     import os
diff --git a/doc/user-guide/dask.rst b/doc/user-guide/dask.rst
index 4998cc68828..7737d82fb7d 100644
--- a/doc/user-guide/dask.rst
+++ b/doc/user-guide/dask.rst
@@ -148,6 +148,7 @@ A dataset can also be converted to a Dask DataFrame using :py:meth:`~xarray.Data
 Dask DataFrames do not support multi-indexes so the coordinate variables from the dataset are included as columns in the Dask DataFrame.

 .. ipython:: python
+    :okexcept:
     :suppress:

     import os
diff --git a/doc/user-guide/io.rst b/doc/user-guide/io.rst
index 16b8708231e..18793fa71ce 100644
--- a/doc/user-guide/io.rst
+++ b/doc/user-guide/io.rst
@@ -487,6 +487,7 @@ and currently raises a warning unless ``invalid_netcdf=True`` is set:
     xr.open_dataarray("complex.nc", engine="h5netcdf")

 .. ipython:: python
+    :okexcept:
     :suppress:

     import os
@@ -727,6 +728,7 @@ This can be useful for generating indices of dataset contents to expose to
 search indices or other automated data discovery tools.

 .. ipython:: python
+    :okexcept:
     :suppress:

     import os
diff --git a/doc/user-guide/weather-climate.rst b/doc/user-guide/weather-climate.rst
index 893e7b50429..2f9d15e54d2 100644
--- a/doc/user-guide/weather-climate.rst
+++ b/doc/user-guide/weather-climate.rst
@@ -221,6 +221,7 @@ For data indexed by a :py:class:`~xarray.CFTimeIndex` xarray currently supports:
     xr.open_dataset("example-no-leap.nc")

 .. ipython:: python
+    :okexcept:
     :suppress:

     import os

From 70d8b92b7f5ed2be46feed86fcb3748d1dd884d8 Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Fri, 4 Feb 2022 14:20:43 -0500
Subject: [PATCH 3/8] Add whats-new entry

---
 doc/whats-new.rst | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 93271cca3e4..897b8e92bbb 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -38,6 +38,8 @@ Bug fixes

 Documentation
 ~~~~~~~~~~~~~
+- Enable building the documentation on Windows via `sphinx-build` (:pull:`6237`).
+  By `Stan West `_.

 Internal Changes
 ~~~~~~~~~~~~~~~~

From 9d01612090950014461d0c0f4d67343b41db5a68 Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Fri, 4 Feb 2022 14:22:51 -0500
Subject: [PATCH 4/8] Execute just "conda" if the environment lacks its path

---
 doc/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/conf.py b/doc/conf.py
index d41d8aa4987..1b3ae96ee4b 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -30,7 +30,7 @@

 if "conda" in sys.executable:
     print("conda environment:")
-    subprocess.run([os.environ["CONDA_EXE"], "list"])
+    subprocess.run([os.environ.get("CONDA_EXE", "conda"), "list"])
 else:
     print("pip environment:")
     subprocess.run([sys.executable, "-m", "pip", "list"])

From 6dd16f4b1eade5725d929d46ed12e09bf31b942d Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Wed, 9 Feb 2022 15:52:45 -0500
Subject: [PATCH 5/8] Ignore temporary NetCDF files with pattern

Also, move the name of the temporary Zarr file to the same section.
---
 .gitignore | 9 ++-------
 1 file changed, 2 insertions(+), 7 deletions(-)

diff --git a/.gitignore b/.gitignore
index 00847878191..686c7efa701 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,13 +5,9 @@ __pycache__
 .hypothesis/

 # temp files from docs build
+doc/*.nc
 doc/auto_gallery
-doc/complex.nc
-doc/example-data.nc
-doc/example-no-leap.nc
-doc/example.nc
-doc/manipulated-example-data.nc
-doc/saved_on_disk.nc
+doc/rasm.zarr
 doc/savefig

 # C extensions
@@ -77,4 +73,3 @@ xarray/tests/data/*.grib.*.idx
 Icon*

 .ipynb_checkpoints
-doc/rasm.zarr

From b5584432fb3f2974112bf355084ca89e9938d70f Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Wed, 9 Feb 2022 17:17:34 -0500
Subject: [PATCH 6/8] First detect conda environment with CONDA_DEFAULT_ENV variable

---
 doc/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/doc/conf.py b/doc/conf.py
index 1b3ae96ee4b..bc56033891a 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -28,7 +28,7 @@
 print("python exec:", sys.executable)
 print("sys.path:", sys.path)

-if "conda" in sys.executable:
+if "CONDA_DEFAULT_ENV" in os.environ or "conda" in sys.executable:
     print("conda environment:")
     subprocess.run([os.environ.get("CONDA_EXE", "conda"), "list"])
 else:

From d4e64e46ea1beecf7f1566261373895d3d1757c7 Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Thu, 10 Feb 2022 11:32:23 -0500
Subject: [PATCH 7/8] Delete files of datasets saved while building docs

---
 doc/getting-started-guide/quick-overview.rst |  9 ++---
 doc/internals/zarr-encoding-spec.rst         |  7 ++++
 doc/user-guide/dask.rst                      | 27 ++++++++-------
 doc/user-guide/io.rst                        | 35 +++++++++++---------
 doc/user-guide/weather-climate.rst           |  5 +--
 doc/whats-new.rst                            |  3 +-
 6 files changed, 50 insertions(+), 36 deletions(-)

diff --git a/doc/getting-started-guide/quick-overview.rst b/doc/getting-started-guide/quick-overview.rst
index 723b92899ce..207cbd65d00 100644
--- a/doc/getting-started-guide/quick-overview.rst
+++ b/doc/getting-started-guide/quick-overview.rst
@@ -10,6 +10,8 @@ To begin, import numpy, pandas and xarray using their customary abbreviations:

 .. ipython:: python

+    import os
+
     import numpy as np
     import pandas as pd
     import xarray as xr
@@ -215,14 +217,13 @@ You can directly read and write xarray objects to disk using :py:meth:`~xarray.D
 .. ipython:: python

     ds.to_netcdf("example.nc")
-    xr.open_dataset("example.nc")
+    reopened = xr.open_dataset("example.nc")
+    reopened

 .. ipython:: python
-    :okexcept:
     :suppress:

-    import os
-
+    reopened.close()
     os.remove("example.nc")
diff --git a/doc/internals/zarr-encoding-spec.rst b/doc/internals/zarr-encoding-spec.rst
index 082d7984f59..6f8501a6b11 100644
--- a/doc/internals/zarr-encoding-spec.rst
+++ b/doc/internals/zarr-encoding-spec.rst
@@ -63,3 +63,10 @@ re-open it directly with Zarr:
     print(os.listdir("rasm.zarr"))
     print(zgroup.tree())
     dict(zgroup["Tair"].attrs)
+
+.. ipython:: python
+    :suppress:
+
+    import shutil
+
+    shutil.rmtree("rasm.zarr")
diff --git a/doc/user-guide/dask.rst b/doc/user-guide/dask.rst
index 7737d82fb7d..ebbbaa199f6 100644
--- a/doc/user-guide/dask.rst
+++ b/doc/user-guide/dask.rst
@@ -55,6 +55,8 @@ argument to :py:func:`~xarray.open_dataset` or using the
 .. ipython:: python
     :suppress:

+    import os
+
     import numpy as np
     import pandas as pd
     import xarray as xr
@@ -129,6 +131,11 @@ will return a ``dask.delayed`` object that can be computed later.
     with ProgressBar():
         results = delayed_obj.compute()

+.. ipython:: python
+    :suppress:
+
+    os.remove("manipulated-example-data.nc")  # Was not opened.
+
 .. note::

     When using Dask's distributed scheduler to write NETCDF4 files,
@@ -147,14 +154,6 @@ A dataset can also be converted to a Dask DataFrame using :py:meth:`~xarray.Data

 Dask DataFrames do not support multi-indexes so the coordinate variables from the dataset are included as columns in the Dask DataFrame.

-.. ipython:: python
-    :okexcept:
-    :suppress:
-
-    import os
-
-    os.remove("example-data.nc")
-    os.remove("manipulated-example-data.nc")

 Using Dask with xarray
 ----------------------
@@ -211,7 +210,7 @@ Dask arrays using the :py:meth:`~xarray.Dataset.persist` method:

 .. ipython:: python

-    ds = ds.persist()
+    persisted = ds.persist()

 :py:meth:`~xarray.Dataset.persist` is particularly useful when using a
 distributed cluster because the data will be loaded into distributed memory
@@ -233,11 +232,6 @@ chunk size depends both on your data and on the operations you want to perform.
 With xarray, both converting data to a Dask arrays and converting the chunk sizes
 of Dask arrays is done with the :py:meth:`~xarray.Dataset.chunk` method:

-.. ipython:: python
-    :suppress:
-
-    ds = ds.chunk({"time": 10})
-
 .. ipython:: python

     rechunked = ds.chunk({"latitude": 100, "longitude": 100})
@@ -509,6 +503,11 @@ Notice that the 0-shaped sizes were not printed to screen. Since ``template`` ha
     expected = ds + 10 + 10
     mapped.identical(expected)

+.. ipython:: python
+    :suppress:
+
+    ds.close()  # Closes "example-data.nc".
+    os.remove("example-data.nc")

 .. tip::
diff --git a/doc/user-guide/io.rst b/doc/user-guide/io.rst
index 18793fa71ce..713b5d7382b 100644
--- a/doc/user-guide/io.rst
+++ b/doc/user-guide/io.rst
@@ -11,6 +11,8 @@ format (recommended).
 .. ipython:: python
     :suppress:

+    import os
+
     import numpy as np
     import pandas as pd
     import xarray as xr
@@ -84,6 +86,13 @@ We can load netCDF files to create a new Dataset using
     ds_disk = xr.open_dataset("saved_on_disk.nc")
     ds_disk

+.. ipython:: python
+    :suppress:
+
+    # Close "saved_on_disk.nc", but retain the file until after closing or deleting other
+    # datasets that will refer to it.
+    ds_disk.close()
+
 Similarly, a DataArray can be saved to disk using the
 :py:meth:`DataArray.to_netcdf` method, and loaded
 from disk using the :py:func:`open_dataarray` function. As netCDF files
@@ -204,11 +213,6 @@ You can view this encoding information (among others) in the
 Note that all operations that manipulate variables other than indexing
 will remove encoding information.

-.. ipython:: python
-    :suppress:
-
-    ds_disk.close()
-

 .. _combining multiple files:

@@ -484,14 +488,13 @@ and currently raises a warning unless ``invalid_netcdf=True`` is set:
     da.to_netcdf("complex.nc", engine="h5netcdf", invalid_netcdf=True)

     # Reading it back
-    xr.open_dataarray("complex.nc", engine="h5netcdf")
+    reopened = xr.open_dataarray("complex.nc", engine="h5netcdf")
+    reopened

 .. ipython:: python
-    :okexcept:
     :suppress:

-    import os
-
+    reopened.close()
     os.remove("complex.nc")

 .. warning::
@@ -724,17 +727,19 @@ To export just the dataset schema without the data itself, use the

     ds.to_dict(data=False)

-This can be useful for generating indices of dataset contents to expose to
-search indices or other automated data discovery tools.
-
 .. ipython:: python
-    :okexcept:
     :suppress:

-    import os
-
+    # We're now done with the dataset named `ds`. Although the `with` statement closed
+    # the dataset, displaying the unpickled pickle of `ds` re-opened "saved_on_disk.nc".
+    # However, `ds` (rather than the unpickled dataset) refers to the open file. Delete
+    # `ds` to close the file.
+    del ds
     os.remove("saved_on_disk.nc")

+This can be useful for generating indices of dataset contents to expose to
+search indices or other automated data discovery tools.
+
 .. _io.rasterio:

 Rasterio
diff --git a/doc/user-guide/weather-climate.rst b/doc/user-guide/weather-climate.rst
index 2f9d15e54d2..6db0396ee4e 100644
--- a/doc/user-guide/weather-climate.rst
+++ b/doc/user-guide/weather-climate.rst
@@ -218,14 +218,15 @@ For data indexed by a :py:class:`~xarray.CFTimeIndex` xarray currently supports:
 .. ipython:: python

     da.to_netcdf("example-no-leap.nc")
-    xr.open_dataset("example-no-leap.nc")
+    reopened = xr.open_dataset("example-no-leap.nc")
+    reopened

 .. ipython:: python
-    :okexcept:
     :suppress:

     import os
+    reopened.close()
     os.remove("example-no-leap.nc")

 - And resampling along the time dimension for data indexed by a
   :py:class:`~xarray.CFTimeIndex`:
diff --git a/doc/whats-new.rst b/doc/whats-new.rst
index 897b8e92bbb..f7249f192bd 100644
--- a/doc/whats-new.rst
+++ b/doc/whats-new.rst
@@ -38,7 +38,8 @@ Bug fixes

 Documentation
 ~~~~~~~~~~~~~
-- Enable building the documentation on Windows via `sphinx-build` (:pull:`6237`).
+- Delete files of datasets saved to disk while building the documentation and enable
+  building on Windows via `sphinx-build` (:pull:`6237`).
   By `Stan West `_.

 Internal Changes

From 2c0a29c98e9ebb5b5a2001b155bd15d85fd27797 Mon Sep 17 00:00:00 2001
From: Stan West <38358698+stanwest@users.noreply.github.com>
Date: Fri, 11 Feb 2022 07:52:57 -0500
Subject: [PATCH 8/8] Hide import used only to delete temporary file

---
 doc/getting-started-guide/quick-overview.rst | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/doc/getting-started-guide/quick-overview.rst b/doc/getting-started-guide/quick-overview.rst
index 207cbd65d00..bdd4e0df413 100644
--- a/doc/getting-started-guide/quick-overview.rst
+++ b/doc/getting-started-guide/quick-overview.rst
@@ -10,8 +10,6 @@ To begin, import numpy, pandas and xarray using their customary abbreviations:

 .. ipython:: python

-    import os
-
     import numpy as np
     import pandas as pd
     import xarray as xr
@@ -223,6 +221,8 @@ You can directly read and write xarray objects to disk using :py:meth:`~xarray.D
 .. ipython:: python
     :suppress:

+    import os
+
     reopened.close()
     os.remove("example.nc")