From bfd9da0521abedb54fb84ae43a448dc4a7a83e57 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Sun, 8 Aug 2021 11:40:22 +0200 Subject: [PATCH 01/15] Add merra2 function to api.rst --- docs/sphinx/source/api.rst | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/sphinx/source/api.rst b/docs/sphinx/source/api.rst index 6d08e742b3..cf691aaf52 100644 --- a/docs/sphinx/source/api.rst +++ b/docs/sphinx/source/api.rst @@ -497,6 +497,8 @@ of sources and file formats relevant to solar energy modeling. iotools.get_cams iotools.read_cams iotools.parse_cams + iotools.get_merra2 + iotools.read_merra2 A :py:class:`~pvlib.location.Location` object may be created from metadata in some files. From 592563cd3b2a8a4ff6302cdd301170febee0140c Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Sun, 8 Aug 2021 11:40:45 +0200 Subject: [PATCH 02/15] Add merra2 functions to iotools init file --- pvlib/iotools/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/pvlib/iotools/__init__.py b/pvlib/iotools/__init__.py index b02ce243ae..39825e301d 100644 --- a/pvlib/iotools/__init__.py +++ b/pvlib/iotools/__init__.py @@ -21,3 +21,5 @@ from pvlib.iotools.sodapro import get_cams # noqa: F401 from pvlib.iotools.sodapro import read_cams # noqa: F401 from pvlib.iotools.sodapro import parse_cams # noqa: F401 +from pvlib.iotools.merra2 import get_merra2 # noqa: F401 +from pvlib.iotools.merra2 import read_merra2 # noqa: F401 From 87a6e2bab2615bb5799fc77d3db576d8ee9c2850 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Sun, 8 Aug 2021 11:41:07 +0200 Subject: [PATCH 03/15] get_merra2 function first commit --- pvlib/iotools/merra2.py | 70 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 70 insertions(+) create mode 100644 pvlib/iotools/merra2.py diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py new file mode 100644 index 0000000000..2cced230bb --- /dev/null +++ b/pvlib/iotools/merra2.py @@ -0,0 +1,70 @@ +"""Functions to read and retrieve MERRA2 reanalysis data from NASA. +.. codeauthor:: Adam R. Jensen +""" + +import xarray as xr # Make funky import +from pydap.cas.urs import setup_session + +MERRA2_BASE_URL = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods' + + +def get_merra2(latitude, longitude, start, end, dataset, variables, username, + password, local_path=None): + """ + Retrieve MERRA2 reanalysis data from the NASA GESDISC repository. + + Regular latitude-longitude grid of 0.5° x 0.625°. + + Parameters + ---------- + start: datetime-like + First day of the requested period + end: datetime-like + Last day of the requested period + local_path: str or path-like, optional + If specified, path (abs. or relative) of where to save files + + Returns + ------- + data: DataFrame + Dataframe containing MERRA2 timeseries data, see [3]_ for variable units. + metadata: dict + metadata + + Notes + ----- + In order to obtain MERRA2 data, it is necessary to registre for an + Earthdata account and link it to the GES DISC as described in [2]_. + + + See Also + -------- + pvlib.iotools.read_merra2, pvlib.iotools.get_era5 + + References + ---------- + .. [1] `NASA MERRA-2 Project overview + `_ + .. [2] `Account registration and data access to NASA's GES DISC + ` + .. 
[3] `MERRa-2 File specification + ` + + """ # noqa: E501 + url = MERRA2_BASE_URL + '/' + dataset + session = setup_session(username, password, check_url=url) + store = xr.backends.PydapDataStore.open(url, session=session) + + ds = xr.open_dataset(store).sel( + {'lat': latitude, + 'lon': longitude, + 'times': slice(start.strftime('%Y-%m-%d'), end.strftime('%Y-%m-%d')), + }) + + data = ds[[variables]].to_dataframe() + + metadata = {} + + return data, metadata + + From 286c384ee66f7b1a07499a9f92c220af1470eab8 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Sun, 8 Aug 2021 11:57:03 +0200 Subject: [PATCH 04/15] Add xarray to requires yml files --- ci/requirements-py36-min.yml | 1 + ci/requirements-py36.yml | 1 + ci/requirements-py37.yml | 1 + ci/requirements-py38.yml | 1 + ci/requirements-py39.yml | 1 + pvlib/iotools/merra2.py | 2 +- 6 files changed, 6 insertions(+), 1 deletion(-) diff --git a/ci/requirements-py36-min.yml b/ci/requirements-py36-min.yml index 84adcb360d..d2c9b6ed81 100644 --- a/ci/requirements-py36-min.yml +++ b/ci/requirements-py36-min.yml @@ -12,6 +12,7 @@ dependencies: - python=3.6 - pytz - requests + - xarray - pip: - dataclasses - numpy==1.12.0 diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index c49455119f..c6f1275c03 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -27,6 +27,7 @@ dependencies: - shapely # pvfactors dependency - siphon # conda-forge - statsmodels + - xarray - pip: - dataclasses - nrel-pysam>=2.0 diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index 3203b004d1..3f30d442fe 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -27,6 +27,7 @@ dependencies: - shapely # pvfactors dependency - siphon # conda-forge - statsmodels + - xarray - pip: - nrel-pysam>=2.0 - pvfactors==1.4.1 diff --git a/ci/requirements-py38.yml b/ci/requirements-py38.yml index ca3a968335..53f6900f80 100644 --- a/ci/requirements-py38.yml +++ b/ci/requirements-py38.yml @@ -27,6 +27,7 @@ dependencies: - shapely # pvfactors dependency - siphon # conda-forge - statsmodels + - xarray - pip: - nrel-pysam>=2.0 - pvfactors==1.4.1 diff --git a/ci/requirements-py39.yml b/ci/requirements-py39.yml index 16c6449158..c23635b304 100644 --- a/ci/requirements-py39.yml +++ b/ci/requirements-py39.yml @@ -27,6 +27,7 @@ dependencies: - shapely # pvfactors dependency # - siphon # conda-forge - statsmodels + - xarray - pip: # - nrel-pysam>=2.0 # install error on windows - pvfactors==1.4.1 diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py index 2cced230bb..483976ab79 100644 --- a/pvlib/iotools/merra2.py +++ b/pvlib/iotools/merra2.py @@ -11,7 +11,7 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, password, local_path=None): """ - Retrieve MERRA2 reanalysis data from the NASA GESDISC repository. + Retrieve MERRA2 reanalysis data from the NASA GES DISC repository. Regular latitude-longitude grid of 0.5° x 0.625°. 
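A minimal usage sketch of the two iotools functions this series adds, based on the signatures and the remote-data test introduced in later patches of the series. The site coordinates, date range, and EarthData credentials below are illustrative placeholders, not values from the patches themselves; the netCDF filename matches one of the test data files added later in the series::

    import datetime as dt
    from pvlib.iotools import get_merra2, read_merra2

    # Placeholder EarthData credentials -- register an account and link it
    # to GES DISC before running this.
    username, password = '<earthdata_username>', '<earthdata_password>'

    # Download hourly radiation diagnostics (M2T1NXRAD) for a single site.
    data, meta = get_merra2(
        latitude=55, longitude=15,
        start=dt.datetime(2020, 1, 1), end=dt.datetime(2020, 1, 2),
        dataset='M2T1NXRAD',
        variables=['SWGDN', 'SWTDN'],  # surface / top-of-atmosphere shortwave
        username=username, password=password,
        map_variables=True)  # e.g. SWGDN is renamed to 'ghi'

    # Read a previously downloaded MERRA-2 netCDF file instead.
    data, meta = read_merra2('MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc',
                             map_variables=True)

With a single latitude/longitude pair, both calls return a pandas DataFrame plus a metadata dict; passing [S, N] / [W, E] ranges or output_format='dataset' yields an xarray Dataset instead.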
From 2a25d8253673f3cda21cfe887572f68c5c50b1aa Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Sun, 8 Aug 2021 18:54:46 +0200 Subject: [PATCH 05/15] Add read_merra2 and variable_map --- pvlib/iotools/merra2.py | 48 ++++++++++++++++++++++++++++++++++++++--- 1 file changed, 45 insertions(+), 3 deletions(-) diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py index 483976ab79..2651046137 100644 --- a/pvlib/iotools/merra2.py +++ b/pvlib/iotools/merra2.py @@ -4,7 +4,24 @@ import xarray as xr # Make funky import from pydap.cas.urs import setup_session - +import os + +MERRA2_VARIABLE_MAP = { + # Variables from the 'M2T1NXRAD' dataset + # Hourly,Time-Averaged,Single-Level,Assimilation,Radiation Diagnostics + 'ALBEDO': 'albedo', + #'surface_incoming_shortwave_flux': , + #'surface_incoming_shortwave_flux_assuming_clear_sky': , + #'surface_net_downward_longwave_flux': , + 'SWGDN': 'ghi', + 'SWTDN': '_extra', + 'PS': 'pressure', + 'T2M': 'temp_air', + 'T2MDEW': 'temp_dew', + + } + +# goldsmr4 contains the single-level 2D MERRA-2 data files MERRA2_BASE_URL = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods' @@ -36,6 +53,16 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, In order to obtain MERRA2 data, it is necessary to registre for an Earthdata account and link it to the GES DISC as described in [2]_. + MERRA-2 contains 14 single-level 2D datasets with an hourly resolution. The + most important ones are 'M2T1NXAER' which contains aerosol data, 'M2T1NXRAD' + which contains radiation related parameters, and 'M2T1NXSLV' which contains + general variables (e.g., temperature and wind speed). + + Warning + ------- + Known error in calculation of radiation, hence it is strongly adviced that + radiation from MERRA-2 should not be used. Users interested in radiation + from reanalysis datasets are referred to pvlib.iotools.get_era5. See Also -------- @@ -61,10 +88,25 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, 'times': slice(start.strftime('%Y-%m-%d'), end.strftime('%Y-%m-%d')), }) - data = ds[[variables]].to_dataframe() + data = ds[variables].to_dataframe() + + metadata = ds.attrs # Gives overall metadata but not variable stuff - metadata = {} + if local_path is not None: + ds.to_netcdf(os.path.join(local_path, metadata['Filename'])) return data, metadata +# Shoudl read_merra2 use open_mfdataset? +def read_merra2(filenames, latitude, longitude, variables, map_variables=True): + """Reading a MERRA-2 file into a pandas dataframe. 
+ + """ + ds = xr.open_dataset(filenames).sel(lat=latitude, lon=longitude, + method='nearest') + + data = ds[variables].to_dataframe().drop(columns=['lon', 'lat']) + metadata = ds.attrs # Gives overall metadata but not variable stuff + + return data, metadata From 28bd9146c3a29d982a08eb0983ea06203f22aad7 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 11:40:22 +0200 Subject: [PATCH 06/15] Add xarray, dask, and pydap optional dependencies --- ci/azure/conda_linux.yml | 2 ++ ci/requirements-py36-min.yml | 1 - ci/requirements-py36.yml | 2 ++ ci/requirements-py37.yml | 2 ++ ci/requirements-py38.yml | 2 ++ ci/requirements-py39.yml | 2 ++ setup.py | 3 ++- 7 files changed, 12 insertions(+), 2 deletions(-) diff --git a/ci/azure/conda_linux.yml b/ci/azure/conda_linux.yml index 3bf8215cc4..6fadfd86a1 100644 --- a/ci/azure/conda_linux.yml +++ b/ci/azure/conda_linux.yml @@ -40,6 +40,8 @@ jobs: export NREL_API_KEY=$(nrelApiKey) export BSRN_FTP_USERNAME=$(BSRN_FTP_USERNAME) export BSRN_FTP_PASSWORD=$(BSRN_FTP_PASSWORD) + export MERRA2_USERNAME=$(MERRA2_USERNAME) + export MERRA2_PASSWORD=$(MERRA2_PASSWORD) pytest pvlib --remote-data --junitxml=junit/test-results.xml --cov --cov-report=xml --cov-report=html displayName: 'pytest' - task: PublishTestResults@2 diff --git a/ci/requirements-py36-min.yml b/ci/requirements-py36-min.yml index d2c9b6ed81..84adcb360d 100644 --- a/ci/requirements-py36-min.yml +++ b/ci/requirements-py36-min.yml @@ -12,7 +12,6 @@ dependencies: - python=3.6 - pytz - requests - - xarray - pip: - dataclasses - numpy==1.12.0 diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index c6f1275c03..f65e5c579f 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -5,6 +5,7 @@ channels: dependencies: - coveralls - cython + - dask - ephem - netcdf4 - nose @@ -32,3 +33,4 @@ dependencies: - dataclasses - nrel-pysam>=2.0 - pvfactors==1.4.1 + - git+https://github.com/pydap/pydap#egg=pydap diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index 3f30d442fe..409dabbccf 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -5,6 +5,7 @@ channels: dependencies: - coveralls - cython + - dask - ephem - netcdf4 - nose @@ -31,3 +32,4 @@ dependencies: - pip: - nrel-pysam>=2.0 - pvfactors==1.4.1 + - git+https://github.com/pydap/pydap#egg=pydap diff --git a/ci/requirements-py38.yml b/ci/requirements-py38.yml index 53f6900f80..87c1168fae 100644 --- a/ci/requirements-py38.yml +++ b/ci/requirements-py38.yml @@ -5,6 +5,7 @@ channels: dependencies: - coveralls - cython + - dask - ephem - netcdf4 - nose @@ -31,3 +32,4 @@ dependencies: - pip: - nrel-pysam>=2.0 - pvfactors==1.4.1 + - git+https://github.com/pydap/pydap#egg=pydap diff --git a/ci/requirements-py39.yml b/ci/requirements-py39.yml index c23635b304..17cb69fcd9 100644 --- a/ci/requirements-py39.yml +++ b/ci/requirements-py39.yml @@ -5,6 +5,7 @@ channels: dependencies: - coveralls - cython + - dask - ephem # - netcdf4 # pulls in a different version of numpy with ImportError - nose @@ -31,3 +32,4 @@ dependencies: - pip: # - nrel-pysam>=2.0 # install error on windows - pvfactors==1.4.1 + - git+https://github.com/pydap/pydap#egg=pydap diff --git a/setup.py b/setup.py index 216dc34a28..7b11ef94d1 100755 --- a/setup.py +++ b/setup.py @@ -54,7 +54,8 @@ EXTRAS_REQUIRE = { 'optional': ['cython', 'ephem', 'netcdf4', 'nrel-pysam', 'numba', 'pvfactors', 'siphon', 'statsmodels', 'tables', - 'cftime >= 1.1.1'], + 'cftime >= 1.1.1', 'xarray', 'dask', + 
'git+https://github.com/pydap/pydap#egg=pydap'], 'doc': ['ipython', 'matplotlib', 'sphinx == 3.1.2', 'sphinx_rtd_theme==0.5.0', 'sphinx-gallery', 'docutils == 0.15.2', 'pillow', 'netcdf4', 'siphon', 'tables', From 94b6ee2c453e5bef8aa711cec62f3e68a252d700 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 11:59:52 +0200 Subject: [PATCH 07/15] Update whatsnew --- docs/sphinx/source/whatsnew/v0.9.0.rst | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index d516685154..e5d1026a3d 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -107,8 +107,11 @@ Deprecations Enhancements ~~~~~~~~~~~~ -* Added :func:`~pvlib.iotools.read_pvgis_hourly` and - :func:`~pvlib.iotools.get_pvgis_hourly` for reading and retrieving hourly +* Added :func:`~pvlib.iotools.get_merra2` and + :func:`~pvlib.iotools.read_merra2` for retrieving and reading hourly + reanalysis data from MERRA-2. (:pull:`1247`) +* Added :func:`~pvlib.iotools.get_pvgis_hourly` and + :func:`~pvlib.iotools.read_pvgis_hourly` for retrieving and reading hourly solar radiation data and PV power output from PVGIS. (:pull:`1186`, :issue:`849`) * Add :func:`~pvlib.iotools.get_bsrn` and :func:`~pvlib.iotools.read_bsrn` From 398d6660645323f95bf215e4240695b75a4c8d9f Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 12:01:14 +0200 Subject: [PATCH 08/15] Add test_merra2 and requires_merra2_credentials --- pvlib/tests/conftest.py | 22 +++++++++++++++++++ pvlib/tests/iotools/test_merra2.py | 34 ++++++++++++++++++++++++++++++ 2 files changed, 56 insertions(+) create mode 100644 pvlib/tests/iotools/test_merra2.py diff --git a/pvlib/tests/conftest.py b/pvlib/tests/conftest.py index a3cba1e7b8..268bd77c44 100644 --- a/pvlib/tests/conftest.py +++ b/pvlib/tests/conftest.py @@ -95,6 +95,18 @@ def assert_frame_equal(left, right, **kwargs): not has_bsrn_credentials, reason='requires bsrn credentials') +try: + # Attempt to load NASA EarthData login credentials used for testing pvlib.iotools.get_merra2 + MERRA2_USERNAME = os.environ["MERRA2_USERNAME"] + MERRA2_PASSWORD = os.environ["MERRA2_PASSWORD"] + has_merra2_credentials = True +except KeyError: + has_merra2_credentials = False + +requires_merra2_credentials = pytest.mark.skipif( + not has_merra2_credentials, reason='requires merra2 credentials') + + try: import statsmodels # noqa: F401 has_statsmodels = True @@ -105,6 +117,16 @@ def assert_frame_equal(left, right, **kwargs): not has_statsmodels, reason='requires statsmodels') +try: + import xarray as xr # noqa: F401 + has_xarray = True +except ImportError: + has_xarray = False + +requires_xarray = pytest.mark.skipif( + not has_xarray, reason='requires xarray') + + try: import tables has_tables = True diff --git a/pvlib/tests/iotools/test_merra2.py b/pvlib/tests/iotools/test_merra2.py new file mode 100644 index 0000000000..3d20ba0fee --- /dev/null +++ b/pvlib/tests/iotools/test_merra2.py @@ -0,0 +1,34 @@ +""" +tests for :mod:`pvlib.iotools.merra2` +""" + +import pandas as pd +import numpy as np +import pytest +import os +from pvlib.iotools import read_merra2, get_merra2 +from ..conftest import (DATA_DIR, RERUNS, RERUNS_DELAY, assert_index_equal, + requires_merra2_credentials, requires_xarray) + + +@pytest.fixture(scope="module") +def merra2_credentials(): + """Supplies pvlib-python's EarthData login credentials. 
+ Users should obtain their own credentials as described in the `get_merra2` + documentation.""" + return (os.environ["MERRA2_USERNAME"], os.environ["MERRA2_PASSWORD"]) + + +@requires_xarray +def test_read_merra2(): + # data, meta = \ + # read_merra2(DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200101.nc4') + assert True + + +@requires_xarray +@requires_merra2_credentials +@pytest.mark.remote_data +@pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) +def test_get_merra2(): + assert True From 7bd3cbe7e52bb3d30514960c66a52d2d12dee56a Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 12:01:27 +0200 Subject: [PATCH 09/15] Add helper functions to pvlib.tools --- pvlib/iotools/merra2.py | 184 +++++++++++++++++++++++++++++++--------- pvlib/tools.py | 45 ++++++++++ 2 files changed, 191 insertions(+), 38 deletions(-) diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py index 2651046137..22dc142e9a 100644 --- a/pvlib/iotools/merra2.py +++ b/pvlib/iotools/merra2.py @@ -1,45 +1,67 @@ """Functions to read and retrieve MERRA2 reanalysis data from NASA. .. codeauthor:: Adam R. Jensen """ - -import xarray as xr # Make funky import -from pydap.cas.urs import setup_session import os +import cftime +from pvlib.tools import (_extract_metadata_from_dataset, + _convert_C_to_K_in_dataset) + +try: + import xarray as xr +except ImportError: + xr = None + +try: + from pydap.cas.urs import setup_session +except ImportError: + setup_session = None MERRA2_VARIABLE_MAP = { - # Variables from the 'M2T1NXRAD' dataset - # Hourly,Time-Averaged,Single-Level,Assimilation,Radiation Diagnostics - 'ALBEDO': 'albedo', - #'surface_incoming_shortwave_flux': , - #'surface_incoming_shortwave_flux_assuming_clear_sky': , - #'surface_net_downward_longwave_flux': , - 'SWGDN': 'ghi', - 'SWTDN': '_extra', - 'PS': 'pressure', - 'T2M': 'temp_air', - 'T2MDEW': 'temp_dew', - } -# goldsmr4 contains the single-level 2D MERRA-2 data files +# goldsmr4 contains the single-level 2D hourly MERRA-2 data files MERRA2_BASE_URL = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods' def get_merra2(latitude, longitude, start, end, dataset, variables, username, - password, local_path=None): + password, save_path=None, output_format=None, + map_variables=True): """ - Retrieve MERRA2 reanalysis data from the NASA GES DISC repository. + Retrieve MERRA-2 reanalysis data from the NASA GES DISC repository. + + The function supports downloading of MERRA-2 Hourly 2-Dimensional + Time-Averaged Variables (M2T1NXSLV) - Regular latitude-longitude grid of 0.5° x 0.625°. + * Temporal coverage: 1980 to present (latency of 2-7 weeks) + * Temporal resolution: hourly + * Spatial coverage: global + * Spatial resolution: 0.625° longitude by 0.5° latitude Parameters ---------- + latitude: float or list + in decimal degrees, between -90 and 90, north is positive (ISO 19115). + If latitude is a list, it should have the format [S, N] and + latitudes within the range are selected according to the grid. + longitude: float or list + in decimal degrees, between -180 and 180, east is positive (ISO 19115). + If longitude is a list, it should have the format [W, E] and + longitudes within the range are selected according to the grid. start: datetime-like First day of the requested period end: datetime-like Last day of the requested period - local_path: str or path-like, optional - If specified, path (abs. 
or relative) of where to save files + variables: list + List of variables to retrieve + dataset: str + Name of the dataset to retrieve the variables from, e.g., + output_format: {'dataframe', 'dataset'}, optional + Type of data object to return. Default is to return a pandas DataFrame + if file only contains one location and otherwise return an xarray + dataset. + map_variables: bool, default: True + When true, renames columns to pvlib variable names where applicable. + See variable MERRRA2_VARIABLE_MAP. Returns ------- @@ -78,35 +100,121 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, ` """ # noqa: E501 + if xr is None: + raise ImportError('Retrieving MERRA-2 data requires xarray') + if setup_session is None: + raise ImportError('Retrieving MERRA-2 data requires PyDap') + url = MERRA2_BASE_URL + '/' + dataset + session = setup_session(username, password, check_url=url) store = xr.backends.PydapDataStore.open(url, session=session) - ds = xr.open_dataset(store).sel( + start_float = cftime.date2num(start, units='days since 1-1-1 00:00:0.0') + end_float = cftime.date2num(end, units='days since 1-1-1 00:00:0.0') + + #try: + # latitude = slice(latitude[0], latitude[1]) + # longitude = slice(longitude[0], longitude[1]) + # method = None + #except TypeError: + # method = 'nearest' + + # Setting decode_times=False results in a time saving of up to some minutes + ds = xr.open_dataset(store, decode_times=False).sel( {'lat': latitude, 'lon': longitude, - 'times': slice(start.strftime('%Y-%m-%d'), end.strftime('%Y-%m-%d')), - }) + 'time': slice(start_float, end_float)}, + ) + + variables = [v.lower() for v in variables] # Make all variables lower-case - data = ds[variables].to_dataframe() + ds = xr.decode_cf(ds) # Decode timestamps - metadata = ds.attrs # Gives overall metadata but not variable stuff + ds = _convert_C_to_K_in_dataset(ds) + metadata = _extract_metadata_from_dataset(ds) - if local_path is not None: - ds.to_netcdf(os.path.join(local_path, metadata['Filename'])) + if map_variables: + # Renaming of xarray datasets throws an error if keys are missing + ds = ds.rename_vars( + {k: v for k, v in MERRA2_VARIABLE_MAP.items() if k in list(ds)}) - return data, metadata + if (output_format == 'dataframe') or ( + (output_format is None) & (ds['latitude'].size == 1) & + (ds['longitude'].size == 1)): + data = ds.to_dataframe() + # Localize timezone to UTC + data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), 'time') # noqa: E501 + if (ds['latitude'].size == 1) & (ds['longitude'].size == 1): + data = data.droplevel(['latitude', 'longitude']) + return data, metadata + else: + return ds, metadata -# Shoudl read_merra2 use open_mfdataset? -def read_merra2(filenames, latitude, longitude, variables, map_variables=True): +def read_merra2(filename, output_format=None, map_variables=True): """Reading a MERRA-2 file into a pandas dataframe. - - """ - ds = xr.open_dataset(filenames).sel(lat=latitude, lon=longitude, - method='nearest') - data = ds[variables].to_dataframe().drop(columns=['lon', 'lat']) - metadata = ds.attrs # Gives overall metadata but not variable stuff + MERRA-2 is described in [1]_ and a list of variables can be found in [2]_. - return data, metadata + Parameters + ---------- + filename: str or path-like or list + Filename of a netcdf file containing MERRA-2 data or a list of + filenames. + output_format: {'dataframe', 'dataset'}, optional + Type of data object to return. 
Default is to return a pandas DataFrame + if file only contains one location and otherwise return an xarray + dataset. + map_variables: bool, default: True + When true, renames columns to pvlib variable names where applicable. + See variable MERRRA2_VARIABLE_MAP. + + Returns + ------- + data: DataFrame + MERRA-2 time-series data, fields depend on the requested data. The + returned object is either a pandas DataFrame or an xarray dataset, + depending on the output_format parameter. + metadata: dict + Metadata for the time-series. + + See Also + -------- + pvlib.iotools.get_merra2, pvlib.iotools.get_era5 + + References + ---------- + .. [1] `NASA MERRA-2 Project overview + `_ + .. [2] `MERRa-2 File specification + ` + """ + if xr is None: + raise ImportError('Reading MERRA-2 data requires xarray to be installed.') # noqa: E501 + + # open multiple-files (mf) requires dask + if isinstance(filename, (list, tuple)): + ds = xr.open_mfdataset(filename) + else: + ds = xr.open_dataset(filename) + + ds = _convert_C_to_K_in_dataset(ds) + metadata = _extract_metadata_from_dataset(ds) + + if map_variables: + # Renaming of xarray datasets throws an error if keys are missing + ds = ds.rename_vars( + {k: v for k, v in MERRA2_VARIABLE_MAP.items() if k in list(ds)}) + + if (output_format == 'dataframe') or ( + (output_format is None) & (ds['latitude'].size == 1) & + (ds['longitude'].size == 1)): + data = ds.to_dataframe() + # Localize timezone to UTC + data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), 'time') # noqa: E501 + if (ds['latitude'].size == 1) & (ds['longitude'].size == 1): + data = data.droplevel(['latitude', 'longitude']) + return data, metadata + else: + return ds, metadata diff --git a/pvlib/tools.py b/pvlib/tools.py index eef80a3b37..716ebffd05 100644 --- a/pvlib/tools.py +++ b/pvlib/tools.py @@ -344,3 +344,48 @@ def _golden_sect_DataFrame(params, VL, VH, func): raise Exception("EXCEPTION:iterations exceeded maximum (50)") return func(df, 'V1'), df['V1'] + + +def _extract_metadata_from_dataset(ds): + """ + Generate a dictionary of metadata from an xarray dataset. + Parameters + ---------- + ds : dataset + dataset containing time series data. + Returns + ------- + metadata : dict + Dictionary containing metadata. + """ + metadata = {} + for v in list(ds.variables): + metadata[v] = { + 'name': ds[v].name, + 'long_name': ds[v].long_name} + if 'units' in ds[v].attrs: + metadata[v]['units'] = ds[v].units + metadata['dims'] = dict(ds.dims) + metadata.update(ds.attrs) # add arbitrary metadata + return metadata + + +def _convert_C_to_K_in_dataset(ds): + """ + Convert all variables in an xarray dataset that have the unit Kelvin to + degrees Celsius. + Parameters + ---------- + ds : dataset + dataset containing time series data. 
+ Returns + ------- + ds : dataset + dataset where variables with temperature variables in Celsius + """ + for v in list(ds.variables): + if 'units' in ds[v].attrs: + if 'K' == ds[v].attrs['units']: + ds[v].data = ds[v].data - 273.15 + ds[v].attrs['units'] = 'C' + return ds From 9149f3770a773a109dd9589fc6a359f18bcb1c54 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 12:08:39 +0200 Subject: [PATCH 10/15] Update pydap link in setup.py --- pvlib/iotools/merra2.py | 15 +++++++-------- pvlib/tests/conftest.py | 3 ++- setup.py | 2 +- 3 files changed, 10 insertions(+), 10 deletions(-) diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py index 22dc142e9a..600ad1f78a 100644 --- a/pvlib/iotools/merra2.py +++ b/pvlib/iotools/merra2.py @@ -1,7 +1,6 @@ """Functions to read and retrieve MERRA2 reanalysis data from NASA. .. codeauthor:: Adam R. Jensen """ -import os import cftime from pvlib.tools import (_extract_metadata_from_dataset, _convert_C_to_K_in_dataset) @@ -113,19 +112,19 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, start_float = cftime.date2num(start, units='days since 1-1-1 00:00:0.0') end_float = cftime.date2num(end, units='days since 1-1-1 00:00:0.0') - #try: - # latitude = slice(latitude[0], latitude[1]) - # longitude = slice(longitude[0], longitude[1]) - # method = None - #except TypeError: - # method = 'nearest' + # try: + # latitude = slice(latitude[0], latitude[1]) + # longitude = slice(longitude[0], longitude[1]) + # method = None + # except TypeError: + # method = 'nearest' # Setting decode_times=False results in a time saving of up to some minutes ds = xr.open_dataset(store, decode_times=False).sel( {'lat': latitude, 'lon': longitude, 'time': slice(start_float, end_float)}, - ) + ) variables = [v.lower() for v in variables] # Make all variables lower-case diff --git a/pvlib/tests/conftest.py b/pvlib/tests/conftest.py index 268bd77c44..84935745d8 100644 --- a/pvlib/tests/conftest.py +++ b/pvlib/tests/conftest.py @@ -96,7 +96,8 @@ def assert_frame_equal(left, right, **kwargs): try: - # Attempt to load NASA EarthData login credentials used for testing pvlib.iotools.get_merra2 + # Attempt to load NASA EarthData login credentials used for testing + # pvlib.iotools.get_merra2 MERRA2_USERNAME = os.environ["MERRA2_USERNAME"] MERRA2_PASSWORD = os.environ["MERRA2_PASSWORD"] has_merra2_credentials = True diff --git a/setup.py b/setup.py index 7b11ef94d1..6d6910144e 100755 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ 'optional': ['cython', 'ephem', 'netcdf4', 'nrel-pysam', 'numba', 'pvfactors', 'siphon', 'statsmodels', 'tables', 'cftime >= 1.1.1', 'xarray', 'dask', - 'git+https://github.com/pydap/pydap#egg=pydap'], + 'pip install -e git+https://github.com/pydap/pydap#egg=pydap'], 'doc': ['ipython', 'matplotlib', 'sphinx == 3.1.2', 'sphinx_rtd_theme==0.5.0', 'sphinx-gallery', 'docutils == 0.15.2', 'pillow', 'netcdf4', 'siphon', 'tables', From c05fdb45cc9ebe2c747f5700be217ddd1286fb73 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 12:15:46 +0200 Subject: [PATCH 11/15] Remove pydap from setup.py --- setup.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/setup.py b/setup.py index 6d6910144e..7f3cfaf55e 100755 --- a/setup.py +++ b/setup.py @@ -54,8 +54,7 @@ EXTRAS_REQUIRE = { 'optional': ['cython', 'ephem', 'netcdf4', 'nrel-pysam', 'numba', 'pvfactors', 'siphon', 'statsmodels', 'tables', - 'cftime >= 1.1.1', 'xarray', 'dask', - 'pip install -e 
git+https://github.com/pydap/pydap#egg=pydap'], + 'cftime >= 1.1.1', 'xarray', 'dask'], 'doc': ['ipython', 'matplotlib', 'sphinx == 3.1.2', 'sphinx_rtd_theme==0.5.0', 'sphinx-gallery', 'docutils == 0.15.2', 'pillow', 'netcdf4', 'siphon', 'tables', From ab32ff548974151d93049a0cb997905a84af23da Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 12:30:06 +0200 Subject: [PATCH 12/15] Add cftime to requirements-pyXX.yml files --- ci/requirements-py36.yml | 1 + ci/requirements-py37.yml | 1 + ci/requirements-py38.yml | 1 + ci/requirements-py39.yml | 1 + 4 files changed, 4 insertions(+) diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index f65e5c579f..20e8247ec3 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -3,6 +3,7 @@ channels: - defaults - conda-forge dependencies: + - cftime - coveralls - cython - dask diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index 409dabbccf..d7a6debcbd 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -3,6 +3,7 @@ channels: - defaults - conda-forge dependencies: + - cftime - coveralls - cython - dask diff --git a/ci/requirements-py38.yml b/ci/requirements-py38.yml index 87c1168fae..37260e8fad 100644 --- a/ci/requirements-py38.yml +++ b/ci/requirements-py38.yml @@ -3,6 +3,7 @@ channels: - defaults - conda-forge dependencies: + - cftime - coveralls - cython - dask diff --git a/ci/requirements-py39.yml b/ci/requirements-py39.yml index 17cb69fcd9..6834c68cb5 100644 --- a/ci/requirements-py39.yml +++ b/ci/requirements-py39.yml @@ -3,6 +3,7 @@ channels: - defaults - conda-forge dependencies: + - cftime - coveralls - cython - dask From a8e802f043223bedf07de0bb6313df491ffbcf3d Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Fri, 20 Aug 2021 23:51:15 +0200 Subject: [PATCH 13/15] Add test data files and test_merra2 --- docs/sphinx/source/whatsnew/v0.9.0.rst | 1 + ...MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc | Bin 0 -> 50166 bytes ...MERRA2_400.tavg1_2d_rad_Nx.20200102.SUB.nc | Bin 0 -> 50166 bytes ...MERRA2_400.tavg1_2d_rad_Nx.20200103.SUB.nc | Bin 0 -> 50166 bytes pvlib/iotools/merra2.py | 76 +++++++++++------- pvlib/tests/iotools/test_merra2.py | 52 ++++++++++-- 6 files changed, 94 insertions(+), 35 deletions(-) create mode 100644 pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc create mode 100644 pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200102.SUB.nc create mode 100644 pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200103.SUB.nc diff --git a/docs/sphinx/source/whatsnew/v0.9.0.rst b/docs/sphinx/source/whatsnew/v0.9.0.rst index e5d1026a3d..0f4e18959c 100644 --- a/docs/sphinx/source/whatsnew/v0.9.0.rst +++ b/docs/sphinx/source/whatsnew/v0.9.0.rst @@ -213,6 +213,7 @@ Documentation Requirements ~~~~~~~~~~~~ * ``dataclasses`` is required for python 3.6 +* xarray, dask, and pydap are now optional requirements. 
(:pull:`1264`, :pull:`1274`) Contributors ~~~~~~~~~~~~ diff --git a/pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc b/pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc new file mode 100644 index 0000000000000000000000000000000000000000..481d277565b993c5740850ddab83bc4f43d0e875 GIT binary patch literal 50166 zcmeHQ3wTsTmaa}h2pFCsFzCpIQ9(?a?z~7M`PQ&s2Gsj7SL+Pdo6A%n*aX6UlASOy!;f6~7L((Qdn#88;h7ts$3e@ zb6WNXX%l&RdX|I{8Py-0##{(F?gS_aW9bvw*yBZ(Tcy8S`k7-e-i0XcG7_OqeLm03 z*l=EnS(zfKNCQ|pjxMAV{L~6Qv^kg4amZnv~-an>sZX4o|ZOp1pWY* zCZNQgA#7ZuNVr}4d!)}+i}*wPFZgyV+y&h%QECxT|64#sM@oXH)ZR6wsC;8>U`F79*M^^; zhWeIP#zq5WZ0}$o5s2#vK|IBO>^Mj{2puL84@DyqOLw!%YHTOOxMZsikQvCQ-Xgik zsDn^=dYQ;T;X>I5DZFd8IsMN2FTAa`rKN@yw}=GS`7XAaqwn$FGJ_|bbEZrh=3<}l zr1SoL3se+45Vj(9}j$s7{pqI-|nkMQmI>K66 z`V(b#>Y>@m&Q6`4{A>>v*~Kz}eirjJRJLICu)x@uREp~Q#+s&<`Z-Ph7GGsU4a+_W z(_Cx-&(&1f$eD+^a0pSZFf^(tSEX!~L6juB7J%6WWmn%;TUTF|lrb2|>I%5W>UjAe zoFl?i?kn$@yIsVv)83xfi2XJjvY_A^%CezY{w6)q7Hlu54t0itfb7+8WNs``wio(4 z&xuTkCjyaRAQtpT0-ZXfVzmarMP-L-b%YXKK^?PYK=feF1&pRFfKHJOsxknw36^(7 zLW#J9XB+Mk)H`Cj9{1~kcp`fONIl3Q*d16A!VJB7)q>Ii)5jts1kayuKoDb9!ju0q zg(%FB4Z;Sr%%54^M1!H=R|9U~f+7nA1&X`i&aE+v*FsgYgKt2Tv>2k@&x#B zUGg~iaP52y(%{;;6MVQteJuWe+)&PTykE&YTdLUQgM5r>KDIo~$A!q$J1(yp!^Tt|42@KnEYF`R*w#JJDdqyrU-;O^ zm-yJ`B|difa36d1g(Ie)>fTP8!vTi_4hI|#{IEE1G-k5vS_F?il&t!eXR?8WoXHNx zX)(Agm+)a;mUKH0G!N%?z+Pz`yCCt)ftc<(GmYJblSnr8kvo6KLGP7d5R!6T?9Y7O z{^%F`!-qeSMG`ohPd$-E^NNZd4kW}p7ILMY$M%^k*J>Jz1}5Eurm>M|EV1O^=CNmu ztjM3$+FVnE76#4t$%mZyF6JM!)K9Y!@@Y0fVe*xQe?E;%Ps`Z0M(_z?bm2lLO-cu|1#&X?@i>8OlhY4c zz8%jOMj6xCm@GEtd<6YnAb)iKOE&L%L0w~$o4-pDYhzi*vyPg#^#nwr7qT#`Su87$z8BITBmU!;OH*d zGvB{a-E65lQJ^LyQHfJMU)Q$4mJB;vb`|O2!mG(;u(@mnl!WHgj}H^cCrcFhfd|iB zUe(%A3;ddeHV2T#{`+S7@k7~8jL2gQi}CT(8-}s>uzs*>gYli`tUH0dg1pRU4ENY> zy61wE*az@~!F~pEWZ*~Fp28|I%R<49H}`Hog>@rh7T*nH;ry3#*)@omgG<$nEs5^& zuv_p~A%oo_#%`~>v4{=A-MI?UFblLzwX6l1yP7fFa+;bxcOe^uS&Cq2fw%I+d1tf7 zu>fnuV)RSV=a;b27y)-dHh2$gTE<4h54H&ZG4^WtAC`UfKSfW8(2wpvZ~s9ij+mN> z?Ld~~ppgj9*u9X5!i){ebZli^>_VCPfT_!{zhM7?9(F7e*oD+!6KE6$j2(%UW3QS9x4@Kc z3_w@M9Y|9bH4Oqb0>=i3buQ73BZ_Un6p&aaAc4_nkiwS1PcdK|XY5Q&ft!xUluI#i zKPmulvvDP05#z$3VtvgzjO!Oy)mq;KSxJU-jC!PGym@{HSP>Z6`lqZ>`#`Qit*Tx}Z3 z=UwrWYfZxCZBOoPCSh^fBkLbFjnIeRUBBBT02W>H(QcCzS@qv*o->WFo2Fg$9?u8-l$0j<0Qt{dQb z6|NING%tYbO1O5zl?_s}4z6FowF9pGa212}O~ADkt`Fdv3}QBjqSkQ^2OJJK9B??` zaKPcfca8(Kb=8J*bZBy{9k+gq3(O-t*w!3~bm&!jM<@~rMLJsH;H9Vl8c%3V!{mTF zkEfLXxjo#uU|l7dy(SW*ylS?*{L;b__)BIR%CjvA#S_ukN`FIsL30bx$81-@MR%Lz zi|K)c9@G}E)Hp`#_PPt)d0K_mRM}Fg&8%tBs_R>PS~Q|nc6D^c6Iz}J{_?z@XsUS=R%s(JI=-l7up@{3Ef!a{dZej$3(3X9y@yg)1zSRB^lrJ6WWtl`mN zjURy%|9uV3@T0Z5X_5<8KKPNyYFKLkhsCadi;C?FhdQCyS~Z+^)#fbMV*#jTTq?t|1fc953I_B zg*$OVb!IFO=?d%h)$*p;r6<8fUAnQRxw$gWU*Pe$6M^myuRky7j|GDMrWI}&ZXS=< z<8?>c3P_yHjYWf9Z3*b@Fa~t9IDsXZB-uH!5Qv;WSZfc3b*)2>;8@XvrCKA5lUV^6 zEBRWUw=_S$v;cm!Mw; z@X8!4T5ctb%s_`8%xMAn71nbb^lm+zQyGuL`CvQ<5slT4ew@`rNI44@7U5O=nEYb<(YLF*4kR@0kG|i|Lb4xsKj|&!1k$v;^c!KiK zv1Q7H%V5b&-eNstFe;PTJZ{J` zikA&l$_CcR^i;Lrg>iI!+q&p94wF#9F`kbONj>d{alY1{`ye4Jeze6qzC&YCjE~w zuXX8n^r*6WjNKourJdR*RTqTxPL zH_ph*iSV=eGYZ_(+yyyHg6(vg+{KUX^XBSCzDFZSN0b<)S+c~nxxgdQv~^;B!E4nI zVn-15MLXfK6wzOBZ;Ea_v$1jxmt**BK&cii^yU@#JM;WY0}e;utB-(`O7sji>>;?eMFkd9sBm;K$lu0jP!EIj)8P3A%_#Y_IqmJC zHr)lI2x&I!?Rrd)wDCriMLftsGmsOgjmBVZBZoyP|+$|S|vzS-P zxtlL4BsAxUQ8sn((vuRKCni`XC1ykyx?%1II!biOR=sn1G!_W+sjsXG?nY6Gyd}kQ z$Z5G{pnSYVx!%0oeCS*gcuo2q{{8@n;%F?CSP7)KL$>1{7SWY*CAh*3(!w2$bxfu5 zw?M{(L8D<#T#Bchjd`u!rUlKF)g;23@Z=mxV@2*8#{`Ofu?r5*trY{s{@w-Odj%2| zCB@R>g$3x@1s_lWL2y@9aF5+9{S>?4tpeHq|M&gumy>MvkKy1YcXSey*9K#-RtCp+ zvYndMXr6(S=Sq|Q;(tBxJGth+ 
z8Pj3o6jpRu!_C@o_Blp0h$4NqZv08?GQ1l^;Aq+L;+vIhDIy>X#_wD)wVC}De^rZJ z;l}j*Hs+IAaDGv}XwYxjv+x7Atx%n_&~=S>~gUmP&*`O;`Iwk+F>calBB+tvVMgk?)!|J!FgF7;t2z;j0glBx!q!{ClkS^b{RGeQ43D{`{zROQ7vR;Y!%;|<${)~_%9DMM zADZ9bGPwYCWk@B>viP0{>~_#=FCN=<^F~$N7mJ+_Sfe7PviLvW__ZLY{!gvE1|p-t zt&)?2qoll6iMud%%_lQIQPt8evG`@EP2n3)PZes3$|s9ytCqfy?{`tv(pSfy4)^6} zaPPvhKy{O9>8D8CsFwa9Bd|?+>@moK@mwvrqy`xX5!@aN$t+2=^aZ4_lMVJ5e*;KL zwN#_1rTqmZ?;#b(^$!Ou)sk#kX6zZv)PTk=!i;UrjD3X}y9F~gCo{Hw&gH|B%TxVR zGl$B-aNt`~OWXb^3WpMcF0)#?K&}pqJX0WxrGJ#v(xg!bwi(qD77ATDyeQ{0zJ3I1 zX)MOLGEl&wgSXMZ-aEZ|Y4w47UQVr7;1x{V(L0%}^a?Jp9*hHqASme`tq=>|nKPHbzj+$sA?((2<9%GjLD*!sUIuiAX1b#d|^ItPSAAuVvQ66GeTeN>6^*wWKK zLXAZ)KzvkMOSp4^4&SSm#kO{eP$u2(ZV~>z{V)BGQBtznJ!d&g)|6gvc$73sj5v7z zDA!}Xw5*)0^k|faZhdB{s&}@|7c_cMRbx+oU-S~H1vMM_bXQgHJZRBNKCPd&U+SGb znAc~}JI5hWqm2h=FU(7iTslD(jOTi1AN~Rrk<>c@nZ;V~thUuVGZY(d^x&^wgWEW+ zpEzKtcVx>lW6v<_9b*?^#-cK?aRQ#i6Meqo+6SF67I<#%8t_yeJY9uOz3(^?-7B7EE`+D@$me`} z;rYYK2Kt1@_W__II#eDJ%a}$DM-h(iaKPb!!vTi_4hI|#I2>>|;BdgloiTED)=VdUn}Hak9TszL*XV;jcw3oFqW=d-P|2yXU3CRo3YfTsu#?ORgxStH|Dq;S2H z?u1{IgTSGE)d@>O&j9xIJttgfHQs=p&g<(VPQ9_czuYvzK(8|t=}PEvEgphLDER~J z;-Ov*UaFCguhyVv+uH*0C@6es3JZ%4S0!;JjtWKvCk|PC_z4~~{(gOv4J(U|I-v+L zMxBtt)Cnm}oshzg|E+R>kAFH<#!GVN6Tav0(f(|BWv;GHq>)O(y`dVPq^ zCHgzcfNH;dy~@mkzfEH$v;<9ncU@-!G)nk&hod!x|y2&Jet&0t(wL z$sHc8VVqXGLhSHp4dAr8I|C~Yzivb$6gf;9&!gNM4*co)JfZj&C2ptSRuOK)aDi&k z#gNxq&w_v8aO=bv#w`p0E>DJ7{j#DE35Xy^!WIv z=YM$>PkPWh>FA7q@j4Hh5z}{8amG9zu7e!fsmAdkk?Cy!^f4Q8M)vn{WaGTSIFj*b zi7`w^N+9~j?)dm0P+?%hM{m9K!Z$tsu|d@Fuk%T6H@u)H)sVN#*?W}Zedu2Bjv!}* ze;-FUE}t4lG#+L%hUpL+L_01O2l)7>FCTH>b(1%L{!Nd6JVQuhlBV{7@M1j}O*QsY z<#NV7tUSg9${4`~wFde2%)#aTu0|&%6TF3VKMk}3SHbDFQ`IO{1;b|22 z35Li$3Z70^Tj8Ho@Wz$JzVf`N;Hg}FVKI(>GYfpeVTVfPURUVLpkl!!hfpZ?e-z}DtJ`1soa+e-cAw!Q^DIER!)-)r1qlI=T>FPRPb~v z-3mWg!J{dW$_-WUbc)?7&qxJtXNrtg@O0|kivBnSPp9Fn@FyyGIwfy~pQzyJ^t=`R z6a`PG>aFloB%aacn zvmbu0;O*>(3l+Sb{jf&ClRaQH0M9lZ4=*ZsJNw}k1#f3RysqHw?1#4$JlO+Q1K>T0r*y)TTp{0XTvL9>cR1j1 zz~O+y0fz$)2l}4_M`}NOsLT&Yzgv~`Qw48lKkQZTcJ{-U3f^v>@lOSBXFsIL2|dxZ zvmY`Qyu6(id)QzFZ)ZOYRq%H9!$<{hXFrTq@OJjYI0bKKKb)xG?d*q%3f|6sI7PwR z*$-0`yq*2vR`7QAL!N@Svmc5Syq*0}s^IPHhcX3kXFpUacsu){R>6}!U^M_{Nj#+! 
zp5zMoH2w(Z_znjg4mcceIN)%=;lMY?fg`manw0qg>36FdG%I*J`(d7fx3eD>DtNnj z#yJY!&VE>|;O*>(b_H)|KP*-7GHAt47E$na_Crj;+u0A@3f|6sSf$|Y?1!H#csu*y zLIrPUKde#kcJ{+M1#f3RY*6ra_QMql-p+p5q~PuBhpQF5o&9i~g156Du2=AO_QQ<| pp6mgu4t=Y{Q##>Eu8?mxt|`CcI~;I0;Bdg#Jh$e(+Puw>^8uHc9Jq3g2h^|>ng?Xut1t0E%4SY_S4zE-`c*B9%evM#=Gx#v{XnRHju(>#LD zt}mUMs{ZSB>eM-Ps;awJ)>Kvx&K#4;&}C$>G&YR?q<;sb+xM7=p)lQ_M?Wn5nKhM- zDOn$+jOX`LGbD_%QT@ym=0eDECqYdZOC8TfpD2>tDgB+&Pd^^xU5Mf?EgtO9&*zC5 z8^#+kJzd+-1x2ZpO-#cCgKn6zo6TlPzt)~qShjy{jW^5GXjQt(($Ne!qE;3K@9~lR84gkZ1W5GyRVyQH%q{eZTMO-)s-s7c&#UG8GbIQl{F9p~^{7o8_>4Rx`P z`K?QTelIi>83=okda`+E?FD=LW=?{TpE?p#4cAIJu^203`Ny#Q1Hk2yNmE4o1tYAK zwLe8BCl1X@Fgr0l`B@&UvWuld{u#_ySKfrp!wO?(QY|WL>#G`?YG*dIH2KQws#w;k zc+bTK@Kg=u^_+N^3WpG-3PGoedX>vw8AP`vYXO)oE!l8abxmzWLdIYutI6jctKs#7 z@MICDdS83@tOrC4JM-Py^~kqbkOT$S5S9hiYH85pt%0`u%3wz@0LWf^BU59Ivb@0W zJS7q#7Wan({%D{j?C;RwE;efrTvT^xR(mks70@wR8boJuDqu7v0d$gN(3Am?Opv@Q z9E`^#Jj+l@KyQ!gdaOnF$KqMzK zE<|CP><~7f>HKMx4Kx_?uNiO)7Ze#NC{Wx5ChnLflJ*FSbMJp{d%-zgx7Xt?eCPiz zc~RUyqm27oW|ZC0(mKcL`tcu6E?M~V+>({q-jZtu=agL2;VoH{eOAfMuT_>@`A|-Y zRXSsQ#*<4*uXvZ+Ir_kIR{C zCQggNWzcNUFq3`qK;tlO2kev9u?uc~F%aRd^HSJdIEiFAkKc1M2koAULAaIeVt?lI z_U)hT4;}tQ7PG+FeDaAbnpafykUuWwv5+eHJhsnN*;dn7G%%?Qn#P7B(fERco5x-- zqAYKEb7NH%S{O9nCm*KTeYzU3NKB4XRM;b?cqiacz@dOc0fzz(1sn=|Zzv$-5`F1(st8k@xyK}~2*{q#_A`!tCn-@o;e z&Wh%`YM|G2v^gMa8u+O)A?=~3+26*&8|npOkApFY(eCC z54!_@6)@N>V(hmyw-&NNxI0%SI%bZxzM3^5ahEZMTTVHtv*xlvn4}Pf7I=$KnSCL9 z8Y{3;EJnW?d1(O~g%MB+lEHgm{l#n){9yC&A7i^pe}D0p|5Ny^2>syx^Zp-XqKK)R z*cK$oWHb`N8N1ljy{4Wt@d_(J_AD1$ckFTuxW(=WyIThMcH&=U;212&*z2qoi*6LV zknDO4z%I%Vgc&=Q>DbG<*o89jfQe<;Uyy%*!;Zxac3^I>2{aM|Mn+<#$W>FI2)xsc z0bq69fi$tGX%MjCI5t46bBS&oQEUy~0f}`IW-tm3QrI#$83V>~#?He#Q0Q2^^D_)Q zf(iiK3|tAA$G9*kTV1sZ_^n?}xWdH$i!Fvz_kmm zlRz}jf$LhhcEXhfQnLoGE8*G#*M7K)K>Ehv+631Ja7_d;8$eO(IEMlb1sn=E6mTfu zP~clff$Ew{!#O%MIaZEYeXk45BRtsF=nuE+6?%Ix91e!to8jQ4XaE{dXidZ9fIE+; znE$yw+__*~C6T-;9H6vnvfRAlf~oMANH&BgTM&%JBhkezb$R)XO^`n(yACcYZGtbV z`{Q~*o4;7&7_Hmu&Ufc(Wm-ddQ@J*+s!6M?ZSrZ6uvXsH-W7{$xgPk-^=jVy;)48Q z&s4Y8)Cqf;{!p>z&2@VVr=pivG*v4oa2Mtkpf{zU(5=n(M}z+PAw5>Ci6g}t9v#;B z5jgSRSJwzXnkyS7xM1ajABn7lwFYol?J~Hi+P+Y*1FEf6!g*J1W~UzYLn~uiw;sjA zteV$7)m@}bTo8|U#)@-tIy!;cad$^&yE~xgv<4z)Kvs@(DA2zYFpqzjxU&&2x|h)j z)oD?GxGSXBR?0$=r6<5eEL~sK*jS$1lJD`jHP^>j~>DqKZjFo)N zlV9vDDuy3ekT$Kp*#$>#aro82soDl{4wu&&>+CmVF6yi)^)1$N1CzbkHIc4pXz^sM zT6Q^%OnD_v0a(OHU=Y#PeL?k@9(H{u%Us`3*-yV*{;=$G!ozra& z#zQDCzkv!I$0^Yi>54DVqu~yySDie$fh55Sp=n0Fm_60w_PAgH70GwL9*a{NI<`!S za2YI-$XjGcRRw2`ty1CI7p3w}nd;7)Qa}trwQAI3kx& zd-)j@C5_rBIBA>1mi*@RSWXh7HKjGQUMOON5XM`Yn&4cm$(Uv@)^W) zdxt;bZjZIKy2JjM-`yVRcER!>CJYAja6H%+)T1q(g5O&Lk)UW3t}{|ba2C42Q#d8( z+^Of*Ur;csqG7fdc(^aUi-QFcg2Q^_YANM`yr1pyW_xn`P`NyrbqdT6_Df9qA7h^V zGIK+ZDyzjPUTzuqRIW*t)rO(#+x)Hi!K$+08F=I#Y1Pu;1DOD-Fdo!nvYhdTa-?oN zCwFp~pUpof-#x{hKY2l*jZTxh_|bjdUEN6cNEq`GHAZQcsBv|+*Tu`sU^QZX!CTc2 zVu$#p10G8e{DON^RPeO=@|j$Y;j;n7TA;w2o8Qur+p^Ff1`bf9kRmmOK&u8JC?71q zmhoyImJr5=95PsK7^GCZXRslM;Mx{dSWKZpk@+Bh>mva@1kz7~=NmMmvZMd|bsEi)>RaR%`=fLA`*)W{Nyi(5H zd=Vj`IYo@Jsh!uJl-O)B!7?c^Bf7v1b3f2gf+d^vj?PHbAL3JA*%aK3q8fRp7KtI} z)v`-K`FIPny}8+Wz+B^aOZp!EEq)Nik!Uc!7_#CH$&Pzil&_R4fkkeR7Vb#2J%{Sw z1PS8?jfN?4DV|c+=QevA<}{X9k_d0WlXE1E6}fL56Dan@E;u~5Rs=cr_b&LpwV0`> zDb^M*tU%8$_<%A9g1fGad+Z_UC)ovW7Rdhp|M`!8I>BcD7!F=?M<+3PYcK{|WpI2) z%MiBu3CR*EMb@}~y!i^kFa5(w?}r?YunNKFc`9fCch`TOHGV&wH+5WpQNXz8OQXryvuq=lBss&}fkqZF^|gstOl)E5PE+rHzGz*b zzuM&FbMO=pGXGacN9Gs)T2ut(09`2KdSw3AYJvC}iWP9p(}bTy=I;@RAJYH$O)H+^ zr~cn66U}y=<`SE*Y{8p<|AfaSKkNi}?kIzVs=@9sxFb|nmr+)8-y8TJ#=}ndC}=^s zxZ86~>!10Te4!q8D)6bOIcNUkQ=#~%6|yn5itp`Gk(a9YHXfJ#6RG%aK?Kav6N>Ku 
zMi>>}by!+2#Ro|+o-4k$@K>cl@y(D)5{mD20GJft#aLca#Wz7we7~&v@@34$as5L9 zOT{O9mKiyNnL5zOBFxy=%*ZRu$QI1loy^$#lLn3+**`6Eayb+V2*n34fH)#!$68NW z90u;bOH>ji1YIbDdsL#kK_He()wo55*|{RVpZ~WGdhg%xv9a_~n6WDj#SXeuSk`

^SCe9r@RJLt6+PwlvUoht5&#Lfq-QIS%a|DSLDQe>$9Pp-TMBB8)7 zk&}a?q`a1hQW(4b<7pqOYH5d9{4%tr;H~F!gj%Bd$!glFrO)L1T~xL7<%wrOx%>?7 zeOMQ0ZbB`cjF}tN((h#i_DPRD21ziUt0kAzAOj(S+hakQB%zi*gFEargFVLI0FqQK zRViv|fBw|nn2Y23hXR&rN%kx=at1SXppiwGv9FntSD2A4n6W#VvG*tcVrXJ{s()JM z_+_=eQd=0Awap@g8ztd^F_)qxS`31pGJn8FwOUzJeE~xNQsn z{F$mHHwDGrL3|k(e?PXmG^A>uO&6=Erg5wOi<^X`6kVW&KIh7N#qA5;;J4n9GY8ij zaEzSO?A<;0WhbWzK)Iu|i54YVChn6?mvBqZf+pRRz~8p{6ScKPa;DQWbGYi#3nvL( zOZ5}<70gO`p|7re9xG{}>e@eruFaLY_I*q@*Pv@@-4dJ3*FO2mrqKkvS|-6-a9vB& z`-HBg<LBWH;9Zt6fI zizJpiv9HB)r{tA{)yE~2u{)Wu_kUGdvGGXT;-o)x3J8fpTHs(M%575ns1oI=g=c+; z8jD_lxLsOHxO0IH->a6vHg$+lIu&=n2!Gf97yo9Yl&p5oSq_ygrPmuCC5;j#3O+K@ z^;9n{D<>;G8s)J&|FlrmJDbiIdGw$vMxXUJ!6noR>NfJJR8{Y6wcwJywNv&>z4JV# z^%?ZeahR#m#=BcaBy2n`ogfLubG`F9{sI+|&^vya#9Ht0w@xP2JLf1i-l)v4-hg5p z*H09%)H||gnUOQhddJ8j%-Gk=$Scgq7R=b4;+>O=XGPw`|Q zQ9R{OHUh;HooGZ<&F{2z2a%+LU^i=e9pHQ zoByobulyYjJYR`%wu94Q~$2@JV&cCN2_&CzIGJgZ)t}C_ucy( z7D@_q^f@dF(-(j!Oy>eAOlLAEylunyNBPsH^yxF6>;1p^@iX+m^uc~hiau*({E8H= zcT!3CML7r@+E<;hHuMZ&U*B`Wg;rw$^mJZdA93o9?fvDZafbXlg5j>X9@An$c!ZKa z&@LY8)!?NXdH8A#dbYjQ508Svm!`0?=x|jMSK_E*RB@t^)rX(pLF4b%H`%bU=!gkL zh%sV93KJ7jn3#~lj{l8PfRBGVRmN*_&ojOk@zMS)c;zd7fXx?)MB$w=@YH*ZV|smv z%_aDq_C&}>Fz%n=U6LurkEI!9#Q9oBHas*hp82I=Lmy+qVTB9kG_2wge+vxF2#nMA zmKZ*cd#S%$Z)pp4ElMT^4myitfB?{Zc+fjE+aK!Ep=fZ!wLabf4e|8-LaH74_`x%5 z5$4qqgx6F+WxHi|hnLq-o>!Za+2Q3ifalfS;a_z4ZKFJb;ls@1C6s%^fIdB+ClueJ z#O)N^D#C3TE>JC43~9aT0{90Gw@!><+%hqSX{!XWj^h;Ify~9-@@^q z3)Q2M73<>fx3H1{E&f=ni_3=AknWGR#4cW(Oe`E}&W^OWTn?w?lJMIlEpX%4C>>DJ z7{j#DE1UNH^!WIv=YM$AH(E#c`bI0AVm3hg z{Q0b;IN@m&_X&o`=M_AiuC~Izpx})wi+!beMZr_O`odxy|7I5WghPf(=H68DmqE)U z?(ixj+_{V-p_+u09o3f|6sSg7Fb?1!*|x3eFj3f|6s z=vMG{_QMhdZ)ZPTrr_=Dhbt7ko&B&}!IM2;H2_vgJl!We$rbWxd=Sp@9SS%Ua46tV zz@dOcfv=AOM`}N;QRWAv->q7(R>9lZ59<}Yo&B&u!Q0I|?1$|N-p+n_TEW}d z56>!iJNw~z1#f3RyrAIi?1xtryq*26!x{x|XFse}@OJjYdIfK1KWtF&cJ{-K3f|6s xxJkj=*$=lWc(Mnq82V0$r~8B_xkA3(xTf@u?@+*@fI|U?0uBWn3iLk({vRh6Ou7I7 literal 0 HcmV?d00001 diff --git a/pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200103.SUB.nc b/pvlib/data/MERRA2_400.tavg1_2d_rad_Nx.20200103.SUB.nc new file mode 100644 index 0000000000000000000000000000000000000000..360c76174628be8757ad60d13cf8545131d99a11 GIT binary patch literal 50166 zcmeHQ33yaRwysV>2oTw16CJtmR8W(qI~ySqlFmi~Nx&om;(W#?eG?ix-Lbn9A@4o6 znL&kl=!hTAr;fvO7}rtJr}zd(MMa+rqYTbdT%Y?k?&BaT4)2_*I-PqfxqSl!*IQq1 zSE}w`+o@CM)Tyd_@5R*>Rfi3oIFzBw$Y5z~B>$Iw_DQ$n=n?~s1_c#L-;io3LUuw7ro6EilF zH)3wOxK$=0!oz!M%vU8sm8N^?M_|yEmMUY@eYItk74yq@G-FgI6`EFASJPNsD8ZDd z(7iREH~TQ=a!ueFxshQZ?kw6rYqs9hhU`$G?NRV+jrcO;s89fUB0hc16 zM4rRg#5!@~57OT$eRjTx-@ji$x0|38baO?mML_+vfC@%RfKRBpeSBF{bp;!ORX+^k z1Jh>B61Rsk0uR1Av}=BO^}LMHcqi3$pX@l|mdrhj4FkfAU~x8qHB?qrHdNM^S2owy zG&V6d4syo+4*28#m>w6IC;3Ij!JGrYFyUA*5|&sh%_^y}pAh4cy*fxHAfLEJQjrmZ zPzg?VLVoHPOf^a?;lyIBl;sU$dHaCNC6lIz_6tV1Sl0e% znVdK@Gr{b{^yFuHu*xo$4*6%W@>*XbHV-R|ok_K*sHv;0Z>*VL-`rU4tF2_2$KX8| z8^lx9`|3FHFcl6VN)>`m74`DTUKv8SBx?bfoj3c+o2#m8$`UdLBUyDG_gFQrAB3|+ znCgA`Eep1Y7JH5<<{?S1p1VD-)jlO&6jt zO?C(y)VOF)MLi9Myk8C4$OT0P3JMf=foa?4h@?G&;^aG?dL+Nt>-Kuw1#i7^*0bXN z@ul3~IT8hhiqXx>3;RLrJP{c_!6de#{K@5V{bF;>-3y@1d7 zQj8m7)O(e3?oHF}1P0ep(Mf-@2-*HZmtwDShuzHBH0*A2HrR*4zcFqtzM* z0**?_ob$Kq)XkRaqXlYm0u?{r^L14tY{{_GB&$dd7hX*+jV)j+pe8h@etd+ueXK;0 z?|<;Djf@LXlPnSBcysrb<5(9W=JMSz7Rr4on_Y#7`M6Zg z*s{n@54#cn%V)4##MmFIuPb0faCfd$bj)IHeHCj&;-1eKZaGa)U9f}=!6XGRw7^?= z^up8F<5+=<#bWe}kzLE!IE;W&kPO~^>(6B4;2(Ad{xJ4R$z5lD^}hvAiqMblm-qh= z6Gcqj#I_=qf|+lep8z%VSw*z2qY zi*69RknAc9z%I&ggc&=Q>DbG<*o89jfQe<;Uy*-+!w$y`wqtIv2{Z-+Mn+;K$W=3- z2)xsU0bq65m}4duH4Oqb3daVBbuQ73BZ{rXJ0P)+!VJctK?++2Kf!=;oUv2z4iq{G 
z@B9)2_oD&;HxE|=&R|>^l&+~r(R&<|CQ&BT+NK*^T;hDJ3#G{^9BI``-eBlGP zU1=K0XJ2;2)h6NcwkP{$ldw4P;WgV#BXrxlYj&6fz!?{Pw8JDty8ma@v!?NN{ftX@ znS{=k*?|{L1NBo^hN603b^is>tndGKr1hU zYa?8*z;zUe=EZPb4%ZI2GC^uq!}W8xw!*a+u0oK$akw_Y^*&tFK+FbE)H=?gfI|U? z0uBWn3OE$_&QYMMy25ad4o!}8ezt173(O-t*w)|=x9MeiTQD3BhTEFp;H78)8c%3V z!{mTFkEe+Lxjo#uU|l7VyfPf1v}&@P+@k!M@Rvw7oF`irjKw3-mCd!ec@2$_KPI~Z zE-Gzmc~tku^?dS9bYn^W1SRn#<=Ymu<#>ul?c#kCv{{N;Ex zZ(dP;UJ)Ea(;7QqFVi0?(!4otZ^2CTatmi_`T6dG+hc;Gd?7`l&8h`QS$)D`2ew99Fv&E~<8UDA*3w)+*qz?T@)TS+q$2($0)2Fw00JY=p_Kr4pK%d?ch#U`DInJTLz*4|G{$b+I z{_W340xOFXs&k_LaA!!bsgQ*tOHYN1Sh}vVp~08aoagblHP^2|= z>e^gCjFmjilV6lOqsWts!m4HaU}XB+^gvc4$ghx|U8{HLp)6l42IqtEAVegb)!+{V`F~nP(BBr0 z#Nxr07@gB?3dTbyFTaHfjOCPQjC97A>CtdI)T>sW+(43Gh0rvkUd*29aeG{_fQsZ> zq{rfvhK?;$B3uSbB=Qy-QdPp4W2;oS_C=|@GiJJTXXFz@P^}vDSR~YmB*mgc4Z)~P zB=fi-$rxTg>|TBbMMKWKNY#S;wH2C7f>eNm?U8WY-xBA< z!X<8kqI?GN+}7@oxZ7f_E$*;C=6AP6x?He4hzWxMJsc0V2K8ujhv4_-KqM&IgzJoy z5h$U2Pr;1oC(k^&?$rDRW%Ucaz{7p%T^uZs5FFMUS4$}mM{L_U2^g0&`8~E$MssH~T>pN20;_O2~>kBs=b5QNB{H1Xj2~ zTDT+8w&_&=Mo1VpXf#ZTOYxMlE~m*`zqr9yK_a{!PtK7vR^+~MOrY2oyWsHLS|Q{( z(7WKfFT+elO|iCkVFh}2!3UK>5Zo1|+++7jKglk5lRysq?-&2_i>WsI$8hkHJ35KU zTZ1v!Dud%YT86OIzmP1EQe=($$D1!8{CRgD^^Vx{rIKX@5}i*-bA&v#om}(ZfcIhJ6jpRu?G4&U_Blq>iYk4!deSlM5-bfOaI|cF{!JfSjtEGC@sF;U z-oReK|0=|;a9wI{3oDmNaDGv7#*p8#XW$>)wnBBzg4f*6_(6vb!NM);e)X={Vw1hv z^5-4zvPoDY$P}&7*qv8>kvU~AoHupcKvBTB=S!o>*t2W{mLxgD+<`_GG4-{HS4?bS z>P}Pdf3ad+V4&LM=~wMCN}e z5I?5h__Y^4!B73aUMiaHI@TpNVcD|RKKP8sB|q#0crFt~$MR_9PwOWqm$ z@5aMU_$X+;Pu%S}ruC1!v%XXhJLQ+FsKqD#{d1xCs1>p?wu^n~KuhY?1_cLkQ#OYuPxjOU8)b^Ncwp!nv=Bnid$G5}19?@TN&sp6ZeD864; ze)SyY;<$mKfTiM-J`rFv{SyXH7&9;}a&p-p3JApq zFMv2GW5-%gSsVuLx>-~bB?Mh4gL_n>yIvqxN!7SjhS|v?et>^lhP?Ard~7Uz9A@lF zL$QM{6_z#cL-QM4Iv1d>G^wOn2H*35-41%~#beuVSf`5nLb3A!YgD9Emj3?rUyBUY z-{i_`FcJz}x11atBIVUBN@47(Pv(50s-^8>@ypPf{MVnFF4Pj$Pgc`bE$xx-cTv^S zS4W-%cHqz1hP>2he$0=9kXwOZ5}<70gO`p|7re9xiF1>e|1AF3*v=_FYW3#Gq?w-4dJ3*FO2mrqKkv zL?*#na9vB&`-HBg<L zBWH;9Zt6fIizJpiv9HB)r{tA{)yE~2u{)Wu_kUATw&7sg;-uey3J8fpTA;rY<$9@o zREhG~@{>M7jYTg&d_-DHxO0IH->a6vHnodTIu&<^2!G#xi~e(rl&p5oS;E^4xue$` z9wLnrB?{g@#`Rb)Eh{H0JsRbqoBpy~)jOLOi9C8xWfM;No8S^^1$7(wRH~|X9<<<+ zPitoEm3n6R%zDSjBFxy=%*ZRu$QI1lo#LINviH~T_?w*N{KqGa(Kg;i*3OMksU?Px%ZN(Y@kFDDjj(C}8f? 
zd`2npgdeTMQ+@Ez4LUU+TE|!8#|n4?Px;Th%7^MBVj0t@<0!)M9SS%Ua46tVz@dOc0fzz(1sn=E z6mTf;Bcp&^|9b+Poh9}R`I-AdwEI2ci2at0UuyitD|l=W4e|@`Et?^Z#{&Z0wzt>M zV*G$D!gMBs!rQK#@&JF@lsFz%n=U6LurkEI!9#3HRdJ15zA#|3j5R`H0x8HQ#A#%Wt~ z3?Iim+ux-(w}v`bBohPu&SDr40GbaEdWRPJL!CMl4Q`ay$2*`Qp1xm5wId%tc!n*) zyxN2CnhL0Fm(1?K@*2VOYE?2ju)GHGyt>-`D-OJElt(apfO+gfxi=E%)AM;k@hwW+ zPQk4r+=k%-)q=&4)|*a+A8@#JVhrP!i7`xDC5Uw#rvM-S^sobunO9#2Z_fA*j{h8} z9*wM6Cx5?%l?-V1$6}paHnfCvf3!Jv=E`JZ;b3!iq{X>%I3<^a-z{l@8^1>BfRe@- zrj1_Nv>&F&$3H#)%bR%2gWjo!X8eoSdC-iQzO#xm=1Fk%b7&_U$NQB{Zv&u@*@!c; zzmFpu=MBb@j7Lk1VLDO*(LZ*_$N!*G0~tPg>&553>G6*pqK?cd*jC)vlp!xO>$HJ#|T(bCS%0^*2Oq-15oqGlfh;Ot`==F_OI>l^& z_WAFVlH!D?QQRjOB6liyI$dpre@4L@R~Gw9^Spwmdi90HIR4En@CkRcB?d_6}+7(GETwMsduaVCn|V44R3`%TEWvP zc`N)B1y85vt?!=|15}u8>dtPdLYSDBw`Qp@2gHhXM`-zBvjUto`6t z<_DzTty++y;O*>(0tIhpKNKl=yLrZJ1#f3Rlqq;Q{k3W?t5WbXXo=?^Ifc7aZ#(;; zUdi9ierQnecJ{+U1#f3REK%@w_QUB4-p+nls^IPHhgJn|XFn`g@OJh?Si#%b4^ahg zXFqf)csu){Tfy7e59cU&JNw}R1#f3RtWxk~4_FO=)e=wl2~To`d>S8wb9{#a4h0+v zI23Rw;85V3qrk!14{MeA0qJ+E7F?#_?d*s33f|6sxKhE}%`>i1@OJjYwF=(Oez;D- z%b*o`*i8!F&VIO6!Q0sncPMx}`(d+!x3eF%D0n;j;XVa#XFois;O*>(M-;rB{qVSg zx3eFfRPc88!%hWnXFoin;O*>(=M}u2{qVAax3eE!Q}A~7!&?fT>;bC*@SeoeeZrGm zA>VFXQ+mgDDBw`Qp@2gHhXM`-2A%>3Yd?Ib%nwMvTh;Va1#f3R>{jr0_QRJ7-fo`p zPX%vhKcvVBJ>_d>Kcp*oS)3Jl*iZ#;XFm*A@OJjYXa#R)Ka5lGcJ{+W1#f3R9IfE( z?1w1|-p+nFPQlyR4<{&iJNv<{;O*>(90hM@KNKi~79IX9NugnifzgxASLBZSE z4+|B%o&B&x!Q0IVFXQ+mgDDBw`Qp@2gHhXM`-2A%@{50H^f A!T """ -import cftime from pvlib.tools import (_extract_metadata_from_dataset, _convert_C_to_K_in_dataset) @@ -15,8 +14,22 @@ except ImportError: setup_session = None +try: + import cftime +except ImportError: + cftime = None + MERRA2_VARIABLE_MAP = { - } + # Variables from M2T1NXRAD - radiation diagnostics + 'LWGEM': 'lwu', # longwave flux emitted from surface [W/m^2] + 'SWGDN': 'ghi', # surface incoming shortwave flux [W/m^2] + 'SWGDNCLR': 'ghi_clear', # SWGDN assuming clear sky [W/m^2] + 'SWTDN': 'toa', # toa incoming shortwave flux [W/m^2] + # Variables from M2T1NXSLV - single-level diagnostics + 'PS': 'pressure', # surface pressure [Pa] + 'T2M': 'temp_air', # 2-meter air temperature [K converted to C] + 'T2MDEW': 'temp_dew', # dew point temperature at 2 m [K converted to C] +} # goldsmr4 contains the single-level 2D hourly MERRA-2 data files MERRA2_BASE_URL = 'https://goldsmr4.gesdisc.eosdis.nasa.gov/dods' @@ -28,8 +41,9 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, """ Retrieve MERRA-2 reanalysis data from the NASA GES DISC repository. - The function supports downloading of MERRA-2 Hourly 2-Dimensional - Time-Averaged Variables (M2T1NXSLV) + The function supports downloading of MERRA-2 [1]_ hourly 2-dimensional + time-averaged variables. An list of the available datasets and parameters + are given in [2]_. * Temporal coverage: 1980 to present (latency of 2-7 weeks) * Temporal resolution: hourly @@ -51,28 +65,29 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, end: datetime-like Last day of the requested period variables: list - List of variables to retrieve + List of variables to retrieve, e.g., ['TAUHGH', 'SWGNT']. dataset: str - Name of the dataset to retrieve the variables from, e.g., + Name of the dataset to retrieve the variables from, e.g., 'M2T1NXRAD' + for radiation parameters and 'M2T1NXAER' for aerosol parameters. output_format: {'dataframe', 'dataset'}, optional Type of data object to return. 
Default is to return a pandas DataFrame if file only contains one location and otherwise return an xarray dataset. map_variables: bool, default: True When true, renames columns to pvlib variable names where applicable. - See variable MERRRA2_VARIABLE_MAP. + See variable MERRA2_VARIABLE_MAP. Returns ------- data: DataFrame - Dataframe containing MERRA2 timeseries data, see [3]_ for variable units. + Dataframe containing MERRA2 timeseries data, see [2]_ for variable units. metadata: dict metadata Notes ----- In order to obtain MERRA2 data, it is necessary to registre for an - Earthdata account and link it to the GES DISC as described in [2]_. + EarthData account and link it to the GES DISC as described in [3]_. MERRA-2 contains 14 single-level 2D datasets with an hourly resolution. The most important ones are 'M2T1NXAER' which contains aerosol data, 'M2T1NXRAD' @@ -93,16 +108,17 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, ---------- .. [1] `NASA MERRA-2 Project overview `_ - .. [2] `Account registration and data access to NASA's GES DISC - ` - .. [3] `MERRa-2 File specification + .. [2] `MERRa-2 File specification ` - + .. [3] `Account registration and data access to NASA's GES DISC + ` """ # noqa: E501 if xr is None: raise ImportError('Retrieving MERRA-2 data requires xarray') if setup_session is None: raise ImportError('Retrieving MERRA-2 data requires PyDap') + if cftime is None: + raise ImportError('Retrieving MERRA-2 data requires cftime') url = MERRA2_BASE_URL + '/' + dataset @@ -130,22 +146,22 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, ds = xr.decode_cf(ds) # Decode timestamps - ds = _convert_C_to_K_in_dataset(ds) - metadata = _extract_metadata_from_dataset(ds) - if map_variables: # Renaming of xarray datasets throws an error if keys are missing ds = ds.rename_vars( {k: v for k, v in MERRA2_VARIABLE_MAP.items() if k in list(ds)}) + ds = _convert_C_to_K_in_dataset(ds) + metadata = _extract_metadata_from_dataset(ds) + if (output_format == 'dataframe') or ( - (output_format is None) & (ds['latitude'].size == 1) & - (ds['longitude'].size == 1)): + (output_format is None) & (ds['lat'].size == 1) & + (ds['lon'].size == 1)): data = ds.to_dataframe() # Localize timezone to UTC - data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), 'time') # noqa: E501 - if (ds['latitude'].size == 1) & (ds['longitude'].size == 1): - data = data.droplevel(['latitude', 'longitude']) + data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 + if (ds['lat'].size == 1) & (ds['lon'].size == 1): + data = data.droplevel(['lat', 'lon']) return data, metadata else: return ds, metadata @@ -167,7 +183,7 @@ def read_merra2(filename, output_format=None, map_variables=True): dataset. map_variables: bool, default: True When true, renames columns to pvlib variable names where applicable. - See variable MERRRA2_VARIABLE_MAP. + See variable MERRA2_VARIABLE_MAP. 
Returns ------- @@ -198,22 +214,22 @@ def read_merra2(filename, output_format=None, map_variables=True): else: ds = xr.open_dataset(filename) - ds = _convert_C_to_K_in_dataset(ds) - metadata = _extract_metadata_from_dataset(ds) - if map_variables: # Renaming of xarray datasets throws an error if keys are missing ds = ds.rename_vars( {k: v for k, v in MERRA2_VARIABLE_MAP.items() if k in list(ds)}) + ds = _convert_C_to_K_in_dataset(ds) + metadata = _extract_metadata_from_dataset(ds) + if (output_format == 'dataframe') or ( - (output_format is None) & (ds['latitude'].size == 1) & - (ds['longitude'].size == 1)): + (output_format is None) & (ds['lat'].size == 1) & + (ds['lon'].size == 1)): data = ds.to_dataframe() # Localize timezone to UTC - data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), 'time') # noqa: E501 - if (ds['latitude'].size == 1) & (ds['longitude'].size == 1): - data = data.droplevel(['latitude', 'longitude']) + data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 + if (ds['lat'].size == 1) & (ds['lon'].size == 1): + data = data.droplevel(['lat', 'lon']) return data, metadata else: return ds, metadata diff --git a/pvlib/tests/iotools/test_merra2.py b/pvlib/tests/iotools/test_merra2.py index 3d20ba0fee..da559bca87 100644 --- a/pvlib/tests/iotools/test_merra2.py +++ b/pvlib/tests/iotools/test_merra2.py @@ -4,6 +4,7 @@ import pandas as pd import numpy as np +import datetime as dt import pytest import os from pvlib.iotools import read_merra2, get_merra2 @@ -19,16 +20,57 @@ def merra2_credentials(): return (os.environ["MERRA2_USERNAME"], os.environ["MERRA2_PASSWORD"]) +@pytest.fixture +def expected_index(): + index = pd.date_range('2020-1-1-00:30', periods=24*2, freq='1h', tz='UTC') + index.name = 'time' + return index + + @requires_xarray -def test_read_merra2(): - # data, meta = \ - # read_merra2(DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200101.nc4') - assert True +def test_read_merra2(expected_index): + filenames = [DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc', + DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200102.SUB.nc'] + + data, meta = read_merra2(filenames, map_variables=False) + assert_index_equal(data.index, expected_index) + assert meta['lat'] == {'name': 'lat', 'long_name': 'latitude', + 'units': 'degrees_north'} + assert np.isclose(data.loc['2020-01-01 12:30:00+00:00', 'SWGDN'], 130.4375) + + +@requires_xarray +def test_read_merra2_dataset(expected_index): + filenames = [DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc', + DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200102.SUB.nc'] + + data, meta = read_merra2(filenames, output_format='dataset', + map_variables=False) + import xarray as xr + assert isinstance(data, xr.Dataset) + assert meta['lat'] == {'name': 'lat', 'long_name': 'latitude', + 'units': 'degrees_north'} + assert np.all([v in ['time', 'lon', 'lat', 'ALBEDO', 'EMIS', 'SWGDN', + 'SWGDNCLR', 'SWTDN'] for v in list(data.variables)]) + + +@requires_xarray +def test_read_merra2_map_variables(): + filename = DATA_DIR / 'MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc' + data, meta = read_merra2(filename, map_variables=True) + assert meta['ghi'] == { + 'name': 'ghi', 'long_name': 'surface_incoming_shortwave_flux', + 'units': 'W m-2'} @requires_xarray @requires_merra2_credentials @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) -def test_get_merra2(): +def test_get_merra2(merra2_credentials): + username, password = 
merra2_credentials() + data, meta = get_merra2( + 55, 15, dt.datetime(2020,1,1), dt.datetime(2020,1,2), + dataset='M2T1NXRAD', variables=['TAUHGH', 'SWGNT'], + username=username, password=password, map_variables=True) assert True From 7622b449a621ad7b33ab88f3435f9767ba068941 Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 23 Aug 2021 12:16:49 +0200 Subject: [PATCH 14/15] Update docs and merra2 remote test --- pvlib/iotools/merra2.py | 101 ++++++++++++++++------------- pvlib/tests/iotools/test_merra2.py | 16 +++-- 2 files changed, 66 insertions(+), 51 deletions(-) diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py index 3c9768c668..0f2441dfa7 100644 --- a/pvlib/iotools/merra2.py +++ b/pvlib/iotools/merra2.py @@ -1,4 +1,4 @@ -"""Functions to read and retrieve MERRA2 reanalysis data from NASA. +"""Functions to read and retrieve MERRA-2 reanalysis data from NASA. .. codeauthor:: Adam R. Jensen """ from pvlib.tools import (_extract_metadata_from_dataset, @@ -42,28 +42,28 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, Retrieve MERRA-2 reanalysis data from the NASA GES DISC repository. The function supports downloading of MERRA-2 [1]_ hourly 2-dimensional - time-averaged variables. An list of the available datasets and parameters - are given in [2]_. + time-averaged variables. A list of the available datasets and parameters + is given in [2]_. * Temporal coverage: 1980 to present (latency of 2-7 weeks) * Temporal resolution: hourly * Spatial coverage: global - * Spatial resolution: 0.625° longitude by 0.5° latitude + * Spatial resolution: 0.5° latitude by 0.625° longitude Parameters ---------- latitude: float or list in decimal degrees, between -90 and 90, north is positive (ISO 19115). - If latitude is a list, it should have the format [S, N] and + If latitude is a list, it should have the format [S, N], and latitudes within the range are selected according to the grid. longitude: float or list in decimal degrees, between -180 and 180, east is positive (ISO 19115). - If longitude is a list, it should have the format [W, E] and + If longitude is a list, it should have the format [W, E], and longitudes within the range are selected according to the grid. start: datetime-like - First day of the requested period + First day of the requested period. end: datetime-like - Last day of the requested period + Last day of the requested period. variables: list List of variables to retrieve, e.g., ['TAUHGH', 'SWGNT']. dataset: str @@ -71,8 +71,8 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, for radiation parameters and 'M2T1NXAER' for aerosol parameters. output_format: {'dataframe', 'dataset'}, optional Type of data object to return. Default is to return a pandas DataFrame - if file only contains one location and otherwise return an xarray - dataset. + if data for a single location is requested and otherwise return an + xarray Dataset. map_variables: bool, default: True When true, renames columns to pvlib variable names where applicable. See variable MERRA2_VARIABLE_MAP. @@ -80,25 +80,28 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, Returns ------- data: DataFrame - Dataframe containing MERRA2 timeseries data, see [2]_ for variable units. + MERRA-2 time-series data, fields depend on the requested data. The + returned object is either a pandas DataFrame or an xarray dataset, + depending on the output_format parameter. metadata: dict - metadata + Metadata extracted from the netcdf files. 
Notes ----- - In order to obtain MERRA2 data, it is necessary to registre for an - EarthData account and link it to the GES DISC as described in [3]_. + To obtain MERRA-2 data, it is necessary to register for an EarthData + account and link it to the GES DISC as described in [3]_. MERRA-2 contains 14 single-level 2D datasets with an hourly resolution. The - most important ones are 'M2T1NXAER' which contains aerosol data, 'M2T1NXRAD' - which contains radiation related parameters, and 'M2T1NXSLV' which contains - general variables (e.g., temperature and wind speed). + most important ones are 'M2T1NXAER', which contains aerosol data, + 'M2T1NXRAD', which contains radiation related parameters, and 'M2T1NXSLV', + which contains general variables (e.g., temperature and wind speed). Warning ------- - Known error in calculation of radiation, hence it is strongly adviced that - radiation from MERRA-2 should not be used. Users interested in radiation - from reanalysis datasets are referred to pvlib.iotools.get_era5. + There is a known error in the calculation of radiation, hence it is + strongly adviced that radiation from MERRA-2 should not be used. Users + interested in radiation from reanalysis datasets are referred to + :func:`pvlib.iotools.get_era5`. See Also -------- @@ -108,10 +111,10 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, ---------- .. [1] `NASA MERRA-2 Project overview `_ - .. [2] `MERRa-2 File specification - ` + .. [2] `MERRA-2 File specification + `_ .. [3] `Account registration and data access to NASA's GES DISC - ` + `_ """ # noqa: E501 if xr is None: raise ImportError('Retrieving MERRA-2 data requires xarray') @@ -128,23 +131,25 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, start_float = cftime.date2num(start, units='days since 1-1-1 00:00:0.0') end_float = cftime.date2num(end, units='days since 1-1-1 00:00:0.0') - # try: - # latitude = slice(latitude[0], latitude[1]) - # longitude = slice(longitude[0], longitude[1]) - # method = None - # except TypeError: - # method = 'nearest' + try: + sel_dict = { + 'lat': slice(latitude[0], latitude[1]), + 'lon': slice(longitude[0], longitude[1]), + 'time': slice(start_float, end_float)} + except TypeError: + sel_dict = { + 'lat': latitude, + 'lon': longitude, + 'time': slice(start_float, end_float)} # Setting decode_times=False results in a time saving of up to some minutes - ds = xr.open_dataset(store, decode_times=False).sel( - {'lat': latitude, - 'lon': longitude, - 'time': slice(start_float, end_float)}, - ) + ds = xr.open_dataset(store, decode_times=False).sel(sel_dict) + + ds = xr.decode_cf(ds) # Decode timestamps variables = [v.lower() for v in variables] # Make all variables lower-case - ds = xr.decode_cf(ds) # Decode timestamps + ds = ds[variables] # select sub-set of variables if map_variables: # Renaming of xarray datasets throws an error if keys are missing @@ -159,9 +164,11 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, (ds['lon'].size == 1)): data = ds.to_dataframe() # Localize timezone to UTC - data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 - if (ds['lat'].size == 1) & (ds['lon'].size == 1): - data = data.droplevel(['lat', 'lon']) + if data.index.nlevels > 1: # if dataframe has a multi-index + data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 + else: # for single location dataframes 
(only time as index) + data.index = data.index.tz_localize('UTC') + data = data.drop(columns = ['lat', 'lon']) return data, metadata else: return ds, metadata @@ -179,8 +186,8 @@ def read_merra2(filename, output_format=None, map_variables=True): filenames. output_format: {'dataframe', 'dataset'}, optional Type of data object to return. Default is to return a pandas DataFrame - if file only contains one location and otherwise return an xarray - dataset. + if data for a single location is requested and otherwise return an + xarray Dataset. map_variables: bool, default: True When true, renames columns to pvlib variable names where applicable. See variable MERRA2_VARIABLE_MAP. @@ -192,7 +199,7 @@ def read_merra2(filename, output_format=None, map_variables=True): returned object is either a pandas DataFrame or an xarray dataset, depending on the output_format parameter. metadata: dict - Metadata for the time-series. + Metadata extracted from the netcdf files. See Also -------- @@ -202,8 +209,8 @@ def read_merra2(filename, output_format=None, map_variables=True): ---------- .. [1] `NASA MERRA-2 Project overview `_ - .. [2] `MERRa-2 File specification - ` + .. [2] `MERRA-2 File specification + `_ """ if xr is None: raise ImportError('Reading MERRA-2 data requires xarray to be installed.') # noqa: E501 @@ -226,10 +233,14 @@ def read_merra2(filename, output_format=None, map_variables=True): (output_format is None) & (ds['lat'].size == 1) & (ds['lon'].size == 1)): data = ds.to_dataframe() - # Localize timezone to UTC - data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 + # Remove lat and lon from multi-index if (ds['lat'].size == 1) & (ds['lon'].size == 1): data = data.droplevel(['lat', 'lon']) + # Localize timezone to UTC + if data.index.nlevels > 1: # if dataframe has a multi-index + data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 + else: # for single location dataframes (only time as index) + data.index = data.index.tz_localize('UTC') return data, metadata else: return ds, metadata diff --git a/pvlib/tests/iotools/test_merra2.py b/pvlib/tests/iotools/test_merra2.py index da559bca87..b49e2e2f3b 100644 --- a/pvlib/tests/iotools/test_merra2.py +++ b/pvlib/tests/iotools/test_merra2.py @@ -35,7 +35,7 @@ def test_read_merra2(expected_index): data, meta = read_merra2(filenames, map_variables=False) assert_index_equal(data.index, expected_index) assert meta['lat'] == {'name': 'lat', 'long_name': 'latitude', - 'units': 'degrees_north'} + 'units': 'degrees_north'} assert np.isclose(data.loc['2020-01-01 12:30:00+00:00', 'SWGDN'], 130.4375) @@ -49,9 +49,9 @@ def test_read_merra2_dataset(expected_index): import xarray as xr assert isinstance(data, xr.Dataset) assert meta['lat'] == {'name': 'lat', 'long_name': 'latitude', - 'units': 'degrees_north'} + 'units': 'degrees_north'} assert np.all([v in ['time', 'lon', 'lat', 'ALBEDO', 'EMIS', 'SWGDN', - 'SWGDNCLR', 'SWTDN'] for v in list(data.variables)]) + 'SWGDNCLR', 'SWTDN'] for v in list(data.variables)]) @requires_xarray @@ -68,9 +68,13 @@ def test_read_merra2_map_variables(): @pytest.mark.remote_data @pytest.mark.flaky(reruns=RERUNS, reruns_delay=RERUNS_DELAY) def test_get_merra2(merra2_credentials): - username, password = merra2_credentials() + username, password = merra2_credentials data, meta = get_merra2( - 55, 15, dt.datetime(2020,1,1), dt.datetime(2020,1,2), + latitude=55, longitude=15, + start=dt.datetime(2020, 1, 1), 
end=dt.datetime(2020, 1, 2), dataset='M2T1NXRAD', variables=['TAUHGH', 'SWGNT'], username=username, password=password, map_variables=True) - assert True + assert_index_equal(data.index, expected_index) + assert meta['lat'] == {'name': 'lat', 'long_name': 'latitude', + 'units': 'degrees_north'} + assert np.all([v in ['tauhgh', 'swgnt'] for v in data.columns]) From eb9939168f75a9bef8c35a2f606abcfca1f74a0e Mon Sep 17 00:00:00 2001 From: AdamRJensen Date: Mon, 23 Aug 2021 12:32:20 +0200 Subject: [PATCH 15/15] Add lxml to ci requirement files --- ci/requirements-py36.yml | 1 + ci/requirements-py37.yml | 1 + ci/requirements-py38.yml | 1 + ci/requirements-py39.yml | 1 + pvlib/iotools/merra2.py | 2 +- 5 files changed, 5 insertions(+), 1 deletion(-) diff --git a/ci/requirements-py36.yml b/ci/requirements-py36.yml index 20e8247ec3..95eb075aa5 100644 --- a/ci/requirements-py36.yml +++ b/ci/requirements-py36.yml @@ -8,6 +8,7 @@ dependencies: - cython - dask - ephem + - lxml - netcdf4 - nose - numba diff --git a/ci/requirements-py37.yml b/ci/requirements-py37.yml index d7a6debcbd..5e0f5db0ae 100644 --- a/ci/requirements-py37.yml +++ b/ci/requirements-py37.yml @@ -8,6 +8,7 @@ dependencies: - cython - dask - ephem + - lxml - netcdf4 - nose - numba diff --git a/ci/requirements-py38.yml b/ci/requirements-py38.yml index 37260e8fad..e5dff100a9 100644 --- a/ci/requirements-py38.yml +++ b/ci/requirements-py38.yml @@ -8,6 +8,7 @@ dependencies: - cython - dask - ephem + - lxml - netcdf4 - nose - numba diff --git a/ci/requirements-py39.yml b/ci/requirements-py39.yml index 6834c68cb5..11727abede 100644 --- a/ci/requirements-py39.yml +++ b/ci/requirements-py39.yml @@ -8,6 +8,7 @@ dependencies: - cython - dask - ephem + - lxml # - netcdf4 # pulls in a different version of numpy with ImportError - nose # - numba # python 3.9 compat in early 2021 diff --git a/pvlib/iotools/merra2.py b/pvlib/iotools/merra2.py index 0f2441dfa7..8bb6fd5a98 100644 --- a/pvlib/iotools/merra2.py +++ b/pvlib/iotools/merra2.py @@ -168,7 +168,7 @@ def get_merra2(latitude, longitude, start, end, dataset, variables, username, data.index = data.index.set_levels(data.index.get_level_values('time').tz_localize('utc'), level='time') # noqa: E501 else: # for single location dataframes (only time as index) data.index = data.index.tz_localize('UTC') - data = data.drop(columns = ['lat', 'lon']) + data = data.drop(columns=['lat', 'lon']) return data, metadata else: return ds, metadata
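
For reference, a minimal usage sketch of the two entry points added in this series, mirroring the tests above. The credential environment variables and the local file name are the ones used by the test suite; an EarthData account linked to GES DISC is assumed, and actual values depend on the user's setup::

    import datetime as dt
    import os

    from pvlib.iotools import get_merra2, read_merra2

    # EarthData credentials, stored as in the merra2_credentials test fixture.
    username = os.environ['MERRA2_USERNAME']
    password = os.environ['MERRA2_PASSWORD']

    # Retrieve two variables from the hourly radiation dataset for one site.
    data, meta = get_merra2(
        latitude=55, longitude=15,
        start=dt.datetime(2020, 1, 1), end=dt.datetime(2020, 1, 2),
        dataset='M2T1NXRAD', variables=['TAUHGH', 'SWGNT'],
        username=username, password=password, map_variables=True)

    # Previously downloaded GES DISC subset files can be read offline.
    data, meta = read_merra2(
        'MERRA2_400.tavg1_2d_rad_Nx.20200101.SUB.nc', map_variables=True)

For a single location both calls return a pandas DataFrame with a UTC DatetimeIndex; passing output_format='dataset', or requesting multiple grid cells from get_merra2, returns an xarray Dataset instead.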
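
The try/except TypeError block in get_merra2 accepts either scalars or [min, max] pairs for latitude and longitude by first attempting to build slices. A self-contained sketch of that selection pattern on a synthetic grid (the coordinates and variable name below are illustrative, not the actual MERRA-2 grid)::

    import numpy as np
    import xarray as xr

    # Small stand-in for a MERRA-2 0.5 deg x 0.625 deg dataset.
    ds = xr.Dataset(
        {'swgdn': (('time', 'lat', 'lon'), np.zeros((4, 5, 5)))},
        coords={'time': np.arange(4),
                'lat': np.linspace(54.0, 56.0, 5),
                'lon': np.linspace(14.0, 16.5, 5)})

    def select_site(ds, latitude, longitude):
        """Select a bounding box for [S, N]/[W, E] pairs, else a single cell."""
        try:
            sel_dict = {'lat': slice(latitude[0], latitude[1]),
                        'lon': slice(longitude[0], longitude[1])}
        except TypeError:  # scalars are not subscriptable
            sel_dict = {'lat': latitude, 'lon': longitude}
        return ds.sel(sel_dict)

    box = select_site(ds, [54.5, 55.5], [14.5, 16.0])  # 3 x 3 grid cells
    point = select_site(ds, 55.0, 15.25)               # exactly one grid cell

Note that the scalar branch, as written in the patch, relies on exact coordinate matches (no method='nearest' is passed to sel), so an off-grid scalar would not match a grid cell.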
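
Both functions convert the selected Dataset with to_dataframe() and then localize the 'time' index level to UTC; set_levels only exists on a MultiIndex, which is why the single-location branch falls back to a plain tz_localize. A standalone pandas sketch of the same idea on toy data, using the unique level values rather than get_level_values (equivalent for a single grid cell)::

    import pandas as pd

    # Toy (time, lat, lon) MultiIndex like the one produced by to_dataframe().
    times = pd.date_range('2020-01-01 00:30', periods=3, freq='1h')
    index = pd.MultiIndex.from_product([times, [55.0], [15.0]],
                                       names=['time', 'lat', 'lon'])
    df = pd.DataFrame({'swgdn': [0.0, 0.0, 12.5]}, index=index)

    # Localize only the 'time' level of the MultiIndex to UTC.
    df.index = df.index.set_levels(df.index.levels[0].tz_localize('UTC'),
                                   level='time')

    # For a single grid cell the constant lat/lon levels can be dropped,
    # leaving a plain tz-aware DatetimeIndex.
    df = df.droplevel(['lat', 'lon'])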