diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 2dca44222..fd2d5fb4b 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -75,7 +75,7 @@ jobs: - name: Start delphi_web_epidata run: | - docker run --rm -d -p 10080:80 --env "MODULE_NAME=delphi.epidata.server.main" --env "SQLALCHEMY_DATABASE_URI=mysql+mysqldb://user:pass@delphi_database_epidata:3306/epidata" --env "FLASK_SECRET=abc" --env "FLASK_PREFIX=/epidata" --env "REDIS_HOST=delphi_redis" --env "REDIS_PASSWORD=1234" --env "API_KEY_REGISTER_WEBHOOK_TOKEN=abc" --env "API_KEY_ADMIN_PASSWORD=test_admin_password" --network delphi-net --name delphi_web_epidata delphi_web_epidata + docker run --rm -d -p 10080:80 --env "MODULE_NAME=delphi.epidata.server.main" --env "SQLALCHEMY_DATABASE_URI=mysql+mysqldb://user:pass@delphi_database_epidata:3306/epidata" --env "FLASK_SECRET=abc" --env "FLASK_PREFIX=/epidata" --env "REDIS_HOST=delphi_redis" --env "REDIS_PASSWORD=1234" --env "API_KEY_REGISTER_WEBHOOK_TOKEN=abc" --env "API_KEY_ADMIN_PASSWORD=test_admin_password" --env "TESTING_MODE=True" --network delphi-net --name delphi_web_epidata delphi_web_epidata docker ps - name: Run Unit Tests diff --git a/dev/local/Makefile b/dev/local/Makefile index e7e896aa6..fb02668ee 100644 --- a/dev/local/Makefile +++ b/dev/local/Makefile @@ -104,6 +104,7 @@ web: --env "REDIS_PASSWORD=1234" \ --env "API_KEY_ADMIN_PASSWORD=test_admin_password" \ --env "API_KEY_REGISTER_WEBHOOK_TOKEN=abc" \ + --env "TESTING_MODE=True" \ --network delphi-net --name delphi_web_epidata \ delphi_web_epidata >$(LOG_WEB) 2>&1 & diff --git a/docs/new_endpoint_tutorial.md b/docs/new_endpoint_tutorial.md index 6e6094161..abf8a5fa1 100644 --- a/docs/new_endpoint_tutorial.md +++ b/docs/new_endpoint_tutorial.md @@ -123,11 +123,9 @@ Here's what we add to each client: def fluview_meta(): """Fetch FluView metadata.""" # Set up request - params = { - 'endpoint': 'fluview_meta', - } + params = {} # Make the API call - return 
Epidata._request(params) + return Epidata._request("fluview_meta", params) ``` - [`delphi_epidata.R`](https://github.com/cmu-delphi/delphi-epidata/blob/dev/src/client/delphi_epidata.R) diff --git a/integrations/acquisition/covid_hosp/facility/test_scenarios.py b/integrations/acquisition/covid_hosp/facility/test_scenarios.py index c6c51e2f5..44ee3572d 100644 --- a/integrations/acquisition/covid_hosp/facility/test_scenarios.py +++ b/integrations/acquisition/covid_hosp/facility/test_scenarios.py @@ -28,7 +28,7 @@ def setUp(self): self.test_utils = UnitTestUtils(__file__) # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' + Epidata.BASE_URL = 'http://delphi_web_epidata/epidata' Epidata.auth = ('epidata', 'key') # use the local instance of the epidata database diff --git a/integrations/acquisition/covid_hosp/state_daily/test_scenarios.py b/integrations/acquisition/covid_hosp/state_daily/test_scenarios.py index 2054d19c8..8636295bc 100644 --- a/integrations/acquisition/covid_hosp/state_daily/test_scenarios.py +++ b/integrations/acquisition/covid_hosp/state_daily/test_scenarios.py @@ -32,7 +32,7 @@ def setUp(self): self.test_utils = UnitTestUtils(__file__) # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' + Epidata.BASE_URL = 'http://delphi_web_epidata/epidata' Epidata.auth = ('epidata', 'key') # use the local instance of the epidata database diff --git a/integrations/acquisition/covid_hosp/state_timeseries/test_scenarios.py b/integrations/acquisition/covid_hosp/state_timeseries/test_scenarios.py index 8565b8e7f..46bdeebcd 100644 --- a/integrations/acquisition/covid_hosp/state_timeseries/test_scenarios.py +++ b/integrations/acquisition/covid_hosp/state_timeseries/test_scenarios.py @@ -28,7 +28,7 @@ def setUp(self): self.test_utils = UnitTestUtils(__file__) # use the local instance of the Epidata API - Epidata.BASE_URL = 
'http://delphi_web_epidata/epidata/api.php' + Epidata.BASE_URL = 'http://delphi_web_epidata/epidata' Epidata.auth = ('epidata', 'key') # use the local instance of the epidata database diff --git a/integrations/acquisition/covidcast/test_covidcast_meta_caching.py b/integrations/acquisition/covidcast/test_covidcast_meta_caching.py index 6e4c6378f..4111561eb 100644 --- a/integrations/acquisition/covidcast/test_covidcast_meta_caching.py +++ b/integrations/acquisition/covidcast/test_covidcast_meta_caching.py @@ -2,17 +2,10 @@ # standard library import json -import unittest - -# third party -import mysql.connector -import requests # first party from delphi_utils import Nans -from delphi.epidata.client.delphi_epidata import Epidata -import delphi.operations.secrets as secrets -import delphi.epidata.acquisition.covidcast.database as live +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase from delphi.epidata.maintenance.covidcast_meta_cache_updater import main # py3tester coverage target (equivalent to `import *`) @@ -21,75 +14,26 @@ 'covidcast_meta_cache_updater' ) -# use the local instance of the Epidata API -BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - -class CovidcastMetaCacheTests(unittest.TestCase): +class CovidcastMetaCacheTests(CovidcastTestBase): """Tests covidcast metadata caching.""" - def setUp(self): - """Perform per-test setup.""" - - # connect to the `epidata` database - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='covid') - cur = cnx.cursor() - - # clear all tables - cur.execute("truncate table epimetric_load") - cur.execute("truncate table epimetric_full") - cur.execute("truncate table epimetric_latest") - cur.execute("truncate table geo_dim") - cur.execute("truncate table signal_dim") - # reset the `covidcast_meta_cache` table (it should always have one row) - cur.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') - cnx.commit() - 
cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - # use the local instance of the epidata database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - # use the local instance of the Epidata API - Epidata.BASE_URL = BASE_URL - Epidata.auth = ('epidata', 'key') - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() - - @staticmethod - def _make_request(): - params = {'endpoint': 'covidcast_meta', 'cached': 'true'} - response = requests.get(Epidata.BASE_URL, params=params, auth=Epidata.auth) - response.raise_for_status() - return response.json() - def test_caching(self): """Populate, query, cache, query, and verify the cache.""" # insert dummy data - self.cur.execute(f''' + self._db._cursor.execute(f''' INSERT INTO `signal_dim` (`signal_key_id`, `source`, `signal`) VALUES (42, 'src', 'sig'); ''') - self.cur.execute(f''' + self._db._cursor.execute(f''' INSERT INTO `geo_dim` (`geo_key_id`, `geo_type`, `geo_value`) VALUES (96, 'state', 'pa'), (97, 'state', 'wa'); ''') - self.cur.execute(f''' + self._db._cursor.execute(f''' INSERT INTO `epimetric_latest` (`epimetric_id`, `signal_key_id`, `geo_key_id`, `time_type`, `time_value`, `value_updated_timestamp`, @@ -102,13 +46,10 @@ def test_caching(self): (16, 42, 97, 'day', 20200422, 789, 1, 2, 3, 20200423, 1, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}, {Nans.NOT_MISSING}) ''') - self.cnx.commit() + self._db._connection.commit() # make sure the live utility is serving something sensible - cvc_database = live.Database() - cvc_database.connect() - epidata1 = cvc_database.compute_covidcast_meta() - cvc_database.disconnect(False) + epidata1 = self._db.compute_covidcast_meta() self.assertEqual(len(epidata1),1) self.assertEqual(epidata1, [ { @@ -133,7 +74,7 @@ def test_caching(self): # make sure the API covidcast_meta is still blank, since it only serves # the cached version and we haven't cached 
anything yet - epidata2 = Epidata.covidcast_meta() + epidata2 = self.epidata_client.covidcast_meta() self.assertEqual(epidata2['result'], -2, json.dumps(epidata2)) # update the cache @@ -141,21 +82,21 @@ def test_caching(self): main(args) # fetch the cached version - epidata3 = Epidata.covidcast_meta() + epidata3 = self.epidata_client.covidcast_meta() # cached version should now equal live version self.assertEqual(epidata1, epidata3) # insert dummy data timestamped as of now - self.cur.execute(''' + self._db._cursor.execute(''' update covidcast_meta_cache set timestamp = UNIX_TIMESTAMP(NOW()), epidata = '[{"hello": "world"}]' ''') - self.cnx.commit() + self._db._connection.commit() # fetch the cached version (manually) - epidata4 = self._make_request() + epidata4 = self._make_request(endpoint="covidcast_meta", json=True, params={'cached': 'true'}, auth=self.epidata_client.auth, raise_for_status=True) # make sure the cache was actually served self.assertEqual(epidata4, { @@ -167,15 +108,15 @@ def test_caching(self): }) # insert dummy data timestamped as 2 hours old - self.cur.execute(''' + self._db._cursor.execute(''' update covidcast_meta_cache set timestamp = UNIX_TIMESTAMP(NOW()) - 3600 * 2, epidata = '[{"hello": "world"}]' ''') - self.cnx.commit() + self._db._connection.commit() # fetch the cached version (manually) - epidata5 = self._make_request() + epidata5 = self._make_request(endpoint="covidcast_meta", json=True, params={'cached': 'true'}, auth=self.epidata_client.auth, raise_for_status=True) # make sure the cache was returned anyhow self.assertEqual(epidata4, epidata5) diff --git a/integrations/acquisition/covidcast/test_csv_uploading.py b/integrations/acquisition/covidcast/test_csv_uploading.py index e4c9d881e..bad0e7eb2 100644 --- a/integrations/acquisition/covidcast/test_csv_uploading.py +++ b/integrations/acquisition/covidcast/test_csv_uploading.py @@ -3,67 +3,23 @@ # standard library from datetime import date import os -import unittest -import 
argparse # third party -import mysql.connector import pandas as pd import numpy as np # first party from delphi_utils import Nans -from delphi.epidata.client.delphi_epidata import Epidata from delphi.epidata.acquisition.covidcast.csv_to_database import main, get_argument_parser -import delphi.operations.secrets as secrets +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase # py3tester coverage target (equivalent to `import *`) __test_target__ = 'delphi.epidata.acquisition.covidcast.csv_to_database' -class CsvUploadingTests(unittest.TestCase): +class CsvUploadingTests(CovidcastTestBase): """Tests covidcast CSV uploading.""" - def setUp(self): - """Perform per-test setup.""" - - # connect to the `epidata` database and clear the `covidcast` table - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='covid') - cur = cnx.cursor() - - # clear all tables - cur.execute("truncate table epimetric_load") - cur.execute("truncate table epimetric_full") - cur.execute("truncate table epimetric_latest") - cur.execute("truncate table geo_dim") - cur.execute("truncate table signal_dim") - # reset the `covidcast_meta_cache` table (it should always have one row) - cur.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') - - cnx.commit() - cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - # use the local instance of the epidata database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() - @staticmethod def apply_lag(expected_epidata): expected_issue_day=date.today() @@ -78,11 +34,11 @@ def apply_lag(expected_epidata): return expected_epidata def 
verify_timestamps_and_defaults(self): - self.cur.execute(''' + self._db._cursor.execute(''' select value_updated_timestamp from epimetric_full UNION ALL select value_updated_timestamp from epimetric_latest''') - for (value_updated_timestamp,) in self.cur: + for (value_updated_timestamp,) in self._db._cursor: self.assertGreater(value_updated_timestamp, 0) def test_uploading(self): @@ -117,9 +73,9 @@ def test_uploading(self): # upload CSVs main(args) - response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') + response = self.epidata_client.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') - expected_values = pd.concat([values, pd.DataFrame({ "time_value": [20200419] * 3, "signal": [signal_name] * 3, "direction": [None] * 3})], axis=1).rename(columns=uploader_column_rename).to_dict(orient="records") + expected_values = pd.concat([values, pd.DataFrame({ "geo_type": "state", "source": "src-name", "time_type": "day", "time_value": [20200419] * 3, "signal": [signal_name] * 3, "direction": [None] * 3})], axis=1).rename(columns=uploader_column_rename).to_dict(orient="records") expected_response = {'result': 1, 'epidata': self.apply_lag(expected_values), 'message': 'success'} self.assertEqual(response, expected_response) @@ -145,9 +101,12 @@ def test_uploading(self): # upload CSVs main(args) - response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') + response = self.epidata_client.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') expected_values = pd.concat([values, pd.DataFrame({ + "geo_type": "state", + "source": "src-name", + "time_type": "day", "time_value": [20200419] * 3, "signal": [signal_name] * 3, "direction": [None] * 3, @@ -179,9 +138,9 @@ def test_uploading(self): # upload CSVs main(args) - response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') + response = self.epidata_client.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') - 
expected_response = {'result': -2, 'message': 'no results'} + expected_response = {'epidata': [], 'result': -2, 'message': 'no results'} self.assertEqual(response, expected_response) self.verify_timestamps_and_defaults() @@ -204,9 +163,12 @@ def test_uploading(self): # upload CSVs main(args) - response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') + response = self.epidata_client.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') expected_values_df = pd.concat([values, pd.DataFrame({ + "geo_type": "state", + "source": "src-name", + "time_type": "day", "time_value": [20200419], "signal": [signal_name], "direction": [None]})], axis=1).rename(columns=uploader_column_rename) @@ -237,9 +199,12 @@ def test_uploading(self): # upload CSVs main(args) - response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') + response = self.epidata_client.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') expected_values = pd.concat([values, pd.DataFrame({ + "geo_type": "state", + "source": "src-name", + "time_type": "day", "time_value": [20200419], "signal": [signal_name], "direction": [None] @@ -268,9 +233,9 @@ def test_uploading(self): # upload CSVs main(args) - response = Epidata.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') + response = self.epidata_client.covidcast('src-name', signal_name, 'day', 'state', 20200419, '*') - expected_response = {'result': -2, 'message': 'no results'} + expected_response = {'epidata': [], 'result': -2, 'message': 'no results'} self.assertEqual(response, expected_response) self.verify_timestamps_and_defaults() diff --git a/integrations/acquisition/covidcast/test_db.py b/integrations/acquisition/covidcast/test_db.py index 7b9d80770..780b318ee 100644 --- a/integrations/acquisition/covidcast/test_db.py +++ b/integrations/acquisition/covidcast/test_db.py @@ -1,13 +1,13 @@ from delphi_utils import Nans from delphi.epidata.acquisition.covidcast.database 
import DBLoadStateException -from delphi.epidata.acquisition.covidcast.test_utils import CovidcastBase, CovidcastTestRow +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase, CovidcastTestRow # all the Nans we use here are just one value, so this is a shortcut to it: nmv = Nans.NOT_MISSING.value -class TestTest(CovidcastBase): +class TestDatabase(CovidcastTestBase): def _find_matches_for_row(self, row): # finds (if existing) row from both history and latest views that matches long-key of provided CovidcastTestRow diff --git a/integrations/acquisition/covidcast/test_delete_batch.py b/integrations/acquisition/covidcast/test_delete_batch.py index 4624df27c..c68b8a2b6 100644 --- a/integrations/acquisition/covidcast/test_delete_batch.py +++ b/integrations/acquisition/covidcast/test_delete_batch.py @@ -6,39 +6,16 @@ from os import path # first party -import delphi.operations.secrets as secrets -from delphi.epidata.acquisition.covidcast.database import Database -from delphi.epidata.acquisition.covidcast.test_utils import covidcast_rows_from_args +from delphi.epidata.common.covidcast_test_base import covidcast_rows_from_args, CovidcastTestBase # py3tester coverage target (equivalent to `import *`) __test_target__ = 'delphi.epidata.acquisition.covidcast.database' Example = namedtuple("example", "given expected") -class DeleteBatch(unittest.TestCase): +class DeleteBatch(CovidcastTestBase): """Tests batch deletions""" - - def setUp(self): - """Perform per-test setup.""" - - # use the local instance of the epidata database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - # will use secrets as set above - self._db = Database() - self._db.connect() - - for table in "epimetric_load epimetric_latest epimetric_full geo_dim signal_dim".split(): - self._db._cursor.execute(f"TRUNCATE TABLE {table}") - - - def tearDown(self): - """Perform per-test teardown.""" - self._db.disconnect(False) - del self._db - @unittest.skip("Database 
user would require FILE privileges") def test_delete_from_file(self): self._test_delete_batch(path.join(path.dirname(__file__), "delete_batch.csv")) diff --git a/integrations/acquisition/covidcast_nowcast/test_csv_uploading.py b/integrations/acquisition/covidcast_nowcast/test_csv_uploading.py index 1299c6144..a46ddaa40 100644 --- a/integrations/acquisition/covidcast_nowcast/test_csv_uploading.py +++ b/integrations/acquisition/covidcast_nowcast/test_csv_uploading.py @@ -3,21 +3,18 @@ # standard library from datetime import date import os -import unittest from unittest.mock import patch from functools import partialmethod from datetime import date # third party -import mysql.connector import epiweeks as epi # first party -from delphi.epidata.client.delphi_epidata import Epidata from delphi.epidata.acquisition.covidcast_nowcast.load_sensors import main from delphi.epidata.acquisition.covidcast.csv_importer import CsvImporter -import delphi.operations.secrets as secrets +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase # py3tester coverage target (equivalent to `import *`) __test_target__ = 'delphi.epidata.acquisition.covidcast_nowcast.load_sensors' @@ -27,41 +24,12 @@ ) -class CsvUploadingTests(unittest.TestCase): +class CsvUploadingTests(CovidcastTestBase): """Tests covidcast nowcast CSV uploading.""" - def setUp(self): + def localSetUp(self): """Perform per-test setup.""" - - # connect to the `epidata` database and clear the `covidcast` table - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='epidata') - cur = cnx.cursor() - cur.execute('truncate table covidcast_nowcast') - cur.execute('delete from api_user') - cur.execute('insert into api_user(api_key, email) values ("key", "email")') - cnx.commit() - cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - # use the local instance of the epidata database - secrets.db.host 
= 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() + self.truncate_tables_list = ["covidcast_nowcast"] @patch('delphi.epidata.acquisition.covidcast_nowcast.load_sensors.CsvImporter.find_csv_files', new=FIXED_ISSUE_IMPORTER) @@ -105,7 +73,7 @@ def test_uploading(self): ) # check data uploaded - response = Epidata.covidcast_nowcast( + response = self.epidata_client.covidcast_nowcast( 'src', 'sig', 'testsensor', 'day', 'state', 20200419, 'ca') self.assertEqual(response, { 'result': 1, @@ -141,7 +109,7 @@ def test_duplicate_row(self): main() # most most recent value is the one stored - response = Epidata.covidcast_nowcast( + response = self.epidata_client.covidcast_nowcast( 'src', 'sig', 'testsensor', 'day', 'state', 20200419, 'ca') self.assertEqual(response, { 'result': 1, diff --git a/integrations/client/test_delphi_epidata.py b/integrations/client/test_delphi_epidata.py index 4ef1fa6a3..bb4db8ce3 100644 --- a/integrations/client/test_delphi_epidata.py +++ b/integrations/client/test_delphi_epidata.py @@ -10,10 +10,8 @@ from aiohttp.client_exceptions import ClientResponseError # third party -import delphi.operations.secrets as secrets from delphi.epidata.maintenance.covidcast_meta_cache_updater import main as update_covidcast_meta_cache -from delphi.epidata.acquisition.covidcast.test_utils import CovidcastBase, CovidcastTestRow, FIPS, MSA -from delphi.epidata.client.delphi_epidata import Epidata +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase, CovidcastTestRow, FIPS, MSA from delphi_utils import Nans # py3tester coverage target @@ -21,31 +19,10 @@ # all the Nans we use here are just one value, so this is a shortcut to it: nmv = Nans.NOT_MISSING.value -def fake_epidata_endpoint(func): - 
"""This can be used as a decorator to enable a bogus Epidata endpoint to return 404 responses.""" - def wrapper(*args): - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/fake_api.php' - func(*args) - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - return wrapper -class DelphiEpidataPythonClientTests(CovidcastBase): - """Tests the Python client.""" - - def localSetUp(self): - """Perform per-test setup.""" - - # reset the `covidcast_meta_cache` table (it should always have one row) - self._db._cursor.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') - - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - - # use the local instance of the epidata database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') +class DelphiEpidataPythonClientTests(CovidcastTestBase): + """Tests the Python client.""" def test_covidcast(self): """Test that the covidcast endpoint returns expected data.""" @@ -60,13 +37,13 @@ def test_covidcast(self): with self.subTest(name='request two signals'): # fetch data - response = Epidata.covidcast( + response = self.epidata_client.covidcast( **self.params_from_row(rows[0], signals=[rows[0].signal, rows[-1].signal]) ) expected = [ - row_latest_issue.as_api_compatibility_row_dict(), - rows[-1].as_api_compatibility_row_dict() + row_latest_issue.as_api_row_dict(), + rows[-1].as_api_row_dict() ] self.assertEqual(response['epidata'], expected) @@ -79,16 +56,16 @@ def test_covidcast(self): with self.subTest(name='request two signals with tree format'): # fetch data - response = Epidata.covidcast( + response = self.epidata_client.covidcast( **self.params_from_row(rows[0], signals=[rows[0].signal, rows[-1].signal], format='tree') ) expected = [{ rows[0].signal: [ - row_latest_issue.as_api_compatibility_row_dict(ignore_fields=['signal']), + 
row_latest_issue.as_api_row_dict(ignore_fields=['signal']), ], rows[-1].signal: [ - rows[-1].as_api_compatibility_row_dict(ignore_fields=['signal']), + rows[-1].as_api_row_dict(ignore_fields=['signal']), ], }] @@ -101,11 +78,11 @@ def test_covidcast(self): with self.subTest(name='request most recent'): # fetch data, without specifying issue or lag - response_1 = Epidata.covidcast( + response_1 = self.epidata_client.covidcast( **self.params_from_row(rows[0]) ) - expected = [row_latest_issue.as_api_compatibility_row_dict()] + expected = [row_latest_issue.as_api_row_dict()] # check result self.assertEqual(response_1, { @@ -116,11 +93,11 @@ def test_covidcast(self): with self.subTest(name='request as-of a date'): # fetch data, specifying as_of - response_1a = Epidata.covidcast( + response_1a = self.epidata_client.covidcast( **self.params_from_row(rows[0], as_of=rows[1].issue) ) - expected = [rows[1].as_api_compatibility_row_dict()] + expected = [rows[1].as_api_row_dict()] # check result self.maxDiff=None @@ -132,13 +109,13 @@ def test_covidcast(self): with self.subTest(name='request a range of issues'): # fetch data, specifying issue range, not lag - response_2 = Epidata.covidcast( - **self.params_from_row(rows[0], issues=Epidata.range(rows[0].issue, rows[1].issue)) + response_2 = self.epidata_client.covidcast( + **self.params_from_row(rows[0], issues=self.epidata_client.range(rows[0].issue, rows[1].issue)) ) expected = [ - rows[0].as_api_compatibility_row_dict(), - rows[1].as_api_compatibility_row_dict() + rows[0].as_api_row_dict(), + rows[1].as_api_row_dict() ] # check result @@ -150,11 +127,11 @@ def test_covidcast(self): with self.subTest(name='request at a given lag'): # fetch data, specifying lag, not issue range - response_3 = Epidata.covidcast( + response_3 = self.epidata_client.covidcast( **self.params_from_row(rows[0], lag=2) ) - expected = [row_latest_issue.as_api_compatibility_row_dict()] + expected = [row_latest_issue.as_api_row_dict()] # check result 
self.assertDictEqual(response_3, { @@ -165,19 +142,19 @@ def test_covidcast(self): with self.subTest(name='long request'): # fetch data, without specifying issue or lag # TODO should also trigger a post but doesn't due to the 414 issue - response_1 = Epidata.covidcast( + response_1 = self.epidata_client.covidcast( **self.params_from_row(rows[0], signals='sig'*1000) ) # check result - self.assertEqual(response_1, {'message': 'no results', 'result': -2}) + self.assertEqual(response_1, {'epidata': [], 'message': 'no results', 'result': -2}) @patch('requests.post') @patch('requests.get') def test_request_method(self, get, post): """Test that a GET request is default and POST is used if a 414 is returned.""" with self.subTest(name='get request'): - Epidata.covidcast('src', 'sig', 'day', 'county', 20200414, '01234') + self.epidata_client.covidcast('src', 'sig', 'day', 'county', 20200414, '01234') get.assert_called_once() post.assert_not_called() with self.subTest(name='post request'): @@ -185,7 +162,7 @@ def test_request_method(self, get, post): mock_response = MagicMock() mock_response.status_code = 414 get.return_value = mock_response - Epidata.covidcast('src', 'sig', 'day', 'county', 20200414, '01234') + self.epidata_client.covidcast('src', 'sig', 'day', 'county', 20200414, '01234') get.assert_called_once() post.assert_called_once() @@ -196,7 +173,7 @@ def test_retry_request(self, get): mock_response = MagicMock() mock_response.status_code = 200 get.side_effect = [JSONDecodeError('Expecting value', "", 0), mock_response] - response = Epidata._request(None) + response = self.epidata_client._request("") self.assertEqual(get.call_count, 2) self.assertEqual(response, mock_response.json()) @@ -207,7 +184,7 @@ def test_retry_request(self, get): get.side_effect = [JSONDecodeError('Expecting value', "", 0), JSONDecodeError('Expecting value', "", 0), mock_response] - response = Epidata._request(None) + response = self.epidata_client._request("") 
self.assertEqual(get.call_count, 2) # 2 from previous test + 2 from this one self.assertEqual(response, {'result': 0, 'message': 'error: Expecting value: line 1 column 1 (char 0)'} @@ -228,11 +205,11 @@ def test_geo_value(self): self._insert_rows(rows) counties = [ - rows[i].as_api_compatibility_row_dict() for i in range(N) + rows[i].as_api_row_dict() for i in range(N) ] def fetch(geo): - return Epidata.covidcast( + return self.epidata_client.covidcast( **self.params_from_row(rows[0], geo_value=geo) ) @@ -289,7 +266,7 @@ def test_covidcast_meta(self): update_covidcast_meta_cache(args=None) # fetch data - response = Epidata.covidcast_meta() + response = self.epidata_client.covidcast_meta() # make sure "last updated" time is recent: updated_time = response['epidata'][0]['last_update'] @@ -335,26 +312,25 @@ def test_async_epidata(self): ] self._insert_rows(rows) - test_output = Epidata.async_epidata([ - self.params_from_row(rows[0], source='covidcast'), - self.params_from_row(rows[1], source='covidcast') + test_output = self.epidata_client.async_epidata('covidcast', [ + self.params_from_row(rows[0]), + self.params_from_row(rows[1]) ]*12, batch_size=10) responses = [i[0] for i in test_output] # check response is same as standard covidcast call, using 24 calls to test batch sizing self.assertEqual( responses, [ - Epidata.covidcast(**self.params_from_row(rows[0])), - Epidata.covidcast(**self.params_from_row(rows[1])), + self.epidata_client.covidcast(**self.params_from_row(rows[0])), + self.epidata_client.covidcast(**self.params_from_row(rows[1])), ]*12 ) - @fake_epidata_endpoint def test_async_epidata_fail(self): + self.epidata_client.BASE_URL = "http://delphi_web_epidata/fake_epidata" with pytest.raises(ClientResponseError, match="404, message='NOT FOUND'"): - Epidata.async_epidata([ + self.epidata_client.async_epidata('covidcast', [ { - 'source': 'covidcast', 'data_source': 'src', 'signals': 'sig', 'time_type': 'day', @@ -363,3 +339,4 @@ def 
test_async_epidata_fail(self): 'time_values': '20200414' } ]) + self.epidata_client.BASE_URL = "http://delphi_web_epidata/epidata" diff --git a/integrations/client/test_nowcast.py b/integrations/client/test_nowcast.py index f5124e021..5645e8108 100644 --- a/integrations/client/test_nowcast.py +++ b/integrations/client/test_nowcast.py @@ -1,55 +1,17 @@ """Integration tests for delphi_epidata.py.""" -# standard library -import unittest - -# third party -import mysql.connector - # first party -from delphi.epidata.client.delphi_epidata import Epidata -import delphi.operations.secrets as secrets +from delphi.epidata.common.delphi_test_base import DelphiTestBase # py3tester coverage target __test_target__ = 'delphi.epidata.client.delphi_epidata' -class DelphiEpidataPythonClientNowcastTests(unittest.TestCase): +class DelphiEpidataPythonClientNowcastTests(DelphiTestBase): """Tests the Python client.""" - def setUp(self): + def localSetUp(self): """Perform per-test setup.""" - - # connect to the `epidata` database and clear relevant tables - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='epidata') - cur = cnx.cursor() - - cur.execute('truncate table covidcast_nowcast') - cur.execute('delete from api_user') - cur.execute('insert into api_user(api_key, email) values ("key", "email")') - - cnx.commit() - cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - - # use the local instance of the epidata database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() + self.truncate_tables_list = ["covidcast_nowcast"] def test_covidcast_nowcast(self): """Test that the covidcast_nowcast 
endpoint returns expected data.""" @@ -62,7 +24,7 @@ def test_covidcast_nowcast(self): self.cnx.commit() # fetch data - response = Epidata.covidcast_nowcast( + response = self.epidata_client.covidcast_nowcast( 'src', ['sig1', 'sig2'], 'sensor', 'day', 'county', 20200101, '01001') # request two signals @@ -87,9 +49,9 @@ }) # request range of issues - response = Epidata.covidcast_nowcast( + response = self.epidata_client.covidcast_nowcast( 'src', 'sig1', 'sensor', 'day', 'county', 20200101, '01001', - issues=Epidata.range(20200101, 20200102)) + issues=self.epidata_client.range(20200101, 20200102)) self.assertEqual(response, { 'result': 1, @@ -112,7 +74,7 @@ }) # request as_of - response = Epidata.covidcast_nowcast( + response = self.epidata_client.covidcast_nowcast( 'src', 'sig1', 'sensor', 'day', 'county', 20200101, '01001', as_of=20200101) @@ -130,7 +92,7 @@ }) # request unavailable data - response = Epidata.covidcast_nowcast( + response = self.epidata_client.covidcast_nowcast( 'src', 'sig1', 'sensor', 'day', 'county', 22222222, '01001') - self.assertEqual(response, {'result': -2, 'message': 'no results'}) + self.assertEqual(response, {'epidata': [], 'result': -2, 'message': 'no results'}) diff --git a/integrations/server/test_api_keys.py b/integrations/server/test_api_keys.py new file mode 100644 index 000000000..449c5b72d --- /dev/null +++ b/integrations/server/test_api_keys.py @@ -0,0 +1,226 @@ +"""Integration tests for the API Keys""" +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class APIKeysTests(DelphiTestBase): + """Tests the API Keys behaviour""" + + def localSetUp(self): + self.role_name = "cdc" + + def test_public_route(self): + """Test public route""" + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(endpoint="version").status_code) + self.assertEqual(status_codes, {200}) + + def
test_no_multiples_data_source(self): + """Test requests with no multiples and with provided `data_source` and `signal` as a separate query params.""" + params = { + "data_source": "fb-survey", + "signal": "smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa", + "time_values": "20200406", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {200}) + + def test_no_multiples_source_signal(self): + """Test requests with colon-delimited source-signal param presentation.""" + params = { + "signal": "fb-survey:smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa", + "time_values": "20200406", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {200}) + + def test_multiples_allowed_signal_two_multiples(self): + """Test requests with 2 multiples and allowed dashboard signal""" + params = { + "signal": "fb-survey:smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa,ny", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {200}) + + def test_multiples_non_allowed_signal(self): + """Test requests with 2 multiples and non-allowed dashboard signal""" + params = { + "signal": "hospital-admissions:smoothed_adj_covid19_from_claims", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa,ny", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {200, 429}) + + def test_multiples_mixed_allowed_signal_two_multiples(self): + """Test requests with 2 multiples and mixed-allowed dashboard signal""" + params = { + "signal": 
"fb-survey:smoothed_wcli,hospital-admissions:smoothed_adj_covid19_from_claims", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {200, 429}) + + def test_multiples_allowed_signal_three_multiples(self): + """Test requests with 3 multiples and allowed dashboard signal""" + params = { + "signal": "fb-survey:smoothed_wcli,fb-survey:smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa,ny", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {401}) + + def test_multiples_mixed_allowed_signal_three_multiples(self): + """Test requests with 3 multiples and mixed-allowed dashboard signal""" + params = { + "signal": "fb-survey:smoothed_wcli,fb-survey:smoothed_wcli1", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa,ny", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params).status_code) + self.assertEqual(status_codes, {401}) + + def test_multiples_mixed_allowed_signal_api_key(self): + """Test requests with 3 multiples and mixed-allowed dashboard signal + valid API Key""" + params = { + "signal": "fb-survey:smoothed_wcli,fb-survey:smoothed_wcli1", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa,ny", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add( + self._make_request(params=params, auth=self.epidata_client.auth).status_code + ) + self.assertEqual(status_codes, {200}) + + def test_multiples_allowed_signal_api_key(self): + """Test requests with 3 multiples and allowed dashboard signal + valid API Key""" + params = { + "signal": 
"fb-survey:smoothed_wcli,fb-survey:smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa,ny", + "time_values": "20200406,20200407", + } + status_codes = set() + for _ in range(10): + status_codes.add( + self._make_request(params=params, auth=self.epidata_client.auth).status_code + ) + self.assertEqual(status_codes, {200}) + + def test_no_multiples_allowed_signal_api_key(self): + """Test requests with no multiples and allowed dashboard signal + valid API Key""" + params = { + "signal": "fb-survey:smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa", + "time_values": "20200406", + } + status_codes = set() + for _ in range(10): + status_codes.add( + self._make_request(params=params, auth=self.epidata_client.auth).status_code + ) + self.assertEqual(status_codes, {200}) + + def test_no_multiples_allowed_signal_bad_api_key(self): + """Test requests with no multiples and allowed dashboard signal + bad API Key""" + params = { + "signal": "fb-survey:smoothed_wcli", + "time_type": "day", + "geo_type": "state", + "geo_value": "pa", + "time_values": "20200406", + } + status_codes = set() + for _ in range(10): + status_codes.add( + self._make_request( + params=params, auth=("bad_key", "bad_email") + ).status_code + ) + self.assertEqual(status_codes, {200}) + + def test_restricted_endpoint_no_key(self): + """Test restricted endpoint with no auth key""" + params = {"regions": "1as", "epiweeks": "202020"} + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params, endpoint="cdc").status_code) + self.assertEqual(status_codes, {401}) + + def test_restricted_endpoint_invalid_key(self): + """Test restricted endpoint with invalid auth key""" + params = { + "regions": "1as", + "epiweeks": "202020", + "auth": "invalid_key", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params, endpoint="cdc").status_code) + self.assertEqual(status_codes, {401}) + + 
def test_restricted_endpoint_no_roles_key(self): + """Test restricted endpoint with no roles key""" + params = { + "regions": "1as", + "epiweeks": "202020", + "auth": "key", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params, endpoint="cdc").status_code) + self.assertEqual(status_codes, {401}) + + def test_restricted_endpoint_valid_roles_key(self): + """Test restricted endpoint with valid auth key with required role""" + params = { + "regions": "1as", + "epiweeks": "202020", + "auth": "cdc_key", + } + status_codes = set() + for _ in range(10): + status_codes.add(self._make_request(params=params, endpoint="cdc").status_code) + self.assertEqual(status_codes, {200}) diff --git a/integrations/server/test_cdc.py b/integrations/server/test_cdc.py new file mode 100644 index 000000000..7f60ddca1 --- /dev/null +++ b/integrations/server/test_cdc.py @@ -0,0 +1,42 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class CdcTest(DelphiTestBase): + """Basic integration tests for cdc endpoint.""" + + def localSetUp(self) -> None: + self.truncate_tables_list = ["cdc_extract"] + self.role_name = "cdc" + + def test_cdc(self): + """Basic integration test for cdc endpoint""" + self.cur.execute( + "INSERT INTO `cdc_extract`(`epiweek`, `state`, `num1`, `num2`, `num3`, `num4`, `num5`, `num6`, `num7`, `num8`, `total`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", + ("201102", "AK", "16", "35", "51", "96", "30", "748", "243", "433", "65"), + ) + self.cnx.commit() + response = self.epidata_client.cdc(auth="cdc_key", epiweeks=201102, locations="cen9") + self.assertEqual( + response, + { + "epidata": [ + { + "location": "cen9", + "epiweek": 201102, + "num1": 16, + "num2": 35, + "num3": 51, + "num4": 96, + "num5": 30, + "num6": 748, + "num7": 243, + "num8": 433, + "total": 65, + "value": None, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_covid_hosp.py
b/integrations/server/test_covid_hosp.py index 37aa77363..100d961c4 100644 --- a/integrations/server/test_covid_hosp.py +++ b/integrations/server/test_covid_hosp.py @@ -16,7 +16,7 @@ def setUp(self): """Perform per-test setup.""" # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' + Epidata.BASE_URL = 'http://delphi_web_epidata/epidata' Epidata.auth = ('epidata', 'key') # use the local instance of the epidata database diff --git a/integrations/server/test_covidcast.py b/integrations/server/test_covidcast.py index 73787d664..3d3fadcac 100644 --- a/integrations/server/test_covidcast.py +++ b/integrations/server/test_covidcast.py @@ -1,31 +1,18 @@ """Integration tests for the `covidcast` endpoint.""" -# standard library -from typing import Callable import unittest -# third party -import mysql.connector - # first party from delphi_utils import Nans -from delphi.epidata.acquisition.covidcast.test_utils import CovidcastBase, CovidcastTestRow, FIPS, MSA -from delphi.epidata.client.delphi_epidata import Epidata +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase, CovidcastTestRow, FIPS, MSA -class CovidcastTests(CovidcastBase): +class CovidcastTests(CovidcastTestBase): """Tests the `covidcast` endpoint.""" - def localSetUp(self): - """Perform per-test setup.""" - self._db._cursor.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') - def request_based_on_row(self, row: CovidcastTestRow, **kwargs): params = self.params_from_row(row, endpoint='covidcast', **kwargs) - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - response = Epidata.covidcast(**params) + response = self.epidata_client.covidcast(**params) return response @@ -90,7 +77,7 @@ def test_round_trip(self): # make the request response = self.request_based_on_row(row) - expected = [row.as_api_compatibility_row_dict()] + 
expected = [row.as_api_row_dict()] self.assertEqual(response, { 'result': 1, @@ -156,13 +143,15 @@ def test_csv_format(self): **{'format':'csv'} ) - # This is a hardcoded mess because of api.php. + # This is a hardcoded mess because of the field ordering constructed here: + # https://github.com/cmu-delphi/delphi-epidata/blob/f7da6598a810be8df5374e3a71512c631c3a14f1/src/server/endpoints/covidcast.py#L83-L93 column_order = [ - "geo_value", "signal", "time_value", "direction", "issue", "lag", "missing_value", + "geo_value", "signal", "source", "geo_type", "time_type", + "time_value", "direction", "issue", "lag", "missing_value", "missing_stderr", "missing_sample_size", "value", "stderr", "sample_size" ] expected = ( - row.as_api_compatibility_row_df() + row.as_api_row_df() .assign(direction = None) .to_csv(columns=column_order, index=False) ) @@ -179,7 +168,7 @@ def test_raw_json_format(self): # make the request response = self.request_based_on_row(row, **{'format':'json'}) - expected = [row.as_api_compatibility_row_dict()] + expected = [row.as_api_row_dict()] # assert that the right data came back self.assertEqual(response, expected) @@ -191,13 +180,13 @@ def test_fields(self): row = self._insert_placeholder_set_one() # limit fields - response = self.request_based_on_row(row, **{"fields":"time_value,geo_value"}) + response = self.request_based_on_row(row, **{"fields":"time_value,geo_value,geo_type,source,time_type"}) - expected = row.as_api_compatibility_row_dict() + expected = row.as_api_row_dict() expected_all = { 'result': 1, 'epidata': [{ - k: expected[k] for k in ['time_value', 'geo_value'] + k: expected[k] for k in ['time_value', 'geo_value', 'geo_type', 'source', 'time_type'] }], 'message': 'success', } @@ -206,7 +195,7 @@ def test_fields(self): self.assertEqual(response, expected_all) # limit using invalid fields - response = self.request_based_on_row(row, fields='time_value,geo_value,doesnt_exist') + response = self.request_based_on_row(row, 
fields='time_value,geo_value,geo_type,source,time_type,doesnt_exist') # assert that the right data came back (only valid fields) self.assertEqual(response, expected_all) @@ -226,7 +215,7 @@ def test_location_wildcard(self): # insert placeholder data rows = self._insert_placeholder_set_two() - expected = [row.as_api_compatibility_row_dict() for row in rows[:3]] + expected = [row.as_api_row_dict() for row in rows[:3]] # make the request response = self.request_based_on_row(rows[0], geo_value="*") @@ -243,7 +232,7 @@ def test_time_values_wildcard(self): # insert placeholder data rows = self._insert_placeholder_set_three() - expected = [row.as_api_compatibility_row_dict() for row in rows[:3]] + expected = [row.as_api_row_dict() for row in rows[:3]] # make the request response = self.request_based_on_row(rows[0], time_values="*") @@ -261,7 +250,7 @@ def test_issues_wildcard(self): # insert placeholder data rows = self._insert_placeholder_set_five() - expected = [row.as_api_compatibility_row_dict() for row in rows[:3]] + expected = [row.as_api_row_dict() for row in rows[:3]] # make the request response = self.request_based_on_row(rows[0], issues="*") @@ -279,7 +268,7 @@ def test_signal_wildcard(self): # insert placeholder data rows = self._insert_placeholder_set_four() - expected_signals = [row.as_api_compatibility_row_dict() for row in rows[:3]] + expected_signals = [row.as_api_row_dict() for row in rows[:3]] # make the request response = self.request_based_on_row(rows[0], signals="*") @@ -297,7 +286,7 @@ def test_geo_value(self): # insert placeholder data rows = self._insert_placeholder_set_two() - expected = [row.as_api_compatibility_row_dict() for row in rows[:3]] + expected = [row.as_api_row_dict() for row in rows[:3]] def fetch(geo_value): # make the request @@ -335,7 +324,7 @@ def test_location_timeline(self): # insert placeholder data rows = self._insert_placeholder_set_three() - expected_timeseries = [row.as_api_compatibility_row_dict() for row in rows[:3]] + 
expected_timeseries = [row.as_api_row_dict() for row in rows[:3]] # make the request response = self.request_based_on_row(rows[0], time_values='20000101-20000105') @@ -372,7 +361,7 @@ def test_nullable_columns(self): # make the request response = self.request_based_on_row(row) - expected = row.as_api_compatibility_row_dict() + expected = row.as_api_row_dict() # assert that the right data came back self.assertEqual(response, { @@ -393,7 +382,7 @@ def test_temporal_partitioning(self): # make the request response = self.request_based_on_row(rows[1], time_values="*") - expected = [rows[1].as_api_compatibility_row_dict()] + expected = [rows[1].as_api_row_dict()] # assert that the right data came back self.assertEqual(response, { diff --git a/integrations/server/test_covidcast_endpoints.py b/integrations/server/test_covidcast_endpoints.py index 3ba0af039..9d75e5a6a 100644 --- a/integrations/server/test_covidcast_endpoints.py +++ b/integrations/server/test_covidcast_endpoints.py @@ -6,52 +6,18 @@ # third party from more_itertools import windowed -import requests import pandas as pd from delphi.epidata.maintenance.covidcast_meta_cache_updater import main as update_cache -from delphi.epidata.acquisition.covidcast.test_utils import CovidcastBase, CovidcastTestRow +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase, CovidcastTestRow -# use the local instance of the Epidata API -BASE_URL = "http://delphi_web_epidata/epidata/covidcast" -BASE_URL_OLD = "http://delphi_web_epidata/epidata/api.php" -AUTH = ('epidata', 'key') - -class CovidcastEndpointTests(CovidcastBase): +class CovidcastEndpointTests(CovidcastTestBase): """Tests the `covidcast/*` endpoint.""" def localSetUp(self): """Perform per-test setup.""" - # reset the `covidcast_meta_cache` table (it should always have one row) - self._db._cursor.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') - - cur = self._db._cursor - # NOTE: we must specify the db schema "epidata" here 
because the cursor/connection are bound to schema "covid" - cur.execute("TRUNCATE TABLE epidata.api_user") - cur.execute("TRUNCATE TABLE epidata.user_role") - cur.execute("TRUNCATE TABLE epidata.user_role_link") - cur.execute("INSERT INTO epidata.api_user (api_key, email) VALUES ('quidel_key', 'quidel_email')") - cur.execute("INSERT INTO epidata.user_role (name) VALUES ('quidel')") - cur.execute( - "INSERT INTO epidata.user_role_link (user_id, role_id) SELECT api_user.id, user_role.id FROM epidata.api_user JOIN epidata.user_role WHERE api_key='quidel_key' and user_role.name='quidel'" - ) - cur.execute("INSERT INTO epidata.api_user (api_key, email) VALUES ('key', 'email')") - - def _fetch(self, endpoint="/", is_compatibility=False, auth=AUTH, **params): - # make the request - if is_compatibility: - url = BASE_URL_OLD - # only set endpoint if it's not already set - # only set endpoint if it's not already set - params.setdefault("endpoint", "covidcast") - if params.get("source"): - params.setdefault("data_source", params.get("source")) - else: - url = f"{BASE_URL}{endpoint}" - response = requests.get(url, params=params, auth=auth) - response.raise_for_status() - return response.json() + self.role_name = "quidel" def _diff_rows(self, rows: Sequence[float]): return [ @@ -72,11 +38,16 @@ def test_basic(self): self._insert_rows(rows) with self.subTest("validation"): - out = self._fetch("/") + out = self._make_request(auth=self.epidata_client.auth, json=True, raise_for_status=True) self.assertEqual(out["result"], -1) with self.subTest("simple"): - out = self._fetch("/", signal=first.signal_pair(), geo=first.geo_pair(), time="day:*") + params = { + "signal": first.signal_pair(), + "geo": first.geo_pair(), + "time": "day:*" + } + out = self._make_request(auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out["epidata"]), len(rows)) def test_basic_restricted_source(self): @@ -86,34 +57,71 @@ def 
test_basic_restricted_source(self): self._insert_rows(rows) with self.subTest("validation"): - out = self._fetch("/") + out = self._make_request(auth=self.epidata_client.auth, json=True, raise_for_status=True) self.assertEqual(out["result"], -1) with self.subTest("no_roles"): - out = self._fetch("/", signal=first.signal_pair(), geo=first.geo_pair(), time="day:*") + params = { + "signal": first.signal_pair(), + "geo": first.geo_pair(), + "time": "day:*" + } + out = self._make_request(auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out["epidata"]), 0) with self.subTest("no_api_key"): - out = self._fetch("/", auth=None, signal=first.signal_pair(), geo=first.geo_pair(), time="day:*") + params = { + "signal": first.signal_pair(), + "geo": first.geo_pair(), + "time": "day:*" + } + out = self._make_request(json=True, raise_for_status=True, params=params) self.assertEqual(len(out["epidata"]), 0) with self.subTest("quidel_role"): - out = self._fetch("/", auth=("epidata", "quidel_key"), signal=first.signal_pair(), geo=first.geo_pair(), time="day:*") + params = { + "signal": first.signal_pair(), + "geo": first.geo_pair(), + "time": "day:*", + "auth": "quidel_key" + } + out = self._make_request(json=True, raise_for_status=True, params=params) self.assertEqual(len(out["epidata"]), len(rows)) - def test_compatibility(self): - """Request at the /api.php endpoint.""" - rows = [CovidcastTestRow.make_default_row(source="src", signal="sig", time_value=2020_04_01 + i, value=i) for i in range(10)] - first = rows[0] - self._insert_rows(rows) - - with self.subTest("validation"): - out = self._fetch("/", is_compatibility=True) - self.assertEqual(out["result"], -1) - - with self.subTest("simple"): - out = self._fetch("/", signal=first.signal_pair(), geo=first.geo_pair(), time="day:*", is_compatibility=True) - self.assertEqual(len(out["epidata"]), len(rows)) + # Commented out this section as we want to get rid of legacy .php code + # 
----------------------------------------- + # def test_compatibility(self): + # """Request at the /api.php endpoint.""" + # rows = [CovidcastTestRow.make_default_row(source="src", signal="sig", time_value=2020_04_01 + i, value=i) for i in range(10)] + # first = rows[0] + # self._insert_rows(rows) + + # with self.subTest("validation"): + # out = self._fetch("/", is_compatibility=True) + # self.assertEqual(out["result"], -1) + + # with self.subTest("simple"): + # out = self._fetch("/", signal=first.signal_pair(), geo=first.geo_pair(), time="day:*", is_compatibility=True) + # self.assertEqual(out["epidata"], [row.as_api_compatibility_row_dict() for row in rows]) + + # def test_compatibility_restricted_source(self): + # """Restricted request at the /api.php endpoint.""" + # rows = [CovidcastTestRow.make_default_row(time_value=2020_04_01 + i, value=i, source="quidel") for i in range(10)] + # first = rows[0] + # self._insert_rows(rows) + + # with self.subTest("no_roles"): + # out = self._fetch("/", signal=first.signal_pair(), geo=first.geo_pair(), time="day:*", is_compatibility=True) + # self.assertTrue("epidata" not in out) + + # with self.subTest("no_api_key"): + # out = self._fetch("/", auth=None, signal=first.signal_pair(), geo=first.geo_pair(), time="day:*", is_compatibility=True) + # self.assertTrue("epidata" not in out) + + # with self.subTest("quidel_role"): + # out = self._fetch("/", auth=("epidata", "quidel_key"), signal=first.signal_pair(), geo=first.geo_pair(), time="day:*", is_compatibility=True) + # self.assertEqual(out["epidata"], [row.as_api_compatibility_row_dict() for row in rows]) + # ----------------------------------------- def test_trend(self): """Request a signal from the /trend endpoint.""" @@ -125,8 +133,14 @@ def test_trend(self): ref = rows[num_rows // 2] self._insert_rows(rows) - out = self._fetch("/trend", signal=first.signal_pair(), geo=first.geo_pair(), date=last.time_value, window="20200401-20201212", basis=ref.time_value) - + params = { + 
"signal": first.signal_pair(), + "geo": first.geo_pair(), + "date": last.time_value, + "window": "20200401-20201212", + "basis": ref.time_value + } + out = self._make_request(endpoint="covidcast/trend", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(out["result"], 1) self.assertEqual(len(out["epidata"]), 1) @@ -150,7 +164,6 @@ def test_trend(self): self.assertEqual(trend["max_value"], last.value) self.assertEqual(trend["max_trend"], "steady") - def test_trendseries(self): """Request a signal from the /trendseries endpoint.""" @@ -160,7 +173,14 @@ def test_trendseries(self): last = rows[-1] self._insert_rows(rows) - out = self._fetch("/trendseries", signal=first.signal_pair(), geo=first.geo_pair(), date=last.time_value, window="20200401-20200410", basis=1) + params = { + "signal": first.signal_pair(), + "geo": first.geo_pair(), + "date": last.time_value, + "window": "20200401-20200410", + "basis": 1 + } + out = self._make_request(endpoint="covidcast/trendseries", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(out["result"], 1) self.assertEqual(len(out["epidata"]), 3) @@ -217,7 +237,6 @@ def match_row(trend, row): self.assertEqual(trend["max_value"], first.value) self.assertEqual(trend["max_trend"], "decreasing") - def test_csv(self): """Request a signal from the /csv endpoint.""" @@ -225,17 +244,18 @@ def test_csv(self): first = rows[0] self._insert_rows(rows) - response = requests.get( - f"{BASE_URL}/csv", - params=dict(signal=first.signal_pair(), start_day="2020-04-01", end_day="2020-12-12", geo_type=first.geo_type), - ) - response.raise_for_status() + params = { + "signal": first.signal_pair(), + "start_day": "2020-04-01", + "end_day": "2020-12-12", + "geo_type": first.geo_type + } + response = self._make_request(endpoint="covidcast/csv", raise_for_status=True, params=params) out = response.text df = pd.read_csv(StringIO(out), index_col=0) 
self.assertEqual(df.shape, (len(rows), 10)) self.assertEqual(list(df.columns), ["geo_value", "signal", "time_value", "issue", "lag", "value", "stderr", "sample_size", "geo_type", "data_source"]) - def test_backfill(self): """Request a signal from the /backfill endpoint.""" @@ -247,7 +267,13 @@ def test_backfill(self): self._insert_rows([*issue_0, *issue_1, *last_issue]) first = issue_0[0] - out = self._fetch("/backfill", signal=first.signal_pair(), geo=first.geo_pair(), time="day:20200401-20201212", anchor_lag=3) + params = { + "signal": first.signal_pair(), + "geo": first.geo_pair(), + "time": "day:20200401-20201212", + "anchor_lag": 3 + } + out = self._make_request(endpoint="covidcast/backfill", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(out["result"], 1) df = pd.DataFrame(out["epidata"]) self.assertEqual(len(df), 3 * num_rows) # num issues @@ -277,7 +303,7 @@ def test_meta(self): update_cache(args=None) with self.subTest("plain"): - out = self._fetch("/meta") + out = self._make_request(endpoint="covidcast/meta", auth=self.epidata_client.auth, json=True, raise_for_status=True) self.assertEqual(len(out), 1) data_source = out[0] self.assertEqual(data_source["source"], first.source) @@ -295,20 +321,26 @@ def test_meta(self): self.assertEqual(stats_g["mean"], sum(r.value for r in rows) / len(rows)) with self.subTest("filtered"): - out = self._fetch("/meta", signal=f"{first.source}:*") + params = { + "signal": f"{first.source}:*" + } + out = self._make_request(endpoint="covidcast/meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out), 1) data_source = out[0] self.assertEqual(data_source["source"], first.source) self.assertEqual(len(data_source["signals"]), 1) stats = data_source["signals"][0] self.assertEqual(stats["source"], first.source) - out = self._fetch("/meta", signal=f"{first.source}:X") + params = { + "signal": f"{first.source}:X" + } + out = 
self._make_request(endpoint="covidcast/meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out), 0) def test_meta_restricted(self): """Request 'restricted' signals from the /meta endpoint.""" # NOTE: this method is nearly identical to ./test_covidcast_meta.py:test_restricted_sources() - # ...except the self._fetch() methods are different, as is the format of those methods' outputs + # ...except the self._make_request() methods are different, as is the format of those methods' outputs # (the other covidcast_meta endpoint uses APrinter, this one returns its own unadulterated json). # additionally, the sample data used here must match entries (that is, named sources and signals) # from covidcast_utils.model.data_sources (the `data_sources` variable from file @@ -327,12 +359,15 @@ def test_meta_restricted(self): update_cache(args=None) # verify unauthenticated (no api key) or unauthorized (user w/o privilege) only see metadata for one source - self.assertEqual(len(self._fetch("/meta", auth=None)), 1) - self.assertEqual(len(self._fetch("/meta", auth=AUTH)), 1) + unauthenticated_request = self._make_request(endpoint="covidcast/meta", json=True, raise_for_status=True) + unauthorized_request = self._make_request(endpoint="covidcast/meta", auth=self.epidata_client.auth, json=True, raise_for_status=True) + self.assertEqual(len(unauthenticated_request), 1) + self.assertEqual(len(unauthorized_request), 1) # verify authorized user sees metadata for both sources qauth = ('epidata', 'quidel_key') - self.assertEqual(len(self._fetch("/meta", auth=qauth)), 2) + authorized_request = self._make_request(endpoint="covidcast/meta", auth=qauth, json=True, raise_for_status=True) + self.assertEqual(len(authorized_request), 2) def test_coverage(self): """Request a signal from the /coverage endpoint.""" @@ -344,17 +379,34 @@ def test_coverage(self): first = rows[0] with self.subTest("default"): - out = self._fetch("/coverage", 
signal=first.signal_pair(), geo_type=first.geo_type, latest=dates[-1], format="json") + params = { + "signal": first.signal_pair(), + "geo_type": first.geo_type, + "latest": dates[-1], + "format": "json" + } + out = self._make_request(endpoint="covidcast/coverage", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out), len(num_geos_per_date)) self.assertEqual([o["time_value"] for o in out], dates) self.assertEqual([o["count"] for o in out], num_geos_per_date) with self.subTest("specify window"): - out = self._fetch("/coverage", signal=first.signal_pair(), geo_type=first.geo_type, window=f"{dates[0]}-{dates[1]}", format="json") + params = { + "signal": first.signal_pair(), + "geo_type": first.geo_type, + "window": f"{dates[0]}-{dates[1]}", + "format": "json" + } + out = self._make_request("covidcast/coverage", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out), 2) self.assertEqual([o["time_value"] for o in out], dates[:2]) self.assertEqual([o["count"] for o in out], num_geos_per_date[:2]) with self.subTest("invalid geo_type"): - out = self._fetch("/coverage", signal=first.signal_pair(), geo_type="doesnt_exist", format="json") + params = { + "signal": first.signal_pair(), + "geo_type": "doesnt_exist", + "format": "json" + } + out = self._make_request(endpoint="covidcast/coverage", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(len(out), 0) diff --git a/integrations/server/test_covidcast_meta.py b/integrations/server/test_covidcast_meta.py index d03317c98..e79ec523c 100644 --- a/integrations/server/test_covidcast_meta.py +++ b/integrations/server/test_covidcast_meta.py @@ -1,24 +1,12 @@ """Integration tests for the `covidcast_meta` endpoint.""" -# standard library -import unittest - -# third party -import mysql.connector -import requests - #first party from delphi_utils import Nans -from 
delphi.epidata.acquisition.covidcast.test_utils import CovidcastBase, CovidcastTestRow +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase, CovidcastTestRow from delphi.epidata.maintenance.covidcast_meta_cache_updater import main as update_cache -import delphi.operations.secrets as secrets -# use the local instance of the Epidata API -BASE_URL = 'http://delphi_web_epidata/epidata/api.php' -AUTH = ('epidata', 'key') - -class CovidcastMetaTests(CovidcastBase): +class CovidcastMetaTests(CovidcastTestBase): """Tests the `covidcast_meta` endpoint.""" src_sig_lookups = { @@ -52,63 +40,23 @@ class CovidcastMetaTests(CovidcastBase): def localSetUp(self): """Perform per-test setup.""" - # connect to the `epidata` database and clear the `covidcast` table - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='covid') - cur = cnx.cursor() - - # clear all tables - cur.execute("truncate table epimetric_load") - cur.execute("truncate table epimetric_full") - cur.execute("truncate table epimetric_latest") - cur.execute("truncate table geo_dim") - cur.execute("truncate table signal_dim") - # reset the `covidcast_meta_cache` table (it should always have one row) - cur.execute('update covidcast_meta_cache set timestamp = 0, epidata = "[]"') - - # NOTE: we must specify the db schema "epidata" here because the cursor/connection are bound to schema "covid" - cur.execute("TRUNCATE TABLE epidata.api_user") - cur.execute("TRUNCATE TABLE epidata.user_role") - cur.execute("TRUNCATE TABLE epidata.user_role_link") - cur.execute("INSERT INTO epidata.api_user (api_key, email) VALUES ('quidel_key', 'quidel_email')") - cur.execute("INSERT INTO epidata.user_role (name) VALUES ('quidel')") - cur.execute( - "INSERT INTO epidata.user_role_link (user_id, role_id) SELECT api_user.id, user_role.id FROM epidata.api_user JOIN epidata.user_role WHERE api_key='quidel_key' and user_role.name='quidel'" - ) - cur.execute("INSERT INTO 
epidata.api_user (api_key, email) VALUES ('key', 'email')") + self.role_name = "quidel" # populate dimension tables for (src,sig) in self.src_sig_lookups: - cur.execute(''' + self._db._cursor.execute(''' INSERT INTO `signal_dim` (`signal_key_id`, `source`, `signal`) VALUES (%d, '%s', '%s'); ''' % ( self.src_sig_lookups[(src,sig)], src, sig )) for (gt,gv) in self.geo_lookups: - cur.execute(''' + self._db._cursor.execute(''' INSERT INTO `geo_dim` (`geo_key_id`, `geo_type`, `geo_value`) VALUES (%d, '%s', '%s'); ''' % ( self.geo_lookups[(gt,gv)], gt, gv )) - cnx.commit() - cur.close() + self._db._connection.commit() # initialize counter for tables without non-autoincrement id self.id_counter = 666 - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - # use the local instance of the epidata database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - - def localTearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() def insert_placeholder_data(self): expected = [] @@ -135,27 +83,19 @@ def insert_placeholder_data(self): }) for tv in (1, 2): for gv, v in zip(('geo1', 'geo2'), (10, 20)): - self.cur.execute(self.template % ( + self._db._cursor.execute(self.template % ( self._get_id(), self.src_sig_lookups[(src,sig)], self.geo_lookups[(gt,gv)], tt, tv, v, tv, # re-use time value for issue Nans.NOT_MISSING, Nans.NOT_MISSING, Nans.NOT_MISSING )) - self.cnx.commit() + self._db._connection.commit() update_cache(args=None) return expected def _get_id(self): self.id_counter += 1 return self.id_counter - - @staticmethod - def _fetch(auth=AUTH, **kwargs): - params = kwargs.copy() - params['endpoint'] = 'covidcast_meta' - response = requests.get(BASE_URL, params=params, auth=auth) - response.raise_for_status() - return response.json() def test_round_trip(self): """Make a simple round-trip with some sample data.""" @@ -164,7 +104,7 @@ def test_round_trip(self): expected = 
self.insert_placeholder_data() # make the request - response = self._fetch() + response = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True) # assert that the right data came back self.assertEqual(response, { @@ -186,12 +126,15 @@ def test_restricted_sources(self): update_cache(args=None) # verify unauthenticated (no api key) or unauthorized (user w/o privilege) only see metadata for one source - self.assertEqual(len(self._fetch(auth=None)['epidata']), 1) - self.assertEqual(len(self._fetch(auth=AUTH)['epidata']), 1) + unauthenticated_request = self._make_request(endpoint="covidcast_meta", json=True, raise_for_status=True) + self.assertEqual(len(unauthenticated_request['epidata']), 1) + unauthorized_request = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True) + self.assertEqual(len(unauthorized_request['epidata']), 1) # verify authorized user sees metadata for both sources qauth = ('epidata', 'quidel_key') - self.assertEqual(len(self._fetch(auth=qauth)['epidata']), 2) + authorized_request = self._make_request(endpoint="covidcast_meta", auth=qauth, json=True, raise_for_status=True) + self.assertEqual(len(authorized_request['epidata']), 2) def test_filter(self): """Test filtering options some sample data.""" @@ -199,63 +142,99 @@ def test_filter(self): # insert placeholder data and accumulate expected results (in sort order) expected = self.insert_placeholder_data() - res = self._fetch() + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True) self.assertEqual(res['result'], 1) self.assertEqual(len(res['epidata']), len(expected)) # time types - res = self._fetch(time_types='day') + params = { + "time_types": "day" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) 
self.assertEqual(len(res['epidata']), sum([1 for s in expected if s['time_type'] == 'day'])) - res = self._fetch(time_types='day,week') + params = { + "time_types": "day,week" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), len(expected)) - res = self._fetch(time_types='sec') + params = { + "time_types": "sec" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], -2) # geo types - res = self._fetch(geo_types='hrr') + params = { + "geo_types": "hrr" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), sum([1 for s in expected if s['geo_type'] == 'hrr'])) - res = self._fetch(geo_types='hrr,msa') + params = { + "geo_types": "hrr,msa" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), len(expected)) - res = self._fetch(geo_types='state') + params = { + "geo_types": "state" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], -2) # signals - res = self._fetch(signals='src1:sig1') + params = { + "signals": "src1:sig1" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) 
self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), sum([1 for s in expected if s['data_source'] == 'src1' and s['signal'] == 'sig1'])) - res = self._fetch(signals='src1') + params = { + "signals": "src1" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), sum([1 for s in expected if s['data_source'] == 'src1'])) - res = self._fetch(signals='src1:*') + params = { + "signals": "src1:*" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), sum([1 for s in expected if s['data_source'] == 'src1'])) - res = self._fetch(signals='src1:src4') + params = { + "signals": "src1:src4" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], -2) - res = self._fetch(signals='src1:*,src2:*') + params = { + "signals": "src1:*,src2:*" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertTrue(isinstance(res['epidata'], list)) self.assertEqual(len(res['epidata']), len(expected)) # filter fields - res = self._fetch(fields='data_source,min_time') + params = { + "fields": "data_source,min_time" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertEqual(len(res['epidata']), len(expected)) self.assertTrue('data_source' in res['epidata'][0]) @@ -263,8 +242,10 @@ def 
test_filter(self): self.assertFalse('max_time' in res['epidata'][0]) self.assertFalse('signal' in res['epidata'][0]) - res = self._fetch(fields='xx') + params = { + "fields": "xx" + } + res = self._make_request(endpoint="covidcast_meta", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(res['result'], 1) self.assertEqual(len(res['epidata']), len(expected)) self.assertEqual(res['epidata'][0], {}) - diff --git a/integrations/server/test_covidcast_nowcast.py b/integrations/server/test_covidcast_nowcast.py index 889d962dd..a33d6b0b8 100644 --- a/integrations/server/test_covidcast_nowcast.py +++ b/integrations/server/test_covidcast_nowcast.py @@ -1,51 +1,14 @@ """Integration tests for the `covidcast_nowcast` endpoint.""" -# standard library -import unittest +from delphi.epidata.common.covidcast_test_base import CovidcastTestBase -# third party -import mysql.connector -import requests - -# use the local instance of the Epidata API -BASE_URL = 'http://delphi_web_epidata/epidata/api.php' -AUTH = ('epidata', 'key') - - -class CovidcastTests(unittest.TestCase): +class CovidcastTests(CovidcastTestBase): """Tests the `covidcast` endpoint.""" - def setUp(self): + def localSetUp(self): """Perform per-test setup.""" - - # connect to the `epidata` database and clear the `covidcast` table - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='epidata') - cur = cnx.cursor() - cur.execute('truncate table covidcast_nowcast') - cur.execute('delete from api_user') - cur.execute('insert into api_user(api_key, email) values("key", "email")') - cnx.commit() - cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() - - @staticmethod - def _make_request(params: dict): - response = requests.get(BASE_URL, params=params, auth=AUTH) - 
response.raise_for_status() - return response.json() + self.truncate_tables_list = ["covidcast_nowcast"] def test_query(self): """Query nowcasts using default and specified issue.""" @@ -59,7 +22,6 @@ def test_query(self): self.cnx.commit() # make the request with specified issue date params={ - 'source': 'covidcast_nowcast', 'data_source': 'src', 'signals': 'sig', 'sensor_names': 'sensor', @@ -69,7 +31,7 @@ def test_query(self): 'geo_value': '01001', 'issues': 20200101 } - response = self._make_request(params=params) + response = self._make_request(endpoint="covidcast_nowcast", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(response, { 'result': 1, 'epidata': [{ @@ -85,7 +47,6 @@ def test_query(self): # make request without specific issue date params={ - 'source': 'covidcast_nowcast', 'data_source': 'src', 'signals': 'sig', 'sensor_names': 'sensor', @@ -94,7 +55,7 @@ def test_query(self): 'time_values': 20200101, 'geo_value': '01001', } - response = self._make_request(params=params) + response = self._make_request(endpoint="covidcast_nowcast", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(response, { 'result': 1, @@ -110,7 +71,6 @@ def test_query(self): }) params={ - 'source': 'covidcast_nowcast', 'data_source': 'src', 'signals': 'sig', 'sensor_names': 'sensor', @@ -120,7 +80,7 @@ def test_query(self): 'geo_value': '01001', 'as_of': 20200101 } - response = self._make_request(params=params) + response = self._make_request(endpoint="covidcast_nowcast", auth=self.epidata_client.auth, json=True, raise_for_status=True, params=params) self.assertEqual(response, { 'result': 1, diff --git a/integrations/server/test_delphi.py b/integrations/server/test_delphi.py new file mode 100644 index 000000000..36e31e2b2 --- /dev/null +++ b/integrations/server/test_delphi.py @@ -0,0 +1,58 @@ +import json + +# first party +from delphi.epidata.common.delphi_test_base import 
DelphiTestBase + + +class DelphiTest(DelphiTestBase): + """Basic integration tests for delphi endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["forecasts"] + + def test_delphi(self): + """Basic integration test for delphi endpoint""" + self.cur.execute( + "INSERT INTO `forecasts` (`system`, `epiweek`, `json`) VALUES(%s, %s, %s)", + ( + "eb", + "201441", + json.dumps( + { + "_version": "version", + "name": "name", + "season": "season", + "epiweek": "epiweek", + "year_weeks": 222, + "season_weeks": 111, + "ili_bins": "ili_bins_123", + "ili_bin_size": "ili_bin_size231", + } + ), + ), + ) + self.cnx.commit() + response = self.epidata_client.delphi(system="eb", epiweek=201441) + self.assertEqual( + response, + { + "epidata": [ + { + "epiweek": 201441, + "forecast": { + "_version": "version", + "epiweek": "epiweek", + "ili_bin_size": "ili_bin_size231", + "ili_bins": "ili_bins_123", + "name": "name", + "season": "season", + "season_weeks": 111, + "year_weeks": 222, + }, + "system": "eb", + } + ], + "message": "success", + "result": 1, + }, + ) diff --git a/integrations/server/test_dengue_nowcast.py b/integrations/server/test_dengue_nowcast.py new file mode 100644 index 000000000..8762472b8 --- /dev/null +++ b/integrations/server/test_dengue_nowcast.py @@ -0,0 +1,42 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class DengueNowcastTest(DelphiTestBase): + """Basic integration tests for dengue_nowcast endpint.""" + + def localSetUp(self): + create_dengue_nowcasts = """ + CREATE TABLE IF NOT EXISTS `dengue_nowcasts` ( + `id` int NOT NULL AUTO_INCREMENT, + `target` varchar(32) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL, + `epiweek` int NOT NULL, + `location` varchar(12) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL, + `value` float NOT NULL, + `std` float NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `entry` (`target`,`epiweek`,`location`), + KEY `target` (`target`), + KEY `epiweek` 
(`epiweek`), + KEY `location` (`location`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3; + """ + self.create_tables_list = [create_dengue_nowcasts] + self.truncate_tables_list = ["dengue_nowcasts"] + + def test_dengue_nowcasts(self): + """Basic integration test for dengue_nowcasts endpoint""" + self.cur.execute( + "INSERT INTO dengue_nowcasts(target, epiweek, location, value, std) VALUES(%s, %s, %s, %s, %s)", + ("num_dengue", "201409", "ar", "85263", "351456"), + ) + self.cnx.commit() + response = self.epidata_client.dengue_nowcast(locations="ar", epiweeks=201409) + self.assertEqual( + response, + { + "epidata": [{"location": "ar", "epiweek": 201409, "value": 85263.0, "std": 351456.0}], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_dengue_sensors.py b/integrations/server/test_dengue_sensors.py new file mode 100644 index 000000000..424bc24d6 --- /dev/null +++ b/integrations/server/test_dengue_sensors.py @@ -0,0 +1,43 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class DengueSensorsTest(DelphiTestBase): + """Basic integration tests for dengue_sensors endpint.""" + + def localSetUp(self): + create_dengue_sensors = """ + CREATE TABLE IF NOT EXISTS `dengue_sensors` ( + `id` int NOT NULL AUTO_INCREMENT, + `target` varchar(32) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL, + `name` varchar(8) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL, + `epiweek` int NOT NULL, + `location` varchar(12) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL, + `value` float NOT NULL, + PRIMARY KEY (`id`), + UNIQUE KEY `entry` (`target`,`name`,`epiweek`,`location`), + KEY `sensor` (`target`,`name`), + KEY `epiweek` (`epiweek`), + KEY `location` (`location`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3; + """ + self.create_tables_list = [create_dengue_sensors] + self.truncate_tables_list = ["dengue_sensors"] + self.role_name = "sensors" + + def test_dengue_sensors(self): + """Basic 
integration test for dengue_sensors endpoint""" + self.cur.execute( + "INSERT INTO `dengue_sensors`(`target`, `name`, `epiweek`, `location`, `value`) VALUES(%s, %s, %s, %s, %s)", + ("num_dengue", "ght", "201432", "ag", "1234"), + ) + self.cnx.commit() + response = self.epidata_client.dengue_sensors(auth="sensors_key", names="ght", locations="ag", epiweeks="201432") + self.assertEqual( + response, + { + "epidata": [{"name": "ght", "location": "ag", "epiweek": 201432, "value": 1234.0}], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_ecdc_ili.py b/integrations/server/test_ecdc_ili.py new file mode 100644 index 000000000..e49e0bcfa --- /dev/null +++ b/integrations/server/test_ecdc_ili.py @@ -0,0 +1,35 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class EcdcIliTest(DelphiTestBase): + """Basic integration tests for ecdc_ili endpoint.""" + + def localSetUp(self): + self.truncate_tables_list = ["ecdc_ili"] + + def test_ecdc_ili(self): + """Basic integration test for ecdc_ili endpoint""" + self.cur.execute( + "INSERT INTO `ecdc_ili`(`release_date`, `issue`, `epiweek`, `lag`, `region`, `incidence_rate`) VALUES(%s, %s, %s, %s, %s, %s)", + ("2020-03-26", "202012", "201840", "76", "Armenia", "0"), + ) + self.cnx.commit() + response = self.epidata_client.ecdc_ili(regions="Armenia", epiweeks="201840") + self.assertEqual( + response, + { + "epidata": [ + { + "release_date": "2020-03-26", + "region": "Armenia", + "issue": 202012, + "epiweek": 201840, + "lag": 76, + "incidence_rate": 0.0, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_flusurv.py b/integrations/server/test_flusurv.py new file mode 100644 index 000000000..586b28d1c --- /dev/null +++ b/integrations/server/test_flusurv.py @@ -0,0 +1,40 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class FlusurvTest(DelphiTestBase): + """Basic integration tests for
flusurv endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["flusurv"] + + def test_flusurv(self): + """Basic integration test for flusurv endpoint""" + self.cur.execute( + "INSERT INTO `flusurv`(`release_date`, `issue`, `epiweek`, `location`, `lag`, `rate_age_0`, `rate_age_1`, `rate_age_2`, `rate_age_3`, `rate_age_4`, `rate_overall`, `rate_age_5`, `rate_age_6`, `rate_age_7`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", + ("2012-11-02", "201243", "201143", "CA", "52", "0", "0", "0", "0.151", "0", "0.029", "0", "0", "0"), + ) + self.cnx.commit() + response = self.epidata_client.flusurv(epiweeks=201143, locations="CA") + self.assertEqual( + response, + { + "epidata": [ + { + "release_date": "2012-11-02", + "location": "CA", + "issue": 201243, + "epiweek": 201143, + "lag": 52, + "rate_age_0": 0.0, + "rate_age_1": 0.0, + "rate_age_2": 0.0, + "rate_age_3": 0.151, + "rate_age_4": 0.0, + "rate_overall": 0.029, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_fluview.py b/integrations/server/test_fluview.py index c192da637..eb6c1ec0f 100644 --- a/integrations/server/test_fluview.py +++ b/integrations/server/test_fluview.py @@ -1,50 +1,16 @@ """Integration tests for the `fluview` endpoint.""" -# standard library -import unittest - -# third party -import mysql.connector - # first party -from delphi.epidata.client.delphi_epidata import Epidata +from delphi.epidata.common.delphi_test_base import DelphiTestBase -class FluviewTests(unittest.TestCase): +class FluviewTests(DelphiTestBase): """Tests the `fluview` endpoint.""" - @classmethod - def setUpClass(cls): - """Perform one-time setup.""" - - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - - def setUp(self): + def localSetUp(self): """Perform per-test setup.""" - # connect to the `epidata` database and clear the `fluview` table - cnx = 
mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='epidata') - cur = cnx.cursor() - cur.execute('truncate table fluview') - cur.execute('delete from api_user') - cur.execute('insert into api_user(api_key, email) values ("key", "email")') - cnx.commit() - cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() + self.truncate_tables_list = ["fluview"] def test_round_trip(self): """Make a simple round-trip with some sample data.""" @@ -62,7 +28,7 @@ def test_round_trip(self): self.cnx.commit() # make the request - response = Epidata.fluview('nat', 202020) + response = self.epidata_client.fluview('nat', 202020) # assert that the right data came back self.assertEqual(response, { diff --git a/integrations/server/test_fluview_clinical.py b/integrations/server/test_fluview_clinical.py new file mode 100644 index 000000000..300e0cb73 --- /dev/null +++ b/integrations/server/test_fluview_clinical.py @@ -0,0 +1,40 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class FluviewClinicalTest(DelphiTestBase): + """Basic integration tests for fluview_clinical endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["fluview_clinical"] + + def test_fluview_clinical(self): + """Basic integration test for fluview_clinical endpoint""" + self.cur.execute( + "INSERT INTO `fluview_clinical`(`release_date`, `issue`, `epiweek`, `region`, `lag`, `total_specimens`, `total_a`, `total_b`, `percent_positive`, `percent_a`, `percent_b`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", + ("2018-10-10", "201839", "201640", "al", "103", "406", "4", "1", "1.32", "0.99", "0.25"), + ) + self.cnx.commit() + response = self.epidata_client.fluview_clinical(epiweeks=201640, regions="al") + self.assertEqual( + response, + { + "epidata": [ + { + 
"release_date": "2018-10-10", + "region": "al", + "issue": 201839, + "epiweek": 201640, + "lag": 103, + "total_specimens": 406, + "total_a": 4, + "total_b": 1, + "percent_positive": 1.32, + "percent_a": 0.99, + "percent_b": 0.25, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_fluview_meta.py b/integrations/server/test_fluview_meta.py index 1e2cf73e3..b23d79f9d 100644 --- a/integrations/server/test_fluview_meta.py +++ b/integrations/server/test_fluview_meta.py @@ -1,50 +1,15 @@ """Integration tests for the `fluview_meta` endpoint.""" -# standard library -import unittest - -# third party -import mysql.connector - # first party -from delphi.epidata.client.delphi_epidata import Epidata +from delphi.epidata.common.delphi_test_base import DelphiTestBase -class FluviewMetaTests(unittest.TestCase): +class FluviewMetaTests(DelphiTestBase): """Tests the `fluview_meta` endpoint.""" - @classmethod - def setUpClass(cls): - """Perform one-time setup.""" - - # use the local instance of the Epidata API - Epidata.BASE_URL = 'http://delphi_web_epidata/epidata/api.php' - Epidata.auth = ('epidata', 'key') - - def setUp(self): + def localSetUp(self): """Perform per-test setup.""" - - # connect to the `epidata` database and clear the `fluview` table - cnx = mysql.connector.connect( - user='user', - password='pass', - host='delphi_database_epidata', - database='epidata') - cur = cnx.cursor() - cur.execute('truncate table fluview') - cur.execute('delete from api_user') - cur.execute('insert into api_user(api_key, email) values ("key", "email")') - cnx.commit() - cur.close() - - # make connection and cursor available to test cases - self.cnx = cnx - self.cur = cnx.cursor() - - def tearDown(self): - """Perform per-test teardown.""" - self.cur.close() - self.cnx.close() + self.truncate_tables_list = ["fluview"] def test_round_trip(self): """Make a simple round-trip with some sample data.""" @@ -64,7 +29,7 @@ def test_round_trip(self): 
self.cnx.commit() # make the request - response = Epidata.fluview_meta() + response = self.epidata_client.fluview_meta() # assert that the right data came back self.assertEqual(response, { diff --git a/integrations/server/test_gft.py b/integrations/server/test_gft.py new file mode 100644 index 000000000..80a0dc984 --- /dev/null +++ b/integrations/server/test_gft.py @@ -0,0 +1,22 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class GftTest(DelphiTestBase): + """Basic integration tests for gft endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["gft"] + + def test_gft(self): + """Basic integration test for gft endpoint""" + self.cur.execute( + "INSERT INTO `gft`(`epiweek`, `location`, `num`) VALUES(%s, %s, %s)", + ("200340", "nat", "902"), + ) + self.cnx.commit() + response = self.epidata_client.gft(locations="nat", epiweeks="200340") + self.assertEqual( + response, + {"epidata": [{"location": "nat", "epiweek": 200340, "num": 902}], "result": 1, "message": "success"}, + ) diff --git a/integrations/server/test_ght.py b/integrations/server/test_ght.py new file mode 100644 index 000000000..370a08f41 --- /dev/null +++ b/integrations/server/test_ght.py @@ -0,0 +1,24 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class GhtTest(DelphiTestBase): + """Basic integration tests for ght endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["ght"] + self.role_name = "ght" + + def test_ght(self): + """Basic integration test for ght endpoint""" + self.cur.execute( + "INSERT INTO `ght`(`query`, `location`, `epiweek`, `value`) VALUES(%s, %s, %s, %s)", + ("/n/query", "US", "200101", "12345"), + ) + self.cnx.commit() + ghtauth = "ght_key" + response = self.epidata_client.ght(locations="US", epiweeks="200101", query="/n/query", auth=ghtauth) + self.assertEqual( + response, + {"epidata": [{"location": "US", "epiweek": 200101, "value": 12345.0}], "result": 1, "message": 
"success"}, + ) diff --git a/integrations/server/test_kcdc_ili.py b/integrations/server/test_kcdc_ili.py new file mode 100644 index 000000000..48c1e5160 --- /dev/null +++ b/integrations/server/test_kcdc_ili.py @@ -0,0 +1,35 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class KcdcIliTest(DelphiTestBase): + """Basic integration tests for kcdc_ili endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["kcdc_ili"] + + def test_kcdc_ili(self): + """Basic integration test for kcdc_ili endpoint""" + self.cur.execute( + "INSERT INTO `kcdc_ili`(`release_date`, `issue`, `epiweek`, `lag`, `region`, `ili`) VALUES(%s, %s, %s, %s, %s, %s)", + ("2020-03-27", "202013", "200432", "222", "REG", "0.25"), + ) + self.cnx.commit() + response = self.epidata_client.kcdc_ili(regions="REG", epiweeks="200432") + self.assertEqual( + response, + { + "epidata": [ + { + "release_date": "2020-03-27", + "region": "REG", + "issue": 202013, + "epiweek": 200432, + "lag": 222, + "ili": 0.25, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_meta.py b/integrations/server/test_meta.py new file mode 100644 index 000000000..e0cd1013f --- /dev/null +++ b/integrations/server/test_meta.py @@ -0,0 +1,28 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class MetaTest(DelphiTestBase): + """Basic integration tests for meta endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["forecasts", "fluview", "wiki", "wiki_meta", "twitter"] + + def test_meta(self): + """Basic integration test for meta endpoint""" + response = self.epidata_client.meta() + self.assertEqual( + response, + { + "epidata": [ + { + "delphi": [], + "fluview": [{"latest_issue": None, "latest_update": None, "table_rows": 0}], + "twitter": [], + "wiki": [{"latest_update": None, "table_rows": 0}], + } + ], + "message": "success", + "result": 1, + }, + ) diff --git 
a/integrations/server/test_nidss_dengue.py b/integrations/server/test_nidss_dengue.py new file mode 100644 index 000000000..1b08048ce --- /dev/null +++ b/integrations/server/test_nidss_dengue.py @@ -0,0 +1,22 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class NiddsDengueTest(DelphiTestBase): + """Basic integration tests for nidss_dengue endpoint.""" + + def localSetUp(self): + self.truncate_tables_list = ["nidss_dengue"] + + def test_nidss_dengue(self): + """Basic integration test for nidss_dengue endpoint""" + self.cur.execute( + "INSERT INTO `nidss_dengue`(`epiweek`, `location`, `region`, `count`) VALUES(%s, %s, %s, %s)", + ("200340", "SomeCity", "Central", "0"), + ) + self.cnx.commit() + response = self.epidata_client.nidss_dengue(locations="SomeCity", epiweeks="200340") + self.assertEqual( + response, + {"epidata": [{"location": "SomeCity", "epiweek": 200340, "count": 0}], "result": 1, "message": "success"}, + ) diff --git a/integrations/server/test_nidss_flu.py b/integrations/server/test_nidss_flu.py new file mode 100644 index 000000000..678017502 --- /dev/null +++ b/integrations/server/test_nidss_flu.py @@ -0,0 +1,36 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class NiddsFluTest(DelphiTestBase): + """Basic integration tests for nidss_flu endpoint.""" + + def localSetUp(self): + self.truncate_tables_list = ["nidss_flu"] + + def test_nidss_flu(self): + """Basic integration test for nidss_flu endpoint""" + self.cur.execute( + "INSERT INTO `nidss_flu`(`release_date`, `issue`, `epiweek`, `region`, `lag`, `visits`, `ili`) VALUES(%s, %s, %s, %s, %s, %s, %s)", + ("2015-09-05", "201530", "200111", "SomeRegion", "222", "333", "444"), + ) + self.cnx.commit() + response = self.epidata_client.nidss_flu(regions="SomeRegion", epiweeks="200111") + self.assertEqual( + response, + { + "epidata": [ + { + "release_date": "2015-09-05", + "region": "SomeRegion", + "issue": 201530, + "epiweek":
200111, + "lag": 222, + "visits": 333, + "ili": 444.0, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_norostat.py b/integrations/server/test_norostat.py new file mode 100644 index 000000000..bfc2c2b80 --- /dev/null +++ b/integrations/server/test_norostat.py @@ -0,0 +1,113 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class NorostatTest(DelphiTestBase): + """Basic integration tests for norostat endpint.""" + + def localSetUp(self): + create_norostat_point_diffs = """ + CREATE TABLE IF NOT EXISTS `norostat_point_diffs` ( + `release_date` date NOT NULL, + `parse_time` datetime NOT NULL, + `location_id` int NOT NULL, + `epiweek` int NOT NULL, + `new_value` varchar(255) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci DEFAULT NULL, + PRIMARY KEY (`release_date`,`parse_time`,`location_id`,`epiweek`), + UNIQUE KEY `location_id` (`location_id`,`epiweek`,`release_date`,`parse_time`,`new_value`), + CONSTRAINT `norostat_point_diffs_ibfk_1` FOREIGN KEY (`release_date`, `parse_time`) REFERENCES `norostat_point_version_list` (`release_date`, `parse_time`), + CONSTRAINT `norostat_point_diffs_ibfk_2` FOREIGN KEY (`location_id`) REFERENCES `norostat_raw_datatable_location_pool` (`location_id`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3; + """ + + create_raw_datatable_version_list = """ + CREATE TABLE IF NOT EXISTS `norostat_raw_datatable_version_list` ( + `release_date` date NOT NULL, + `parse_time` datetime NOT NULL, + PRIMARY KEY (`release_date`,`parse_time`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3; + """ + + create_norostat_version_list = """ + CREATE TABLE IF NOT EXISTS `norostat_point_version_list` ( + `release_date` date NOT NULL, + `parse_time` datetime NOT NULL, + PRIMARY KEY (`release_date`,`parse_time`), + CONSTRAINT `norostat_point_version_list_ibfk_1` FOREIGN KEY (`release_date`, `parse_time`) REFERENCES `norostat_raw_datatable_version_list` (`release_date`, `parse_time`) + ) 
ENGINE=InnoDB DEFAULT CHARSET=utf8mb3; + """ + + create_norostat_datatable_location_pool = """ + CREATE TABLE IF NOT EXISTS `norostat_raw_datatable_location_pool` ( + `location_id` int NOT NULL AUTO_INCREMENT, + `location` varchar(255) CHARACTER SET utf8mb3 COLLATE utf8mb3_general_ci NOT NULL, + PRIMARY KEY (`location_id`), + UNIQUE KEY `location` (`location`) + ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb3; + """ + + self.create_tables_list = [ + create_raw_datatable_version_list, + create_norostat_version_list, + create_norostat_datatable_location_pool, + create_norostat_point_diffs, + ] + + self.delete_from_tables_list = [ + "norostat_point_diffs", + "norostat_point_version_list", + "norostat_raw_datatable_location_pool", + "norostat_raw_datatable_version_list", + ] + + self.role_name = "norostat" + + def test_norostat(self): + """Basic integration test for norostat endpoint""" + self.cur.execute( + 'INSERT INTO `norostat_raw_datatable_version_list`(`release_date`, `parse_time`) VALUES ("2023-07-19", "2023-07-10 15:24:51")' + ) + self.cur.execute( + 'INSERT INTO `norostat_raw_datatable_location_pool`(`location_id`, `location`) VALUES("1", "SomeTestLocation")' + ) + self.cur.execute( + 'INSERT INTO `norostat_point_version_list`(`release_date`, `parse_time`) VALUES("2023-07-19", "2023-07-10 15:24:51")' + ) + self.cur.execute( + 'INSERT INTO `norostat_point_diffs`(`release_date`, `parse_time`, `location_id`, `epiweek`, `new_value`) VALUES("2023-07-19", "2023-07-10 15:24:51", "1", "202329", 10)' + ) + self.cnx.commit() + response = self.epidata_client.norostat(auth="norostat_key", location="SomeTestLocation", epiweeks="202329") + self.assertEqual( + response, + { + "epidata": [{"release_date": "2023-07-19", "epiweek": 202329, "value": 10}], + "result": 1, + "message": "success", + }, + ) + return True + + def test_meta_norostat(self): + """Basic integration test for meta_norostat endpoint""" + + self.cur.execute( + "INSERT INTO 
`norostat_raw_datatable_version_list`(`release_date`, `parse_time`) VALUES (%s, %s)", + ("2014-10-22", "2048-12-08 15:22:51"), + ) + self.cur.execute( + 'INSERT INTO `norostat_raw_datatable_location_pool`(`location`) VALUES ("Minnesota, Ohio, Oregon, Tennessee, and Wisconsin")' + ) + self.cnx.commit() + response = self.epidata_client.meta_norostat(auth="norostat_key") + self.assertEqual( + response, + { + "epidata": { + "locations": [{"location": "Minnesota, Ohio, Oregon, Tennessee, and Wisconsin"}], + "releases": [{"release_date": "2014-10-22"}], + }, + "message": "success", + "result": 1, + }, + ) diff --git a/integrations/server/test_nowcast.py b/integrations/server/test_nowcast.py new file mode 100644 index 000000000..38897f0ae --- /dev/null +++ b/integrations/server/test_nowcast.py @@ -0,0 +1,26 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class NowcastTest(DelphiTestBase): + """Basic integration tests for nowcast endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["nowcasts"] + + def test_nowcast(self): + """Basic integration test for nowcast endpoint""" + self.cur.execute( + "INSERT INTO `nowcasts`(`epiweek`, `location`, `value`, `std`) VALUES(%s, %s, %s, %s)", + ("201145", "nat", "12345", "0.01234"), + ) + self.cnx.commit() + response = self.epidata_client.nowcast(locations="nat", epiweeks="201145") + self.assertEqual( + response, + { + "epidata": [{"location": "nat", "epiweek": 201145, "value": 12345.0, "std": 0.01234}], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_paho_dengue.py b/integrations/server/test_paho_dengue.py new file mode 100644 index 000000000..911986897 --- /dev/null +++ b/integrations/server/test_paho_dengue.py @@ -0,0 +1,40 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class PahoDengueTest(DelphiTestBase): + """Basic integration tests for paho_dengue endpint.""" + + def localSetUp(self): + 
self.truncate_tables_list = ["paho_dengue"] + + def test_paho_dengue(self): + """Basic integration test for paho_dengue endpoint""" + self.cur.execute( + "INSERT INTO `paho_dengue`(`release_date`, `issue`, `epiweek`, `lag`, `region`, `total_pop`, `serotype`, `num_dengue`, `incidence_rate`, `num_severe`, `num_deaths`) VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)", + ("2018-12-01", "201848", "201454", "204", "AG", "91", "DEN 1,4", "37", "40.66", "0", "0"), + ) + self.cnx.commit() + response = self.epidata_client.paho_dengue(regions="AG", epiweeks="201454") + self.assertEqual( + response, + { + "epidata": [ + { + "release_date": "2018-12-01", + "region": "AG", + "serotype": "DEN 1,4", + "issue": 201848, + "epiweek": 201454, + "lag": 204, + "total_pop": 91, + "num_dengue": 37, + "num_severe": 0, + "num_deaths": 0, + "incidence_rate": 40.66, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_quidel.py b/integrations/server/test_quidel.py new file mode 100644 index 000000000..30902eeae --- /dev/null +++ b/integrations/server/test_quidel.py @@ -0,0 +1,23 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class QuidelTest(DelphiTestBase): + """Basic integration tests for quidel endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["quidel"] + self.role_name = "quidel" + + def test_quidel(self): + """Basic integration test for quidel endpoint""" + self.cur.execute( + "INSERT INTO `quidel`(`location`, `epiweek`, `value`, `num_rows`, `num_devices`) VALUES(%s, %s, %s, %s, %s)", + ("loc1", "201111", "1", "0", "0"), + ) + self.cnx.commit() + response = self.epidata_client.quidel(locations="loc1", epiweeks="201111", auth="quidel_key") + self.assertEqual( + response, + {"epidata": [{"location": "loc1", "epiweek": 201111, "value": 1.0}], "result": 1, "message": "success"}, + ) diff --git a/integrations/server/test_sensors.py b/integrations/server/test_sensors.py new file mode 
100644 index 000000000..c5afa329e --- /dev/null +++ b/integrations/server/test_sensors.py @@ -0,0 +1,27 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class SensorsTest(DelphiTestBase): + """Basic integration tests for sensors endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["sensors"] + self.role_name = "sensors" + + def test_sensors(self): + """Basic integration test for sensors endpoint""" + self.cur.execute( + "INSERT INTO `sensors`(`name`, `epiweek`, `location`, `value`) VALUES(%s, %s, %s, %s)", + ("sens1", "201111", "loc1", "222"), + ) + self.cnx.commit() + response = self.epidata_client.sensors(names="sens1", locations="loc1", epiweeks="201111", auth="sensors_key") + self.assertEqual( + response, + { + "epidata": [{"name": "sens1", "location": "loc1", "epiweek": 201111, "value": 222.0}], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_signal_dashboard.py b/integrations/server/test_signal_dashboard.py new file mode 100644 index 000000000..18627601d --- /dev/null +++ b/integrations/server/test_signal_dashboard.py @@ -0,0 +1,59 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class SignalDashboardTest(DelphiTestBase): + """Basic integration tests for signal_dashboard_coverage and signal_dashboard_status endpints.""" + + # NOTE: In all other tests localSetUp() method was used. But it is not applicable for this test + # due to order of commands, so thats why method reload + calling super was required. 
+ def setUp(self) -> None: + """Perform per-test setup.""" + + self.delete_from_tables_list = ["dashboard_signal_coverage", "dashboard_signal"] + super().setUp() + + self.cur.execute( + "INSERT INTO `dashboard_signal`(`id`, `name`, `source`, `covidcast_signal`, `enabled`, `latest_coverage_update`, `latest_status_update`) VALUES(%s, %s, %s, %s, %s, %s, %s)", + ("1", "Change", "chng", "smoothed_outpatient_covid", "1", "2021-10-02", "2021-11-27"), + ) + self.cur.execute( + "INSERT INTO `dashboard_signal_coverage`(`signal_id`, `date`, `geo_type`, `count`) VALUES(%s, %s, %s, %s)", + ("1", "2021-10-02", "county", "2222"), + ) + + self.cnx.commit() + + def test_signal_dashboard_coverage(self): + """Basic integration test for signal_dashboard_coverage endpoint""" + response = self.epidata_client._request(endpoint="signal_dashboard_coverage", params={}) + self.assertEqual( + response, + { + "epidata": {"Change": {"county": [{"count": 2222, "date": "2021-10-02"}]}}, + "message": "success", + "result": 1, + }, + ) + + def test_signal_dashboard_status(self): + """Basic integration test for signal_dashboard_status endpoint""" + + response = self.epidata_client._request(endpoint="signal_dashboard_status", params={}) + self.assertEqual( + response, + { + "epidata": [ + { + "name": "Change", + "source": "chng", + "covidcast_signal": "smoothed_outpatient_covid", + "latest_issue": None, + "latest_time_value": None, + "coverage": {"county": [{"date": "2021-10-02", "count": 2222}]}, + } + ], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_twitter.py b/integrations/server/test_twitter.py new file mode 100644 index 000000000..18891c009 --- /dev/null +++ b/integrations/server/test_twitter.py @@ -0,0 +1,27 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class TwitterTest(DelphiTestBase): + """Basic integration tests for twitter endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["twitter"] + 
self.role_name = "twitter" + + def test_twitter(self): + """Basic integration test for twitter endpoint""" + + self.cur.execute( + 'INSERT INTO `twitter`(`date`, `state`, `num`, `total`) VALUES ("2015-07-29", "AK", "1", "223"), ("2020-07-29", "CT", "12", "778")', + ) + self.cnx.commit() + response = self.epidata_client.twitter(auth="twitter_key", locations="cen9", dates="20150701-20160101") + self.assertEqual( + response, + { + "epidata": [{"location": "cen9", "date": "2015-07-29", "num": 1, "total": 223, "percent": 0.4484}], + "result": 1, + "message": "success", + }, + ) diff --git a/integrations/server/test_wiki.py b/integrations/server/test_wiki.py new file mode 100644 index 000000000..21638b43e --- /dev/null +++ b/integrations/server/test_wiki.py @@ -0,0 +1,31 @@ +# first party +from delphi.epidata.common.delphi_test_base import DelphiTestBase + + +class WikiTest(DelphiTestBase): + """Basic integration tests for wiki endpint.""" + + def localSetUp(self): + self.truncate_tables_list = ["wiki", "wiki_meta"] + + def test_wiki(self): + """Basic integration test for wiki endpoint""" + + self.cur.execute( + 'INSERT INTO `wiki`(`datetime`, `article`, `count`, `language`) VALUES ("2007-12-09 18:00:00", "amantadine", "3", "en"), ("2008-12-09 18:00:00", "test", "5", "en")', + ) + self.cur.execute( + 'INSERT INTO `wiki_meta`(`datetime`, `date`, `epiweek`, `total`, `language`) VALUES ("2007-12-09 18:00:00", "2007-12-09", "200750", "969214", "en"), ("2008-12-09 18:00:00", "2008-12-09", "200750", "123321", "en")' + ) + self.cnx.commit() + response = self.epidata_client.wiki(articles="test", epiweeks="200701-200801") + self.assertEqual( + response, + { + "epidata": [ + {"article": "test", "count": 5, "total": 123321, "hour": -1, "epiweek": 200750, "value": 40.544595} + ], + "result": 1, + "message": "success", + }, + ) diff --git a/src/client/delphi_epidata.py b/src/client/delphi_epidata.py index fe8dbe51d..e773bdc50 100644 --- a/src/client/delphi_epidata.py +++ 
b/src/client/delphi_epidata.py @@ -44,7 +44,7 @@ class Epidata: """An interface to DELPHI's Epidata API.""" # API base url - BASE_URL = "https://api.delphi.cmu.edu/epidata/api.php" + BASE_URL = "https://api.delphi.cmu.edu/epidata" auth = None client_version = _version @@ -68,17 +68,18 @@ def _list(values): @staticmethod @retry(reraise=True, stop=stop_after_attempt(2)) - def _request_with_retry(params): + def _request_with_retry(endpoint, params={}): """Make request with a retry if an exception is thrown.""" - req = requests.get(Epidata.BASE_URL, params, auth=Epidata.auth, headers=_HEADERS) + request_url = f"{Epidata.BASE_URL}/{endpoint}" + req = requests.get(request_url, params, auth=Epidata.auth, headers=_HEADERS) if req.status_code == 414: - req = requests.post(Epidata.BASE_URL, params, auth=Epidata.auth, headers=_HEADERS) + req = requests.post(request_url, params, auth=Epidata.auth, headers=_HEADERS) # handle 401 and 429 req.raise_for_status() return req @staticmethod - def _request(params): + def _request(endpoint, params={}): """Request and parse epidata. We default to GET since it has better caching and logging @@ -86,7 +87,7 @@ def _request(params): long and returns a 414. 
""" try: - result = Epidata._request_with_retry(params) + result = Epidata._request_with_retry(endpoint, params) except Exception as e: return {"result": 0, "message": "error: " + str(e)} if params is not None and "format" in params and params["format"] == "csv": @@ -125,7 +126,6 @@ def fluview(regions, epiweeks, issues=None, lag=None, auth=None): raise EpidataBadRequestException(ISSUES_LAG_EXCLUSIVE) # Set up request params = { - "endpoint": "fluview", "regions": Epidata._list(regions), "epiweeks": Epidata._list(epiweeks), } @@ -136,18 +136,13 @@ def fluview(regions, epiweeks, issues=None, lag=None, auth=None): if auth is not None: params["auth"] = auth # Make the API call - return Epidata._request(params) + return Epidata._request("fluview", params) # Fetch FluView metadata @staticmethod def fluview_meta(): """Fetch FluView metadata.""" - # Set up request - params = { - "endpoint": "fluview_meta", - } - # Make the API call - return Epidata._request(params) + return Epidata._request("fluview_meta") # Fetch FluView clinical data @staticmethod @@ -160,7 +155,6 @@ def fluview_clinical(regions, epiweeks, issues=None, lag=None): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "fluview_clinical", "regions": Epidata._list(regions), "epiweeks": Epidata._list(epiweeks), } @@ -169,7 +163,7 @@ def fluview_clinical(regions, epiweeks, issues=None, lag=None): if lag is not None: params["lag"] = lag # Make the API call - return Epidata._request(params) + return Epidata._request("fluview_clinical", params) # Fetch FluSurv data @staticmethod @@ -182,7 +176,6 @@ def flusurv(locations, epiweeks, issues=None, lag=None): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "flusurv", "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), } @@ -191,7 +184,7 @@ def flusurv(locations, epiweeks, issues=None, lag=None): if lag is not None: params["lag"] = lag # Make 
the API call - return Epidata._request(params) + return Epidata._request("flusurv", params) # Fetch PAHO Dengue data @staticmethod @@ -204,7 +197,6 @@ def paho_dengue(regions, epiweeks, issues=None, lag=None): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "paho_dengue", "regions": Epidata._list(regions), "epiweeks": Epidata._list(epiweeks), } @@ -213,7 +205,7 @@ def paho_dengue(regions, epiweeks, issues=None, lag=None): if lag is not None: params["lag"] = lag # Make the API call - return Epidata._request(params) + return Epidata._request("paho_dengue", params) # Fetch ECDC ILI data @staticmethod @@ -226,7 +218,6 @@ def ecdc_ili(regions, epiweeks, issues=None, lag=None): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "ecdc_ili", "regions": Epidata._list(regions), "epiweeks": Epidata._list(epiweeks), } @@ -235,7 +226,7 @@ def ecdc_ili(regions, epiweeks, issues=None, lag=None): if lag is not None: params["lag"] = lag # Make the API call - return Epidata._request(params) + return Epidata._request("ecdc_ili", params) # Fetch KCDC ILI data @staticmethod @@ -248,7 +239,6 @@ def kcdc_ili(regions, epiweeks, issues=None, lag=None): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "kcdc_ili", "regions": Epidata._list(regions), "epiweeks": Epidata._list(epiweeks), } @@ -257,7 +247,7 @@ def kcdc_ili(regions, epiweeks, issues=None, lag=None): if lag is not None: params["lag"] = lag # Make the API call - return Epidata._request(params) + return Epidata._request("kcdc_ili", params) # Fetch Google Flu Trends data @staticmethod @@ -268,12 +258,11 @@ def gft(locations, epiweeks): raise EpidataBadRequestException(LOCATIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "gft", "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), } # Make the API call - return Epidata._request(params) + 
return Epidata._request("gft", params) # Fetch Google Health Trends data @staticmethod @@ -286,14 +275,13 @@ def ght(auth, locations, epiweeks, query): ) # Set up request params = { - "endpoint": "ght", "auth": auth, "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), "query": query, } # Make the API call - return Epidata._request(params) + return Epidata._request("ght", params) # Fetch HealthTweets data @staticmethod @@ -306,7 +294,6 @@ def twitter(auth, locations, dates=None, epiweeks=None): raise EpidataBadRequestException("exactly one of `dates` and `epiweeks` is required") # Set up request params = { - "endpoint": "twitter", "auth": auth, "locations": Epidata._list(locations), } @@ -315,7 +302,7 @@ def twitter(auth, locations, dates=None, epiweeks=None): if epiweeks is not None: params["epiweeks"] = Epidata._list(epiweeks) # Make the API call - return Epidata._request(params) + return Epidata._request("twitter", params) # Fetch Wikipedia access data @staticmethod @@ -328,7 +315,6 @@ def wiki(articles, dates=None, epiweeks=None, hours=None, language="en"): raise EpidataBadRequestException("exactly one of `dates` and `epiweeks` is required") # Set up request params = { - "endpoint": "wiki", "articles": Epidata._list(articles), "language": language, } @@ -339,7 +325,7 @@ def wiki(articles, dates=None, epiweeks=None, hours=None, language="en"): if hours is not None: params["hours"] = Epidata._list(hours) # Make the API call - return Epidata._request(params) + return Epidata._request("wiki", params) # Fetch CDC page hits @staticmethod @@ -350,13 +336,12 @@ def cdc(auth, epiweeks, locations): raise EpidataBadRequestException("`auth`, `epiweeks`, and `locations` are all required") # Set up request params = { - "endpoint": "cdc", "auth": auth, "epiweeks": Epidata._list(epiweeks), "locations": Epidata._list(locations), } # Make the API call - return Epidata._request(params) + return Epidata._request("cdc", params) # Fetch Quidel data 
@staticmethod @@ -367,13 +352,12 @@ def quidel(auth, epiweeks, locations): raise EpidataBadRequestException("`auth`, `epiweeks`, and `locations` are all required") # Set up request params = { - "endpoint": "quidel", "auth": auth, "epiweeks": Epidata._list(epiweeks), "locations": Epidata._list(locations), } # Make the API call - return Epidata._request(params) + return Epidata._request("quidel", params) # Fetch NoroSTAT data (point data, no min/max) @staticmethod @@ -384,13 +368,12 @@ def norostat(auth, location, epiweeks): raise EpidataBadRequestException("`auth`, `location`, and `epiweeks` are all required") # Set up request params = { - "endpoint": "norostat", "auth": auth, "location": location, "epiweeks": Epidata._list(epiweeks), } # Make the API call - return Epidata._request(params) + return Epidata._request("norostat", params) # Fetch NoroSTAT metadata @staticmethod @@ -401,11 +384,10 @@ def meta_norostat(auth): raise EpidataBadRequestException("`auth` is required") # Set up request params = { - "endpoint": "meta_norostat", "auth": auth, } # Make the API call - return Epidata._request(params) + return Epidata._request("meta_norostat", params) # Fetch NIDSS flu data @staticmethod @@ -418,7 +400,6 @@ def nidss_flu(regions, epiweeks, issues=None, lag=None): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "nidss_flu", "regions": Epidata._list(regions), "epiweeks": Epidata._list(epiweeks), } @@ -427,7 +408,7 @@ def nidss_flu(regions, epiweeks, issues=None, lag=None): if lag is not None: params["lag"] = lag # Make the API call - return Epidata._request(params) + return Epidata._request("nidss_flu", params) # Fetch NIDSS dengue data @staticmethod @@ -438,12 +419,11 @@ def nidss_dengue(locations, epiweeks): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "nidss_dengue", "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), } # Make the 
API call - return Epidata._request(params) + return Epidata._request("nidss_dengue", params) # Fetch Delphi's forecast @staticmethod @@ -454,12 +434,11 @@ def delphi(system, epiweek): raise EpidataBadRequestException("`system` and `epiweek` are both required") # Set up request params = { - "endpoint": "delphi", "system": system, "epiweek": epiweek, } # Make the API call - return Epidata._request(params) + return Epidata._request("delphi", params) # Fetch Delphi's digital surveillance sensors @staticmethod @@ -472,7 +451,6 @@ def sensors(auth, names, locations, epiweeks): ) # Set up request params = { - "endpoint": "sensors", "names": Epidata._list(names), "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), @@ -480,7 +458,7 @@ def sensors(auth, names, locations, epiweeks): if auth is not None: params["auth"] = auth # Make the API call - return Epidata._request(params) + return Epidata._request("sensors", params) # Fetch Delphi's dengue digital surveillance sensors @staticmethod @@ -493,14 +471,13 @@ def dengue_sensors(auth, names, locations, epiweeks): ) # Set up request params = { - "endpoint": "dengue_sensors", "auth": auth, "names": Epidata._list(names), "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), } # Make the API call - return Epidata._request(params) + return Epidata._request("dengue_sensors", params) # Fetch Delphi's wILI nowcast @staticmethod @@ -511,12 +488,11 @@ def nowcast(locations, epiweeks): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "nowcast", "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), } # Make the API call - return Epidata._request(params) + return Epidata._request("nowcast", params) # Fetch Delphi's dengue nowcast @staticmethod @@ -527,18 +503,17 @@ def dengue_nowcast(locations, epiweeks): raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": 
"dengue_nowcast", "locations": Epidata._list(locations), "epiweeks": Epidata._list(epiweeks), } # Make the API call - return Epidata._request(params) + return Epidata._request("dengue_nowcast", params) # Fetch API metadata @staticmethod def meta(): """Fetch API metadata.""" - return Epidata._request({"endpoint": "meta"}) + return Epidata._request("meta") # Fetch Delphi's COVID-19 Surveillance Streams @staticmethod @@ -568,7 +543,6 @@ def covidcast( raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "endpoint": "covidcast", "data_source": data_source, "signals": Epidata._list(signals), "time_type": time_type, @@ -594,13 +568,13 @@ def covidcast( params["fields"] = kwargs["fields"] # Make the API call - return Epidata._request(params) + return Epidata._request("covidcast", params) # Fetch Delphi's COVID-19 Surveillance Streams metadata @staticmethod def covidcast_meta(): """Fetch Delphi's COVID-19 Surveillance Streams metadata""" - return Epidata._request({"endpoint": "covidcast_meta"}) + return Epidata._request("covidcast_meta") # Fetch COVID hospitalization data @staticmethod @@ -611,7 +585,6 @@ def covid_hosp(states, dates, issues=None, as_of=None): raise EpidataBadRequestException("`states` and `dates` are both required") # Set up request params = { - "endpoint": "covid_hosp", "states": Epidata._list(states), "dates": Epidata._list(dates), } @@ -620,7 +593,7 @@ def covid_hosp(states, dates, issues=None, as_of=None): if as_of is not None: params["as_of"] = as_of # Make the API call - return Epidata._request(params) + return Epidata._request("covid_hosp_state_timeseries", params) # Fetch COVID hospitalization data for specific facilities @staticmethod @@ -633,21 +606,20 @@ def covid_hosp_facility(hospital_pks, collection_weeks, publication_dates=None): ) # Set up request params = { - "source": "covid_hosp_facility", "hospital_pks": Epidata._list(hospital_pks), "collection_weeks": Epidata._list(collection_weeks), } if 
publication_dates is not None: params["publication_dates"] = Epidata._list(publication_dates) # Make the API call - return Epidata._request(params) + return Epidata._request("covid_hosp_facility", params) # Lookup COVID hospitalization facility identifiers @staticmethod def covid_hosp_facility_lookup(state=None, ccn=None, city=None, zip=None, fips_code=None): """Lookup COVID hospitalization facility identifiers.""" # Set up request - params = {"source": "covid_hosp_facility_lookup"} + params = {} if state is not None: params["state"] = state elif ccn is not None: @@ -663,7 +635,7 @@ def covid_hosp_facility_lookup(state=None, ccn=None, city=None, zip=None, fips_c "one of `state`, `ccn`, `city`, `zip`, or `fips_code` is required" ) # Make the API call - return Epidata._request(params) + return Epidata._request("covid_hosp_facility_lookup", params) # Fetch Delphi's COVID-19 Nowcast sensors @staticmethod @@ -693,7 +665,6 @@ def covidcast_nowcast( raise EpidataBadRequestException(REGIONS_EPIWEEKS_REQUIRED) # Set up request params = { - "source": "covidcast_nowcast", "data_source": data_source, "signals": Epidata._list(signals), "sensor_names": Epidata._list(sensor_names), @@ -717,15 +688,17 @@ def covidcast_nowcast( params["format"] = kwargs["format"] # Make the API call - return Epidata._request(params) + return Epidata._request("covidcast_nowcast", params) @staticmethod - def async_epidata(param_list, batch_size=50): + def async_epidata(endpoint, param_list, batch_size=50): """Make asynchronous Epidata calls for a list of parameters.""" + request_url = f"{Epidata.BASE_URL}/{endpoint}" + async def async_get(params, session): """Helper function to make Epidata GET requests.""" - async with session.get(Epidata.BASE_URL, params=params) as response: + async with session.get(request_url, params=params) as response: response.raise_for_status() return await response.json(), params diff --git a/src/acquisition/covidcast/test_utils.py b/src/common/covidcast_test_base.py 
similarity index 60% rename from src/acquisition/covidcast/test_utils.py rename to src/common/covidcast_test_base.py index 5a978f8cd..1ab58a911 100644 --- a/src/acquisition/covidcast/test_utils.py +++ b/src/common/covidcast_test_base.py @@ -1,17 +1,15 @@ from dataclasses import fields from datetime import date from typing import Any, Dict, Iterable, List, Optional, Sequence -import unittest import pandas as pd -from redis import Redis - -from delphi_utils import Nans -from delphi.epidata.common.covidcast_row import CovidcastRow from delphi.epidata.acquisition.covidcast.database import Database -from delphi.epidata.server._config import REDIS_HOST, REDIS_PASSWORD +from delphi.epidata.common.covidcast_row import CovidcastRow from delphi.epidata.server.utils.dates import day_to_time_value, time_value_to_day import delphi.operations.secrets as secrets +from delphi_utils import Nans + +from delphi.epidata.common.delphi_test_base import DelphiTestBase # all the Nans we use here are just one value, so this is a shortcut to it: nmv = Nans.NOT_MISSING.value @@ -19,8 +17,14 @@ # TODO replace these real geo_values with fake values, and use patch and mock to mock the return values of # delphi_utils.geomap.GeoMapper().get_geo_values(geo_type) in parse_geo_sets() of _params.py -FIPS = ['04019', '19143', '29063', '36083'] # Example list of valid FIPS codes as strings -MSA = ['40660', '44180', '48620', '49420'] # Example list of valid MSA codes as strings +FIPS = [ + "04019", + "19143", + "29063", + "36083", +] # Example list of valid FIPS codes as strings +MSA = ["40660", "44180", "48620", "49420"] # Example list of valid MSA codes as strings + class CovidcastTestRow(CovidcastRow): @staticmethod @@ -51,31 +55,89 @@ def __post_init__(self): if isinstance(self.issue, date): self.issue = day_to_time_value(self.issue) if isinstance(self.value_updated_timestamp, date): - self.value_updated_timestamp = day_to_time_value(self.value_updated_timestamp) + self.value_updated_timestamp = 
day_to_time_value( + self.value_updated_timestamp + ) def _sanitize_fields(self, extra_checks: bool = True): if self.issue and self.issue < self.time_value: self.issue = self.time_value if self.issue: - self.lag = (time_value_to_day(self.issue) - time_value_to_day(self.time_value)).days + self.lag = ( + time_value_to_day(self.issue) - time_value_to_day(self.time_value) + ).days else: self.lag = None # This sanity checking is already done in CsvImporter, but it's here so the testing class gets it too. if pd.isna(self.value) and self.missing_value == Nans.NOT_MISSING: - self.missing_value = Nans.NOT_APPLICABLE.value if extra_checks else Nans.OTHER.value + self.missing_value = ( + Nans.NOT_APPLICABLE.value if extra_checks else Nans.OTHER.value + ) if pd.isna(self.stderr) and self.missing_stderr == Nans.NOT_MISSING: - self.missing_stderr = Nans.NOT_APPLICABLE.value if extra_checks else Nans.OTHER.value + self.missing_stderr = ( + Nans.NOT_APPLICABLE.value if extra_checks else Nans.OTHER.value + ) if pd.isna(self.sample_size) and self.missing_sample_size == Nans.NOT_MISSING: - self.missing_sample_size = Nans.NOT_APPLICABLE.value if extra_checks else Nans.OTHER.value + self.missing_sample_size = ( + Nans.NOT_APPLICABLE.value if extra_checks else Nans.OTHER.value + ) return self -def covidcast_rows_from_args(sanitize_fields: bool = False, test_mode: bool = True, **kwargs: Dict[str, Iterable]) -> List[CovidcastTestRow]: +class CovidcastTestBase(DelphiTestBase): + def setUp(self): + # use the local test instance of the database + secrets.db.host = 'delphi_database_epidata' + secrets.db.epi = ('user', 'pass') + + self._db = Database() + self._db.connect() + + # empty all of the data tables + for ( + table + ) in "epimetric_load epimetric_latest epimetric_full geo_dim signal_dim".split(): + self._db._cursor.execute(f"TRUNCATE TABLE {table};") + + # reset the `covidcast_meta_cache` table (it should always have one row) + self._db._cursor.execute( + 'update covidcast_meta_cache 
set timestamp = 0, epidata = "[]"' + ) + self._db._connection.commit() + super().setUp() + + def localTearDown(self): + self._db.disconnect(False) + del self._db + + def _insert_rows(self, rows: Sequence[CovidcastTestRow]): + # inserts rows into the database using the full acquisition process, including 'dbjobs' load into history & latest tables + n = self._db.insert_or_update_bulk(rows) + print(f"{n} rows added to load table & dispatched to v4 schema") + # NOTE: this isnt expressly needed for our test cases, but would be if using external access (like through client lib) to ensure changes are visible outside of this db session + self._db._connection.commit() + + def params_from_row(self, row: CovidcastTestRow, **kwargs): + ret = { + "data_source": row.source, + "signals": row.signal, + "time_type": row.time_type, + "geo_type": row.geo_type, + "time_values": row.time_value, + "geo_value": row.geo_value, + } + ret.update(kwargs) + return ret + + +def covidcast_rows_from_args( + sanitize_fields: bool = False, test_mode: bool = True, **kwargs: Dict[str, Iterable] +) -> List[CovidcastTestRow]: """A convenience constructor for test rows. 
Example: @@ -89,12 +151,22 @@ def covidcast_rows_from_args(sanitize_fields: bool = False, test_mode: bool = Tr assert len(set(len(lst) for lst in kwargs.values())) == 1 if sanitize_fields: - return [CovidcastTestRow.make_default_row(**_kwargs)._sanitize_fields(extra_checks=test_mode) for _kwargs in transpose_dict(kwargs)] + return [ + CovidcastTestRow.make_default_row(**_kwargs)._sanitize_fields( + extra_checks=test_mode + ) + for _kwargs in transpose_dict(kwargs) + ] else: - return [CovidcastTestRow.make_default_row(**_kwargs) for _kwargs in transpose_dict(kwargs)] + return [ + CovidcastTestRow.make_default_row(**_kwargs) + for _kwargs in transpose_dict(kwargs) + ] -def covidcast_rows_from_records(records: Iterable[dict], sanity_check: bool = False) -> List[CovidcastTestRow]: +def covidcast_rows_from_records( + records: Iterable[dict], sanity_check: bool = False +) -> List[CovidcastTestRow]: """A convenience constructor. Default is different from from_args, because from_records is usually called on faux-API returns in tests, @@ -103,36 +175,60 @@ def covidcast_rows_from_records(records: Iterable[dict], sanity_check: bool = Fa You can use csv.DictReader before this to read a CSV file. 
""" records = list(records) - return [CovidcastTestRow.make_default_row(**record) if not sanity_check else CovidcastTestRow.make_default_row(**record)._sanitize_fields() for record in records] + return [ + CovidcastTestRow.make_default_row(**record) + if not sanity_check + else CovidcastTestRow.make_default_row(**record)._sanitize_fields() + for record in records + ] -def covidcast_rows_as_dicts(rows: Iterable[CovidcastTestRow], ignore_fields: Optional[List[str]] = None) -> List[dict]: +def covidcast_rows_as_dicts( + rows: Iterable[CovidcastTestRow], ignore_fields: Optional[List[str]] = None +) -> List[dict]: return [row.as_dict(ignore_fields=ignore_fields) for row in rows] -def covidcast_rows_as_dataframe(rows: Iterable[CovidcastTestRow], ignore_fields: Optional[List[str]] = None) -> pd.DataFrame: +def covidcast_rows_as_dataframe( + rows: Iterable[CovidcastTestRow], ignore_fields: Optional[List[str]] = None +) -> pd.DataFrame: if ignore_fields is None: ignore_fields = [] - columns = [field.name for field in fields(CovidcastTestRow) if field.name not in ignore_fields] + columns = [ + field.name + for field in fields(CovidcastTestRow) + if field.name not in ignore_fields + ] if rows: - df = pd.concat([row.as_dataframe(ignore_fields=ignore_fields) for row in rows], ignore_index=True) + df = pd.concat( + [row.as_dataframe(ignore_fields=ignore_fields) for row in rows], + ignore_index=True, + ) return df[columns] else: return pd.DataFrame(columns=columns) def covidcast_rows_as_api_row_df(rows: Iterable[CovidcastTestRow]) -> pd.DataFrame: - return covidcast_rows_as_dataframe(rows, ignore_fields=CovidcastTestRow._api_row_ignore_fields) + return covidcast_rows_as_dataframe( + rows, ignore_fields=CovidcastTestRow._api_row_ignore_fields + ) -def covidcast_rows_as_api_compatibility_row_df(rows: Iterable[CovidcastTestRow]) -> pd.DataFrame: - return covidcast_rows_as_dataframe(rows, ignore_fields=CovidcastTestRow._api_row_compatibility_ignore_fields) +def 
covidcast_rows_as_api_compatibility_row_df( + rows: Iterable[CovidcastTestRow], +) -> pd.DataFrame: + return covidcast_rows_as_dataframe( + rows, ignore_fields=CovidcastTestRow._api_row_compatibility_ignore_fields + ) def covidcast_rows_as_db_row_df(rows: Iterable[CovidcastTestRow]) -> pd.DataFrame: - return covidcast_rows_as_dataframe(rows, ignore_fields=CovidcastTestRow._db_row_ignore_fields) + return covidcast_rows_as_dataframe( + rows, ignore_fields=CovidcastTestRow._db_row_ignore_fields + ) def transpose_dict(d: Dict[Any, List[Any]]) -> List[Dict[Any, Any]]: @@ -145,65 +241,11 @@ def transpose_dict(d: Dict[Any, List[Any]]) -> List[Dict[Any, Any]]: return [dict(zip(d.keys(), values)) for values in zip(*d.values())] -def assert_frame_equal_no_order(df1: pd.DataFrame, df2: pd.DataFrame, index: List[str], **kwargs: Any) -> None: +def assert_frame_equal_no_order( + df1: pd.DataFrame, df2: pd.DataFrame, index: List[str], **kwargs: Any +) -> None: """Assert that two DataFrames are equal, ignoring the order of rows.""" # Remove any existing index. If it wasn't named, drop it. Set a new index and sort it. 
df1 = df1.reset_index().drop(columns="index").set_index(index).sort_index() df2 = df2.reset_index().drop(columns="index").set_index(index).sort_index() pd.testing.assert_frame_equal(df1, df2, **kwargs) - - -class CovidcastBase(unittest.TestCase): - def setUp(self): - # use the local test instance of the database - secrets.db.host = 'delphi_database_epidata' - secrets.db.epi = ('user', 'pass') - - self._db = Database() - self._db.connect() - - # empty all of the data tables - for table in "epimetric_load epimetric_latest epimetric_full geo_dim signal_dim".split(): - self._db._cursor.execute(f"TRUNCATE TABLE {table};") - self.localSetUp() - self._db._connection.commit() - - # clear all rate-limiting info from redis - r = Redis(host=REDIS_HOST, password=REDIS_PASSWORD) - for k in r.keys("LIMITER/*"): - r.delete(k) - - - def tearDown(self): - # close and destroy conenction to the database - self.localTearDown() - self._db.disconnect(False) - del self._db - - def localSetUp(self): - # stub; override in subclasses to perform custom setup. - # runs after tables have been truncated but before database changes have been committed - pass - - def localTearDown(self): - # stub; override in subclasses to perform custom teardown. 
- # runs after database changes have been committed - pass - - def _insert_rows(self, rows: Sequence[CovidcastTestRow]): - # inserts rows into the database using the full acquisition process, including 'dbjobs' load into history & latest tables - n = self._db.insert_or_update_bulk(rows) - print(f"{n} rows added to load table & dispatched to v4 schema") - self._db._connection.commit() # NOTE: this isnt expressly needed for our test cases, but would be if using external access (like through client lib) to ensure changes are visible outside of this db session - - def params_from_row(self, row: CovidcastTestRow, **kwargs): - ret = { - 'data_source': row.source, - 'signals': row.signal, - 'time_type': row.time_type, - 'geo_type': row.geo_type, - 'time_values': row.time_value, - 'geo_value': row.geo_value, - } - ret.update(kwargs) - return ret diff --git a/src/common/delphi_test_base.py b/src/common/delphi_test_base.py new file mode 100644 index 000000000..675f024a3 --- /dev/null +++ b/src/common/delphi_test_base.py @@ -0,0 +1,110 @@ +# standard library +import unittest + +# third party +import mysql.connector +import requests + +# first party +from delphi.epidata.client.delphi_epidata import Epidata +from delphi.epidata.server._limiter import limiter + + +class DelphiTestBase(unittest.TestCase): + """Basic integration test class""" + + def __init__(self, methodName: str = "runTest") -> None: + super().__init__(methodName) + self.delete_from_tables_list = [] + self.truncate_tables_list = [] + self.create_tables_list = [] + self.role_name = None + self.epidata_client = Epidata + self.epidata_client.BASE_URL = "http://delphi_web_epidata/epidata" + self.epidata_client.auth = ("epidata", "key") + + def create_key_with_role(self, cur, role_name: str): + cur.execute( + f'INSERT INTO `api_user`(`api_key`, `email`) VALUES("{role_name}_key", "{role_name}_email")' + ) + cur.execute(f'INSERT INTO `user_role`(`name`) VALUES("{role_name}")') + cur.execute( + f'INSERT INTO 
`user_role_link`(`user_id`, `role_id`) SELECT `api_user`.`id`, `user_role`.`id` FROM `api_user` JOIN `user_role` WHERE `api_user`.`api_key`="{role_name}_key" AND `user_role`.`name`="{role_name}"' + ) + + def _make_request( + self, + endpoint: str = "covidcast", + auth: tuple = None, + json: bool = False, + raise_for_status: bool = False, + params: dict = None, + ): + response = requests.get( + f"{self.epidata_client.BASE_URL}/{endpoint}", params=params, auth=auth + ) + if raise_for_status: + response.raise_for_status() + if json: + return response.json() + return response + + def setUp(self) -> None: + """Perform per-test setup.""" + + # connect to the `epidata` database + cnx = mysql.connector.connect( + user="user", + password="pass", + host="delphi_database_epidata", + database="epidata", + ) + cur = cnx.cursor() + + cur.execute("DELETE FROM `api_user`") + cur.execute("TRUNCATE TABLE `user_role`") + cur.execute("TRUNCATE TABLE `user_role_link`") + cur.execute( + 'INSERT INTO `api_user`(`api_key`, `email`) VALUES ("key", "email")' + ) + + self.localSetUp() + + for stmt in self.create_tables_list: + cur.execute(stmt) + + for table_name in self.delete_from_tables_list: + cur.execute(f"DELETE FROM `{table_name}`") + + for table_name in self.truncate_tables_list: + cur.execute(f"TRUNCATE TABLE `{table_name}`") + + if self.role_name: + self.create_key_with_role(cur, self.role_name) + + cnx.commit() + cur.close() + + self.cnx = cnx + self.cur = cnx.cursor() + + def localSetUp(self): + # stub; override in subclasses to perform custom setup. + # runs after user/api_key tables have been truncated, but before test-specific tables are created/deleted/truncated and before database changes have been committed + pass + + def localTearDown(self): + # stub; override in subclasses to perform custom teardown. 
+ # runs after database changes have been committed + pass + + @staticmethod + def _clear_limits() -> None: + limiter.storage.reset() + + def tearDown(self) -> None: + """Perform per-test teardown.""" + self.localTearDown() + self.cur.close() + self.cnx.close() + self._clear_limits() diff --git a/src/maintenance/signal_dash_data_generator.py b/src/maintenance/signal_dash_data_generator.py index 6eea06579..b7f1048f5 100644 --- a/src/maintenance/signal_dash_data_generator.py +++ b/src/maintenance/signal_dash_data_generator.py @@ -19,7 +19,7 @@ LOOKBACK_DAYS_FOR_COVERAGE = 56 -BASE_COVIDCAST = covidcast.covidcast.Epidata.BASE_URL[:-len("api.php")] + "covidcast" +BASE_COVIDCAST = covidcast.covidcast.Epidata.BASE_URL + "/covidcast" COVERAGE_URL = f"{BASE_COVIDCAST}/coverage?format=csv&signal={{source}}:{{signal}}&days={LOOKBACK_DAYS_FOR_COVERAGE}" @dataclass diff --git a/src/server/_config.py b/src/server/_config.py index 4a96250c6..cf743300c 100644 --- a/src/server/_config.py +++ b/src/server/_config.py @@ -93,8 +93,16 @@ REDIS_HOST = os.environ.get("REDIS_HOST", "delphi_redis") REDIS_PASSWORD = os.environ.get("REDIS_PASSWORD", "1234") +# mode to reduce number of required requests to hit rate limit while running tests, +# by default is set to False +TESTING_MODE = os.environ.get("TESTING_MODE", False) + # https://flask-limiter.readthedocs.io/en/stable/#rate-limit-string-notation RATE_LIMIT = os.environ.get("RATE_LIMIT", "60/hour") + +if TESTING_MODE is not False: + RATE_LIMIT = "5/hour" + # fixed-window, fixed-window-elastic-expiry, or moving-window # see also https://flask-limiter.readthedocs.io/en/stable/#rate-limiting-strategies RATELIMIT_STRATEGY = os.environ.get("RATELIMIT_STRATEGY", "fixed-window") diff --git a/tests/common/test_covidcast_row.py b/tests/common/test_covidcast_row.py index 273596ebb..77e2e441e 100644 --- a/tests/common/test_covidcast_row.py +++ b/tests/common/test_covidcast_row.py @@ -5,20 +5,21 @@ from delphi_utils.nancodes import Nans from 
delphi.epidata.common.covidcast_row import CovidcastRow, set_df_dtypes -from delphi.epidata.acquisition.covidcast.test_utils import ( +from delphi.epidata.common.covidcast_test_base import ( CovidcastTestRow, covidcast_rows_as_api_compatibility_row_df, covidcast_rows_as_api_row_df, covidcast_rows_from_args, transpose_dict, + MSA, + CovidcastTestBase ) -from delphi.epidata.acquisition.covidcast.test_utils import MSA # py3tester coverage target (equivalent to `import *`) __test_target__ = "delphi.epidata.common.covidcast_row" -class TestCovidcastRows(unittest.TestCase): +class TestCovidcastRows(CovidcastTestBase): expected_df = set_df_dtypes( DataFrame( { diff --git a/tests/server/test_params.py b/tests/server/test_params.py index 6d6de4fdc..cd7d701c2 100644 --- a/tests/server/test_params.py +++ b/tests/server/test_params.py @@ -28,7 +28,7 @@ from delphi.epidata.server._exceptions import ( ValidationFailedException, ) -from delphi.epidata.acquisition.covidcast.test_utils import FIPS, MSA +from delphi.epidata.common.covidcast_test_base import FIPS, MSA # py3tester coverage target __test_target__ = "delphi.epidata.server._params" diff --git a/tests/server/test_query.py b/tests/server/test_query.py index ec07d3e8b..d3d904fab 100644 --- a/tests/server/test_query.py +++ b/tests/server/test_query.py @@ -21,7 +21,7 @@ TimeSet, SourceSignalSet, ) -from delphi.epidata.acquisition.covidcast.test_utils import FIPS, MSA +from delphi.epidata.common.covidcast_test_base import FIPS, MSA # py3tester coverage target __test_target__ = "delphi.epidata.server._query"