diff --git a/tableauserverclient/server/endpoint/__init__.py b/tableauserverclient/server/endpoint/__init__.py
index c75fe8519..24881b2e4 100644
--- a/tableauserverclient/server/endpoint/__init__.py
+++ b/tableauserverclient/server/endpoint/__init__.py
@@ -4,6 +4,7 @@
 from .exceptions import ServerResponseError, MissingRequiredFieldError, ServerInfoEndpointNotFoundError
 from .groups_endpoint import Groups
 from .jobs_endpoint import Jobs
+from .metadata_endpoint import Metadata
 from .projects_endpoint import Projects
 from .schedules_endpoint import Schedules
 from .server_info_endpoint import ServerInfo
diff --git a/tableauserverclient/server/endpoint/exceptions.py b/tableauserverclient/server/endpoint/exceptions.py
index 080eca9c8..0648d8814 100644
--- a/tableauserverclient/server/endpoint/exceptions.py
+++ b/tableauserverclient/server/endpoint/exceptions.py
@@ -44,3 +44,12 @@ class EndpointUnavailableError(Exception):
 
 class ItemTypeNotAllowed(Exception):
     pass
+
+
+class GraphQLError(Exception):
+    def __init__(self, error_payload):
+        self.error = error_payload
+
+    def __str__(self):
+        from pprint import pformat
+        return pformat(self.error)
diff --git a/tableauserverclient/server/endpoint/metadata_endpoint.py b/tableauserverclient/server/endpoint/metadata_endpoint.py
new file mode 100644
index 000000000..82f91844c
--- /dev/null
+++ b/tableauserverclient/server/endpoint/metadata_endpoint.py
@@ -0,0 +1,32 @@
+from .endpoint import Endpoint, api
+from .exceptions import GraphQLError
+import logging
+import json
+
+logger = logging.getLogger('tableau.endpoint.metadata')
+
+
+class Metadata(Endpoint):
+    @property
+    def baseurl(self):
+        return "{0}/api/exp/metadata/graphql".format(self.parent_srv._server_address)
+
+    @api("3.2")
+    def query(self, query, abort_on_error=False):
+        logger.info('Querying Metadata API')
+        url = self.baseurl
+
+        try:
+            graphql_query = json.dumps({'query': query})
+        except Exception:
+            # Placeholder for now
+            raise Exception('Must provide a string')
+
+        # Setting content type because post_request defaults to text/xml
+        server_response = self.post_request(url, graphql_query, content_type='text/json')
+        results = server_response.json()
+
+        if abort_on_error and results.get('errors', None):
+            raise GraphQLError(results['errors'])
+
+        return results
diff --git a/tableauserverclient/server/server.py b/tableauserverclient/server/server.py
index 95ee564ee..536b3982a 100644
--- a/tableauserverclient/server/server.py
+++ b/tableauserverclient/server/server.py
@@ -3,8 +3,8 @@
 from .exceptions import NotSignedInError
 from ..namespace import Namespace
 from .endpoint import Sites, Views, Users, Groups, Workbooks, Datasources, Projects, Auth, \
-    Schedules, ServerInfo, Tasks, ServerInfoEndpointNotFoundError, Subscriptions, Jobs
+    Schedules, ServerInfo, Tasks, ServerInfoEndpointNotFoundError, Subscriptions, Jobs, Metadata
 from .endpoint.exceptions import EndpointUnavailableError
 
 import requests
 
@@ -50,6 +50,7 @@ def __init__(self, server_address, use_server_version=False):
         self.server_info = ServerInfo(self)
         self.tasks = Tasks(self)
         self.subscriptions = Subscriptions(self)
+        self.metadata = Metadata(self)
         self._namespace = Namespace()
 
         if use_server_version:
diff --git a/test/assets/metadata_query_error.json b/test/assets/metadata_query_error.json
new file mode 100644
index 000000000..1c575ee23
--- /dev/null
+++ b/test/assets/metadata_query_error.json
@@ -0,0 +1,29 @@
+{
+  "data": {
+    "publishedDatasources": [
+      {
+        "id": "01cf92b2-2d17-b656-fc48-5c25ef6d5352",
+        "name": "Batters (TestV1)"
+      },
+      {
+        "id": "020ae1cd-c356-f1ad-a846-b0094850d22a",
+        "name": "SharePoint_List_sharepoint2010.test.tsi.lan"
+      },
+      {
+        "id": "061493a0-c3b2-6f39-d08c-bc3f842b44af",
+        "name": "Batters_mongodb"
+      },
+      {
+        "id": "089fe515-ad2f-89bc-94bd-69f55f69a9c2",
+        "name": "Sample - Superstore"
+      }
+    ]
+  },
+  "errors": [
+    {
+      "message": "Reached time limit of PT5S for query execution.",
+      "path": null,
+      "extensions": null
+    }
+  ]
+}
\ No newline at end of file
diff --git a/test/assets/metadata_query_success.json b/test/assets/metadata_query_success.json
new file mode 100644
index 000000000..056f29fb6
--- /dev/null
+++ b/test/assets/metadata_query_success.json
@@ -0,0 +1,22 @@
+{
+  "data": {
+    "publishedDatasources": [
+      {
+        "id": "01cf92b2-2d17-b656-fc48-5c25ef6d5352",
+        "name": "Batters (TestV1)"
+      },
+      {
+        "id": "020ae1cd-c356-f1ad-a846-b0094850d22a",
+        "name": "SharePoint_List_sharepoint2010.test.tsi.lan"
+      },
+      {
+        "id": "061493a0-c3b2-6f39-d08c-bc3f842b44af",
+        "name": "Batters_mongodb"
+      },
+      {
+        "id": "089fe515-ad2f-89bc-94bd-69f55f69a9c2",
+        "name": "Sample - Superstore"
+      }
+    ]
+  }
+}
\ No newline at end of file
diff --git a/test/test_metadata.py b/test/test_metadata.py
new file mode 100644
index 000000000..e2a44734c
--- /dev/null
+++ b/test/test_metadata.py
@@ -0,0 +1,69 @@
+import unittest
+import os.path
+import json
+import requests_mock
+import tableauserverclient as TSC
+
+from tableauserverclient.server.endpoint.exceptions import GraphQLError
+
+TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets')
+
+METADATA_QUERY_SUCCESS = os.path.join(TEST_ASSET_DIR, 'metadata_query_success.json')
+METADATA_QUERY_ERROR = os.path.join(TEST_ASSET_DIR, 'metadata_query_error.json')
+
+EXPECTED_DICT = {'publishedDatasources':
+                 [{'id': '01cf92b2-2d17-b656-fc48-5c25ef6d5352', 'name': 'Batters (TestV1)'},
+                  {'id': '020ae1cd-c356-f1ad-a846-b0094850d22a', 'name': 'SharePoint_List_sharepoint2010.test.tsi.lan'},
+                  {'id': '061493a0-c3b2-6f39-d08c-bc3f842b44af', 'name': 'Batters_mongodb'},
+                  {'id': '089fe515-ad2f-89bc-94bd-69f55f69a9c2', 'name': 'Sample - Superstore'}]}
+
+EXPECTED_DICT_ERROR = [
+    {
+        "message": "Reached time limit of PT5S for query execution.",
+        "path": None,
+        "extensions": None
+    }
+]
+
+
+class MetadataTests(unittest.TestCase):
+    def setUp(self):
+        self.server = TSC.Server('http://test')
+        self.baseurl = self.server.metadata.baseurl
+        self.server.version = "3.2"
+
+        self.server._site_id = 'dad65087-b08b-4603-af4e-2887b8aafc67'
+        self.server._auth_token = 'j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM'
+
+    def test_metadata_query(self):
+        with open(METADATA_QUERY_SUCCESS, 'rb') as f:
+            response_json = json.loads(f.read().decode())
+        with requests_mock.mock() as m:
+            m.post(self.baseurl, json=response_json)
+            actual = self.server.metadata.query('fake query')
+
+        datasources = actual['data']
+
+        self.assertDictEqual(EXPECTED_DICT, datasources)
+
+    def test_metadata_query_ignore_error(self):
+        with open(METADATA_QUERY_ERROR, 'rb') as f:
+            response_json = json.loads(f.read().decode())
+        with requests_mock.mock() as m:
+            m.post(self.baseurl, json=response_json)
+            actual = self.server.metadata.query('fake query')
+            datasources = actual['data']
+
+        self.assertNotEqual(actual.get('errors', None), None)
+        self.assertListEqual(EXPECTED_DICT_ERROR, actual['errors'])
+        self.assertDictEqual(EXPECTED_DICT, datasources)
+
+    def test_metadata_query_abort_on_error(self):
+        with open(METADATA_QUERY_ERROR, 'rb') as f:
+            response_json = json.loads(f.read().decode())
+        with requests_mock.mock() as m:
+            m.post(self.baseurl, json=response_json)
+
+            with self.assertRaises(GraphQLError) as e:
+                self.server.metadata.query('fake query', abort_on_error=True)
+            self.assertListEqual(e.exception.error, EXPECTED_DICT_ERROR)