diff --git a/samples/update_workbook_data_acceleration.py b/samples/update_workbook_data_acceleration.py new file mode 100644 index 000000000..75f12262f --- /dev/null +++ b/samples/update_workbook_data_acceleration.py @@ -0,0 +1,109 @@ +#### +# This script demonstrates how to update workbook data acceleration using the Tableau +# Server Client. +# +# To run the script, you must have installed Python 3.7 or later. +#### + + +import argparse +import logging + +import tableauserverclient as TSC +from tableauserverclient import IntervalItem + + +def main(): + parser = argparse.ArgumentParser(description="Creates sample schedules for each type of frequency.") + # Common options; please keep those in sync across all samples + parser.add_argument("--server", "-s", help="server address") + parser.add_argument("--site", "-S", help="site name") + parser.add_argument("--token-name", "-p", help="name of the personal access token used to sign into the server") + parser.add_argument("--token-value", "-v", help="value of the personal access token used to sign into the server") + parser.add_argument( + "--logging-level", + "-l", + choices=["debug", "info", "error"], + default="error", + help="desired logging level (set to error by default)", + ) + # Options specific to this sample: + # This sample has no additional options, yet. 
If you add some, please add them here + + args = parser.parse_args() + + # Set logging level based on user input, or error by default + logging_level = getattr(logging, args.logging_level.upper()) + logging.basicConfig(level=logging_level) + + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=False) + server.add_http_options({"verify": False}) + server.use_server_version() + with server.auth.sign_in(tableau_auth): + # Get workbook + all_workbooks, pagination_item = server.workbooks.get() + print("\nThere are {} workbooks on site: ".format(pagination_item.total_available)) + print([workbook.name for workbook in all_workbooks]) + + if all_workbooks: + # Pick 1 workbook to try data acceleration. + # Note that data acceleration has a couple of requirements, please check the Tableau help page + # to verify your workbook/view is eligible for data acceleration. + + # Assuming 1st workbook is eligible for sample purposes + sample_workbook = all_workbooks[2] + + # Enable acceleration for all the views in the workbook + enable_config = dict() + enable_config["acceleration_enabled"] = True + enable_config["accelerate_now"] = True + + sample_workbook.data_acceleration_config = enable_config + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + # Since we did not set any specific view, we will enable all views in the workbook + print("Enable acceleration for all the views in the workbook " + updated.name + ".") + + # Disable acceleration on one of the view in the workbook + # You have to populate_views first, then set the views of the workbook + # to the ones you want to update. 
+ server.workbooks.populate_views(sample_workbook) + view_to_disable = sample_workbook.views[0] + sample_workbook.views = [view_to_disable] + + disable_config = dict() + disable_config["acceleration_enabled"] = False + disable_config["accelerate_now"] = True + + sample_workbook.data_acceleration_config = disable_config + # To get the acceleration status on the response, set includeViewAccelerationStatus=true + # Note that you have to populate_views first to get the acceleration status, since + # acceleration status is per view basis (not per workbook) + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook, True) + view1 = updated.views[0] + print('Disabled acceleration for 1 view "' + view1.name + '" in the workbook ' + updated.name + ".") + + # Get acceleration status of the views in workbook using workbooks.get_by_id + # This won't need to do populate_views beforehand + my_workbook = server.workbooks.get_by_id(sample_workbook.id) + view1 = my_workbook.views[0] + view2 = my_workbook.views[1] + print( + "Fetching acceleration status for views in the workbook " + + updated.name + + ".\n" + + 'View "' + + view1.name + + '" has acceleration_status = ' + + view1.data_acceleration_config["acceleration_status"] + + ".\n" + + 'View "' + + view2.name + + '" has acceleration_status = ' + + view2.data_acceleration_config["acceleration_status"] + + "." + ) + + +if __name__ == "__main__": + main() diff --git a/samples/update_workbook_data_freshness_policy.py b/samples/update_workbook_data_freshness_policy.py new file mode 100644 index 000000000..9e4d63dc1 --- /dev/null +++ b/samples/update_workbook_data_freshness_policy.py @@ -0,0 +1,218 @@ +#### +# This script demonstrates how to update workbook data freshness policy using the Tableau +# Server Client. +# +# To run the script, you must have installed Python 3.7 or later. 
+#### + + +import argparse +import logging + +import tableauserverclient as TSC +from tableauserverclient import IntervalItem + + +def main(): + parser = argparse.ArgumentParser(description="Creates sample schedules for each type of frequency.") + # Common options; please keep those in sync across all samples + parser.add_argument("--server", "-s", help="server address") + parser.add_argument("--site", "-S", help="site name") + parser.add_argument("--token-name", "-p", help="name of the personal access token " "used to sign into the server") + parser.add_argument( + "--token-value", "-v", help="value of the personal access token " "used to sign into the server" + ) + parser.add_argument( + "--logging-level", + "-l", + choices=["debug", "info", "error"], + default="error", + help="desired logging level (set to error by default)", + ) + # Options specific to this sample: + # This sample has no additional options, yet. If you add some, please add them here + + args = parser.parse_args() + + # Set logging level based on user input, or error by default + logging_level = getattr(logging, args.logging_level.upper()) + logging.basicConfig(level=logging_level) + + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=False) + server.add_http_options({"verify": False}) + server.use_server_version() + with server.auth.sign_in(tableau_auth): + # Get workbook + all_workbooks, pagination_item = server.workbooks.get() + print("\nThere are {} workbooks on site: ".format(pagination_item.total_available)) + print([workbook.name for workbook in all_workbooks]) + + if all_workbooks: + # Pick 1 workbook that has live datasource connection. + # Assuming 1st workbook met the criteria for sample purposes + # Data Freshness Policy is not available on extract & file-based datasource. 
+ sample_workbook = all_workbooks[2] + + # Get more info from the workbook selected + # Troubleshoot: if sample_workbook_extended.data_freshness_policy.option returns with AttributeError + # it could mean the workbook selected does not have live connection, which means it doesn't have + # data freshness policy. Change to another workbook with live datasource connection. + sample_workbook_extended = server.workbooks.get_by_id(sample_workbook.id) + try: + print( + "Workbook " + + sample_workbook.name + + " has data freshness policy option set to: " + + sample_workbook_extended.data_freshness_policy.option + ) + except AttributeError as e: + print( + "Workbook does not have data freshness policy, possibly due to the workbook selected " + "does not have live connection. Change to another workbook using live datasource connection." + ) + + # Update Workbook Data Freshness Policy to "AlwaysLive" + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.AlwaysLive + ) + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + ) + + # Update Workbook Data Freshness Policy to "SiteDefault" + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.SiteDefault + ) + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + ) + + # Update Workbook Data Freshness Policy to "FreshEvery" schedule. + # Set the schedule to be fresh every 10 hours + # Once the data_freshness_policy is already populated (e.g. 
due to previous calls), + # it is possible to directly change the option & other parameters directly like below + sample_workbook.data_freshness_policy.option = TSC.DataFreshnessPolicyItem.Option.FreshEvery + fresh_every_ten_hours = TSC.DataFreshnessPolicyItem.FreshEvery( + TSC.DataFreshnessPolicyItem.FreshEvery.Frequency.Hours, 10 + ) + sample_workbook.data_freshness_policy.fresh_every_schedule = fresh_every_ten_hours + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(updated.data_freshness_policy.fresh_every_schedule.value) + + " " + + updated.data_freshness_policy.fresh_every_schedule.frequency + ) + + # Update Workbook Data Freshness Policy to "FreshAt" schedule. + # Set the schedule to be fresh at 10AM every day + sample_workbook.data_freshness_policy.option = TSC.DataFreshnessPolicyItem.Option.FreshAt + fresh_at_ten_daily = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Day, "10:00:00", "America/Los_Angeles" + ) + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_ten_daily + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(updated.data_freshness_policy.fresh_at_schedule.time) + + " every " + + updated.data_freshness_policy.fresh_at_schedule.frequency + ) + + # Set the schedule to be fresh at 6PM every week on Wednesday and Sunday + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_6pm_wed_sun = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Week, + "18:00:00", + "America/Los_Angeles", + [IntervalItem.Day.Wednesday, "Sunday"], + ) 
+ + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_6pm_wed_sun + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + new_fresh_at_schedule = updated.data_freshness_policy.fresh_at_schedule + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(new_fresh_at_schedule.time) + + " every " + + new_fresh_at_schedule.frequency + + " on " + + new_fresh_at_schedule.interval_item[0] + + "," + + new_fresh_at_schedule.interval_item[1] + ) + + # Set the schedule to be fresh at 12AM every last day of the month + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_last_day_of_month = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, "00:00:00", "America/Los_Angeles", ["LastDay"] + ) + + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_last_day_of_month + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + new_fresh_at_schedule = updated.data_freshness_policy.fresh_at_schedule + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(new_fresh_at_schedule.time) + + " every " + + new_fresh_at_schedule.frequency + + " on " + + new_fresh_at_schedule.interval_item[0] + ) + + # Set the schedule to be fresh at 8PM every 1st,13th,20th day of the month + fresh_at_dates_of_month = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, + "00:00:00", + "America/Los_Angeles", + ["1", "13", "20"], + ) + + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_dates_of_month + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + new_fresh_at_schedule = updated.data_freshness_policy.fresh_at_schedule + print( + "Workbook " + + 
updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(new_fresh_at_schedule.time) + + " every " + + new_fresh_at_schedule.frequency + + " on " + + str(new_fresh_at_schedule.interval_item) + ) + + +if __name__ == "__main__": + main() diff --git a/tableauserverclient/__init__.py b/tableauserverclient/__init__.py index c5c3c1922..f093f521b 100644 --- a/tableauserverclient/__init__.py +++ b/tableauserverclient/__init__.py @@ -10,6 +10,7 @@ DailyInterval, DataAlertItem, DatabaseItem, + DataFreshnessPolicyItem, DatasourceItem, FavoriteItem, FlowItem, diff --git a/tableauserverclient/models/__init__.py b/tableauserverclient/models/__init__.py index 03d692583..e7a853d9a 100644 --- a/tableauserverclient/models/__init__.py +++ b/tableauserverclient/models/__init__.py @@ -5,6 +5,7 @@ from .data_acceleration_report_item import DataAccelerationReportItem from .data_alert_item import DataAlertItem from .database_item import DatabaseItem +from .data_freshness_policy_item import DataFreshnessPolicyItem from .datasource_item import DatasourceItem from .dqw_item import DQWItem from .exceptions import UnpopulatedPropertyError diff --git a/tableauserverclient/models/data_freshness_policy_item.py b/tableauserverclient/models/data_freshness_policy_item.py new file mode 100644 index 000000000..f567c501c --- /dev/null +++ b/tableauserverclient/models/data_freshness_policy_item.py @@ -0,0 +1,210 @@ +import xml.etree.ElementTree as ET + +from typing import Optional, Union, List +from tableauserverclient.models.property_decorators import property_is_enum, property_not_nullable +from .interval_item import IntervalItem + + +class DataFreshnessPolicyItem: + class Option: + AlwaysLive = "AlwaysLive" + SiteDefault = "SiteDefault" + FreshEvery = "FreshEvery" + FreshAt = "FreshAt" + + class FreshEvery: + class Frequency: + Minutes = "Minutes" + Hours = "Hours" + Days = "Days" + Weeks = "Weeks" + + def __init__(self, 
frequency: str, value: int): + self.frequency: str = frequency + self.value: int = value + + def __repr__(self): + return "".format(**vars(self)) + + @property + def frequency(self) -> str: + return self._frequency + + @frequency.setter + @property_is_enum(Frequency) + def frequency(self, value: str): + self._frequency = value + + @classmethod + def from_xml_element(cls, fresh_every_schedule_elem: ET.Element): + frequency = fresh_every_schedule_elem.get("frequency", None) + value_str = fresh_every_schedule_elem.get("value", None) + if (frequency is None) or (value_str is None): + return None + value = int(value_str) + return DataFreshnessPolicyItem.FreshEvery(frequency, value) + + class FreshAt: + class Frequency: + Day = "Day" + Week = "Week" + Month = "Month" + + def __init__(self, frequency: str, time: str, timezone, interval_item: Optional[List[str]] = None): + self.frequency = frequency + self.time = time + self.timezone = timezone + self.interval_item: Optional[List[str]] = interval_item + + def __repr__(self): + return ( + " timezone={_timezone} " "interval_item={_interval_time}" + ).format(**vars(self)) + + @property + def interval_item(self) -> Optional[List[str]]: + return self._interval_item + + @interval_item.setter + def interval_item(self, value: List[str]): + self._interval_item = value + + @property + def time(self): + return self._time + + @time.setter + @property_not_nullable + def time(self, value): + self._time = value + + @property + def timezone(self) -> str: + return self._timezone + + @timezone.setter + def timezone(self, value: str): + self._timezone = value + + @property + def frequency(self) -> str: + return self._frequency + + @frequency.setter + @property_is_enum(Frequency) + def frequency(self, value: str): + self._frequency = value + + @classmethod + def from_xml_element(cls, fresh_at_schedule_elem: ET.Element, ns): + frequency = fresh_at_schedule_elem.get("frequency", None) + time = fresh_at_schedule_elem.get("time", None) + if 
(frequency is None) or (time is None): + return None + timezone = fresh_at_schedule_elem.get("timezone", None) + interval = parse_intervals(fresh_at_schedule_elem, frequency, ns) + return DataFreshnessPolicyItem.FreshAt(frequency, time, timezone, interval) + + def __init__(self, option: str): + self.option = option + self.fresh_every_schedule: Optional[DataFreshnessPolicyItem.FreshEvery] = None + self.fresh_at_schedule: Optional[DataFreshnessPolicyItem.FreshAt] = None + + def __repr__(self): + return "".format(**vars(self)) + + @property + def option(self) -> str: + return self._option + + @option.setter + @property_is_enum(Option) + def option(self, value: str): + self._option = value + + @property + def fresh_every_schedule(self) -> Optional[FreshEvery]: + return self._fresh_every_schedule + + @fresh_every_schedule.setter + def fresh_every_schedule(self, value: FreshEvery): + self._fresh_every_schedule = value + + @property + def fresh_at_schedule(self) -> Optional[FreshAt]: + return self._fresh_at_schedule + + @fresh_at_schedule.setter + def fresh_at_schedule(self, value: FreshAt): + self._fresh_at_schedule = value + + @classmethod + def from_xml_element(cls, data_freshness_policy_elem, ns): + option = data_freshness_policy_elem.get("option", None) + if option is None: + return None + data_freshness_policy = DataFreshnessPolicyItem(option) + + fresh_at_schedule = None + fresh_every_schedule = None + if option == "FreshAt": + fresh_at_schedule_elem = data_freshness_policy_elem.find(".//t:freshAtSchedule", namespaces=ns) + fresh_at_schedule = DataFreshnessPolicyItem.FreshAt.from_xml_element(fresh_at_schedule_elem, ns) + data_freshness_policy.fresh_at_schedule = fresh_at_schedule + elif option == "FreshEvery": + fresh_every_schedule_elem = data_freshness_policy_elem.find(".//t:freshEverySchedule", namespaces=ns) + fresh_every_schedule = DataFreshnessPolicyItem.FreshEvery.from_xml_element(fresh_every_schedule_elem) + data_freshness_policy.fresh_every_schedule = 
fresh_every_schedule + + return data_freshness_policy + + +def parse_intervals(intervals_elem, frequency, ns): + interval_elems = intervals_elem.findall(".//t:intervals/t:interval", namespaces=ns) + interval = [] + for interval_elem in interval_elems: + interval.extend(interval_elem.attrib.items()) + + # No intervals expected for Day frequency + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Day: + return None + + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Week: + interval_values = [(i[1]).title() for i in interval] + return parse_week_intervals(interval_values) + + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Month: + interval_values = [(i[1]) for i in interval] + return parse_month_intervals(interval_values) + + +def parse_week_intervals(interval_values): + # Using existing IntervalItem.Day to check valid weekday string + if not all(hasattr(IntervalItem.Day, day) for day in interval_values): + raise ValueError("Invalid week day defined " + str(interval_values)) + return interval_values + + +def parse_month_intervals(interval_values): + error = "Invalid interval value for a monthly frequency: {}.".format(interval_values) + + # Month interval can have value either only ['LastDay'] or list of dates e.g. ["1", 20", "30"] + # First check if the list only have LastDay value. When using LastDay, there shouldn't be + # any other values, hence checking the first element of the list is enough. + # If the value is not "LastDay", we assume intervals is on list of dates format. 
+ # We created this function instead of using existing MonthlyInterval because we allow list of dates interval, + + intervals = [] + if interval_values[0] == "LastDay": + intervals.append(interval_values[0]) + else: + for interval in interval_values: + try: + if 1 <= int(interval) <= 31: + intervals.append(interval) + else: + raise ValueError(error) + except ValueError: + if interval_values[0] != "LastDay": + raise ValueError(error) + return intervals diff --git a/tableauserverclient/models/property_decorators.py b/tableauserverclient/models/property_decorators.py index 7c801a4b5..b91f6ca36 100644 --- a/tableauserverclient/models/property_decorators.py +++ b/tableauserverclient/models/property_decorators.py @@ -144,15 +144,7 @@ def property_is_data_acceleration_config(func): def wrapper(self, value): if not isinstance(value, dict): raise ValueError("{} is not type 'dict', cannot update {})".format(value.__class__.__name__, func.__name__)) - if len(value) != 4 or not all( - attr in value.keys() - for attr in ( - "acceleration_enabled", - "accelerate_now", - "last_updated_at", - "acceleration_status", - ) - ): + if len(value) < 2 or not all(attr in value.keys() for attr in ("acceleration_enabled", "accelerate_now")): error = "{} should have 2 keys ".format(func.__name__) error += "'acceleration_enabled' and 'accelerate_now'" error += "instead you have {}".format(value.keys()) diff --git a/tableauserverclient/models/view_item.py b/tableauserverclient/models/view_item.py index 90cff490b..a26e364a3 100644 --- a/tableauserverclient/models/view_item.py +++ b/tableauserverclient/models/view_item.py @@ -31,6 +31,10 @@ def __init__(self) -> None: self._workbook_id: Optional[str] = None self._permissions: Optional[Callable[[], List[PermissionsRule]]] = None self.tags: Set[str] = set() + self._data_acceleration_config = { + "acceleration_enabled": None, + "acceleration_status": None, + } def __str__(self): return "".format( @@ -133,6 +137,14 @@ def updated_at(self) -> 
Optional[datetime]: def workbook_id(self) -> Optional[str]: return self._workbook_id + @property + def data_acceleration_config(self): + return self._data_acceleration_config + + @data_acceleration_config.setter + def data_acceleration_config(self, value): + self._data_acceleration_config = value + @property def permissions(self) -> List[PermissionsRule]: if self._permissions is None: @@ -164,6 +176,7 @@ def from_xml(cls, view_xml, ns, workbook_id="") -> "ViewItem": owner_elem = view_xml.find(".//t:owner", namespaces=ns) project_elem = view_xml.find(".//t:project", namespaces=ns) tags_elem = view_xml.find(".//t:tags", namespaces=ns) + data_acceleration_config_elem = view_xml.find(".//t:dataAccelerationConfig", namespaces=ns) view_item._created_at = parse_datetime(view_xml.get("createdAt", None)) view_item._updated_at = parse_datetime(view_xml.get("updatedAt", None)) view_item._id = view_xml.get("id", None) @@ -186,4 +199,25 @@ def from_xml(cls, view_xml, ns, workbook_id="") -> "ViewItem": tags = TagItem.from_xml_element(tags_elem, ns) view_item.tags = tags view_item._initial_tags = copy.copy(tags) + if data_acceleration_config_elem is not None: + data_acceleration_config = parse_data_acceleration_config(data_acceleration_config_elem) + view_item.data_acceleration_config = data_acceleration_config return view_item + + +def parse_data_acceleration_config(data_acceleration_elem): + data_acceleration_config = dict() + + acceleration_enabled = data_acceleration_elem.get("accelerationEnabled", None) + if acceleration_enabled is not None: + acceleration_enabled = string_to_bool(acceleration_enabled) + + acceleration_status = data_acceleration_elem.get("accelerationStatus", None) + + data_acceleration_config["acceleration_enabled"] = acceleration_enabled + data_acceleration_config["acceleration_status"] = acceleration_status + return data_acceleration_config + + +def string_to_bool(s: str) -> bool: + return s.lower() == "true" diff --git 
a/tableauserverclient/models/workbook_item.py b/tableauserverclient/models/workbook_item.py index 86a9a2f18..273fec40e 100644 --- a/tableauserverclient/models/workbook_item.py +++ b/tableauserverclient/models/workbook_item.py @@ -17,6 +17,7 @@ from .revision_item import RevisionItem from .tag_item import TagItem from .view_item import ViewItem +from .data_freshness_policy_item import DataFreshnessPolicyItem class WorkbookItem(object): @@ -34,7 +35,7 @@ def __init__(self, project_id: Optional[str] = None, name: Optional[str] = None, self._revisions = None self._size = None self._updated_at = None - self._views = None + self._views: Optional[Callable[[], List[ViewItem]]] = None self.name = name self._description = None self.owner_id: Optional[str] = None @@ -49,6 +50,7 @@ def __init__(self, project_id: Optional[str] = None, name: Optional[str] = None, "last_updated_at": None, "acceleration_status": None, } + self.data_freshness_policy = None self._permissions = None return None @@ -162,6 +164,10 @@ def views(self) -> List[ViewItem]: # We had views included in a WorkbookItem response return self._views + @views.setter + def views(self, value): + self._views = value + @property def data_acceleration_config(self): return self._data_acceleration_config @@ -171,6 +177,15 @@ def data_acceleration_config(self): def data_acceleration_config(self, value): self._data_acceleration_config = value + @property + def data_freshness_policy(self): + return self._data_freshness_policy + + @data_freshness_policy.setter + # @property_is_data_freshness_policy + def data_freshness_policy(self, value): + self._data_freshness_policy = value + @property def revisions(self) -> List[RevisionItem]: if self._revisions is None: @@ -217,8 +232,9 @@ def _parse_common_tags(self, workbook_xml, ns): project_name, owner_id, _, - _, + views, data_acceleration_config, + data_freshness_policy, ) = self._parse_element(workbook_xml, ns) self._set_values( @@ -235,8 +251,9 @@ def _parse_common_tags(self, 
workbook_xml, ns): project_name, owner_id, None, - None, + views, data_acceleration_config, + data_freshness_policy, ) return self @@ -258,6 +275,7 @@ def _set_values( tags, views, data_acceleration_config, + data_freshness_policy, ): if id is not None: self._id = id @@ -286,10 +304,12 @@ def _set_values( if tags: self.tags = tags self._initial_tags = copy.copy(tags) - if views: + if views is not None: self._views = views if data_acceleration_config is not None: self.data_acceleration_config = data_acceleration_config + if data_freshness_policy is not None: + self.data_freshness_policy = data_freshness_policy @classmethod def from_response(cls, resp: str, ns: Dict[str, str]) -> List["WorkbookItem"]: @@ -356,6 +376,11 @@ def _parse_element(workbook_xml, ns): if data_acceleration_elem is not None: data_acceleration_config = parse_data_acceleration_config(data_acceleration_elem) + data_freshness_policy = None + data_freshness_policy_elem = workbook_xml.find(".//t:dataFreshnessPolicy", namespaces=ns) + if data_freshness_policy_elem is not None: + data_freshness_policy = DataFreshnessPolicyItem.from_xml_element(data_freshness_policy_elem, ns) + return ( id, name, @@ -372,6 +397,7 @@ def _parse_element(workbook_xml, ns): tags, views, data_acceleration_config, + data_freshness_policy, ) diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py index a73b0f0d5..f7f4d0d71 100644 --- a/tableauserverclient/server/endpoint/workbooks_endpoint.py +++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py @@ -135,7 +135,12 @@ def delete(self, workbook_id: str) -> None: # Update workbook @api(version="2.0") - def update(self, workbook_item: WorkbookItem) -> WorkbookItem: + @parameter_added_in(include_view_acceleration_status="3.22") + def update( + self, + workbook_item: WorkbookItem, + include_view_acceleration_status: bool = False, + ) -> WorkbookItem: if not workbook_item.id: error = "Workbook item 
missing ID. Workbook must be retrieved from server first." raise MissingRequiredFieldError(error) @@ -144,6 +149,9 @@ def update(self, workbook_item: WorkbookItem) -> WorkbookItem: # Update the workbook itself url = "{0}/{1}".format(self.baseurl, workbook_item.id) + if include_view_acceleration_status: + url += "?includeViewAccelerationStatus=True" + update_req = RequestFactory.Workbook.update_req(workbook_item) server_response = self.put_request(url, update_req) logger.info("Updated workbook item (ID: {0})".format(workbook_item.id)) diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 7fb9bf9ed..fcce1f941 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -57,6 +57,11 @@ def _add_hiddenview_element(views_element, view_name): view_element.attrib["hidden"] = "true" +def _add_view_element(views_element, view_id): + view_element = ET.SubElement(views_element, "view") + view_element.attrib["id"] = view_id + + def _add_credentials_element(parent_element, connection_credentials): credentials_element = ET.SubElement(parent_element, "connectionCredentials") if connection_credentials.password is None or connection_credentials.name is None: @@ -941,16 +946,61 @@ def update_req(self, workbook_item): if workbook_item.owner_id: owner_element = ET.SubElement(workbook_element, "owner") owner_element.attrib["id"] = workbook_item.owner_id - if workbook_item.data_acceleration_config["acceleration_enabled"] is not None: + if workbook_item._views is not None: + views_element = ET.SubElement(workbook_element, "views") + for view in workbook_item.views: + _add_view_element(views_element, view.id) + if workbook_item.data_acceleration_config: data_acceleration_config = workbook_item.data_acceleration_config data_acceleration_element = ET.SubElement(workbook_element, "dataAccelerationConfig") - data_acceleration_element.attrib["accelerationEnabled"] = str( - 
data_acceleration_config["acceleration_enabled"] - ).lower() + if data_acceleration_config["acceleration_enabled"] is not None: + data_acceleration_element.attrib["accelerationEnabled"] = str( + data_acceleration_config["acceleration_enabled"] + ).lower() if data_acceleration_config["accelerate_now"] is not None: data_acceleration_element.attrib["accelerateNow"] = str( data_acceleration_config["accelerate_now"] ).lower() + if workbook_item.data_freshness_policy is not None: + data_freshness_policy_config = workbook_item.data_freshness_policy + data_freshness_policy_element = ET.SubElement(workbook_element, "dataFreshnessPolicy") + data_freshness_policy_element.attrib["option"] = str(data_freshness_policy_config.option) + # Fresh Every Schedule + if data_freshness_policy_config.option == "FreshEvery": + if data_freshness_policy_config.fresh_every_schedule is not None: + fresh_every_element = ET.SubElement(data_freshness_policy_element, "freshEverySchedule") + fresh_every_element.attrib[ + "frequency" + ] = data_freshness_policy_config.fresh_every_schedule.frequency + fresh_every_element.attrib["value"] = str(data_freshness_policy_config.fresh_every_schedule.value) + else: + raise ValueError(f"data_freshness_policy_config.fresh_every_schedule must be populated.") + # Fresh At Schedule + if data_freshness_policy_config.option == "FreshAt": + if data_freshness_policy_config.fresh_at_schedule is not None: + fresh_at_element = ET.SubElement(data_freshness_policy_element, "freshAtSchedule") + frequency = data_freshness_policy_config.fresh_at_schedule.frequency + fresh_at_element.attrib["frequency"] = frequency + fresh_at_element.attrib["time"] = str(data_freshness_policy_config.fresh_at_schedule.time) + fresh_at_element.attrib["timezone"] = str(data_freshness_policy_config.fresh_at_schedule.timezone) + intervals = data_freshness_policy_config.fresh_at_schedule.interval_item + # Fresh At Schedule intervals if Frequency is Week or Month + if frequency != 
DataFreshnessPolicyItem.FreshAt.Frequency.Day: + if intervals is not None: + # if intervals is not None or frequency != DataFreshnessPolicyItem.FreshAt.Frequency.Day: + intervals_element = ET.SubElement(fresh_at_element, "intervals") + for interval in intervals: + expression = IntervalItem.Occurrence.WeekDay + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Month: + expression = IntervalItem.Occurrence.MonthDay + single_interval_element = ET.SubElement(intervals_element, "interval") + single_interval_element.attrib[expression] = interval + else: + raise ValueError( + f"fresh_at_schedule.interval_item must be populated for " f"Week & Month frequency." + ) + else: + raise ValueError(f"data_freshness_policy_config.fresh_at_schedule must be populated.") return ET.tostring(xml_request) diff --git a/test/assets/workbook_get_by_id_acceleration_status.xml b/test/assets/workbook_get_by_id_acceleration_status.xml new file mode 100644 index 000000000..0d1f9b93d --- /dev/null +++ b/test/assets/workbook_get_by_id_acceleration_status.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/test/assets/workbook_update_acceleration_status.xml b/test/assets/workbook_update_acceleration_status.xml new file mode 100644 index 000000000..7c3366fee --- /dev/null +++ b/test/assets/workbook_update_acceleration_status.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy.xml b/test/assets/workbook_update_data_freshness_policy.xml new file mode 100644 index 000000000..a69a097ba --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy2.xml b/test/assets/workbook_update_data_freshness_policy2.xml new file mode 100644 index 000000000..384f79ec0 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy2.xml @@ -0,0 +1,9 @@ + + + + 
+ + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy3.xml b/test/assets/workbook_update_data_freshness_policy3.xml new file mode 100644 index 000000000..195013517 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy3.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy4.xml b/test/assets/workbook_update_data_freshness_policy4.xml new file mode 100644 index 000000000..8208d986a --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy4.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy5.xml b/test/assets/workbook_update_data_freshness_policy5.xml new file mode 100644 index 000000000..b6e0358b6 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy5.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy6.xml b/test/assets/workbook_update_data_freshness_policy6.xml new file mode 100644 index 000000000..c8be8f6c1 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy6.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_views_acceleration_status.xml b/test/assets/workbook_update_views_acceleration_status.xml new file mode 100644 index 000000000..f2055fb79 --- /dev/null +++ b/test/assets/workbook_update_views_acceleration_status.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/test_data_freshness_policy.py b/test/test_data_freshness_policy.py new file mode 100644 index 000000000..9591a6380 --- /dev/null +++ b/test/test_data_freshness_policy.py @@ -0,0 +1,189 @@ +import os +import requests_mock +import unittest + +import tableauserverclient as TSC + +TEST_ASSET_DIR = 
os.path.join(os.path.dirname(__file__), "assets") + +UPDATE_DFP_ALWAYS_LIVE_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy.xml") +UPDATE_DFP_SITE_DEFAULT_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy2.xml") +UPDATE_DFP_FRESH_EVERY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy3.xml") +UPDATE_DFP_FRESH_AT_DAILY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy4.xml") +UPDATE_DFP_FRESH_AT_WEEKLY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy5.xml") +UPDATE_DFP_FRESH_AT_MONTHLY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy6.xml") + + +class WorkbookTests(unittest.TestCase): + def setUp(self) -> None: + self.server = TSC.Server("http://test", False) + + # Fake sign in + self.server._site_id = "dad65087-b08b-4603-af4e-2887b8aafc67" + self.server._auth_token = "j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM" + + self.baseurl = self.server.workbooks.baseurl + + def test_update_DFP_always_live(self) -> None: + with open(UPDATE_DFP_ALWAYS_LIVE_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.AlwaysLive + ) + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("AlwaysLive", single_workbook.data_freshness_policy.option) + + def test_update_DFP_site_default(self) -> None: + with open(UPDATE_DFP_SITE_DEFAULT_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + 
"/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.SiteDefault + ) + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("SiteDefault", single_workbook.data_freshness_policy.option) + + def test_update_DFP_fresh_every(self) -> None: + with open(UPDATE_DFP_FRESH_EVERY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshEvery + ) + fresh_every_ten_hours = TSC.DataFreshnessPolicyItem.FreshEvery( + TSC.DataFreshnessPolicyItem.FreshEvery.Frequency.Hours, 10 + ) + single_workbook.data_freshness_policy.fresh_every_schedule = fresh_every_ten_hours + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshEvery", single_workbook.data_freshness_policy.option) + self.assertEqual("Hours", single_workbook.data_freshness_policy.fresh_every_schedule.frequency) + self.assertEqual(10, single_workbook.data_freshness_policy.fresh_every_schedule.value) + + def test_update_DFP_fresh_every_missing_attributes(self) -> None: + with open(UPDATE_DFP_FRESH_EVERY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", 
text=response_xml)
            single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True)
            single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2"
            single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem(
                TSC.DataFreshnessPolicyItem.Option.FreshEvery
            )

            self.assertRaises(ValueError, self.server.workbooks.update, single_workbook)

    def test_update_DFP_fresh_at_day(self) -> None:
        with open(UPDATE_DFP_FRESH_AT_DAILY_XML, "rb") as f:
            response_xml = f.read().decode("utf-8")
        with requests_mock.mock() as m:
            m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml)
            single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True)
            single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2"
            single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem(
                TSC.DataFreshnessPolicyItem.Option.FreshAt
            )
            fresh_at_10pm_daily = TSC.DataFreshnessPolicyItem.FreshAt(
                TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Day, "22:00:00", "Asia/Singapore"
            )
            single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_10pm_daily
            single_workbook = self.server.workbooks.update(single_workbook)

        self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id)
        self.assertEqual("FreshAt", single_workbook.data_freshness_policy.option)
        self.assertEqual("Day", single_workbook.data_freshness_policy.fresh_at_schedule.frequency)
        self.assertEqual("22:00:00", single_workbook.data_freshness_policy.fresh_at_schedule.time)
        self.assertEqual("Asia/Singapore", single_workbook.data_freshness_policy.fresh_at_schedule.timezone)

    def test_update_DFP_fresh_at_week(self) -> None:
        with open(UPDATE_DFP_FRESH_AT_WEEKLY_XML, "rb") as f:
            response_xml = f.read().decode("utf-8")
        with requests_mock.mock() as m:
            m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml)
            single_workbook =
TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_10am_mon_wed = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Week, + "10:00:00", + "America/Los_Angeles", + ["Monday", "Wednesday"], + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_10am_mon_wed + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshAt", single_workbook.data_freshness_policy.option) + self.assertEqual("Week", single_workbook.data_freshness_policy.fresh_at_schedule.frequency) + self.assertEqual("10:00:00", single_workbook.data_freshness_policy.fresh_at_schedule.time) + self.assertEqual("Wednesday", single_workbook.data_freshness_policy.fresh_at_schedule.interval_item[0]) + self.assertEqual("Monday", single_workbook.data_freshness_policy.fresh_at_schedule.interval_item[1]) + + def test_update_DFP_fresh_at_month(self) -> None: + with open(UPDATE_DFP_FRESH_AT_MONTHLY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_00am_lastDayOfMonth = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, "00:00:00", "America/Los_Angeles", ["LastDay"] + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_00am_lastDayOfMonth + single_workbook = 
self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshAt", single_workbook.data_freshness_policy.option) + self.assertEqual("Month", single_workbook.data_freshness_policy.fresh_at_schedule.frequency) + self.assertEqual("00:00:00", single_workbook.data_freshness_policy.fresh_at_schedule.time) + self.assertEqual("LastDay", single_workbook.data_freshness_policy.fresh_at_schedule.interval_item[0]) + + def test_update_DFP_fresh_at_missing_params(self) -> None: + with open(UPDATE_DFP_FRESH_AT_DAILY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + + self.assertRaises(ValueError, self.server.workbooks.update, single_workbook) + + def test_update_DFP_fresh_at_missing_interval(self) -> None: + with open(UPDATE_DFP_FRESH_AT_DAILY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_month_no_interval = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, "00:00:00", "America/Los_Angeles" + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_month_no_interval + + self.assertRaises(ValueError, self.server.workbooks.update, 
single_workbook) diff --git a/test/test_view_acceleration.py b/test/test_view_acceleration.py new file mode 100644 index 000000000..6f94f0c10 --- /dev/null +++ b/test/test_view_acceleration.py @@ -0,0 +1,119 @@ +import os +import requests_mock +import unittest + +import tableauserverclient as TSC +from tableauserverclient.datetime_helpers import format_datetime + +TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), "assets") + +GET_BY_ID_ACCELERATION_STATUS_XML = os.path.join(TEST_ASSET_DIR, "workbook_get_by_id_acceleration_status.xml") +POPULATE_VIEWS_XML = os.path.join(TEST_ASSET_DIR, "workbook_populate_views.xml") +UPDATE_VIEWS_ACCELERATION_STATUS_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_views_acceleration_status.xml") +UPDATE_WORKBOOK_ACCELERATION_STATUS_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_acceleration_status.xml") + + +class WorkbookTests(unittest.TestCase): + def setUp(self) -> None: + self.server = TSC.Server("http://test", False) + + # Fake sign in + self.server._site_id = "dad65087-b08b-4603-af4e-2887b8aafc67" + self.server._auth_token = "j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM" + + self.baseurl = self.server.workbooks.baseurl + + def test_get_by_id(self) -> None: + with open(GET_BY_ID_ACCELERATION_STATUS_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.get(self.baseurl + "/3cc6cd06-89ce-4fdc-b935-5294135d6d42", text=response_xml) + single_workbook = self.server.workbooks.get_by_id("3cc6cd06-89ce-4fdc-b935-5294135d6d42") + + self.assertEqual("3cc6cd06-89ce-4fdc-b935-5294135d6d42", single_workbook.id) + self.assertEqual("SafariSample", single_workbook.name) + self.assertEqual("SafariSample", single_workbook.content_url) + self.assertEqual("http://tableauserver/#/workbooks/2/views", single_workbook.webpage_url) + self.assertEqual(False, single_workbook.show_tabs) + self.assertEqual(26, single_workbook.size) + self.assertEqual("2016-07-26T20:34:56Z", 
format_datetime(single_workbook.created_at)) + self.assertEqual("description for SafariSample", single_workbook.description) + self.assertEqual("2016-07-26T20:35:05Z", format_datetime(single_workbook.updated_at)) + self.assertEqual("ee8c6e70-43b6-11e6-af4f-f7b0d8e20760", single_workbook.project_id) + self.assertEqual("default", single_workbook.project_name) + self.assertEqual("5de011f8-5aa9-4d5b-b991-f462c8dd6bb7", single_workbook.owner_id) + self.assertEqual(set(["Safari", "Sample"]), single_workbook.tags) + self.assertEqual("d79634e1-6063-4ec9-95ff-50acbf609ff5", single_workbook.views[0].id) + self.assertEqual("ENDANGERED SAFARI", single_workbook.views[0].name) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI", single_workbook.views[0].content_url) + self.assertEqual(True, single_workbook.views[0].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Enabled", single_workbook.views[0].data_acceleration_config["acceleration_status"]) + self.assertEqual("d79634e1-6063-4ec9-95ff-50acbf609ff9", single_workbook.views[1].id) + self.assertEqual("ENDANGERED SAFARI 2", single_workbook.views[1].name) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI2", single_workbook.views[1].content_url) + self.assertEqual(False, single_workbook.views[1].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Suspended", single_workbook.views[1].data_acceleration_config["acceleration_status"]) + + def test_update_workbook_acceleration(self) -> None: + with open(UPDATE_WORKBOOK_ACCELERATION_STATUS_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_acceleration_config = { + "acceleration_enabled": True, + "accelerate_now": False, + 
"last_updated_at": None, + "acceleration_status": None, + } + # update with parameter includeViewAccelerationStatus=True + single_workbook = self.server.workbooks.update(single_workbook, True) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("1d0304cd-3796-429f-b815-7258370b9b74", single_workbook.project_id) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI", single_workbook.views[0].content_url) + self.assertEqual(True, single_workbook.views[0].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", single_workbook.views[0].data_acceleration_config["acceleration_status"]) + self.assertEqual("d79634e1-6063-4ec9-95ff-50acbf609ff9", single_workbook.views[1].id) + self.assertEqual("ENDANGERED SAFARI 2", single_workbook.views[1].name) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI2", single_workbook.views[1].content_url) + self.assertEqual(True, single_workbook.views[1].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", single_workbook.views[1].data_acceleration_config["acceleration_status"]) + + def test_update_views_acceleration(self) -> None: + with open(POPULATE_VIEWS_XML, "rb") as f: + views_xml = f.read().decode("utf-8") + with open(UPDATE_VIEWS_ACCELERATION_STATUS_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.get(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2/views", text=views_xml) + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_acceleration_config = { + "acceleration_enabled": False, + "accelerate_now": False, + "last_updated_at": None, + "acceleration_status": None, + } + self.server.workbooks.populate_views(single_workbook) + single_workbook.views = 
[single_workbook.views[1], single_workbook.views[2]] + # update with parameter includeViewAccelerationStatus=True + single_workbook = self.server.workbooks.update(single_workbook, True) + + views_list = single_workbook.views + self.assertEqual("097dbe13-de89-445f-b2c3-02f28bd010c1", views_list[0].id) + self.assertEqual("GDP per capita", views_list[0].name) + self.assertEqual(False, views_list[0].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Disabled", views_list[0].data_acceleration_config["acceleration_status"]) + + self.assertEqual("2c1ab9d7-8d64-4cc6-b495-52e40c60c330", views_list[1].id) + self.assertEqual("Country ranks", views_list[1].name) + self.assertEqual(True, views_list[1].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", views_list[1].data_acceleration_config["acceleration_status"]) + + self.assertEqual("0599c28c-6d82-457e-a453-e52c1bdb00f5", views_list[2].id) + self.assertEqual("Interest rates", views_list[2].name) + self.assertEqual(True, views_list[2].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", views_list[2].data_acceleration_config["acceleration_status"])