From 1d3a642fea206108d540ea2fa9a87510af26f363 Mon Sep 17 00:00:00 2001 From: markm Date: Fri, 19 Jan 2024 12:27:31 -0600 Subject: [PATCH 01/27] Changes to alter cgi dependency to email.Messages --- .../server/endpoint/datasources_endpoint.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index 66ad9f710..be3733d67 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -1,4 +1,4 @@ -import cgi +from email.message import Message import copy import json import io @@ -437,14 +437,16 @@ def download_revision( url += "?includeExtract=False" with closing(self.get_request(url, parameters={"stream": True})) as server_response: - _, params = cgi.parse_header(server_response.headers["Content-Disposition"]) + m = Message() + m['Content-Disposition'] = server_response.headers["Content-Disposition"] + params = m.get_filename() if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB filepath.write(chunk) return_path = filepath else: params = fix_filename(params) - filename = to_filename(os.path.basename(params["filename"])) + filename = to_filename(os.path.basename(params)) download_path = make_download_path(filepath, filename) with open(download_path, "wb") as f: for chunk in server_response.iter_content(1024): # 1KB From 2cc51710d62ceebddd89ee1f34fcb66948f7af1c Mon Sep 17 00:00:00 2001 From: markm Date: Sat, 20 Jan 2024 01:33:52 -0600 Subject: [PATCH 02/27] Changes to alter cgi dependency to email.Messages --- tableauserverclient/server/endpoint/flows_endpoint.py | 8 +++++--- tableauserverclient/server/endpoint/workbooks_endpoint.py | 8 +++++--- 2 files changed, 10 insertions(+), 6 deletions(-) diff --git a/tableauserverclient/server/endpoint/flows_endpoint.py b/tableauserverclient/server/endpoint/flows_endpoint.py index 21c16b1cc..a9b937ea5 100644 --- a/tableauserverclient/server/endpoint/flows_endpoint.py +++ b/tableauserverclient/server/endpoint/flows_endpoint.py @@ -1,4 +1,4 @@ -import cgi +from email.message import Message import copy import io import logging @@ -120,14 +120,16 @@ def download(self, flow_id: str, filepath: Optional[PathOrFileW] = None) -> Path url = "{0}/{1}/content".format(self.baseurl, flow_id) with closing(self.get_request(url, parameters={"stream": True})) as server_response: - _, params = cgi.parse_header(server_response.headers["Content-Disposition"]) + m = Message() + m['Content-Disposition'] = server_response.headers["Content-Disposition"] + params = m.get_filename() if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB filepath.write(chunk) return_path = filepath else: params = fix_filename(params) - filename = to_filename(os.path.basename(params["filename"])) + filename = to_filename(os.path.basename(params)) download_path = make_download_path(filepath, filename) with open(download_path, "wb") as f: for chunk in server_response.iter_content(1024): # 1KB diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py index 506fe02c2..73f69a145 100644 --- a/tableauserverclient/server/endpoint/workbooks_endpoint.py +++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py @@ -1,4 +1,4 @@ -import cgi +from email.message import Message import copy import io import logging @@ -483,14 +483,16 @@ def 
download_revision( url += "?includeExtract=False" with closing(self.get_request(url, parameters={"stream": True})) as server_response: - _, params = cgi.parse_header(server_response.headers["Content-Disposition"]) + m = Message() + m['Content-Disposition'] = server_response.headers["Content-Disposition"] + params = m.get_filename() if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB filepath.write(chunk) return_path = filepath else: params = fix_filename(params) - filename = to_filename(os.path.basename(params["filename"])) + filename = to_filename(os.path.basename(params)) download_path = make_download_path(filepath, filename) with open(download_path, "wb") as f: for chunk in server_response.iter_content(1024): # 1KB From 999b3019a50f8860168b276e86b30cc925080c89 Mon Sep 17 00:00:00 2001 From: markm Date: Sat, 20 Jan 2024 01:34:44 -0600 Subject: [PATCH 03/27] Changes to alter cgi dependency to email.Messages --- tableauserverclient/server/endpoint/datasources_endpoint.py | 2 +- tableauserverclient/server/endpoint/flows_endpoint.py | 2 +- tableauserverclient/server/endpoint/workbooks_endpoint.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index be3733d67..7a797cf4c 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -438,7 +438,7 @@ def download_revision( with closing(self.get_request(url, parameters={"stream": True})) as server_response: m = Message() - m['Content-Disposition'] = server_response.headers["Content-Disposition"] + m["Content-Disposition"] = server_response.headers["Content-Disposition"] params = m.get_filename() if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB diff --git a/tableauserverclient/server/endpoint/flows_endpoint.py b/tableauserverclient/server/endpoint/flows_endpoint.py index a9b937ea5..5132ee454 100644 --- a/tableauserverclient/server/endpoint/flows_endpoint.py +++ b/tableauserverclient/server/endpoint/flows_endpoint.py @@ -121,7 +121,7 @@ def download(self, flow_id: str, filepath: Optional[PathOrFileW] = None) -> Path with closing(self.get_request(url, parameters={"stream": True})) as server_response: m = Message() - m['Content-Disposition'] = server_response.headers["Content-Disposition"] + m["Content-Disposition"] = server_response.headers["Content-Disposition"] params = m.get_filename() if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py index 73f69a145..58fa4fe98 100644 --- a/tableauserverclient/server/endpoint/workbooks_endpoint.py +++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py @@ -484,7 +484,7 @@ def download_revision( with closing(self.get_request(url, parameters={"stream": True})) as server_response: m = Message() - m['Content-Disposition'] = server_response.headers["Content-Disposition"] + m["Content-Disposition"] = server_response.headers["Content-Disposition"] params = m.get_filename() if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB From 68b915774737083ecad5f365f1946a4ef778050f Mon Sep 17 00:00:00 2001 From: markm Date: Sat, 20 Jan 2024 01:44:20 -0600 Subject: [PATCH 04/27] Changes to alter cgi dependency to 
email.Messages --- tableauserverclient/server/endpoint/datasources_endpoint.py | 2 +- tableauserverclient/server/endpoint/flows_endpoint.py | 2 +- tableauserverclient/server/endpoint/workbooks_endpoint.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/server/endpoint/datasources_endpoint.py b/tableauserverclient/server/endpoint/datasources_endpoint.py index 7a797cf4c..28226d280 100644 --- a/tableauserverclient/server/endpoint/datasources_endpoint.py +++ b/tableauserverclient/server/endpoint/datasources_endpoint.py @@ -439,7 +439,7 @@ def download_revision( with closing(self.get_request(url, parameters={"stream": True})) as server_response: m = Message() m["Content-Disposition"] = server_response.headers["Content-Disposition"] - params = m.get_filename() + params = m.get_filename(failobj="") if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB filepath.write(chunk) diff --git a/tableauserverclient/server/endpoint/flows_endpoint.py b/tableauserverclient/server/endpoint/flows_endpoint.py index 5132ee454..77b01c478 100644 --- a/tableauserverclient/server/endpoint/flows_endpoint.py +++ b/tableauserverclient/server/endpoint/flows_endpoint.py @@ -122,7 +122,7 @@ def download(self, flow_id: str, filepath: Optional[PathOrFileW] = None) -> Path with closing(self.get_request(url, parameters={"stream": True})) as server_response: m = Message() m["Content-Disposition"] = server_response.headers["Content-Disposition"] - params = m.get_filename() + params = m.get_filename(failobj="") if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB filepath.write(chunk) diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py index 58fa4fe98..393a028c8 100644 --- a/tableauserverclient/server/endpoint/workbooks_endpoint.py +++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py @@ -485,7 +485,7 @@ def download_revision( with closing(self.get_request(url, parameters={"stream": True})) as server_response: m = Message() m["Content-Disposition"] = server_response.headers["Content-Disposition"] - params = m.get_filename() + params = m.get_filename(failobj="") if isinstance(filepath, io_types_w): for chunk in server_response.iter_content(1024): # 1KB filepath.write(chunk) From 5611859114abb76b2ef921330980d73b6d2c9b7d Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Thu, 18 Jan 2024 22:16:14 -0600 Subject: [PATCH 05/27] feat: allow viz height and width parameters --- .../models/property_decorators.py | 8 +++-- tableauserverclient/server/request_options.py | 33 ++++++++++++++++++- test/test_view.py | 29 ++++++++++++++++ 3 files changed, 67 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/models/property_decorators.py b/tableauserverclient/models/property_decorators.py index 7c801a4b5..6ffcf6f85 100644 --- a/tableauserverclient/models/property_decorators.py +++ b/tableauserverclient/models/property_decorators.py @@ -1,6 +1,8 @@ +from collections.abc import Container import datetime import re from functools import wraps +from typing import Any, Optional from tableauserverclient.datetime_helpers import parse_datetime @@ -65,7 +67,7 @@ def wrapper(self, value): return wrapper -def property_is_int(range, allowed=None): +def property_is_int(range: tuple[int, int], allowed: Optional[Container[Any]] = None): """Takes a range of ints and a list of exemptions to check against 
when setting a property on a model. The range is a tuple of (min, max) and the allowed list (empty by default) allows values outside that range. @@ -89,8 +91,10 @@ def wrapper(self, value): raise ValueError(error) min, max = range + if value in allowed: + return func(self, value) - if (value < min or value > max) and (value not in allowed): + if value < min or value > max: raise ValueError(error) return func(self, value) diff --git a/tableauserverclient/server/request_options.py b/tableauserverclient/server/request_options.py index 95233f8fc..f2bd3c939 100644 --- a/tableauserverclient/server/request_options.py +++ b/tableauserverclient/server/request_options.py @@ -1,3 +1,5 @@ +import sys + from tableauserverclient.models.property_decorators import property_is_int import logging @@ -261,11 +263,13 @@ class Orientation: Portrait = "portrait" Landscape = "landscape" - def __init__(self, page_type=None, orientation=None, maxage=-1): + def __init__(self, page_type=None, orientation=None, maxage=-1, viz_height=None, viz_width=None): super(PDFRequestOptions, self).__init__() self.page_type = page_type self.orientation = orientation self.max_age = maxage + self.viz_height = viz_height + self.viz_width = viz_width @property def max_age(self): @@ -276,6 +280,24 @@ def max_age(self): def max_age(self, value): self._max_age = value + @property + def viz_height(self): + return self._viz_height + + @viz_height.setter + @property_is_int(range=(0, sys.maxsize), allowed=(None,)) + def viz_height(self, value): + self._viz_height = value + + @property + def viz_width(self): + return self._viz_width + + @viz_width.setter + @property_is_int(range=(0, sys.maxsize), allowed=(None,)) + def viz_width(self, value): + self._viz_width = value + def get_query_params(self): params = {} if self.page_type: @@ -287,6 +309,15 @@ def get_query_params(self): if self.max_age != -1: params["maxAge"] = self.max_age + if (self.viz_height is None) ^ (self.viz_width is None): + raise ValueError("viz_height and viz_width must be specified together") + + if self.viz_height is not None: + params["vizHeight"] = self.viz_height + + if self.viz_width is not None: + params["vizWidth"] = self.viz_width + self._append_view_filters(params) return params diff --git a/test/test_view.py b/test/test_view.py index 1459150bb..720a0ce64 100644 --- a/test/test_view.py +++ b/test/test_view.py @@ -315,3 +315,32 @@ def test_filter_excel(self) -> None: excel_file = b"".join(single_view.excel) self.assertEqual(response, excel_file) + + def test_pdf_height(self) -> None: + self.server.version = "3.8" + self.baseurl = self.server.views.baseurl + with open(POPULATE_PDF, "rb") as f: + response = f.read() + with requests_mock.mock() as m: + m.get( + self.baseurl + "/d79634e1-6063-4ec9-95ff-50acbf609ff5/pdf?vizHeight=1080&vizWidth=1920", + content=response, + ) + single_view = TSC.ViewItem() + single_view._id = "d79634e1-6063-4ec9-95ff-50acbf609ff5" + + req_option = TSC.PDFRequestOptions( + viz_height=1080, + viz_width=1920, + ) + + self.server.views.populate_pdf(single_view, req_option) + self.assertEqual(response, single_view.pdf) + + def test_pdf_errors(self) -> None: + req_option = TSC.PDFRequestOptions(viz_height=1080) + with self.assertRaises(ValueError): + req_option.get_query_params() + req_option = TSC.PDFRequestOptions(viz_width=1920) + with self.assertRaises(ValueError): + req_option.get_query_params() From 8ad3c03b89a3851a780dc57bd7e5a4f2970c608c Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Tue, 
23 Jan 2024 21:04:14 -0600 Subject: [PATCH 06/27] fix: use python3.8 syntax --- tableauserverclient/models/property_decorators.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tableauserverclient/models/property_decorators.py b/tableauserverclient/models/property_decorators.py index 6ffcf6f85..ea781cd51 100644 --- a/tableauserverclient/models/property_decorators.py +++ b/tableauserverclient/models/property_decorators.py @@ -2,7 +2,7 @@ import datetime import re from functools import wraps -from typing import Any, Optional +from typing import Any, Optional, Tuple from tableauserverclient.datetime_helpers import parse_datetime @@ -67,7 +67,7 @@ def wrapper(self, value): return wrapper -def property_is_int(range: tuple[int, int], allowed: Optional[Container[Any]] = None): +def property_is_int(range: Tuple[int, int], allowed: Optional[Container[Any]] = None): """Takes a range of ints and a list of exemptions to check against when setting a property on a model. The range is a tuple of (min, max) and the allowed list (empty by default) allows values outside that range. From 7e44b5ec47b777cd43e2725be2019892d6e4d31a Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Tue, 23 Jan 2024 21:06:43 -0600 Subject: [PATCH 07/27] fix: python3.8 syntax --- tableauserverclient/models/property_decorators.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/tableauserverclient/models/property_decorators.py b/tableauserverclient/models/property_decorators.py index ea781cd51..58c33699b 100644 --- a/tableauserverclient/models/property_decorators.py +++ b/tableauserverclient/models/property_decorators.py @@ -1,8 +1,7 @@ -from collections.abc import Container import datetime import re from functools import wraps -from typing import Any, Optional, Tuple +from typing import Any, Container, Optional, Tuple from tableauserverclient.datetime_helpers import parse_datetime From ffd0b8fd8452ec8fcaf78a03a838d8670256ba02 Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Wed, 24 Jan 2024 07:30:24 -0600 Subject: [PATCH 08/27] docs: comment PDF viz dimensions XOR --- tableauserverclient/server/request_options.py | 1 + 1 file changed, 1 insertion(+) diff --git a/tableauserverclient/server/request_options.py b/tableauserverclient/server/request_options.py index f2bd3c939..8304b8f68 100644 --- a/tableauserverclient/server/request_options.py +++ b/tableauserverclient/server/request_options.py @@ -309,6 +309,7 @@ def get_query_params(self): if self.max_age != -1: params["maxAge"] = self.max_age + # XOR. Either both are None or both are not None. if (self.viz_height is None) ^ (self.viz_width is None): raise ValueError("viz_height and viz_width must be specified together") From 9ddbad56b8f9fff464f25f8262d97d01e67a8563 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Thu, 1 Feb 2024 15:58:16 -0800 Subject: [PATCH 09/27] Add support for System schedule type I'm not fully clear on where these might come from, but this change should let TSC work in such cases. 
Fixes #1349 --- tableauserverclient/models/schedule_item.py | 1 + test/assets/schedule_get.xml | 1 + test/test_schedule.py | 10 ++++++++++ 3 files changed, 12 insertions(+) diff --git a/tableauserverclient/models/schedule_item.py b/tableauserverclient/models/schedule_item.py index db187a5f9..e416643ba 100644 --- a/tableauserverclient/models/schedule_item.py +++ b/tableauserverclient/models/schedule_item.py @@ -26,6 +26,7 @@ class Type: Subscription = "Subscription" DataAcceleration = "DataAcceleration" ActiveDirectorySync = "ActiveDirectorySync" + System = "System" class ExecutionOrder: Parallel = "Parallel" diff --git a/test/assets/schedule_get.xml b/test/assets/schedule_get.xml index 66e4d6e51..db5e1a05e 100644 --- a/test/assets/schedule_get.xml +++ b/test/assets/schedule_get.xml @@ -5,5 +5,6 @@ + \ No newline at end of file diff --git a/test/test_schedule.py b/test/test_schedule.py index 76c8720b9..3bbf5709b 100644 --- a/test/test_schedule.py +++ b/test/test_schedule.py @@ -50,6 +50,7 @@ def test_get(self) -> None: extract = all_schedules[0] subscription = all_schedules[1] flow = all_schedules[2] + system = all_schedules[3] self.assertEqual(2, pagination_item.total_available) self.assertEqual("c9cff7f9-309c-4361-99ff-d4ba8c9f5467", extract.id) @@ -79,6 +80,15 @@ def test_get(self) -> None: self.assertEqual("Flow", flow.schedule_type) self.assertEqual("2019-03-01T09:00:00Z", format_datetime(flow.next_run_at)) + self.assertEqual("3cfa4713-ce7c-4fa7-aa2e-f752bfc8dd04", system.id) + self.assertEqual("First of the month 2:00AM", system.name) + self.assertEqual("Active", system.state) + self.assertEqual(30, system.priority) + self.assertEqual("2019-02-19T18:52:19Z", format_datetime(system.created_at)) + self.assertEqual("2019-02-19T18:55:51Z", format_datetime(system.updated_at)) + self.assertEqual("System", system.schedule_type) + self.assertEqual("2019-03-01T09:00:00Z", format_datetime(system.next_run_at)) + def test_get_empty(self) -> None: with open(GET_EMPTY_XML, "rb") as f: response_xml = f.read().decode("utf-8") From 60fa87f07d54cdc635c06614a9f0455675bbb973 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Tue, 13 Feb 2024 20:18:21 -0800 Subject: [PATCH 10/27] Add failing test retrieving a task with 24 hour (aka daily) interval --- test/assets/tasks_with_interval.xml | 20 ++++++++++++++++++++ test/test_task.py | 10 ++++++++++ 2 files changed, 30 insertions(+) create mode 100644 test/assets/tasks_with_interval.xml diff --git a/test/assets/tasks_with_interval.xml b/test/assets/tasks_with_interval.xml new file mode 100644 index 000000000..a317408fb --- /dev/null +++ b/test/assets/tasks_with_interval.xml @@ -0,0 +1,20 @@ + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/test_task.py b/test/test_task.py index 4e0157dfd..53da7c160 100644 --- a/test/test_task.py +++ b/test/test_task.py @@ -19,6 +19,7 @@ GET_XML_RUN_NOW_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_run_now_response.xml") GET_XML_CREATE_TASK_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_create_extract_task.xml") GET_XML_WITHOUT_SCHEDULE = TEST_ASSET_DIR / "tasks_without_schedule.xml" +GET_XML_WITH_INTERVAL = TEST_ASSET_DIR / "tasks_with_interval.xml" class TaskTests(unittest.TestCase): @@ -97,6 +98,15 @@ def test_get_task_without_schedule(self): self.assertEqual("c7a9327e-1cda-4504-b026-ddb43b976d1d", task.target.id) self.assertEqual("datasource", task.target.type) + def test_get_task_with_interval(self): + with requests_mock.mock() as m: + m.get(self.baseurl, 
text=GET_XML_WITH_INTERVAL.read_text()) + all_tasks, pagination_item = self.server.tasks.get() + + task = all_tasks[0] + self.assertEqual("e4de0575-fcc7-4232-5659-be09bb8e7654", task.target.id) + self.assertEqual("datasource", task.target.type) + def test_delete(self): with requests_mock.mock() as m: m.delete(self.baseurl + "/c7a9327e-1cda-4504-b026-ddb43b976d1d", status_code=204) From 0dca1aae66703fb932f364bee9cdd899a9cc51ee Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Tue, 13 Feb 2024 22:38:55 -0800 Subject: [PATCH 11/27] Add 24 (hours) as a valid interval which can be returned from the server --- tableauserverclient/models/interval_item.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tableauserverclient/models/interval_item.py b/tableauserverclient/models/interval_item.py index 537e6c14f..3ee1fee08 100644 --- a/tableauserverclient/models/interval_item.py +++ b/tableauserverclient/models/interval_item.py @@ -136,7 +136,7 @@ def interval(self): @interval.setter def interval(self, intervals): - VALID_INTERVALS = {0.25, 0.5, 1, 2, 4, 6, 8, 12} + VALID_INTERVALS = {0.25, 0.5, 1, 2, 4, 6, 8, 12, 24} for interval in intervals: # if an hourly interval is a string, then it is a weekDay interval From 3cc0f8ee57fcb0d9ffa1adb7bb7b62b70c54e0f5 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Wed, 14 Feb 2024 11:17:32 -0800 Subject: [PATCH 12/27] Add Python 3.12 to test matrix --- .github/workflows/run-tests.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index 6b1629bfd..fb89d5de1 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -8,7 +8,7 @@ jobs: fail-fast: false matrix: os: [ubuntu-latest, macos-latest, windows-latest] - python-version: ['3.8', '3.9', '3.10', '3.11'] + python-version: ['3.8', '3.9', '3.10', '3.11', '3.12'] runs-on: ${{ matrix.os }} From 0fb214e22b2aac6d4bab54d17a43e80851e66e93 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Wed, 14 Feb 2024 11:45:21 -0800 Subject: [PATCH 13/27] Tweak test action to stop double-running everything --- .github/workflows/run-tests.yml | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/.github/workflows/run-tests.yml b/.github/workflows/run-tests.yml index fb89d5de1..d70539582 100644 --- a/.github/workflows/run-tests.yml +++ b/.github/workflows/run-tests.yml @@ -1,6 +1,11 @@ name: Python tests -on: [push, pull_request] +on: + pull_request: {} + push: + branches: + - development + - master jobs: build: From 0ddae7ce24c457b522c87867531b91213263f7f1 Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Wed, 14 Feb 2024 21:15:45 -0600 Subject: [PATCH 14/27] feat: add description support on wb publish --- tableauserverclient/models/workbook_item.py | 4 ++++ tableauserverclient/server/request_factory.py | 3 +++ test/assets/workbook_publish.xml | 4 ++-- test/test_workbook.py | 3 +++ 4 files changed, 12 insertions(+), 2 deletions(-) diff --git a/tableauserverclient/models/workbook_item.py b/tableauserverclient/models/workbook_item.py index 86a9a2f18..57ddf83f8 100644 --- a/tableauserverclient/models/workbook_item.py +++ b/tableauserverclient/models/workbook_item.py @@ -91,6 +91,10 @@ def created_at(self) -> Optional[datetime.datetime]: def description(self) -> Optional[str]: return self._description + @description.setter + def description(self, value: str): + self._description = value + @property def id(self) -> Optional[str]: return self._id 
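
For context on how the new description field is expected to be used: a minimal sketch (not taken verbatim from this patch; the server address, token values, project id, and file path are illustrative placeholders) that sets a description before publishing, matching the behaviour exercised by the test_workbook.py change later in this patch.

import tableauserverclient as TSC

# Sign in with a personal access token (all values here are placeholders).
tableau_auth = TSC.PersonalAccessTokenAuth("token-name", "token-value", site_id="mysite")
server = TSC.Server("https://tableau.example.com", use_server_version=True)

with server.auth.sign_in(tableau_auth):
    new_workbook = TSC.WorkbookItem(name="Sample", project_id="ee8c6e70-43b6-11e6-af4f-f7b0d8e20760")
    # The setter added above lets a description be supplied before publish;
    # the request_factory change in the next hunk serializes it on the workbook element.
    new_workbook.description = "REST API Testing"
    new_workbook = server.workbooks.publish(new_workbook, "SampleWB.twbx", server.PublishMode.CreateNew)
    print(new_workbook.description)
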
diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 6316527ec..70d2b30fc 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -911,6 +911,9 @@ def _generate_xml( for connection in connections: _add_connections_element(connections_element, connection) + if workbook_item.description is not None: + workbook_element.attrib["description"] = workbook_item.description + if hidden_views is not None: import warnings diff --git a/test/assets/workbook_publish.xml b/test/assets/workbook_publish.xml index dcfc79936..3e23bda71 100644 --- a/test/assets/workbook_publish.xml +++ b/test/assets/workbook_publish.xml @@ -1,6 +1,6 @@ - + @@ -8,4 +8,4 @@ - \ No newline at end of file + diff --git a/test/test_workbook.py b/test/test_workbook.py index 212d55a37..ac3d44b28 100644 --- a/test/test_workbook.py +++ b/test/test_workbook.py @@ -488,6 +488,8 @@ def test_publish(self) -> None: name="Sample", show_tabs=False, project_id="ee8c6e70-43b6-11e6-af4f-f7b0d8e20760" ) + new_workbook.description = "REST API Testing" + sample_workbook = os.path.join(TEST_ASSET_DIR, "SampleWB.twbx") publish_mode = self.server.PublishMode.CreateNew @@ -506,6 +508,7 @@ def test_publish(self) -> None: self.assertEqual("fe0b4e89-73f4-435e-952d-3a263fbfa56c", new_workbook.views[0].id) self.assertEqual("GDP per capita", new_workbook.views[0].name) self.assertEqual("RESTAPISample_0/sheets/GDPpercapita", new_workbook.views[0].content_url) + self.assertEqual("REST API Testing", new_workbook.description) def test_publish_a_packaged_file_object(self) -> None: with open(PUBLISH_XML, "rb") as f: From eaedc29fe6a16a2060b3dbe32f9fa047f48b9994 Mon Sep 17 00:00:00 2001 From: ltiffanydev <148500608+ltiffanydev@users.noreply.github.com> Date: Mon, 4 Mar 2024 22:21:39 -0800 Subject: [PATCH 15/27] Add Data Acceleration and Data Freshness Policy support (#1343) * Add data acceleration & data freshness policy functions * Add unit tests and raise errors on missing params * fix types & spell checks * addressed some feedback * addressed feedback * cleanup code * Revert "Merge branch 'add_data_acceleration_and_data_freshness_policy_support' of https://github.com/tableau/server-client-python into add_data_acceleration_and_data_freshness_policy_support" This reverts commit 5b30e57d959ae80b8279d7eeb2e4f374fc111664, reversing changes made to 5789e32bd57f4459209da05003f1ccf4e93e01a1. 
* fix formatting * Address feedback * mypy & formatting changes --- samples/update_workbook_data_acceleration.py | 109 +++++++++ .../update_workbook_data_freshness_policy.py | 218 ++++++++++++++++++ tableauserverclient/__init__.py | 1 + tableauserverclient/models/__init__.py | 1 + .../models/data_freshness_policy_item.py | 210 +++++++++++++++++ .../models/property_decorators.py | 10 +- tableauserverclient/models/view_item.py | 34 +++ tableauserverclient/models/workbook_item.py | 34 ++- .../server/endpoint/workbooks_endpoint.py | 10 +- tableauserverclient/server/request_factory.py | 58 ++++- ...workbook_get_by_id_acceleration_status.xml | 19 ++ .../workbook_update_acceleration_status.xml | 16 ++ .../workbook_update_data_freshness_policy.xml | 9 + ...workbook_update_data_freshness_policy2.xml | 9 + ...workbook_update_data_freshness_policy3.xml | 11 + ...workbook_update_data_freshness_policy4.xml | 12 + ...workbook_update_data_freshness_policy5.xml | 16 ++ ...workbook_update_data_freshness_policy6.xml | 15 ++ ...kbook_update_views_acceleration_status.xml | 19 ++ test/test_data_freshness_policy.py | 189 +++++++++++++++ test/test_view_acceleration.py | 119 ++++++++++ 21 files changed, 1101 insertions(+), 18 deletions(-) create mode 100644 samples/update_workbook_data_acceleration.py create mode 100644 samples/update_workbook_data_freshness_policy.py create mode 100644 tableauserverclient/models/data_freshness_policy_item.py create mode 100644 test/assets/workbook_get_by_id_acceleration_status.xml create mode 100644 test/assets/workbook_update_acceleration_status.xml create mode 100644 test/assets/workbook_update_data_freshness_policy.xml create mode 100644 test/assets/workbook_update_data_freshness_policy2.xml create mode 100644 test/assets/workbook_update_data_freshness_policy3.xml create mode 100644 test/assets/workbook_update_data_freshness_policy4.xml create mode 100644 test/assets/workbook_update_data_freshness_policy5.xml create mode 100644 test/assets/workbook_update_data_freshness_policy6.xml create mode 100644 test/assets/workbook_update_views_acceleration_status.xml create mode 100644 test/test_data_freshness_policy.py create mode 100644 test/test_view_acceleration.py diff --git a/samples/update_workbook_data_acceleration.py b/samples/update_workbook_data_acceleration.py new file mode 100644 index 000000000..75f12262f --- /dev/null +++ b/samples/update_workbook_data_acceleration.py @@ -0,0 +1,109 @@ +#### +# This script demonstrates how to update workbook data acceleration using the Tableau +# Server Client. +# +# To run the script, you must have installed Python 3.7 or later. +#### + + +import argparse +import logging + +import tableauserverclient as TSC +from tableauserverclient import IntervalItem + + +def main(): + parser = argparse.ArgumentParser(description="Creates sample schedules for each type of frequency.") + # Common options; please keep those in sync across all samples + parser.add_argument("--server", "-s", help="server address") + parser.add_argument("--site", "-S", help="site name") + parser.add_argument("--token-name", "-p", help="name of the personal access token used to sign into the server") + parser.add_argument("--token-value", "-v", help="value of the personal access token used to sign into the server") + parser.add_argument( + "--logging-level", + "-l", + choices=["debug", "info", "error"], + default="error", + help="desired logging level (set to error by default)", + ) + # Options specific to this sample: + # This sample has no additional options, yet. 
If you add some, please add them here + + args = parser.parse_args() + + # Set logging level based on user input, or error by default + logging_level = getattr(logging, args.logging_level.upper()) + logging.basicConfig(level=logging_level) + + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=False) + server.add_http_options({"verify": False}) + server.use_server_version() + with server.auth.sign_in(tableau_auth): + # Get workbook + all_workbooks, pagination_item = server.workbooks.get() + print("\nThere are {} workbooks on site: ".format(pagination_item.total_available)) + print([workbook.name for workbook in all_workbooks]) + + if all_workbooks: + # Pick 1 workbook to try data acceleration. + # Note that data acceleration has a couple of requirements, please check the Tableau help page + # to verify your workbook/view is eligible for data acceleration. + + # Assuming 1st workbook is eligible for sample purposes + sample_workbook = all_workbooks[2] + + # Enable acceleration for all the views in the workbook + enable_config = dict() + enable_config["acceleration_enabled"] = True + enable_config["accelerate_now"] = True + + sample_workbook.data_acceleration_config = enable_config + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + # Since we did not set any specific view, we will enable all views in the workbook + print("Enable acceleration for all the views in the workbook " + updated.name + ".") + + # Disable acceleration on one of the view in the workbook + # You have to populate_views first, then set the views of the workbook + # to the ones you want to update. + server.workbooks.populate_views(sample_workbook) + view_to_disable = sample_workbook.views[0] + sample_workbook.views = [view_to_disable] + + disable_config = dict() + disable_config["acceleration_enabled"] = False + disable_config["accelerate_now"] = True + + sample_workbook.data_acceleration_config = disable_config + # To get the acceleration status on the response, set includeViewAccelerationStatus=true + # Note that you have to populate_views first to get the acceleration status, since + # acceleration status is per view basis (not per workbook) + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook, True) + view1 = updated.views[0] + print('Disabled acceleration for 1 view "' + view1.name + '" in the workbook ' + updated.name + ".") + + # Get acceleration status of the views in workbook using workbooks.get_by_id + # This won't need to do populate_views beforehand + my_workbook = server.workbooks.get_by_id(sample_workbook.id) + view1 = my_workbook.views[0] + view2 = my_workbook.views[1] + print( + "Fetching acceleration status for views in the workbook " + + updated.name + + ".\n" + + 'View "' + + view1.name + + '" has acceleration_status = ' + + view1.data_acceleration_config["acceleration_status"] + + ".\n" + + 'View "' + + view2.name + + '" has acceleration_status = ' + + view2.data_acceleration_config["acceleration_status"] + + "." + ) + + +if __name__ == "__main__": + main() diff --git a/samples/update_workbook_data_freshness_policy.py b/samples/update_workbook_data_freshness_policy.py new file mode 100644 index 000000000..9e4d63dc1 --- /dev/null +++ b/samples/update_workbook_data_freshness_policy.py @@ -0,0 +1,218 @@ +#### +# This script demonstrates how to update workbook data freshness policy using the Tableau +# Server Client. 
+# +# To run the script, you must have installed Python 3.7 or later. +#### + + +import argparse +import logging + +import tableauserverclient as TSC +from tableauserverclient import IntervalItem + + +def main(): + parser = argparse.ArgumentParser(description="Creates sample schedules for each type of frequency.") + # Common options; please keep those in sync across all samples + parser.add_argument("--server", "-s", help="server address") + parser.add_argument("--site", "-S", help="site name") + parser.add_argument("--token-name", "-p", help="name of the personal access token " "used to sign into the server") + parser.add_argument( + "--token-value", "-v", help="value of the personal access token " "used to sign into the server" + ) + parser.add_argument( + "--logging-level", + "-l", + choices=["debug", "info", "error"], + default="error", + help="desired logging level (set to error by default)", + ) + # Options specific to this sample: + # This sample has no additional options, yet. If you add some, please add them here + + args = parser.parse_args() + + # Set logging level based on user input, or error by default + logging_level = getattr(logging, args.logging_level.upper()) + logging.basicConfig(level=logging_level) + + tableau_auth = TSC.PersonalAccessTokenAuth(args.token_name, args.token_value, site_id=args.site) + server = TSC.Server(args.server, use_server_version=False) + server.add_http_options({"verify": False}) + server.use_server_version() + with server.auth.sign_in(tableau_auth): + # Get workbook + all_workbooks, pagination_item = server.workbooks.get() + print("\nThere are {} workbooks on site: ".format(pagination_item.total_available)) + print([workbook.name for workbook in all_workbooks]) + + if all_workbooks: + # Pick 1 workbook that has live datasource connection. + # Assuming 1st workbook met the criteria for sample purposes + # Data Freshness Policy is not available on extract & file-based datasource. + sample_workbook = all_workbooks[2] + + # Get more info from the workbook selected + # Troubleshoot: if sample_workbook_extended.data_freshness_policy.option returns with AttributeError + # it could mean the workbook selected does not have live connection, which means it doesn't have + # data freshness policy. Change to another workbook with live datasource connection. + sample_workbook_extended = server.workbooks.get_by_id(sample_workbook.id) + try: + print( + "Workbook " + + sample_workbook.name + + " has data freshness policy option set to: " + + sample_workbook_extended.data_freshness_policy.option + ) + except AttributeError as e: + print( + "Workbook does not have data freshness policy, possibly due to the workbook selected " + "does not have live connection. Change to another workbook using live datasource connection." 
+ ) + + # Update Workbook Data Freshness Policy to "AlwaysLive" + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.AlwaysLive + ) + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + ) + + # Update Workbook Data Freshness Policy to "SiteDefault" + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.SiteDefault + ) + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + ) + + # Update Workbook Data Freshness Policy to "FreshEvery" schedule. + # Set the schedule to be fresh every 10 hours + # Once the data_freshness_policy is already populated (e.g. due to previous calls), + # it is possible to directly change the option & other parameters directly like below + sample_workbook.data_freshness_policy.option = TSC.DataFreshnessPolicyItem.Option.FreshEvery + fresh_every_ten_hours = TSC.DataFreshnessPolicyItem.FreshEvery( + TSC.DataFreshnessPolicyItem.FreshEvery.Frequency.Hours, 10 + ) + sample_workbook.data_freshness_policy.fresh_every_schedule = fresh_every_ten_hours + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(updated.data_freshness_policy.fresh_every_schedule.value) + + " " + + updated.data_freshness_policy.fresh_every_schedule.frequency + ) + + # Update Workbook Data Freshness Policy to "FreshAt" schedule. 
+ # Set the schedule to be fresh at 10AM every day + sample_workbook.data_freshness_policy.option = TSC.DataFreshnessPolicyItem.Option.FreshAt + fresh_at_ten_daily = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Day, "10:00:00", "America/Los_Angeles" + ) + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_ten_daily + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(updated.data_freshness_policy.fresh_at_schedule.time) + + " every " + + updated.data_freshness_policy.fresh_at_schedule.frequency + ) + + # Set the schedule to be fresh at 6PM every week on Wednesday and Sunday + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_6pm_wed_sun = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Week, + "18:00:00", + "America/Los_Angeles", + [IntervalItem.Day.Wednesday, "Sunday"], + ) + + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_6pm_wed_sun + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + new_fresh_at_schedule = updated.data_freshness_policy.fresh_at_schedule + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(new_fresh_at_schedule.time) + + " every " + + new_fresh_at_schedule.frequency + + " on " + + new_fresh_at_schedule.interval_item[0] + + "," + + new_fresh_at_schedule.interval_item[1] + ) + + # Set the schedule to be fresh at 12AM every last day of the month + sample_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_last_day_of_month = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, "00:00:00", "America/Los_Angeles", ["LastDay"] + ) + + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_last_day_of_month + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + new_fresh_at_schedule = updated.data_freshness_policy.fresh_at_schedule + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(new_fresh_at_schedule.time) + + " every " + + new_fresh_at_schedule.frequency + + " on " + + new_fresh_at_schedule.interval_item[0] + ) + + # Set the schedule to be fresh at 8PM every 1st,13th,20th day of the month + fresh_at_dates_of_month = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, + "00:00:00", + "America/Los_Angeles", + ["1", "13", "20"], + ) + + sample_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_dates_of_month + updated: TSC.WorkbookItem = server.workbooks.update(sample_workbook) + new_fresh_at_schedule = updated.data_freshness_policy.fresh_at_schedule + print( + "Workbook " + + updated.name + + " updated data freshness policy option to: " + + updated.data_freshness_policy.option + + " with frequency of " + + str(new_fresh_at_schedule.time) + + " every " + + new_fresh_at_schedule.frequency + + " on " + + str(new_fresh_at_schedule.interval_item) + ) + + +if __name__ == "__main__": + main() diff --git a/tableauserverclient/__init__.py b/tableauserverclient/__init__.py index 
c5c3c1922..f093f521b 100644 --- a/tableauserverclient/__init__.py +++ b/tableauserverclient/__init__.py @@ -10,6 +10,7 @@ DailyInterval, DataAlertItem, DatabaseItem, + DataFreshnessPolicyItem, DatasourceItem, FavoriteItem, FlowItem, diff --git a/tableauserverclient/models/__init__.py b/tableauserverclient/models/__init__.py index 03d692583..e7a853d9a 100644 --- a/tableauserverclient/models/__init__.py +++ b/tableauserverclient/models/__init__.py @@ -5,6 +5,7 @@ from .data_acceleration_report_item import DataAccelerationReportItem from .data_alert_item import DataAlertItem from .database_item import DatabaseItem +from .data_freshness_policy_item import DataFreshnessPolicyItem from .datasource_item import DatasourceItem from .dqw_item import DQWItem from .exceptions import UnpopulatedPropertyError diff --git a/tableauserverclient/models/data_freshness_policy_item.py b/tableauserverclient/models/data_freshness_policy_item.py new file mode 100644 index 000000000..f567c501c --- /dev/null +++ b/tableauserverclient/models/data_freshness_policy_item.py @@ -0,0 +1,210 @@ +import xml.etree.ElementTree as ET + +from typing import Optional, Union, List +from tableauserverclient.models.property_decorators import property_is_enum, property_not_nullable +from .interval_item import IntervalItem + + +class DataFreshnessPolicyItem: + class Option: + AlwaysLive = "AlwaysLive" + SiteDefault = "SiteDefault" + FreshEvery = "FreshEvery" + FreshAt = "FreshAt" + + class FreshEvery: + class Frequency: + Minutes = "Minutes" + Hours = "Hours" + Days = "Days" + Weeks = "Weeks" + + def __init__(self, frequency: str, value: int): + self.frequency: str = frequency + self.value: int = value + + def __repr__(self): + return "".format(**vars(self)) + + @property + def frequency(self) -> str: + return self._frequency + + @frequency.setter + @property_is_enum(Frequency) + def frequency(self, value: str): + self._frequency = value + + @classmethod + def from_xml_element(cls, fresh_every_schedule_elem: ET.Element): + frequency = fresh_every_schedule_elem.get("frequency", None) + value_str = fresh_every_schedule_elem.get("value", None) + if (frequency is None) or (value_str is None): + return None + value = int(value_str) + return DataFreshnessPolicyItem.FreshEvery(frequency, value) + + class FreshAt: + class Frequency: + Day = "Day" + Week = "Week" + Month = "Month" + + def __init__(self, frequency: str, time: str, timezone, interval_item: Optional[List[str]] = None): + self.frequency = frequency + self.time = time + self.timezone = timezone + self.interval_item: Optional[List[str]] = interval_item + + def __repr__(self): + return ( + " timezone={_timezone} " "interval_item={_interval_time}" + ).format(**vars(self)) + + @property + def interval_item(self) -> Optional[List[str]]: + return self._interval_item + + @interval_item.setter + def interval_item(self, value: List[str]): + self._interval_item = value + + @property + def time(self): + return self._time + + @time.setter + @property_not_nullable + def time(self, value): + self._time = value + + @property + def timezone(self) -> str: + return self._timezone + + @timezone.setter + def timezone(self, value: str): + self._timezone = value + + @property + def frequency(self) -> str: + return self._frequency + + @frequency.setter + @property_is_enum(Frequency) + def frequency(self, value: str): + self._frequency = value + + @classmethod + def from_xml_element(cls, fresh_at_schedule_elem: ET.Element, ns): + frequency = fresh_at_schedule_elem.get("frequency", None) + time = 
fresh_at_schedule_elem.get("time", None) + if (frequency is None) or (time is None): + return None + timezone = fresh_at_schedule_elem.get("timezone", None) + interval = parse_intervals(fresh_at_schedule_elem, frequency, ns) + return DataFreshnessPolicyItem.FreshAt(frequency, time, timezone, interval) + + def __init__(self, option: str): + self.option = option + self.fresh_every_schedule: Optional[DataFreshnessPolicyItem.FreshEvery] = None + self.fresh_at_schedule: Optional[DataFreshnessPolicyItem.FreshAt] = None + + def __repr__(self): + return "".format(**vars(self)) + + @property + def option(self) -> str: + return self._option + + @option.setter + @property_is_enum(Option) + def option(self, value: str): + self._option = value + + @property + def fresh_every_schedule(self) -> Optional[FreshEvery]: + return self._fresh_every_schedule + + @fresh_every_schedule.setter + def fresh_every_schedule(self, value: FreshEvery): + self._fresh_every_schedule = value + + @property + def fresh_at_schedule(self) -> Optional[FreshAt]: + return self._fresh_at_schedule + + @fresh_at_schedule.setter + def fresh_at_schedule(self, value: FreshAt): + self._fresh_at_schedule = value + + @classmethod + def from_xml_element(cls, data_freshness_policy_elem, ns): + option = data_freshness_policy_elem.get("option", None) + if option is None: + return None + data_freshness_policy = DataFreshnessPolicyItem(option) + + fresh_at_schedule = None + fresh_every_schedule = None + if option == "FreshAt": + fresh_at_schedule_elem = data_freshness_policy_elem.find(".//t:freshAtSchedule", namespaces=ns) + fresh_at_schedule = DataFreshnessPolicyItem.FreshAt.from_xml_element(fresh_at_schedule_elem, ns) + data_freshness_policy.fresh_at_schedule = fresh_at_schedule + elif option == "FreshEvery": + fresh_every_schedule_elem = data_freshness_policy_elem.find(".//t:freshEverySchedule", namespaces=ns) + fresh_every_schedule = DataFreshnessPolicyItem.FreshEvery.from_xml_element(fresh_every_schedule_elem) + data_freshness_policy.fresh_every_schedule = fresh_every_schedule + + return data_freshness_policy + + +def parse_intervals(intervals_elem, frequency, ns): + interval_elems = intervals_elem.findall(".//t:intervals/t:interval", namespaces=ns) + interval = [] + for interval_elem in interval_elems: + interval.extend(interval_elem.attrib.items()) + + # No intervals expected for Day frequency + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Day: + return None + + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Week: + interval_values = [(i[1]).title() for i in interval] + return parse_week_intervals(interval_values) + + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Month: + interval_values = [(i[1]) for i in interval] + return parse_month_intervals(interval_values) + + +def parse_week_intervals(interval_values): + # Using existing IntervalItem.Day to check valid weekday string + if not all(hasattr(IntervalItem.Day, day) for day in interval_values): + raise ValueError("Invalid week day defined " + str(interval_values)) + return interval_values + + +def parse_month_intervals(interval_values): + error = "Invalid interval value for a monthly frequency: {}.".format(interval_values) + + # Month interval can have value either only ['LastDay'] or list of dates e.g. ["1", 20", "30"] + # First check if the list only have LastDay value. When using LastDay, there shouldn't be + # any other values, hence checking the first element of the list is enough. 
+ # If the value is not "LastDay", we assume intervals is on list of dates format. + # We created this function instead of using existing MonthlyInterval because we allow list of dates interval, + + intervals = [] + if interval_values[0] == "LastDay": + intervals.append(interval_values[0]) + else: + for interval in interval_values: + try: + if 1 <= int(interval) <= 31: + intervals.append(interval) + else: + raise ValueError(error) + except ValueError: + if interval_values[0] != "LastDay": + raise ValueError(error) + return intervals diff --git a/tableauserverclient/models/property_decorators.py b/tableauserverclient/models/property_decorators.py index 58c33699b..ce31b1428 100644 --- a/tableauserverclient/models/property_decorators.py +++ b/tableauserverclient/models/property_decorators.py @@ -147,15 +147,7 @@ def property_is_data_acceleration_config(func): def wrapper(self, value): if not isinstance(value, dict): raise ValueError("{} is not type 'dict', cannot update {})".format(value.__class__.__name__, func.__name__)) - if len(value) != 4 or not all( - attr in value.keys() - for attr in ( - "acceleration_enabled", - "accelerate_now", - "last_updated_at", - "acceleration_status", - ) - ): + if len(value) < 2 or not all(attr in value.keys() for attr in ("acceleration_enabled", "accelerate_now")): error = "{} should have 2 keys ".format(func.__name__) error += "'acceleration_enabled' and 'accelerate_now'" error += "instead you have {}".format(value.keys()) diff --git a/tableauserverclient/models/view_item.py b/tableauserverclient/models/view_item.py index 90cff490b..a26e364a3 100644 --- a/tableauserverclient/models/view_item.py +++ b/tableauserverclient/models/view_item.py @@ -31,6 +31,10 @@ def __init__(self) -> None: self._workbook_id: Optional[str] = None self._permissions: Optional[Callable[[], List[PermissionsRule]]] = None self.tags: Set[str] = set() + self._data_acceleration_config = { + "acceleration_enabled": None, + "acceleration_status": None, + } def __str__(self): return "".format( @@ -133,6 +137,14 @@ def updated_at(self) -> Optional[datetime]: def workbook_id(self) -> Optional[str]: return self._workbook_id + @property + def data_acceleration_config(self): + return self._data_acceleration_config + + @data_acceleration_config.setter + def data_acceleration_config(self, value): + self._data_acceleration_config = value + @property def permissions(self) -> List[PermissionsRule]: if self._permissions is None: @@ -164,6 +176,7 @@ def from_xml(cls, view_xml, ns, workbook_id="") -> "ViewItem": owner_elem = view_xml.find(".//t:owner", namespaces=ns) project_elem = view_xml.find(".//t:project", namespaces=ns) tags_elem = view_xml.find(".//t:tags", namespaces=ns) + data_acceleration_config_elem = view_xml.find(".//t:dataAccelerationConfig", namespaces=ns) view_item._created_at = parse_datetime(view_xml.get("createdAt", None)) view_item._updated_at = parse_datetime(view_xml.get("updatedAt", None)) view_item._id = view_xml.get("id", None) @@ -186,4 +199,25 @@ def from_xml(cls, view_xml, ns, workbook_id="") -> "ViewItem": tags = TagItem.from_xml_element(tags_elem, ns) view_item.tags = tags view_item._initial_tags = copy.copy(tags) + if data_acceleration_config_elem is not None: + data_acceleration_config = parse_data_acceleration_config(data_acceleration_config_elem) + view_item.data_acceleration_config = data_acceleration_config return view_item + + +def parse_data_acceleration_config(data_acceleration_elem): + data_acceleration_config = dict() + + acceleration_enabled = 
data_acceleration_elem.get("accelerationEnabled", None) + if acceleration_enabled is not None: + acceleration_enabled = string_to_bool(acceleration_enabled) + + acceleration_status = data_acceleration_elem.get("accelerationStatus", None) + + data_acceleration_config["acceleration_enabled"] = acceleration_enabled + data_acceleration_config["acceleration_status"] = acceleration_status + return data_acceleration_config + + +def string_to_bool(s: str) -> bool: + return s.lower() == "true" diff --git a/tableauserverclient/models/workbook_item.py b/tableauserverclient/models/workbook_item.py index 57ddf83f8..58fd2a9a9 100644 --- a/tableauserverclient/models/workbook_item.py +++ b/tableauserverclient/models/workbook_item.py @@ -17,6 +17,7 @@ from .revision_item import RevisionItem from .tag_item import TagItem from .view_item import ViewItem +from .data_freshness_policy_item import DataFreshnessPolicyItem class WorkbookItem(object): @@ -34,7 +35,7 @@ def __init__(self, project_id: Optional[str] = None, name: Optional[str] = None, self._revisions = None self._size = None self._updated_at = None - self._views = None + self._views: Optional[Callable[[], List[ViewItem]]] = None self.name = name self._description = None self.owner_id: Optional[str] = None @@ -49,6 +50,7 @@ def __init__(self, project_id: Optional[str] = None, name: Optional[str] = None, "last_updated_at": None, "acceleration_status": None, } + self.data_freshness_policy = None self._permissions = None return None @@ -166,6 +168,10 @@ def views(self) -> List[ViewItem]: # We had views included in a WorkbookItem response return self._views + @views.setter + def views(self, value): + self._views = value + @property def data_acceleration_config(self): return self._data_acceleration_config @@ -175,6 +181,15 @@ def data_acceleration_config(self): def data_acceleration_config(self, value): self._data_acceleration_config = value + @property + def data_freshness_policy(self): + return self._data_freshness_policy + + @data_freshness_policy.setter + # @property_is_data_freshness_policy + def data_freshness_policy(self, value): + self._data_freshness_policy = value + @property def revisions(self) -> List[RevisionItem]: if self._revisions is None: @@ -221,8 +236,9 @@ def _parse_common_tags(self, workbook_xml, ns): project_name, owner_id, _, - _, + views, data_acceleration_config, + data_freshness_policy, ) = self._parse_element(workbook_xml, ns) self._set_values( @@ -239,8 +255,9 @@ def _parse_common_tags(self, workbook_xml, ns): project_name, owner_id, None, - None, + views, data_acceleration_config, + data_freshness_policy, ) return self @@ -262,6 +279,7 @@ def _set_values( tags, views, data_acceleration_config, + data_freshness_policy, ): if id is not None: self._id = id @@ -290,10 +308,12 @@ def _set_values( if tags: self.tags = tags self._initial_tags = copy.copy(tags) - if views: + if views is not None: self._views = views if data_acceleration_config is not None: self.data_acceleration_config = data_acceleration_config + if data_freshness_policy is not None: + self.data_freshness_policy = data_freshness_policy @classmethod def from_response(cls, resp: str, ns: Dict[str, str]) -> List["WorkbookItem"]: @@ -360,6 +380,11 @@ def _parse_element(workbook_xml, ns): if data_acceleration_elem is not None: data_acceleration_config = parse_data_acceleration_config(data_acceleration_elem) + data_freshness_policy = None + data_freshness_policy_elem = workbook_xml.find(".//t:dataFreshnessPolicy", namespaces=ns) + if data_freshness_policy_elem is not None: + 
data_freshness_policy = DataFreshnessPolicyItem.from_xml_element(data_freshness_policy_elem, ns) + return ( id, name, @@ -376,6 +401,7 @@ def _parse_element(workbook_xml, ns): tags, views, data_acceleration_config, + data_freshness_policy, ) diff --git a/tableauserverclient/server/endpoint/workbooks_endpoint.py b/tableauserverclient/server/endpoint/workbooks_endpoint.py index 393a028c8..bc535b2d6 100644 --- a/tableauserverclient/server/endpoint/workbooks_endpoint.py +++ b/tableauserverclient/server/endpoint/workbooks_endpoint.py @@ -137,7 +137,12 @@ def delete(self, workbook_id: str) -> None: # Update workbook @api(version="2.0") - def update(self, workbook_item: WorkbookItem) -> WorkbookItem: + @parameter_added_in(include_view_acceleration_status="3.22") + def update( + self, + workbook_item: WorkbookItem, + include_view_acceleration_status: bool = False, + ) -> WorkbookItem: if not workbook_item.id: error = "Workbook item missing ID. Workbook must be retrieved from server first." raise MissingRequiredFieldError(error) @@ -146,6 +151,9 @@ def update(self, workbook_item: WorkbookItem) -> WorkbookItem: # Update the workbook itself url = "{0}/{1}".format(self.baseurl, workbook_item.id) + if include_view_acceleration_status: + url += "?includeViewAccelerationStatus=True" + update_req = RequestFactory.Workbook.update_req(workbook_item) server_response = self.put_request(url, update_req) logger.info("Updated workbook item (ID: {0})".format(workbook_item.id)) diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 70d2b30fc..1f6dfbfc6 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -57,6 +57,11 @@ def _add_hiddenview_element(views_element, view_name): view_element.attrib["hidden"] = "true" +def _add_view_element(views_element, view_id): + view_element = ET.SubElement(views_element, "view") + view_element.attrib["id"] = view_id + + def _add_credentials_element(parent_element, connection_credentials): credentials_element = ET.SubElement(parent_element, "connectionCredentials") if connection_credentials.password is None or connection_credentials.name is None: @@ -944,16 +949,61 @@ def update_req(self, workbook_item): if workbook_item.owner_id: owner_element = ET.SubElement(workbook_element, "owner") owner_element.attrib["id"] = workbook_item.owner_id - if workbook_item.data_acceleration_config["acceleration_enabled"] is not None: + if workbook_item._views is not None: + views_element = ET.SubElement(workbook_element, "views") + for view in workbook_item.views: + _add_view_element(views_element, view.id) + if workbook_item.data_acceleration_config: data_acceleration_config = workbook_item.data_acceleration_config data_acceleration_element = ET.SubElement(workbook_element, "dataAccelerationConfig") - data_acceleration_element.attrib["accelerationEnabled"] = str( - data_acceleration_config["acceleration_enabled"] - ).lower() + if data_acceleration_config["acceleration_enabled"] is not None: + data_acceleration_element.attrib["accelerationEnabled"] = str( + data_acceleration_config["acceleration_enabled"] + ).lower() if data_acceleration_config["accelerate_now"] is not None: data_acceleration_element.attrib["accelerateNow"] = str( data_acceleration_config["accelerate_now"] ).lower() + if workbook_item.data_freshness_policy is not None: + data_freshness_policy_config = workbook_item.data_freshness_policy + data_freshness_policy_element = ET.SubElement(workbook_element, 
"dataFreshnessPolicy") + data_freshness_policy_element.attrib["option"] = str(data_freshness_policy_config.option) + # Fresh Every Schedule + if data_freshness_policy_config.option == "FreshEvery": + if data_freshness_policy_config.fresh_every_schedule is not None: + fresh_every_element = ET.SubElement(data_freshness_policy_element, "freshEverySchedule") + fresh_every_element.attrib[ + "frequency" + ] = data_freshness_policy_config.fresh_every_schedule.frequency + fresh_every_element.attrib["value"] = str(data_freshness_policy_config.fresh_every_schedule.value) + else: + raise ValueError(f"data_freshness_policy_config.fresh_every_schedule must be populated.") + # Fresh At Schedule + if data_freshness_policy_config.option == "FreshAt": + if data_freshness_policy_config.fresh_at_schedule is not None: + fresh_at_element = ET.SubElement(data_freshness_policy_element, "freshAtSchedule") + frequency = data_freshness_policy_config.fresh_at_schedule.frequency + fresh_at_element.attrib["frequency"] = frequency + fresh_at_element.attrib["time"] = str(data_freshness_policy_config.fresh_at_schedule.time) + fresh_at_element.attrib["timezone"] = str(data_freshness_policy_config.fresh_at_schedule.timezone) + intervals = data_freshness_policy_config.fresh_at_schedule.interval_item + # Fresh At Schedule intervals if Frequency is Week or Month + if frequency != DataFreshnessPolicyItem.FreshAt.Frequency.Day: + if intervals is not None: + # if intervals is not None or frequency != DataFreshnessPolicyItem.FreshAt.Frequency.Day: + intervals_element = ET.SubElement(fresh_at_element, "intervals") + for interval in intervals: + expression = IntervalItem.Occurrence.WeekDay + if frequency == DataFreshnessPolicyItem.FreshAt.Frequency.Month: + expression = IntervalItem.Occurrence.MonthDay + single_interval_element = ET.SubElement(intervals_element, "interval") + single_interval_element.attrib[expression] = interval + else: + raise ValueError( + f"fresh_at_schedule.interval_item must be populated for " f"Week & Month frequency." 
+ ) + else: + raise ValueError(f"data_freshness_policy_config.fresh_at_schedule must be populated.") return ET.tostring(xml_request) diff --git a/test/assets/workbook_get_by_id_acceleration_status.xml b/test/assets/workbook_get_by_id_acceleration_status.xml new file mode 100644 index 000000000..0d1f9b93d --- /dev/null +++ b/test/assets/workbook_get_by_id_acceleration_status.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + diff --git a/test/assets/workbook_update_acceleration_status.xml b/test/assets/workbook_update_acceleration_status.xml new file mode 100644 index 000000000..7c3366fee --- /dev/null +++ b/test/assets/workbook_update_acceleration_status.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy.xml b/test/assets/workbook_update_data_freshness_policy.xml new file mode 100644 index 000000000..a69a097ba --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy2.xml b/test/assets/workbook_update_data_freshness_policy2.xml new file mode 100644 index 000000000..384f79ec0 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy2.xml @@ -0,0 +1,9 @@ + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy3.xml b/test/assets/workbook_update_data_freshness_policy3.xml new file mode 100644 index 000000000..195013517 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy3.xml @@ -0,0 +1,11 @@ + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy4.xml b/test/assets/workbook_update_data_freshness_policy4.xml new file mode 100644 index 000000000..8208d986a --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy4.xml @@ -0,0 +1,12 @@ + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy5.xml b/test/assets/workbook_update_data_freshness_policy5.xml new file mode 100644 index 000000000..b6e0358b6 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy5.xml @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_data_freshness_policy6.xml b/test/assets/workbook_update_data_freshness_policy6.xml new file mode 100644 index 000000000..c8be8f6c1 --- /dev/null +++ b/test/assets/workbook_update_data_freshness_policy6.xml @@ -0,0 +1,15 @@ + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/assets/workbook_update_views_acceleration_status.xml b/test/assets/workbook_update_views_acceleration_status.xml new file mode 100644 index 000000000..f2055fb79 --- /dev/null +++ b/test/assets/workbook_update_views_acceleration_status.xml @@ -0,0 +1,19 @@ + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/test_data_freshness_policy.py b/test/test_data_freshness_policy.py new file mode 100644 index 000000000..9591a6380 --- /dev/null +++ b/test/test_data_freshness_policy.py @@ -0,0 +1,189 @@ +import os +import requests_mock +import unittest + +import tableauserverclient as TSC + +TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), "assets") + +UPDATE_DFP_ALWAYS_LIVE_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy.xml") +UPDATE_DFP_SITE_DEFAULT_XML = os.path.join(TEST_ASSET_DIR, 
"workbook_update_data_freshness_policy2.xml") +UPDATE_DFP_FRESH_EVERY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy3.xml") +UPDATE_DFP_FRESH_AT_DAILY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy4.xml") +UPDATE_DFP_FRESH_AT_WEEKLY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy5.xml") +UPDATE_DFP_FRESH_AT_MONTHLY_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_data_freshness_policy6.xml") + + +class WorkbookTests(unittest.TestCase): + def setUp(self) -> None: + self.server = TSC.Server("http://test", False) + + # Fake sign in + self.server._site_id = "dad65087-b08b-4603-af4e-2887b8aafc67" + self.server._auth_token = "j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM" + + self.baseurl = self.server.workbooks.baseurl + + def test_update_DFP_always_live(self) -> None: + with open(UPDATE_DFP_ALWAYS_LIVE_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.AlwaysLive + ) + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("AlwaysLive", single_workbook.data_freshness_policy.option) + + def test_update_DFP_site_default(self) -> None: + with open(UPDATE_DFP_SITE_DEFAULT_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.SiteDefault + ) + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("SiteDefault", single_workbook.data_freshness_policy.option) + + def test_update_DFP_fresh_every(self) -> None: + with open(UPDATE_DFP_FRESH_EVERY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshEvery + ) + fresh_every_ten_hours = TSC.DataFreshnessPolicyItem.FreshEvery( + TSC.DataFreshnessPolicyItem.FreshEvery.Frequency.Hours, 10 + ) + single_workbook.data_freshness_policy.fresh_every_schedule = fresh_every_ten_hours + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshEvery", single_workbook.data_freshness_policy.option) + self.assertEqual("Hours", single_workbook.data_freshness_policy.fresh_every_schedule.frequency) + self.assertEqual(10, single_workbook.data_freshness_policy.fresh_every_schedule.value) + + def 
test_update_DFP_fresh_every_missing_attributes(self) -> None: + with open(UPDATE_DFP_FRESH_EVERY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshEvery + ) + + self.assertRaises(ValueError, self.server.workbooks.update, single_workbook) + + def test_update_DFP_fresh_at_day(self) -> None: + with open(UPDATE_DFP_FRESH_AT_DAILY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_10pm_daily = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Day, "22:00:00", " Asia/Singapore" + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_10pm_daily + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshAt", single_workbook.data_freshness_policy.option) + self.assertEqual("Day", single_workbook.data_freshness_policy.fresh_at_schedule.frequency) + self.assertEqual("22:00:00", single_workbook.data_freshness_policy.fresh_at_schedule.time) + self.assertEqual("Asia/Singapore", single_workbook.data_freshness_policy.fresh_at_schedule.timezone) + + def test_update_DFP_fresh_at_week(self) -> None: + with open(UPDATE_DFP_FRESH_AT_WEEKLY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_10am_mon_wed = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Week, + "10:00:00", + "America/Los_Angeles", + ["Monday", "Wednesday"], + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_10am_mon_wed + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshAt", single_workbook.data_freshness_policy.option) + self.assertEqual("Week", single_workbook.data_freshness_policy.fresh_at_schedule.frequency) + self.assertEqual("10:00:00", single_workbook.data_freshness_policy.fresh_at_schedule.time) + self.assertEqual("Wednesday", single_workbook.data_freshness_policy.fresh_at_schedule.interval_item[0]) + self.assertEqual("Monday", single_workbook.data_freshness_policy.fresh_at_schedule.interval_item[1]) + + def test_update_DFP_fresh_at_month(self) -> None: + with open(UPDATE_DFP_FRESH_AT_MONTHLY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl 
+ "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_00am_lastDayOfMonth = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, "00:00:00", "America/Los_Angeles", ["LastDay"] + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_00am_lastDayOfMonth + single_workbook = self.server.workbooks.update(single_workbook) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("FreshAt", single_workbook.data_freshness_policy.option) + self.assertEqual("Month", single_workbook.data_freshness_policy.fresh_at_schedule.frequency) + self.assertEqual("00:00:00", single_workbook.data_freshness_policy.fresh_at_schedule.time) + self.assertEqual("LastDay", single_workbook.data_freshness_policy.fresh_at_schedule.interval_item[0]) + + def test_update_DFP_fresh_at_missing_params(self) -> None: + with open(UPDATE_DFP_FRESH_AT_DAILY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + + self.assertRaises(ValueError, self.server.workbooks.update, single_workbook) + + def test_update_DFP_fresh_at_missing_interval(self) -> None: + with open(UPDATE_DFP_FRESH_AT_DAILY_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_freshness_policy = TSC.DataFreshnessPolicyItem( + TSC.DataFreshnessPolicyItem.Option.FreshAt + ) + fresh_at_month_no_interval = TSC.DataFreshnessPolicyItem.FreshAt( + TSC.DataFreshnessPolicyItem.FreshAt.Frequency.Month, "00:00:00", "America/Los_Angeles" + ) + single_workbook.data_freshness_policy.fresh_at_schedule = fresh_at_month_no_interval + + self.assertRaises(ValueError, self.server.workbooks.update, single_workbook) diff --git a/test/test_view_acceleration.py b/test/test_view_acceleration.py new file mode 100644 index 000000000..6f94f0c10 --- /dev/null +++ b/test/test_view_acceleration.py @@ -0,0 +1,119 @@ +import os +import requests_mock +import unittest + +import tableauserverclient as TSC +from tableauserverclient.datetime_helpers import format_datetime + +TEST_ASSET_DIR = os.path.join(os.path.dirname(__file__), "assets") + +GET_BY_ID_ACCELERATION_STATUS_XML = os.path.join(TEST_ASSET_DIR, "workbook_get_by_id_acceleration_status.xml") +POPULATE_VIEWS_XML = os.path.join(TEST_ASSET_DIR, "workbook_populate_views.xml") +UPDATE_VIEWS_ACCELERATION_STATUS_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_views_acceleration_status.xml") +UPDATE_WORKBOOK_ACCELERATION_STATUS_XML = os.path.join(TEST_ASSET_DIR, "workbook_update_acceleration_status.xml") + + +class WorkbookTests(unittest.TestCase): + def setUp(self) -> None: 
+ self.server = TSC.Server("http://test", False) + + # Fake sign in + self.server._site_id = "dad65087-b08b-4603-af4e-2887b8aafc67" + self.server._auth_token = "j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM" + + self.baseurl = self.server.workbooks.baseurl + + def test_get_by_id(self) -> None: + with open(GET_BY_ID_ACCELERATION_STATUS_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.get(self.baseurl + "/3cc6cd06-89ce-4fdc-b935-5294135d6d42", text=response_xml) + single_workbook = self.server.workbooks.get_by_id("3cc6cd06-89ce-4fdc-b935-5294135d6d42") + + self.assertEqual("3cc6cd06-89ce-4fdc-b935-5294135d6d42", single_workbook.id) + self.assertEqual("SafariSample", single_workbook.name) + self.assertEqual("SafariSample", single_workbook.content_url) + self.assertEqual("http://tableauserver/#/workbooks/2/views", single_workbook.webpage_url) + self.assertEqual(False, single_workbook.show_tabs) + self.assertEqual(26, single_workbook.size) + self.assertEqual("2016-07-26T20:34:56Z", format_datetime(single_workbook.created_at)) + self.assertEqual("description for SafariSample", single_workbook.description) + self.assertEqual("2016-07-26T20:35:05Z", format_datetime(single_workbook.updated_at)) + self.assertEqual("ee8c6e70-43b6-11e6-af4f-f7b0d8e20760", single_workbook.project_id) + self.assertEqual("default", single_workbook.project_name) + self.assertEqual("5de011f8-5aa9-4d5b-b991-f462c8dd6bb7", single_workbook.owner_id) + self.assertEqual(set(["Safari", "Sample"]), single_workbook.tags) + self.assertEqual("d79634e1-6063-4ec9-95ff-50acbf609ff5", single_workbook.views[0].id) + self.assertEqual("ENDANGERED SAFARI", single_workbook.views[0].name) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI", single_workbook.views[0].content_url) + self.assertEqual(True, single_workbook.views[0].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Enabled", single_workbook.views[0].data_acceleration_config["acceleration_status"]) + self.assertEqual("d79634e1-6063-4ec9-95ff-50acbf609ff9", single_workbook.views[1].id) + self.assertEqual("ENDANGERED SAFARI 2", single_workbook.views[1].name) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI2", single_workbook.views[1].content_url) + self.assertEqual(False, single_workbook.views[1].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Suspended", single_workbook.views[1].data_acceleration_config["acceleration_status"]) + + def test_update_workbook_acceleration(self) -> None: + with open(UPDATE_WORKBOOK_ACCELERATION_STATUS_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_acceleration_config = { + "acceleration_enabled": True, + "accelerate_now": False, + "last_updated_at": None, + "acceleration_status": None, + } + # update with parameter includeViewAccelerationStatus=True + single_workbook = self.server.workbooks.update(single_workbook, True) + + self.assertEqual("1f951daf-4061-451a-9df1-69a8062664f2", single_workbook.id) + self.assertEqual("1d0304cd-3796-429f-b815-7258370b9b74", single_workbook.project_id) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI", single_workbook.views[0].content_url) + self.assertEqual(True, 
single_workbook.views[0].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", single_workbook.views[0].data_acceleration_config["acceleration_status"]) + self.assertEqual("d79634e1-6063-4ec9-95ff-50acbf609ff9", single_workbook.views[1].id) + self.assertEqual("ENDANGERED SAFARI 2", single_workbook.views[1].name) + self.assertEqual("SafariSample/sheets/ENDANGEREDSAFARI2", single_workbook.views[1].content_url) + self.assertEqual(True, single_workbook.views[1].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", single_workbook.views[1].data_acceleration_config["acceleration_status"]) + + def test_update_views_acceleration(self) -> None: + with open(POPULATE_VIEWS_XML, "rb") as f: + views_xml = f.read().decode("utf-8") + with open(UPDATE_VIEWS_ACCELERATION_STATUS_XML, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.get(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2/views", text=views_xml) + m.put(self.baseurl + "/1f951daf-4061-451a-9df1-69a8062664f2", text=response_xml) + single_workbook = TSC.WorkbookItem("1d0304cd-3796-429f-b815-7258370b9b74", show_tabs=True) + single_workbook._id = "1f951daf-4061-451a-9df1-69a8062664f2" + single_workbook.data_acceleration_config = { + "acceleration_enabled": False, + "accelerate_now": False, + "last_updated_at": None, + "acceleration_status": None, + } + self.server.workbooks.populate_views(single_workbook) + single_workbook.views = [single_workbook.views[1], single_workbook.views[2]] + # update with parameter includeViewAccelerationStatus=True + single_workbook = self.server.workbooks.update(single_workbook, True) + + views_list = single_workbook.views + self.assertEqual("097dbe13-de89-445f-b2c3-02f28bd010c1", views_list[0].id) + self.assertEqual("GDP per capita", views_list[0].name) + self.assertEqual(False, views_list[0].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Disabled", views_list[0].data_acceleration_config["acceleration_status"]) + + self.assertEqual("2c1ab9d7-8d64-4cc6-b495-52e40c60c330", views_list[1].id) + self.assertEqual("Country ranks", views_list[1].name) + self.assertEqual(True, views_list[1].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", views_list[1].data_acceleration_config["acceleration_status"]) + + self.assertEqual("0599c28c-6d82-457e-a453-e52c1bdb00f5", views_list[2].id) + self.assertEqual("Interest rates", views_list[2].name) + self.assertEqual(True, views_list[2].data_acceleration_config["acceleration_enabled"]) + self.assertEqual("Pending", views_list[2].data_acceleration_config["acceleration_status"]) From 114214beb947db6bf74926337bb14fbd8e7d1c45 Mon Sep 17 00:00:00 2001 From: Brian Cantoni Date: Fri, 26 Apr 2024 18:27:19 -0700 Subject: [PATCH 16/27] Improve robustness of Pager results In some cases, Tableau Server might have a difference between the advertised total number of objects and the actual number returned via the Pager. This change adds one more check to prevent errors from happening in these situations.
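For context, a minimal usage sketch of the Pager loop this guard protects (not part of the patch; the server URL, credentials, and site name are placeholders):

    import tableauserverclient as TSC

    # Placeholder server and credentials for illustration only.
    server = TSC.Server("https://tableau.example.com", use_server_version=True)
    auth = TSC.TableauAuth("username", "password", site_id="mysite")

    with server.auth.sign_in(auth):
        # Pager pages through server.workbooks.get() behind the scenes; when the
        # server advertises more items than it actually returns, the extra check
        # added below keeps the loop from requesting a page past the real end.
        for workbook in TSC.Pager(server.workbooks):
            print(workbook.name)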
Fixes #1304 --- tableauserverclient/server/pager.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/tableauserverclient/server/pager.py b/tableauserverclient/server/pager.py index b65d75ae5..3220f5372 100644 --- a/tableauserverclient/server/pager.py +++ b/tableauserverclient/server/pager.py @@ -47,7 +47,11 @@ def __iter__(self): # Get the rest on demand as a generator while self._count < last_pagination_item.total_available: - if len(current_item_list) == 0: + if ( + len(current_item_list) == 0 + and (last_pagination_item.page_number * last_pagination_item.page_size) + < last_pagination_item.total_available + ): current_item_list, last_pagination_item = self._load_next_page(last_pagination_item) try: From bdce9822ffbac122b5a7072497fe1e841084c012 Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Tue, 7 May 2024 21:41:32 -0700 Subject: [PATCH 17/27] Add Cloud Flow Task endpoint --- tableauserverclient/models/task_item.py | 1 + .../server/endpoint/flow_task_endpoint.py | 29 +++++++++ tableauserverclient/server/request_factory.py | 37 +++++++++++ tableauserverclient/server/server.py | 2 + test/test_flowtask.py | 61 +++++++++++++++++++ 5 files changed, 130 insertions(+) create mode 100644 tableauserverclient/server/endpoint/flow_task_endpoint.py create mode 100644 test/test_flowtask.py diff --git a/tableauserverclient/models/task_item.py b/tableauserverclient/models/task_item.py index 0ffc3bfab..01cfcfb11 100644 --- a/tableauserverclient/models/task_item.py +++ b/tableauserverclient/models/task_item.py @@ -18,6 +18,7 @@ class Type: _TASK_TYPE_MAPPING = { "RefreshExtractTask": Type.ExtractRefresh, "MaterializeViewsTask": Type.DataAcceleration, + "RunFlowTask": Type.RunFlow, } def __init__( diff --git a/tableauserverclient/server/endpoint/flow_task_endpoint.py b/tableauserverclient/server/endpoint/flow_task_endpoint.py new file mode 100644 index 000000000..1e53b22f1 --- /dev/null +++ b/tableauserverclient/server/endpoint/flow_task_endpoint.py @@ -0,0 +1,29 @@ +import logging +from typing import List, Optional, Tuple, TYPE_CHECKING + +from tableauserverclient.server.endpoint.endpoint import Endpoint, api +from tableauserverclient.server.endpoint.exceptions import MissingRequiredFieldError +from tableauserverclient.models import TaskItem, PaginationItem +from tableauserverclient.server import RequestFactory + +from tableauserverclient.helpers.logging import logger + +if TYPE_CHECKING: + from tableauserverclient.server.request_options import RequestOptions + + +class FlowTasks(Endpoint): + @property + def baseurl(self) -> str: + return "{0}/sites/{1}/tasks/flows".format(self.parent_srv.baseurl, self.parent_srv.site_id) + + @api(version="3.22") + def create(self, flow_item: TaskItem) -> TaskItem: + if not flow_item: + error = "No flow provided" + raise ValueError(error) + logger.info("Creating an flow task %s", flow_item) + url = self.baseurl + create_req = RequestFactory.Task.create_flow_task_req(flow_item) + server_response = self.post_request(url, create_req) + return server_response.content \ No newline at end of file diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 1f6dfbfc6..904df1215 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -1113,6 +1113,43 @@ def create_extract_req(self, xml_request: ET.Element, extract_item: "TaskItem") return ET.tostring(xml_request) +class FlowTaskRequest(object): + @_tsrequest_wrapped + def run_req(self, xml_request, 
task_item): + # Send an empty tsRequest + pass + + @_tsrequest_wrapped + def create_flow_task_req(self, xml_request: ET.Element, flow_item: "TaskItem") -> bytes: + flow_element = ET.SubElement(xml_request, "runFlow") + + # Main attributes + flow_element.attrib["type"] = flow_item.task_type + + if flow_item.target is not None: + target_element = ET.SubElement(flow_element, flow_item.target.type) + target_element.attrib["id"] = flow_item.target.id + + if flow_item.schedule_item is None: + return ET.tostring(xml_request) + + # Schedule attributes + schedule_element = ET.SubElement(xml_request, "schedule") + + interval_item = flow_item.schedule_item.interval_item + schedule_element.attrib["frequency"] = interval_item._frequency + frequency_element = ET.SubElement(schedule_element, "frequencyDetails") + frequency_element.attrib["start"] = str(interval_item.start_time) + if hasattr(interval_item, "end_time") and interval_item.end_time is not None: + frequency_element.attrib["end"] = str(interval_item.end_time) + if hasattr(interval_item, "interval") and interval_item.interval: + intervals_element = ET.SubElement(frequency_element, "intervals") + for interval in interval_item._interval_type_pairs(): # type: ignore + expression, value = interval + single_interval_element = ET.SubElement(intervals_element, "interval") + single_interval_element.attrib[expression] = value + + return ET.tostring(xml_request) class SubscriptionRequest(object): @_tsrequest_wrapped diff --git a/tableauserverclient/server/server.py b/tableauserverclient/server/server.py index ee23789b1..3a6831458 100644 --- a/tableauserverclient/server/server.py +++ b/tableauserverclient/server/server.py @@ -25,6 +25,7 @@ Databases, Tables, Flows, + FlowTasks, Webhooks, DataAccelerationReport, Favorites, @@ -82,6 +83,7 @@ def __init__(self, server_address, use_server_version=False, http_options=None, self.datasources = Datasources(self) self.favorites = Favorites(self) self.flows = Flows(self) + self.flow_tasks = FlowTasks(self) self.projects = Projects(self) self.schedules = Schedules(self) self.server_info = ServerInfo(self) diff --git a/test/test_flowtask.py b/test/test_flowtask.py new file mode 100644 index 000000000..aaa4b0932 --- /dev/null +++ b/test/test_flowtask.py @@ -0,0 +1,61 @@ +import os +import unittest +from datetime import time +from pathlib import Path + +import requests_mock + +import tableauserverclient as TSC +from tableauserverclient.datetime_helpers import parse_datetime +from tableauserverclient.models.task_item import TaskItem + +TEST_ASSET_DIR = Path(__file__).parent / "assets" + +GET_XML_NO_WORKBOOK = os.path.join(TEST_ASSET_DIR, "tasks_no_workbook_or_datasource.xml") +GET_XML_WITH_WORKBOOK = os.path.join(TEST_ASSET_DIR, "tasks_with_workbook.xml") +GET_XML_WITH_DATASOURCE = os.path.join(TEST_ASSET_DIR, "tasks_with_datasource.xml") +GET_XML_RUN_NOW_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_run_now_response.xml") +GET_XML_CREATE_TASK_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_create_extract_task.xml") +GET_XML_WITHOUT_SCHEDULE = TEST_ASSET_DIR / "tasks_without_schedule.xml" +GET_XML_WITH_INTERVAL = TEST_ASSET_DIR / "tasks_with_interval.xml" + +GET_XML_CREATE_FLOW_TASK_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_create_flow_task.xml") + + + +class TaskTests(unittest.TestCase): + def setUp(self): + self.server = TSC.Server("http://test", False) + self.server.version = "3.22" + + # Fake Signin + self.server._site_id = "dad65087-b08b-4603-af4e-2887b8aafc67" + self.server._auth_token = 
"j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM" + + # default task type is extractRefreshes TODO change this + # self.baseurl = "{}/{}".format(self.server.tasks.baseurl, "extractRefreshes") + self.baseurl = self.server.flow_tasks.baseurl + + def test_create_flow_task(self): + monthly_interval = TSC.MonthlyInterval(start_time=time(23, 30), interval_value=15) + monthly_schedule = TSC.ScheduleItem( + None, + None, + None, + None, + monthly_interval, + ) + target_item = TSC.Target("flow_id", "flow") + + task = TaskItem(schedule_item=monthly_schedule, target=target_item) + # task = TaskItem(None, "FullRefresh", None, schedule_item=monthly_schedule, target=target_item) + + with open(GET_XML_CREATE_FLOW_TASK_RESPONSE, "rb") as f: + response_xml = f.read().decode("utf-8") + with requests_mock.mock() as m: + m.post("{}".format(self.baseurl), text=response_xml) + create_response_content = self.server.flow_tasks.create(task).decode("utf-8") + + self.assertTrue("task_id" in create_response_content) + self.assertTrue("flow_id" in create_response_content) + #self.assertTrue("FullRefresh" in create_response_content) From 67812858dd4ce43154d8ce9e22fbdc069875ffce Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 11:44:54 -0700 Subject: [PATCH 18/27] cleanup --- tableauserverclient/server/endpoint/__init__.py | 1 + tableauserverclient/server/endpoint/flow_task_endpoint.py | 2 +- tableauserverclient/server/request_factory.py | 1 + test/test_flowtask.py | 4 ---- 4 files changed, 3 insertions(+), 5 deletions(-) diff --git a/tableauserverclient/server/endpoint/__init__.py b/tableauserverclient/server/endpoint/__init__.py index c018d8334..b2f291369 100644 --- a/tableauserverclient/server/endpoint/__init__.py +++ b/tableauserverclient/server/endpoint/__init__.py @@ -10,6 +10,7 @@ from .fileuploads_endpoint import Fileuploads from .flow_runs_endpoint import FlowRuns from .flows_endpoint import Flows +from .flow_task_endpoint import FlowTasks from .groups_endpoint import Groups from .jobs_endpoint import Jobs from .metadata_endpoint import Metadata diff --git a/tableauserverclient/server/endpoint/flow_task_endpoint.py b/tableauserverclient/server/endpoint/flow_task_endpoint.py index 1e53b22f1..18a9c2550 100644 --- a/tableauserverclient/server/endpoint/flow_task_endpoint.py +++ b/tableauserverclient/server/endpoint/flow_task_endpoint.py @@ -24,6 +24,6 @@ def create(self, flow_item: TaskItem) -> TaskItem: raise ValueError(error) logger.info("Creating an flow task %s", flow_item) url = self.baseurl - create_req = RequestFactory.Task.create_flow_task_req(flow_item) + create_req = RequestFactory.FlowTask.create_flow_task_req(flow_item) server_response = self.post_request(url, create_req) return server_response.content \ No newline at end of file diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 904df1215..825451187 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -1290,6 +1290,7 @@ class RequestFactory(object): Favorite = FavoriteRequest() Fileupload = FileuploadRequest() Flow = FlowRequest() + FlowTask = FlowTaskRequest() Group = GroupRequest() Metric = MetricRequest() Permission = PermissionRequest() diff --git a/test/test_flowtask.py b/test/test_flowtask.py index aaa4b0932..8588d5701 100644 --- a/test/test_flowtask.py +++ b/test/test_flowtask.py @@ -32,8 +32,6 @@ def setUp(self): self.server._site_id = "dad65087-b08b-4603-af4e-2887b8aafc67" self.server._auth_token = 
"j80k54ll2lfMZ0tv97mlPvvSCRyD0DOM" - # default task type is extractRefreshes TODO change this - # self.baseurl = "{}/{}".format(self.server.tasks.baseurl, "extractRefreshes") self.baseurl = self.server.flow_tasks.baseurl def test_create_flow_task(self): @@ -48,7 +46,6 @@ def test_create_flow_task(self): target_item = TSC.Target("flow_id", "flow") task = TaskItem(schedule_item=monthly_schedule, target=target_item) - # task = TaskItem(None, "FullRefresh", None, schedule_item=monthly_schedule, target=target_item) with open(GET_XML_CREATE_FLOW_TASK_RESPONSE, "rb") as f: response_xml = f.read().decode("utf-8") @@ -58,4 +55,3 @@ def test_create_flow_task(self): self.assertTrue("task_id" in create_response_content) self.assertTrue("flow_id" in create_response_content) - #self.assertTrue("FullRefresh" in create_response_content) From 06b76d6dbce43cecb1b872d265c764b614d4fad7 Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 14:12:53 -0700 Subject: [PATCH 19/27] black format --- tableauserverclient/server/endpoint/flow_task_endpoint.py | 2 +- tableauserverclient/server/request_factory.py | 8 +++++--- test/test_flowtask.py | 1 - 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/tableauserverclient/server/endpoint/flow_task_endpoint.py b/tableauserverclient/server/endpoint/flow_task_endpoint.py index 18a9c2550..eea3f9710 100644 --- a/tableauserverclient/server/endpoint/flow_task_endpoint.py +++ b/tableauserverclient/server/endpoint/flow_task_endpoint.py @@ -26,4 +26,4 @@ def create(self, flow_item: TaskItem) -> TaskItem: url = self.baseurl create_req = RequestFactory.FlowTask.create_flow_task_req(flow_item) server_response = self.post_request(url, create_req) - return server_response.content \ No newline at end of file + return server_response.content diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 825451187..cca4b82a6 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -972,9 +972,9 @@ def update_req(self, workbook_item): if data_freshness_policy_config.option == "FreshEvery": if data_freshness_policy_config.fresh_every_schedule is not None: fresh_every_element = ET.SubElement(data_freshness_policy_element, "freshEverySchedule") - fresh_every_element.attrib[ - "frequency" - ] = data_freshness_policy_config.fresh_every_schedule.frequency + fresh_every_element.attrib["frequency"] = ( + data_freshness_policy_config.fresh_every_schedule.frequency + ) fresh_every_element.attrib["value"] = str(data_freshness_policy_config.fresh_every_schedule.value) else: raise ValueError(f"data_freshness_policy_config.fresh_every_schedule must be populated.") @@ -1113,6 +1113,7 @@ def create_extract_req(self, xml_request: ET.Element, extract_item: "TaskItem") return ET.tostring(xml_request) + class FlowTaskRequest(object): @_tsrequest_wrapped def run_req(self, xml_request, task_item): @@ -1151,6 +1152,7 @@ def create_flow_task_req(self, xml_request: ET.Element, flow_item: "TaskItem") - return ET.tostring(xml_request) + class SubscriptionRequest(object): @_tsrequest_wrapped def create_req(self, xml_request: ET.Element, subscription_item: "SubscriptionItem") -> bytes: diff --git a/test/test_flowtask.py b/test/test_flowtask.py index 8588d5701..61a09b429 100644 --- a/test/test_flowtask.py +++ b/test/test_flowtask.py @@ -22,7 +22,6 @@ GET_XML_CREATE_FLOW_TASK_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_create_flow_task.xml") - class TaskTests(unittest.TestCase): def 
setUp(self): self.server = TSC.Server("http://test", False) From 4735bd31185c6dec8b1fdccce86ee8aa32f129dd Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 14:29:26 -0700 Subject: [PATCH 20/27] add xml --- test/assets/tasks_create_flow_task.xml | 14 ++++++++++++++ test/test_flowtask.py | 9 --------- 2 files changed, 14 insertions(+), 9 deletions(-) create mode 100644 test/assets/tasks_create_flow_task.xml diff --git a/test/assets/tasks_create_flow_task.xml b/test/assets/tasks_create_flow_task.xml new file mode 100644 index 000000000..44826a94a --- /dev/null +++ b/test/assets/tasks_create_flow_task.xml @@ -0,0 +1,14 @@ + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/test_flowtask.py b/test/test_flowtask.py index 61a09b429..1f7d82c30 100644 --- a/test/test_flowtask.py +++ b/test/test_flowtask.py @@ -10,15 +10,6 @@ from tableauserverclient.models.task_item import TaskItem TEST_ASSET_DIR = Path(__file__).parent / "assets" - -GET_XML_NO_WORKBOOK = os.path.join(TEST_ASSET_DIR, "tasks_no_workbook_or_datasource.xml") -GET_XML_WITH_WORKBOOK = os.path.join(TEST_ASSET_DIR, "tasks_with_workbook.xml") -GET_XML_WITH_DATASOURCE = os.path.join(TEST_ASSET_DIR, "tasks_with_datasource.xml") -GET_XML_RUN_NOW_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_run_now_response.xml") -GET_XML_CREATE_TASK_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_create_extract_task.xml") -GET_XML_WITHOUT_SCHEDULE = TEST_ASSET_DIR / "tasks_without_schedule.xml" -GET_XML_WITH_INTERVAL = TEST_ASSET_DIR / "tasks_with_interval.xml" - GET_XML_CREATE_FLOW_TASK_RESPONSE = os.path.join(TEST_ASSET_DIR, "tasks_create_flow_task.xml") From d6fd8291378d2393a02a8dc96cd46853d2455515 Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 15:17:40 -0700 Subject: [PATCH 21/27] edit test initialization --- test/assets/tasks_create_flow_task.xml | 38 ++++++++++++++++++-------- test/test_flowtask.py | 8 +++--- 2 files changed, 30 insertions(+), 16 deletions(-) diff --git a/test/assets/tasks_create_flow_task.xml b/test/assets/tasks_create_flow_task.xml index 44826a94a..b5a6aa6f4 100644 --- a/test/assets/tasks_create_flow_task.xml +++ b/test/assets/tasks_create_flow_task.xml @@ -1,14 +1,28 @@ - - - - - - - - - - - - + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/test_flowtask.py b/test/test_flowtask.py index 1f7d82c30..ed2627147 100644 --- a/test/test_flowtask.py +++ b/test/test_flowtask.py @@ -27,10 +27,10 @@ def setUp(self): def test_create_flow_task(self): monthly_interval = TSC.MonthlyInterval(start_time=time(23, 30), interval_value=15) monthly_schedule = TSC.ScheduleItem( - None, - None, - None, - None, + "Monthly Schedule", + 50, + TSC.ScheduleItem.Type.Flow, + TSC.ScheduleItem.ExecutionOrder.Parallel, monthly_interval, ) target_item = TSC.Target("flow_id", "flow") From 7f11a6d4ff7d4da1d526784d30ef30182f9592aa Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 15:31:14 -0700 Subject: [PATCH 22/27] fix task initialization --- test/test_flowtask.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/test/test_flowtask.py b/test/test_flowtask.py index ed2627147..dd2d07eef 100644 --- a/test/test_flowtask.py +++ b/test/test_flowtask.py @@ -35,7 +35,7 @@ def test_create_flow_task(self): ) target_item = TSC.Target("flow_id", "flow") - task = TaskItem(schedule_item=monthly_schedule, target=target_item) + task = TaskItem(None, "RunFlow", None, schedule_item=monthly_schedule, target=target_item) with open(GET_XML_CREATE_FLOW_TASK_RESPONSE, "rb") as f: response_xml 
= f.read().decode("utf-8") From c746957b3293f1fedc46af86f07432d86bc803b5 Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 15:45:12 -0700 Subject: [PATCH 23/27] third times the charm --- test/assets/tasks_create_flow_task.xml | 12 ++++++------ test/test_flowtask.py | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/test/assets/tasks_create_flow_task.xml b/test/assets/tasks_create_flow_task.xml index b5a6aa6f4..11c9a4ff0 100644 --- a/test/assets/tasks_create_flow_task.xml +++ b/test/assets/tasks_create_flow_task.xml @@ -1,11 +1,11 @@ - - - - + - diff --git a/test/test_flowtask.py b/test/test_flowtask.py index dd2d07eef..034066e64 100644 --- a/test/test_flowtask.py +++ b/test/test_flowtask.py @@ -43,5 +43,5 @@ def test_create_flow_task(self): m.post("{}".format(self.baseurl), text=response_xml) create_response_content = self.server.flow_tasks.create(task).decode("utf-8") - self.assertTrue("task_id" in create_response_content) + self.assertTrue("schedule_id" in create_response_content) self.assertTrue("flow_id" in create_response_content) From 0e5ce785d601a3c013c97a305188d281a867c866 Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Wed, 8 May 2024 15:51:58 -0700 Subject: [PATCH 24/27] cleanup --- tableauserverclient/server/request_factory.py | 5 ----- 1 file changed, 5 deletions(-) diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index cca4b82a6..61507ea2e 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -1115,11 +1115,6 @@ def create_extract_req(self, xml_request: ET.Element, extract_item: "TaskItem") class FlowTaskRequest(object): - @_tsrequest_wrapped - def run_req(self, xml_request, task_item): - # Send an empty tsRequest - pass - @_tsrequest_wrapped def create_flow_task_req(self, xml_request: ET.Element, flow_item: "TaskItem") -> bytes: flow_element = ET.SubElement(xml_request, "runFlow") From bcb02ac5e294246e07859ddc1281bba11b58ee09 Mon Sep 17 00:00:00 2001 From: "liu.r" Date: Thu, 9 May 2024 17:33:27 -0700 Subject: [PATCH 25/27] fix formatting --- tableauserverclient/server/request_factory.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/tableauserverclient/server/request_factory.py b/tableauserverclient/server/request_factory.py index 61507ea2e..c204e7217 100644 --- a/tableauserverclient/server/request_factory.py +++ b/tableauserverclient/server/request_factory.py @@ -972,9 +972,9 @@ def update_req(self, workbook_item): if data_freshness_policy_config.option == "FreshEvery": if data_freshness_policy_config.fresh_every_schedule is not None: fresh_every_element = ET.SubElement(data_freshness_policy_element, "freshEverySchedule") - fresh_every_element.attrib["frequency"] = ( - data_freshness_policy_config.fresh_every_schedule.frequency - ) + fresh_every_element.attrib[ + "frequency" + ] = data_freshness_policy_config.fresh_every_schedule.frequency fresh_every_element.attrib["value"] = str(data_freshness_policy_config.fresh_every_schedule.value) else: raise ValueError(f"data_freshness_policy_config.fresh_every_schedule must be populated.") From 435f1aed2e25542b894070440558289f8527a53c Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Thu, 9 May 2024 21:06:35 -0500 Subject: [PATCH 26/27] feat: pass parameters in request options --- tableauserverclient/server/request_options.py | 16 +++++++++++++-- test/test_request_option.py | 20 +++++++++++++++++++ 2 files changed, 34 
insertions(+), 2 deletions(-) diff --git a/tableauserverclient/server/request_options.py b/tableauserverclient/server/request_options.py index 8304b8f68..5cc06bf9d 100644 --- a/tableauserverclient/server/request_options.py +++ b/tableauserverclient/server/request_options.py @@ -1,5 +1,7 @@ import sys +from typing_extensions import Self + from tableauserverclient.models.property_decorators import property_is_int import logging @@ -154,17 +156,27 @@ class _FilterOptionsBase(RequestOptionsBase): def __init__(self): self.view_filters = [] + self.view_parameters = [] def get_query_params(self): raise NotImplementedError() - def vf(self, name, value): + def vf(self, name: str, value: str) -> Self: + """Apply a filter to the view for a filter that is a normal column + within the view.""" self.view_filters.append((name, value)) return self - def _append_view_filters(self, params): + def parameter(self, name: str, value: str) -> Self: + """Apply a filter based on a parameter within the workbook.""" + self.view_parameters.append((name, value)) + return self + + def _append_view_filters(self, params) -> None: for name, value in self.view_filters: params["vf_" + name] = value + for name, value in self.view_parameters: + params[name] = value class CSVRequestOptions(_FilterOptionsBase): diff --git a/test/test_request_option.py b/test/test_request_option.py index 32526d1e6..40dd3345a 100644 --- a/test/test_request_option.py +++ b/test/test_request_option.py @@ -2,6 +2,7 @@ from pathlib import Path import re import unittest +from urllib.parse import parse_qs import requests_mock @@ -311,3 +312,22 @@ def test_slicing_queryset_multi_page(self) -> None: def test_queryset_filter_args_error(self) -> None: with self.assertRaises(RuntimeError): workbooks = self.server.workbooks.filter("argument") + + def test_filtering_parameters(self) -> None: + self.server.version = "3.6" + with requests_mock.mock() as m: + m.get(requests_mock.ANY) + url = self.baseurl + "/views/456/data" + opts = TSC.PDFRequestOptions() + opts.parameter("name1@", "value1") + opts.parameter("name2$", "value2") + opts.page_type = TSC.PDFRequestOptions.PageType.Tabloid + + resp = self.server.workbooks.get_request(url, request_object=opts) + query_params = parse_qs(resp.request.query) + self.assertIn("name1@", query_params) + self.assertIn("value1", query_params["name1@"]) + self.assertIn("name2$", query_params) + self.assertIn("value2", query_params["name2$"]) + self.assertIn("type", query_params) + self.assertIn("tabloid", query_params["type"]) From 397e275804a7321a7c2b0e45ee8e91c2f6ca11c8 Mon Sep 17 00:00:00 2001 From: Jordan Woods <13803242+jorwoods@users.noreply.github.com> Date: Thu, 9 May 2024 21:09:41 -0500 Subject: [PATCH 27/27] chore: pin typing_extensions version --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 9c35a42e7..fceb37237 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,6 +16,7 @@ dependencies = [ 'packaging>=23.1', # latest as at 7/31/23 'requests>=2.31', # latest as at 7/31/23 'urllib3==2.0.7', # latest as at 7/31/23 + 'typing_extensions>=4.0.1', ] requires-python = ">=3.7" classifiers = [