diff --git a/samples/snippets/conftest.py b/samples/snippets/conftest.py
index 69cef8727d..e8253bc5a7 100644
--- a/samples/snippets/conftest.py
+++ b/samples/snippets/conftest.py
@@ -12,9 +12,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from typing import Generator, Iterator
+from typing import Iterator
 
-from google.cloud import bigquery, storage
+from google.cloud import bigquery
 import pytest
 import test_utils.prefixer
 
@@ -42,27 +42,11 @@ def bigquery_client() -> bigquery.Client:
     return bigquery_client
 
 
-@pytest.fixture(scope="session")
-def storage_client(project_id: str) -> storage.Client:
-    return storage.Client(project=project_id)
-
-
 @pytest.fixture(scope="session")
 def project_id(bigquery_client: bigquery.Client) -> str:
     return bigquery_client.project
 
 
-@pytest.fixture(scope="session")
-def gcs_bucket(storage_client: storage.Client) -> Generator[str, None, None]:
-    bucket_name = "bigframes_blob_test"
-
-    yield bucket_name
-
-    bucket = storage_client.get_bucket(bucket_name)
-    for blob in bucket.list_blobs():
-        blob.delete()
-
-
 @pytest.fixture(autouse=True)
 def reset_session() -> None:
     """An autouse fixture ensuring each sample runs in a fresh session.
@@ -94,6 +78,11 @@ def dataset_id_eu(bigquery_client: bigquery.Client, project_id: str) -> Iterator
     bigquery_client.delete_dataset(dataset, delete_contents=True, not_found_ok=True)
 
 
+@pytest.fixture(scope="session")
+def gcs_dst_bucket() -> str:
+    return "gs://bigframes_blob_test"
+
+
 @pytest.fixture
 def random_model_id(
     bigquery_client: bigquery.Client, project_id: str, dataset_id: str
diff --git a/samples/snippets/multimodal_test.py b/samples/snippets/multimodal_test.py
index 1ea6a3f0a6..087299aa0a 100644
--- a/samples/snippets/multimodal_test.py
+++ b/samples/snippets/multimodal_test.py
@@ -13,9 +13,9 @@
 # limitations under the License.
 
 
-def test_multimodal_dataframe(gcs_bucket: str) -> None:
+def test_multimodal_dataframe(gcs_dst_bucket: str) -> None:
     # destination folder must be in a GCS bucket that the BQ connection service account (default or user provided) has write access to.
-    dst_bucket = f"gs://{gcs_bucket}"
+    dst_bucket = gcs_dst_bucket
     # [START bigquery_dataframes_multimodal_dataframe_create]
     import bigframes
 
diff --git a/samples/snippets/sessions_and_io_test.py b/samples/snippets/sessions_and_io_test.py
index 06f0c4ab3c..24290c7279 100644
--- a/samples/snippets/sessions_and_io_test.py
+++ b/samples/snippets/sessions_and_io_test.py
@@ -13,11 +13,10 @@
 # limitations under the License.
 
 
-def test_sessions_and_io(project_id: str, dataset_id: str, gcs_bucket: str) -> None:
+def test_sessions_and_io(project_id: str, dataset_id: str) -> None:
     YOUR_PROJECT_ID = project_id
     YOUR_DATASET_ID = dataset_id
     YOUR_LOCATION = "us"
-    YOUR_BUCKET = gcs_bucket
 
     # [START bigquery_dataframes_create_and_use_session_instance]
     import bigframes
@@ -140,15 +139,6 @@ def test_sessions_and_io(project_id: str, dataset_id: str, gcs_bucket: str) -> N
     # [END bigquery_dataframes_read_data_from_csv]
     assert df is not None
 
-    # [START bigquery_dataframes_write_data_to_csv]
-    import bigframes.pandas as bpd
-
-    df = bpd.DataFrame({"my_col": [1, 2, 3]})
-    # Write a dataframe to a CSV file in GCS
-    df.to_csv(f"gs://{YOUR_BUCKET}/myfile*.csv")
-    # [END bigquery_dataframes_write_data_to_csv]
-    assert df is not None
-
     # [START bigquery_dataframes_read_data_from_bigquery_table]
     import bigframes.pandas as bpd
 
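
For context, a minimal sketch of how a test picks up the new session-scoped fixture, assuming the conftest.py above is on pytest's collection path; the test function below is hypothetical and not part of this patch:

    # Hypothetical usage sketch, not part of the patch: pytest injects
    # gcs_dst_bucket by name from conftest.py, so the test receives the
    # full gs:// URI directly, with no storage.Client setup or per-test
    # blob cleanup required.
    def test_uses_destination_bucket(gcs_dst_bucket: str) -> None:
        assert gcs_dst_bucket == "gs://bigframes_blob_test"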