chore: run doctest and notebook tests in bigframes-testing project by shobsi · Pull Request #976 · googleapis/python-bigquery-dataframes · GitHub
[go: up one dir, main page]

Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 1 addition & 6 deletions .kokoro/continuous/doctest.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,5 @@ env_vars: {

env_vars: {
key: "GOOGLE_CLOUD_PROJECT"
value: "bigframes-load-testing"
}

env_vars: {
key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
value: "bigframes-testing"
}
7 changes: 1 addition & 6 deletions .kokoro/continuous/notebook.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,5 @@ env_vars: {

env_vars: {
key: "GOOGLE_CLOUD_PROJECT"
value: "bigframes-load-testing"
}

env_vars: {
key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
value: "bigframes-testing"
}
7 changes: 1 addition & 6 deletions .kokoro/presubmit/doctest.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,5 @@ env_vars: {

env_vars: {
key: "GOOGLE_CLOUD_PROJECT"
value: "bigframes-load-testing"
}

env_vars: {
key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
value: "bigframes-testing"
}
7 changes: 1 addition & 6 deletions .kokoro/presubmit/notebook.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -8,10 +8,5 @@ env_vars: {

env_vars: {
key: "GOOGLE_CLOUD_PROJECT"
value: "bigframes-load-testing"
}

env_vars: {
key: "BIGFRAMES_TEST_MODEL_VERTEX_ENDPOINT"
value: "https://us-central1-aiplatform.googleapis.com/v1/projects/272725758477/locations/us-central1/endpoints/590545496255234048"
value: "bigframes-testing"
}
26 changes: 15 additions & 11 deletions notebooks/location/regionalized.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -47,32 +47,36 @@
],
"source": [
"# Take multi-region US as the default BQ location, where most of the BQ data lies including the BQ public datasets\n",
"BQ_LOCATION = \"us\"\n",
"PROJECT = \"bigframes-dev\"\n",
"import os\n",
"\n",
"PROJECT_ID = os.environ.get(\"GOOGLE_CLOUD_PROJECT\")\n",
"BQ_LOCATION = os.environ.get(\"BIGQUERY_LOCATION\")\n",
"\n",
"if not PROJECT_ID:\n",
" raise ValueError(\"Project must be set via environment variable GOOGLE_CLOUD_PROJECT\")\n",
"if not BQ_LOCATION:\n",
" raise ValueError(\"BQ location must be set via environment variable BIGQUERY_LOCATION\")\n",
"\n",
"DATASET = \"bigframes_testing\"\n",
"PENGUINS_TABLE = \"bigquery-public-data.ml_datasets.penguins\"\n",
"\n",
"\n",
"# Check for a location set in the environment and do location-specific setup if needed\n",
"\n",
"import os\n",
"import google.api_core.exceptions\n",
"from google.cloud import bigquery\n",
"import bigframes\n",
" \n",
"env_bq_location = os.getenv(\"BIGQUERY_LOCATION\")\n",
"if env_bq_location and env_bq_location != BQ_LOCATION:\n",
" BQ_LOCATION = env_bq_location.lower()\n",
"\n",
"client = bigquery.Client()\n",
"\n",
"BQ_LOCATION = BQ_LOCATION.lower()\n",
"if BQ_LOCATION != \"us\":\n",
" bq_location_normalized = BQ_LOCATION.replace('-', '_')\n",
"\n",
" # Nominate a local penguins table\n",
" penguins_table_ref = bigquery.TableReference.from_string(PENGUINS_TABLE)\n",
" penguins_local_dataset_name = f\"{DATASET}_{bq_location_normalized}\"\n",
" penguins_local_dataset_ref = bigquery.DatasetReference(project=PROJECT, dataset_id=penguins_local_dataset_name)\n",
" penguins_local_dataset_ref = bigquery.DatasetReference(project=PROJECT_ID, dataset_id=penguins_local_dataset_name)\n",
" penguins_local_dataset = bigquery.Dataset(penguins_local_dataset_ref)\n",
" penguins_local_dataset.location = BQ_LOCATION\n",
" penguins_local_table_ref= bigquery.TableReference(penguins_local_dataset, penguins_table_ref.table_id)\n",
Expand All @@ -94,13 +98,13 @@
" DATASET = f\"{DATASET}_{bq_location_normalized}\"\n",
"\n",
"# Create the dataset to store the model if it doesn't exist \n",
"model_local_dataset = bigquery.Dataset(bigquery.DatasetReference(project=PROJECT, dataset_id=DATASET))\n",
"model_local_dataset = bigquery.Dataset(bigquery.DatasetReference(project=PROJECT_ID, dataset_id=DATASET))\n",
"model_local_dataset.location = BQ_LOCATION\n",
"model_dataset = client.create_dataset(model_local_dataset, exists_ok=True)\n",
"\n",
"# Finally log the variables driving the core notebook execution\n",
"log = ('\\n'.join(f\"{name}: {str(value)}\" for name, value in {\n",
" \"BigQuery project\" : PROJECT,\n",
" \"BigQuery project\" : PROJECT_ID,\n",
" \"BigQuery location\" : BQ_LOCATION,\n",
" \"Penguins Table\" : PENGUINS_TABLE,\n",
" \"ML Model Dataset\" : model_dataset.reference\n",
Expand Down Expand Up @@ -134,7 +138,7 @@
"\n",
"# Note: The project option is not required in all environments.\n",
"# On BigQuery Studio, the project ID is automatically detected.\n",
"bigframes.pandas.options.bigquery.project = PROJECT\n",
"bigframes.pandas.options.bigquery.project = PROJECT_ID\n",
"\n",
"# Note: The location option is not required.\n",
"# It defaults to the location of the first table or query\n",
Expand Down
3 changes: 3 additions & 0 deletions scripts/setup-project-for-testing.sh
Original file line number Diff line number Diff line change
Expand Up @@ -57,12 +57,14 @@ function log_and_execute() {
################################################################################
function enable_apis() {
for service in aiplatform.googleapis.com \
artifactregistry.googleapis.com \
bigquery.googleapis.com \
bigqueryconnection.googleapis.com \
bigquerystorage.googleapis.com \
cloudbuild.googleapis.com \
cloudfunctions.googleapis.com \
cloudresourcemanager.googleapis.com \
compute.googleapis.com \
run.googleapis.com \
; do
log_and_execute gcloud --project=$PROJECT_ID services enable $service
Expand Down Expand Up @@ -148,6 +150,7 @@ function ensure_bq_connections_with_iam() {
southamerica-west1 \
us \
us-central1 \
us-east5 \
; do
ensure_bq_connection_with_iam "$location" "$BIGFRAMES_RF_CONNECTION_NAME"
done
Expand Down