8000 object detection · mmmarklu/python-docs-samples@7e0f02d · GitHub
[go: up one dir, main page]

Skip to content

Commit 7e0f02d

Browse files
object detection
1 parent eb8bb45 commit 7e0f02d

File tree

3 files changed

+518
-0
lines changed

3 files changed

+518
-0
lines changed
Lines changed: 231 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,231 @@
1+
#!/usr/bin/env python
2+
3+
# Copyright 2019 Google LLC
4+
#
5+
# Licensed under the Apache License, Version 2.0 (the "License");
6+
# you may not use this file except in compliance with the License.
7+
# You may obtain a copy of the License at
8+
#
9+
# http://www.apache.org/licenses/LICENSE-2.0
10+
#
11+
# Unless required by applicable law or agreed to in writing, software
12+
# distributed under the License is distributed on an "AS IS" BASIS,
13+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14+
# See the License for the specific language governing permissions and
15+
# limitations under the License.
16+
"""This application demonstrates how to perform basic operations on
17+
18+
object detection datasets with the Google AutoML API. For more information, see the
19+
documentation at
20+
https://cloud.google.com/vision/automl/docs.
21+
"""
22+
from __future__ import print_function
23+
24+
import argparse
25+
import os
26+
27+
28+
def encode_image_example(image):
    """Demonstrate how to base64-encode an image (or any file).

    Args:
        image: an open, binary file-like object to encode.

    Returns:
        The base64-encoded bytes of the file's contents.
    """
    # Import the base64 encoding library.
    import base64

    # Read the whole file-like object and return its base64 encoding.
    return base64.b64encode(image.read())
39+
40+
41+
def create_dataset(project_id, compute_region, dataset_name):
    """Creates a dataset.

    Args:
        project_id: Google Cloud project id.
        compute_region: compute region, e.g. 'us-central1'.
        dataset_name: display name for the new object detection dataset.
    """
    # [START automl_vision_iod_create_dataset]
    ## To do: Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # dataset_name = '[DATASET_NAME]'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # Set dataset name and metadata of the dataset.
    my_dataset = {
        'display_name': dataset_name,
        'image_object_detection_dataset_metadata': {},
    }

    # Create a dataset with the dataset metadata in the region.
    # NOTE: the original sample issued a second, duplicate create_dataset
    # call with an identical spec; the dataset is now created exactly once.
    dataset = client.create_dataset(project_location, my_dataset)

    # Display the dataset information.
    print('Dataset name: {}'.format(dataset.name))
    print('Dataset id: {}'.format(dataset.name.split('/')[-1]))
    print('Dataset display name: {}'.format(dataset.display_name))
    print('Dataset example count: {}'.format(dataset.example_count))
    print('Dataset create time:')
    print('\tseconds: {}'.format(dataset.create_time.seconds))
    print('\tnanos: {}'.format(dataset.create_time.nanos))

    dataset_full_id = dataset.name
    print('Dataset full id: {}'.format(dataset_full_id))

    # [END automl_vision_iod_create_dataset]
84+
85+
86+
def get_dataset(project_id, compute_region, dataset_id):
    """Describes a dataset.

    Args:
        project_id: Google Cloud project id.
        compute_region: compute region, e.g. 'us-central1'.
        dataset_id: id of the dataset to describe.
    """
    # [START automl_vision_iod_get_dataset]
    ## To do: Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # dataset_id = '[DATASET_ID]'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Get the full path of the dataset.
    # (The unused project_location lookup in the original was removed.)
    dataset_full_id = client.dataset_path(
        project_id, compute_region, dataset_id)

    # Get the dataset.
    response = client.get_dataset(dataset_full_id)
    print('\nDataset description: {}'.format(response))

    # [END automl_vision_iod_get_dataset]
109+
110+
111+
def list_datasets(project_id, compute_region, filter_):
    """Lists all datasets.

    Args:
        project_id: Google Cloud project id.
        compute_region: compute region, e.g. 'us-central1'.
        filter_: optional filter expression; '' lists everything.
    """
    # [START automl_vision_iod_lists_dataset]
    ## To do: Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # filter_ = ''

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # A resource that represents Google Cloud Platform location.
    project_location = client.location_path(project_id, compute_region)

    # List all the datasets available in the region by applying filter.
    response = client.list_datasets(project_location, filter_=filter_)

    print('List of datasets:')
    for dataset in response:
        # Display the dataset information.
        print('Dataset name: {}'.format(dataset.name))
        print('Dataset id: {}'.format(dataset.name.split('/')[-1]))
        print('Dataset display name: {}'.format(dataset.display_name))
        print('Dataset example count: {}'.format(dataset.example_count))
        # Fixed: this literal was corrupted to 'Dataset create time:&#39;)'
        # by an HTML-entity mangling, which is a syntax error.
        print('Dataset create time:')
        print('\tseconds: {}'.format(dataset.create_time.seconds))
        print('\tnanos: {}'.format(dataset.create_time.nanos))

    # [END automl_vision_iod_lists_dataset]
141+
142+
143+
def import_data(project_id, compute_region, dataset_id, paths):
    """Imports images and bounding boxes.

    Args:
        project_id: Google Cloud project id.
        compute_region: compute region, e.g. 'us-central1'.
        dataset_id: id of the dataset to import into.
        paths: list of Cloud Storage URIs of CSV files to import.
    """
    # [START automl_vision_iod_import_data]
    ## To do: Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # dataset_id = '[DATASET_ID]'
    # paths = '[Storage path. For example: gs://path/to/file.csv]'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Build the fully-qualified resource name of the target dataset.
    dataset_full_id = client.dataset_path(
        project_id, compute_region, dataset_id)

    # Wrap the Cloud Storage URIs in the expected input-config shape.
    input_config = {'gcs_source': {'input_uris': paths}}

    # Start the import from the input URIs; this returns an operation.
    operation = client.import_data(dataset_full_id, input_config)

    print('Processing import...')
    # Block until the long-running import operation finishes.
    print('Data imported. {}'.format(operation.result()))

    # [END automl_vision_iod_import_data]
170+
171+
172+
def delete_dataset(project_id, compute_region, dataset_id):
    """Deletes a dataset.

    Args:
        project_id: Google Cloud project id.
        compute_region: compute region, e.g. 'us-central1'.
        dataset_id: id of the dataset to delete.
    """
    # [START automl_vision_iod_delete_dataset]
    ## To do: Uncomment and set the following variables
    # project_id = '[PROJECT_ID]'
    # compute_region = '[COMPUTE_REGION]'
    # dataset_id = '[DATASET_ID]'

    from google.cloud import automl_v1beta1 as automl

    client = automl.AutoMlClient()

    # Build the fully-qualified resource name of the dataset.
    dataset_full_id = client.dataset_path(
        project_id, compute_region, dataset_id)

    # Request deletion; this returns a long-running operation.
    operation = client.delete_dataset(dataset_full_id)

    # Block until the deletion completes before reporting success.
    print('Dataset deleted. {}'.format(operation.result()))

    # [END automl_vision_iod_delete_dataset]
194+
195+
196+
if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description=__doc__,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    subparsers = parser.add_subparsers(dest='command')

    # One subcommand per operation; help text reuses each function's docstring.
    sub = subparsers.add_parser('create_dataset', help=create_dataset.__doc__)
    sub.add_argument('dataset_name')

    sub = subparsers.add_parser('get_dataset', help=get_dataset.__doc__)
    sub.add_argument('dataset_id')

    sub = subparsers.add_parser('import_data', help=import_data.__doc__)
    sub.add_argument('path')
    sub.add_argument('dataset_id')

    subparsers.add_parser('list_datasets', help=list_datasets.__doc__)

    sub = subparsers.add_parser('delete_dataset', help=delete_dataset.__doc__)
    sub.add_argument('dataset_id')

    args = parser.parse_args()

    # Setup: project comes from the environment, region is fixed.
    project_id = os.environ['PROJECT_ID']
    compute_region = 'us-central1'

    # Dispatch on the chosen subcommand.
    if args.command == 'create_dataset':
        create_dataset(project_id, compute_region, args.dataset_name)
    elif args.command == 'get_dataset':
        get_dataset(project_id, compute_region, args.dataset_id)
    elif args.command == 'delete_dataset':
        delete_dataset(project_id, compute_region, args.dataset_id)
    elif args.command == 'list_datasets':
        list_datasets(project_id, compute_region, '')
    elif args.command == 'import_data':
        import_data(project_id, compute_region, args.dataset_id, [args.path])

0 commit comments

Comments
 (0)
0