8000 Working commit · marcusjc/python-docs-samples@5dd9b31 · GitHub
[go: up one dir, main page]

Skip to content

Commit 5dd9b31

Browse files
committed
Working commit
1 parent 97ceae9 commit 5dd9b31

File tree

4 files changed

+1620
-0
lines changed

4 files changed

+1620
-0
lines changed
Lines changed: 100 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,100 @@
1+
import argparse
2+
import json
3+
# [START import_libraries]
4+
import googleapiclient.discovery
5+
# [END import_libraries]
6+
7+
# [START authenticating]
def get_ml_engine_service():
    """Build a Cloud ML Engine API client from a local discovery document.

    Reads the API discovery document from 'staging_ml.json' in the current
    working directory.

    Returns:
        A googleapiclient service object for the ML Engine API.
    """
    # Use a context manager so the file handle is closed even if
    # json.load or build_from_document raises (the original leaked it).
    with open('staging_ml.json') as discovery_file:
        return googleapiclient.discovery.build_from_document(
            json.load(discovery_file))
# [END authenticating]
12+
13+
# [START predict_json]
def predict_json(project, model, instances, version=None):
    """Send JSON data instances to a deployed model for online prediction.

    Args:
        project: str, project where the Cloud ML Engine Model is deployed.
        model: str, model name.
        instances: [dict], dictionaries from string keys defined by the
            model to data.
        version: [optional] str, version of the model to target.
    Returns:
        A dictionary of prediction results defined by the model.
    Raises:
        RuntimeError: if the prediction service reports an error.
    """
    service = get_ml_engine_service()

    # Fully-qualified resource name; add the version segment only when a
    # specific version is requested.
    model_path = 'projects/{}/models/{}'.format(project, model)
    if version is not None:
        model_path = '{}/versions/{}'.format(model_path, version)

    request = service.projects().predict(
        name=model_path,
        body={"instances": instances})
    response = request.execute()

    if 'error' in response:
        raise RuntimeError(response['error'])
    return response['predictions']
# [END predict_json]
40+
41+
# [START predict_tf_records]
def predict_tf_records(project, model, example_bytes_list,
                       key='tfrecord', version=None):
    """Send serialized tf.train.Example protos to a model for prediction.

    Args:
        project: str, project where the Cloud ML Engine Model is deployed.
        model: str, model name.
        example_bytes_list: [bytes], serialized tf.train.Example protos.
        key: str, the input key under which each base64-encoded payload is
            sent (defaults to 'tfrecord').
        version: [optional] str, version of the model to target.
    Returns:
        A dictionary of prediction results defined by the model.
    Raises:
        RuntimeError: if the prediction service reports an error.
    """
    import base64
    service = get_ml_engine_service()
    name = 'projects/{}/models/{}'.format(project, model)
    if version is not None:
        name += '/versions/{}'.format(version)

    # The JSON API transports binary payloads base64-encoded under a 'b64'
    # key.  Decode the b64encode result to text so the request body is
    # JSON-serializable on Python 3 as well (b64encode returns bytes there).
    response = service.projects().predict(
        name=name,
        body={"instances": [
            {key: {'b64': base64.b64encode(example_bytes).decode('utf-8')}}
            for example_bytes in example_bytes_list
        ]}
    ).execute()
    if 'error' in response:
        raise RuntimeError(response['error'])

    return response['predictions']
70+
71+
def census_to_example_bytes(json_instance):
    """Serialize a census JSON record as a tf.train.Example proto.

    Args:
        json_instance: dict, maps feature names to values.  bytes/str
            values become bytes features; int/float values become float
            features.  Values of any other type are silently skipped.
    Returns:
        bytes, the serialized tf.train.Example.
    """
    import tensorflow as tf
    feature_dict = {}
    # .items() instead of the Python 2-only .iteritems()/unicode so this
    # also runs on Python 3.
    for name, value in json_instance.items():
        if isinstance(value, bytes):
            feature_dict[name] = tf.train.Feature(
                bytes_list=tf.train.BytesList(value=[value]))
        elif isinstance(value, str):
            # BytesList requires bytes on Python 3; encode text values.
            feature_dict[name] = tf.train.Feature(
                bytes_list=tf.train.BytesList(value=[value.encode('utf-8')]))
        elif isinstance(value, (int, float)):
            # NOTE(review): ints are stored as floats, which is lossy for
            # very large values — presumably matches the model's input
            # spec; confirm against the training-side feature columns.
            feature_dict[name] = tf.train.Feature(
                float_list=tf.train.FloatList(value=[value]))
    return tf.train.Example(
        features=tf.train.Features(
            feature=feature_dict
        )
    ).SerializeToString()
# [END predict_tf_records]
87+
88+
if __name__ == '__main__':
    # Simple CLI: read newline-delimited JSON instances from the input
    # file (argv[1]) and write, to the output file (argv[2]), one
    # {'tfrecord': {'b64': ...}} JSON object per instance — the shape
    # expected by predict_tf_records.
    import sys
    import base64
    import json
    with open(sys.argv[1]) as f:
        instances = [json.loads(line) for line in f]

    with open(sys.argv[2], 'w') as f:
        for instance in instances:
            # Bug fix: the helper defined above is census_to_example_bytes;
            # the original called the undefined census_to_example_string,
            # which raised NameError at runtime.  Decode the base64 output
            # so it is JSON-serializable on Python 3 too.
            f.write(json.dumps(
                {'tfrecord': {'b64': base64.b64encode(
                    census_to_example_bytes(instance)
                ).decode('utf-8')}}))
            # One JSON object per line, matching the input format; the
            # original concatenated objects with no separator, producing
            # an unparseable output file.
            f.write('\n')

0 commit comments

Comments
 (0)
0