<html><body>
<style>

body, h1, h2, h3, div, span, p, pre, a {
  margin: 0;
  padding: 0;
  border: 0;
  font-weight: inherit;
  font-style: inherit;
  font-size: 100%;
  font-family: inherit;
  vertical-align: baseline;
}

body {
  font-size: 13px;
  padding: 1em;
}

h1 {
  font-size: 26px;
  margin-bottom: 1em;
}

h2 {
  font-size: 24px;
  margin-bottom: 1em;
}

h3 {
  font-size: 20px;
  margin-bottom: 1em;
  margin-top: 1em;
}

pre, code {
  line-height: 1.5;
  font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
}

pre {
  margin-top: 0.5em;
}

h1, h2, h3, p {
  font-family: Arial, sans-serif;
}

h1, h2, h3 {
  border-bottom: solid #CCC 1px;
}

.toc_element {
  margin-top: 0.5em;
}

.firstline {
  margin-left: 2em;
}

.method {
  margin-top: 1em;
  border: solid 1px #CCC;
  padding: 1em;
  background: #EEE;
}

.details {
  font-weight: bold;
  font-size: 14px;
}

</style>

<h1><a href="vision_v1.html">Cloud Vision API</a> . <a href="vision_v1.images.html">images</a></h1>
<h2>Instance Methods</h2>
<p class="toc_element">
  <code><a href="#annotate">annotate(body=None, x__xgafv=None)</a></code></p>
<p class="firstline">Run image detection and annotation for a batch of images.</p>
<p class="toc_element">
  <code><a href="#asyncBatchAnnotate">asyncBatchAnnotate(body=None, x__xgafv=None)</a></code></p>
<p class="firstline">Run asynchronous image detection and annotation for a list of images.</p>
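<p>Both methods follow the standard google-api-python-client call pattern. The sketch below is illustrative only (not part of the generated reference): it assumes application-default credentials are configured, and the bucket path and feature choice are hypothetical. <code>annotate</code> returns results inline, while <code>asyncBatchAnnotate</code> runs asynchronously and returns a long-running operation to poll instead.</p>
<pre>
# A minimal sketch, assuming google-api-python-client is installed
# and default credentials are available in the environment.
from googleapiclient.discovery import build

# Build the Cloud Vision v1 service from its public discovery document.
service = build('vision', 'v1')

# Synchronous batch annotation: one entry in `requests` per image.
response = service.images().annotate(body={
    'requests': [{
        'image': {'source': {'imageUri': 'gs://my-bucket/photo.jpg'}},  # hypothetical object
        'features': [{'type': 'LABEL_DETECTION', 'maxResults': 5}],
    }],
}).execute()

for label in response['responses'][0].get('labelAnnotations', []):
    print(label['description'], label['score'])
</pre>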
<h3>Method Details</h3>
<div class="method">
    <code class="details" id="annotate">annotate(body=None, x__xgafv=None)</code>
  <pre>Run image detection and annotation for a batch of images.

Args:
  body: object, The request body.
    The object takes the form of:

{ # Multiple image annotation requests are batched into a single service call.
  &quot;requests&quot;: [ # Required. Individual image annotation requests for this batch.
    { # Request for performing Google Cloud Vision API tasks over a user-provided
        # image, with user-requested features, and with context information.
      &quot;image&quot;: { # Client image to perform Google Cloud Vision API tasks over. # The image to be processed.
        &quot;source&quot;: { # External image source (Google Cloud Storage or web URL image location). # Google Cloud Storage image location, or publicly-accessible image
            # URL. If both `content` and `source` are provided for an image, `content`
            # takes precedence and is used to perform the image annotation request.
          &quot;gcsImageUri&quot;: &quot;A String&quot;, # **Use `image_uri` instead.**
              #
              # The Google Cloud Storage URI of the form
              # `gs://bucket_name/object_name`. Object versioning is not supported. See
              # [Google Cloud Storage Request
              # URIs](https://cloud.google.com/storage/docs/reference-uris) for more info.
          &quot;imageUri&quot;: &quot;A String&quot;, # The URI of the source image. Can be either:
              #
              # 1. A Google Cloud Storage URI of the form
              # `gs://bucket_name/object_name`. Object versioning is not supported. See
              # [Google Cloud Storage Request
              # URIs](https://cloud.google.com/storage/docs/reference-uris) for more
              # info.
              #
              # 2. A publicly-accessible image HTTP/HTTPS URL. When fetching images from
              # HTTP/HTTPS URLs, Google cannot guarantee that the request will be
              # completed. Your request may fail if the specified host denies the
              # request (e.g. due to request throttling or DOS prevention), or if Google
              # throttles requests to the site for abuse prevention. You should not
              # depend on externally-hosted images for production applications.
              #
              # When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes
              # precedence.
        },
        &quot;content&quot;: &quot;A String&quot;, # Image content, represented as a stream of bytes.
            # Note: As with all `bytes` fields, protobuffers use a pure binary
            # representation, whereas JSON representations use base64.
            #
            # Currently, this field only works for BatchAnnotateImages requests. It does
            # not work for AsyncBatchAnnotateImages requests.
      },
      &quot;imageContext&quot;: { # Image context and/or feature-specific parameters. # Additional context that may accompany the image.
        &quot;latLongRect&quot;: { # Rectangle determined by min and max `LatLng` pairs. # Not used.
          &quot;maxLatLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # Max lat/long pair.
              # of doubles representing degrees latitude and degrees longitude. Unless
              # specified otherwise, this must conform to the
              # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
              # standard&lt;/a&gt;. Values must be within normalized ranges.
            &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
            &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
          },
          &quot;minLatLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # Min lat/long pair.
              # of doubles representing degrees latitude and degrees longitude. Unless
              # specified otherwise, this must conform to the
              # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
              # standard&lt;/a&gt;. Values must be within normalized ranges.
            &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
            &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
          },
        },
        &quot;webDetectionParams&quot;: { # Parameters for web detection request. # Parameters for web detection.
          &quot;includeGeoResults&quot;: True or False, # Whether to include results derived from the geo information in the image.
        },
        &quot;languageHints&quot;: [ # List of languages to use for TEXT_DETECTION. In most cases, an empty value
            # yields the best results since it enables automatic language detection. For
            # languages based on the Latin alphabet, setting `language_hints` is not
            # needed. In rare cases, when the language of the text in the image is known,
            # setting a hint will help get better results (although it will be a
            # significant hindrance if the hint is wrong). Text detection returns an
            # error if one or more of the specified languages is not one of the
            # [supported languages](https://cloud.google.com/vision/docs/languages).
          &quot;A String&quot;,
        ],
        &quot;productSearchParams&quot;: { # Parameters for a product search request. # Parameters for product search.
          &quot;productCategories&quot;: [ # The list of product categories to search in. Currently, we only consider
              # the first category, and either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, &quot;toys-v2&quot;,
              # &quot;packagedgoods-v1&quot;, or &quot;general-v1&quot; should be specified. The legacy
              # categories &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported but will
              # be deprecated. For new products, please use &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;,
              # or &quot;toys-v2&quot; for better product search accuracy. It is recommended to
              # migrate existing products to these categories as well.
            &quot;A String&quot;,
          ],
          &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the area of interest in the image.
              # If it is not specified, system discretion will be applied.
            &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
              { # A vertex represents a 2D point in the image.
                  # NOTE: the normalized vertex coordinates are relative to the original image
                  # and range from 0 to 1.
                &quot;y&quot;: 3.14, # Y coordinate.
                &quot;x&quot;: 3.14, # X coordinate.
              },
            ],
            &quot;vertices&quot;: [ # The bounding polygon vertices.
              { # A vertex represents a 2D point in the image.
                  # NOTE: the vertex coordinates are in the same scale as the original image.
                &quot;x&quot;: 42, # X coordinate.
                &quot;y&quot;: 42, # Y coordinate.
              },
            ],
          },
          &quot;filter&quot;: &quot;A String&quot;, # The filtering expression. This can be used to restrict search results based
              # on Product labels. We currently support an AND of OR of key-value
              # expressions, where each expression within an OR must have the same key. An
              # &#x27;=&#x27; should be used to connect the key and value.
              #
              # For example, &quot;(color = red OR color = blue) AND brand = Google&quot; is
              # acceptable, but &quot;(color = red OR brand = Google)&quot; is not acceptable.
              # &quot;color: red&quot; is not acceptable because it uses a &#x27;:&#x27; instead of an &#x27;=&#x27;.
          &quot;productSet&quot;: &quot;A String&quot;, # The resource name of a ProductSet to be searched for similar images.
              #
              # Format is:
              # `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
        },
        &quot;cropHintsParams&quot;: { # Parameters for crop hints annotation request. # Parameters for crop hints annotation request.
          &quot;aspectRatios&quot;: [ # Aspect ratios in floats, representing the ratio of the width to the height
              # of the image. For example, if the desired aspect ratio is 4/3, the
              # corresponding float value should be 1.33333. If not specified, the
              # best possible crop is returned. The number of provided aspect ratios is
              # limited to a maximum of 16; any aspect ratios provided after the 16th are
              # ignored.
            3.14,
          ],
        },
      },
      &quot;features&quot;: [ # Requested features.
        { # The type of Google Cloud Vision API detection to perform, and the maximum
            # number of results to return for that type. Multiple `Feature` objects can
            # be specified in the `features` list.
          &quot;maxResults&quot;: 42, # Maximum number of results of this type. Does not apply to
              # `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
          &quot;type&quot;: &quot;A String&quot;, # The feature type.
          &quot;model&quot;: &quot;A String&quot;, # Model to use for the feature.
              # Supported values: &quot;builtin/stable&quot; (the default if unset) and
              # &quot;builtin/latest&quot;.
        },
      ],
    },
  ],
  &quot;parent&quot;: &quot;A String&quot;, # Optional. Target project and location to make a call.
      #
      # Format: `projects/{project-id}/locations/{location-id}`.
      #
      # If no parent is specified, a region will be chosen automatically.
      #
      # Supported location-ids:
      # `us`: USA country only,
      # `asia`: East Asia areas, like Japan, Taiwan,
      # `eu`: The European Union.
      #
      # Example: `projects/project-A/locations/eu`.
}

  x__xgafv: string, V1 error format.
    Allowed values
      1 - v1 error format
      2 - v2 error format

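For illustration only (not part of the generated schema): a minimal sketch of a
request body matching the form above, assuming a `service` object built with
googleapiclient.discovery; the bucket path, project id, and feature type are
hypothetical placeholders.

    body = {
        'requests': [{
            'image': {'source': {'imageUri': 'gs://my-bucket/photo.jpg'}},
            'features': [{'type': 'LABEL_DETECTION', 'maxResults': 5}],
        }],
        'parent': 'projects/my-project/locations/eu',
    }
    response = service.images().annotate(body=body).execute()
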
Returns:
  An object of the form:

    { # Response to a batch image annotation request.
      &quot;responses&quot;: [ # Individual responses to image annotation requests within the batch.
        { # Response to an image annotation request.
          &quot;labelAnnotations&quot;: [ # If present, label detection has completed successfully.
            { # Set of detected entity features.
              &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
              &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
                  # The accuracy of the entity detection in an image.
                  # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
                  # this field represents the confidence that there is a tower in the query
                  # image. Range [0, 1].
              &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
                  # for `LABEL_DETECTION` features.
                &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the normalized vertex coordinates are relative to the original image
                      # and range from 0 to 1.
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                ],
                &quot;vertices&quot;: [ # The bounding polygon vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the vertex coordinates are in the same scale as the original image.
                    &quot;x&quot;: 42, # X coordinate.
                    &quot;y&quot;: 42, # Y coordinate.
                  },
                ],
              },
              &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
                  # [Google Knowledge Graph Search
                  # API](https://developers.google.com/knowledge-graph/).
              &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
                  # `description` is expressed.
              &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
                  # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
                  # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
                  # detected distant towering building, even though the confidence that
                  # there is a tower in each image may be the same. Range [0, 1].
              &quot;locations&quot;: [ # The location information for the detected entity. Multiple
                  # `LocationInfo` elements can be present because one location may
                  # indicate the location of the scene in the image, and another location
                  # may indicate the location of the place where the image was taken.
                  # Location information is usually present for landmarks.
                { # Detected entity location information.
                  &quot;latLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # lat/long location coordinates.
                      # of doubles representing degrees latitude and degrees longitude. Unless
                      # specified otherwise, this must conform to the
                      # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                      # standard&lt;/a&gt;. Values must be within normalized ranges.
                    &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
                    &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
                  },
                },
              ],
              &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
                  # fields, such as a score or string that qualifies the entity.
                { # A `Property` consists of a user-supplied name/value pair.
                  &quot;value&quot;: &quot;A String&quot;, # Value of the property.
                  &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
                  &quot;name&quot;: &quot;A String&quot;, # Name of the property.
                },
              ],
              &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
            },
          ],
          &quot;safeSearchAnnotation&quot;: { # Set of features pertaining to the image, computed by computer vision # If present, safe-search annotation has completed successfully.
              # methods over safe-search verticals (for example, adult, spoof, medical,
              # violence).
            &quot;spoof&quot;: &quot;A String&quot;, # Spoof likelihood. The likelihood that a modification
                # was made to the image&#x27;s canonical version to make it appear
                # funny or offensive.
            &quot;racy&quot;: &quot;A String&quot;, # Likelihood that the request image contains racy content. Racy content may
                # include (but is not limited to) skimpy or sheer clothing, strategically
                # covered nudity, lewd or provocative poses, or close-ups of sensitive
                # body areas.
            &quot;adult&quot;: &quot;A String&quot;, # Represents the adult content likelihood for the image. Adult content may
                # contain elements such as nudity, pornographic images or cartoons, or
                # sexual activities.
            &quot;violence&quot;: &quot;A String&quot;, # Likelihood that this image contains violent content.
            &quot;medical&quot;: &quot;A String&quot;, # Likelihood that this is a medical image.
          },
          &quot;webDetection&quot;: { # Relevant information for the image from the Internet. # If present, web detection has completed successfully.
            &quot;partialMatchingImages&quot;: [ # Partial matching images from the Internet.
                # Those images are similar enough to share some key-point features. For
                # example an original image will likely have partial matching for its crops.
              { # Metadata for online images.
                &quot;url&quot;: &quot;A String&quot;, # The result image URL.
                &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
              },
            ],
            &quot;bestGuessLabels&quot;: [ # The service&#x27;s best guess as to the topic of the request image.
                # Inferred from similar images on the open web.
              { # Label to provide extra metadata for the web detection.
                &quot;label&quot;: &quot;A String&quot;, # Label for extra metadata.
                &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code for `label`, such as &quot;en-US&quot; or &quot;sr-Latn&quot;.
                    # For more information, see
                    # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
              },
            ],
            &quot;visuallySimilarImages&quot;: [ # The visually similar image results.
              { # Metadata for online images.
                &quot;url&quot;: &quot;A String&quot;, # The result image URL.
                &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
              },
            ],
            &quot;webEntities&quot;: [ # Deduced entities from similar images on the Internet.
              { # Entity deduced from similar images on the Internet.
                &quot;entityId&quot;: &quot;A String&quot;, # Opaque entity ID.
                &quot;score&quot;: 3.14, # Overall relevancy score for the entity.
                    # Not normalized and not comparable across different image queries.
                &quot;description&quot;: &quot;A String&quot;, # Canonical description of the entity, in English.
              },
            ],
            &quot;fullMatchingImages&quot;: [ # Fully matching images from the Internet.
                # Can include resized copies of the query image.
              { # Metadata for online images.
                &quot;url&quot;: &quot;A String&quot;, # The result image URL.
                &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
              },
            ],
            &quot;pagesWithMatchingImages&quot;: [ # Web pages containing the matching images from the Internet.
              { # Metadata for web pages.
                &quot;fullMatchingImages&quot;: [ # Fully matching images on the page.
                    # Can include resized copies of the query image.
                  { # Metadata for online images.
                    &quot;url&quot;: &quot;A String&quot;, # The result image URL.
                    &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
                  },
                ],
                &quot;pageTitle&quot;: &quot;A String&quot;, # Title for the web page; may contain HTML markup.
                &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the web page.
                &quot;url&quot;: &quot;A String&quot;, # The result web page URL.
                &quot;partialMatchingImages&quot;: [ # Partial matching images on the page.
                    # Those images are similar enough to share some key-point features. For
                    # example an original image will likely have partial matching for its
                    # crops.
                  { # Metadata for online images.
                    &quot;url&quot;: &quot;A String&quot;, # The result image URL.
                    &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
                  },
                ],
              },
            ],
          },
          &quot;productSearchResults&quot;: { # Results for a product search request. # If present, product search has completed successfully.
            &quot;productGroupedResults&quot;: [ # List of results grouped by products detected in the query image. Each entry
                # corresponds to one bounding polygon in the query image, and contains the
                # matching products specific to that region. There may be duplicate product
                # matches in the union of all the per-product results.
              { # Information about the products similar to a single product in a query
                  # image.
                &quot;objectAnnotations&quot;: [ # List of generic predictions for the object in the bounding box.
                  { # Prediction for what the object in the bounding box is.
                    &quot;score&quot;: 3.14, # Score of the result. Range [0, 1].
                    &quot;mid&quot;: &quot;A String&quot;, # Object ID that should align with EntityAnnotation mid.
                    &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                        # information, see
                        # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                    &quot;name&quot;: &quot;A String&quot;, # Object name, expressed in its `language_code` language.
                  },
                ],
                &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the product detected in the query image.
                  &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                    { # A vertex represents a 2D point in the image.
                        # NOTE: the normalized vertex coordinates are relative to the original image
                        # and range from 0 to 1.
                      &quot;y&quot;: 3.14, # Y coordinate.
                      &quot;x&quot;: 3.14, # X coordinate.
                    },
                  ],
                  &quot;vertices&quot;: [ # The bounding polygon vertices.
                    { # A vertex represents a 2D point in the image.
                        # NOTE: the vertex coordinates are in the same scale as the original image.
                      &quot;x&quot;: 42, # X coordinate.
                      &quot;y&quot;: 42, # Y coordinate.
                    },
                  ],
                },
                &quot;results&quot;: [ # List of results, one for each product match.
                  { # Information about a product.
                    &quot;image&quot;: &quot;A String&quot;, # The resource name of the image from the product that is the closest match
                        # to the query.
                    &quot;product&quot;: { # A Product contains ReferenceImages. # The Product.
                      &quot;displayName&quot;: &quot;A String&quot;, # The user-provided name for this Product. Must not be empty. Must be at most
                          # 4096 characters long.
                      &quot;name&quot;: &quot;A String&quot;, # The resource name of the product.
                          #
                          # Format is:
                          # `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
                          #
                          # This field is ignored when creating a product.
                      &quot;productLabels&quot;: [ # Key-value pairs that can be attached to a product. At query time,
                          # constraints can be specified based on the product_labels.
                          #
                          # Note that integer values can be provided as strings, e.g. &quot;1199&quot;. Only
                          # strings with integer values can match a range-based restriction which is
                          # to be supported soon.
                          #
                          # Multiple values can be assigned to the same key. One product may have up to
                          # 500 product_labels.
                          #
                          # Notice that the total number of distinct product_labels over all products
                          # in one ProductSet cannot exceed 1M, otherwise the product search pipeline
                          # will refuse to work for that ProductSet.
                        { # A product label represented as a key-value pair.
                          &quot;key&quot;: &quot;A String&quot;, # The key of the label attached to the product. Cannot be empty and cannot
                              # exceed 128 bytes.
                          &quot;value&quot;: &quot;A String&quot;, # The value of the label attached to the product. Cannot be empty and
                              # cannot exceed 128 bytes.
                        },
                      ],
                      &quot;description&quot;: &quot;A String&quot;, # User-provided metadata to be stored with this product. Must be at most 4096
                          # characters long.
                      &quot;productCategory&quot;: &quot;A String&quot;, # Immutable. The category for the product identified by the reference image. This should
                          # be either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, or &quot;toys-v2&quot;. The legacy categories
                          # &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported, but these should
                          # not be used for new products.
                    },
                    &quot;score&quot;: 3.14, # A confidence level on the match, ranging from 0 (no confidence) to
                        # 1 (full confidence).
                  },
                ],
              },
            ],
            &quot;indexTime&quot;: &quot;A String&quot;, # Timestamp of the index which provided these results. Products added to the
                # product set and products removed from the product set after this time are
                # not reflected in the current results.
            &quot;results&quot;: [ # List of results, one for each product match.
              { # Information about a product.
                &quot;image&quot;: &quot;A String&quot;, # The resource name of the image from the product that is the closest match
                    # to the query.
                &quot;product&quot;: { # A Product contains ReferenceImages. # The Product.
                  &quot;displayName&quot;: &quot;A String&quot;, # The user-provided name for this Product. Must not be empty. Must be at most
                      # 4096 characters long.
                  &quot;name&quot;: &quot;A String&quot;, # The resource name of the product.
                      #
                      # Format is:
                      # `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
                      #
                      # This field is ignored when creating a product.
                  &quot;productLabels&quot;: [ # Key-value pairs that can be attached to a product. At query time,
                      # constraints can be specified based on the product_labels.
                      #
                      # Note that integer values can be provided as strings, e.g. &quot;1199&quot;. Only
                      # strings with integer values can match a range-based restriction which is
                      # to be supported soon.
                      #
                      # Multiple values can be assigned to the same key. One product may have up to
                      # 500 product_labels.
                      #
                      # Notice that the total number of distinct product_labels over all products
                      # in one ProductSet cannot exceed 1M, otherwise the product search pipeline
                      # will refuse to work for that ProductSet.
                    { # A product label represented as a key-value pair.
                      &quot;key&quot;: &quot;A String&quot;, # The key of the label attached to the product. Cannot be empty and cannot
                          # exceed 128 bytes.
                      &quot;value&quot;: &quot;A String&quot;, # The value of the label attached to the product. Cannot be empty and
                          # cannot exceed 128 bytes.
                    },
                  ],
                  &quot;description&quot;: &quot;A String&quot;, # User-provided metadata to be stored with this product. Must be at most 4096
                      # characters long.
                  &quot;productCategory&quot;: &quot;A String&quot;, # Immutable. The category for the product identified by the reference image. This should
                      # be either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, or &quot;toys-v2&quot;. The legacy categories
                      # &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported, but these should
                      # not be used for new products.
                },
                &quot;score&quot;: 3.14, # A confidence level on the match, ranging from 0 (no confidence) to
                    # 1 (full confidence).
              },
            ],
          },
          &quot;fullTextAnnotation&quot;: { # TextAnnotation contains a structured representation of OCR extracted text. # If present, text (OCR) detection or document (OCR) text detection has
              # completed successfully.
              # This annotation provides the structural hierarchy for the OCR detected
              # text.
              # The hierarchy of an OCR extracted text structure is like this:
              #     TextAnnotation -&gt; Page -&gt; Block -&gt; Paragraph -&gt; Word -&gt; Symbol
              # Each structural component, starting from Page, may further have their own
              # properties. Properties describe detected languages, breaks, etc. Please refer
              # to the TextAnnotation.TextProperty message definition below for more
              # detail.
            &quot;text&quot;: &quot;A String&quot;, # UTF-8 text detected on the pages.
            &quot;pages&quot;: [ # List of pages detected by OCR.
              { # Detected page from OCR.
                &quot;width&quot;: 42, # Page width. For PDFs the unit is points. For images (including
                    # TIFFs) the unit is pixels.
                &quot;blocks&quot;: [ # List of blocks of text, images, etc. on this page.
                  { # Logical element on the page.
                    &quot;paragraphs&quot;: [ # List of paragraphs in this block (if this block is of type text).
                      { # Structural unit of text representing a number of words in certain order.
                        &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the paragraph.
                            # The vertices are in the order of top-left, top-right, bottom-right,
                            # bottom-left. When a rotation of the bounding box is detected the rotation
                            # is represented as around the top-left corner as defined when the text is
                            # read in the &#x27;natural&#x27; orientation.
                            # For example:
                            #   * when the text is horizontal it might look like:
                            #       0----1
                            #       |    |
                            #       3----2
                            #   * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                            #       2----3
                            #       |    |
                            #       1----0
                            #   and the vertex order will still be (0, 1, 2, 3).
                          &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                            { # A vertex represents a 2D point in the image.
                                # NOTE: the normalized vertex coordinates are relative to the original image
                                # and range from 0 to 1.
                              &quot;y&quot;: 3.14, # Y coordinate.
                              &quot;x&quot;: 3.14, # X coordinate.
                            },
                          ],
                          &quot;vertices&quot;: [ # The bounding polygon vertices.
                            { # A vertex represents a 2D point in the image.
                                # NOTE: the vertex coordinates are in the same scale as the original image.
                              &quot;x&quot;: 42, # X coordinate.
                              &quot;y&quot;: 42, # Y coordinate.
                            },
                          ],
                        },
                        &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the paragraph.
                          &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                            &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                            &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                          },
                          &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                            { # Detected language for a structural component.
                              &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                  # information, see
                                  # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                              &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                            },
                          ],
                        },
                        &quot;words&quot;: [ # List of all words in this paragraph.
                          { # A word representation.
                            &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the word.
                              &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                                &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                                &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                              },
                              &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                                { # Detected language for a structural component.
                                  &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                      # information, see
                                      # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                                  &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                                },
                              ],
                            },
                            &quot;confidence&quot;: 3.14, # Confidence of the OCR results for the word. Range [0, 1].
                            &quot;symbols&quot;: [ # List of symbols in the word.
                                # The order of the symbols follows the natural reading order.
                              { # A single symbol representation.
                                &quot;confidence&quot;: 3.14, # Confidence of the OCR results for the symbol. Range [0, 1].
                                &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the symbol.
                                    # The vertices are in the order of top-left, top-right, bottom-right,
                                    # bottom-left. When a rotation of the bounding box is detected the rotation
                                    # is represented as around the top-left corner as defined when the text is
                                    # read in the &#x27;natural&#x27; orientation.
                                    # For example:
                                    #   * when the text is horizontal it might look like:
                                    #       0----1
                                    #       |    |
                                    #       3----2
                                    #   * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                                    #       2----3
                                    #       |    |
                                    #       1----0
                                    #   and the vertex order will still be (0, 1, 2, 3).
                                  &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                                    { # A vertex represents a 2D point in the image.
                                        # NOTE: the normalized vertex coordinates are relative to the original image
                                        # and range from 0 to 1.
                                      &quot;y&quot;: 3.14, # Y coordinate.
                                      &quot;x&quot;: 3.14, # X coordinate.
                                    },
                                  ],
                                  &quot;vertices&quot;: [ # The bounding polygon vertices.
                                    { # A vertex represents a 2D point in the image.
                                        # NOTE: the vertex coordinates are in the same scale as the original image.
                                      &quot;x&quot;: 42, # X coordinate.
                                      &quot;y&quot;: 42, # Y coordinate.
                                    },
                                  ],
                                },
                                &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the symbol.
                                  &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                                    &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                                    &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                                  },
                                  &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                                    { # Detected language for a structural component.
                                      &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                          # information, see
                                          # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                                      &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                                    },
                                  ],
                                },
                                &quot;text&quot;: &quot;A String&quot;, # The actual UTF-8 representation of the symbol.
                              },
                            ],
                            &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the word.
                                # The vertices are in the order of top-left, top-right, bottom-right,
                                # bottom-left. When a rotation of the bounding box is detected the rotation
                                # is represented as around the top-left corner as defined when the text is
                                # read in the &#x27;natural&#x27; orientation.
                                # For example:
                                #   * when the text is horizontal it might look like:
                                #       0----1
                                #       |    |
                                #       3----2
                                #   * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                                #       2----3
                                #       |    |
                                #       1----0
                                #   and the vertex order will still be (0, 1, 2, 3).
                              &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                                { # A vertex represents a 2D point in the image.
                                    # NOTE: the normalized vertex coordinates are relative to the original image
                                    # and range from 0 to 1.
                                  &quot;y&quot;: 3.14, # Y coordinate.
                                  &quot;x&quot;: 3.14, # X coordinate.
                                },
                              ],
                              &quot;vertices&quot;: [ # The bounding polygon vertices.
                                { # A vertex represents a 2D point in the image.
                                    # NOTE: the vertex coordinates are in the same scale as the original image.
                                  &quot;x&quot;: 42, # X coordinate.
                                  &quot;y&quot;: 42, # Y coordinate.
                                },
                              ],
                            },
                          },
                        ],
                        &quot;confidence&quot;: 3.14, # Confidence of the OCR results for the paragraph. Range [0, 1].
                      },
                    ],
                    &quot;blockType&quot;: &quot;A String&quot;, # Detected block type (text, image, etc.) for this block.
                    &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the block.
                      &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                        &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                        &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                      },
                      &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                        { # Detected language for a structural component.
                          &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                              # information, see
                              # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                          &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                        },
                      ],
                    },
                    &quot;confidence&quot;: 3.14, # Confidence of the OCR results on the block. Range [0, 1].
                    &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the block.
                        # The vertices are in the order of top-left, top-right, bottom-right,
                        # bottom-left. When a rotation of the bounding box is detected the rotation
                        # is represented as around the top-left corner as defined when the text is
                        # read in the &#x27;natural&#x27; orientation.
                        # For example:
                        #
                        # * when the text is horizontal it might look like:
                        #
                        #     0----1
                        #     |    |
                        #     3----2
                        #
                        # * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                        #
                        #     2----3
                        #     |    |
                        #     1----0
                        #
                        # and the vertex order will still be (0, 1, 2, 3).
                      &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                        { # A vertex represents a 2D point in the image.
                            # NOTE: the normalized vertex coordinates are relative to the original image
                            # and range from 0 to 1.
                          &quot;y&quot;: 3.14, # Y coordinate.
                          &quot;x&quot;: 3.14, # X coordinate.
                        },
                      ],
                      &quot;vertices&quot;: [ # The bounding polygon vertices.
                        { # A vertex represents a 2D point in the image.
                            # NOTE: the vertex coordinates are in the same scale as the original image.
                          &quot;x&quot;: 42, # X coordinate.
                          &quot;y&quot;: 42, # Y coordinate.
                        },
                      ],
                    },
                  },
                ],
                &quot;confidence&quot;: 3.14, # Confidence of the OCR results on the page. Range [0, 1].
                &quot;height&quot;: 42, # Page height. For PDFs the unit is points. For images (including
                    # TIFFs) the unit is pixels.
                &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected on the page.
                  &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                    &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                    &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                  },
                  &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                    { # Detected language for a structural component.
                      &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                          # information, see
                          # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                      &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                    },
                  ],
                },
              },
            ],
          },
          &quot;localizedObjectAnnotations&quot;: [ # If present, localized object detection has completed successfully.
              # This will be sorted descending by confidence score.
            { # Set of detected objects with bounding boxes.
              &quot;name&quot;: &quot;A String&quot;, # Object name, expressed in its `language_code` language.
              &quot;score&quot;: 3.14, # Score of the result. Range [0, 1].
              &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                  # information, see
                  # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
              &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this object belongs. This must be populated.
                &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the normalized vertex coordinates are relative to the original image
                      # and range from 0 to 1.
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                ],
                &quot;vertices&quot;: [ # The bounding polygon vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the vertex coordinates are in the same scale as the original image.
                    &quot;x&quot;: 42, # X coordinate.
                    &quot;y&quot;: 42, # Y coordinate.
                  },
                ],
              },
              &quot;mid&quot;: &quot;A String&quot;, # Object ID that should align with EntityAnnotation mid.
            },
          ],
          &quot;imagePropertiesAnnotation&quot;: { # Stores image properties, such as dominant colors. # If present, image properties were extracted successfully.
            &quot;dominantColors&quot;: { # Set of dominant colors and their corresponding scores. # If present, dominant colors completed successfully.
              &quot;colors&quot;: [ # RGB color values with their score and pixel fraction.
                { # Color information consists of RGB channels, score, and the fraction of
                    # the image that the color occupies in the image.
                  &quot;score&quot;: 3.14, # Image-specific score for this color. Value in range [0, 1].
                  &quot;color&quot;: { # Represents a color in the RGBA color space. This representation is designed # RGB components of the color.
                      # for simplicity of conversion to/from color representations in various
                      # languages over compactness; for example, the fields of this representation
                      # can be trivially provided to the constructor of &quot;java.awt.Color&quot; in Java; it
                      # can also be trivially provided to UIColor&#x27;s &quot;+colorWithRed:green:blue:alpha&quot;
                      # method in iOS; and, with just a little work, it can be easily formatted into
                      # a CSS &quot;rgba()&quot; string in JavaScript, as well.
                      #
                      # Note: this proto does not carry information about the absolute color space
                      # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB,
                      # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color
                      # space.
                      #
                      # Note: when color equality needs to be decided, implementations, unless
                      # documented otherwise, will treat two colors to be equal if all their red,
                      # green, blue and alpha values each differ by at most 1e-5.
                      #
                      # Example (Java):
                      #
                      #     import com.google.type.Color;
                      #
                      #     // ...
                      #     public static java.awt.Color fromProto(Color protocolor) {
                      #       float alpha = protocolor.hasAlpha()
                      #           ? protocolor.getAlpha().getValue()
                      #           : 1.0;
                      #
                      #       return new java.awt.Color(
                      #           protocolor.getRed(),
                      #           protocolor.getGreen(),
                      #           protocolor.getBlue(),
                      #           alpha);
                      #     }
                      #
                      #     public static Color toProto(java.awt.Color color) {
                      #       float red = (float) color.getRed();
                      #       float green = (float) color.getGreen();
                      #       float blue = (float) color.getBlue();
                      #       float denominator = 255.0;
                      #       Color.Builder resultBuilder =
                      #           Color
                      #               .newBuilder()
                      #               .setRed(red / denominator)
                      #               .setGreen(green / denominator)
                      #               .setBlue(blue / denominator);
                      #       int alpha = color.getAlpha();
                      #       if (alpha != 255) {
                      #         result.setAlpha(
                      #             FloatValue
                      #                 .newBuilder()
                      #                 .setValue(((float) alpha) / denominator)
                      #                 .build());
                      #       }
                      #       return resultBuilder.build();
                      #     }
                      #     // ...
                      #
                      # Example (iOS / Obj-C):
                      #
                      #     // ...
                      #     static UIColor* fromProto(Color* protocolor) {
                      #       float red = [protocolor red];
                      #       float green = [protocolor green];
                      #       float blue = [protocolor blue];
                      #       FloatValue* alpha_wrapper = [protocolor alpha];
                      #       float alpha = 1.0;
                      #       if (alpha_wrapper != nil) {
                      #         alpha = [alpha_wrapper value];
                      #       }
                      #       return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
                      #     }
                      #
                      #     static Color* toProto(UIColor* color) {
                      #       CGFloat red, green, blue, alpha;
                      #       if (![color getRed:&amp;red green:&amp;green blue:&amp;blue alpha:&amp;alpha]) {
                      #         return nil;
                      #       }
                      #       Color* result = [[Color alloc] init];
                      #       [result setRed:red];
                      #       [result setGreen:green];
                      #       [result setBlue:blue];
                      #       if (alpha &lt;= 0.9999) {
                      #         [result setAlpha:floatWrapperWithValue(alpha)];
                      #       }
                      #       [result autorelease];
                      #       return result;
                      #     }
                      #     // ...
                      #
                      # Example (JavaScript):
                      #
                      #     // ...
                      #
                      #     var protoToCssColor = function(rgb_color) {
                      #       var redFrac = rgb_color.red || 0.0;
                      #       var greenFrac = rgb_color.green || 0.0;
                      #       var blueFrac = rgb_color.blue || 0.0;
                      #       var red = Math.floor(redFrac * 255);
                      #       var green = Math.floor(greenFrac * 255);
                      #       var blue = Math.floor(blueFrac * 255);
                      #
                      #       if (!(&#x27;alpha&#x27; in rgb_color)) {
                      #         return rgbToCssColor_(red, green, blue);
                      #       }
                      #
                      #       var alphaFrac = rgb_color.alpha.value || 0.0;
                      #       var rgbParams = [red, green, blue].join(&#x27;,&#x27;);
                      #       return [&#x27;rgba(&#x27;, rgbParams, &#x27;,&#x27;, alphaFrac, &#x27;)&#x27;].join(&#x27;&#x27;);
                      #     };
                      #
                      #     var rgbToCssColor_ = function(red, green, blue) {
                      #       var rgbNumber = new Number((red &lt;&lt; 16) | (green &lt;&lt; 8) | blue);
                      #       var hexString = rgbNumber.toString(16);
                      #       var missingZeros = 6 - hexString.length;
                      #       var resultBuilder = [&#x27;#&#x27;];
                      #       for (var i = 0; i &lt; missingZeros; i++) {
                      #         resultBuilder.push(&#x27;0&#x27;);
                      #       }
                      #       resultBuilder.push(hexString);
                      #       return resultBuilder.join(&#x27;&#x27;);
                      #     };
                      #
                      #     // ...
                    &quot;blue&quot;: 3.14, # The amount of blue in the color as a value in the interval [0, 1].
                    &quot;red&quot;: 3.14, # The amount of red in the color as a value in the interval [0, 1].
                    &quot;green&quot;: 3.14, # The amount of green in the color as a value in the interval [0, 1].
                    &quot;alpha&quot;: 3.14, # The fraction of this color that should be applied to the pixel. That is,
                        # the final pixel color is defined by the equation:
                        #
                        #   pixel color = alpha * (this color) + (1.0 - alpha) * (background color)
                        #
                        # This means that a value of 1.0 corresponds to a solid color, whereas
                        # a value of 0.0 corresponds to a completely transparent color. This
                        # uses a wrapper message rather than a simple float scalar so that it is
                        # possible to distinguish between a default value and the value being unset.
                        # If omitted, this color object is to be rendered as a solid color
                        # (as if the alpha value had been explicitly given with a value of 1.0).
                  },
                  &quot;pixelFraction&quot;: 3.14, # The fraction of pixels the color occupies in the image.
                      # Value in range [0, 1].
                },
              ],
            },
          },
          &quot;faceAnnotations&quot;: [ # If present, face detection has completed successfully.
            { # A face annotation object contains the results of face detection.
              &quot;fdBoundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The `fd_bounding_poly` bounding polygon is tighter than the
                  # `boundingPoly`, and encloses only the skin part of the face. Typically, it
                  # is used to eliminate the face from any image analysis that detects the
                  # &quot;amount of skin&quot; visible in an image. It is not based on the
                  # landmarker results, only on the initial face detection, hence
                  # the &lt;code&gt;fd&lt;/code&gt; (face detection) prefix.
                &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the normalized vertex coordinates are relative to the original image
                      # and range from 0 to 1.
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                ],
                &quot;vertices&quot;: [ # The bounding polygon vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the vertex coordinates are in the same scale as the original image.
                    &quot;x&quot;: 42, # X coordinate.
                    &quot;y&quot;: 42, # Y coordinate.
                  },
                ],
              },
              &quot;sorrowLikelihood&quot;: &quot;A String&quot;, # Sorrow likelihood.
              &quot;rollAngle&quot;: 3.14, # Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
                  # of the face relative to the image vertical about the axis perpendicular to
                  # the face. Range [-180,180].
              &quot;angerLikelihood&quot;: &quot;A String&quot;, # Anger likelihood.
              &quot;surpriseLikelihood&quot;: &quot;A String&quot;, # Surprise likelihood.
              &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the face. The coordinates of the bounding box
                  # are in the original image&#x27;s scale.
                  # The bounding box is computed to &quot;frame&quot; the face in accordance with human
                  # expectations. It is based on the landmarker results.
                  # Note that one or more x and/or y coordinates may not be generated in the
                  # `BoundingPoly` (the polygon will be unbounded) if only a partial face
                  # appears in the image to be annotated.
                &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the normalized vertex coordinates are relative to the original image
                      # and range from 0 to 1.
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                ],
                &quot;vertices&quot;: [ # The bounding polygon vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the vertex coordinates are in the same scale as the original image.
                    &quot;x&quot;: 42, # X coordinate.
                    &quot;y&quot;: 42, # Y coordinate.
                  },
                ],
              },
              &quot;detectionConfidence&quot;: 3.14, # Detection confidence. Range [0, 1].
              &quot;headwearLikelihood&quot;: &quot;A String&quot;, # Headwear likelihood.
              &quot;panAngle&quot;: 3.14, # Yaw angle, which indicates the leftward/rightward angle that the face is
                  # pointing relative to the vertical plane perpendicular to the image. Range
                  # [-180,180].
              &quot;landmarks&quot;: [ # Detected face landmarks.
                { # A face-specific landmark (for example, a face feature).
                  &quot;position&quot;: { # A 3D position in the image, used primarily for Face detection landmarks. # Face landmark position.
                      # A valid Position must have both x and y coordinates.
                      # The position coordinates are in the same scale as the original image.
                    &quot;z&quot;: 3.14, # Z coordinate (or depth).
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                  &quot;type&quot;: &quot;A String&quot;, # Face landmark type.
                },
              ],
              &quot;landmarkingConfidence&quot;: 3.14, # Face landmarking confidence. Range [0, 1].
              &quot;joyLikelihood&quot;: &quot;A String&quot;, # Joy likelihood.
              &quot;tiltAngle&quot;: 3.14, # Pitch angle, which indicates the upwards/downwards angle that the face is
                  # pointing relative to the image&#x27;s horizontal plane. Range [-180,180].
              &quot;underExposedLikelihood&quot;: &quot;A String&quot;, # Under-exposed likelihood.
              &quot;blurredLikelihood&quot;: &quot;A String&quot;, # Blurred likelihood.
            },
          ],
          &quot;logoAnnotations&quot;: [ # If present, logo detection has completed successfully.
            { # Set of detected entity features.
              &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
              &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
                  # The accuracy of the entity detection in an image.
                  # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
Jon Wayne Parrott692617a2017-01-06 09:58:29 -08001029 # this field represents the confidence that there is a tower in the query
1030 # image. Range [0, 1].
Bu Sun Kim65020912020-05-20 12:08:20 -07001031 &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001032 # for `LABEL_DETECTION` features.
Bu Sun Kim65020912020-05-20 12:08:20 -07001033 &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001034 { # A vertex represents a 2D point in the image.
1035 # NOTE: the normalized vertex coordinates are relative to the original image
1036 # and range from 0 to 1.
Bu Sun Kim65020912020-05-20 12:08:20 -07001037 &quot;y&quot;: 3.14, # Y coordinate.
1038 &quot;x&quot;: 3.14, # X coordinate.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001039 },
1040 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001041 &quot;vertices&quot;: [ # The bounding polygon vertices.
Jon Wayne Parrott0a471d32016-05-19 10:54:38 -07001042 { # A vertex represents a 2D point in the image.
1043 # NOTE: the vertex coordinates are in the same scale as the original image.
Bu Sun Kim65020912020-05-20 12:08:20 -07001044 &quot;x&quot;: 42, # X coordinate.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001045 &quot;y&quot;: 42, # Y coordinate.
Jon Wayne Parrott0a471d32016-05-19 10:54:38 -07001046 },
1047 ],
1048 },
Bu Sun Kimd059ad82020-07-22 17:02:09 -07001049 &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
1050 # [Google Knowledge Graph Search
1051 # API](https://developers.google.com/knowledge-graph/).
1052 &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
1053 # `description` is expressed.
Bu Sun Kim65020912020-05-20 12:08:20 -07001054 &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
1055 # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
1056 # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
1057 # detected distant towering building, even though the confidence that
1058 # there is a tower in each image may be the same. Range [0, 1].
Bu Sun Kimd059ad82020-07-22 17:02:09 -07001059 &quot;locations&quot;: [ # The location information for the detected entity. Multiple
1060 # `LocationInfo` elements can be present because one location may
1061 # indicate the location of the scene in the image, and another location
1062 # may indicate the location of the place where the image was taken.
1063 # Location information is usually present for landmarks.
1064 { # Detected entity location information.
1065 &quot;latLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # lat/long location coordinates.
1066 # of doubles representing degrees latitude and degrees longitude. Unless
1067 # specified otherwise, this must conform to the
1068 # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
1069 # standard&lt;/a&gt;. Values must be within normalized ranges.
1070 &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
1071 &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
1072 },
1073 },
1074 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001075 &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
                  # fields, such as a score or string that qualifies the entity.
                { # A `Property` consists of a user-supplied name/value pair.
                  &quot;value&quot;: &quot;A String&quot;, # Value of the property.
                  &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
                  &quot;name&quot;: &quot;A String&quot;, # Name of the property.
                },
              ],
              &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
            },
          ],
          &quot;context&quot;: { # If an image was produced from a file (e.g. a PDF), this message gives # If present, contextual information is needed to understand where this image
              # comes from.
              # information about the source of that image.
            &quot;pageNumber&quot;: 42, # If the file was a PDF or TIFF, this field gives the page number within
                # the file used to produce the image.
            &quot;uri&quot;: &quot;A String&quot;, # The URI of the file used to produce the image.
          },
          &quot;cropHintsAnnotation&quot;: { # Set of crop hints that are used to generate new crops when serving images. # If present, crop hints have completed successfully.
            &quot;cropHints&quot;: [ # Crop hint results.
              { # Single crop hint that is used to generate a new crop when serving an image.
                &quot;importanceFraction&quot;: 3.14, # Fraction of importance of this salient region with respect to the original
                    # image.
                &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon for the crop region. The coordinates of the bounding
                    # box are in the original image&#x27;s scale.
                  &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                    { # A vertex represents a 2D point in the image.
                        # NOTE: the normalized vertex coordinates are relative to the original image
                        # and range from 0 to 1.
                      &quot;y&quot;: 3.14, # Y coordinate.
                      &quot;x&quot;: 3.14, # X coordinate.
                    },
                  ],
                  &quot;vertices&quot;: [ # The bounding polygon vertices.
                    { # A vertex represents a 2D point in the image.
                        # NOTE: the vertex coordinates are in the same scale as the original image.
                      &quot;x&quot;: 42, # X coordinate.
                      &quot;y&quot;: 42, # Y coordinate.
                    },
                  ],
                },
                &quot;confidence&quot;: 3.14, # Confidence of this being a salient region. Range [0, 1].
              },
            ],
          },
          &quot;error&quot;: { # The `Status` type defines a logical error model that is suitable for # If set, represents the error message for the operation.
              # Note that filled-in image annotations are guaranteed to be
              # correct, even when `error` is set.
              # different programming environments, including REST APIs and RPC APIs. It is
              # used by [gRPC](https://github.com/grpc). Each `Status` message contains
              # three pieces of data: error code, error message, and error details.
              #
              # You can find out more about this error model and how to work with it in the
              # [API Design Guide](https://cloud.google.com/apis/design/errors).
            &quot;code&quot;: 42, # The status code, which should be an enum value of google.rpc.Code.
            &quot;details&quot;: [ # A list of messages that carry the error details. There is a common set of
                # message types for APIs to use.
              {
                &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
              },
            ],
            &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
                # user-facing error message should be localized and sent in the
                # google.rpc.Status.details field, or localized by the client.
          },
          &quot;landmarkAnnotations&quot;: [ # If present, landmark detection has completed successfully.
            { # Set of detected entity features.
              &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
              &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
                  # The accuracy of the entity detection in an image.
                  # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
                  # this field represents the confidence that there is a tower in the query
                  # image. Range [0, 1].
              &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
                  # for `LABEL_DETECTION` features.
                &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the normalized vertex coordinates are relative to the original image
                      # and range from 0 to 1.
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                ],
                &quot;vertices&quot;: [ # The bounding polygon vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the vertex coordinates are in the same scale as the original image.
                    &quot;x&quot;: 42, # X coordinate.
                    &quot;y&quot;: 42, # Y coordinate.
                  },
                ],
              },
              &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
                  # [Google Knowledge Graph Search
                  # API](https://developers.google.com/knowledge-graph/).
              &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
                  # `description` is expressed.
              &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
                  # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
                  # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
                  # detected distant towering building, even though the confidence that
                  # there is a tower in each image may be the same. Range [0, 1].
              &quot;locations&quot;: [ # The location information for the detected entity. Multiple
                  # `LocationInfo` elements can be present because one location may
                  # indicate the location of the scene in the image, and another location
                  # may indicate the location of the place where the image was taken.
                  # Location information is usually present for landmarks.
                { # Detected entity location information.
                  &quot;latLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # lat/long location coordinates.
                      # of doubles representing degrees latitude and degrees longitude. Unless
                      # specified otherwise, this must conform to the
                      # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                      # standard&lt;/a&gt;. Values must be within normalized ranges.
                    &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
                    &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
                  },
                },
              ],
              &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
                  # fields, such as a score or string that qualifies the entity.
                { # A `Property` consists of a user-supplied name/value pair.
                  &quot;value&quot;: &quot;A String&quot;, # Value of the property.
                  &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
                  &quot;name&quot;: &quot;A String&quot;, # Name of the property.
                },
              ],
              &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
            },
          ],
          &quot;textAnnotations&quot;: [ # If present, text (OCR) detection has completed successfully.
            { # Set of detected entity features.
              &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
              &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
                  # The accuracy of the entity detection in an image.
                  # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
                  # this field represents the confidence that there is a tower in the query
                  # image. Range [0, 1].
              &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
                  # for `LABEL_DETECTION` features.
                &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the normalized vertex coordinates are relative to the original image
                      # and range from 0 to 1.
                    &quot;y&quot;: 3.14, # Y coordinate.
                    &quot;x&quot;: 3.14, # X coordinate.
                  },
                ],
                &quot;vertices&quot;: [ # The bounding polygon vertices.
                  { # A vertex represents a 2D point in the image.
                      # NOTE: the vertex coordinates are in the same scale as the original image.
                    &quot;x&quot;: 42, # X coordinate.
                    &quot;y&quot;: 42, # Y coordinate.
                  },
                ],
              },
              &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
                  # [Google Knowledge Graph Search
                  # API](https://developers.google.com/knowledge-graph/).
              &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
                  # `description` is expressed.
              &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
                  # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
                  # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
                  # detected distant towering building, even though the confidence that
                  # there is a tower in each image may be the same. Range [0, 1].
              &quot;locations&quot;: [ # The location information for the detected entity. Multiple
                  # `LocationInfo` elements can be present because one location may
                  # indicate the location of the scene in the image, and another location
                  # may indicate the location of the place where the image was taken.
                  # Location information is usually present for landmarks.
                { # Detected entity location information.
                  &quot;latLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # lat/long location coordinates.
                      # of doubles representing degrees latitude and degrees longitude. Unless
                      # specified otherwise, this must conform to the
                      # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                      # standard&lt;/a&gt;. Values must be within normalized ranges.
                    &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
                    &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
                  },
                },
              ],
              &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
                  # fields, such as a score or string that qualifies the entity.
                { # A `Property` consists of a user-supplied name/value pair.
                  &quot;value&quot;: &quot;A String&quot;, # Value of the property.
                  &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
                  &quot;name&quot;: &quot;A String&quot;, # Name of the property.
                },
              ],
              &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
            },
          ],
        },
      ],
    }</pre>
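
<p>A minimal usage sketch for this method with the google-api-python-client
library. The bucket and object names below are placeholders, and Application
Default Credentials are assumed to be available in the environment; adapt the
request to the features you need.</p>

<pre>
from googleapiclient.discovery import build

# Build a Cloud Vision API client from the discovery document.
service = build(&#x27;vision&#x27;, &#x27;v1&#x27;)

# Batch of one request: run label and face detection on a single image.
# &#x27;gs://my-bucket/image.jpg&#x27; is a hypothetical Cloud Storage object.
body = {
    &#x27;requests&#x27;: [
        {
            &#x27;image&#x27;: {&#x27;source&#x27;: {&#x27;imageUri&#x27;: &#x27;gs://my-bucket/image.jpg&#x27;}},
            &#x27;features&#x27;: [
                {&#x27;type&#x27;: &#x27;LABEL_DETECTION&#x27;, &#x27;maxResults&#x27;: 5},
                {&#x27;type&#x27;: &#x27;FACE_DETECTION&#x27;},
            ],
        },
    ],
}

response = service.images().annotate(body=body).execute()

# Each element of &#x27;responses&#x27; corresponds to one request in the batch.
for result in response.get(&#x27;responses&#x27;, []):
    for label in result.get(&#x27;labelAnnotations&#x27;, []):
        print(label[&#x27;description&#x27;], label[&#x27;score&#x27;])
    # Per-image failures are reported in the &#x27;error&#x27; field rather than raised.
    if &#x27;error&#x27; in result:
        print(&#x27;Annotation failed:&#x27;, result[&#x27;error&#x27;][&#x27;message&#x27;])
</pre>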
</div>

<div class="method">
    <code class="details" id="asyncBatchAnnotate">asyncBatchAnnotate(body=None, x__xgafv=None)</code>
  <pre>Run asynchronous image detection and annotation for a list of images.

Progress and results can be retrieved through the
`google.longrunning.Operations` interface.
`Operation.metadata` contains `OperationMetadata` (metadata).
`Operation.response` contains `AsyncBatchAnnotateImagesResponse` (results).

This service writes image annotation outputs to JSON files in the customer&#x27;s
GCS bucket; each JSON file contains a BatchAnnotateImagesResponse proto.

Args:
  body: object, The request body.
    The object takes the form of:

{ # Request for async image annotation for a list of images.
    &quot;parent&quot;: &quot;A String&quot;, # Optional. Target project and location to make a call.
        #
        # Format: `projects/{project-id}/locations/{location-id}`.
        #
        # If no parent is specified, a region will be chosen automatically.
        #
        # Supported location-ids:
        #     `us`: USA country only,
        #     `asia`: East Asia areas, like Japan, Taiwan,
        #     `eu`: The European Union.
        #
        # Example: `projects/project-A/locations/eu`.
    &quot;outputConfig&quot;: { # The desired output location and metadata. # Required. The desired output location and metadata (e.g. format).
      &quot;batchSize&quot;: 42, # The max number of response protos to put into each output JSON file on
          # Google Cloud Storage.
          # The valid range is [1, 100]. If not specified, the default value is 20.
          #
          # For example, for one PDF file with 100 pages, 100 response protos will
          # be generated. If `batch_size` = 20, then 5 JSON files each
          # containing 20 response protos will be written under the prefix
          # `gcs_destination`.`uri`.
          #
          # Currently, batch_size only applies to GcsDestination, with potential future
          # support for other output configurations.
      &quot;gcsDestination&quot;: { # The Google Cloud Storage location where the output will be written to. # The Google Cloud Storage location to write the output(s) to.
        &quot;uri&quot;: &quot;A String&quot;, # Google Cloud Storage URI prefix where the results will be stored. Results
            # will be in JSON format and preceded by their corresponding input URI
            # prefix. This field can represent either a Cloud Storage file prefix or
            # a Cloud Storage directory. In either case, the URI should be unique,
            # because in order to get all of the output files you will need to do a
            # wildcard Cloud Storage search on the URI prefix you provide.
            #
            # Examples:
            #
            # * File Prefix: gs://bucket-name/here/filenameprefix
            #   The output files will be created in gs://bucket-name/here/ and the
            #   names of the output files will begin with &quot;filenameprefix&quot;.
            #
            # * Directory Prefix: gs://bucket-name/some/location/
            #   The output files will be created in gs://bucket-name/some/location/
            #   and the names of the output files could be anything because there
            #   was no filename prefix specified.
            #
            # If there are multiple outputs, each response is still an
            # AnnotateFileResponse, and each response contains some subset of the
            # full list of AnnotateImageResponse. Multiple outputs can happen if,
            # for example, the output JSON is too large and overflows into
            # multiple sharded files.
      },
    },
    &quot;requests&quot;: [ # Required. Individual image annotation requests for this batch.
      { # Request for performing Google Cloud Vision API tasks over a user-provided
          # image, with user-requested features, and with context information.
        &quot;image&quot;: { # Client image to perform Google Cloud Vision API tasks over. # The image to be processed.
          &quot;source&quot;: { # External image source (Google Cloud Storage or web URL image location). # Google Cloud Storage image location, or publicly-accessible image
              # URL. If both `content` and `source` are provided for an image, `content`
              # takes precedence and is used to perform the image annotation request.
            &quot;gcsImageUri&quot;: &quot;A String&quot;, # **Use `image_uri` instead.**
                #
                # The Google Cloud Storage URI of the form
                # `gs://bucket_name/object_name`. Object versioning is not supported. See
                # [Google Cloud Storage Request
                # URIs](https://cloud.google.com/storage/docs/reference-uris) for more info.
            &quot;imageUri&quot;: &quot;A String&quot;, # The URI of the source image. Can be either:
                #
                # 1. A Google Cloud Storage URI of the form
                #    `gs://bucket_name/object_name`. Object versioning is not supported. See
                #    [Google Cloud Storage Request
                #    URIs](https://cloud.google.com/storage/docs/reference-uris) for more
                #    info.
                #
                # 2. A publicly-accessible image HTTP/HTTPS URL. When fetching images from
                #    HTTP/HTTPS URLs, Google cannot guarantee that the request will be
                #    completed. Your request may fail if the specified host denies the
                #    request (e.g. due to request throttling or DoS prevention), or if Google
                #    throttles requests to the site for abuse prevention. You should not
                #    depend on externally-hosted images for production applications.
                #
                # When both `gcs_image_uri` and `image_uri` are specified, `image_uri` takes
                # precedence.
          },
          &quot;content&quot;: &quot;A String&quot;, # Image content, represented as a stream of bytes.
              # Note: As with all `bytes` fields, protocol buffers use a pure binary
              # representation, whereas JSON representations use base64.
              #
              # Currently, this field only works for BatchAnnotateImages requests. It does
              # not work for AsyncBatchAnnotateImages requests.
        },
        &quot;imageContext&quot;: { # Image context and/or feature-specific parameters. # Additional context that may accompany the image.
          &quot;latLongRect&quot;: { # Rectangle determined by min and max `LatLng` pairs. # Not used.
            &quot;maxLatLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # Max lat/long pair.
                # of doubles representing degrees latitude and degrees longitude. Unless
                # specified otherwise, this must conform to the
                # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                # standard&lt;/a&gt;. Values must be within normalized ranges.
              &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
              &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
            },
            &quot;minLatLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # Min lat/long pair.
                # of doubles representing degrees latitude and degrees longitude. Unless
                # specified otherwise, this must conform to the
                # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                # standard&lt;/a&gt;. Values must be within normalized ranges.
              &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
              &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
            },
          },
          &quot;webDetectionParams&quot;: { # Parameters for web detection request. # Parameters for web detection.
            &quot;includeGeoResults&quot;: True or False, # Whether to include results derived from the geo information in the image.
          },
          &quot;languageHints&quot;: [ # List of languages to use for TEXT_DETECTION. In most cases, an empty value
              # yields the best results since it enables automatic language detection. For
              # languages based on the Latin alphabet, setting `language_hints` is not
              # needed. In rare cases, when the language of the text in the image is known,
              # setting a hint will help get better results (although it will be a
              # significant hindrance if the hint is wrong). Text detection returns an
              # error if one or more of the specified languages is not one of the
              # [supported languages](https://cloud.google.com/vision/docs/languages).
            &quot;A String&quot;,
          ],
          &quot;productSearchParams&quot;: { # Parameters for a product search request. # Parameters for product search.
            &quot;productCategories&quot;: [ # The list of product categories to search in. Currently, we only consider
                # the first category, and either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, &quot;toys-v2&quot;,
                # &quot;packagedgoods-v1&quot;, or &quot;general-v1&quot; should be specified. The legacy
                # categories &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported but will
                # be deprecated. For new products, please use &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;,
                # or &quot;toys-v2&quot; for better product search accuracy. It is recommended to
                # migrate existing products to these categories as well.
              &quot;A String&quot;,
            ],
            &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the area of interest in the image.
                # If it is not specified, system discretion will be applied.
              &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                { # A vertex represents a 2D point in the image.
                    # NOTE: the normalized vertex coordinates are relative to the original image
                    # and range from 0 to 1.
                  &quot;y&quot;: 3.14, # Y coordinate.
                  &quot;x&quot;: 3.14, # X coordinate.
                },
              ],
              &quot;vertices&quot;: [ # The bounding polygon vertices.
                { # A vertex represents a 2D point in the image.
                    # NOTE: the vertex coordinates are in the same scale as the original image.
                  &quot;x&quot;: 42, # X coordinate.
                  &quot;y&quot;: 42, # Y coordinate.
                },
              ],
            },
            &quot;filter&quot;: &quot;A String&quot;, # The filtering expression. This can be used to restrict search results based
                # on Product labels. We currently support an AND of ORs of key-value
                # expressions, where each expression within an OR must have the same key.
                # An &#x27;=&#x27; should be used to connect the key and value.
                #
                # For example, &quot;(color = red OR color = blue) AND brand = Google&quot; is
                # acceptable, but &quot;(color = red OR brand = Google)&quot; is not acceptable.
                # &quot;color: red&quot; is not acceptable because it uses a &#x27;:&#x27; instead of an &#x27;=&#x27;.
            &quot;productSet&quot;: &quot;A String&quot;, # The resource name of a ProductSet to be searched for similar images.
                #
                # Format is:
                # `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
          },
          &quot;cropHintsParams&quot;: { # Parameters for crop hints annotation request. # Parameters for crop hints annotation request.
            &quot;aspectRatios&quot;: [ # Aspect ratios in floats, representing the ratio of the width to the height
                # of the image. For example, if the desired aspect ratio is 4/3, the
                # corresponding float value should be 1.33333. If not specified, the
                # best possible crop is returned. The number of provided aspect ratios is
                # limited to a maximum of 16; any aspect ratios provided after the 16th are
                # ignored.
              3.14,
            ],
          },
        },
        &quot;features&quot;: [ # Requested features.
          { # The type of Google Cloud Vision API detection to perform, and the maximum
              # number of results to return for that type. Multiple `Feature` objects can
              # be specified in the `features` list.
            &quot;maxResults&quot;: 42, # Maximum number of results of this type. Does not apply to
                # `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
            &quot;type&quot;: &quot;A String&quot;, # The feature type.
            &quot;model&quot;: &quot;A String&quot;, # Model to use for the feature.
                # Supported values: &quot;builtin/stable&quot; (the default if unset) and
                # &quot;builtin/latest&quot;.
          },
        ],
      },
    ],
  }

  x__xgafv: string, V1 error format.
    Allowed values
      1 - v1 error format
      2 - v2 error format

Returns:
  An object of the form:

    { # This resource represents a long-running operation that is the result of a
        # network API call.
      &quot;done&quot;: True or False, # If the value is `false`, it means the operation is still in progress.
          # If `true`, the operation is completed, and either `error` or `response` is
          # available.
      &quot;name&quot;: &quot;A String&quot;, # The server-assigned name, which is only unique within the same service that
          # originally returns it. If you use the default HTTP mapping, the
          # `name` should be a resource name ending with `operations/{unique_id}`.
      &quot;error&quot;: { # The `Status` type defines a logical error model that is suitable for # The error result of the operation in case of failure or cancellation.
          # different programming environments, including REST APIs and RPC APIs. It is
          # used by [gRPC](https://github.com/grpc). Each `Status` message contains
          # three pieces of data: error code, error message, and error details.
          #
          # You can find out more about this error model and how to work with it in the
          # [API Design Guide](https://cloud.google.com/apis/design/errors).
        &quot;code&quot;: 42, # The status code, which should be an enum value of google.rpc.Code.
        &quot;details&quot;: [ # A list of messages that carry the error details. There is a common set of
            # message types for APIs to use.
          {
            &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
          },
        ],
        &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
            # user-facing error message should be localized and sent in the
            # google.rpc.Status.details field, or localized by the client.
      },
      &quot;response&quot;: { # The normal response of the operation in case of success. If the original
          # method returns no data on success, such as `Delete`, the response is
          # `google.protobuf.Empty`. If the original method is standard
          # `Get`/`Create`/`Update`, the response should be the resource. For other
          # methods, the response should have the type `XxxResponse`, where `Xxx`
          # is the original method name. For example, if the original method name
          # is `TakeSnapshot()`, the inferred response type is
          # `TakeSnapshotResponse`.
        &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
      },
      &quot;metadata&quot;: { # Service-specific metadata associated with the operation. It typically
          # contains progress information and common metadata such as create time.
          # Some services might not provide such metadata. Any method that returns a
          # long-running operation should document the metadata type, if any.
        &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
      },
    }</pre>
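
<p>A minimal usage sketch for this method: submit an asynchronous batch, then
poll the returned long-running operation until it completes. The bucket and
object names are placeholders, the output prefix must be writable by the
caller, and polling via <code>service.operations().get()</code> assumes the
operations resource exposed by this same client. The annotation results are
written as JSON files under <code>gcsDestination.uri</code>, not returned in
the operation itself.</p>

<pre>
import time

from googleapiclient.discovery import build

service = build(&#x27;vision&#x27;, &#x27;v1&#x27;)

# Hypothetical input image and output location.
body = {
    &#x27;requests&#x27;: [
        {
            &#x27;image&#x27;: {&#x27;source&#x27;: {&#x27;imageUri&#x27;: &#x27;gs://my-bucket/image.jpg&#x27;}},
            &#x27;features&#x27;: [{&#x27;type&#x27;: &#x27;LABEL_DETECTION&#x27;}],
        },
    ],
    &#x27;outputConfig&#x27;: {
        &#x27;gcsDestination&#x27;: {&#x27;uri&#x27;: &#x27;gs://my-bucket/vision-output/&#x27;},
        &#x27;batchSize&#x27;: 20,
    },
}

operation = service.images().asyncBatchAnnotate(body=body).execute()

# Poll the long-running operation until it is done.
name = operation[&#x27;name&#x27;]
while not operation.get(&#x27;done&#x27;, False):
    time.sleep(5)
    operation = service.operations().get(name=name).execute()

if &#x27;error&#x27; in operation:
    print(&#x27;Operation failed:&#x27;, operation[&#x27;error&#x27;])
else:
    # Output JSON files now exist under the gcsDestination prefix.
    print(&#x27;Done; results written under gs://my-bucket/vision-output/&#x27;)
</pre>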
</div>

</body></html>