<html><body>
<style>

body, h1, h2, h3, div, span, p, pre, a {
  margin: 0;
  padding: 0;
  border: 0;
  font-weight: inherit;
  font-style: inherit;
  font-size: 100%;
  font-family: inherit;
  vertical-align: baseline;
}

body {
  font-size: 13px;
  padding: 1em;
}

h1 {
  font-size: 26px;
  margin-bottom: 1em;
}

h2 {
  font-size: 24px;
  margin-bottom: 1em;
}

h3 {
  font-size: 20px;
  margin-bottom: 1em;
  margin-top: 1em;
}

pre, code {
  line-height: 1.5;
  font-family: Monaco, 'DejaVu Sans Mono', 'Bitstream Vera Sans Mono', 'Lucida Console', monospace;
}

pre {
  margin-top: 0.5em;
}

h1, h2, h3, p {
  font-family: Arial, sans-serif;
}

h1, h2, h3 {
  border-bottom: solid #CCC 1px;
}

.toc_element {
  margin-top: 0.5em;
}

.firstline {
  margin-left: 2em;
}

.method {
  margin-top: 1em;
  border: solid 1px #CCC;
  padding: 1em;
  background: #EEE;
}

.details {
  font-weight: bold;
  font-size: 14px;
}

</style>

<h1><a href="vision_v1.html">Cloud Vision API</a> . <a href="vision_v1.files.html">files</a></h1>
<h2>Instance Methods</h2>
<p class="toc_element">
  <code><a href="#annotate">annotate(body=None, x__xgafv=None)</a></code></p>
<p class="firstline">Service that performs image detection and annotation for a batch of files.</p>
<p class="toc_element">
  <code><a href="#asyncBatchAnnotate">asyncBatchAnnotate(body=None, x__xgafv=None)</a></code></p>
<p class="firstline">Run asynchronous image detection and annotation for a list of generic files, such as PDF files, which may contain multiple pages and multiple images per page.</p>
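<p>For orientation, a minimal sketch of obtaining this resource with the google-api-python-client library is shown below; the credential setup is an assumption and not part of this reference.</p>
<pre>
  from googleapiclient.discovery import build

  # Assumes application-default credentials are available in the environment.
  service = build(&quot;vision&quot;, &quot;v1&quot;)
  files = service.files()   # exposes annotate() and asyncBatchAnnotate(), described below
</pre>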
<h3>Method Details</h3>
<div class="method">
    <code class="details" id="annotate">annotate(body=None, x__xgafv=None)</code>
  <pre>Service that performs image detection and annotation for a batch of files.
Now only &quot;application/pdf&quot;, &quot;image/tiff&quot; and &quot;image/gif&quot; are supported.

This service will extract at most 5 (customers can specify which 5 in
AnnotateFileRequest.pages) frames (gif) or pages (pdf or tiff) from each
file provided and perform detection and annotation for each image
extracted.

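A minimal, hypothetical usage sketch (the file name, feature choice and response
handling below are illustrative assumptions, not part of this reference):

  import base64
  from googleapiclient.discovery import build

  service = build(&quot;vision&quot;, &quot;v1&quot;)
  with open(&quot;sample.pdf&quot;, &quot;rb&quot;) as f:                      # placeholder local PDF
    pdf_b64 = base64.b64encode(f.read()).decode(&quot;utf-8&quot;)   # inline content must be base64

  body = {
    &quot;requests&quot;: [{
      &quot;inputConfig&quot;: {&quot;mimeType&quot;: &quot;application/pdf&quot;, &quot;content&quot;: pdf_b64},
      &quot;features&quot;: [{&quot;type&quot;: &quot;DOCUMENT_TEXT_DETECTION&quot;}],
    }],
  }
  response = service.files().annotate(body=body).execute()
  for image_response in response[&quot;responses&quot;][0][&quot;responses&quot;]:   # per-image results (see Returns)
    print(image_response.get(&quot;fullTextAnnotation&quot;, {}).get(&quot;text&quot;, &quot;&quot;))
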
Args:
  body: object, The request body.
    The object takes the form of:

{ # A list of requests to annotate files using the BatchAnnotateFiles API.
    &quot;parent&quot;: &quot;A String&quot;, # Optional. Target project and location to make a call.
        #
        # Format: `projects/{project-id}/locations/{location-id}`.
        #
        # If no parent is specified, a region will be chosen automatically.
        #
        # Supported location-ids:
        #     `us`: USA country only,
        #     `asia`: East Asia areas, like Japan, Taiwan,
        #     `eu`: The European Union.
        #
        # Example: `projects/project-A/locations/eu`.
    &quot;requests&quot;: [ # Required. The list of file annotation requests. Right now we support only one
        # AnnotateFileRequest in BatchAnnotateFilesRequest.
      { # A request to annotate one single file, e.g. a PDF, TIFF or GIF file.
        &quot;inputConfig&quot;: { # The desired input location and metadata. # Required. Information about the input file.
          &quot;mimeType&quot;: &quot;A String&quot;, # The type of the file. Currently only &quot;application/pdf&quot;, &quot;image/tiff&quot; and
              # &quot;image/gif&quot; are supported. Wildcards are not supported.
          &quot;content&quot;: &quot;A String&quot;, # File content, represented as a stream of bytes.
              # Note: As with all `bytes` fields, protobuffers use a pure binary
              # representation, whereas JSON representations use base64.
              #
              # Currently, this field only works for BatchAnnotateFiles requests. It does
              # not work for AsyncBatchAnnotateFiles requests.
          &quot;gcsSource&quot;: { # The Google Cloud Storage location where the input will be read from. # The Google Cloud Storage location to read the input from.
            &quot;uri&quot;: &quot;A String&quot;, # Google Cloud Storage URI for the input file. This must only be a
                # Google Cloud Storage object. Wildcards are not currently supported.
          },
        },
        &quot;features&quot;: [ # Required. Requested features.
          { # The type of Google Cloud Vision API detection to perform, and the maximum
              # number of results to return for that type. Multiple `Feature` objects can
              # be specified in the `features` list.
            &quot;type&quot;: &quot;A String&quot;, # The feature type.
            &quot;maxResults&quot;: 42, # Maximum number of results of this type. Does not apply to
                # `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
            &quot;model&quot;: &quot;A String&quot;, # Model to use for the feature.
                # Supported values: &quot;builtin/stable&quot; (the default if unset) and
                # &quot;builtin/latest&quot;.
          },
        ],
        &quot;imageContext&quot;: { # Image context and/or feature-specific parameters. # Additional context that may accompany the image(s) in the file.
          &quot;languageHints&quot;: [ # List of languages to use for TEXT_DETECTION. In most cases, an empty value
              # yields the best results since it enables automatic language detection. For
              # languages based on the Latin alphabet, setting `language_hints` is not
              # needed. In rare cases, when the language of the text in the image is known,
              # setting a hint will help get better results (although it will be a
              # significant hindrance if the hint is wrong). Text detection returns an
              # error if one or more of the specified languages is not one of the
              # [supported languages](https://cloud.google.com/vision/docs/languages).
            &quot;A String&quot;,
          ],
          &quot;webDetectionParams&quot;: { # Parameters for web detection request. # Parameters for web detection.
            &quot;includeGeoResults&quot;: True or False, # Whether to include results derived from the geo information in the image.
          },
          &quot;latLongRect&quot;: { # Rectangle determined by min and max `LatLng` pairs. # Not used.
            &quot;maxLatLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # Max lat/long pair.
                # of doubles representing degrees latitude and degrees longitude. Unless
                # specified otherwise, this must conform to the
                # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                # standard&lt;/a&gt;. Values must be within normalized ranges.
              &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
              &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
            },
            &quot;minLatLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # Min lat/long pair.
                # of doubles representing degrees latitude and degrees longitude. Unless
                # specified otherwise, this must conform to the
                # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                # standard&lt;/a&gt;. Values must be within normalized ranges.
              &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
              &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
            },
          },
          &quot;cropHintsParams&quot;: { # Parameters for crop hints annotation request. # Parameters for crop hints annotation request.
            &quot;aspectRatios&quot;: [ # Aspect ratios in floats, representing the ratio of the width to the height
                # of the image. For example, if the desired aspect ratio is 4/3, the
                # corresponding float value should be 1.33333. If not specified, the
                # best possible crop is returned. The number of provided aspect ratios is
                # limited to a maximum of 16; any aspect ratios provided after the 16th are
                # ignored.
              3.14,
            ],
          },
          &quot;productSearchParams&quot;: { # Parameters for a product search request. # Parameters for product search.
            &quot;filter&quot;: &quot;A String&quot;, # The filtering expression. This can be used to restrict search results based
                # on Product labels. We currently support an AND of OR of key-value
                # expressions, where each expression within an OR must have the same key. An
                # &#x27;=&#x27; should be used to connect the key and value.
                #
                # For example, &quot;(color = red OR color = blue) AND brand = Google&quot; is
                # acceptable, but &quot;(color = red OR brand = Google)&quot; is not acceptable.
                # &quot;color: red&quot; is not acceptable because it uses a &#x27;:&#x27; instead of an &#x27;=&#x27;.
            &quot;productSet&quot;: &quot;A String&quot;, # The resource name of a ProductSet to be searched for similar images.
                #
                # Format is:
                # `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
            &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the area of interest in the image.
                # If it is not specified, system discretion will be applied.
              &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                { # A vertex represents a 2D point in the image.
                    # NOTE: the normalized vertex coordinates are relative to the original image
                    # and range from 0 to 1.
                  &quot;y&quot;: 3.14, # Y coordinate.
                  &quot;x&quot;: 3.14, # X coordinate.
                },
              ],
              &quot;vertices&quot;: [ # The bounding polygon vertices.
                { # A vertex represents a 2D point in the image.
                    # NOTE: the vertex coordinates are in the same scale as the original image.
                  &quot;x&quot;: 42, # X coordinate.
                  &quot;y&quot;: 42, # Y coordinate.
                },
              ],
            },
            &quot;productCategories&quot;: [ # The list of product categories to search in. Currently, we only consider
                # the first category, and either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, &quot;toys-v2&quot;,
                # &quot;packagedgoods-v1&quot;, or &quot;general-v1&quot; should be specified. The legacy
                # categories &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported but will
                # be deprecated. For new products, please use &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;,
                # or &quot;toys-v2&quot; for better product search accuracy. It is recommended to
                # migrate existing products to these categories as well.
              &quot;A String&quot;,
            ],
          },
        },
        &quot;pages&quot;: [ # Pages of the file to perform image annotation.
            #
            # Pages start from 1; we assume the first page of the file is page 1.
            # At most 5 pages are supported per request. Pages can be negative.
            #
            # Page 1 means the first page.
            # Page 2 means the second page.
            # Page -1 means the last page.
            # Page -2 means the second to the last page.
            #
            # If the file is GIF instead of PDF or TIFF, page refers to GIF frames.
            #
            # If this field is empty, by default the service performs image annotation
            # for the first 5 pages of the file.
          42,
        ],
      },
    ],
  }

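For illustration only, a populated request body reading a PDF from Cloud Storage might
look like the following (the bucket, object name and page selection are placeholder
assumptions, not values defined by this reference):

  {
    &quot;requests&quot;: [{
      &quot;inputConfig&quot;: {
        &quot;mimeType&quot;: &quot;application/pdf&quot;,
        &quot;gcsSource&quot;: {&quot;uri&quot;: &quot;gs://example-bucket/example.pdf&quot;},   # placeholder URI
      },
      &quot;features&quot;: [
        {&quot;type&quot;: &quot;DOCUMENT_TEXT_DETECTION&quot;},
        {&quot;type&quot;: &quot;LABEL_DETECTION&quot;, &quot;maxResults&quot;: 5},
      ],
      &quot;pages&quot;: [1, 2, -1],   # first, second and last page (at most 5 pages per request)
    }],
  }
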
  x__xgafv: string, V1 error format.
    Allowed values
      1 - v1 error format
      2 - v2 error format

Returns:
  An object of the form:

    { # A list of file annotation responses.
      &quot;responses&quot;: [ # The list of file annotation responses, each response corresponding to each
          # AnnotateFileRequest in BatchAnnotateFilesRequest.
        { # Response to a single file annotation request. A file may contain one or more
            # images, which individually have their own responses.
          &quot;responses&quot;: [ # Individual responses to images found within the file. This field will be
              # empty if the `error` field is set.
            { # Response to an image annotation request.
              &quot;landmarkAnnotations&quot;: [ # If present, landmark detection has completed successfully.
                { # Set of detected entity features.
                  &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
                  &quot;locations&quot;: [ # The location information for the detected entity. Multiple
                      # `LocationInfo` elements can be present because one location may
                      # indicate the location of the scene in the image, and another location
                      # may indicate the location of the place where the image was taken.
                      # Location information is usually present for landmarks.
                    { # Detected entity location information.
                      &quot;latLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # lat/long location coordinates.
                          # of doubles representing degrees latitude and degrees longitude. Unless
                          # specified otherwise, this must conform to the
                          # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                          # standard&lt;/a&gt;. Values must be within normalized ranges.
                        &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
                        &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
                      },
                    },
                  ],
                  &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
                      # [Google Knowledge Graph Search
                      # API](https://developers.google.com/knowledge-graph/).
                  &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
                      # The accuracy of the entity detection in an image.
                      # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
                      # this field represents the confidence that there is a tower in the query
                      # image. Range [0, 1].
                  &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
                      # `description` is expressed.
                  &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
                      # for `LABEL_DETECTION` features.
                    &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the normalized vertex coordinates are relative to the original image
                          # and range from 0 to 1.
                        &quot;y&quot;: 3.14, # Y coordinate.
                        &quot;x&quot;: 3.14, # X coordinate.
                      },
                    ],
                    &quot;vertices&quot;: [ # The bounding polygon vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the vertex coordinates are in the same scale as the original image.
                        &quot;x&quot;: 42, # X coordinate.
                        &quot;y&quot;: 42, # Y coordinate.
                      },
                    ],
                  },
                  &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
                  &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
                      # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
                      # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
                      # detected distant towering building, even though the confidence that
                      # there is a tower in each image may be the same. Range [0, 1].
                  &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
                      # fields, such as a score or string that qualifies the entity.
                    { # A `Property` consists of a user-supplied name/value pair.
                      &quot;value&quot;: &quot;A String&quot;, # Value of the property.
                      &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
                      &quot;name&quot;: &quot;A String&quot;, # Name of the property.
                    },
                  ],
                },
              ],
              &quot;faceAnnotations&quot;: [ # If present, face detection has completed successfully.
                { # A face annotation object contains the results of face detection.
                  &quot;sorrowLikelihood&quot;: &quot;A String&quot;, # Sorrow likelihood.
                  &quot;tiltAngle&quot;: 3.14, # Pitch angle, which indicates the upwards/downwards angle that the face is
                      # pointing relative to the image&#x27;s horizontal plane. Range [-180,180].
                  &quot;fdBoundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The `fd_bounding_poly` bounding polygon is tighter than the
                      # `boundingPoly`, and encloses only the skin part of the face. Typically, it
                      # is used to eliminate the face from any image analysis that detects the
                      # &quot;amount of skin&quot; visible in an image. It is not based on the
                      # landmarker results, only on the initial face detection, hence
                      # the &lt;code&gt;fd&lt;/code&gt; (face detection) prefix.
                    &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the normalized vertex coordinates are relative to the original image
                          # and range from 0 to 1.
                        &quot;y&quot;: 3.14, # Y coordinate.
                        &quot;x&quot;: 3.14, # X coordinate.
                      },
                    ],
                    &quot;vertices&quot;: [ # The bounding polygon vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the vertex coordinates are in the same scale as the original image.
                        &quot;x&quot;: 42, # X coordinate.
                        &quot;y&quot;: 42, # Y coordinate.
                      },
                    ],
                  },
                  &quot;landmarks&quot;: [ # Detected face landmarks.
                    { # A face-specific landmark (for example, a face feature).
                      &quot;type&quot;: &quot;A String&quot;, # Face landmark type.
                      &quot;position&quot;: { # A 3D position in the image, used primarily for Face detection landmarks. # Face landmark position.
                          # A valid Position must have both x and y coordinates.
                          # The position coordinates are in the same scale as the original image.
                        &quot;y&quot;: 3.14, # Y coordinate.
                        &quot;x&quot;: 3.14, # X coordinate.
                        &quot;z&quot;: 3.14, # Z coordinate (or depth).
                      },
                    },
                  ],
                  &quot;surpriseLikelihood&quot;: &quot;A String&quot;, # Surprise likelihood.
                  &quot;angerLikelihood&quot;: &quot;A String&quot;, # Anger likelihood.
                  &quot;landmarkingConfidence&quot;: 3.14, # Face landmarking confidence. Range [0, 1].
                  &quot;joyLikelihood&quot;: &quot;A String&quot;, # Joy likelihood.
                  &quot;underExposedLikelihood&quot;: &quot;A String&quot;, # Under-exposed likelihood.
                  &quot;panAngle&quot;: 3.14, # Yaw angle, which indicates the leftward/rightward angle that the face is
                      # pointing relative to the vertical plane perpendicular to the image. Range
                      # [-180,180].
                  &quot;detectionConfidence&quot;: 3.14, # Detection confidence. Range [0, 1].
                  &quot;blurredLikelihood&quot;: &quot;A String&quot;, # Blurred likelihood.
                  &quot;headwearLikelihood&quot;: &quot;A String&quot;, # Headwear likelihood.
                  &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the face. The coordinates of the bounding box
                      # are in the original image&#x27;s scale.
                      # The bounding box is computed to &quot;frame&quot; the face in accordance with human
                      # expectations. It is based on the landmarker results.
                      # Note that one or more x and/or y coordinates may not be generated in the
                      # `BoundingPoly` (the polygon will be unbounded) if only a partial face
                      # appears in the image to be annotated.
                    &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the normalized vertex coordinates are relative to the original image
                          # and range from 0 to 1.
                        &quot;y&quot;: 3.14, # Y coordinate.
                        &quot;x&quot;: 3.14, # X coordinate.
                      },
                    ],
                    &quot;vertices&quot;: [ # The bounding polygon vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the vertex coordinates are in the same scale as the original image.
                        &quot;x&quot;: 42, # X coordinate.
                        &quot;y&quot;: 42, # Y coordinate.
                      },
                    ],
                  },
                  &quot;rollAngle&quot;: 3.14, # Roll angle, which indicates the amount of clockwise/anti-clockwise rotation
                      # of the face relative to the image vertical about the axis perpendicular to
                      # the face. Range [-180,180].
                },
              ],
              &quot;cropHintsAnnotation&quot;: { # Set of crop hints that are used to generate new crops when serving images. # If present, crop hints have completed successfully.
                &quot;cropHints&quot;: [ # Crop hint results.
                  { # Single crop hint that is used to generate a new crop when serving an image.
                    &quot;confidence&quot;: 3.14, # Confidence of this being a salient region. Range [0, 1].
                    &quot;importanceFraction&quot;: 3.14, # Fraction of importance of this salient region with respect to the original
                        # image.
                    &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon for the crop region. The coordinates of the bounding
                        # box are in the original image&#x27;s scale.
                      &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                        { # A vertex represents a 2D point in the image.
                            # NOTE: the normalized vertex coordinates are relative to the original image
                            # and range from 0 to 1.
                          &quot;y&quot;: 3.14, # Y coordinate.
                          &quot;x&quot;: 3.14, # X coordinate.
                        },
                      ],
                      &quot;vertices&quot;: [ # The bounding polygon vertices.
                        { # A vertex represents a 2D point in the image.
                            # NOTE: the vertex coordinates are in the same scale as the original image.
                          &quot;x&quot;: 42, # X coordinate.
                          &quot;y&quot;: 42, # Y coordinate.
                        },
                      ],
                    },
                  },
                ],
              },
              &quot;labelAnnotations&quot;: [ # If present, label detection has completed successfully.
                { # Set of detected entity features.
                  &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
                  &quot;locations&quot;: [ # The location information for the detected entity. Multiple
                      # `LocationInfo` elements can be present because one location may
                      # indicate the location of the scene in the image, and another location
                      # may indicate the location of the place where the image was taken.
                      # Location information is usually present for landmarks.
                    { # Detected entity location information.
                      &quot;latLng&quot;: { # An object representing a latitude/longitude pair. This is expressed as a pair # lat/long location coordinates.
                          # of doubles representing degrees latitude and degrees longitude. Unless
                          # specified otherwise, this must conform to the
                          # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
                          # standard&lt;/a&gt;. Values must be within normalized ranges.
                        &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
                        &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
                      },
                    },
                  ],
                  &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
                      # [Google Knowledge Graph Search
                      # API](https://developers.google.com/knowledge-graph/).
                  &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
                      # The accuracy of the entity detection in an image.
                      # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
                      # this field represents the confidence that there is a tower in the query
                      # image. Range [0, 1].
                  &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
                      # `description` is expressed.
                  &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
                      # for `LABEL_DETECTION` features.
                    &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the normalized vertex coordinates are relative to the original image
                          # and range from 0 to 1.
                        &quot;y&quot;: 3.14, # Y coordinate.
                        &quot;x&quot;: 3.14, # X coordinate.
                      },
                    ],
                    &quot;vertices&quot;: [ # The bounding polygon vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the vertex coordinates are in the same scale as the original image.
                        &quot;x&quot;: 42, # X coordinate.
                        &quot;y&quot;: 42, # Y coordinate.
                      },
                    ],
                  },
                  &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
                  &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
                      # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
                      # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
                      # detected distant towering building, even though the confidence that
                      # there is a tower in each image may be the same. Range [0, 1].
                  &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
                      # fields, such as a score or string that qualifies the entity.
                    { # A `Property` consists of a user-supplied name/value pair.
                      &quot;value&quot;: &quot;A String&quot;, # Value of the property.
                      &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
                      &quot;name&quot;: &quot;A String&quot;, # Name of the property.
                    },
                  ],
                },
              ],
              &quot;productSearchResults&quot;: { # Results for a product search request. # If present, product search has completed successfully.
                &quot;productGroupedResults&quot;: [ # List of results grouped by products detected in the query image. Each entry
                    # corresponds to one bounding polygon in the query image, and contains the
                    # matching products specific to that region. There may be duplicate product
                    # matches in the union of all the per-product results.
                  { # Information about the products similar to a single product in a query
                      # image.
                    &quot;objectAnnotations&quot;: [ # List of generic predictions for the object in the bounding box.
                      { # Prediction for what the object in the bounding box is.
                        &quot;score&quot;: 3.14, # Score of the result. Range [0, 1].
                        &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                            # information, see
                            # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                        &quot;mid&quot;: &quot;A String&quot;, # Object ID that should align with EntityAnnotation mid.
                        &quot;name&quot;: &quot;A String&quot;, # Object name, expressed in its `language_code` language.
                      },
                    ],
                    &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the product detected in the query image.
                      &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                        { # A vertex represents a 2D point in the image.
                            # NOTE: the normalized vertex coordinates are relative to the original image
                            # and range from 0 to 1.
                          &quot;y&quot;: 3.14, # Y coordinate.
                          &quot;x&quot;: 3.14, # X coordinate.
                        },
                      ],
                      &quot;vertices&quot;: [ # The bounding polygon vertices.
                        { # A vertex represents a 2D point in the image.
                            # NOTE: the vertex coordinates are in the same scale as the original image.
                          &quot;x&quot;: 42, # X coordinate.
                          &quot;y&quot;: 42, # Y coordinate.
                        },
                      ],
                    },
                    &quot;results&quot;: [ # List of results, one for each product match.
                      { # Information about a product.
                        &quot;image&quot;: &quot;A String&quot;, # The resource name of the image from the product that is the closest match
                            # to the query.
                        &quot;product&quot;: { # A Product contains ReferenceImages. # The Product.
                          &quot;name&quot;: &quot;A String&quot;, # The resource name of the product.
                              #
                              # Format is:
                              # `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
                              #
                              # This field is ignored when creating a product.
                          &quot;displayName&quot;: &quot;A String&quot;, # The user-provided name for this Product. Must not be empty. Must be at most
                              # 4096 characters long.
                          &quot;description&quot;: &quot;A String&quot;, # User-provided metadata to be stored with this product. Must be at most 4096
                              # characters long.
                          &quot;productCategory&quot;: &quot;A String&quot;, # Immutable. The category for the product identified by the reference image. This should
                              # be either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, or &quot;toys-v2&quot;. The legacy categories
                              # &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported, but these should
                              # not be used for new products.
                          &quot;productLabels&quot;: [ # Key-value pairs that can be attached to a product. At query time,
                              # constraints can be specified based on the product_labels.
                              #
                              # Note that integer values can be provided as strings, e.g. &quot;1199&quot;. Only
                              # strings with integer values can match a range-based restriction which is
                              # to be supported soon.
                              #
                              # Multiple values can be assigned to the same key. One product may have up to
                              # 500 product_labels.
                              #
                              # Notice that the total number of distinct product_labels over all products
                              # in one ProductSet cannot exceed 1M, otherwise the product search pipeline
                              # will refuse to work for that ProductSet.
                            { # A product label represented as a key-value pair.
                              &quot;value&quot;: &quot;A String&quot;, # The value of the label attached to the product. Cannot be empty and
                                  # cannot exceed 128 bytes.
                              &quot;key&quot;: &quot;A String&quot;, # The key of the label attached to the product. Cannot be empty and cannot
                                  # exceed 128 bytes.
                            },
                          ],
                        },
                        &quot;score&quot;: 3.14, # A confidence level on the match, ranging from 0 (no confidence) to
                            # 1 (full confidence).
                      },
                    ],
                  },
                ],
                &quot;results&quot;: [ # List of results, one for each product match.
                  { # Information about a product.
                    &quot;image&quot;: &quot;A String&quot;, # The resource name of the image from the product that is the closest match
                        # to the query.
                    &quot;product&quot;: { # A Product contains ReferenceImages. # The Product.
                      &quot;name&quot;: &quot;A String&quot;, # The resource name of the product.
                          #
                          # Format is:
                          # `projects/PROJECT_ID/locations/LOC_ID/products/PRODUCT_ID`.
                          #
                          # This field is ignored when creating a product.
                      &quot;displayName&quot;: &quot;A String&quot;, # The user-provided name for this Product. Must not be empty. Must be at most
                          # 4096 characters long.
                      &quot;description&quot;: &quot;A String&quot;, # User-provided metadata to be stored with this product. Must be at most 4096
                          # characters long.
                      &quot;productCategory&quot;: &quot;A String&quot;, # Immutable. The category for the product identified by the reference image. This should
                          # be either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, or &quot;toys-v2&quot;. The legacy categories
                          # &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported, but these should
                          # not be used for new products.
                      &quot;productLabels&quot;: [ # Key-value pairs that can be attached to a product. At query time,
                          # constraints can be specified based on the product_labels.
                          #
                          # Note that integer values can be provided as strings, e.g. &quot;1199&quot;. Only
                          # strings with integer values can match a range-based restriction which is
                          # to be supported soon.
                          #
                          # Multiple values can be assigned to the same key. One product may have up to
                          # 500 product_labels.
                          #
                          # Notice that the total number of distinct product_labels over all products
                          # in one ProductSet cannot exceed 1M, otherwise the product search pipeline
                          # will refuse to work for that ProductSet.
                        { # A product label represented as a key-value pair.
                          &quot;value&quot;: &quot;A String&quot;, # The value of the label attached to the product. Cannot be empty and
                              # cannot exceed 128 bytes.
                          &quot;key&quot;: &quot;A String&quot;, # The key of the label attached to the product. Cannot be empty and cannot
                              # exceed 128 bytes.
                        },
                      ],
                    },
                    &quot;score&quot;: 3.14, # A confidence level on the match, ranging from 0 (no confidence) to
                        # 1 (full confidence).
                  },
                ],
                &quot;indexTime&quot;: &quot;A String&quot;, # Timestamp of the index which provided these results. Products added to the
                    # product set and products removed from the product set after this time are
                    # not reflected in the current results.
              },
              &quot;localizedObjectAnnotations&quot;: [ # If present, localized object detection has completed successfully.
                  # This will be sorted descending by confidence score.
                { # Set of detected objects with bounding boxes.
                  &quot;score&quot;: 3.14, # Score of the result. Range [0, 1].
                  &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                      # information, see
                      # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                  &quot;mid&quot;: &quot;A String&quot;, # Object ID that should align with EntityAnnotation mid.
                  &quot;name&quot;: &quot;A String&quot;, # Object name, expressed in its `language_code` language.
                  &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this object belongs. This must be populated.
                    &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the normalized vertex coordinates are relative to the original image
                          # and range from 0 to 1.
                        &quot;y&quot;: 3.14, # Y coordinate.
                        &quot;x&quot;: 3.14, # X coordinate.
                      },
                    ],
                    &quot;vertices&quot;: [ # The bounding polygon vertices.
                      { # A vertex represents a 2D point in the image.
                          # NOTE: the vertex coordinates are in the same scale as the original image.
                        &quot;x&quot;: 42, # X coordinate.
                        &quot;y&quot;: 42, # Y coordinate.
                      },
                    ],
                  },
                },
              ],
              &quot;error&quot;: { # The `Status` type defines a logical error model that is suitable for # If set, represents the error message for the operation.
                  # Note that filled-in image annotations are guaranteed to be
                  # correct, even when `error` is set.
                  # different programming environments, including REST APIs and RPC APIs. It is
                  # used by [gRPC](https://github.com/grpc). Each `Status` message contains
                  # three pieces of data: error code, error message, and error details.
                  #
                  # You can find out more about this error model and how to work with it in the
                  # [API Design Guide](https://cloud.google.com/apis/design/errors).
                &quot;code&quot;: 42, # The status code, which should be an enum value of google.rpc.Code.
                &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
                    # user-facing error message should be localized and sent in the
                    # google.rpc.Status.details field, or localized by the client.
                &quot;details&quot;: [ # A list of messages that carry the error details. There is a common set of
                    # message types for APIs to use.
                  {
                    &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
                  },
                ],
              },
              &quot;fullTextAnnotation&quot;: { # TextAnnotation contains a structured representation of OCR extracted text. # If present, text (OCR) detection or document (OCR) text detection has
                  # completed successfully.
                  # This annotation provides the structural hierarchy for the OCR detected
                  # text.
                  # The hierarchy of an OCR extracted text structure is like this:
                  #     TextAnnotation -&gt; Page -&gt; Block -&gt; Paragraph -&gt; Word -&gt; Symbol
                  # Each structural component, starting from Page, may further have their own
                  # properties. Properties describe detected languages, breaks, etc. Please refer
                  # to the TextAnnotation.TextProperty message definition below for more
                  # detail.
                &quot;pages&quot;: [ # List of pages detected by OCR.
                  { # Detected page from OCR.
                    &quot;blocks&quot;: [ # List of blocks of text, images etc on this page.
                      { # Logical element on the page.
                        &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the block.
                          &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                            &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                            &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                          },
                          &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                            { # Detected language for a structural component.
                              &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                  # information, see
                                  # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                              &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                            },
                          ],
                        },
                        &quot;blockType&quot;: &quot;A String&quot;, # Detected block type (text, image etc) for this block.
                        &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the block.
                            # The vertices are in the order of top-left, top-right, bottom-right,
                            # bottom-left. When a rotation of the bounding box is detected the rotation
                            # is represented as around the top-left corner as defined when the text is
                            # read in the &#x27;natural&#x27; orientation.
                            # For example:
                            #
                            # * when the text is horizontal it might look like:
                            #
                            #     0----1
                            #     |    |
                            #     3----2
                            #
                            # * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                            #
                            #     2----3
                            #     |    |
                            #     1----0
                            #
                            # and the vertex order will still be (0, 1, 2, 3).
                          &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                            { # A vertex represents a 2D point in the image.
                                # NOTE: the normalized vertex coordinates are relative to the original image
                                # and range from 0 to 1.
                              &quot;y&quot;: 3.14, # Y coordinate.
                              &quot;x&quot;: 3.14, # X coordinate.
                            },
                          ],
                          &quot;vertices&quot;: [ # The bounding polygon vertices.
                            { # A vertex represents a 2D point in the image.
                                # NOTE: the vertex coordinates are in the same scale as the original image.
                              &quot;x&quot;: 42, # X coordinate.
                              &quot;y&quot;: 42, # Y coordinate.
                            },
                          ],
                        },
                        &quot;confidence&quot;: 3.14, # Confidence of the OCR results on the block. Range [0, 1].
                        &quot;paragraphs&quot;: [ # List of paragraphs in this block (if this block is of type text).
                          { # Structural unit of text representing a number of words in certain order.
                            &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the paragraph.
                              &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                                &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                                &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                              },
                              &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                                { # Detected language for a structural component.
                                  &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                      # information, see
                                      # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                                  &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                                },
                              ],
                            },
                            &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the paragraph.
                                # The vertices are in the order of top-left, top-right, bottom-right,
                                # bottom-left. When a rotation of the bounding box is detected the rotation
                                # is represented as around the top-left corner as defined when the text is
                                # read in the &#x27;natural&#x27; orientation.
                                # For example:
                                # * when the text is horizontal it might look like:
                                #     0----1
                                #     |    |
                                #     3----2
                                # * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                                #     2----3
                                #     |    |
                                #     1----0
                                # and the vertex order will still be (0, 1, 2, 3).
                              &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                                { # A vertex represents a 2D point in the image.
                                    # NOTE: the normalized vertex coordinates are relative to the original image
                                    # and range from 0 to 1.
                                  &quot;y&quot;: 3.14, # Y coordinate.
                                  &quot;x&quot;: 3.14, # X coordinate.
                                },
                              ],
                              &quot;vertices&quot;: [ # The bounding polygon vertices.
                                { # A vertex represents a 2D point in the image.
                                    # NOTE: the vertex coordinates are in the same scale as the original image.
                                  &quot;x&quot;: 42, # X coordinate.
                                  &quot;y&quot;: 42, # Y coordinate.
                                },
                              ],
                            },
                            &quot;confidence&quot;: 3.14, # Confidence of the OCR results for the paragraph. Range [0, 1].
                            &quot;words&quot;: [ # List of all words in this paragraph.
                              { # A word representation.
                                &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the word.
                                    # The vertices are in the order of top-left, top-right, bottom-right,
                                    # bottom-left. When a rotation of the bounding box is detected the rotation
                                    # is represented as around the top-left corner as defined when the text is
                                    # read in the &#x27;natural&#x27; orientation.
                                    # For example:
                                    # * when the text is horizontal it might look like:
                                    #     0----1
                                    #     |    |
                                    #     3----2
                                    # * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                                    #     2----3
                                    #     |    |
                                    #     1----0
                                    # and the vertex order will still be (0, 1, 2, 3).
                                  &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                                    { # A vertex represents a 2D point in the image.
                                        # NOTE: the normalized vertex coordinates are relative to the original image
                                        # and range from 0 to 1.
                                      &quot;y&quot;: 3.14, # Y coordinate.
                                      &quot;x&quot;: 3.14, # X coordinate.
                                    },
                                  ],
                                  &quot;vertices&quot;: [ # The bounding polygon vertices.
                                    { # A vertex represents a 2D point in the image.
                                        # NOTE: the vertex coordinates are in the same scale as the original image.
                                      &quot;x&quot;: 42, # X coordinate.
                                      &quot;y&quot;: 42, # Y coordinate.
                                    },
                                  ],
                                },
                                &quot;confidence&quot;: 3.14, # Confidence of the OCR results for the word. Range [0, 1].
                                &quot;symbols&quot;: [ # List of symbols in the word.
                                    # The order of the symbols follows the natural reading order.
                                  { # A single symbol representation.
                                    &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the symbol.
                                      &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                                        &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                                        &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                                      },
                                      &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                                        { # Detected language for a structural component.
                                          &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                              # information, see
                                              # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                                          &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                                        },
                                      ],
                                    },
                                    &quot;boundingBox&quot;: { # A bounding polygon for the detected image annotation. # The bounding box for the symbol.
                                        # The vertices are in the order of top-left, top-right, bottom-right,
                                        # bottom-left. When a rotation of the bounding box is detected the rotation
                                        # is represented as around the top-left corner as defined when the text is
                                        # read in the &#x27;natural&#x27; orientation.
                                        # For example:
                                        # * when the text is horizontal it might look like:
                                        #     0----1
                                        #     |    |
                                        #     3----2
                                        # * when it&#x27;s rotated 180 degrees around the top-left corner it becomes:
                                        #     2----3
                                        #     |    |
                                        #     1----0
                                        # and the vertex order will still be (0, 1, 2, 3).
                                      &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
                                        { # A vertex represents a 2D point in the image.
                                            # NOTE: the normalized vertex coordinates are relative to the original image
                                            # and range from 0 to 1.
                                          &quot;y&quot;: 3.14, # Y coordinate.
                                          &quot;x&quot;: 3.14, # X coordinate.
                                        },
                                      ],
                                      &quot;vertices&quot;: [ # The bounding polygon vertices.
                                        { # A vertex represents a 2D point in the image.
                                            # NOTE: the vertex coordinates are in the same scale as the original image.
                                          &quot;x&quot;: 42, # X coordinate.
                                          &quot;y&quot;: 42, # Y coordinate.
                                        },
                                      ],
                                    },
                                    &quot;confidence&quot;: 3.14, # Confidence of the OCR results for the symbol. Range [0, 1].
                                    &quot;text&quot;: &quot;A String&quot;, # The actual UTF-8 representation of the symbol.
                                  },
                                ],
                                &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected for the word.
                                  &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                                    &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                                    &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                                  },
                                  &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                                    { # Detected language for a structural component.
                                      &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                                          # information, see
                                          # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                                      &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                                    },
                                  ],
                                },
                              },
                            ],
                          },
                        ],
                      },
                    ],
                    &quot;property&quot;: { # Additional information detected on the structural component. # Additional information detected on the page.
                      &quot;detectedBreak&quot;: { # Detected start or end of a structural component. # Detected start or end of a text segment.
                        &quot;type&quot;: &quot;A String&quot;, # Detected break type.
                        &quot;isPrefix&quot;: True or False, # True if break prepends the element.
                      },
                      &quot;detectedLanguages&quot;: [ # A list of detected languages together with confidence.
                        { # Detected language for a structural component.
                          &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code, such as &quot;en-US&quot; or &quot;sr-Latn&quot;. For more
                              # information, see
                              # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
                          &quot;confidence&quot;: 3.14, # Confidence of detected language. Range [0, 1].
                        },
                      ],
                    },
                    &quot;confidence&quot;: 3.14, # Confidence of the OCR results on the page. Range [0, 1].
                    &quot;height&quot;: 42, # Page height. For PDFs the unit is points. For images (including
                        # TIFFs) the unit is pixels.
                    &quot;width&quot;: 42, # Page width. For PDFs the unit is points. For images (including
                        # TIFFs) the unit is pixels.
                  },
                ],
                &quot;text&quot;: &quot;A String&quot;, # UTF-8 text detected on the pages.
              },
Bu Sun Kim65020912020-05-20 12:08:20 -0700910 &quot;textAnnotations&quot;: [ # If present, text (OCR) detection has completed successfully.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700911 { # Set of detected entity features.
Bu Sun Kim65020912020-05-20 12:08:20 -0700912 &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
913 &quot;locations&quot;: [ # The location information for the detected entity. Multiple
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700914 # `LocationInfo` elements can be present because one location may
915 # indicate the location of the scene in the image, and another location
916 # may indicate the location of the place where the image was taken.
917 # Location information is usually present for landmarks.
918 { # Detected entity location information.
Bu Sun Kim65020912020-05-20 12:08:20 -0700919         &quot;latLng&quot;: { # lat/long location coordinates. An object representing a latitude/longitude pair. This is expressed as a pair
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700920 # of doubles representing degrees latitude and degrees longitude. Unless
921 # specified otherwise, this must conform to the
Bu Sun Kim65020912020-05-20 12:08:20 -0700922 # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
Dan O'Mearadd494642020-05-01 07:42:23 -0700923 # standard&lt;/a&gt;. Values must be within normalized ranges.
Bu Sun Kim65020912020-05-20 12:08:20 -0700924 &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
925 &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700926 },
927 },
928 ],
Bu Sun Kim65020912020-05-20 12:08:20 -0700929 &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700930 # [Google Knowledge Graph Search
931 # API](https://developers.google.com/knowledge-graph/).
Bu Sun Kim65020912020-05-20 12:08:20 -0700932 &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
933 # The accuracy of the entity detection in an image.
934 # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
935 # this field represents the confidence that there is a tower in the query
936 # image. Range [0, 1].
937 &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
938 # `description` is expressed.
939 &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700940 # for `LABEL_DETECTION` features.
Bu Sun Kim65020912020-05-20 12:08:20 -0700941 &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700942 { # A vertex represents a 2D point in the image.
943 # NOTE: the normalized vertex coordinates are relative to the original image
944 # and range from 0 to 1.
Bu Sun Kim65020912020-05-20 12:08:20 -0700945 &quot;y&quot;: 3.14, # Y coordinate.
946 &quot;x&quot;: 3.14, # X coordinate.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700947 },
948 ],
Bu Sun Kim65020912020-05-20 12:08:20 -0700949 &quot;vertices&quot;: [ # The bounding polygon vertices.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700950 { # A vertex represents a 2D point in the image.
951 # NOTE: the vertex coordinates are in the same scale as the original image.
Bu Sun Kim65020912020-05-20 12:08:20 -0700952 &quot;x&quot;: 42, # X coordinate.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -0700953 &quot;y&quot;: 42, # Y coordinate.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700954 },
955 ],
956 },
Bu Sun Kim65020912020-05-20 12:08:20 -0700957 &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
958 &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
959 # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
960 # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
961 # detected distant towering building, even though the confidence that
962 # there is a tower in each image may be the same. Range [0, 1].
963 &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700964           # fields, such as a score or string that qualifies the entity.
965 { # A `Property` consists of a user-supplied name/value pair.
Bu Sun Kim65020912020-05-20 12:08:20 -0700966 &quot;value&quot;: &quot;A String&quot;, # Value of the property.
967 &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
968 &quot;name&quot;: &quot;A String&quot;, # Name of the property.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700969 },
970 ],
971 },
972 ],
Bu Sun Kim65020912020-05-20 12:08:20 -0700973 &quot;imagePropertiesAnnotation&quot;: { # Stores image properties, such as dominant colors. # If present, image properties were extracted successfully.
974 &quot;dominantColors&quot;: { # Set of dominant colors and their corresponding scores. # If present, dominant colors completed successfully.
975 &quot;colors&quot;: [ # RGB color values with their score and pixel fraction.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700976 { # Color information consists of RGB channels, score, and the fraction of
977 # the image that the color occupies in the image.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -0700978 &quot;score&quot;: 3.14, # Image-specific score for this color. Value in range [0, 1].
Bu Sun Kim65020912020-05-20 12:08:20 -0700979 &quot;pixelFraction&quot;: 3.14, # The fraction of pixels the color occupies in the image.
980 # Value in range [0, 1].
981         &quot;color&quot;: { # RGB components of the color. Represents a color in the RGBA color space. This representation is designed
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700982 # for simplicity of conversion to/from color representations in various
983 # languages over compactness; for example, the fields of this representation
Bu Sun Kim65020912020-05-20 12:08:20 -0700984 # can be trivially provided to the constructor of &quot;java.awt.Color&quot; in Java; it
985 # can also be trivially provided to UIColor&#x27;s &quot;+colorWithRed:green:blue:alpha&quot;
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700986 # method in iOS; and, with just a little work, it can be easily formatted into
Bu Sun Kim65020912020-05-20 12:08:20 -0700987 # a CSS &quot;rgba()&quot; string in JavaScript, as well.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -0700988 #
989 # Note: this proto does not carry information about the absolute color space
990 # that should be used to interpret the RGB value (e.g. sRGB, Adobe RGB,
991 # DCI-P3, BT.2020, etc.). By default, applications SHOULD assume the sRGB color
992 # space.
993 #
994 # Example (Java):
995 #
996 # import com.google.type.Color;
997 #
998 # // ...
999 # public static java.awt.Color fromProto(Color protocolor) {
1000 # float alpha = protocolor.hasAlpha()
1001 # ? protocolor.getAlpha().getValue()
1002 # : 1.0;
1003 #
1004 # return new java.awt.Color(
1005 # protocolor.getRed(),
1006 # protocolor.getGreen(),
1007 # protocolor.getBlue(),
1008 # alpha);
1009 # }
1010 #
1011 # public static Color toProto(java.awt.Color color) {
1012 # float red = (float) color.getRed();
1013 # float green = (float) color.getGreen();
1014 # float blue = (float) color.getBlue();
1015 # float denominator = 255.0;
1016 # Color.Builder resultBuilder =
1017 # Color
1018 # .newBuilder()
1019 # .setRed(red / denominator)
1020 # .setGreen(green / denominator)
1021 # .setBlue(blue / denominator);
1022 # int alpha = color.getAlpha();
1023 # if (alpha != 255) {
1024 # result.setAlpha(
1025 # FloatValue
1026 # .newBuilder()
1027 # .setValue(((float) alpha) / denominator)
1028 # .build());
1029 # }
1030 # return resultBuilder.build();
1031 # }
1032 # // ...
1033 #
1034 # Example (iOS / Obj-C):
1035 #
1036 # // ...
1037 # static UIColor* fromProto(Color* protocolor) {
1038 # float red = [protocolor red];
1039 # float green = [protocolor green];
1040 # float blue = [protocolor blue];
1041 # FloatValue* alpha_wrapper = [protocolor alpha];
1042 # float alpha = 1.0;
1043 # if (alpha_wrapper != nil) {
1044 # alpha = [alpha_wrapper value];
1045 # }
1046 # return [UIColor colorWithRed:red green:green blue:blue alpha:alpha];
1047 # }
1048 #
1049 # static Color* toProto(UIColor* color) {
1050 # CGFloat red, green, blue, alpha;
Dan O'Mearadd494642020-05-01 07:42:23 -07001051 # if (![color getRed:&amp;red green:&amp;green blue:&amp;blue alpha:&amp;alpha]) {
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001052 # return nil;
1053 # }
1054 # Color* result = [[Color alloc] init];
1055 # [result setRed:red];
1056 # [result setGreen:green];
1057 # [result setBlue:blue];
Dan O'Mearadd494642020-05-01 07:42:23 -07001058 # if (alpha &lt;= 0.9999) {
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001059 # [result setAlpha:floatWrapperWithValue(alpha)];
1060 # }
1061 # [result autorelease];
1062 # return result;
1063 # }
1064 # // ...
1065 #
1066 # Example (JavaScript):
1067 #
1068 # // ...
1069 #
1070 # var protoToCssColor = function(rgb_color) {
1071 # var redFrac = rgb_color.red || 0.0;
1072 # var greenFrac = rgb_color.green || 0.0;
1073 # var blueFrac = rgb_color.blue || 0.0;
1074 # var red = Math.floor(redFrac * 255);
1075 # var green = Math.floor(greenFrac * 255);
1076 # var blue = Math.floor(blueFrac * 255);
1077 #
Bu Sun Kim65020912020-05-20 12:08:20 -07001078 # if (!(&#x27;alpha&#x27; in rgb_color)) {
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001079 # return rgbToCssColor_(red, green, blue);
1080 # }
1081 #
1082 # var alphaFrac = rgb_color.alpha.value || 0.0;
Bu Sun Kim65020912020-05-20 12:08:20 -07001083 # var rgbParams = [red, green, blue].join(&#x27;,&#x27;);
1084 # return [&#x27;rgba(&#x27;, rgbParams, &#x27;,&#x27;, alphaFrac, &#x27;)&#x27;].join(&#x27;&#x27;);
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001085 # };
1086 #
1087 # var rgbToCssColor_ = function(red, green, blue) {
Dan O'Mearadd494642020-05-01 07:42:23 -07001088 # var rgbNumber = new Number((red &lt;&lt; 16) | (green &lt;&lt; 8) | blue);
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001089 # var hexString = rgbNumber.toString(16);
1090 # var missingZeros = 6 - hexString.length;
Bu Sun Kim65020912020-05-20 12:08:20 -07001091 # var resultBuilder = [&#x27;#&#x27;];
Dan O'Mearadd494642020-05-01 07:42:23 -07001092 # for (var i = 0; i &lt; missingZeros; i++) {
Bu Sun Kim65020912020-05-20 12:08:20 -07001093 # resultBuilder.push(&#x27;0&#x27;);
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001094 # }
1095 # resultBuilder.push(hexString);
Bu Sun Kim65020912020-05-20 12:08:20 -07001096 # return resultBuilder.join(&#x27;&#x27;);
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001097 # };
1098 #
1099 # // ...
Bu Sun Kim65020912020-05-20 12:08:20 -07001100 &quot;red&quot;: 3.14, # The amount of red in the color as a value in the interval [0, 1].
1101 &quot;green&quot;: 3.14, # The amount of green in the color as a value in the interval [0, 1].
1102 &quot;blue&quot;: 3.14, # The amount of blue in the color as a value in the interval [0, 1].
1103 &quot;alpha&quot;: 3.14, # The fraction of this color that should be applied to the pixel. That is,
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001104 # the final pixel color is defined by the equation:
1105 #
1106 # pixel color = alpha * (this color) + (1.0 - alpha) * (background color)
1107 #
1108 # This means that a value of 1.0 corresponds to a solid color, whereas
1109 # a value of 0.0 corresponds to a completely transparent color. This
1110 # uses a wrapper message rather than a simple float scalar so that it is
1111 # possible to distinguish between a default value and the value being unset.
1112 # If omitted, this color object is to be rendered as a solid color
1113 # (as if the alpha value had been explicitly given with a value of 1.0).
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001114 },
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001115 },
1116 ],
1117 },
1118 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001119 &quot;logoAnnotations&quot;: [ # If present, logo detection has completed successfully.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001120 { # Set of detected entity features.
Bu Sun Kim65020912020-05-20 12:08:20 -07001121 &quot;score&quot;: 3.14, # Overall score of the result. Range [0, 1].
1122 &quot;locations&quot;: [ # The location information for the detected entity. Multiple
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001123 # `LocationInfo` elements can be present because one location may
1124 # indicate the location of the scene in the image, and another location
1125 # may indicate the location of the place where the image was taken.
1126 # Location information is usually present for landmarks.
1127 { # Detected entity location information.
Bu Sun Kim65020912020-05-20 12:08:20 -07001128         &quot;latLng&quot;: { # lat/long location coordinates. An object representing a latitude/longitude pair. This is expressed as a pair
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001129 # of doubles representing degrees latitude and degrees longitude. Unless
1130 # specified otherwise, this must conform to the
Bu Sun Kim65020912020-05-20 12:08:20 -07001131 # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
Dan O'Mearadd494642020-05-01 07:42:23 -07001132 # standard&lt;/a&gt;. Values must be within normalized ranges.
Bu Sun Kim65020912020-05-20 12:08:20 -07001133 &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
1134 &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001135 },
1136 },
1137 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001138 &quot;mid&quot;: &quot;A String&quot;, # Opaque entity ID. Some IDs may be available in
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001139 # [Google Knowledge Graph Search
1140 # API](https://developers.google.com/knowledge-graph/).
Bu Sun Kim65020912020-05-20 12:08:20 -07001141 &quot;confidence&quot;: 3.14, # **Deprecated. Use `score` instead.**
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001142 # The accuracy of the entity detection in an image.
Bu Sun Kim65020912020-05-20 12:08:20 -07001143 # For example, for an image in which the &quot;Eiffel Tower&quot; entity is detected,
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001144 # this field represents the confidence that there is a tower in the query
1145 # image. Range [0, 1].
Bu Sun Kim65020912020-05-20 12:08:20 -07001146 &quot;locale&quot;: &quot;A String&quot;, # The language code for the locale in which the entity textual
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001147 # `description` is expressed.
Bu Sun Kim65020912020-05-20 12:08:20 -07001148 &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # Image region to which this entity belongs. Not produced
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001149 # for `LABEL_DETECTION` features.
Bu Sun Kim65020912020-05-20 12:08:20 -07001150 &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001151 { # A vertex represents a 2D point in the image.
1152 # NOTE: the normalized vertex coordinates are relative to the original image
1153 # and range from 0 to 1.
Bu Sun Kim65020912020-05-20 12:08:20 -07001154 &quot;y&quot;: 3.14, # Y coordinate.
1155 &quot;x&quot;: 3.14, # X coordinate.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001156 },
1157 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001158 &quot;vertices&quot;: [ # The bounding polygon vertices.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001159 { # A vertex represents a 2D point in the image.
1160 # NOTE: the vertex coordinates are in the same scale as the original image.
Bu Sun Kim65020912020-05-20 12:08:20 -07001161 &quot;x&quot;: 42, # X coordinate.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001162 &quot;y&quot;: 42, # Y coordinate.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001163 },
1164 ],
1165 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001166 &quot;description&quot;: &quot;A String&quot;, # Entity textual description, expressed in its `locale` language.
1167 &quot;topicality&quot;: 3.14, # The relevancy of the ICA (Image Content Annotation) label to the
1168 # image. For example, the relevancy of &quot;tower&quot; is likely higher to an image
1169 # containing the detected &quot;Eiffel Tower&quot; than to an image containing a
1170 # detected distant towering building, even though the confidence that
1171 # there is a tower in each image may be the same. Range [0, 1].
1172 &quot;properties&quot;: [ # Some entities may have optional user-supplied `Property` (name/value)
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001173           # fields, such as a score or string that qualifies the entity.
1174 { # A `Property` consists of a user-supplied name/value pair.
Bu Sun Kim65020912020-05-20 12:08:20 -07001175 &quot;value&quot;: &quot;A String&quot;, # Value of the property.
1176 &quot;uint64Value&quot;: &quot;A String&quot;, # Value of numeric properties.
1177 &quot;name&quot;: &quot;A String&quot;, # Name of the property.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001178 },
1179 ],
1180 },
1181 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001182     &quot;context&quot;: { # If present, contextual information is needed to understand where this image
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001183         # comes from. If an image was produced from a file (e.g. a PDF), this message
1184         # gives information about the source of that image.
Bu Sun Kim65020912020-05-20 12:08:20 -07001185 &quot;uri&quot;: &quot;A String&quot;, # The URI of the file used to produce the image.
1186 &quot;pageNumber&quot;: 42, # If the file was a PDF or TIFF, this field gives the page number within
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001187 # the file used to produce the image.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001188 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001189 &quot;webDetection&quot;: { # Relevant information for the image from the Internet. # If present, web detection has completed successfully.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001190 &quot;visuallySimilarImages&quot;: [ # The visually similar image results.
1191 { # Metadata for online images.
1192 &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
1193 &quot;url&quot;: &quot;A String&quot;, # The result image URL.
1194 },
1195 ],
1196 &quot;bestGuessLabels&quot;: [ # The service&#x27;s best guess as to the topic of the request image.
1197 # Inferred from similar images on the open web.
1198 { # Label to provide extra metadata for the web detection.
1199 &quot;label&quot;: &quot;A String&quot;, # Label for extra metadata.
1200 &quot;languageCode&quot;: &quot;A String&quot;, # The BCP-47 language code for `label`, such as &quot;en-US&quot; or &quot;sr-Latn&quot;.
1201 # For more information, see
1202 # http://www.unicode.org/reports/tr35/#Unicode_locale_identifier.
1203 },
1204 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001205 &quot;fullMatchingImages&quot;: [ # Fully matching images from the Internet.
1206 # Can include resized copies of the query image.
1207 { # Metadata for online images.
Bu Sun Kim65020912020-05-20 12:08:20 -07001208 &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001209 &quot;url&quot;: &quot;A String&quot;, # The result image URL.
Bu Sun Kim65020912020-05-20 12:08:20 -07001210 },
1211 ],
1212 &quot;webEntities&quot;: [ # Deduced entities from similar images on the Internet.
1213 { # Entity deduced from similar images on the Internet.
Bu Sun Kim65020912020-05-20 12:08:20 -07001214 &quot;entityId&quot;: &quot;A String&quot;, # Opaque entity ID.
1215 &quot;description&quot;: &quot;A String&quot;, # Canonical description of the entity, in English.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001216 &quot;score&quot;: 3.14, # Overall relevancy score for the entity.
1217 # Not normalized and not comparable across different image queries.
Bu Sun Kim65020912020-05-20 12:08:20 -07001218 },
1219 ],
1220 &quot;pagesWithMatchingImages&quot;: [ # Web pages containing the matching images from the Internet.
1221 { # Metadata for web pages.
1222 &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the web page.
1223 &quot;partialMatchingImages&quot;: [ # Partial matching images on the page.
1224 # Those images are similar enough to share some key-point features. For
1225           # example, an original image will likely have partial matching for its
1226 # crops.
1227 { # Metadata for online images.
Bu Sun Kim65020912020-05-20 12:08:20 -07001228 &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001229 &quot;url&quot;: &quot;A String&quot;, # The result image URL.
Bu Sun Kim65020912020-05-20 12:08:20 -07001230 },
1231 ],
1232 &quot;url&quot;: &quot;A String&quot;, # The result web page URL.
1233           &quot;pageTitle&quot;: &quot;A String&quot;, # Title for the web page; may contain HTML markup.
1234 &quot;fullMatchingImages&quot;: [ # Fully matching images on the page.
1235 # Can include resized copies of the query image.
1236 { # Metadata for online images.
Bu Sun Kim65020912020-05-20 12:08:20 -07001237 &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001238 &quot;url&quot;: &quot;A String&quot;, # The result image URL.
Bu Sun Kim65020912020-05-20 12:08:20 -07001239 },
1240 ],
1241 },
1242 ],
1243 &quot;partialMatchingImages&quot;: [ # Partial matching images from the Internet.
1244 # Those images are similar enough to share some key-point features. For
1245         # example, an original image will likely have partial matching for its crops.
1246 { # Metadata for online images.
Bu Sun Kim65020912020-05-20 12:08:20 -07001247 &quot;score&quot;: 3.14, # (Deprecated) Overall relevancy score for the image.
Bu Sun Kim65020912020-05-20 12:08:20 -07001248 &quot;url&quot;: &quot;A String&quot;, # The result image URL.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001249 },
1250 ],
1251 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001252     &quot;safeSearchAnnotation&quot;: { # If present, safe-search annotation has completed successfully. Set of features pertaining to the image, computed by computer vision
1253 # methods over safe-search verticals (for example, adult, spoof, medical,
1254 # violence).
1255 &quot;adult&quot;: &quot;A String&quot;, # Represents the adult content likelihood for the image. Adult content may
1256 # contain elements such as nudity, pornographic images or cartoons, or
1257 # sexual activities.
1258       &quot;spoof&quot;: &quot;A String&quot;, # Spoof likelihood. The likelihood that a modification
1259 # was made to the image&#x27;s canonical version to make it appear
1260 # funny or offensive.
1261 &quot;medical&quot;: &quot;A String&quot;, # Likelihood that this is a medical image.
1262 &quot;racy&quot;: &quot;A String&quot;, # Likelihood that the request image contains racy content. Racy content may
1263 # include (but is not limited to) skimpy or sheer clothing, strategically
1264 # covered nudity, lewd or provocative poses, or close-ups of sensitive
1265 # body areas.
1266 &quot;violence&quot;: &quot;A String&quot;, # Likelihood that this image contains violent content.
1267 },
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001268 },
1269 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001270 &quot;inputConfig&quot;: { # The desired input location and metadata. # Information about the file for which this response is generated.
Bu Sun Kim65020912020-05-20 12:08:20 -07001271 &quot;mimeType&quot;: &quot;A String&quot;, # The type of the file. Currently only &quot;application/pdf&quot;, &quot;image/tiff&quot; and
1272 # &quot;image/gif&quot; are supported. Wildcards are not supported.
1273 &quot;content&quot;: &quot;A String&quot;, # File content, represented as a stream of bytes.
1274 # Note: As with all `bytes` fields, protobuffers use a pure binary
1275 # representation, whereas JSON representations use base64.
1276 #
1277 # Currently, this field only works for BatchAnnotateFiles requests. It does
1278 # not work for AsyncBatchAnnotateFiles requests.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001279 &quot;gcsSource&quot;: { # The Google Cloud Storage location where the input will be read from. # The Google Cloud Storage location to read the input from.
1280 &quot;uri&quot;: &quot;A String&quot;, # Google Cloud Storage URI for the input file. This must only be a
1281 # Google Cloud Storage object. Wildcards are not currently supported.
1282 },
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001283 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001284 &quot;totalPages&quot;: 42, # This field gives the total number of pages in the file.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001285     &quot;error&quot;: { # If set, represents the error message for the failed request. The
1286         # `responses` field will not be set in this case. The `Status` type defines a logical error model that is suitable for
1287 # different programming environments, including REST APIs and RPC APIs. It is
1288 # used by [gRPC](https://github.com/grpc). Each `Status` message contains
1289 # three pieces of data: error code, error message, and error details.
1290 #
1291 # You can find out more about this error model and how to work with it in the
1292 # [API Design Guide](https://cloud.google.com/apis/design/errors).
1293 &quot;code&quot;: 42, # The status code, which should be an enum value of google.rpc.Code.
1294 &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
1295 # user-facing error message should be localized and sent in the
1296 # google.rpc.Status.details field, or localized by the client.
1297 &quot;details&quot;: [ # A list of messages that carry the error details. There is a common set of
1298 # message types for APIs to use.
1299 {
1300 &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
1301 },
1302 ],
1303 },
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001304 },
1305 ],
1306 }</pre>
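<p>Not part of the generated reference: a minimal usage sketch of calling this method with the google-api-python-client. The file name, feature type, and page selection below are illustrative placeholders, and authentication (for example, Application Default Credentials) is assumed to be configured.</p>
<pre>
# Hedged sketch only: builds the documented request body for files().annotate()
# and prints the OCR text of each processed page.
import base64

from googleapiclient.discovery import build

service = build('vision', 'v1')  # assumes default credentials are available

with open('document.pdf', 'rb') as f:
    # JSON requests carry `bytes` fields (such as `content`) as base64.
    encoded_pdf = base64.b64encode(f.read()).decode('utf-8')

body = {
    'requests': [
        {
            'inputConfig': {
                'mimeType': 'application/pdf',
                'content': encoded_pdf,
            },
            'features': [{'type': 'DOCUMENT_TEXT_DETECTION'}],
            # At most 5 pages/frames are processed per file; `pages` selects
            # which ones (here, the first two pages of the PDF).
            'pages': [1, 2],
        }
    ]
}

response = service.files().annotate(body=body).execute()

# One AnnotateFileResponse per input file, one AnnotateImageResponse per page.
for file_response in response.get('responses', []):
    for page_response in file_response.get('responses', []):
        print(page_response.get('fullTextAnnotation', {}).get('text', ''))
</pre>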
1307</div>
1308
1309<div class="method">
Dan O'Mearadd494642020-05-01 07:42:23 -07001310 <code class="details" id="asyncBatchAnnotate">asyncBatchAnnotate(body=None, x__xgafv=None)</code>
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001311 <pre>Run asynchronous image detection and annotation for a list of generic
1312files, such as PDF files, which may contain multiple pages and multiple
1313images per page. Progress and results can be retrieved through the
1314`google.longrunning.Operations` interface.
1315`Operation.metadata` contains `OperationMetadata` (metadata).
1316`Operation.response` contains `AsyncBatchAnnotateFilesResponse` (results).
1317
1318Args:
Dan O'Mearadd494642020-05-01 07:42:23 -07001319 body: object, The request body.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001320 The object takes the form of:
1321
1322{ # Multiple async file annotation requests are batched into a single service
1323 # call.
Bu Sun Kim65020912020-05-20 12:08:20 -07001324 &quot;requests&quot;: [ # Required. Individual async file annotation requests for this batch.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001325 { # An offline file annotation request.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001326 &quot;inputConfig&quot;: { # The desired input location and metadata. # Required. Information about the input file.
1327 &quot;mimeType&quot;: &quot;A String&quot;, # The type of the file. Currently only &quot;application/pdf&quot;, &quot;image/tiff&quot; and
1328 # &quot;image/gif&quot; are supported. Wildcards are not supported.
1329 &quot;content&quot;: &quot;A String&quot;, # File content, represented as a stream of bytes.
1330 # Note: As with all `bytes` fields, protobuffers use a pure binary
1331 # representation, whereas JSON representations use base64.
1332 #
1333 # Currently, this field only works for BatchAnnotateFiles requests. It does
1334 # not work for AsyncBatchAnnotateFiles requests.
1335 &quot;gcsSource&quot;: { # The Google Cloud Storage location where the input will be read from. # The Google Cloud Storage location to read the input from.
1336 &quot;uri&quot;: &quot;A String&quot;, # Google Cloud Storage URI for the input file. This must only be a
1337 # Google Cloud Storage object. Wildcards are not currently supported.
1338 },
1339 },
1340 &quot;features&quot;: [ # Required. Requested features.
1341 { # The type of Google Cloud Vision API detection to perform, and the maximum
1342 # number of results to return for that type. Multiple `Feature` objects can
1343 # be specified in the `features` list.
1344 &quot;type&quot;: &quot;A String&quot;, # The feature type.
1345 &quot;maxResults&quot;: 42, # Maximum number of results of this type. Does not apply to
1346 # `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
1347 &quot;model&quot;: &quot;A String&quot;, # Model to use for the feature.
1348 # Supported values: &quot;builtin/stable&quot; (the default if unset) and
1349 # &quot;builtin/latest&quot;.
1350 },
1351 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001352 &quot;imageContext&quot;: { # Image context and/or feature-specific parameters. # Additional context that may accompany the image(s) in the file.
1353 &quot;languageHints&quot;: [ # List of languages to use for TEXT_DETECTION. In most cases, an empty value
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001354 # yields the best results since it enables automatic language detection. For
1355 # languages based on the Latin alphabet, setting `language_hints` is not
1356 # needed. In rare cases, when the language of the text in the image is known,
1357 # setting a hint will help get better results (although it will be a
1358 # significant hindrance if the hint is wrong). Text detection returns an
1359 # error if one or more of the specified languages is not one of the
Dan O'Mearadd494642020-05-01 07:42:23 -07001360 # [supported languages](https://cloud.google.com/vision/docs/languages).
Bu Sun Kim65020912020-05-20 12:08:20 -07001361 &quot;A String&quot;,
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001362 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001363 &quot;webDetectionParams&quot;: { # Parameters for web detection request. # Parameters for web detection.
1364 &quot;includeGeoResults&quot;: True or False, # Whether to include results derived from the geo information in the image.
1365 },
1366 &quot;latLongRect&quot;: { # Rectangle determined by min and max `LatLng` pairs. # Not used.
1367         &quot;maxLatLng&quot;: { # Max lat/long pair. An object representing a latitude/longitude pair. This is expressed as a pair
1368 # of doubles representing degrees latitude and degrees longitude. Unless
1369 # specified otherwise, this must conform to the
1370 # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
1371 # standard&lt;/a&gt;. Values must be within normalized ranges.
1372 &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
1373 &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
1374 },
1375         &quot;minLatLng&quot;: { # Min lat/long pair. An object representing a latitude/longitude pair. This is expressed as a pair
1376 # of doubles representing degrees latitude and degrees longitude. Unless
1377 # specified otherwise, this must conform to the
1378 # &lt;a href=&quot;http://www.unoosa.org/pdf/icg/2012/template/WGS_84.pdf&quot;&gt;WGS84
1379 # standard&lt;/a&gt;. Values must be within normalized ranges.
1380 &quot;latitude&quot;: 3.14, # The latitude in degrees. It must be in the range [-90.0, +90.0].
1381 &quot;longitude&quot;: 3.14, # The longitude in degrees. It must be in the range [-180.0, +180.0].
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001382 },
1383 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001384 &quot;cropHintsParams&quot;: { # Parameters for crop hints annotation request. # Parameters for crop hints annotation request.
1385 &quot;aspectRatios&quot;: [ # Aspect ratios in floats, representing the ratio of the width to the height
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001386 # of the image. For example, if the desired aspect ratio is 4/3, the
1387 # corresponding float value should be 1.33333. If not specified, the
1388 # best possible crop is returned. The number of provided aspect ratios is
1389 # limited to a maximum of 16; any aspect ratios provided after the 16th are
1390 # ignored.
1391 3.14,
1392 ],
1393 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001394 &quot;productSearchParams&quot;: { # Parameters for a product search request. # Parameters for product search.
Bu Sun Kim65020912020-05-20 12:08:20 -07001395 &quot;filter&quot;: &quot;A String&quot;, # The filtering expression. This can be used to restrict search results based
1396           # on Product labels. We currently support an AND of ORs of key-value
1397 # expressions, where each expression within an OR must have the same key. An
1398 # &#x27;=&#x27; should be used to connect the key and value.
1399 #
1400 # For example, &quot;(color = red OR color = blue) AND brand = Google&quot; is
1401 # acceptable, but &quot;(color = red OR brand = Google)&quot; is not acceptable.
1402 # &quot;color: red&quot; is not acceptable because it uses a &#x27;:&#x27; instead of an &#x27;=&#x27;.
1403 &quot;productSet&quot;: &quot;A String&quot;, # The resource name of a ProductSet to be searched for similar images.
1404 #
1405 # Format is:
1406 # `projects/PROJECT_ID/locations/LOC_ID/productSets/PRODUCT_SET_ID`.
1407 &quot;boundingPoly&quot;: { # A bounding polygon for the detected image annotation. # The bounding polygon around the area of interest in the image.
1408 # If it is not specified, system discretion will be applied.
1409 &quot;normalizedVertices&quot;: [ # The bounding polygon normalized vertices.
1410 { # A vertex represents a 2D point in the image.
1411 # NOTE: the normalized vertex coordinates are relative to the original image
1412 # and range from 0 to 1.
1413 &quot;y&quot;: 3.14, # Y coordinate.
1414 &quot;x&quot;: 3.14, # X coordinate.
1415 },
1416 ],
1417 &quot;vertices&quot;: [ # The bounding polygon vertices.
1418 { # A vertex represents a 2D point in the image.
1419 # NOTE: the vertex coordinates are in the same scale as the original image.
Bu Sun Kim65020912020-05-20 12:08:20 -07001420 &quot;x&quot;: 42, # X coordinate.
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001421 &quot;y&quot;: 42, # Y coordinate.
Bu Sun Kim65020912020-05-20 12:08:20 -07001422 },
1423 ],
1424 },
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001425 &quot;productCategories&quot;: [ # The list of product categories to search in. Currently, we only consider
1426 # the first category, and either &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;, &quot;toys-v2&quot;,
1427 # &quot;packagedgoods-v1&quot;, or &quot;general-v1&quot; should be specified. The legacy
1428 # categories &quot;homegoods&quot;, &quot;apparel&quot;, and &quot;toys&quot; are still supported but will
1429 # be deprecated. For new products, please use &quot;homegoods-v2&quot;, &quot;apparel-v2&quot;,
1430 # or &quot;toys-v2&quot; for better product search accuracy. It is recommended to
1431 # migrate existing products to these categories as well.
1432 &quot;A String&quot;,
1433 ],
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001434 },
1435 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001436 &quot;outputConfig&quot;: { # The desired output location and metadata. # Required. The desired output location and metadata (e.g. format).
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001437 &quot;batchSize&quot;: 42, # The max number of response protos to put into each output JSON file on
1438 # Google Cloud Storage.
1439 # The valid range is [1, 100]. If not specified, the default value is 20.
1440 #
1441         # For example, for one PDF file with 100 pages, 100 response protos will
1442         # be generated. If `batch_size` = 20, then 5 JSON files each
1443 # containing 20 response protos will be written under the prefix
1444 # `gcs_destination`.`uri`.
1445 #
1446 # Currently, batch_size only applies to GcsDestination, with potential future
1447 # support for other output configurations.
Bu Sun Kim65020912020-05-20 12:08:20 -07001448 &quot;gcsDestination&quot;: { # The Google Cloud Storage location where the output will be written to. # The Google Cloud Storage location to write the output(s) to.
1449 &quot;uri&quot;: &quot;A String&quot;, # Google Cloud Storage URI prefix where the results will be stored. Results
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001450 # will be in JSON format and preceded by its corresponding input URI prefix.
1451 # This field can either represent a gcs file prefix or gcs directory. In
1452 # either case, the uri should be unique because in order to get all of the
1453 # output files, you will need to do a wildcard gcs search on the uri prefix
1454 # you provide.
1455 #
1456 # Examples:
1457 #
1458 # * File Prefix: gs://bucket-name/here/filenameprefix The output files
1459 # will be created in gs://bucket-name/here/ and the names of the
Bu Sun Kim65020912020-05-20 12:08:20 -07001460 # output files will begin with &quot;filenameprefix&quot;.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001461 #
1462 # * Directory Prefix: gs://bucket-name/some/location/ The output files
1463 # will be created in gs://bucket-name/some/location/ and the names of the
1464 # output files could be anything because there was no filename prefix
1465 # specified.
1466 #
1467 # If multiple outputs, each response is still AnnotateFileResponse, each of
1468 # which contains some subset of the full list of AnnotateImageResponse.
1469 # Multiple outputs can happen if, for example, the output JSON is too large
1470 # and overflows into multiple sharded files.
1471 },
1472 },
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001473 },
1474 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001475 &quot;parent&quot;: &quot;A String&quot;, # Optional. Target project and location to make a call.
Dan O'Mearadd494642020-05-01 07:42:23 -07001476 #
1477 # Format: `projects/{project-id}/locations/{location-id}`.
1478 #
1479 # If no parent is specified, a region will be chosen automatically.
1480 #
1481 # Supported location-ids:
1482 # `us`: USA country only,
1483 # `asia`: East asia areas, like Japan, Taiwan,
1484 # `eu`: The European Union.
1485 #
1486 # Example: `projects/project-A/locations/eu`.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001487 }
1488
1489 x__xgafv: string, V1 error format.
1490 Allowed values
1491 1 - v1 error format
1492 2 - v2 error format
1493
1494Returns:
1495 An object of the form:
1496
1497 { # This resource represents a long-running operation that is the result of a
1498 # network API call.
Bu Sun Kim65020912020-05-20 12:08:20 -07001499 &quot;done&quot;: True or False, # If the value is `false`, it means the operation is still in progress.
1500 # If `true`, the operation is completed, and either `error` or `response` is
1501 # available.
1502 &quot;response&quot;: { # The normal response of the operation in case of success. If the original
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001503 # method returns no data on success, such as `Delete`, the response is
1504 # `google.protobuf.Empty`. If the original method is standard
1505 # `Get`/`Create`/`Update`, the response should be the resource. For other
1506 # methods, the response should have the type `XxxResponse`, where `Xxx`
1507 # is the original method name. For example, if the original method name
1508 # is `TakeSnapshot()`, the inferred response type is
1509 # `TakeSnapshotResponse`.
Bu Sun Kim65020912020-05-20 12:08:20 -07001510 &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001511 },
Bu Sun Kim65020912020-05-20 12:08:20 -07001512 &quot;name&quot;: &quot;A String&quot;, # The server-assigned name, which is only unique within the same service that
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001513 # originally returns it. If you use the default HTTP mapping, the
1514 # `name` should be a resource name ending with `operations/{unique_id}`.
Bu Sun Kim65020912020-05-20 12:08:20 -07001515   &quot;error&quot;: { # The error result of the operation in case of failure or cancellation. The `Status` type defines a logical error model that is suitable for
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001516 # different programming environments, including REST APIs and RPC APIs. It is
1517 # used by [gRPC](https://github.com/grpc). Each `Status` message contains
1518 # three pieces of data: error code, error message, and error details.
1519 #
1520 # You can find out more about this error model and how to work with it in the
1521 # [API Design Guide](https://cloud.google.com/apis/design/errors).
Bu Sun Kim4ed7d3f2020-05-27 12:20:54 -07001522 &quot;code&quot;: 42, # The status code, which should be an enum value of google.rpc.Code.
1523 &quot;message&quot;: &quot;A String&quot;, # A developer-facing error message, which should be in English. Any
1524 # user-facing error message should be localized and sent in the
1525 # google.rpc.Status.details field, or localized by the client.
Bu Sun Kim65020912020-05-20 12:08:20 -07001526 &quot;details&quot;: [ # A list of messages that carry the error details. There is a common set of
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001527 # message types for APIs to use.
1528 {
Bu Sun Kim65020912020-05-20 12:08:20 -07001529 &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001530 },
1531 ],
Bu Sun Kim65020912020-05-20 12:08:20 -07001532 },
1533 &quot;metadata&quot;: { # Service-specific metadata associated with the operation. It typically
1534 # contains progress information and common metadata such as create time.
1535 # Some services might not provide such metadata. Any method that returns a
1536 # long-running operation should document the metadata type, if any.
1537 &quot;a_key&quot;: &quot;&quot;, # Properties of the object. Contains field @type with type URL.
Bu Sun Kim715bd7f2019-06-14 16:50:42 -07001538 },
1539 }</pre>
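<p>Not part of the generated reference: a minimal usage sketch of calling this method with the google-api-python-client. The bucket name, object paths, batch size, and feature type are illustrative placeholders, and authentication is assumed to be configured.</p>
<pre>
# Hedged sketch only: requests asynchronous PDF annotation with results written
# to Cloud Storage, then prints the name of the returned long-running operation.
from googleapiclient.discovery import build

service = build('vision', 'v1')  # assumes default credentials are available

body = {
    'requests': [
        {
            'inputConfig': {
                'gcsSource': {'uri': 'gs://example-bucket/docs/report.pdf'},
                'mimeType': 'application/pdf',
            },
            'features': [{'type': 'DOCUMENT_TEXT_DETECTION'}],
            'outputConfig': {
                # With batchSize 20, a 100-page PDF yields 5 output JSON files
                # written under this gcs_destination uri prefix.
                'gcsDestination': {'uri': 'gs://example-bucket/vision-output/report-'},
                'batchSize': 20,
            },
        }
    ]
}

operation = service.files().asyncBatchAnnotate(body=body).execute()
print(operation['name'])  # a resource name ending with operations/{unique_id}
</pre>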
1540</div>
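<div class="method">
<p>Also not part of the generated reference: a sketch of polling the long-running operation returned above through the `google.longrunning.Operations` interface. It assumes the `operation` name from the previous sketch and this API's operations resource (documented separately); the polling interval is arbitrary.</p>
<pre>
# Hedged sketch only: waits for the asyncBatchAnnotate operation to finish and
# then inspects its terminal state.
import time

operation_name = operation['name']  # from the asyncBatchAnnotate sketch above

while True:
    op = service.operations().get(name=operation_name).execute()
    if op.get('done'):
        break
    time.sleep(5)  # arbitrary back-off; production code should be smarter

if 'error' in op:
    # Failure or cancellation: `error` is a google.rpc.Status message.
    raise RuntimeError(op['error'].get('message', 'asyncBatchAnnotate failed'))

# Success: `response` is an AsyncBatchAnnotateFilesResponse; the JSON result
# shards themselves live under the gcsDestination uri prefix and can be listed
# with a wildcard search on that prefix.
print(op.get('response', {}))
</pre>
</div>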
1541
1542</body></html>