/*
 * Copyright 2020 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrDirectContext_DEFINED
#define GrDirectContext_DEFINED

#include "include/private/GrContext.h"

class GrAtlasManager;
class GrSmallPathAtlasMgr;

class SK_API GrDirectContext : public GrContext {
public:
#ifdef SK_GL
    /**
     * Creates a GrDirectContext for a backend context. If no GrGLInterface is provided then the
     * result of GrGLMakeNativeInterface() is used if it succeeds.
     */
    static sk_sp<GrDirectContext> MakeGL(sk_sp<const GrGLInterface>, const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeGL(sk_sp<const GrGLInterface>);
    static sk_sp<GrDirectContext> MakeGL(const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeGL();
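
    // A hedged usage sketch (not part of the API): creating a GL-backed
    // context with default options. Assumes the client's GL context is
    // already current on this thread.
    //
    //   sk_sp<GrDirectContext> direct = GrDirectContext::MakeGL();
    //   if (!direct) {
    //       // GrGLMakeNativeInterface() failed or the interface was rejected.
    //   }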
#endif

#ifdef SK_VULKAN
    /**
     * The Vulkan context (VkQueue, VkDevice, VkInstance) must be kept alive until the returned
     * GrDirectContext is destroyed. This also means that any objects created with this
     * GrDirectContext (e.g. SkSurfaces, SkImages, etc.) must also be released as they may hold
     * refs on the GrDirectContext. Once all these objects and the GrDirectContext are released,
     * it is safe to delete the Vulkan objects.
     */
    static sk_sp<GrDirectContext> MakeVulkan(const GrVkBackendContext&, const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeVulkan(const GrVkBackendContext&);
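
    // A hedged lifetime sketch: the fields shown are the usual
    // GrVkBackendContext inputs; the handles are assumed to be client-created
    // and must outlive the GrDirectContext.
    //
    //   GrVkBackendContext vkContext;
    //   vkContext.fInstance = instance;              // client VkInstance
    //   vkContext.fPhysicalDevice = physicalDevice;
    //   vkContext.fDevice = device;                  // kept alive past the context
    //   vkContext.fQueue = queue;                    // kept alive past the context
    //   vkContext.fGetProc = getProc;                // client's proc loader
    //   sk_sp<GrDirectContext> direct = GrDirectContext::MakeVulkan(vkContext);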
#endif

#ifdef SK_METAL
    /**
     * Makes a GrDirectContext which uses Metal as the backend. The device parameter is an
     * MTLDevice and queue is an MTLCommandQueue which should be used by the backend. These
     * objects must have a ref on them that can be transferred to Ganesh, which will release
     * the ref when the GrDirectContext is destroyed.
     */
    static sk_sp<GrDirectContext> MakeMetal(void* device, void* queue, const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeMetal(void* device, void* queue);
#endif

#ifdef SK_DIRECT3D
    /**
     * Makes a GrDirectContext which uses Direct3D as the backend. The Direct3D context
     * must be kept alive until the returned GrDirectContext is first destroyed or abandoned.
     */
    static sk_sp<GrDirectContext> MakeDirect3D(const GrD3DBackendContext&, const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeDirect3D(const GrD3DBackendContext&);
#endif

#ifdef SK_DAWN
    static sk_sp<GrDirectContext> MakeDawn(const wgpu::Device&,
                                           const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeDawn(const wgpu::Device&);
#endif

    static sk_sp<GrDirectContext> MakeMock(const GrMockOptions*, const GrContextOptions&);
    static sk_sp<GrDirectContext> MakeMock(const GrMockOptions*);

    ~GrDirectContext() override;

    /**
     * The context normally assumes that no outsider is setting state
     * within the underlying 3D API's context/device/whatever. This call informs
     * the context that the state was modified and it should resend it. Shouldn't
     * be called frequently for good performance.
     * The flag bits in 'state' depend on which backend is used by the
     * context, either GL or D3D (possible in future).
     */
    void resetContext(uint32_t state = kAll_GrBackendState);
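
    // A hedged sketch of when a reset is needed; assumes the client issued
    // raw GL calls that Skia cannot see.
    //
    //   glBindTexture(GL_TEXTURE_2D, myTextureId);   // state changed behind Skia
    //   drawWithMyOwnGL();                           // hypothetical client code
    //   direct->resetContext(kAll_GrBackendState);   // tell Skia to resend state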

    /**
     * If the backend is GrBackendApi::kOpenGL, then all texture unit/target combinations for which
     * the context has modified the bound texture will have texture id 0 bound. This does not
     * flush the context. Calling resetContext() does not change the set that will be bound
     * to texture id 0 on the next call to resetGLTextureBindings(). After this is called
     * all unit/target combinations are considered to have unmodified bindings until the context
     * subsequently modifies them (meaning if this is called twice in a row with no intervening
     * context usage then the second call is a no-op).
     */
    void resetGLTextureBindings();

    /**
     * Abandons all GPU resources and assumes the underlying backend 3D API context is no longer
     * usable. Call this if you have lost the associated GPU context, and thus internal texture,
     * buffer, etc. references/IDs are now invalid. Calling this ensures that the destructors of the
     * context and any of its created resource objects will not make backend 3D API calls. Content
     * rendered but not previously flushed may be lost. After this function is called all subsequent
     * calls on the context will fail or be no-ops.
     *
     * The typical use case for this function is that the underlying 3D context was lost and further
     * API calls may crash.
     *
     * For Vulkan, even if the device becomes lost, the VkQueue, VkDevice, or VkInstance used to
     * create the context must be kept alive even after abandoning the context. Those objects must
     * live for the lifetime of the context object itself. The reason for this is so that
     * we can continue to delete any outstanding GrBackendTextures/RenderTargets which must be
     * cleaned up even in a device lost state.
     */
    void abandonContext() override;

    /**
     * Returns true if the context was abandoned or if the backend specific context has
     * gotten into an unrecoverable, lost state (e.g. in the Vulkan backend if we've gotten a
     * VK_ERROR_DEVICE_LOST). If the backend context is lost, this call will also abandon the
     * GrContext.
     */
    bool abandoned() override;

    // TODO: Remove this from public after migrating Chrome.
    sk_sp<GrContextThreadSafeProxy> threadSafeProxy();

    /**
     * Checks if the underlying 3D API reported an out-of-memory error. If this returns true it is
     * reset and will return false until another out-of-memory error is reported by the 3D API. If
     * the context is abandoned then this will report false.
     *
     * Currently this is implemented for:
     *
     * OpenGL [ES] - Note that client calls to glGetError() may swallow GL_OUT_OF_MEMORY errors and
     * therefore hide the error from Skia. Also, it is not advised to use this in combination with
     * enabling GrContextOptions::fSkipGLErrorChecks. That option may prevent the context from ever
     * checking the GL context for OOM.
     *
     * Vulkan - Reports true if VK_ERROR_OUT_OF_HOST_MEMORY or VK_ERROR_OUT_OF_DEVICE_MEMORY has
     * occurred.
     */
    bool oomed();
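
    // A hedged usage sketch: polling for OOM after submitting a frame, with
    // shrinking the cache as one possible reaction.
    //
    //   direct->flushAndSubmit();
    //   if (direct->oomed()) {
    //       direct->freeGpuResources();
    //   }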

    /**
     * This is similar to abandonContext() however the underlying 3D context is not yet lost and
     * the context will cleanup all allocated resources before returning. After returning it will
     * assume that the underlying context may no longer be valid.
     *
     * The typical use case for this function is that the client is going to destroy the 3D context
     * but can't guarantee that context will be destroyed first (perhaps because it may be ref'ed
     * elsewhere by either the client or Skia objects).
     *
     * For Vulkan, even if the device becomes lost, the VkQueue, VkDevice, or VkInstance used to
     * create the context must still be alive when releaseResourcesAndAbandonContext is called.
     */
    void releaseResourcesAndAbandonContext();
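
    // A hedged teardown sketch; 'surface' and destroyMy3DContext() are
    // hypothetical client objects.
    //
    //   surface.reset();                              // drop Skia objects first
    //   direct->releaseResourcesAndAbandonContext();  // frees GPU resources now
    //   direct.reset();
    //   destroyMy3DContext();                         // safe to tear down the 3D API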

    ///////////////////////////////////////////////////////////////////////////
    // Resource Cache

    /** DEPRECATED
     * Return the current GPU resource cache limits.
     *
     * @param maxResources If non-null, will be set to -1.
     * @param maxResourceBytes If non-null, returns maximum number of bytes of
     *                         video memory that can be held in the cache.
     */
    void getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const;

    /**
     * Return the current GPU resource cache limit in bytes.
     */
    size_t getResourceCacheLimit() const;

    /**
     * Gets the current GPU resource cache usage.
     *
     * @param resourceCount If non-null, returns the number of resources that are held in the
     *                      cache.
     * @param resourceBytes If non-null, returns the total number of bytes of video memory held
     *                      in the cache.
     */
    void getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const;

    /**
     * Gets the number of bytes in the cache consumed by purgeable (e.g. unlocked) resources.
     */
    size_t getResourceCachePurgeableBytes() const;

    /** DEPRECATED
     * Specify the GPU resource cache limits. If the current cache exceeds the maxResourceBytes
     * limit, it will be purged (LRU) to keep the cache within the limit.
     *
     * @param maxResources Unused.
     * @param maxResourceBytes The maximum number of bytes of video memory
     *                         that can be held in the cache.
     */
    void setResourceCacheLimits(int maxResources, size_t maxResourceBytes);

    /**
     * Specify the GPU resource cache limit. If the cache currently exceeds this limit,
     * it will be purged (LRU) to keep the cache within the limit.
     *
     * @param maxResourceBytes The maximum number of bytes of video memory
     *                         that can be held in the cache.
     */
    void setResourceCacheLimit(size_t maxResourceBytes);
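
    // A hedged example: capping the cache at a 64 MiB budget.
    //
    //   direct->setResourceCacheLimit(64 * 1024 * 1024);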

    /**
     * Frees GPU resources created by the context. Can be called to reduce GPU memory
     * pressure.
     */
    void freeGpuResources();

    /**
     * Purge GPU resources that haven't been used in the past 'msNotUsed' milliseconds or are
     * otherwise marked for deletion, regardless of whether the context is under budget.
     */
    void performDeferredCleanup(std::chrono::milliseconds msNotUsed);
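
    // A hedged usage sketch: dropping resources idle for more than five
    // seconds, e.g. from a periodic idle handler.
    //
    //   using namespace std::chrono_literals;
    //   direct->performDeferredCleanup(5000ms);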

    // Temporary compatibility API for Android.
    void purgeResourcesNotUsedInMs(std::chrono::milliseconds msNotUsed) {
        this->performDeferredCleanup(msNotUsed);
    }

    /**
     * Purge unlocked resources from the cache until the provided byte count has been reached
     * or we have purged all unlocked resources. The default policy is to purge in LRU order, but
     * can be overridden to prefer purging scratch resources (in LRU order) prior to purging other
     * resource types.
     *
     * @param bytesToPurge the desired number of bytes to be purged.
     * @param preferScratchResources If true scratch resources will be purged prior to other
     *                               resource types.
     */
    void purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources);

    /**
     * This entry point is intended for instances where an app has been backgrounded or
     * suspended.
     * If 'scratchResourcesOnly' is true all unlocked scratch resources will be purged but the
     * unlocked resources with persistent data will remain. If 'scratchResourcesOnly' is false
     * then all unlocked resources will be purged.
     * In either case, after the unlocked resources are purged a separate pass will be made to
     * ensure that resource usage is under budget (i.e., even if 'scratchResourcesOnly' is true
     * some resources with persistent data may be purged to be under budget).
     *
     * @param scratchResourcesOnly If true only unlocked scratch resources will be purged prior
     *                             to enforcing the budget requirements.
     */
    void purgeUnlockedResources(bool scratchResourcesOnly);
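
    // A hedged backgrounding sketch: keep data-bearing resources but drop
    // reusable scratch memory while the app is suspended.
    //
    //   direct->purgeUnlockedResources(/*scratchResourcesOnly=*/true);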

    /**
     * Gets the maximum supported texture size.
     */
    using GrRecordingContext::maxTextureSize;

    /**
     * Gets the maximum supported render target size.
     */
    using GrRecordingContext::maxRenderTargetSize;

    /**
     * Can a SkImage be created with the given color type.
     */
    using GrRecordingContext::colorTypeSupportedAsImage;

    /**
     * Can a SkSurface be created with the given color type. To check whether MSAA is supported
     * use maxSurfaceSampleCountForColorType().
     */
    using GrRecordingContext::colorTypeSupportedAsSurface;

    /**
     * Gets the maximum supported sample count for a color type. 1 is returned if only non-MSAA
     * rendering is supported for the color type. 0 is returned if rendering to this color type
     * is not supported at all.
     */
    using GrRecordingContext::maxSurfaceSampleCountForColorType;

    ///////////////////////////////////////////////////////////////////////////
    // Misc.

    /**
     * Inserts a list of GPU semaphores that the current GPU-backed API must wait on before
     * executing any more commands on the GPU. If this call returns false, then the GPU back-end
     * will not wait on any passed in semaphores, and the client will still own the semaphores,
     * regardless of the value of deleteSemaphoresAfterWait.
     *
     * If deleteSemaphoresAfterWait is false then Skia will not delete the semaphores. In this case
     * it is the client's responsibility to not destroy or attempt to reuse the semaphores until it
     * knows that Skia has finished waiting on them. This can be done by using finishedProcs on
     * flush calls.
     */
    bool wait(int numSemaphores, const GrBackendSemaphore* waitSemaphores,
              bool deleteSemaphoresAfterWait = true);
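
    // A hedged sketch: making the GPU wait on one client-created semaphore.
    // makeMyBackendSemaphore() is hypothetical; 'sem' is assumed to wrap a
    // signaled backend semaphore.
    //
    //   GrBackendSemaphore sem = makeMyBackendSemaphore();
    //   if (!direct->wait(1, &sem, /*deleteSemaphoresAfterWait=*/true)) {
    //       // Skia will not wait; the client still owns 'sem'.
    //   }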

    /**
     * Call to ensure all drawing to the context has been flushed and submitted to the underlying
     * 3D API. This is equivalent to calling GrContext::flush with a default GrFlushInfo followed
     * by GrContext::submit(syncCpu).
     */
    void flushAndSubmit(bool syncCpu = false) {
        this->flush(GrFlushInfo());
        this->submit(syncCpu);
    }

    /**
     * Call to ensure all drawing to the context has been flushed to underlying 3D API specific
     * objects. A call to `submit` is always required to ensure work is actually sent to
     * the gpu. Some specific API details:
     *     GL: Commands are actually sent to the driver, but glFlush is never called. Thus some
     *         sync objects from the flush will not be valid until a submission occurs.
     *
     *     Vulkan/Metal/D3D/Dawn: Commands are recorded to the backend APIs corresponding command
     *         buffer or encoder objects. However, these objects are not sent to the gpu until a
     *         submission occurs.
     *
     * If the return is GrSemaphoresSubmitted::kYes, only initialized GrBackendSemaphores will be
     * submitted to the gpu during the next submit call (it is possible Skia failed to create a
     * subset of the semaphores). The client should not wait on these semaphores until after submit
     * has been called, and must keep them alive until then. If this call returns
     * GrSemaphoresSubmitted::kNo, the GPU backend will not submit any semaphores to be signaled on
     * the GPU. Thus the client should not have the GPU wait on any of the semaphores passed in with
     * the GrFlushInfo. Regardless of whether semaphores were submitted to the GPU or not, the
     * client is still responsible for deleting any initialized semaphores.
     * Regardless of semaphore submission the context will still be flushed. It should be
     * emphasized that a return value of GrSemaphoresSubmitted::kNo does not mean the flush did not
     * happen. It simply means there were no semaphores submitted to the GPU. A caller should only
     * take this as a failure if they passed in semaphores to be submitted.
     */
    GrSemaphoresSubmitted flush(const GrFlushInfo& info);

    void flush() { this->flush({}); }

    /**
     * Submit outstanding work to the gpu from all previously un-submitted flushes. The return
     * value of the submit will indicate whether or not the submission to the GPU was successful.
     *
     * If the call returns true, all previously passed in semaphores in flush calls will have been
     * submitted to the GPU and they can safely be waited on. The caller should wait on those
     * semaphores or perform some other global synchronization before deleting the semaphores.
     *
     * If it returns false, then those same semaphores will not have been submitted and we will not
     * try to submit them again. The caller is free to delete the semaphores at any time.
     *
     * If the syncCpu flag is true this function will return once the gpu has finished with all
     * submitted work.
     */
    bool submit(bool syncCpu = false);
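
    // A hedged flush/submit sketch with a completion callback;
    // myFinishedProc/myFinishedContext are hypothetical client names.
    //
    //   GrFlushInfo info;
    //   info.fFinishedProc = myFinishedProc;       // called when the gpu is done
    //   info.fFinishedContext = myFinishedContext;
    //   direct->flush(info);
    //   direct->submit(/*syncCpu=*/false);         // nothing reaches the gpu without this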

    /**
     * Checks whether any asynchronous work is complete and if so calls related callbacks.
     */
    void checkAsyncWorkCompletion();

    /** Enumerates all cached GPU resources and dumps their memory to traceMemoryDump. */
    // Chrome is using this!
    void dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const;

    bool supportsDistanceFieldText() const;

    void storeVkPipelineCacheData();

    // Returns the gpu memory size of the texture that backs the passed in SkImage. Returns 0 if
    // the SkImage is not texture backed. For external format textures this will also return 0 as
    // we cannot determine the correct size.
    static size_t ComputeImageSize(sk_sp<SkImage> image, GrMipmapped, bool useNextPow2 = false);

    /**
     * Retrieve the default GrBackendFormat for a given SkColorType and renderability.
     * It is guaranteed that this backend format will be the one used by the following
     * SkColorType and SkSurfaceCharacterization-based createBackendTexture methods.
     *
     * The caller should check that the returned format is valid.
     */
    GrBackendFormat defaultBackendFormat(SkColorType ct, GrRenderable renderable) const {
        return INHERITED::defaultBackendFormat(ct, renderable);
    }

    /**
     * The explicitly allocated backend texture API allows clients to use Skia to create backend
     * objects outside of Skia proper (i.e., Skia's caching system will not know about them.)
     *
     * It is the client's responsibility to delete all these objects (using deleteBackendTexture)
     * before deleting the context used to create them. If the backend is Vulkan, the textures must
     * be deleted before abandoning the context as well. Additionally, clients should only delete
     * these objects on the thread for which that context is active.
     *
     * The client is responsible for ensuring synchronization between different uses
     * of the backend object (i.e., wrapping it in a surface, rendering to it, deleting the
     * surface, rewrapping it in an image and drawing the image will require explicit
     * synchronization on the client's part).
     */

    /**
     * If possible, create an uninitialized backend texture. The client should ensure that the
     * returned backend texture is valid.
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_UNDEFINED.
     */
    GrBackendTexture createBackendTexture(int width, int height,
                                          const GrBackendFormat&,
                                          GrMipmapped,
                                          GrRenderable,
                                          GrProtected = GrProtected::kNo);

    /**
     * If possible, create an uninitialized backend texture. The client should ensure that the
     * returned backend texture is valid.
     * If successful, the created backend texture will be compatible with the provided
     * SkColorType.
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_UNDEFINED.
     */
    GrBackendTexture createBackendTexture(int width, int height,
                                          SkColorType,
                                          GrMipmapped,
                                          GrRenderable,
                                          GrProtected = GrProtected::kNo);

    /**
     * If possible, create a backend texture initialized to a particular color. The client should
     * ensure that the returned backend texture is valid. The client can pass in a finishedProc
     * to be notified when the data has been uploaded by the gpu and the texture can be deleted. The
     * client is required to call `submit` to send the upload work to the gpu. The
     * finishedProc will always get called even if we failed to create the GrBackendTexture.
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    GrBackendTexture createBackendTexture(int width, int height,
                                          const GrBackendFormat&,
                                          const SkColor4f& color,
                                          GrMipmapped,
                                          GrRenderable,
                                          GrProtected = GrProtected::kNo,
                                          GrGpuFinishedProc finishedProc = nullptr,
                                          GrGpuFinishedContext finishedContext = nullptr);

    /**
     * If possible, create a backend texture initialized to a particular color. The client should
     * ensure that the returned backend texture is valid. The client can pass in a finishedProc
     * to be notified when the data has been uploaded by the gpu and the texture can be deleted. The
     * client is required to call `submit` to send the upload work to the gpu. The
     * finishedProc will always get called even if we failed to create the GrBackendTexture.
     * If successful, the created backend texture will be compatible with the provided
     * SkColorType.
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    GrBackendTexture createBackendTexture(int width, int height,
                                          SkColorType,
                                          const SkColor4f& color,
                                          GrMipmapped,
                                          GrRenderable,
                                          GrProtected = GrProtected::kNo,
                                          GrGpuFinishedProc finishedProc = nullptr,
                                          GrGpuFinishedContext finishedContext = nullptr);

    /**
     * If possible, create a backend texture initialized with the provided pixmap data. The client
     * should ensure that the returned backend texture is valid. The client can pass in a
     * finishedProc to be notified when the data has been uploaded by the gpu and the texture can be
     * deleted. The client is required to call `submit` to send the upload work to the gpu.
     * The finishedProc will always get called even if we failed to create the GrBackendTexture.
     * If successful, the created backend texture will be compatible with the provided
     * pixmap(s). Compatible, in this case, means that the backend format will be the result
     * of calling defaultBackendFormat on the base pixmap's colortype. The src data can be deleted
     * when this call returns.
     * If numLevels is 1 a non-mipMapped texture will result. If a mipMapped texture is desired
     * the data for all the mipmap levels must be provided. In the mipmapped case all the
     * colortypes of the provided pixmaps must be the same. Additionally, all the miplevels
     * must be sized correctly (please see SkMipmap::ComputeLevelSize and ComputeLevelCount).
     * Note: the pixmaps' alphatypes and colorspaces are ignored.
     * For the Vulkan backend the layout of the created VkImage will be:
     * VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL
     */
    GrBackendTexture createBackendTexture(const SkPixmap srcData[], int numLevels,
                                          GrRenderable, GrProtected,
                                          GrGpuFinishedProc finishedProc = nullptr,
                                          GrGpuFinishedContext finishedContext = nullptr);

    // Helper version of above for a single level.
    GrBackendTexture createBackendTexture(const SkPixmap& srcData,
                                          GrRenderable renderable,
                                          GrProtected isProtected,
                                          GrGpuFinishedProc finishedProc = nullptr,
                                          GrGpuFinishedContext finishedContext = nullptr) {
        return this->createBackendTexture(&srcData, 1, renderable, isProtected, finishedProc,
                                          finishedContext);
    }
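
    // A hedged end-to-end sketch; 'pixmap' is hypothetical client data, and
    // deleteBackendTexture is assumed to be inherited from GrContext.
    //
    //   GrBackendTexture tex = direct->createBackendTexture(
    //           pixmap, GrRenderable::kNo, GrProtected::kNo);
    //   direct->submit();                    // required to send the upload to the gpu
    //   // ... wrap 'tex' in an SkImage or SkSurface and draw ...
    //   direct->deleteBackendTexture(tex);   // client-owned: must be deleted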
protected:
    GrDirectContext(GrBackendApi backend, const GrContextOptions& options);

    bool init() override;

    GrAtlasManager* onGetAtlasManager() override { return fAtlasManager.get(); }
    GrSmallPathAtlasMgr* onGetSmallPathAtlasMgr() override;

    GrDirectContext* asDirectContext() override { return this; }

private:
    std::unique_ptr<GrAtlasManager> fAtlasManager;

    std::unique_ptr<GrSmallPathAtlasMgr> fSmallPathAtlasMgr;

    using INHERITED = GrContext;
};


#endif