/*
 * Copyright 2010 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef GrContext_DEFINED
#define GrContext_DEFINED

#include "include/core/SkMatrix.h"
#include "include/core/SkPathEffect.h"
#include "include/core/SkTypes.h"
#include "include/gpu/GrContextOptions.h"
#include "include/private/GrRecordingContext.h"

// We shouldn't need this, but currently Android relies on it being included transitively.
#include "include/core/SkUnPreMultiply.h"

class GrAtlasManager;
class GrBackendFormat;
class GrBackendSemaphore;
class GrCaps;
class GrContextPriv;
class GrContextThreadSafeProxy;
class GrFragmentProcessor;
struct GrGLInterface;
class GrGpu;
struct GrMockOptions;
class GrPath;
class GrRenderTargetContext;
class GrResourceCache;
class GrResourceProvider;
class GrSamplerState;
class GrSkSLFPFactoryCache;
class GrSurfaceProxy;
class GrSwizzle;
class GrTextContext;
class GrTextureProxy;
struct GrVkBackendContext;

class SkImage;
class SkSurfaceProps;
class SkTaskGroup;
class SkTraceMemoryDump;

class SK_API GrContext : public GrRecordingContext {
public:
    /**
     * Creates a GrContext for a backend context. If no GrGLInterface is provided then the result of
     * GrGLMakeNativeInterface() is used if it succeeds.
     */
    static sk_sp<GrContext> MakeGL(sk_sp<const GrGLInterface>, const GrContextOptions&);
    static sk_sp<GrContext> MakeGL(sk_sp<const GrGLInterface>);
    static sk_sp<GrContext> MakeGL(const GrContextOptions&);
    static sk_sp<GrContext> MakeGL();
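    // Usage sketch (illustrative, not part of the API): creating a GrContext for the native GL
    // backend. Assumes GrGLMakeNativeInterface() (declared in include/gpu/gl/GrGLInterface.h)
    // can build an interface for the current platform and that a GL context is already current
    // on this thread.
    //
    //     sk_sp<const GrGLInterface> glInterface = GrGLMakeNativeInterface();
    //     sk_sp<GrContext> context = GrContext::MakeGL(std::move(glInterface));
    //     if (!context) {
    //         // Interface creation or context setup failed; fall back to raster rendering.
    //     }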

    static sk_sp<GrContext> MakeVulkan(const GrVkBackendContext&, const GrContextOptions&);
    static sk_sp<GrContext> MakeVulkan(const GrVkBackendContext&);

#ifdef SK_METAL
    /**
     * Makes a GrContext which uses Metal as the backend. The device parameter is an MTLDevice
     * and queue is an MTLCommandQueue which should be used by the backend. These objects must
     * have a ref on them which can be transferred to Ganesh, which will release the ref when the
     * GrContext is destroyed.
     */
    static sk_sp<GrContext> MakeMetal(void* device, void* queue, const GrContextOptions& options);
    static sk_sp<GrContext> MakeMetal(void* device, void* queue);
#endif

    static sk_sp<GrContext> MakeMock(const GrMockOptions*, const GrContextOptions&);
    static sk_sp<GrContext> MakeMock(const GrMockOptions*);
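    // Usage sketch (illustrative, not part of the API): the mock backend performs no real GPU
    // work and is primarily useful for unit tests. Passing nullptr for GrMockOptions is assumed
    // here to select default mock capabilities.
    //
    //     sk_sp<GrContext> mockContext = GrContext::MakeMock(nullptr);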

    ~GrContext() override;

    sk_sp<GrContextThreadSafeProxy> threadSafeProxy();

    /**
     * The GrContext normally assumes that no outsider is setting state
     * within the underlying 3D API's context/device/whatever. This call informs
     * the context that the state was modified and it should resend. Shouldn't
     * be called frequently for good performance.
     * The flag bits, state, are dependent on which backend is used by the
     * context, either GL or D3D (possible in future).
     */
    void resetContext(uint32_t state = kAll_GrBackendState);
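    // Usage sketch (illustrative, not part of the API): after issuing raw GL calls alongside
    // Skia, tell the context that its cached GL state may be stale. kAll_GrBackendState is the
    // conservative choice; narrower GrGLBackendState bits can be passed when the touched state
    // is known. makeExternalGLCalls() is a hypothetical client function.
    //
    //     makeExternalGLCalls();                         // non-Skia rendering on the same context
    //     context->resetContext(kAll_GrBackendState);    // Skia re-sends affected state on next use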

    /**
     * If the backend is GrBackendApi::kOpenGL, then all texture unit/target combinations for which
     * the GrContext has modified the bound texture will have texture id 0 bound. This does not
     * flush the GrContext. Calling resetContext() does not change the set that will be bound
     * to texture id 0 on the next call to resetGLTextureBindings(). After this is called
     * all unit/target combinations are considered to have unmodified bindings until the GrContext
     * subsequently modifies them (meaning if this is called twice in a row with no intervening
     * GrContext usage then the second call is a no-op).
     */
    void resetGLTextureBindings();

    /**
     * Abandons all GPU resources and assumes the underlying backend 3D API context is no longer
     * usable. Call this if you have lost the associated GPU context, and thus internal texture,
     * buffer, etc. references/IDs are now invalid. Calling this ensures that the destructors of the
     * GrContext and any of its created resource objects will not make backend 3D API calls. Content
     * rendered but not previously flushed may be lost. After this function is called all subsequent
     * calls on the GrContext will fail or be no-ops.
     *
     * The typical use case for this function is that the underlying 3D context was lost and further
     * API calls may crash.
     */
    void abandonContext() override;

    /**
     * Returns true if the context was abandoned.
     */
    using GrImageContext::abandoned;

    /**
     * This is similar to abandonContext(), however the underlying 3D context is not yet lost and
     * the GrContext will clean up all allocated resources before returning. After returning it will
     * assume that the underlying context may no longer be valid.
     *
     * The typical use case for this function is that the client is going to destroy the 3D context
     * but can't guarantee that GrContext will be destroyed first (perhaps because it may be ref'ed
     * elsewhere by either the client or Skia objects).
     */
    virtual void releaseResourcesAndAbandonContext();
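    // Usage sketch (illustrative, not part of the API): preferred teardown order when the client
    // owns the 3D context and is about to destroy it while Skia objects may still be alive.
    // destroyPlatformGLContext() is a hypothetical client function.
    //
    //     // The backend context is still current and valid here, so Skia can free its
    //     // resources through the backend API before the context goes away.
    //     grContext->releaseResourcesAndAbandonContext();
    //     destroyPlatformGLContext();
    //     // If the 3D context was already lost (e.g. device removed), call abandonContext()
    //     // instead; no backend API calls will be made after that.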

    ///////////////////////////////////////////////////////////////////////////
    // Resource Cache

    /**
     * Return the current GPU resource cache limits.
     *
     * @param maxResources      If non-null, returns the maximum number of resources that
     *                          can be held in the cache.
     * @param maxResourceBytes  If non-null, returns the maximum number of bytes of
     *                          video memory that can be held in the cache.
     */
    void getResourceCacheLimits(int* maxResources, size_t* maxResourceBytes) const;

    /**
     * Gets the current GPU resource cache usage.
     *
     * @param resourceCount  If non-null, returns the number of resources that are held in the
     *                       cache.
     * @param resourceBytes  If non-null, returns the total number of bytes of video memory held
     *                       in the cache.
     */
    void getResourceCacheUsage(int* resourceCount, size_t* resourceBytes) const;

    /**
     * Gets the number of bytes in the cache consumed by purgeable (e.g. unlocked) resources.
     */
    size_t getResourceCachePurgeableBytes() const;

    /**
     * Specify the GPU resource cache limits. If the current cache exceeds either
     * of these, it will be purged (LRU) to keep the cache within these limits.
     *
     * @param maxResources      The maximum number of resources that can be held in
     *                          the cache.
     * @param maxResourceBytes  The maximum number of bytes of video memory
     *                          that can be held in the cache.
     */
    void setResourceCacheLimits(int maxResources, size_t maxResourceBytes);
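    // Usage sketch (illustrative, not part of the API): shrinking the cache byte budget on a
    // memory-constrained device while keeping the existing resource-count limit. The 32 MiB
    // figure is an arbitrary example, not a recommendation.
    //
    //     int maxResources;
    //     size_t maxResourceBytes;
    //     context->getResourceCacheLimits(&maxResources, &maxResourceBytes);
    //     context->setResourceCacheLimits(maxResources, 32 * (1 << 20));
    //
    //     int usedResources;
    //     size_t usedBytes;
    //     context->getResourceCacheUsage(&usedResources, &usedBytes);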

    /**
     * Frees GPU resources created by the context. Can be called to reduce GPU memory
     * pressure.
     */
    virtual void freeGpuResources();

    /**
     * Purge GPU resources that haven't been used in the past 'msNotUsed' milliseconds or are
     * otherwise marked for deletion, regardless of whether the context is under budget.
     */
    void performDeferredCleanup(std::chrono::milliseconds msNotUsed);

    // Temporary compatibility API for Android.
    void purgeResourcesNotUsedInMs(std::chrono::milliseconds msNotUsed) {
        this->performDeferredCleanup(msNotUsed);
    }
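    // Usage sketch (illustrative, not part of the API): periodic cleanup of resources that have
    // been idle for a while, e.g. driven by a once-a-second timer on the client side. Requires
    // <chrono>; the 5 second threshold is an arbitrary example.
    //
    //     using namespace std::chrono_literals;
    //     context->performDeferredCleanup(5s);    // purge resources unused for >= 5 seconds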

    /**
     * Purge unlocked resources from the cache until the provided byte count has been reached
     * or we have purged all unlocked resources. The default policy is to purge in LRU order, but
     * can be overridden to prefer purging scratch resources (in LRU order) prior to purging other
     * resource types.
     *
     * @param bytesToPurge            the desired number of bytes to be purged.
     * @param preferScratchResources  If true scratch resources will be purged prior to other
     *                                resource types.
     */
    void purgeUnlockedResources(size_t bytesToPurge, bool preferScratchResources);

    /**
     * This entry point is intended for instances where an app has been backgrounded or
     * suspended.
     * If 'scratchResourcesOnly' is true all unlocked scratch resources will be purged but the
     * unlocked resources with persistent data will remain. If 'scratchResourcesOnly' is false
     * then all unlocked resources will be purged.
     * In either case, after the unlocked resources are purged a separate pass will be made to
     * ensure that resource usage is under budget (i.e., even if 'scratchResourcesOnly' is true
     * some resources with persistent data may be purged to be under budget).
     *
     * @param scratchResourcesOnly  If true only unlocked scratch resources will be purged prior
     *                              to enforcing the budget requirements.
     */
    void purgeUnlockedResources(bool scratchResourcesOnly);
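    // Usage sketch (illustrative, not part of the API): responding to memory pressure while the
    // app is still in the foreground vs. being sent to the background.
    //
    //     // Moderate pressure: drop only unlocked scratch resources, keep persistent data.
    //     context->purgeUnlockedResources(/*scratchResourcesOnly=*/true);
    //
    //     // Backgrounded: release everything that isn't currently locked.
    //     context->purgeUnlockedResources(/*scratchResourcesOnly=*/false);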

    /**
     * Gets the maximum supported texture size.
     */
    int maxTextureSize() const;

    /**
     * Gets the maximum supported render target size.
     */
    int maxRenderTargetSize() const;

    /**
     * Can a SkImage be created with the given color type?
     */
    bool colorTypeSupportedAsImage(SkColorType) const;

    /**
     * Can a SkSurface be created with the given color type? To check whether MSAA is supported
     * use maxSurfaceSampleCountForColorType().
     */
    bool colorTypeSupportedAsSurface(SkColorType colorType) const {
        return this->maxSurfaceSampleCountForColorType(colorType) > 0;
    }

    /**
     * Gets the maximum supported sample count for a color type. 1 is returned if only non-MSAA
     * rendering is supported for the color type. 0 is returned if rendering to this color type
     * is not supported at all.
     */
    int maxSurfaceSampleCountForColorType(SkColorType) const;
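    // Usage sketch (illustrative, not part of the API): choosing an MSAA sample count for an
    // RGBA_8888 offscreen surface, clamped to what the backend supports (std::min is from
    // <algorithm>).
    //
    //     int requested = 8;
    //     int supported = context->maxSurfaceSampleCountForColorType(kRGBA_8888_SkColorType);
    //     int sampleCount = supported ? std::min(requested, supported) : 0;
    //     if (0 == sampleCount) {
    //         // Rendering to this color type is not supported at all on this context.
    //     }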
241
bsalomon@google.com27847de2011-02-22 20:59:41 +0000242 ///////////////////////////////////////////////////////////////////////////
243 // Misc.
244
Greg Daniel06be0792019-04-22 15:53:23 -0400245
246 /**
247 * Inserts a list of GPU semaphores that the current GPU-backed API must wait on before
248 * executing any more commands on the GPU. Skia will take ownership of the underlying semaphores
249 * and delete them once they have been signaled and waited on. If this call returns false, then
250 * the GPU back-end will not wait on any passed in semaphores, and the client will still own the
251 * semaphores.
252 */
253 bool wait(int numSemaphores, const GrBackendSemaphore* waitSemaphores);

    /**
     * Call to ensure all drawing to the context has been issued to the underlying 3D API.
     */
    void flush() {
        this->flush(GrFlushInfo());
    }

    /**
     * Call to ensure all drawing to the context has been issued to the underlying 3D API.
     *
     * If this call returns GrSemaphoresSubmitted::kNo, the GPU backend will not have created or
     * added any semaphores to signal on the GPU. Thus the client should not have the GPU wait on
     * any of the semaphores passed in with the GrFlushInfo. However, any pending commands to the
     * context will still be flushed.
     */
    GrSemaphoresSubmitted flush(const GrFlushInfo&);
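    // Usage sketch (illustrative, not part of the API): flushing with a completion callback so
    // the client can recycle CPU-side staging memory once the GPU work finishes. Only GrFlushInfo
    // fields that also appear in the deprecated overload below are used; MyUploader and myUploader
    // are hypothetical client types/objects.
    //
    //     static void onGpuFinished(GrGpuFinishedContext ctx) {
    //         static_cast<MyUploader*>(ctx)->onFlushComplete();
    //     }
    //
    //     GrFlushInfo flushInfo;
    //     flushInfo.fFinishedProc = onGpuFinished;
    //     flushInfo.fFinishedContext = myUploader;
    //     GrSemaphoresSubmitted submitted = context->flush(flushInfo);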

    /**
     * Deprecated.
     */
    GrSemaphoresSubmitted flush(GrFlushFlags flags, int numSemaphores,
                                GrBackendSemaphore signalSemaphores[],
                                GrGpuFinishedProc finishedProc = nullptr,
                                GrGpuFinishedContext finishedContext = nullptr) {
        GrFlushInfo info;
        info.fFlags = flags;
        info.fNumSemaphores = numSemaphores;
        info.fSignalSemaphores = signalSemaphores;
        info.fFinishedProc = finishedProc;
        info.fFinishedContext = finishedContext;
        return this->flush(info);
    }

    /**
     * Deprecated.
     */
    GrSemaphoresSubmitted flushAndSignalSemaphores(int numSemaphores,
                                                   GrBackendSemaphore signalSemaphores[]) {
        GrFlushInfo info;
        info.fNumSemaphores = numSemaphores;
        info.fSignalSemaphores = signalSemaphores;
        return this->flush(info);
    }

    /**
     * Checks whether any asynchronous work is complete and if so calls related callbacks.
     */
    void checkAsyncWorkCompletion();

    // Provides access to functions that aren't part of the public API.
    GrContextPriv priv();
    const GrContextPriv priv() const;

    /** Enumerates all cached GPU resources and dumps their memory to traceMemoryDump. */
    // Chrome is using this!
    void dumpMemoryStatistics(SkTraceMemoryDump* traceMemoryDump) const;

    bool supportsDistanceFieldText() const;

    void storeVkPipelineCacheData();

    static size_t ComputeTextureSize(SkColorType type, int width, int height, GrMipMapped,
                                     bool useNextPow2 = false);
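    // Usage sketch (illustrative, not part of the API): estimating the GPU memory footprint of a
    // 256x256 RGBA_8888 texture with a full mip chain, e.g. for client-side budgeting. The
    // interpretation of the result as an approximate byte count is an assumption here.
    //
    //     size_t approxBytes = GrContext::ComputeTextureSize(kRGBA_8888_SkColorType,
    //                                                        256, 256, GrMipMapped::kYes);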

protected:
    GrContext(GrBackendApi, const GrContextOptions&, int32_t contextID = SK_InvalidGenID);

    bool init(sk_sp<const GrCaps>, sk_sp<GrSkSLFPFactoryCache>) override;

    GrContext* asDirectContext() override { return this; }

    virtual GrAtlasManager* onGetAtlasManager() = 0;

    sk_sp<GrContextThreadSafeProxy> fThreadSafeProxy;

private:
    // fTaskGroup must appear before anything that uses it (e.g. fGpu), so that it is destroyed
    // after all of its users. Clients of fTaskGroup will generally want to ensure that they call
    // wait() on it as they are being destroyed, to avoid the possibility of pending tasks being
    // invoked after objects they depend upon have already been destroyed.
    std::unique_ptr<SkTaskGroup>            fTaskGroup;
    sk_sp<GrGpu>                            fGpu;
    GrResourceCache*                        fResourceCache;
    GrResourceProvider*                     fResourceProvider;

    bool                                    fDidTestPMConversions;
    // true if the PM/UPM conversion succeeded; false otherwise
    bool                                    fPMUPMConversionsRoundTrip;

    GrContextOptions::PersistentCache*      fPersistentCache;
    GrContextOptions::ShaderErrorHandler*   fShaderErrorHandler;

    // TODO: have the GrClipStackClip use renderTargetContexts and rm this friending
    friend class GrContextPriv;

    /**
     * These functions create premul <-> unpremul effects, using the specialized round-trip effects
     * from GrConfigConversionEffect.
     */
    std::unique_ptr<GrFragmentProcessor> createPMToUPMEffect(std::unique_ptr<GrFragmentProcessor>);
    std::unique_ptr<GrFragmentProcessor> createUPMToPMEffect(std::unique_ptr<GrFragmentProcessor>);

    /**
     * Returns true if createPMToUPMEffect and createUPMToPMEffect will succeed. In other words,
     * did we find a pair of round-trip preserving conversion effects?
     */
    bool validPMUPMConversionExists();

    typedef GrRecordingContext INHERITED;
};

#endif