compiler: Connect shader depth flags to driver

Populate the bits that inform the driver of how the fragment shader
uses depth, including KILL.

Also a slight change to send more information than was available
before, letting the hardware know whether the new depth is guaranteed
to be GE/LE the source depth.
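
A minimal sketch of the compiler-side selection, assuming
hypothetical shader-info inputs (writes_depth, depth_layout, and the
caller's fields are illustrative names, not the real backend
structures; only intel_computed_depth_mode and the use flags come
from this patch):

    /* Illustrative depth-layout enum; stands in for whatever the
     * front end records about conservative depth qualifiers. */
    enum fs_depth_layout {
        FS_DEPTH_LAYOUT_ANY,
        FS_DEPTH_LAYOUT_GREATER,    /* depth_greater qualifier */
        FS_DEPTH_LAYOUT_LESS,       /* depth_less qualifier */
    };

    static enum intel_computed_depth_mode
    fs_computed_depth_mode(bool writes_depth,
                           enum fs_depth_layout layout)
    {
        if (!writes_depth)
            return INTEL_COMPUTED_DEPTH_MODE_NONE;

        switch (layout) {
        case FS_DEPTH_LAYOUT_GREATER: /* new depth >= source depth */
            return INTEL_COMPUTED_DEPTH_MODE_ON_GE;
        case FS_DEPTH_LAYOUT_LESS:    /* new depth <= source depth */
            return INTEL_COMPUTED_DEPTH_MODE_ON_LE;
        default:
            return INTEL_COMPUTED_DEPTH_MODE_ON;
        }
    }

    /* Hypothetical caller: records depth and KILL usage on the
     * shader; 'shader' and 'info' are assumptions for illustration. */
    shader->computed_depth_mode =
        fs_computed_depth_mode(info.writes_depth, info.depth_layout);
    if (info.uses_kill)
        shader->uses |= INTEL_SHADER_USE_KILL;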

v2: Create an enum for the computed depth mode for cleaner use.
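
Because the enum order matches the hardware encoding, the driver can
shift the value straight into 3DSTATE_WM with no translation table.
A hedged sketch; the dword, the location of the field on the
pipeline's fs shader struct, and the shift macro are placeholders
rather than the real per-gen register defines:

    /* computed_depth_mode maps 1:1 to the Pixel Shader Computed
     * Depth Mode encoding in 3DSTATE_WM, so the value is shifted
     * in directly.  WM_COMPUTED_DEPTH_MODE_SHIFT is a placeholder
     * for the real register define. */
    dw1 |= (uint32_t) pipeline->fs.computed_depth_mode <<
           WM_COMPUTED_DEPTH_MODE_SHIFT;
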
diff --git a/icd/intel/pipeline.h b/icd/intel/pipeline.h
index dd08e52..25542e8 100644
--- a/icd/intel/pipeline.h
+++ b/icd/intel/pipeline.h
@@ -38,9 +38,16 @@
     INTEL_SHADER_USE_IID                = (1 << 1),
 
     INTEL_SHADER_USE_KILL               = (1 << 2),
-    INTEL_SHADER_USE_COMPUTED_DEPTH     = (1 << 3),
-    INTEL_SHADER_USE_DEPTH              = (1 << 4),
-    INTEL_SHADER_USE_W                  = (1 << 5),
+    INTEL_SHADER_USE_DEPTH              = (1 << 3),
+    INTEL_SHADER_USE_W                  = (1 << 4),
+};
+
+/* This order must match Pixel Shader Computed Depth Mode in 3DSTATE_WM */
+enum intel_computed_depth_mode {
+    INTEL_COMPUTED_DEPTH_MODE_NONE,
+    INTEL_COMPUTED_DEPTH_MODE_ON,
+    INTEL_COMPUTED_DEPTH_MODE_ON_GE,
+    INTEL_COMPUTED_DEPTH_MODE_ON_LE
 };
 
 #define INTEL_PIPELINE_RMAP_SLOT_RT ((XGL_UINT) -1)
@@ -123,6 +130,8 @@
 
     XGL_GPU_SIZE per_thread_scratch_size;
 
+    enum intel_computed_depth_mode computed_depth_mode;
+
     struct intel_pipeline_rmap *rmap;
 
     /* these are set up by the driver */