Set proper error code for glShaderBinary function

This function is not supported, but we still need to set the proper
error code when it is called instead of silently doing nothing. Since
the encoder exposes no shader binary formats, no binaryformat value is
accepted, so per the OpenGL ES 2.0 spec the call generates
GL_INVALID_ENUM.
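
A minimal sketch of the client-observable behavior after this change
(the helper name and argument values below are illustrative, not part
of this patch):

    #include <GLES2/gl2.h>
    #include <assert.h>

    void check_shader_binary_error(void) {
        GLuint shader = glCreateShader(GL_VERTEX_SHADER);
        // No binaryformat is accepted by the encoder; 0 is a placeholder.
        glShaderBinary(1, &shader, 0, NULL, 0);
        // Before this change the call returned silently with no error set.
        assert(glGetError() == GL_INVALID_ENUM);
        glDeleteShader(shader);
    }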

Change-Id: I3f880e9b5e661fe4bd8e8f51f38df71c24b420e5
diff --git a/system/GLESv2_enc/GL2Encoder.cpp b/system/GLESv2_enc/GL2Encoder.cpp
index 90aa61d..18c796e 100755
--- a/system/GLESv2_enc/GL2Encoder.cpp
+++ b/system/GLESv2_enc/GL2Encoder.cpp
@@ -71,6 +71,7 @@
     OVERRIDE(glGetVertexAttribfv);
     OVERRIDE(glGetVertexAttribPointerv);
 
+    this->glShaderBinary = &s_glShaderBinary;
     this->glShaderSource = &s_glShaderSource;
     this->glFinish = &s_glFinish;
 
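Note: glShaderBinary is installed by direct assignment, matching the
existing glShaderSource and glFinish lines above, rather than via
OVERRIDE. OVERRIDE additionally saves the encoder's generated entry
point in an m_*_enc member before replacing it, which is unnecessary
here since s_glShaderBinary never calls through. A rough sketch of the
helper's assumed shape, for context only:

    // Save the generated encoder entry point, then install the override.
    #define OVERRIDE(name)           \
        m_##name##_enc = this->name; \
        this->name = &s_##name
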
@@ -652,6 +653,13 @@
     return true;
 }
 
+void GL2Encoder::s_glShaderBinary(void *self, GLsizei n, const GLuint *shaders, GLenum binaryformat, const void* binary, GLsizei length)
+{
+    GL2Encoder* ctx = (GL2Encoder*)self;
+    // Not supported: report GL_INVALID_ENUM, since no shader binary
+    // formats are accepted by this encoder (GLES2 spec behavior).
+    SET_ERROR_IF(1, GL_INVALID_ENUM);
+}
+
 void GL2Encoder::s_glShaderSource(void *self, GLuint shader, GLsizei count, const GLchar * const *string, const GLint *length)
 {
     GL2Encoder* ctx = (GL2Encoder*)self;
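
SET_ERROR_IF is the error-reporting pattern used throughout
GL2Encoder.cpp: on a true condition it records the error on the
encoder context (the local `ctx`) and returns from the entry point.
A rough sketch of what it expands to (assumed shape; the real macro
may also log the error):

    // Record `err` on the context and bail out of the current GL call.
    #define SET_ERROR_IF(condition, err) \
        if ((condition)) {               \
            ctx->setError(err);          \
            return;                      \
        }
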
diff --git a/system/GLESv2_enc/GL2Encoder.h b/system/GLESv2_enc/GL2Encoder.h
index 5f255d1..21e7932 100644
--- a/system/GLESv2_enc/GL2Encoder.h
+++ b/system/GLESv2_enc/GL2Encoder.h
@@ -117,6 +117,8 @@
     glGetVertexAttribPointerv_client_proc_t m_glGetVertexAttribPointerv_enc;
     static void s_glGetVertexAttribPointerv(void *self, GLuint index, GLenum pname, GLvoid **pointer);
 
+    static void s_glShaderBinary(void *self, GLsizei n, const GLuint *shaders, GLenum binaryformat, const void* binary, GLsizei length);
+
     static void s_glShaderSource(void *self, GLuint shader, GLsizei count, const GLchar * const *string, const GLint *length);
 
     static void s_glFinish(void *self);