Set proper error code for glShaderBinary function

This function is not supported, but we still need to set the proper error
code when it is called instead of silently doing nothing. Since the encoder
accepts no shader binary formats, the GLES 2.0 spec requires glShaderBinary
to generate GL_INVALID_ENUM for the unrecognized binaryformat.
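
For reference, a minimal client-side sketch (illustrative only, not part
of this change; loadShader and its parameters are hypothetical) of what an
application now observes: with no binary formats advertised, glShaderBinary
fails with GL_INVALID_ENUM, so the app can detect that and fall back to
compiling from source.

    #include <GLES2/gl2.h>

    // Hypothetical helper; assumes a current GLES 2.0 context.
    static GLuint loadShader(GLenum type, const char* source,
                             GLenum binaryFormat, const void* binary,
                             GLsizei binaryLen) {
        GLuint shader = glCreateShader(type);

        GLint numFormats = 0;
        glGetIntegerv(GL_NUM_SHADER_BINARY_FORMATS, &numFormats);
        if (numFormats > 0) {
            glShaderBinary(1, &shader, binaryFormat, binary, binaryLen);
        }

        // With this change an unsupported glShaderBinary call reports
        // GL_INVALID_ENUM instead of appearing to succeed, so the
        // fallback path below is taken on the emulator.
        if (numFormats == 0 || glGetError() != GL_NO_ERROR) {
            glShaderSource(shader, 1, &source, NULL);
            glCompileShader(shader);
        }
        return shader;
    }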

Change-Id: I3f880e9b5e661fe4bd8e8f51f38df71c24b420e5
diff --git a/system/GLESv2_enc/GL2Encoder.cpp b/system/GLESv2_enc/GL2Encoder.cpp
index 90aa61d..18c796e 100755
--- a/system/GLESv2_enc/GL2Encoder.cpp
+++ b/system/GLESv2_enc/GL2Encoder.cpp
@@ -71,6 +71,7 @@
     OVERRIDE(glGetVertexAttribfv);
     OVERRIDE(glGetVertexAttribPointerv);
 
+    this->glShaderBinary = &s_glShaderBinary;
     this->glShaderSource = &s_glShaderSource;
     this->glFinish = &s_glFinish;
 
@@ -652,6 +653,13 @@
     return true;
 }
 
+void GL2Encoder::s_glShaderBinary(void *self, GLsizei n, const GLuint *shaders, GLenum binaryformat, const void* binary, GLsizei length)
+{
+    GL2Encoder* ctx = (GL2Encoder*)self;
+    // Shader binaries are not supported: no binaryformat is accepted, so report GL_INVALID_ENUM.
+    SET_ERROR_IF(1, GL_INVALID_ENUM);
+}
+
 void GL2Encoder::s_glShaderSource(void *self, GLuint shader, GLsizei count, const GLchar * const *string, const GLint *length)
 {
     GL2Encoder* ctx = (GL2Encoder*)self;