Fix a regression in xmlGetDocCompressMode()

The switch to xzlib had the consequence that the compression
level of the input was no longer recorded in ctxt->input->buf,
so the parser compression flag was left at -1 and propagated
to the resulting document.
Fix the I/O layer to perform compression detection in xzlib,
then carry it in the input buffer and the resulting document.

This should fix
  https://lsbbugs.linuxfoundation.org/show_bug.cgi?id=3456
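
For illustration, the regression is visible through the public API; a
minimal sketch, assuming a hypothetical xz-compressed file doc.xml.xz
(the file name is a placeholder, not part of this commit):

    #include <stdio.h>
    #include <libxml/parser.h>
    #include <libxml/tree.h>

    int main(void) {
        /* doc.xml.xz is a hypothetical xz-compressed document. */
        xmlDocPtr doc = xmlReadFile("doc.xml.xz", NULL, 0);
        if (doc == NULL)
            return 1;
        /* Before this fix, xz input left the mode at -1; with it,
         * the detected compression status is reported. */
        printf("compress mode: %d\n", xmlGetDocCompressMode(doc));
        xmlFreeDoc(doc);
        xmlCleanupParser();
        return 0;
    }
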
diff --git a/xmlIO.c b/xmlIO.c
index 847cb7e..fc4e111 100644
--- a/xmlIO.c
+++ b/xmlIO.c
@@ -2669,6 +2669,12 @@
 #endif
 	}
 #endif
+#ifdef HAVE_LZMA_H
+	if ((xmlInputCallbackTable[i].opencallback == xmlXzfileOpen) &&
+		(strcmp(URI, "-") != 0)) {
+            ret->compressed = __libxml2_xzcompressed(context);
+	}
+#endif
     }
     else
       xmlInputCallbackTable[i].closecallback (context);
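
The hunk above runs in the filename-based input-buffer creation path and
records the xz detection result on the buffer at open time. One way to
observe the flag at the input-buffer level, as a hedged sketch (the file
name is again a placeholder; the read call exercises the lazy re-check
added in the next hunk):

    #include <stdio.h>
    #include <libxml/xmlIO.h>

    int main(void) {
        /* data.xml.xz is a hypothetical xz-compressed input. */
        xmlParserInputBufferPtr buf =
            xmlParserInputBufferCreateFilename("data.xml.xz",
                                               XML_CHAR_ENCODING_NONE);
        if (buf == NULL)
            return 1;
        /* Pull some data; the next hunk re-checks detection on read
         * in case it could not be established at open time. */
        xmlParserInputBufferRead(buf, 4);
        printf("compressed = %d\n", buf->compressed);
        xmlFreeParserInputBuffer(buf);
        return 0;
    }
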
@@ -3325,6 +3331,17 @@
     if (res < 0) {
 	return(-1);
     }
+
+    /*
+     * try to establish compressed status of input if not done already
+     */
+    if (in->compressed == -1) {
+#ifdef HAVE_LZMA_H
+	if (in->readcallback == xmlXzfileRead)
+            in->compressed = __libxml2_xzcompressed(in->context);
+#endif
+    }
+
     len = res;
     if (in->encoder != NULL) {
         unsigned int use;
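
The patch stops at the I/O layer; the final hop, copying the buffer's
flag into the document so xmlGetDocCompressMode() can report it, happens
on the parser side and is not shown here. A hedged sketch of that
hand-off (the exact placement in parser.c is an assumption):

    #include <libxml/parser.h>

    /* Sketch: after parsing, carry the detected status from the input
     * buffer into the document. The >= 0 guard keeps the -1 "unknown"
     * value from overwriting the document's compression field. */
    static void
    carry_compression(xmlParserCtxtPtr ctxt) {
        if ((ctxt->myDoc != NULL) && (ctxt->input != NULL) &&
            (ctxt->input->buf != NULL) &&
            (ctxt->input->buf->compressed >= 0))
            ctxt->myDoc->compression = ctxt->input->buf->compressed;
    }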