Fix a bug in tile-based decoding

When the sample size is larger than 1, the calculation of the image's
height and width should round up, not down.
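
For illustration only, a minimal sketch of the round-up behavior this
message describes; the helper name compute_scaled_dim is hypothetical
and is not part of libjpeg:

    /* Round a dimension up when dividing by the sample size, so a
     * 5-pixel-wide image at sample_size 2 yields 3 output pixels,
     * not 2 (which truncating division would give). */
    static unsigned int compute_scaled_dim(unsigned int dim,
                                           unsigned int sample_size)
    {
      /* Ceiling division */
      return (dim + sample_size - 1) / sample_size;
    }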

Change-Id: I6c2ad1f630d1f8f9392594887e23f294ecde2352
diff --git a/jdcoefct.c b/jdcoefct.c
index 7a9f993..b10f9bc 100644
--- a/jdcoefct.c
+++ b/jdcoefct.c
@@ -272,10 +272,9 @@
   unsigned int MCUs_per_row = cinfo->MCUs_per_row;
 #ifdef ANDROID_TILE_BASED_DECODE
   if (cinfo->tile_decode) {
-    MCUs_per_row =
+    MCUs_per_row = jmin(MCUs_per_row,
         (cinfo->coef->column_right_boundary - cinfo->coef->column_left_boundary)
-        * cinfo->entropy->index->MCU_sample_size * cinfo->max_h_samp_factor;
-    MCUs_per_row = jmin(MCUs_per_row, cinfo->MCUs_per_row);
+        * cinfo->entropy->index->MCU_sample_size * cinfo->max_h_samp_factor);
   }
 #endif