// Copyright 2010 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Entropy encoding (Huffman) utilities.

#include "./entropy_encode.h"

#include <stdint.h>
#include <algorithm>
#include <limits>
#include <vector>
#include <cstdlib>

#include "./histogram.h"

namespace brotli {

namespace {

struct HuffmanTree {
  HuffmanTree();
  HuffmanTree(int count, int16_t left, int16_t right)
      : total_count_(count),
        index_left_(left),
        index_right_or_value_(right) {
  }
  int total_count_;
  int16_t index_left_;
  int16_t index_right_or_value_;
};

HuffmanTree::HuffmanTree() {}

// Sort the root nodes, least popular first.
bool SortHuffmanTree(const HuffmanTree &v0, const HuffmanTree &v1) {
  if (v0.total_count_ == v1.total_count_) {
    return v0.index_right_or_value_ > v1.index_right_or_value_;
  }
  return v0.total_count_ < v1.total_count_;
}

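// Assigns a bit depth (code length) to every leaf by walking the tree
// recursively. Internal nodes have index_left_ >= 0; for leaves, index_left_
// is -1 and index_right_or_value_ holds the symbol value.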
void SetDepth(const HuffmanTree &p,
              HuffmanTree *pool,
              uint8_t *depth,
              int level) {
  if (p.index_left_ >= 0) {
    ++level;
    SetDepth(pool[p.index_left_], pool, depth, level);
    SetDepth(pool[p.index_right_or_value_], pool, depth, level);
  } else {
    depth[p.index_right_or_value_] = level;
  }
}

}  // namespace

// This function creates a Huffman tree.
//
// The catch here is that the tree cannot be arbitrarily deep.
// Brotli specifies a maximum depth of 15 bits for "code trees"
// and 7 bits for "code length code trees."
//
// count_limit is the value that is faked as the minimum population count;
// it is raised until the tree satisfies the maximum depth requirement.
//
// This algorithm does not perform well for very long data blocks,
// especially when population counts are larger than 2**tree_limit, but
// we do not plan to use it with extremely long blocks.
//
// See http://en.wikipedia.org/wiki/Huffman_coding
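//
// Example (hypothetical input):
//   int counts[4] = { 10, 1, 1, 1 };
//   uint8_t depth[4];
//   CreateHuffmanTree(counts, 4, 15, depth);
//   // depth is now { 1, 2, 3, 3 }; the two depth-3 slots may land on
//   // different tied symbols, but the multiset of lengths is optimal.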
void CreateHuffmanTree(const int *data,
                       const int length,
                       const int tree_limit,
                       uint8_t *depth) {
  // For block sizes below 64 kB, we never need to do a second iteration
  // of this loop. Probably all of our block sizes will be smaller than
  // that, so this loop is mostly of academic interest. If we actually
  // needed it, we would be better off with the Katajainen algorithm.
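  // Note: doubling count_limit makes the effective counts progressively more
  // uniform; in the limit every leaf has the same weight and the tree becomes
  // balanced, so the loop terminates for any alphabet whose balanced depth
  // fits in tree_limit.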
  for (int count_limit = 1; ; count_limit *= 2) {
    std::vector<HuffmanTree> tree;
    tree.reserve(2 * length + 1);

    for (int i = 0; i < length; ++i) {
      if (data[i]) {
        const int count = std::max(data[i], count_limit);
        tree.push_back(HuffmanTree(count, -1, i));
      }
    }

    const int n = tree.size();
    if (n == 1) {
      depth[tree[0].index_right_or_value_] = 1;  // Only one element.
      break;
    }

    std::sort(tree.begin(), tree.end(), SortHuffmanTree);

    // The nodes are:
    // [0, n): the sorted leaf nodes that we start with.
    // [n]: we add a sentinel here.
    // [n + 1, 2n): new parent nodes are added here, starting from
    //             (n+1). These are naturally in ascending order.
    // [2n]: we add a sentinel at the end as well.
    // There will be (2n+1) elements at the end.
    const HuffmanTree sentinel(std::numeric_limits<int>::max(), -1, -1);
    tree.push_back(sentinel);
    tree.push_back(sentinel);

    int i = 0;      // Points to the next leaf node.
    int j = n + 1;  // Points to the next non-leaf node.
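    // Classic two-queue Huffman construction: the sorted leaves are consumed
    // via i, the internal nodes (created in non-decreasing count order) via j,
    // and the max-count sentinels keep the comparisons below safe without
    // explicit bounds checks.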
    for (int k = n - 1; k > 0; --k) {
      int left, right;
      if (tree[i].total_count_ <= tree[j].total_count_) {
        left = i;
        ++i;
      } else {
        left = j;
        ++j;
      }
      if (tree[i].total_count_ <= tree[j].total_count_) {
        right = i;
        ++i;
      } else {
        right = j;
        ++j;
      }

      // The sentinel node becomes the parent node.
      int j_end = tree.size() - 1;
      tree[j_end].total_count_ =
          tree[left].total_count_ + tree[right].total_count_;
      tree[j_end].index_left_ = left;
      tree[j_end].index_right_or_value_ = right;

      // Add back the last sentinel node.
      tree.push_back(sentinel);
    }
    SetDepth(tree[2 * n - 1], &tree[0], depth, 0);

    // We need to pack the Huffman tree into tree_limit bits.
    // If it did not fit, raise the faked minimum count (count_limit)
    // and retry.
    if (*std::max_element(&depth[0], &depth[length]) <= tree_limit) {
      break;
    }
  }
}

void Reverse(uint8_t* v, int start, int end) {
  --end;
  while (start < end) {
    int tmp = v[start];
    v[start] = v[end];
    v[end] = tmp;
    ++start;
    --end;
  }
}

void WriteHuffmanTreeRepetitions(
    const int previous_value,
    const int value,
    int repetitions,
    uint8_t* tree,
    uint8_t* extra_bits,
    int* tree_size) {
  if (previous_value != value) {
    tree[*tree_size] = value;
    extra_bits[*tree_size] = 0;
    ++(*tree_size);
    --repetitions;
  }
  if (repetitions == 7) {
    tree[*tree_size] = value;
    extra_bits[*tree_size] = 0;
    ++(*tree_size);
    --repetitions;
  }
  if (repetitions < 3) {
    for (int i = 0; i < repetitions; ++i) {
      tree[*tree_size] = value;
      extra_bits[*tree_size] = 0;
      ++(*tree_size);
    }
  } else {
    repetitions -= 3;
    int start = *tree_size;
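    // Emit the remaining repeat count as a run of repeat symbols (16), each
    // carrying two extra bits of the count; the digits come out least
    // significant first and are reversed below so that the most significant
    // one is written first.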
    while (repetitions >= 0) {
      tree[*tree_size] = 16;
      extra_bits[*tree_size] = repetitions & 0x3;
      ++(*tree_size);
      repetitions >>= 2;
      --repetitions;
    }
    Reverse(tree, start, *tree_size);
    Reverse(extra_bits, start, *tree_size);
  }
}

void WriteHuffmanTreeRepetitionsZeros(
    int repetitions,
    uint8_t* tree,
    uint8_t* extra_bits,
    int* tree_size) {
  if (repetitions == 11) {
    tree[*tree_size] = 0;
    extra_bits[*tree_size] = 0;
    ++(*tree_size);
    --repetitions;
  }
  if (repetitions < 3) {
    for (int i = 0; i < repetitions; ++i) {
      tree[*tree_size] = 0;
      extra_bits[*tree_size] = 0;
      ++(*tree_size);
    }
  } else {
    repetitions -= 3;
    int start = *tree_size;
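    // Same scheme as in WriteHuffmanTreeRepetitions, but with the
    // repeat-zero symbol (17) and three extra bits per symbol.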
    while (repetitions >= 0) {
      tree[*tree_size] = 17;
      extra_bits[*tree_size] = repetitions & 0x7;
      ++(*tree_size);
      repetitions >>= 3;
      --repetitions;
    }
    Reverse(tree, start, *tree_size);
    Reverse(extra_bits, start, *tree_size);
  }
}

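// Adjusts the population counts in place so that the Huffman tree description
// later derived from them compresses better with the rle codes above.
// Returns 0 only if the temporary allocation fails, 1 otherwise.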
int OptimizeHuffmanCountsForRle(int length, int* counts) {
  int stride;
  int limit;
  int sum;
  uint8_t* good_for_rle;
  // Let's make the Huffman code more compatible with rle encoding.
  int i;
  for (; length >= 0; --length) {
    if (length == 0) {
      return 1;  // All zeros.
    }
    if (counts[length - 1] != 0) {
      // Now counts[0..length - 1] does not have trailing zeros.
      break;
    }
  }
  {
    int nonzeros = 0;
    int smallest_nonzero = 1 << 30;
    for (i = 0; i < length; ++i) {
      if (counts[i] != 0) {
        ++nonzeros;
        if (smallest_nonzero > counts[i]) {
          smallest_nonzero = counts[i];
        }
      }
    }
    if (nonzeros < 5) {
      // Small histogram will model it well.
      return 1;
    }
    int zeros = length - nonzeros;
    if (smallest_nonzero < 4) {
      if (zeros < 6) {
        for (i = 1; i < length - 1; ++i) {
          if (counts[i - 1] != 0 && counts[i] == 0 && counts[i + 1] != 0) {
            counts[i] = 1;
          }
        }
      }
    }
    if (nonzeros < 28) {
      return 1;
    }
  }
  // 2) Let's mark all population counts that already can be encoded
  // with an rle code.
  good_for_rle = (uint8_t*)calloc(length, 1);
  if (good_for_rle == NULL) {
    return 0;
  }
  {
    // Let's not spoil any of the existing good rle codes.
    // Mark any sequence of zeros of length 5 or more as good_for_rle.
    // Mark any sequence of non-zeros of length 7 or more as good_for_rle.
    int symbol = counts[0];
    int stride = 0;
    for (i = 0; i < length + 1; ++i) {
      if (i == length || counts[i] != symbol) {
        if ((symbol == 0 && stride >= 5) ||
            (symbol != 0 && stride >= 7)) {
          int k;
          for (k = 0; k < stride; ++k) {
            good_for_rle[i - k - 1] = 1;
          }
        }
        stride = 1;
        if (i != length) {
          symbol = counts[i];
        }
      } else {
        ++stride;
      }
    }
  }
  // 3) Let's replace those population counts that lead to more rle codes.
  // Math here is in 24.8 fixed point representation.
  const int streak_limit = 1240;
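  // In 24.8 fixed point, 1240 corresponds to roughly 4.84 counts of allowed
  // deviation from the running average before a streak is cut.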
  stride = 0;
  limit = 256 * (counts[0] + counts[1] + counts[2]) / 3 + 420;
  sum = 0;
  for (i = 0; i < length + 1; ++i) {
    if (i == length || good_for_rle[i] ||
        (i != 0 && good_for_rle[i - 1]) ||
        abs(256 * counts[i] - limit) >= streak_limit) {
      if (stride >= 4 || (stride >= 3 && sum == 0)) {
        int k;
        // The stride must end; collapse what we have, if we have enough (4).
        int count = (sum + stride / 2) / stride;
        if (count < 1) {
          count = 1;
        }
        if (sum == 0) {
          // Don't upgrade an all-zeros stride to ones.
          count = 0;
        }
        for (k = 0; k < stride; ++k) {
          // We don't want to change the value at counts[i],
          // which already belongs to the next stride. Thus the - 1.
          counts[i - k - 1] = count;
        }
      }
      stride = 0;
      sum = 0;
      if (i < length - 2) {
        // All interesting strides have a count of at least 4,
        // at least when non-zero.
        limit = 256 * (counts[i] + counts[i + 1] + counts[i + 2]) / 3 + 420;
      } else if (i < length) {
        limit = 256 * counts[i];
      } else {
        limit = 0;
      }
    }
    ++stride;
    if (i != length) {
      sum += counts[i];
      if (stride >= 4) {
        limit = (256 * sum + stride / 2) / stride;
      }
      if (stride == 4) {
        limit += 120;
      }
    }
  }
  free(good_for_rle);
  return 1;
}

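// Decides, based on how many sufficiently long runs exist in 'depth'
// (ignoring trailing zeros), whether run-length coding is likely to pay off
// separately for the zero and for the non-zero code lengths.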
static void DecideOverRleUse(const uint8_t* depth, const int length,
                             bool *use_rle_for_non_zero,
                             bool *use_rle_for_zero) {
  int total_reps_zero = 0;
  int total_reps_non_zero = 0;
  int count_reps_zero = 0;
  int count_reps_non_zero = 0;
  int new_length = length;
  for (int i = 0; i < length; ++i) {
    if (depth[length - i - 1] == 0) {
      --new_length;
    } else {
      break;
    }
  }
  for (uint32_t i = 0; i < new_length;) {
    const int value = depth[i];
    int reps = 1;
    // Find rle coding for longer codes.
    // Shorter codes seem not to benefit from rle.
    for (uint32_t k = i + 1; k < new_length && depth[k] == value; ++k) {
      ++reps;
    }
    if (reps >= 3 && value == 0) {
      total_reps_zero += reps;
      ++count_reps_zero;
    }
    if (reps >= 4 && value != 0) {
      total_reps_non_zero += reps;
      ++count_reps_non_zero;
    }
    i += reps;
  }
  total_reps_non_zero -= count_reps_non_zero * 2;
  total_reps_zero -= count_reps_zero * 2;
  *use_rle_for_non_zero = total_reps_non_zero > 2;
  *use_rle_for_zero = total_reps_zero > 2;
}

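// Converts the code length array 'depth' into the symbol / extra-bits stream
// that describes the Huffman tree, using the repeat codes 16 and 17 where
// DecideOverRleUse found them worthwhile (and only for inputs longer than
// 50 symbols), and drops trailing zeros from the output.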
void WriteHuffmanTree(const uint8_t* depth, const int length,
                      uint8_t* tree,
                      uint8_t* extra_bits_data,
                      int* huffman_tree_size) {
  int previous_value = 8;

  // First gather statistics on whether it is a good idea to use rle.
  bool use_rle_for_non_zero;
  bool use_rle_for_zero;
  DecideOverRleUse(depth, length, &use_rle_for_non_zero, &use_rle_for_zero);

  // Actual rle coding.
  for (uint32_t i = 0; i < length;) {
    const int value = depth[i];
    int reps = 1;
    if (length > 50) {
      // Find rle coding for longer codes.
      // Shorter codes seem not to benefit from rle.
      if ((value != 0 && use_rle_for_non_zero) ||
          (value == 0 && use_rle_for_zero)) {
        for (uint32_t k = i + 1; k < length && depth[k] == value; ++k) {
          ++reps;
        }
      }
    }
    if (value == 0) {
      WriteHuffmanTreeRepetitionsZeros(reps, tree, extra_bits_data,
                                       huffman_tree_size);
    } else {
      WriteHuffmanTreeRepetitions(previous_value, value, reps, tree,
                                  extra_bits_data, huffman_tree_size);
      previous_value = value;
    }
    i += reps;
  }
  // Throw away trailing zeros.
  for (; *huffman_tree_size > 0; --(*huffman_tree_size)) {
    if (tree[*huffman_tree_size - 1] > 0 && tree[*huffman_tree_size - 1] < 17) {
      break;
    }
  }
}

namespace {

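// Returns the lowest 'num_bits' bits of 'bits' in reversed bit order,
// using a 4-bit lookup table.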
uint16_t ReverseBits(int num_bits, uint16_t bits) {
  static const size_t kLut[16] = {  // Pre-reversed 4-bit values.
    0x0, 0x8, 0x4, 0xc, 0x2, 0xa, 0x6, 0xe,
    0x1, 0x9, 0x5, 0xd, 0x3, 0xb, 0x7, 0xf
  };
  size_t retval = kLut[bits & 0xf];
  for (int i = 4; i < num_bits; i += 4) {
    retval <<= 4;
    bits >>= 4;
    retval |= kLut[bits & 0xf];
  }
  retval >>= (-num_bits & 0x3);
  return retval;
}

}  // namespace

void ConvertBitDepthsToSymbols(const uint8_t *depth, int len, uint16_t *bits) {
  // In Brotli, all bit depths are [1..15]
  // 0 bit depth means that the symbol does not exist.
  const int kMaxBits = 16;  // 0..15 are values for bits
  uint16_t bl_count[kMaxBits] = { 0 };
  {
    for (int i = 0; i < len; ++i) {
      ++bl_count[depth[i]];
    }
    bl_count[0] = 0;
  }
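  // Compute the first canonical code for each bit length; codes of the same
  // length are then assigned consecutively below. This is the same
  // construction as in DEFLATE (RFC 1951, section 3.2.2).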
  uint16_t next_code[kMaxBits];
  next_code[0] = 0;
  {
    int code = 0;
    for (int bits = 1; bits < kMaxBits; ++bits) {
      code = (code + bl_count[bits - 1]) << 1;
      next_code[bits] = code;
    }
  }
  for (int i = 0; i < len; ++i) {
    if (depth[i]) {
      bits[i] = ReverseBits(depth[i], next_code[depth[i]]++);
    }
  }
}

}  // namespace brotli