// Copyright 2020 Google LLC
//
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree.
#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#include <xnnpack/common.h>  // XNN_LIKELY / XNN_UNLIKELY
#include <xnnpack/fill.h>

// Fills `rows` rows of `channels` bytes each with the repeating 4-byte
// `fill_pattern`. Consecutive rows start `output_stride` bytes apart; the
// `output_stride - channels` padding bytes at the end of each row are left
// untouched.
void xnn_xx_fill_ukernel__scalar_x16(
    size_t rows,
    size_t channels,
    void* output,
    size_t output_stride,
    const uint32_t fill_pattern)
{
  assert(rows != 0);
  assert(channels != 0);

  // Number of padding bytes to skip at the end of each row.
  const size_t output_increment = output_stride - channels;
  do {
    uint32_t vfill_pattern = fill_pattern;
    size_t c = channels;
    // Main loop: write the 4-byte pattern four times per iteration (16 bytes).
    for (; c >= 16 * sizeof(uint8_t); c -= 16 * sizeof(uint8_t)) {
      ((uint32_t*) output)[0] = vfill_pattern;
      ((uint32_t*) output)[1] = vfill_pattern;
      ((uint32_t*) output)[2] = vfill_pattern;
      ((uint32_t*) output)[3] = vfill_pattern;
      output = ((uint8_t*) output + 16);
    }
    // Remainder: handle the final 8-, 4-, 2-, and 1-byte tails of the row.
    if XNN_UNLIKELY(c != 0) {
      if XNN_LIKELY(c & (8 * sizeof(uint8_t))) {
        ((uint32_t*) output)[0] = vfill_pattern;
        ((uint32_t*) output)[1] = vfill_pattern;
        output = ((uint8_t*) output + 8);
      }
      if XNN_LIKELY(c & (4 * sizeof(uint8_t))) {
        *((uint32_t*) output) = vfill_pattern;
        output = ((uint8_t*) output + 4);
      }
      if XNN_LIKELY(c & (2 * sizeof(uint8_t))) {
        // Write the low 16 bits, then shift so a trailing single byte
        // continues the pattern sequence.
        *((uint16_t*) output) = (uint16_t) vfill_pattern;
        vfill_pattern >>= 16;
        output = ((uint8_t*) output + 2);
      }
      if XNN_LIKELY(c & (1 * sizeof(uint8_t))) {
        *((uint8_t*) output) = (uint8_t) vfill_pattern;
        output = ((uint8_t*) output + 1);
      }
    }
    // Skip the row padding to reach the start of the next row.
    output = (void*) ((uintptr_t) output + output_increment);
  } while (--rows != 0);
}
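
// Usage sketch (illustrative only; the buffer shape, row stride, and fill
// value below are assumptions for this example, not part of XNNPACK): fill
// 3 rows of 20 bytes each, laid out with a 24-byte row stride, with 0xA5 in
// every byte. The 4 trailing padding bytes of each row are left untouched.
//
//   uint8_t buffer[3 * 24];
//   const uint32_t pattern = UINT32_C(0xA5A5A5A5);
//   xnn_xx_fill_ukernel__scalar_x16(
//     /*rows=*/3,
//     /*channels=*/20 * sizeof(uint8_t),
//     buffer,
//     /*output_stride=*/24 * sizeof(uint8_t),
//     pattern);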