/*
 * Copyright 2017 Google Inc.
 *
 * Use of this source code is governed by a BSD-style license that can be
 * found in the LICENSE file.
 */

#ifndef SkJumper_misc_DEFINED
#define SkJumper_misc_DEFINED

#include <string.h>  // for memcpy()

// Miscellany used by SkJumper_stages.cpp and SkJumper_stages_lowp.cpp.

// Every function in this file should be marked static and inline using SI.
#if defined(__clang__)
    #define SI __attribute__((always_inline)) static inline
#else
    #define SI static inline
#endif


template <typename T, typename P>
SI T unaligned_load(const P* p) {  // const void* would work too, but const P* helps ARMv7 codegen.
    T v;
    memcpy(&v, p, sizeof(v));
    return v;
}

template <typename T, typename P>
SI void unaligned_store(P* p, T v) {
    memcpy(p, &v, sizeof(v));
}

template <typename Dst, typename Src>
SI Dst bit_cast(const Src& src) {
    static_assert(sizeof(Dst) == sizeof(Src), "");
    return unaligned_load<Dst>(&src);
}
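
// For example (an illustrative sketch, not part of the original header):
// bit_cast() reinterprets a value's bits as a same-sized type, going through
// memcpy() rather than pointer punning to stay free of undefined behavior.
//     float    f = 1.0f;
//     uint32_t u = bit_cast<uint32_t>(f);    // u == 0x3f800000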

template <typename Dst, typename Src>
SI Dst widen_cast(const Src& src) {
    static_assert(sizeof(Dst) > sizeof(Src), "");
    Dst dst;
    memcpy(&dst, &src, sizeof(Src));
    return dst;
}
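
// A hedged sketch of how widen_cast() might be used: lift a small value into
// the low bytes of a wider register type, leaving the remaining bytes
// uninitialized (the caller must not read them).
//     uint32_t rgba = 0xffeeddcc;
//     __m128i  v    = widen_cast<__m128i>(rgba);   // low 4 bytes hold rgba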

// Our program is an array of void*, either
//   - 1 void* per stage with no context pointer, the next stage;
//   - 2 void* per stage with a context pointer, first the context pointer, then the next stage.
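// As an illustrative sketch (stage names hypothetical), a program mixing the
// two kinds of stage might be laid out as:
//     void* program[] = { (void*)stage_a,        // stage_a has a context...
//                         (void*)stage_a_ctx,    // ...which it loads first,
//                         (void*)stage_b };      // stage_b needs no context.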

// load_and_inc() steps the program forward by 1 void*, returning that pointer.
SI void* load_and_inc(void**& program) {
#if defined(__GNUC__) && defined(__x86_64__)
    // If program is in %rsi (we try to make this likely) then this is a single instruction.
    void* rax;
    asm("lodsq" : "=a"(rax), "+S"(program));  // Write-only %rax, read-write %rsi.
    return rax;
#else
    // On ARM *program++ compiles into pretty ideal code without any handholding.
    return *program++;
#endif
}
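
// A hedged usage sketch (StageFn and its signature are hypothetical; the real
// stage signature carries more arguments): each stage pulls the next stage's
// function pointer off the program and tail-calls it.
//     using StageFn = void(void** program /*, ...pipeline state... */);
//     auto next = (StageFn*)load_and_inc(program);
//     next(program /*, ... */);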

// Lazily resolved on first cast.  Does nothing if cast to Ctx::None.
struct Ctx {
    struct None {};

    void* ptr;
    void**& program;

    explicit Ctx(void**& p) : ptr(nullptr), program(p) {}

    template <typename T>
    operator T*() {
        if (!ptr) { ptr = load_and_inc(program); }
        return (T*)ptr;
    }
    operator None() { return None{}; }
};
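
// A hedged usage sketch: a stage that wants a context converts its Ctx to the
// pointer type it needs, consuming one void* from the program:
//     const float* matrix = Ctx{program};   // pulls one void* off the program
// while a context-free stage converts to Ctx::None and consumes nothing:
//     Ctx::None none = Ctx{program};        // no-op; program is untouched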

#endif//SkJumper_misc_DEFINED