Timothy B. Terriberry | 972a34e | 2013-05-19 17:11:17 -0700 | [diff] [blame] | 1 | /* Copyright (C) 2013 Xiph.Org Foundation and contributors */ |
| 2 | /* |
| 3 | Redistribution and use in source and binary forms, with or without |
| 4 | modification, are permitted provided that the following conditions |
| 5 | are met: |
| 6 | |
| 7 | - Redistributions of source code must retain the above copyright |
| 8 | notice, this list of conditions and the following disclaimer. |
| 9 | |
| 10 | - Redistributions in binary form must reproduce the above copyright |
| 11 | notice, this list of conditions and the following disclaimer in the |
| 12 | documentation and/or other materials provided with the distribution. |
| 13 | |
| 14 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS |
| 15 | ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT |
| 16 | LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR |
| 17 | A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER |
| 18 | OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, |
| 19 | EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, |
| 20 | PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR |
| 21 | PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF |
| 22 | LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING |
| 23 | NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS |
| 24 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. |
| 25 | */ |
| 26 | |
| 27 | #ifndef FIXED_ARMv4_H |
| 28 | #define FIXED_ARMv4_H |
| 29 | |
| 30 | /** 16x32 multiplication, followed by a 16-bit shift right. Results fits in 32 bits */ |
| 31 | #undef MULT16_32_Q16 |
Gregory Maxwell | 7830cf1 | 2013-10-17 15:56:52 -0700 | [diff] [blame] | 32 | static OPUS_INLINE opus_val32 MULT16_32_Q16_armv4(opus_val16 a, opus_val32 b) |
Timothy B. Terriberry | 972a34e | 2013-05-19 17:11:17 -0700 | [diff] [blame] | 33 | { |
| 34 | unsigned rd_lo; |
| 35 | int rd_hi; |
| 36 | __asm__( |
| 37 | "#MULT16_32_Q16\n\t" |
| 38 | "smull %0, %1, %2, %3\n\t" |
Timothy B. Terriberry | b518b56 | 2013-05-20 15:29:04 -0700 | [diff] [blame] | 39 | : "=&r"(rd_lo), "=&r"(rd_hi) |
| 40 | : "%r"(b),"r"(a<<16) |
Timothy B. Terriberry | 972a34e | 2013-05-19 17:11:17 -0700 | [diff] [blame] | 41 | ); |
| 42 | return rd_hi; |
| 43 | } |
| 44 | #define MULT16_32_Q16(a, b) (MULT16_32_Q16_armv4(a, b)) |
| 45 | |
| 46 | |
| 47 | /** 16x32 multiplication, followed by a 15-bit shift right. Results fits in 32 bits */ |
| 48 | #undef MULT16_32_Q15 |
Gregory Maxwell | 7830cf1 | 2013-10-17 15:56:52 -0700 | [diff] [blame] | 49 | static OPUS_INLINE opus_val32 MULT16_32_Q15_armv4(opus_val16 a, opus_val32 b) |
Timothy B. Terriberry | 972a34e | 2013-05-19 17:11:17 -0700 | [diff] [blame] | 50 | { |
| 51 | unsigned rd_lo; |
| 52 | int rd_hi; |
| 53 | __asm__( |
| 54 | "#MULT16_32_Q15\n\t" |
| 55 | "smull %0, %1, %2, %3\n\t" |
Timothy B. Terriberry | b518b56 | 2013-05-20 15:29:04 -0700 | [diff] [blame] | 56 | : "=&r"(rd_lo), "=&r"(rd_hi) |
Timothy B. Terriberry | 972a34e | 2013-05-19 17:11:17 -0700 | [diff] [blame] | 57 | : "%r"(b), "r"(a<<16) |
| 58 | ); |
| 59 | /*We intentionally don't OR in the high bit of rd_lo for speed.*/ |
| 60 | return rd_hi<<1; |
| 61 | } |
| 62 | #define MULT16_32_Q15(a, b) (MULT16_32_Q15_armv4(a, b)) |
| 63 | |
| 64 | |
/** 16x32 multiply, followed by a 15-bit shift right and 32-bit add.
    b must fit in 31 bits.
    Result fits in 32 bits. */
#undef MAC16_32_Q15
/* Composes the ARM-optimized MULT16_32_Q15 above with ADD32. Each macro
   argument is expanded exactly once, so there is no multiple-evaluation
   hazard even for arguments with side effects. */
#define MAC16_32_Q15(c, a, b) ADD32(c, MULT16_32_Q15(a, b))
| 70 | |
Nils Wallménius | 70485d8 | 2013-05-22 23:05:07 +0200 | [diff] [blame] | 71 | |
/** 32x32 multiplication, followed by a 31-bit shift right. Results fits in 32 bits */
#undef MULT32_32_Q31
/* Plain C fallback (no ARMv4 single-instruction equivalent): widen both
   operands to 64 bits, multiply, then shift. NOTE(review): >>31 on a
   negative signed value is implementation-defined in C; this assumes the
   usual arithmetic right shift, as the rest of the fixed-point code does. */
#define MULT32_32_Q31(a,b) (opus_val32)((((opus_int64)(a)) * ((opus_int64)(b)))>>31)
| 75 | |
Timothy B. Terriberry | 972a34e | 2013-05-19 17:11:17 -0700 | [diff] [blame] | 76 | #endif |