Lines Matching refs:op1 (CMSIS-Core GCC-style SIMD/DSP intrinsics)
349 __STATIC_FORCEINLINE uint32_t __ROR(uint32_t op1, uint32_t op2) in __ROR() argument
354 return op1; in __ROR()
356 return (op1 >> op2) | (op1 << (32U - op2)); in __ROR()
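The two matches above are the portable fallback body of __ROR: rotate op1 right by op2 bits, with op2 reduced modulo 32 first. A minimal standalone sketch of the same logic (the name ror32 is illustrative, not part of CMSIS):

    #include <stdint.h>

    /* Rotate op1 right by op2 bits. op2 is reduced mod 32, and the
       early return for 0 avoids the undefined C shift by 32 that
       (op1 << (32U - op2)) would otherwise perform. */
    static inline uint32_t ror32(uint32_t op1, uint32_t op2)
    {
        op2 %= 32U;
        if (op2 == 0U)
        {
            return op1;
        }
        return (op1 >> op2) | (op1 << (32U - op2));
    }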
1646 __STATIC_FORCEINLINE uint32_t __SADD8(uint32_t op1, uint32_t op2) in __SADD8() argument
1650 __ASM volatile ("sadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SADD8()
1654 __STATIC_FORCEINLINE uint32_t __QADD8(uint32_t op1, uint32_t op2) in __QADD8() argument
1658 __ASM ("qadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QADD8()
1662 __STATIC_FORCEINLINE uint32_t __SHADD8(uint32_t op1, uint32_t op2) in __SHADD8() argument
1666 __ASM ("shadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHADD8()
1670 __STATIC_FORCEINLINE uint32_t __UADD8(uint32_t op1, uint32_t op2) in __UADD8() argument
1674 __ASM volatile ("uadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UADD8()
1678 __STATIC_FORCEINLINE uint32_t __UQADD8(uint32_t op1, uint32_t op2) in __UQADD8() argument
1682 __ASM ("uqadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQADD8()
1686 __STATIC_FORCEINLINE uint32_t __UHADD8(uint32_t op1, uint32_t op2) in __UHADD8() argument
1690 __ASM ("uhadd8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHADD8()
1695 __STATIC_FORCEINLINE uint32_t __SSUB8(uint32_t op1, uint32_t op2) in __SSUB8() argument
1699 __ASM volatile ("ssub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SSUB8()
1703 __STATIC_FORCEINLINE uint32_t __QSUB8(uint32_t op1, uint32_t op2) in __QSUB8() argument
1707 __ASM ("qsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSUB8()
1711 __STATIC_FORCEINLINE uint32_t __SHSUB8(uint32_t op1, uint32_t op2) in __SHSUB8() argument
1715 __ASM ("shsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHSUB8()
1719 __STATIC_FORCEINLINE uint32_t __USUB8(uint32_t op1, uint32_t op2) in __USUB8() argument
1723 __ASM volatile ("usub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USUB8()
1727 __STATIC_FORCEINLINE uint32_t __UQSUB8(uint32_t op1, uint32_t op2) in __UQSUB8() argument
1731 __ASM ("uqsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQSUB8()
1735 __STATIC_FORCEINLINE uint32_t __UHSUB8(uint32_t op1, uint32_t op2) in __UHSUB8() argument
1739 __ASM ("uhsub8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHSUB8()
1744 __STATIC_FORCEINLINE uint32_t __SADD16(uint32_t op1, uint32_t op2) in __SADD16() argument
1748 __ASM volatile ("sadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SADD16()
1752 __STATIC_FORCEINLINE uint32_t __QADD16(uint32_t op1, uint32_t op2) in __QADD16() argument
1756 __ASM ("qadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QADD16()
1760 __STATIC_FORCEINLINE uint32_t __SHADD16(uint32_t op1, uint32_t op2) in __SHADD16() argument
1764 __ASM ("shadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHADD16()
1768 __STATIC_FORCEINLINE uint32_t __UADD16(uint32_t op1, uint32_t op2) in __UADD16() argument
1772 __ASM volatile ("uadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UADD16()
1776 __STATIC_FORCEINLINE uint32_t __UQADD16(uint32_t op1, uint32_t op2) in __UQADD16() argument
1780 __ASM ("uqadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQADD16()
1784 __STATIC_FORCEINLINE uint32_t __UHADD16(uint32_t op1, uint32_t op2) in __UHADD16() argument
1788 __ASM ("uhadd16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHADD16()
1792 __STATIC_FORCEINLINE uint32_t __SSUB16(uint32_t op1, uint32_t op2) in __SSUB16() argument
1796 __ASM volatile ("ssub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SSUB16()
1800 __STATIC_FORCEINLINE uint32_t __QSUB16(uint32_t op1, uint32_t op2) in __QSUB16() argument
1804 __ASM ("qsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSUB16()
1808 __STATIC_FORCEINLINE uint32_t __SHSUB16(uint32_t op1, uint32_t op2) in __SHSUB16() argument
1812 __ASM ("shsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHSUB16()
1816 __STATIC_FORCEINLINE uint32_t __USUB16(uint32_t op1, uint32_t op2) in __USUB16() argument
1820 __ASM volatile ("usub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USUB16()
1824 __STATIC_FORCEINLINE uint32_t __UQSUB16(uint32_t op1, uint32_t op2) in __UQSUB16() argument
1828 __ASM ("uqsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQSUB16()
1832 __STATIC_FORCEINLINE uint32_t __UHSUB16(uint32_t op1, uint32_t op2) in __UHSUB16() argument
1836 __ASM ("uhsub16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHSUB16()
1840 __STATIC_FORCEINLINE uint32_t __SASX(uint32_t op1, uint32_t op2) in __SASX() argument
1844 __ASM volatile ("sasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SASX()
1848 __STATIC_FORCEINLINE uint32_t __QASX(uint32_t op1, uint32_t op2) in __QASX() argument
1852 __ASM ("qasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QASX()
1856 __STATIC_FORCEINLINE uint32_t __SHASX(uint32_t op1, uint32_t op2) in __SHASX() argument
1860 __ASM ("shasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHASX()
1864 __STATIC_FORCEINLINE uint32_t __UASX(uint32_t op1, uint32_t op2) in __UASX() argument
1868 __ASM volatile ("uasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UASX()
1872 __STATIC_FORCEINLINE uint32_t __UQASX(uint32_t op1, uint32_t op2) in __UQASX() argument
1876 __ASM ("uqasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQASX()
1880 __STATIC_FORCEINLINE uint32_t __UHASX(uint32_t op1, uint32_t op2) in __UHASX() argument
1884 __ASM ("uhasx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHASX()
1888 __STATIC_FORCEINLINE uint32_t __SSAX(uint32_t op1, uint32_t op2) in __SSAX() argument
1892 __ASM volatile ("ssax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SSAX()
1896 __STATIC_FORCEINLINE uint32_t __QSAX(uint32_t op1, uint32_t op2) in __QSAX() argument
1900 __ASM ("qsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSAX()
1904 __STATIC_FORCEINLINE uint32_t __SHSAX(uint32_t op1, uint32_t op2) in __SHSAX() argument
1908 __ASM ("shsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SHSAX()
1912 __STATIC_FORCEINLINE uint32_t __USAX(uint32_t op1, uint32_t op2) in __USAX() argument
1916 __ASM volatile ("usax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USAX()
1920 __STATIC_FORCEINLINE uint32_t __UQSAX(uint32_t op1, uint32_t op2) in __UQSAX() argument
1924 __ASM ("uqsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UQSAX()
1928 __STATIC_FORCEINLINE uint32_t __UHSAX(uint32_t op1, uint32_t op2) in __UHSAX() argument
1932 __ASM ("uhsax %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UHSAX()
1936 __STATIC_FORCEINLINE uint32_t __USAD8(uint32_t op1, uint32_t op2) in __USAD8() argument
1940 __ASM ("usad8 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __USAD8()
1944 __STATIC_FORCEINLINE uint32_t __USADA8(uint32_t op1, uint32_t op2, uint32_t op3) in __USADA8() argument
1948 __ASM ("usada8 %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __USADA8()
1968 __STATIC_FORCEINLINE uint32_t __UXTB16(uint32_t op1) in __UXTB16() argument
1972 __ASM ("uxtb16 %0, %1" : "=r" (result) : "r" (op1)); in __UXTB16()
1976 __STATIC_FORCEINLINE uint32_t __UXTAB16(uint32_t op1, uint32_t op2) in __UXTAB16() argument
1980 __ASM ("uxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __UXTAB16()
1984 __STATIC_FORCEINLINE uint32_t __SXTB16(uint32_t op1) in __SXTB16() argument
1988 __ASM ("sxtb16 %0, %1" : "=r" (result) : "r" (op1)); in __SXTB16()
1992 __STATIC_FORCEINLINE uint32_t __SXTB16_RORn(uint32_t op1, uint32_t rotate) in __SXTB16_RORn() argument
1996 __ASM volatile ("sxtb16 %0, %1, ROR %2" : "=r" (result) : "r" (op1), "i" (rotate) ); in __SXTB16_RORn()
1998 result = __SXTB16(__ROR(op1, rotate)) ; in __SXTB16_RORn()
2003 __STATIC_FORCEINLINE uint32_t __SXTAB16(uint32_t op1, uint32_t op2) in __SXTAB16() argument
2007 __ASM ("sxtab16 %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SXTAB16()
2011 __STATIC_FORCEINLINE uint32_t __SXTAB16_RORn(uint32_t op1, uint32_t op2, uint32_t rotate) in __SXTAB16_RORn() argument
2015 __ASM volatile ("sxtab16 %0, %1, %2, ROR %3" : "=r" (result) : "r" (op1) , "r" (op2) , "i" (rotate) ); in __SXTAB16_RORn()
2017 result = __SXTAB16(op1, __ROR(op2, rotate)); in __SXTAB16_RORn()
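The extend group unpacks bytes 0 and 2 of the source into the two halfword lanes (uxtb16 zero-extends, sxtb16 sign-extends; the xtab16 forms add the unpacked pair to another packed pair). The _RORn variants above rotate the source first so either byte pair can be selected; as lines 1998 and 2017 show, they fall back to composing __ROR with the plain intrinsic when the rotate amount is not a compile-time constant. A portable model of sxtb16:

    #include <stdint.h>

    /* Model of sxtb16: sign-extend byte 0 into the low halfword and
       byte 2 into the high halfword. */
    static uint32_t sxtb16_model(uint32_t op1)
    {
        uint16_t lo = (uint16_t)(int16_t)(int8_t)(op1 & 0xFFU);
        uint16_t hi = (uint16_t)(int16_t)(int8_t)((op1 >> 16) & 0xFFU);
        return ((uint32_t)hi << 16) | lo;
    }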
2023 __STATIC_FORCEINLINE uint32_t __SMUAD (uint32_t op1, uint32_t op2) in __SMUAD() argument
2027 __ASM volatile ("smuad %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUAD()
2031 __STATIC_FORCEINLINE uint32_t __SMUADX (uint32_t op1, uint32_t op2) in __SMUADX() argument
2035 __ASM volatile ("smuadx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUADX()
2039 __STATIC_FORCEINLINE uint32_t __SMLAD (uint32_t op1, uint32_t op2, uint32_t op3) in __SMLAD() argument
2043 __ASM volatile ("smlad %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLAD()
2047 __STATIC_FORCEINLINE uint32_t __SMLADX (uint32_t op1, uint32_t op2, uint32_t op3) in __SMLADX() argument
2051 __ASM volatile ("smladx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLADX()
2055 __STATIC_FORCEINLINE uint64_t __SMLALD (uint32_t op1, uint32_t op2, uint64_t acc) in __SMLALD() argument
2064 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); in __SMLALD()
2066 __ASM volatile ("smlald %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); in __SMLALD()
2072 __STATIC_FORCEINLINE uint64_t __SMLALDX (uint32_t op1, uint32_t op2, uint64_t acc) in __SMLALDX() argument
2081 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); in __SMLALDX()
2083 __ASM volatile ("smlaldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); in __SMLALDX()
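smlald widens the accumulator to 64 bits, which is why each of these functions carries two asm statements: in the CMSIS source the uint64_t accumulator is mapped onto a two-word union and an __ARMEB__ preprocessor check picks the little- or big-endian word order; the listing shows only the matching lines, not the #if between them. The smlsld pair further down is identical except that the lane products are subtracted rather than added. A portable model of the arithmetic:

    #include <stdint.h>

    /* Model of smlald: both signed 16x16 lane products are summed
       and added to the 64-bit accumulator. */
    static uint64_t smlald_model(uint32_t op1, uint32_t op2, uint64_t acc)
    {
        int32_t p0 = (int32_t)(int16_t)(op1 & 0xFFFFU) * (int32_t)(int16_t)(op2 & 0xFFFFU);
        int32_t p1 = (int32_t)(int16_t)(op1 >> 16) * (int32_t)(int16_t)(op2 >> 16);
        return acc + (uint64_t)((int64_t)p0 + (int64_t)p1);
    }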
2089 __STATIC_FORCEINLINE uint32_t __SMUSD (uint32_t op1, uint32_t op2) in __SMUSD() argument
2093 __ASM volatile ("smusd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUSD()
2097 __STATIC_FORCEINLINE uint32_t __SMUSDX (uint32_t op1, uint32_t op2) in __SMUSDX() argument
2101 __ASM volatile ("smusdx %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SMUSDX()
2105 __STATIC_FORCEINLINE uint32_t __SMLSD (uint32_t op1, uint32_t op2, uint32_t op3) in __SMLSD() argument
2109 __ASM volatile ("smlsd %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLSD()
2113 __STATIC_FORCEINLINE uint32_t __SMLSDX (uint32_t op1, uint32_t op2, uint32_t op3) in __SMLSDX() argument
2117 __ASM volatile ("smlsdx %0, %1, %2, %3" : "=r" (result) : "r" (op1), "r" (op2), "r" (op3) ); in __SMLSDX()
2121 __STATIC_FORCEINLINE uint64_t __SMLSLD (uint32_t op1, uint32_t op2, uint64_t acc) in __SMLSLD() argument
2130 __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); in __SMLSLD()
2132 __ASM volatile ("smlsld %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); in __SMLSLD()
2138 __STATIC_FORCEINLINE uint64_t __SMLSLDX (uint32_t op1, uint32_t op2, uint64_t acc) in __SMLSLDX() argument
2147 __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[0]), "=r" (llr.w32[1]): "r" (op1), "r" (op2) , "0" (llr.w32[0]), "1" (llr.w32[1]) ); in __SMLSLDX()
2149 __ASM volatile ("smlsldx %0, %1, %2, %3" : "=r" (llr.w32[1]), "=r" (llr.w32[0]): "r" (op1), "r" (op2) , "0" (llr.w32[1]), "1" (llr.w32[0]) ); in __SMLSLDX()
2155 __STATIC_FORCEINLINE uint32_t __SEL (uint32_t op1, uint32_t op2) in __SEL() argument
2159 __ASM volatile ("sel %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __SEL()
2163 __STATIC_FORCEINLINE int32_t __QADD( int32_t op1, int32_t op2) in __QADD() argument
2167 __ASM volatile ("qadd %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QADD()
2171 __STATIC_FORCEINLINE int32_t __QSUB( int32_t op1, int32_t op2) in __QSUB() argument
2175 __ASM volatile ("qsub %0, %1, %2" : "=r" (result) : "r" (op1), "r" (op2) ); in __QSUB()
2200 __STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1, int32_t op2, int32_t op3) in __SMMLA() argument
2204 __ASM ("smmla %0, %1, %2, %3" : "=r" (result): "r" (op1), "r" (op2), "r" (op3) ); in __SMMLA()