@@ -428,18 +428,49 @@ __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
428428#if (defined (__ARM_FEATURE_DSP ) && (__ARM_FEATURE_DSP == 1 ))
429429
430430#define __SADD8 __builtin_arm_sadd8
431- #define __SADD16 __builtin_arm_sadd16
432431#define __QADD8 __builtin_arm_qadd8
432+ #define __SHADD8 __builtin_arm_shadd8
433+ #define __UADD8 __builtin_arm_uadd8
434+ #define __UQADD8 __builtin_arm_uqadd8
435+ #define __UHADD8 __builtin_arm_uhadd8
436+ #define __SSUB8 __builtin_arm_ssub8
433437#define __QSUB8 __builtin_arm_qsub8
438+ #define __SHSUB8 __builtin_arm_shsub8
439+ #define __USUB8 __builtin_arm_usub8
440+ #define __UQSUB8 __builtin_arm_uqsub8
441+ #define __UHSUB8 __builtin_arm_uhsub8
442+ #define __SADD16 __builtin_arm_sadd16
434443#define __QADD16 __builtin_arm_qadd16
435444#define __SHADD16 __builtin_arm_shadd16
445+ #define __UADD16 __builtin_arm_uadd16
446+ #define __UQADD16 __builtin_arm_uqadd16
447+ #define __UHADD16 __builtin_arm_uhadd16
448+ #define __SSUB16 __builtin_arm_ssub16
436449#define __QSUB16 __builtin_arm_qsub16
437450#define __SHSUB16 __builtin_arm_shsub16
451+ #define __USUB16 __builtin_arm_usub16
452+ #define __UQSUB16 __builtin_arm_uqsub16
453+ #define __UHSUB16 __builtin_arm_uhsub16
454+ #define __SASX __builtin_arm_sasx
438455#define __QASX __builtin_arm_qasx
439456#define __SHASX __builtin_arm_shasx
457+ #define __UASX __builtin_arm_uasx
458+ #define __UQASX __builtin_arm_uqasx
459+ #define __UHASX __builtin_arm_uhasx
460+ #define __SSAX __builtin_arm_ssax
440461#define __QSAX __builtin_arm_qsax
441462#define __SHSAX __builtin_arm_shsax
463+ #define __USAX __builtin_arm_usax
464+ #define __UQSAX __builtin_arm_uqsax
465+ #define __UHSAX __builtin_arm_uhsax
466+ #define __USAD8 __builtin_arm_usad8
467+ #define __USADA8 __builtin_arm_usada8
468+ #define __SSAT16 __builtin_arm_ssat16
469+ #define __USAT16 __builtin_arm_usat16
470+ #define __UXTB16 __builtin_arm_uxtb16
471+ #define __UXTAB16 __builtin_arm_uxtab16
442472#define __SXTB16 __builtin_arm_sxtb16
473+ #define __SXTAB16 __builtin_arm_sxtab16
443474#define __SMUAD __builtin_arm_smuad
444475#define __SMUADX __builtin_arm_smuadx
445476#define __SMLAD __builtin_arm_smlad
@@ -448,35 +479,24 @@ __STATIC_FORCEINLINE void __STRT(uint32_t value, volatile uint32_t *ptr)
448479#define __SMLALDX __builtin_arm_smlaldx
449480#define __SMUSD __builtin_arm_smusd
450481#define __SMUSDX __builtin_arm_smusdx
482+ #define __SMLSD __builtin_arm_smlsd
451483#define __SMLSDX __builtin_arm_smlsdx
452- #define __USAT16 __builtin_arm_usat16
453- #define __SSUB8 __builtin_arm_ssub8
454- #define __SXTB16 __builtin_arm_sxtb16
455- #define __SXTAB16 __builtin_arm_sxtab16
456-
457-
458- __STATIC_FORCEINLINE int32_t __QADD ( int32_t op1 , int32_t op2 )
459- {
460- int32_t result ;
461-
462- __ASM volatile ("qadd %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
463- return (result );
464- }
465-
466- __STATIC_FORCEINLINE int32_t __QSUB ( int32_t op1 , int32_t op2 )
467- {
468- int32_t result ;
469-
470- __ASM volatile ("qsub %0, %1, %2" : "=r" (result ) : "r" (op1 ), "r" (op2 ) );
471- return (result );
472- }
484+ #define __SMLSLD __builtin_arm_smlsld
485+ #define __SMLSLDX __builtin_arm_smlsldx
486+ #define __SEL __builtin_arm_sel
487+ #define __QADD __builtin_arm_qadd
488+ #define __QSUB __builtin_arm_qsub
473489
474490#define __PKHBT (ARG1 ,ARG2 ,ARG3 ) ( ((((uint32_t)(ARG1)) ) & 0x0000FFFFUL) | \
475491 ((((uint32_t)(ARG2)) << (ARG3)) & 0xFFFF0000UL) )
476492
477493#define __PKHTB (ARG1 ,ARG2 ,ARG3 ) ( ((((uint32_t)(ARG1)) ) & 0xFFFF0000UL) | \
478494 ((((uint32_t)(ARG2)) >> (ARG3)) & 0x0000FFFFUL) )
479495
496+ #define __SXTB16_RORn (ARG1 , ARG2 ) __SXTB16(__ROR(ARG1, ARG2))
497+
498+ #define __SXTAB16_RORn (ARG1 , ARG2 , ARG3 ) __SXTAB16(ARG1, __ROR(ARG2, ARG3))
499+
480500__STATIC_FORCEINLINE int32_t __SMMLA (int32_t op1 , int32_t op2 , int32_t op3 )
481501{
482502 int32_t result ;