r191590 - Implements some of the more commonly used intrinsics in Intrin.h

Warren Hunt whunt at google.com
Mon Sep 30 14:17:53 PDT 2013


Fixed.
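For anyone skimming the archive later: the fix is presumably just the spelling of the guard macro Chip points out below. A minimal sketch of the corrected check (the surrounding 64-bit-only intrinsics are elided):

    /* Clang predefines __x86_64__ (lowercase 'x'); it does not define
       __X86_64__, so anything guarded by the old spelling was never
       compiled.  The guard should read: */
    #ifdef __x86_64__
    /* ... 64-bit-only intrinsics ... */
    #endif /* __x86_64__ */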


On Fri, Sep 27, 2013 at 10:59 PM, Charles Davis <cdavis5x at gmail.com> wrote:

>
> On Sep 27, 2013, at 5:57 PM, Warren Hunt wrote:
> > Modified: cfe/trunk/lib/Headers/Intrin.h
> > URL: http://llvm.org/viewvc/llvm-project/cfe/trunk/lib/Headers/Intrin.h?rev=191590&r1=191589&r2=191590&view=diff
> > ==============================================================================
> > --- cfe/trunk/lib/Headers/Intrin.h (original)
> > +++ cfe/trunk/lib/Headers/Intrin.h Fri Sep 27 18:57:26 2013
> [...]
> > @@ -343,6 +392,369 @@ void __cdecl _xsaveopt64(void *, unsigne
> >
> > #endif /* __X86_64__ */
> This should be testing for __x86_64__ (i.e. lowercase 'x').
> >
> > +/*----------------------------------------------------------------------------*\
> > +|* Bit Twiddling
> > +\*----------------------------------------------------------------------------*/
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_rotl8(unsigned char _Value, unsigned char _Shift) {
> > +  _Shift &= 0x7;
> > +  return _Shift ? (_Value << _Shift) | (_Value >> (8 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_rotr8(unsigned char _Value, unsigned char _Shift) {
> > +  _Shift &= 0x7;
> > +  return _Shift ? (_Value >> _Shift) | (_Value << (8 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
> > +_rotl16(unsigned short _Value, unsigned char _Shift) {
> > +  _Shift &= 0xf;
> > +  return _Shift ? (_Value << _Shift) | (_Value >> (16 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
> > +_rotr16(unsigned short _Value, unsigned char _Shift) {
> > +  _Shift &= 0xf;
> > +  return _Shift ? (_Value >> _Shift) | (_Value << (16 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
> > +_rotl(unsigned int _Value, int _Shift) {
> > +  _Shift &= 0x1f;
> > +  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
> > +_rotr(unsigned int _Value, int _Shift) {
> > +  _Shift &= 0x1f;
> > +  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
> > +_lrotl(unsigned long _Value, int _Shift) {
> > +  _Shift &= 0x1f;
> > +  return _Shift ? (_Value << _Shift) | (_Value >> (32 - _Shift)) : _Value;
> > +}
> > +static __inline__ unsigned long __attribute__((__always_inline__, __nodebug__))
> > +_lrotr(unsigned long _Value, int _Shift) {
> > +  _Shift &= 0x1f;
> > +  return _Shift ? (_Value >> _Shift) | (_Value << (32 - _Shift)) : _Value;
> > +}
> > +static
> > +__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
> > +_rotl64(unsigned __int64 _Value, int _Shift) {
> > +  _Shift &= 0x3f;
> > +  return _Shift ? (_Value << _Shift) | (_Value >> (64 - _Shift)) : _Value;
> > +}
> > +static
> > +__inline__ unsigned __int64 __attribute__((__always_inline__, __nodebug__))
> > +_rotr64(unsigned __int64 _Value, int _Shift) {
> > +  _Shift &= 0x3f;
> > +  return _Shift ? (_Value >> _Shift) | (_Value << (64 - _Shift)) : _Value;
> > +}
> > +/*----------------------------------------------------------------------------*\
> > +|* Bit Counting and Testing
> > +\*----------------------------------------------------------------------------*/
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_BitScanForward(unsigned long *_Index, unsigned long _Mask) {
> > +  if (!_Mask)
> > +    return 0;
> > +  *_Index = __builtin_ctzl(_Mask);
> > +  return 1;
> > +}
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_BitScanReverse(unsigned long *_Index, unsigned long _Mask) {
> > +  if (!_Mask)
> > +    return 0;
> > +  *_Index = 31 - __builtin_clzl(_Mask);
> > +  return 1;
> > +}
> > +static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
> > +_lzcnt_u32(unsigned int a) {
> > +  if (!a)
> > +    return 32;
> > +  return __builtin_clzl(a);
> > +}
> > +static __inline__ unsigned short __attribute__((__always_inline__, __nodebug__))
> > +__popcnt16(unsigned short value) {
> > +  return __builtin_popcount((int)value);
> > +}
> > +static __inline__ unsigned int __attribute__((__always_inline__, __nodebug__))
> > +__popcnt(unsigned int value) {
> > +  return __builtin_popcount(value);
> > +}
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_bittest(long const *a, long b) {
> > +  return (*a >> b) & 1;
> > +}
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_bittestandcomplement(long *a, long b) {
> > +  unsigned char x = (*a >> b) & 1;
> > +  *a = *a ^ (1 << b);
> > +  return x;
> > +}
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_bittestandreset(long *a, long b) {
> > +  unsigned char x = (*a >> b) & 1;
> > +  *a = *a & ~(1 << b);
> > +  return x;
> > +}
> > +static __inline__ unsigned char __attribute__((__always_inline__, __nodebug__))
> > +_bittestandset(long *a, long b) {
> > +  unsigned char x = (*a >> b) & 1;
> > +  *a = *a & (1 << b);
> > +  return x;
> > +}
> > +#ifdef __X86_64__
> In fact, all the ones that test this macro should. I know this isn't your
> fault--it was testing __X86_64__ (uppercase 'X') before. But just so you
> know, no one that I know of defines __X86_64__--certainly not Clang.
>
> Chip
>
>
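A quick way to see the masking behavior of the rotate helpers in the quoted patch is a standalone sketch; my_rotl8 below just mirrors _rotl8 and is not part of Intrin.h:

    /* rotl_check.c - standalone sanity check mirroring _rotl8 from the patch. */
    #include <stdio.h>

    static unsigned char my_rotl8(unsigned char _Value, unsigned char _Shift) {
      _Shift &= 0x7;                 /* shift count is taken mod 8, as in _rotl8 */
      return _Shift ? (unsigned char)((_Value << _Shift) | (_Value >> (8 - _Shift)))
                    : _Value;
    }

    int main(void) {
      /* 0x81 rotated left by 1 is 0x03; a shift of 8 masks to 0 and is a no-op. */
      printf("%#x %#x\n", my_rotl8(0x81, 1), my_rotl8(0x81, 8));
      return 0;
    }

Built with any C compiler this prints 0x3 0x81, which matches the intent of the _Shift &= 0x7 mask.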