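/*
 * Alpha assembler macros: CPU feature tests via amask and thin wrappers
 * around the BWX/FIX/CIX/MVI instruction set extensions, with separate
 * implementations for GCC and for the DEC/Compaq C compiler.
 */
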
#ifndef ALPHA_ASM_H
#define ALPHA_ASM_H

#include <inttypes.h>

#if defined __GNUC__
# define GNUC_PREREQ(maj, min) \
        ((__GNUC__ << 16) + __GNUC_MINOR__ >= ((maj) << 16) + (min))
#else
# define GNUC_PREREQ(maj, min) 0
#endif

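/* Feature bits as reported by the amask instruction.  amask clears the
 * bit of every extension the CPU implements, so a zero result means the
 * extension is available. */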
#define AMASK_BWX (1 << 0)
#define AMASK_FIX (1 << 1)
#define AMASK_CIX (1 << 2)
#define AMASK_MVI (1 << 8)

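/* HAVE_*: constant 1 when the compiler already targets the extension
 * (e.g. through -mcpu), otherwise a runtime amask() check. */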
#ifdef __alpha_bwx__
# define HAVE_BWX() 1
#else
# define HAVE_BWX() (amask(AMASK_BWX) == 0)
#endif
#ifdef __alpha_fix__
# define HAVE_FIX() 1
#else
# define HAVE_FIX() (amask(AMASK_FIX) == 0)
#endif
#ifdef __alpha_max__
# define HAVE_MVI() 1
#else
# define HAVE_MVI() (amask(AMASK_MVI) == 0)
#endif
#ifdef __alpha_cix__
# define HAVE_CIX() 1
#else
# define HAVE_CIX() (amask(AMASK_CIX) == 0)
#endif
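
/* Illustrative sketch only (not part of this header): runtime dispatch on
 * MVI availability; sad_mvi() and sad_c() are hypothetical caller-side
 * functions, not defined here.
 *
 *     int (*pix_abs)(const uint8_t *, const uint8_t *) =
 *         HAVE_MVI() ? sad_mvi : sad_c;
 *
 * When the compiler itself targets MVI (__alpha_max__), HAVE_MVI() is the
 * constant 1 and the fallback can be dropped at compile time. */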

/* Replicate the low byte / low word of x across the whole 64-bit value. */
inline static uint64_t BYTE_VEC(uint64_t x)
{
    x |= x <<  8;
    x |= x << 16;
    x |= x << 32;
    return x;
}
inline static uint64_t WORD_VEC(uint64_t x)
{
    x |= x << 16;
    x |= x << 32;
    return x;
}

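/* Aligned loads/stores; ldl sign-extends like the hardware LDL instruction,
 * and sextw sign-extends a 16-bit value like sextw. */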
#define ldq(p) (*(const uint64_t *) (p))
#define ldl(p) (*(const int32_t *) (p))
#define stl(l, p) do { *(uint32_t *) (p) = (l); } while (0)
#define stq(l, p) do { *(uint64_t *) (p) = (l); } while (0)
#define sextw(x) ((int16_t) (x))

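/* GCC path.  Unaligned access helpers: ldq_u mimics the hardware
 * instruction by masking the address down to a quadword boundary; uldq
 * goes through a packed struct so the compiler emits the full
 * unaligned-load sequence itself. */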
#ifdef __GNUC__
struct unaligned_long { uint64_t l; } __attribute__((packed));
#define ldq_u(p)     (*(const uint64_t *) (((uint64_t) (p)) & ~7ul))
#define uldq(a)      (((const struct unaligned_long *) (a))->l)

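/* Illustrative sketch only: the classic open-coded unaligned quadword load
 * that ldq_u enables (uldq() above already yields the same result via the
 * compiler); extql/extqh are defined further down.
 *
 *     const uint8_t *p = ...;
 *     uint64_t lo = ldq_u(p);
 *     uint64_t hi = ldq_u(p + 7);
 *     uint64_t v  = extql(lo, p) | extqh(hi, p);
 */
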
#if GNUC_PREREQ(3,0)
/* __builtin_prefetch(addr, rw, locality): rw = 1 marks the "_m"
 * (modify-intent) variants and locality = 0 marks the "_en" (evict-next)
 * variants, matching the inline-asm fallbacks below. */
# define prefetch(p)     __builtin_prefetch((p), 0, 1)
# define prefetch_en(p)  __builtin_prefetch((p), 0, 0)
# define prefetch_m(p)   __builtin_prefetch((p), 1, 1)
# define prefetch_men(p) __builtin_prefetch((p), 1, 0)
#else
# define prefetch(p)     asm volatile("ldl $31,%0"  : : "m"(*(const char *) (p)) : "memory")
# define prefetch_en(p)  asm volatile("ldq $31,%0"  : : "m"(*(const char *) (p)) : "memory")
# define prefetch_m(p)   asm volatile("lds $f31,%0" : : "m"(*(const char *) (p)) : "memory")
# define prefetch_men(p) asm volatile("ldt $f31,%0" : : "m"(*(const char *) (p)) : "memory")
#endif

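/* Alpha intrinsics (byte manipulation, amask/implver/rpcc, and the MVI
 * min/max/perr/pack/unpack set): GCC 3.3+ provides builtins, older GCC
 * falls back to equivalent inline asm. */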
#if GNUC_PREREQ(3,3)
#define cmpbge  __builtin_alpha_cmpbge

#define extql(a, b)     __builtin_alpha_extql(a, (uint64_t) (b))
#define extwl(a, b)     __builtin_alpha_extwl(a, (uint64_t) (b))
#define extqh(a, b)     __builtin_alpha_extqh(a, (uint64_t) (b))
#define zap     __builtin_alpha_zap
#define zapnot  __builtin_alpha_zapnot
#define amask   __builtin_alpha_amask
#define implver __builtin_alpha_implver
#define rpcc    __builtin_alpha_rpcc
#define minub8  __builtin_alpha_minub8
#define minsb8  __builtin_alpha_minsb8
#define minuw4  __builtin_alpha_minuw4
#define minsw4  __builtin_alpha_minsw4
#define maxub8  __builtin_alpha_maxub8
#define maxsb8  __builtin_alpha_maxsb8
#define maxuw4  __builtin_alpha_maxuw4
#define maxsw4  __builtin_alpha_maxsw4
#define perr    __builtin_alpha_perr
#define pklb    __builtin_alpha_pklb
#define pkwb    __builtin_alpha_pkwb
#define unpkbl  __builtin_alpha_unpkbl
#define unpkbw  __builtin_alpha_unpkbw
#else
#define cmpbge(a, b) ({ uint64_t __r; asm ("cmpbge  %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define extql(a, b)  ({ uint64_t __r; asm ("extql   %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define extwl(a, b)  ({ uint64_t __r; asm ("extwl   %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define extqh(a, b)  ({ uint64_t __r; asm ("extqh   %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define zap(a, b)    ({ uint64_t __r; asm ("zap     %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define zapnot(a, b) ({ uint64_t __r; asm ("zapnot  %r1,%2,%0"  : "=r" (__r) : "rJ"  (a), "rI" (b)); __r; })
#define amask(a)     ({ uint64_t __r; asm ("amask   %1,%0"      : "=r" (__r) : "rI"  (a));           __r; })
#define implver()    ({ uint64_t __r; asm ("implver %0"         : "=r" (__r));                       __r; })
#define rpcc()       ({ uint64_t __r; asm volatile ("rpcc %0"   : "=r" (__r));                       __r; })
#define minub8(a, b) ({ uint64_t __r; asm ("minub8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsb8(a, b) ({ uint64_t __r; asm ("minsb8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minuw4(a, b) ({ uint64_t __r; asm ("minuw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define minsw4(a, b) ({ uint64_t __r; asm ("minsw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxub8(a, b) ({ uint64_t __r; asm ("maxub8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsb8(a, b) ({ uint64_t __r; asm ("maxsb8  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxuw4(a, b) ({ uint64_t __r; asm ("maxuw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define maxsw4(a, b) ({ uint64_t __r; asm ("maxsw4  %r1,%2,%0"  : "=r" (__r) : "%rJ" (a), "rI" (b)); __r; })
#define perr(a, b)   ({ uint64_t __r; asm ("perr    %r1,%r2,%0" : "=r" (__r) : "%rJ" (a), "rJ" (b)); __r; })
#define pklb(a)      ({ uint64_t __r; asm ("pklb    %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#define pkwb(a)      ({ uint64_t __r; asm ("pkwb    %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#define unpkbl(a)    ({ uint64_t __r; asm ("unpkbl  %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#define unpkbw(a)    ({ uint64_t __r; asm ("unpkbw  %r1,%0"     : "=r" (__r) : "rJ"  (a));           __r; })
#endif
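
/* Illustrative sketch only: cmpbge sets bit i of its result when byte i of
 * the first operand is (unsigned) >= byte i of the second, so a nonzero
 * cmpbge(0, q) means the quadword q contains a zero byte:
 *
 *     uint64_t q = ldq(s);        // s assumed 8-byte aligned
 *     if (cmpbge(0, q))
 *         ...                     // some byte of q is zero
 */
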
/* wh64: hint that the aligned 64-byte block containing p is about to be
 * completely overwritten. */
#define wh64(p) asm volatile("wh64 (%0)" : : "r"(p) : "memory")

#elif defined(__DECC)           /* DEC/Compaq C compiler */

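/* The DEC C asm() intrinsic from <c_asm.h>: %a0/%a1 name the macro
 * arguments and %v0 the value return register. */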
#include <c_asm.h>
#define ldq_u(a)     asm ("ldq_u   %v0,0(%a0)", a)
#define uldq(a)      (*(const __unaligned uint64_t *) (a))
#define cmpbge(a, b) asm ("cmpbge  %a0,%a1,%v0", a, b)
#define extql(a, b)  asm ("extql   %a0,%a1,%v0", a, b)
#define extwl(a, b)  asm ("extwl   %a0,%a1,%v0", a, b)
#define extqh(a, b)  asm ("extqh   %a0,%a1,%v0", a, b)
#define zap(a, b)    asm ("zap     %a0,%a1,%v0", a, b)
#define zapnot(a, b) asm ("zapnot  %a0,%a1,%v0", a, b)
#define amask(a)     asm ("amask   %a0,%v0", a)
#define implver()    asm ("implver %v0")
#define rpcc()       asm ("rpcc    %v0")
#define minub8(a, b) asm ("minub8  %a0,%a1,%v0", a, b)
#define minsb8(a, b) asm ("minsb8  %a0,%a1,%v0", a, b)
#define minuw4(a, b) asm ("minuw4  %a0,%a1,%v0", a, b)
#define minsw4(a, b) asm ("minsw4  %a0,%a1,%v0", a, b)
#define maxub8(a, b) asm ("maxub8  %a0,%a1,%v0", a, b)
#define maxsb8(a, b) asm ("maxsb8  %a0,%a1,%v0", a, b)
#define maxuw4(a, b) asm ("maxuw4  %a0,%a1,%v0", a, b)
#define maxsw4(a, b) asm ("maxsw4  %a0,%a1,%v0", a, b)
#define perr(a, b)   asm ("perr    %a0,%a1,%v0", a, b)
#define pklb(a)      asm ("pklb    %a0,%v0", a)
#define pkwb(a)      asm ("pkwb    %a0,%v0", a)
#define unpkbl(a)    asm ("unpkbl  %a0,%v0", a)
#define unpkbw(a)    asm ("unpkbw  %a0,%v0", a)
#define wh64(a)      asm ("wh64    %a0", a)

#else
#error "Unknown compiler!"
#endif

#endif /* ALPHA_ASM_H */