/* saferewrite-20240620/src/int64_max/supercopnew/crypto_int64.h */
/* auto-generated: cd inttypes; ./autogen */
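/* Constant-time operations on 64-bit signed integers: little-endian
   loads/stores, predicate masks, shifts, comparisons, min/max, and bit
   counts. Inline assembly is used on x86_64 and AArch64; elsewhere,
   branch-free bit-twiddling fallbacks are used. */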
#ifndef crypto_int64_h
#define crypto_int64_h
#include <inttypes.h>
#define crypto_int64 int64_t
#define crypto_int64_unsigned uint64_t
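/* crypto_int64_optblocker is a volatile global, defined elsewhere and
   expected to hold 0 at run time; XORing values through it in the fallback
   paths is intended to stop compilers from recognizing the bit tricks and
   reintroducing branches. */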
#define crypto_int64_optblocker supercop_int64_optblocker
extern volatile crypto_int64 crypto_int64_optblocker;
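/* Load 8 bytes as a little-endian 64-bit integer. */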
__attribute__((unused))
static inline
crypto_int64 crypto_int64_load(const unsigned char *crypto_int64_s) {
crypto_int64 crypto_int64_z = 0;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 0;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 8;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 16;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 24;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 32;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 40;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 48;
crypto_int64_z |= ((crypto_int64) (*crypto_int64_s++)) << 56;
return crypto_int64_z;
}
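/* Store a 64-bit integer as 8 little-endian bytes. */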
__attribute__((unused))
static inline
void crypto_int64_store(unsigned char *crypto_int64_s,crypto_int64 crypto_int64_x) {
*crypto_int64_s++ = crypto_int64_x >> 0;
*crypto_int64_s++ = crypto_int64_x >> 8;
*crypto_int64_s++ = crypto_int64_x >> 16;
*crypto_int64_s++ = crypto_int64_x >> 24;
*crypto_int64_s++ = crypto_int64_x >> 32;
*crypto_int64_s++ = crypto_int64_x >> 40;
*crypto_int64_s++ = crypto_int64_x >> 48;
*crypto_int64_s++ = crypto_int64_x >> 56;
}
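/* Return -1 if x < 0, else 0: an arithmetic right shift by 63. The fallback
   splits the shift as 58+5 around an optblocker XOR (a no-op at run time). */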
__attribute__((unused))
static inline
crypto_int64 crypto_int64_negative_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
__asm__ ("sarq $63,%0" : "+r"(crypto_int64_x) : : "cc");
return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_y;
__asm__ ("asr %0,%1,63" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
return crypto_int64_y;
#else
crypto_int64_x >>= 64-6;
crypto_int64_x ^= crypto_int64_optblocker;
crypto_int64_x >>= 5;
return crypto_int64_x;
#endif
}
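/* Return -1 if bit 0 of x is set, else 0. */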
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bottombit_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
__asm__ ("andq $1,%0" : "+r"(crypto_int64_x) : : "cc");
return -crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_y;
__asm__ ("sbfx %0,%1,0,1" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
return crypto_int64_y;
#else
crypto_int64_x &= 1 ^ crypto_int64_optblocker;
return -crypto_int64_x;
#endif
}
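/* Return -1 if bit s of x is set, else 0. The position s is assumed to be
   public and in [0,63], hence the name. */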
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bitinrangepublicpos_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
__asm__ ("sarq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
__asm__ ("asr %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
crypto_int64_x >>= crypto_int64_s ^ crypto_int64_optblocker;
#endif
return crypto_int64_bottombit_mask(crypto_int64_x);
}
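/* Shift x left by s mod 64; s may be secret. The fallback is a barrel
   shifter: for k = 1,2,4,...,32 it conditionally applies a shift by k,
   selected by bit l of s, so the operation sequence is independent of s. */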
__attribute__((unused))
static inline
crypto_int64 crypto_int64_shlmod(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
__asm__ ("shlq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
__asm__ ("lsl %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
int crypto_int64_k, crypto_int64_l;
for (crypto_int64_l = 0,crypto_int64_k = 1;crypto_int64_k < 64;++crypto_int64_l,crypto_int64_k *= 2)
crypto_int64_x ^= (crypto_int64_x ^ (crypto_int64_x << crypto_int64_k)) & crypto_int64_bitinrangepublicpos_mask(crypto_int64_s,crypto_int64_l);
#endif
return crypto_int64_x;
}
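/* Arithmetic right shift of x by s mod 64; s may be secret. Same
   barrel-shifter structure as crypto_int64_shlmod. */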
__attribute__((unused))
static inline
crypto_int64 crypto_int64_shrmod(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
#if defined(__GNUC__) && defined(__x86_64__)
__asm__ ("sarq %%cl,%0" : "+r"(crypto_int64_x) : "c"(crypto_int64_s) : "cc");
#elif defined(__GNUC__) && defined(__aarch64__)
__asm__ ("asr %0,%0,%1" : "+r"(crypto_int64_x) : "r"(crypto_int64_s) : );
#else
int crypto_int64_k, crypto_int64_l;
for (crypto_int64_l = 0,crypto_int64_k = 1;crypto_int64_k < 64;++crypto_int64_l,crypto_int64_k *= 2)
crypto_int64_x ^= (crypto_int64_x ^ (crypto_int64_x >> crypto_int64_k)) & crypto_int64_bitinrangepublicpos_mask(crypto_int64_s,crypto_int64_l);
#endif
return crypto_int64_x;
}
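/* Return -1 if bit (s mod 64) of x is set, else 0; unlike
   crypto_int64_bitinrangepublicpos_mask, s may be secret. */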
__attribute__((unused))
static inline
crypto_int64 crypto_int64_bitmod_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_s) {
crypto_int64_x = crypto_int64_shrmod(crypto_int64_x,crypto_int64_s);
return crypto_int64_bottombit_mask(crypto_int64_x);
}
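/* Return -1 if x != 0, else 0. In the fallback, x | -x has its sign bit set
   exactly when x is nonzero. */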
__attribute__((unused))
static inline
crypto_int64 crypto_int64_nonzero_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,0\n csetm %0,ne" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
return crypto_int64_z;
#else
crypto_int64_x |= -crypto_int64_x;
return crypto_int64_negative_mask(crypto_int64_x);
#endif
}
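/* Return -1 if x > 0, else 0. The fallback takes the sign of -x, with a
   correction so that x = -2^63 (where -x overflows back to x) reports 0. */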
__attribute__((unused))
static inline
crypto_int64 crypto_int64_positive_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,0\n csetm %0,gt" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
return crypto_int64_z;
#else
crypto_int64 crypto_int64_z = -crypto_int64_x;
crypto_int64_z ^= crypto_int64_x & crypto_int64_z;
return crypto_int64_negative_mask(crypto_int64_z);
#endif
}
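/* Return -1 if x == 0, else 0. */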
__attribute__((unused))
static inline
crypto_int64 crypto_int64_zero_mask(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,0\n csetm %0,eq" : "=r"(crypto_int64_z) : "r"(crypto_int64_x) : "cc");
return crypto_int64_z;
#else
return ~crypto_int64_nonzero_mask(crypto_int64_x);
#endif
}
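/* Return -1 if x != y, else 0. */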
__attribute__((unused))
static inline
crypto_int64 crypto_int64_unequal_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,%2\n csetm %0,ne" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
return crypto_int64_z;
#else
return crypto_int64_nonzero_mask(crypto_int64_x ^ crypto_int64_y);
#endif
}
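/* Return -1 if x == y, else 0. */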
__attribute__((unused))
static inline
crypto_int64 crypto_int64_equal_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,%2\n csetm %0,eq" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
return crypto_int64_z;
#else
return ~crypto_int64_unequal_mask(crypto_int64_x,crypto_int64_y);
#endif
}
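/* Branch-free signed minimum. The fallback computes the sign of y - x, with
   the z ^= r & (z ^ y) step repairing the sign when the subtraction
   overflows, then uses the resulting mask to select x or y. */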
__attribute__((unused))
static inline
crypto_int64 crypto_int64_min(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
asm("cmpq %1,%0\n cmovgq %1,%0" : "+r"(crypto_int64_x), "+r"(crypto_int64_y) : : "cc");
return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
__asm__ ("cmp %0,%1\n csel %0,%0,%1,lt" : "+r"(crypto_int64_x), "+r"(crypto_int64_y) : : "cc");
return crypto_int64_x;
#else
crypto_int64 crypto_int64_r = crypto_int64_y ^ crypto_int64_x;
crypto_int64 crypto_int64_z = crypto_int64_y - crypto_int64_x;
crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_y);
crypto_int64_z = crypto_int64_negative_mask(crypto_int64_z);
crypto_int64_z &= crypto_int64_r;
return crypto_int64_x ^ crypto_int64_z;
#endif
}
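/* Branch-free signed maximum; same mask construction as crypto_int64_min. */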
__attribute__((unused))
static inline
crypto_int64 crypto_int64_max(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__x86_64__)
asm("cmpq %1,%0\n cmovlq %1,%0" : "+r"(crypto_int64_x), "+r"(crypto_int64_y) : : "cc");
return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
__asm__ ("cmp %0,%1\n csel %0,%1,%0,lt" : "+r"(crypto_int64_x), "+r"(crypto_int64_y) : : "cc");
return crypto_int64_x;
#else
crypto_int64 crypto_int64_r = crypto_int64_y ^ crypto_int64_x;
crypto_int64 crypto_int64_z = crypto_int64_y - crypto_int64_x;
crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_y);
crypto_int64_z = crypto_int64_negative_mask(crypto_int64_z);
crypto_int64_z &= crypto_int64_r;
return crypto_int64_y ^ crypto_int64_z;
#endif
}
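/* Sort the pair in place: on return, *p <= *q. This is the compare-and-swap
   step of a constant-time sorting network. */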
__attribute__((unused))
static inline
void crypto_int64_minmax(crypto_int64 *crypto_int64_p,crypto_int64 *crypto_int64_q) {
crypto_int64 crypto_int64_x = *crypto_int64_p;
crypto_int64 crypto_int64_y = *crypto_int64_q;
#if defined(__GNUC__) && defined(__x86_64__)
crypto_int64 crypto_int64_z;
asm("cmpq %1,%0\n movq %0,%2\n cmovgq %1,%0\n cmovgq %2,%1" : "+r"(crypto_int64_x), "+r"(crypto_int64_y), "=r"(crypto_int64_z) : : "cc");
*crypto_int64_p = crypto_int64_x;
*crypto_int64_q = crypto_int64_y;
#elif defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_r, crypto_int64_s;
__asm__ ("cmp %2,%3\n csel %0,%2,%3,lt\n csel %1,%3,%2,lt" : "=&r"(crypto_int64_r), "=r"(crypto_int64_s) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
*crypto_int64_p = crypto_int64_r;
*crypto_int64_q = crypto_int64_s;
#else
crypto_int64 crypto_int64_r = crypto_int64_y ^ crypto_int64_x;
crypto_int64 crypto_int64_z = crypto_int64_y - crypto_int64_x;
crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_y);
crypto_int64_z = crypto_int64_negative_mask(crypto_int64_z);
crypto_int64_z &= crypto_int64_r;
crypto_int64_x ^= crypto_int64_z;
crypto_int64_y ^= crypto_int64_z;
*crypto_int64_p = crypto_int64_x;
*crypto_int64_q = crypto_int64_y;
#endif
}
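/* Return -1 if x < y (signed comparison), else 0; the fallback is
   overflow-safe. */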
__attribute__((unused))
static inline
crypto_int64 crypto_int64_smaller_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,%2\n csetm %0,lt" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
return crypto_int64_z;
#else
crypto_int64 crypto_int64_r = crypto_int64_x ^ crypto_int64_y;
crypto_int64 crypto_int64_z = crypto_int64_x - crypto_int64_y;
crypto_int64_z ^= crypto_int64_r & (crypto_int64_z ^ crypto_int64_x);
return crypto_int64_negative_mask(crypto_int64_z);
#endif
}
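/* Return -1 if x <= y, else 0. */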
__attribute__((unused))
static inline
crypto_int64 crypto_int64_leq_mask(crypto_int64 crypto_int64_x,crypto_int64 crypto_int64_y) {
#if defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_z;
__asm__ ("cmp %1,%2\n csetm %0,le" : "=r"(crypto_int64_z) : "r"(crypto_int64_x), "r"(crypto_int64_y) : "cc");
return crypto_int64_z;
#else
return ~crypto_int64_smaller_mask(crypto_int64_y,crypto_int64_x);
#endif
}
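/* Population count: number of set bits in x, via the usual SWAR reduction
   (pairwise sums at widths 2, 4, 8, then byte folding). */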
__attribute__((unused))
static inline
int crypto_int64_ones_num(crypto_int64 crypto_int64_x) {
crypto_int64_unsigned crypto_int64_y = crypto_int64_x;
const crypto_int64 C0 = 0x5555555555555555;
const crypto_int64 C1 = 0x3333333333333333;
const crypto_int64 C2 = 0x0f0f0f0f0f0f0f0f;
crypto_int64_y -= ((crypto_int64_y >> 1) & C0);
crypto_int64_y = (crypto_int64_y & C1) + ((crypto_int64_y >> 2) & C1);
crypto_int64_y = (crypto_int64_y + (crypto_int64_y >> 4)) & C2;
crypto_int64_y += crypto_int64_y >> 8;
crypto_int64_y += crypto_int64_y >> 16;
crypto_int64_y = (crypto_int64_y + (crypto_int64_y >> 32)) & 0xff;
return crypto_int64_y;
}
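/* Number of trailing zero bits in x; returns 64 when x = 0. The fallback
   maps x to a run of ones ending at x's lowest set bit (x ^ (x-1)), shifts
   one bit out arithmetically, clears the top bit in the x = -2^63 corner
   case, and counts the ones. */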
__attribute__((unused))
static inline
int crypto_int64_bottomzeros_num(crypto_int64 crypto_int64_x) {
#if defined(__GNUC__) && defined(__x86_64__)
asm("bsfq %0,%0\n cmovz %1,%0" : "+r"(crypto_int64_x) : "r"((int64_t) 64) : "cc");
return crypto_int64_x;
#elif defined(__GNUC__) && defined(__aarch64__)
crypto_int64 crypto_int64_y;
__asm__ ("rbit %0,%1\n clz %0,%0" : "=r"(crypto_int64_y) : "r"(crypto_int64_x) : );
return crypto_int64_y;
#else
crypto_int64 crypto_int64_y = crypto_int64_x ^ (crypto_int64_x-1);
crypto_int64_y = ((crypto_int64) crypto_int64_y) >> 1;
crypto_int64_y &= ~(crypto_int64_x & (crypto_int64) (((crypto_int64_unsigned) 1) << (64-1))); /* unsigned shift avoids signed-overflow UB */
return crypto_int64_ones_num(crypto_int64_y);
#endif
}
#endif
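/* Usage sketch (hypothetical caller, not part of the generated header):
   branch-free selection between two secret values using an equality mask.

       crypto_int64 select_if_equal(crypto_int64 a, crypto_int64 b,
                                    crypto_int64 t, crypto_int64 u) {
         crypto_int64 m = crypto_int64_equal_mask(t, u); // -1 if t == u, else 0
         return b ^ (m & (a ^ b));                       // a if t == u, else b
       }
*/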