X-Git-Url: http://cvs.zerfleddert.de/cgi-bin/gitweb.cgi/proxmark3-svn/blobdiff_plain/d4625ea408b5969bd4f0902dd814b3f5c3108f9d..f3c8131a0df4bd1d5cf39b886ba7c355efc2430c:/client/hardnested/hardnested_bitarray_core.c

diff --git a/client/hardnested/hardnested_bitarray_core.c b/client/hardnested/hardnested_bitarray_core.c
index 5b48a735..aca4f149 100644
--- a/client/hardnested/hardnested_bitarray_core.c
+++ b/client/hardnested/hardnested_bitarray_core.c
@@ -21,7 +21,9 @@
 #include <stdint.h>
 #include <stdio.h>
 #include <stdlib.h>
+#ifndef __APPLE__
 #include <malloc.h>
+#endif

 // this needs to be compiled several times for each instruction set.
 // For each instruction set, define a dedicated function name:
@@ -143,8 +145,15 @@ count_bitarray_AND4_t count_bitarray_AND4_AVX512, count_bitarray_AND4_AVX2, coun

 inline uint32_t *MALLOC_BITARRAY(uint32_t x)
 {
-#ifdef _WIN32
+#if defined (_WIN32)
     return __builtin_assume_aligned(_aligned_malloc((x), __BIGGEST_ALIGNMENT__), __BIGGEST_ALIGNMENT__);
+#elif defined (__APPLE__)
+    uint32_t *allocated_memory;
+    if (posix_memalign((void**)&allocated_memory, __BIGGEST_ALIGNMENT__, x)) {
+        return NULL;
+    } else {
+        return __builtin_assume_aligned(allocated_memory, __BIGGEST_ALIGNMENT__);
+    }
 #else
     return __builtin_assume_aligned(memalign(__BIGGEST_ALIGNMENT__, (x)), __BIGGEST_ALIGNMENT__);
 #endif
@@ -310,16 +319,18 @@ count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dis
 // determine the available instruction set at runtime and call the correct function
 uint32_t *malloc_bitarray_dispatch(uint32_t x) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
     else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
     else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
     else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
-    else
+    else
+  #endif
 #endif
     malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;

@@ -328,17 +339,19 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
 }

 void free_bitarray_dispatch(uint32_t *x) {
-#if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined (__i386__) || defined (__x86_64__)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
     else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
     else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
     else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
-    else
+    else
+  #endif
 #endif
     free_bitarray_function_p = &free_bitarray_NOSIMD;

@@ -348,16 +361,18 @@ void free_bitarray_dispatch(uint32_t *x) {

 uint32_t bitcount_dispatch(uint32_t a) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
     else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
     else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
-    else
+    else
+  #endif
 #endif
     bitcount_function_p = &bitcount_NOSIMD;

@@ -367,16 +382,18 @@ uint32_t bitcount_dispatch(uint32_t a) {

 uint32_t count_states_dispatch(uint32_t *bitarray) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
     else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
     else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
-    else
+    else
+  #endif
 #endif
     count_states_function_p = &count_states_NOSIMD;

@@ -386,16 +403,18 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {

 void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
     else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
-    else
+    else
+  #endif
 #endif
     bitarray_AND_function_p = &bitarray_AND_NOSIMD;

@@ -404,17 +423,19 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 }

 void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined (__i386__) || defined (__x86_64__)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
     else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
-    else
+    else
+  #endif
 #endif
     bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;

@@ -424,16 +445,18 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {

 uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
     else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
-    else
+    else
+  #endif
 #endif
     count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;

@@ -443,16 +466,18 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {

 uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
     else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
-    else
+    else
+  #endif
 #endif
     count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;

@@ -462,16 +487,18 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {

 void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
     else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
-    else
+    else
+  #endif
 #endif
     bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;

@@ -481,16 +508,18 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)

 void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
     else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
     else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
     else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
-    else
+    else
+  #endif
 #endif
     bitarray_OR_function_p = &bitarray_OR_NOSIMD;

@@ -500,16 +529,18 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {

 uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
     else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
-    else
+    else
+  #endif
 #endif
     count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;

@@ -519,16 +550,18 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {

 uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
     else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
-    else
+    else
+  #endif
 #endif
     count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;

@@ -538,16 +571,18 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {

 uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
 #if defined (__i386__) || defined (__x86_64__)
-  #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+  #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+    #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
     if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
     else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
-  #else
+    #else
     if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
-  #endif
+    #endif
     else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
     else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
     else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
-    else
+    else
+  #endif
 #endif
     count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;
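Note on the MALLOC_BITARRAY hunk: macOS ships neither malloc.h nor memalign(), which is why the patch guards the include and adds a posix_memalign() branch. A minimal standalone sketch of the same aligned-allocation strategy follows; the example_* names and the EXAMPLE_ALIGNMENT macro are illustrative only and not part of the patch, and a GCC- or clang-compatible compiler is assumed for __BIGGEST_ALIGNMENT__ and __builtin_assume_aligned().

/* Illustrative sketch only, not code from the repository. */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

#if defined (_WIN32)
#include <malloc.h>                 /* _aligned_malloc / _aligned_free */
#elif !defined (__APPLE__)
#include <malloc.h>                 /* memalign */
#endif

#define EXAMPLE_ALIGNMENT __BIGGEST_ALIGNMENT__   /* hypothetical name */

static uint32_t *example_malloc_bitarray(uint32_t nbytes)
{
#if defined (_WIN32)
    return __builtin_assume_aligned(_aligned_malloc(nbytes, EXAMPLE_ALIGNMENT), EXAMPLE_ALIGNMENT);
#elif defined (__APPLE__)
    /* macOS: no memalign(); posix_memalign() returns 0 on success */
    void *p = NULL;
    if (posix_memalign(&p, EXAMPLE_ALIGNMENT, nbytes) != 0)
        return NULL;
    return __builtin_assume_aligned(p, EXAMPLE_ALIGNMENT);
#else
    return __builtin_assume_aligned(memalign(EXAMPLE_ALIGNMENT, nbytes), EXAMPLE_ALIGNMENT);
#endif
}

static void example_free_bitarray(uint32_t *p)
{
#if defined (_WIN32)
    _aligned_free(p);
#else
    free(p);                        /* valid for both posix_memalign and memalign */
#endif
}

int main(void)
{
    uint32_t *bits = example_malloc_bitarray(1u << 16);
    if (bits == NULL)
        return 1;
    memset(bits, 0, 1u << 16);
    printf("allocated %u bytes, %d-byte aligned\n", 1u << 16, (int)EXAMPLE_ALIGNMENT);
    example_free_bitarray(bits);
    return 0;
}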
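Note on the dispatch hunks: every *_dispatch() function receives the same change. The __builtin_cpu_supports() probes are only compiled when the toolchain is not Apple clang, or is Apple clang 8.1 or newer, presumably because older Apple toolchains do not provide that builtin; otherwise the function pointer falls straight through to the *_NOSIMD implementation. A condensed sketch of that pattern, with hypothetical example_* names and a slightly simplified guard, is shown below.

/* Condensed, illustrative sketch of the runtime dispatch pattern. */
#include <stdint.h>

typedef uint32_t (*example_count_fn)(uint32_t);

static uint32_t example_count_NOSIMD(uint32_t x) { return (uint32_t)__builtin_popcount(x); }
static uint32_t example_count_AVX2(uint32_t x)   { return (uint32_t)__builtin_popcount(x); } /* stand-in for a SIMD build */

static example_count_fn example_count_function_p = &example_count_NOSIMD;

static uint32_t example_count_dispatch(uint32_t x)
{
#if defined (__i386__) || defined (__x86_64__)
  /* same idea as the patch: only probe the CPU when the compiler is not
     Apple clang, or is Apple clang >= 8.1 */
  #if !defined(__APPLE__) || (__clang_major__ > 8 || (__clang_major__ == 8 && __clang_minor__ >= 1))
    if (__builtin_cpu_supports("avx2"))
        example_count_function_p = &example_count_AVX2;
    else
  #endif
#endif
        example_count_function_p = &example_count_NOSIMD;

    return example_count_function_p(x);
}

int main(void)
{
    return example_count_dispatch(0xF0F0F0F0u) == 16 ? 0 : 1;
}

When the guard is disabled, the if/else pair collapses so that only the NOSIMD assignment remains, which is why the patch moves the trailing else inside the new #endif in each dispatch function.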