//-----------------------------------------------------------------------------
// Copyright (C) 2016, 2017 by piwi
//
// This code is licensed to you under the terms of the GNU GPL, version 2 or,
// at your option, any later version. See the LICENSE.txt file for the text of
// the license.
//-----------------------------------------------------------------------------
// Implements a card only attack based on crypto text (encrypted nonces
// received during a nested authentication) only. Unlike other card only
// attacks this doesn't rely on implementation errors but only on the
// inherent weaknesses of the crypto1 cipher. Described in
//   Carlo Meijer, Roel Verdult, "Ciphertext-only Cryptanalysis on Hardened
//   Mifare Classic Cards", in Proceedings of the 22nd ACM SIGSAC Conference on
//   Computer and Communications Security, 2015
//-----------------------------------------------------------------------------
// some helper functions which can benefit from SIMD instructions or other special instructions
//

#include "hardnested_bitarray_core.h"

#include <stdint.h>
#include <stdlib.h>
#ifndef __APPLE__
#include <malloc.h>
#endif
// This file needs to be compiled several times, once for each instruction set.
// For each instruction set, define a dedicated function name
// (see the build sketch after these definitions):
#if defined (__AVX512F__)
#define MALLOC_BITARRAY malloc_bitarray_AVX512
#define FREE_BITARRAY free_bitarray_AVX512
#define BITCOUNT bitcount_AVX512
#define COUNT_STATES count_states_AVX512
#define BITARRAY_AND bitarray_AND_AVX512
#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX512
#define COUNT_BITARRAY_AND count_bitarray_AND_AVX512
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX512
#define BITARRAY_AND4 bitarray_AND4_AVX512
#define BITARRAY_OR bitarray_OR_AVX512
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX512
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX512
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX512
#elif defined (__AVX2__)
#define MALLOC_BITARRAY malloc_bitarray_AVX2
#define FREE_BITARRAY free_bitarray_AVX2
#define BITCOUNT bitcount_AVX2
#define COUNT_STATES count_states_AVX2
#define BITARRAY_AND bitarray_AND_AVX2
#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX2
#define COUNT_BITARRAY_AND count_bitarray_AND_AVX2
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX2
#define BITARRAY_AND4 bitarray_AND4_AVX2
#define BITARRAY_OR bitarray_OR_AVX2
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX2
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX2
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX2
#elif defined (__AVX__)
#define MALLOC_BITARRAY malloc_bitarray_AVX
#define FREE_BITARRAY free_bitarray_AVX
#define BITCOUNT bitcount_AVX
#define COUNT_STATES count_states_AVX
#define BITARRAY_AND bitarray_AND_AVX
#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX
#define COUNT_BITARRAY_AND count_bitarray_AND_AVX
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX
#define BITARRAY_AND4 bitarray_AND4_AVX
#define BITARRAY_OR bitarray_OR_AVX
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX
#elif defined (__SSE2__)
#define MALLOC_BITARRAY malloc_bitarray_SSE2
#define FREE_BITARRAY free_bitarray_SSE2
#define BITCOUNT bitcount_SSE2
#define COUNT_STATES count_states_SSE2
#define BITARRAY_AND bitarray_AND_SSE2
#define BITARRAY_LOW20_AND bitarray_low20_AND_SSE2
#define COUNT_BITARRAY_AND count_bitarray_AND_SSE2
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_SSE2
#define BITARRAY_AND4 bitarray_AND4_SSE2
#define BITARRAY_OR bitarray_OR_SSE2
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_SSE2
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_SSE2
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_SSE2
#elif defined (__MMX__)
#define MALLOC_BITARRAY malloc_bitarray_MMX
#define FREE_BITARRAY free_bitarray_MMX
#define BITCOUNT bitcount_MMX
#define COUNT_STATES count_states_MMX
#define BITARRAY_AND bitarray_AND_MMX
#define BITARRAY_LOW20_AND bitarray_low20_AND_MMX
#define COUNT_BITARRAY_AND count_bitarray_AND_MMX
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_MMX
#define BITARRAY_AND4 bitarray_AND4_MMX
#define BITARRAY_OR bitarray_OR_MMX
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_MMX
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_MMX
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_MMX
#else
#define MALLOC_BITARRAY malloc_bitarray_NOSIMD
#define FREE_BITARRAY free_bitarray_NOSIMD
#define BITCOUNT bitcount_NOSIMD
#define COUNT_STATES count_states_NOSIMD
#define BITARRAY_AND bitarray_AND_NOSIMD
#define BITARRAY_LOW20_AND bitarray_low20_AND_NOSIMD
#define COUNT_BITARRAY_AND count_bitarray_AND_NOSIMD
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_NOSIMD
#define BITARRAY_AND4 bitarray_AND4_NOSIMD
#define BITARRAY_OR bitarray_OR_NOSIMD
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_NOSIMD
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_NOSIMD
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_NOSIMD
#endif
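
// Build sketch (illustrative only; the exact compiler invocations and object file
// names are assumptions, not taken from this repository's build files): the client
// build is expected to compile this one source file several times, e.g.
//
//     gcc -c -mavx2 hardnested_bitarray_core.c -o hardnested_bitarray_core_AVX2.o
//     gcc -c -msse2 hardnested_bitarray_core.c -o hardnested_bitarray_core_SSE2.o
//     gcc -c -mno-mmx -mno-sse2 -mno-avx -mno-avx2 hardnested_bitarray_core.c \
//            -o hardnested_bitarray_core_NOSIMD.o
//
// In the -mavx2 pass the compiler predefines __AVX2__, so MALLOC_BITARRAY expands to
// malloc_bitarray_AVX2 and that object file contributes only the *_AVX2 symbols.
// The pass with all SIMD extensions disabled provides the *_NOSIMD variants plus the
// runtime dispatcher further below, so all variants can coexist in one binary.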

// typedefs and declaration of functions:
typedef uint32_t* malloc_bitarray_t(uint32_t);
malloc_bitarray_t malloc_bitarray_AVX512, malloc_bitarray_AVX2, malloc_bitarray_AVX, malloc_bitarray_SSE2, malloc_bitarray_MMX, malloc_bitarray_NOSIMD, malloc_bitarray_dispatch;
typedef void free_bitarray_t(uint32_t*);
free_bitarray_t free_bitarray_AVX512, free_bitarray_AVX2, free_bitarray_AVX, free_bitarray_SSE2, free_bitarray_MMX, free_bitarray_NOSIMD, free_bitarray_dispatch;
typedef uint32_t bitcount_t(uint32_t);
bitcount_t bitcount_AVX512, bitcount_AVX2, bitcount_AVX, bitcount_SSE2, bitcount_MMX, bitcount_NOSIMD, bitcount_dispatch;
typedef uint32_t count_states_t(uint32_t*);
count_states_t count_states_AVX512, count_states_AVX2, count_states_AVX, count_states_SSE2, count_states_MMX, count_states_NOSIMD, count_states_dispatch;
typedef void bitarray_AND_t(uint32_t[], uint32_t[]);
bitarray_AND_t bitarray_AND_AVX512, bitarray_AND_AVX2, bitarray_AND_AVX, bitarray_AND_SSE2, bitarray_AND_MMX, bitarray_AND_NOSIMD, bitarray_AND_dispatch;
typedef void bitarray_low20_AND_t(uint32_t*, uint32_t*);
bitarray_low20_AND_t bitarray_low20_AND_AVX512, bitarray_low20_AND_AVX2, bitarray_low20_AND_AVX, bitarray_low20_AND_SSE2, bitarray_low20_AND_MMX, bitarray_low20_AND_NOSIMD, bitarray_low20_AND_dispatch;
typedef uint32_t count_bitarray_AND_t(uint32_t*, uint32_t*);
count_bitarray_AND_t count_bitarray_AND_AVX512, count_bitarray_AND_AVX2, count_bitarray_AND_AVX, count_bitarray_AND_SSE2, count_bitarray_AND_MMX, count_bitarray_AND_NOSIMD, count_bitarray_AND_dispatch;
typedef uint32_t count_bitarray_low20_AND_t(uint32_t*, uint32_t*);
count_bitarray_low20_AND_t count_bitarray_low20_AND_AVX512, count_bitarray_low20_AND_AVX2, count_bitarray_low20_AND_AVX, count_bitarray_low20_AND_SSE2, count_bitarray_low20_AND_MMX, count_bitarray_low20_AND_NOSIMD, count_bitarray_low20_AND_dispatch;
typedef void bitarray_AND4_t(uint32_t*, uint32_t*, uint32_t*, uint32_t*);
bitarray_AND4_t bitarray_AND4_AVX512, bitarray_AND4_AVX2, bitarray_AND4_AVX, bitarray_AND4_SSE2, bitarray_AND4_MMX, bitarray_AND4_NOSIMD, bitarray_AND4_dispatch;
typedef void bitarray_OR_t(uint32_t[], uint32_t[]);
bitarray_OR_t bitarray_OR_AVX512, bitarray_OR_AVX2, bitarray_OR_AVX, bitarray_OR_SSE2, bitarray_OR_MMX, bitarray_OR_NOSIMD, bitarray_OR_dispatch;
typedef uint32_t count_bitarray_AND2_t(uint32_t*, uint32_t*);
count_bitarray_AND2_t count_bitarray_AND2_AVX512, count_bitarray_AND2_AVX2, count_bitarray_AND2_AVX, count_bitarray_AND2_SSE2, count_bitarray_AND2_MMX, count_bitarray_AND2_NOSIMD, count_bitarray_AND2_dispatch;
typedef uint32_t count_bitarray_AND3_t(uint32_t*, uint32_t*, uint32_t*);
count_bitarray_AND3_t count_bitarray_AND3_AVX512, count_bitarray_AND3_AVX2, count_bitarray_AND3_AVX, count_bitarray_AND3_SSE2, count_bitarray_AND3_MMX, count_bitarray_AND3_NOSIMD, count_bitarray_AND3_dispatch;
typedef uint32_t count_bitarray_AND4_t(uint32_t*, uint32_t*, uint32_t*, uint32_t*);
count_bitarray_AND4_t count_bitarray_AND4_AVX512, count_bitarray_AND4_AVX2, count_bitarray_AND4_AVX, count_bitarray_AND4_SSE2, count_bitarray_AND4_MMX, count_bitarray_AND4_NOSIMD, count_bitarray_AND4_dispatch;

// allocate an aligned bitarray of x bytes
inline uint32_t *MALLOC_BITARRAY(uint32_t x)
{
#if defined (_WIN32)
    return __builtin_assume_aligned(_aligned_malloc((x), __BIGGEST_ALIGNMENT__), __BIGGEST_ALIGNMENT__);
#elif defined (__APPLE__)
    uint32_t *allocated_memory;
    if (posix_memalign((void**)&allocated_memory, __BIGGEST_ALIGNMENT__, x)) {
        return NULL;
    } else {
        return __builtin_assume_aligned(allocated_memory, __BIGGEST_ALIGNMENT__);
    }
#else
    return __builtin_assume_aligned(memalign(__BIGGEST_ALIGNMENT__, (x)), __BIGGEST_ALIGNMENT__);
#endif
}

// release a bitarray allocated with MALLOC_BITARRAY
inline void FREE_BITARRAY(uint32_t *x)
{
#if defined (_WIN32)
    _aligned_free(x);
#else
    free(x);
#endif
}

// number of set bits in a 32 bit word
inline uint32_t BITCOUNT(uint32_t a)
{
    return __builtin_popcountl(a);
}

// total number of set bits (i.e. remaining candidate states) in a bitarray
inline uint32_t COUNT_STATES(uint32_t *A)
{
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i]);
    }
    return count;
}

// A &= B, word by word
inline void BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] &= B[i];
    }
}

// clear each 16 bit halfword of A whose corresponding halfword in B is completely zero
inline void BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);

    for (uint32_t i = 0; i < (1<<20); i++) {
        if (!b[i]) {
            a[i] = 0;
        }
    }
}

// A &= B, returning the number of set bits remaining in A
inline uint32_t COUNT_BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] &= B[i];
        count += BITCOUNT(A[i]);
    }
    return count;
}

// like BITARRAY_LOW20_AND, additionally returning the number of set bits remaining in A
inline uint32_t COUNT_BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;

    for (uint32_t i = 0; i < (1<<20); i++) {
        if (!b[i]) {
            a[i] = 0;
        }
        count += BITCOUNT(a[i]);
    }
    return count;
}

// A = B & C & D, word by word
inline void BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] = B[i] & C[i] & D[i];
    }
}

// A |= B, word by word
inline void BITARRAY_OR(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] |= B[i];
    }
}

// number of set bits in A & B (A and B are left unchanged)
inline uint32_t COUNT_BITARRAY_AND2(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i]);
    }
    return count;
}

// number of set bits in A & B & C (operands are left unchanged)
inline uint32_t COUNT_BITARRAY_AND3(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i] & C[i]);
    }
    return count;
}

// number of set bits in A & B & C & D (operands are left unchanged)
inline uint32_t COUNT_BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i] & C[i] & D[i]);
    }
    return count;
}

// The dispatch machinery below must be compiled only once: it is skipped in every
// pass that has MMX (and hence any x86 SIMD extension) enabled, so only the
// non-SIMD pass provides these symbols.
#ifndef __MMX__

// pointers to functions:
malloc_bitarray_t *malloc_bitarray_function_p = &malloc_bitarray_dispatch;
free_bitarray_t *free_bitarray_function_p = &free_bitarray_dispatch;
bitcount_t *bitcount_function_p = &bitcount_dispatch;
count_states_t *count_states_function_p = &count_states_dispatch;
bitarray_AND_t *bitarray_AND_function_p = &bitarray_AND_dispatch;
bitarray_low20_AND_t *bitarray_low20_AND_function_p = &bitarray_low20_AND_dispatch;
count_bitarray_AND_t *count_bitarray_AND_function_p = &count_bitarray_AND_dispatch;
count_bitarray_low20_AND_t *count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_dispatch;
bitarray_AND4_t *bitarray_AND4_function_p = &bitarray_AND4_dispatch;
bitarray_OR_t *bitarray_OR_function_p = &bitarray_OR_dispatch;
count_bitarray_AND2_t *count_bitarray_AND2_function_p = &count_bitarray_AND2_dispatch;
count_bitarray_AND3_t *count_bitarray_AND3_function_p = &count_bitarray_AND3_dispatch;
count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dispatch;

// determine the available instruction set at runtime and call the correct function
uint32_t *malloc_bitarray_dispatch(uint32_t x) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
    else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
    else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
    else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
    else
#endif
#endif
        malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;

    // call the most optimized function for this CPU
    return (*malloc_bitarray_function_p)(x);
}
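
// Note on the pattern repeated by every *_dispatch function below: each public
// function pointer starts out pointing at its dispatcher. The first call therefore
// probes the CPU once with __builtin_cpu_supports(), rebinds the pointer to the best
// available implementation and forwards the call; every later call through the pointer
// (or through the thin wrappers at the end of this file) skips the detection entirely.
// Illustrative trace (assuming an AVX2-capable x86_64 CPU):
//
//     uint32_t *a = malloc_bitarray(sizeof(uint32_t) * (1 << 19));
//     // 1st call: runs malloc_bitarray_dispatch(), which sets
//     //           malloc_bitarray_function_p = &malloc_bitarray_AVX2, then allocates
//     uint32_t *b = malloc_bitarray(sizeof(uint32_t) * (1 << 19));
//     // 2nd call: goes straight to malloc_bitarray_AVX2()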

void free_bitarray_dispatch(uint32_t *x) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
    else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
    else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
    else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
    else
#endif
#endif
        free_bitarray_function_p = &free_bitarray_NOSIMD;

    // call the most optimized function for this CPU
    (*free_bitarray_function_p)(x);
}

uint32_t bitcount_dispatch(uint32_t a) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
    else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
    else
#endif
#endif
        bitcount_function_p = &bitcount_NOSIMD;

    // call the most optimized function for this CPU
    return (*bitcount_function_p)(a);
}

uint32_t count_states_dispatch(uint32_t *bitarray) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
    else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
    else
#endif
#endif
        count_states_function_p = &count_states_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_states_function_p)(bitarray);
}

void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
    else
#endif
#endif
        bitarray_AND_function_p = &bitarray_AND_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_AND_function_p)(A, B);
}

void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
    else
#endif
#endif
        bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_low20_AND_function_p)(A, B);
}

uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
    else
#endif
#endif
        count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND_function_p)(A, B);
}

uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
    else
#endif
#endif
        count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_low20_AND_function_p)(A, B);
}

void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
    else
#endif
#endif
        bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_AND4_function_p)(A, B, C, D);
}

void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
    else
#endif
#endif
        bitarray_OR_function_p = &bitarray_OR_NOSIMD;

    // call the most optimized function for this CPU
    (*bitarray_OR_function_p)(A, B);
}

uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
    else
#endif
#endif
        count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND2_function_p)(A, B);
}

uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
    else
#endif
#endif
        count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND3_function_p)(A, B, C);
}

uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
#else
    if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
#endif
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
    else
#endif
#endif
        count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;

    // call the most optimized function for this CPU
    return (*count_bitarray_AND4_function_p)(A, B, C, D);
}

///////////////////////////////////////////////////////////////////////////////
// Entries to the dispatched function calls (the public API of this file;
// see the usage sketch at the end of the file)

uint32_t *malloc_bitarray(uint32_t x) {
    return (*malloc_bitarray_function_p)(x);
}

void free_bitarray(uint32_t *x) {
    (*free_bitarray_function_p)(x);
}

uint32_t bitcount(uint32_t a) {
    return (*bitcount_function_p)(a);
}

uint32_t count_states(uint32_t *bitarray) {
    return (*count_states_function_p)(bitarray);
}

void bitarray_AND(uint32_t *A, uint32_t *B) {
    (*bitarray_AND_function_p)(A, B);
}

void bitarray_low20_AND(uint32_t *A, uint32_t *B) {
    (*bitarray_low20_AND_function_p)(A, B);
}

uint32_t count_bitarray_AND(uint32_t *A, uint32_t *B) {
    return (*count_bitarray_AND_function_p)(A, B);
}

uint32_t count_bitarray_low20_AND(uint32_t *A, uint32_t *B) {
    return (*count_bitarray_low20_AND_function_p)(A, B);
}

void bitarray_AND4(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    (*bitarray_AND4_function_p)(A, B, C, D);
}

void bitarray_OR(uint32_t *A, uint32_t *B) {
    (*bitarray_OR_function_p)(A, B);
}

uint32_t count_bitarray_AND2(uint32_t *A, uint32_t *B) {
    return (*count_bitarray_AND2_function_p)(A, B);
}

uint32_t count_bitarray_AND3(uint32_t *A, uint32_t *B, uint32_t *C) {
    return (*count_bitarray_AND3_function_p)(A, B, C);
}

uint32_t count_bitarray_AND4(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    return (*count_bitarray_AND4_function_p)(A, B, C, D);
}

#endif // #ifndef __MMX__
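
// Usage sketch (illustrative only, not part of the attack code): each bitarray holds
// 2^24 candidate states, i.e. (1<<19) uint32_t words, matching the loop bounds above.
// A hypothetical caller would do something like this (error handling and the real
// nonce processing omitted; <string.h> would be needed for memset):
//
//     uint32_t *a = malloc_bitarray(sizeof(uint32_t) * (1 << 19));
//     uint32_t *b = malloc_bitarray(sizeof(uint32_t) * (1 << 19));
//     memset(a, 0xff, sizeof(uint32_t) * (1 << 19));     // all states still possible
//     // ... clear the bits of b excluded by an observed encrypted nonce ...
//     uint32_t survivors = count_bitarray_AND(a, b);     // a &= b, returns popcount of a
//     free_bitarray(a);
//     free_bitarray(b);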