upgrading 'hf mfu' (#830)

diff --git a/client/hardnested/hardnested_bf_core.c b/client/hardnested/hardnested_bf_core.c
index c22a9a08bcd2fb3d1a98a200d5aedef3854e9c96..d02209e9da7d0888912374af13b7c0bd025d5a40 100644
--- a/client/hardnested/hardnested_bf_core.c
+++ b/client/hardnested/hardnested_bf_core.c
@@ -52,8 +52,10 @@ THE SOFTWARE.
 #include <stdint.h>
 #include <stdbool.h>
 #include <stdlib.h>
-#include <stdio.h>
+#ifndef __APPLE__
 #include <malloc.h>
+#endif
+#include <stdio.h>
 #include <string.h>
 #include "crapto1/crapto1.h"
 #include "parity.h"
@@ -141,9 +143,19 @@ bitslice_test_nonces_t bitslice_test_nonces_MMX;
 bitslice_test_nonces_t bitslice_test_nonces_NOSIMD;
 bitslice_test_nonces_t bitslice_test_nonces_dispatch;
 
-#ifdef _WIN32
+#if defined (_WIN32)
 #define malloc_bitslice(x) __builtin_assume_aligned(_aligned_malloc((x), MAX_BITSLICES/8), MAX_BITSLICES/8)
 #define free_bitslice(x) _aligned_free(x)
+#elif defined (__APPLE__)
+static void *malloc_bitslice(size_t x) {
+       char *allocated_memory;
+       if (posix_memalign((void**)&allocated_memory, MAX_BITSLICES/8, x)) {
+               return NULL;
+       } else {
+               return __builtin_assume_aligned(allocated_memory, MAX_BITSLICES/8);
+       }
+}
+#define free_bitslice(x) free(x)
 #else
 #define malloc_bitslice(x) memalign(MAX_BITSLICES/8, (x))
 #define free_bitslice(x) free(x)
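Context for the hunk above (not part of the patch): <malloc.h> does not exist on macOS, so the include is guarded and the aligned allocator is chosen per platform: _aligned_malloc/_aligned_free on Windows, posix_memalign/free on macOS, memalign/free elsewhere. A minimal, self-contained sketch of the same three-way strategy follows; ALIGNMENT and the *_example names are illustrative stand-ins for MAX_BITSLICES/8 and malloc_bitslice()/free_bitslice().

    #include <stddef.h>
    #include <stdlib.h>
    #if defined(_WIN32) || !defined(__APPLE__)
    #include <malloc.h>   /* _aligned_malloc/_aligned_free on Windows, memalign on glibc */
    #endif

    #define ALIGNMENT 32  /* stand-in for MAX_BITSLICES/8 */

    /* illustrative helper mirroring malloc_bitslice() above */
    static void *aligned_alloc_example(size_t size) {
    #if defined(_WIN32)
        return _aligned_malloc(size, ALIGNMENT);
    #elif defined(__APPLE__)
        void *p = NULL;
        /* posix_memalign returns 0 on success; p stays NULL on failure */
        if (posix_memalign(&p, ALIGNMENT, size) != 0)
            return NULL;
        return p;
    #else
        return memalign(ALIGNMENT, size);
    #endif
    }

    /* illustrative helper mirroring free_bitslice() above */
    static void aligned_free_example(void *p) {
    #if defined(_WIN32)
        _aligned_free(p);   /* memory from _aligned_malloc must not go to free() */
    #else
        free(p);
    #endif
    }
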
@@ -330,7 +342,7 @@ const uint64_t CRACK_STATES_BITSLICED(uint32_t cuid, uint8_t *best_first_bytes,
                        // }
 #endif
             // add the even state bits
-                       const bitslice_t const *restrict bitsliced_even_state = bitsliced_even_states[block_idx];
+                       const bitslice_t *restrict bitsliced_even_state = bitsliced_even_states[block_idx];
                        for(uint32_t state_idx = 1; state_idx < STATE_SIZE; state_idx += 2) {
                                state_p[state_idx] = bitsliced_even_state[state_idx/2];
                        }
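Note on the one-line change above: in `const bitslice_t const *restrict`, both `const` qualifiers apply to the pointed-to type, so the second one is redundant and newer compilers warn about a duplicate qualifier. The fix keeps a pointer to const data. A short illustration with a placeholder type:

    typedef unsigned long bitslice_example_t;   /* placeholder for bitslice_t */

    const bitslice_example_t *p1;               /* pointer to const data (what the patch keeps) */
    bitslice_example_t *const p2 = 0;           /* const pointer to mutable data (a different thing) */
    /* "const T const *" is simply "const T *" with a redundant const */
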
@@ -536,40 +548,105 @@ out:
 crack_states_bitsliced_t *crack_states_bitsliced_function_p = &crack_states_bitsliced_dispatch;
 bitslice_test_nonces_t *bitslice_test_nonces_function_p = &bitslice_test_nonces_dispatch;
 
+static SIMDExecInstr intSIMDInstr = SIMD_AUTO;
+
+void SetSIMDInstr(SIMDExecInstr instr) {
+       intSIMDInstr = instr;
+       
+       crack_states_bitsliced_function_p = &crack_states_bitsliced_dispatch;
+       bitslice_test_nonces_function_p = &bitslice_test_nonces_dispatch;
+}
+
+SIMDExecInstr GetSIMDInstr() {
+       SIMDExecInstr instr = SIMD_NONE;
+       
+#if defined (__i386__) || defined (__x86_64__)
+       #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+               #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
+               if (__builtin_cpu_supports("avx512f")) instr = SIMD_AVX512;
+               else if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
+               #else
+               if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
+               #endif
+               else if (__builtin_cpu_supports("avx")) instr = SIMD_AVX;
+               else if (__builtin_cpu_supports("sse2")) instr = SIMD_SSE2;
+               else if (__builtin_cpu_supports("mmx")) instr = SIMD_MMX;
+               else
+       #endif
+#endif
+               instr = SIMD_NONE;
+               
+       return instr;
+}
+
+SIMDExecInstr GetSIMDInstrAuto() {
+       SIMDExecInstr instr = intSIMDInstr;
+       if (instr == SIMD_AUTO)
+               return GetSIMDInstr();
+       
+       return instr;
+}
+
 // determine the available instruction set at runtime and call the correct function
 const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
-#if defined (__i386__) || defined (__x86_64__) 
-       #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
-       if (__builtin_cpu_supports("avx512f")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
-       else if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
-       #else
-       if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
-       #endif
-       else if (__builtin_cpu_supports("avx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
-       else if (__builtin_cpu_supports("sse2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
-       else if (__builtin_cpu_supports("mmx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
-    else
+       switch(GetSIMDInstrAuto()) {
+#if defined (__i386__) || defined (__x86_64__)
+#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
+               case SIMD_AVX512:
+                       crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
+                       break;
+#endif
+               case SIMD_AVX2:
+                       crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+                       break;
+               case SIMD_AVX:
+                       crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
+                       break;
+               case SIMD_SSE2:
+                       crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
+                       break;
+               case SIMD_MMX:
+                       crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
+                       break;
+#endif
 #endif
-               crack_states_bitsliced_function_p = &crack_states_bitsliced_NOSIMD;
+               default:
+                       crack_states_bitsliced_function_p = &crack_states_bitsliced_NOSIMD;
+                       break;
+       }       
 
     // call the most optimized function for this CPU
     return (*crack_states_bitsliced_function_p)(cuid, best_first_bytes, p, keys_found, num_keys_tested, nonces_to_bruteforce, bf_test_nonce_2nd_byte, nonces);
 }
 
 void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonce, uint8_t *bf_test_nonce_par) {
-#if defined (__i386__) || defined (__x86_64__) 
-       #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
-       if (__builtin_cpu_supports("avx512f")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
-       else if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
-       #else
-       if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
-       #endif
-       else if (__builtin_cpu_supports("avx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
-       else if (__builtin_cpu_supports("sse2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
-       else if (__builtin_cpu_supports("mmx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
-    else
+       switch(GetSIMDInstrAuto()) {
+#if defined (__i386__) || defined (__x86_64__)
+#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
+               case SIMD_AVX512:
+                       bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
+                       break;
+#endif
+               case SIMD_AVX2:
+                       bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+                       break;
+               case SIMD_AVX:
+                       bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
+                       break;
+               case SIMD_SSE2:
+                       bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
+                       break;
+               case SIMD_MMX:
+                       bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
+                       break;
+#endif
 #endif
-               bitslice_test_nonces_function_p = &bitslice_test_nonces_NOSIMD;
+               default:
+                       bitslice_test_nonces_function_p = &bitslice_test_nonces_NOSIMD;
+                       break;
+       }       
 
     // call the most optimized function for this CPU
     (*bitslice_test_nonces_function_p)(nonces_to_bruteforce, bf_test_nonce, bf_test_nonce_par);
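
Taken together, the new SetSIMDInstr()/GetSIMDInstrAuto() pair lets a caller pin the implementation instead of relying purely on runtime CPU detection: SetSIMDInstr() stores the requested level and resets both function pointers to the *_dispatch stubs, so the next call re-resolves through the switch above. A hedged usage sketch; the caller function, its parameter, and the header name are assumptions for illustration, while SIMD_NONE and SIMD_AUTO come from this diff.

    #include <stdbool.h>
    #include "hardnested_bf_core.h"   /* assumed header declaring SetSIMDInstr() and SIMDExecInstr */

    /* illustrative caller, not part of the patch */
    static void select_simd_example(bool force_scalar) {
        if (force_scalar) {
            /* pin the implementation: dispatch will take the NOSIMD (default) case */
            SetSIMDInstr(SIMD_NONE);
        } else {
            /* SIMD_AUTO (the default) makes GetSIMDInstrAuto() fall back to
               GetSIMDInstr(), i.e. runtime __builtin_cpu_supports() detection */
            SetSIMDInstr(SIMD_AUTO);
        }
        /* the next call to crack_states_bitsliced_dispatch() or
           bitslice_test_nonces_dispatch() re-selects the function pointer */
    }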