// Function pointers through which callers invoke the bitsliced brute-force
// routines. They start out bound to the *_dispatch stubs below, which pick
// the best SIMD implementation and rebind these pointers before delegating,
// so later calls skip the dispatch step.
crack_states_bitsliced_t *crack_states_bitsliced_function_p = &crack_states_bitsliced_dispatch;
bitslice_test_nonces_t *bitslice_test_nonces_function_p = &bitslice_test_nonces_dispatch;
-// determine the available instruction set at runtime and call the correct function
-const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
+static SIMDExecInstr intSIMDInstr = SIMD_AUTO;
+
+void SetSIMDInstr(SIMDExecInstr instr) {
+ intSIMDInstr = instr;
+
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_dispatch;
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_dispatch;
+}
+
+SIMDExecInstr GetSIMDInstr() {
+ SIMDExecInstr instr = SIMD_NONE;
+
#if defined (__i386__) || defined (__x86_64__)
#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
- if (__builtin_cpu_supports("avx512f")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
- else if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+ if (__builtin_cpu_supports("avx512f")) instr = SIMD_AVX512;
+ else if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
#else
- if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+ if (__builtin_cpu_supports("avx2")) instr = SIMD_AVX2;
#endif
- else if (__builtin_cpu_supports("avx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
- else if (__builtin_cpu_supports("sse2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
- else if (__builtin_cpu_supports("mmx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
+ else if (__builtin_cpu_supports("avx")) instr = SIMD_AVX;
+ else if (__builtin_cpu_supports("sse2")) instr = SIMD_SSE2;
+ else if (__builtin_cpu_supports("mmx")) instr = SIMD_MMX;
else
#endif
#endif
- crack_states_bitsliced_function_p = &crack_states_bitsliced_NOSIMD;
+ instr = SIMD_NONE;
+
+ return instr;
+}
+
+SIMDExecInstr GetSIMDInstrAuto() {
+ SIMDExecInstr instr = intSIMDInstr;
+ if (instr == SIMD_AUTO)
+ return GetSIMDInstr();
+
+ return instr;
+}
+
+// determine the available instruction set at runtime and call the correct function
+const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
+ switch(GetSIMDInstrAuto()) {
+#if defined (__i386__) || defined (__x86_64__)
+#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+ case SIMD_AVX512:
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
+ break;
+#endif
+ case SIMD_AVX2:
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+ break;
+ case SIMD_AVX:
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
+ break;
+ case SIMD_SSE2:
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
+ break;
+ case SIMD_MMX:
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
+ break;
+#endif
+#endif
+ default:
+ crack_states_bitsliced_function_p = &crack_states_bitsliced_NOSIMD;
+ break;
+ }
// call the most optimized function for this CPU
return (*crack_states_bitsliced_function_p)(cuid, best_first_bytes, p, keys_found, num_keys_tested, nonces_to_bruteforce, bf_test_nonce_2nd_byte, nonces);
}
void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonce, uint8_t *bf_test_nonce_par) {
-#if defined (__i386__) || defined (__x86_64__)
- #if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
- #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
- if (__builtin_cpu_supports("avx512f")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
- else if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
- #else
- if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
- #endif
- else if (__builtin_cpu_supports("avx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
- else if (__builtin_cpu_supports("sse2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
- else if (__builtin_cpu_supports("mmx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
- else
- #endif
+ switch(GetSIMDInstrAuto()) {
+#if defined (__i386__) || defined (__x86_64__)
+#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8 || __clang_major__ == 8 && __clang_minor__ >= 1))
+#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+ case SIMD_AVX512:
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
+ break;
+#endif
+ case SIMD_AVX2:
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+ break;
+ case SIMD_AVX:
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
+ break;
+ case SIMD_SSE2:
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
+ break;
+ case SIMD_MMX:
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
+ break;
+#endif
#endif
- bitslice_test_nonces_function_p = &bitslice_test_nonces_NOSIMD;
+ default:
+ bitslice_test_nonces_function_p = &bitslice_test_nonces_NOSIMD;
+ break;
+ }
// call the most optimized function for this CPU
(*bitslice_test_nonces_function_p)(nonces_to_bruteforce, bf_test_nonce, bf_test_nonce_par);