From 087c8bf3303e69d79cbbfdfb8edc836587cfb820 Mon Sep 17 00:00:00 2001
From: marshmellow42 <marshmellowrf@gmail.com>
Date: Thu, 8 Jun 2017 17:07:14 -0400
Subject: [PATCH 1/1] fix compile issues on OS X 10.11

OS X 10.11 does not provide clock_gettime().
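A minimal sketch of the Mach-based fallback used for the monotonic
clock (the helper name monotonic_ns is illustrative, not part of this
patch):

    #include <mach/mach_time.h>

    static uint64_t monotonic_ns(void) {
        static mach_timebase_info_data_t tb;        // tick-to-nanosecond scale
        if (tb.denom == 0) mach_timebase_info(&tb); // query the scale once
        return mach_absolute_time() * tb.numer / tb.denom;
    }
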
clang <= 8.0.0 has a bug in __builtin_cpu_supports() that renders it
unusable; see https://llvm.org/bugs/show_bug.cgi?id=25510
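
The workaround wraps each runtime SIMD dispatcher in a preprocessor
guard so that affected Apple clang builds skip the
__builtin_cpu_supports() calls entirely and fall through to the NOSIMD
implementation. Roughly (fn and impl_* are placeholder names, not
identifiers from this patch):

    #if !defined(__APPLE__) || (__clang_major__ > 8)
        if (__builtin_cpu_supports("avx2")) fn = &impl_AVX2;
        else
    #endif
            fn = &impl_NOSIMD;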
---
 client/cmdhfmfhard.c                         |  11 +-
 client/hardnested/hardnested_bf_core.c       |  42 +++---
 client/hardnested/hardnested_bitarray_core.c | 134 +++++++++++--------
 client/util_posix.c                          | 100 +++++++++++---
 4 files changed, 195 insertions(+), 92 deletions(-)

diff --git a/client/cmdhfmfhard.c b/client/cmdhfmfhard.c
index 6cd75af5..b5eabb82 100644
--- a/client/cmdhfmfhard.c
+++ b/client/cmdhfmfhard.c
@@ -70,16 +70,19 @@ static float brute_force_per_second;
 
 
 static void get_SIMD_instruction_set(char* instruction_set) {
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) strcpy(instruction_set, "AVX512F");
 	else if (__builtin_cpu_supports("avx2")) strcpy(instruction_set, "AVX2");
-	#else 
+		#else 
 	if (__builtin_cpu_supports("avx2")) strcpy(instruction_set, "AVX2");
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) strcpy(instruction_set, "AVX");
 	else if (__builtin_cpu_supports("sse2")) strcpy(instruction_set, "SSE2");
 	else if (__builtin_cpu_supports("mmx")) strcpy(instruction_set, "MMX");
-	else strcpy(instruction_set, "unsupported");
+	else 
+	#endif
+		strcpy(instruction_set, "unsupported");
 }
 
 
diff --git a/client/hardnested/hardnested_bf_core.c b/client/hardnested/hardnested_bf_core.c
index 05246296..2388f6f5 100644
--- a/client/hardnested/hardnested_bf_core.c
+++ b/client/hardnested/hardnested_bf_core.c
@@ -550,17 +550,19 @@ bitslice_test_nonces_t *bitslice_test_nonces_function_p = &bitslice_test_nonces_
 
 // determine the available instruction set at runtime and call the correct function
 const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_first_bytes, statelist_t *p, uint32_t *keys_found, uint64_t *num_keys_tested, uint32_t nonces_to_bruteforce, uint8_t *bf_test_nonce_2nd_byte, noncelist_t *nonces) {
-#if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
-	if (__builtin_cpu_supports("avx512f")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
-	else if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
-	#else
-	if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+#if defined (__i386__) || defined (__x86_64__)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2) 
+		if (__builtin_cpu_supports("avx512f")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX512;
+		else if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+		#else
+		if (__builtin_cpu_supports("avx2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX2;
+		#endif
+		else if (__builtin_cpu_supports("avx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
+		else if (__builtin_cpu_supports("sse2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
+		else if (__builtin_cpu_supports("mmx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
+		else
 	#endif
-	else if (__builtin_cpu_supports("avx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_AVX;
-	else if (__builtin_cpu_supports("sse2")) crack_states_bitsliced_function_p = &crack_states_bitsliced_SSE2;
-	else if (__builtin_cpu_supports("mmx")) crack_states_bitsliced_function_p = &crack_states_bitsliced_MMX;
-    else
 #endif
 		crack_states_bitsliced_function_p = &crack_states_bitsliced_NOSIMD;
 
@@ -570,16 +572,18 @@ const uint64_t crack_states_bitsliced_dispatch(uint32_t cuid, uint8_t *best_firs
 
 void bitslice_test_nonces_dispatch(uint32_t nonces_to_bruteforce, uint32_t *bf_test_nonce, uint8_t *bf_test_nonce_par) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
-	if (__builtin_cpu_supports("avx512f")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
-	else if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
-	#else
-	if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+		if (__builtin_cpu_supports("avx512f")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX512;
+		else if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+		#else
+		if (__builtin_cpu_supports("avx2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX2;
+		#endif
+		else if (__builtin_cpu_supports("avx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
+		else if (__builtin_cpu_supports("sse2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
+		else if (__builtin_cpu_supports("mmx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
+		else
 	#endif
-	else if (__builtin_cpu_supports("avx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_AVX;
-	else if (__builtin_cpu_supports("sse2")) bitslice_test_nonces_function_p = &bitslice_test_nonces_SSE2;
-	else if (__builtin_cpu_supports("mmx")) bitslice_test_nonces_function_p = &bitslice_test_nonces_MMX;
-    else
 #endif
 		bitslice_test_nonces_function_p = &bitslice_test_nonces_NOSIMD;
 
diff --git a/client/hardnested/hardnested_bitarray_core.c b/client/hardnested/hardnested_bitarray_core.c
index 320c4a96..5615d006 100644
--- a/client/hardnested/hardnested_bitarray_core.c
+++ b/client/hardnested/hardnested_bitarray_core.c
@@ -319,16 +319,18 @@ count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dis
 // determine the available instruction set at runtime and call the correct function
 uint32_t *malloc_bitarray_dispatch(uint32_t x) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
 	else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
 	else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
 	else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
-    else
+	else
+	#endif
 #endif		
 		malloc_bitarray_function_p = &malloc_bitarray_NOSIMD;
 
@@ -337,17 +339,19 @@ uint32_t *malloc_bitarray_dispatch(uint32_t x) {
 }
 
 void free_bitarray_dispatch(uint32_t *x) {
-#if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined (__i386__) || defined (__x86_64__)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
 	else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
 	else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
 	else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
-    else 
+	else
+	#endif
 #endif
 		free_bitarray_function_p = &free_bitarray_NOSIMD;
 
@@ -357,16 +361,18 @@ void free_bitarray_dispatch(uint32_t *x) {
 
 uint32_t bitcount_dispatch(uint32_t a) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
-    else
+	else
+	#endif
 #endif
 		bitcount_function_p = &bitcount_NOSIMD;
 
@@ -376,16 +382,18 @@ uint32_t bitcount_dispatch(uint32_t a) {
 
 uint32_t count_states_dispatch(uint32_t *bitarray) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
-    else 
+	else
+	#endif 
 #endif
 		count_states_function_p = &count_states_NOSIMD;
 
@@ -395,16 +403,18 @@ uint32_t count_states_dispatch(uint32_t *bitarray) {
 
 void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
-    else
+	else
+	#endif
 #endif
 		bitarray_AND_function_p = &bitarray_AND_NOSIMD;
 
@@ -413,17 +423,19 @@ void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 }
 
 void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
-#if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+#if defined (__i386__) || defined (__x86_64__)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
-    else
+	else
+	#endif
 #endif
 		bitarray_low20_AND_function_p = &bitarray_low20_AND_NOSIMD;
 
@@ -433,16 +445,18 @@ void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 
 uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
-    else
+	else
+	#endif
 #endif
 		count_bitarray_AND_function_p = &count_bitarray_AND_NOSIMD;
 
@@ -452,16 +466,18 @@ uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
 
 uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
-    else
+	else
+	#endif
 #endif
 		count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_NOSIMD;
 
@@ -471,16 +487,18 @@ uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
 
 void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
-    else
+	else
+	#endif
 #endif
 		bitarray_AND4_function_p = &bitarray_AND4_NOSIMD;
 
@@ -490,16 +508,18 @@ void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D)
 
 void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
 	else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
 	else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
 	else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
-    else
+	else
+	#endif
 #endif
 		bitarray_OR_function_p = &bitarray_OR_NOSIMD;
 
@@ -509,16 +529,18 @@ void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
 
 uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
-    else
+	else
+	#endif
 #endif
 		count_bitarray_AND2_function_p = &count_bitarray_AND2_NOSIMD;
 
@@ -528,16 +550,18 @@ uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
 
 uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
-    else
+	else
+	#endif
 #endif
 		count_bitarray_AND3_function_p = &count_bitarray_AND3_NOSIMD;
 
@@ -547,16 +571,18 @@ uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
 
 uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
 #if defined (__i386__) || defined (__x86_64__)	
-	#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
+	#if !defined(__APPLE__) || (defined(__APPLE__) && (__clang_major__ > 8))
+		#if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
 	if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
 	else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
-	#else
+		#else
 	if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
-	#endif
+		#endif
 	else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
 	else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
 	else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
-    else
+	else
+	#endif
 #endif
 		count_bitarray_AND4_function_p = &count_bitarray_AND4_NOSIMD;
 
diff --git a/client/util_posix.c b/client/util_posix.c
index 3e61b674..382f6a60 100644
--- a/client/util_posix.c
+++ b/client/util_posix.c
@@ -24,10 +24,10 @@
 #include <errno.h>
 
 static void nsleep(uint64_t n) {
-  struct timespec timeout;
-  timeout.tv_sec = n/1000000000;
-  timeout.tv_nsec = n%1000000000;
-  while (nanosleep(&timeout, &timeout) && errno == EINTR);
+	struct timespec timeout;
+	timeout.tv_sec = n/1000000000;
+	timeout.tv_nsec = n%1000000000;
+	while (nanosleep(&timeout, &timeout) && errno == EINTR);
 }
 
 void msleep(uint32_t n) {
@@ -35,21 +35,91 @@ void msleep(uint32_t n) {
 }
 #endif // _WIN32
 
+#ifdef __MACH__
+
+	#define CLOCK_MONOTONIC (1)
+	#define CLOCK_REALTIME (2)
+
+	#include <sys/time.h>
+	#include <mach/clock.h>
+	#include <mach/mach.h>
+	#include <mach/mach_time.h>
+
+	/* clock_gettime is not implemented on OSX prior to 10.12 */
+	int _civet_clock_gettime(int clk_id, struct timespec *t);
+
+	int _civet_clock_gettime(int clk_id, struct timespec *t)
+	{
+		memset(t, 0, sizeof(*t));
+		if (clk_id == CLOCK_REALTIME) {
+			struct timeval now;
+			int rv = gettimeofday(&now, NULL);
+			if (rv) {
+				return rv;
+			}
+			t->tv_sec = now.tv_sec;
+			t->tv_nsec = now.tv_usec * 1000;
+			return 0;
+
+		} else if (clk_id == CLOCK_MONOTONIC) {
+			static uint64_t clock_start_time = 0;
+			static mach_timebase_info_data_t timebase_ifo = {0, 0};
+
+			uint64_t now = mach_absolute_time();
+
+			if (clock_start_time == 0) {
+				kern_return_t mach_status = mach_timebase_info(&timebase_ifo);
+				// appease "unused variable" warning for release builds
+				(void)mach_status;
+				clock_start_time = now;
+			}
+
+			now = (uint64_t)((double)(now - clock_start_time)
+			                 * (double)timebase_ifo.numer
+			                 / (double)timebase_ifo.denom);
+
+			t->tv_sec = now / 1000000000;
+			t->tv_nsec = now % 1000000000;
+			return 0;
+		}
+		return -1; // EINVAL - Clock ID is unknown
+	}
+
+	/* if clock_gettime is declared, then __CLOCK_AVAILABILITY will be defined */
+	#ifdef __CLOCK_AVAILABILITY
+		/* If we compiled with Mac OSX 10.12 or later, then clock_gettime will be declared
+		 * but it may be NULL at runtime. So we need to check before using it. */
+		int _civet_safe_clock_gettime(int clk_id, struct timespec *t);
+
+		int _civet_safe_clock_gettime(int clk_id, struct timespec *t) {
+			if( clock_gettime ) {
+				return clock_gettime(clk_id, t);
+			}
+			return _civet_clock_gettime(clk_id, t);
+		}
+		#define clock_gettime _civet_safe_clock_gettime
+	#else
+		#define clock_gettime _civet_clock_gettime
+	#endif
+
+#endif
+
+
 // a milliseconds timer for performance measurement
 uint64_t msclock() {
 #if defined(_WIN32)
-    #include <sys/types.h>
-    
-    // WORKAROUND FOR MinGW (some versions - use if normal code does not compile)
-    // It has no _ftime_s and needs explicit inclusion of timeb.h
-    #include <sys/timeb.h>
-    struct _timeb t;
-    _ftime(&t);
-    return 1000 * t.time + t.millitm;
-    
-    // NORMAL CODE (use _ftime_s)
+	#include <sys/types.h>
+
+	// WORKAROUND FOR MinGW (some versions - use if normal code does not compile)
+	// It has no _ftime_s and needs explicit inclusion of timeb.h
+	#include <sys/timeb.h>
+	struct _timeb t;
+	_ftime(&t);
+	return 1000 * t.time + t.millitm;
+
+// NORMAL CODE (use _ftime_s)
 	//struct _timeb t;
-    //if (_ftime_s(&t)) {
+	//if (_ftime_s(&t)) {
 	//	return 0;
 	//} else {
 	//	return 1000 * t.time + t.millitm;
-- 
2.39.5