//-----------------------------------------------------------------------------
// Copyright (C) 2016, 2017 by piwi
//
// This code is licensed to you under the terms of the GNU GPL, version 2 or,
// at your option, any later version. See the LICENSE.txt file for the text of
// the license.
//-----------------------------------------------------------------------------
// Implements a card-only attack based on ciphertext (encrypted nonces
// received during a nested authentication) only. Unlike other card-only
// attacks this doesn't rely on implementation errors but only on the
// inherent weaknesses of the Crypto1 cipher. Described in
// Carlo Meijer, Roel Verdult, "Ciphertext-only Cryptanalysis on Hardened
// Mifare Classic Cards" in Proceedings of the 22nd ACM SIGSAC Conference on
// Computer and Communications Security, 2015
//-----------------------------------------------------------------------------
// Some helper functions which can benefit from SIMD or other special instructions.
//

#include "hardnested_bitarray_core.h"

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <malloc.h>

// #include <stdint.h>
// #include <stdbool.h>
// #include <stdlib.h>
// #include <stdio.h>
// #include <malloc.h>
// #include <string.h>
// #include "crapto1/crapto1.h"
// #include "parity.h"


// this needs to be compiled several times, once for each instruction set.
// For each instruction set, define a dedicated function name:
#if defined (__AVX512F__)
#define MALLOC_BITARRAY malloc_bitarray_AVX512
#define FREE_BITARRAY free_bitarray_AVX512
#define BITCOUNT bitcount_AVX512
#define COUNT_STATES count_states_AVX512
#define BITARRAY_AND bitarray_AND_AVX512
#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX512
#define COUNT_BITARRAY_AND count_bitarray_AND_AVX512
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX512
#define BITARRAY_AND4 bitarray_AND4_AVX512
#define BITARRAY_OR bitarray_OR_AVX512
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX512
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX512
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX512
#elif defined (__AVX2__)
#define MALLOC_BITARRAY malloc_bitarray_AVX2
#define FREE_BITARRAY free_bitarray_AVX2
#define BITCOUNT bitcount_AVX2
#define COUNT_STATES count_states_AVX2
#define BITARRAY_AND bitarray_AND_AVX2
#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX2
#define COUNT_BITARRAY_AND count_bitarray_AND_AVX2
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX2
#define BITARRAY_AND4 bitarray_AND4_AVX2
#define BITARRAY_OR bitarray_OR_AVX2
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX2
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX2
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX2
#elif defined (__AVX__)
#define MALLOC_BITARRAY malloc_bitarray_AVX
#define FREE_BITARRAY free_bitarray_AVX
#define BITCOUNT bitcount_AVX
#define COUNT_STATES count_states_AVX
#define BITARRAY_AND bitarray_AND_AVX
#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX
#define COUNT_BITARRAY_AND count_bitarray_AND_AVX
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX
#define BITARRAY_AND4 bitarray_AND4_AVX
#define BITARRAY_OR bitarray_OR_AVX
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX
#elif defined (__SSE2__)
#define MALLOC_BITARRAY malloc_bitarray_SSE2
#define FREE_BITARRAY free_bitarray_SSE2
#define BITCOUNT bitcount_SSE2
#define COUNT_STATES count_states_SSE2
#define BITARRAY_AND bitarray_AND_SSE2
#define BITARRAY_LOW20_AND bitarray_low20_AND_SSE2
#define COUNT_BITARRAY_AND count_bitarray_AND_SSE2
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_SSE2
#define BITARRAY_AND4 bitarray_AND4_SSE2
#define BITARRAY_OR bitarray_OR_SSE2
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_SSE2
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_SSE2
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_SSE2
#elif defined (__MMX__)
#define MALLOC_BITARRAY malloc_bitarray_MMX
#define FREE_BITARRAY free_bitarray_MMX
#define BITCOUNT bitcount_MMX
#define COUNT_STATES count_states_MMX
#define BITARRAY_AND bitarray_AND_MMX
#define BITARRAY_LOW20_AND bitarray_low20_AND_MMX
#define COUNT_BITARRAY_AND count_bitarray_AND_MMX
#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_MMX
#define BITARRAY_AND4 bitarray_AND4_MMX
#define BITARRAY_OR bitarray_OR_MMX
#define COUNT_BITARRAY_AND2 count_bitarray_AND2_MMX
#define COUNT_BITARRAY_AND3 count_bitarray_AND3_MMX
#define COUNT_BITARRAY_AND4 count_bitarray_AND4_MMX
#endif
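
// Illustrative note (an assumption about the build, not stated in this file): the build
// compiles this file several times, once per instruction set, e.g. with -mavx2 so that
// __AVX2__ is defined. In that translation unit a generic definition below, such as
//
//     inline uint32_t *MALLOC_BITARRAY(uint32_t x) { ... }
//
// is compiled as
//
//     inline uint32_t *malloc_bitarray_AVX2(uint32_t x) { ... }
//
// so each unit contributes one complete, suffixed set of these helpers, and the runtime
// dispatchers further down can select the best set for the executing CPU.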


// typedefs and declaration of functions:
typedef uint32_t* malloc_bitarray_t(uint32_t);
malloc_bitarray_t malloc_bitarray_AVX512, malloc_bitarray_AVX2, malloc_bitarray_AVX, malloc_bitarray_SSE2, malloc_bitarray_MMX, malloc_bitarray_dispatch;
typedef void free_bitarray_t(uint32_t*);
free_bitarray_t free_bitarray_AVX512, free_bitarray_AVX2, free_bitarray_AVX, free_bitarray_SSE2, free_bitarray_MMX, free_bitarray_dispatch;
typedef uint32_t bitcount_t(uint32_t);
bitcount_t bitcount_AVX512, bitcount_AVX2, bitcount_AVX, bitcount_SSE2, bitcount_MMX, bitcount_dispatch;
typedef uint32_t count_states_t(uint32_t*);
count_states_t count_states_AVX512, count_states_AVX2, count_states_AVX, count_states_SSE2, count_states_MMX, count_states_dispatch;
typedef void bitarray_AND_t(uint32_t[], uint32_t[]);
bitarray_AND_t bitarray_AND_AVX512, bitarray_AND_AVX2, bitarray_AND_AVX, bitarray_AND_SSE2, bitarray_AND_MMX, bitarray_AND_dispatch;
typedef void bitarray_low20_AND_t(uint32_t*, uint32_t*);
bitarray_low20_AND_t bitarray_low20_AND_AVX512, bitarray_low20_AND_AVX2, bitarray_low20_AND_AVX, bitarray_low20_AND_SSE2, bitarray_low20_AND_MMX, bitarray_low20_AND_dispatch;
typedef uint32_t count_bitarray_AND_t(uint32_t*, uint32_t*);
count_bitarray_AND_t count_bitarray_AND_AVX512, count_bitarray_AND_AVX2, count_bitarray_AND_AVX, count_bitarray_AND_SSE2, count_bitarray_AND_MMX, count_bitarray_AND_dispatch;
typedef uint32_t count_bitarray_low20_AND_t(uint32_t*, uint32_t*);
count_bitarray_low20_AND_t count_bitarray_low20_AND_AVX512, count_bitarray_low20_AND_AVX2, count_bitarray_low20_AND_AVX, count_bitarray_low20_AND_SSE2, count_bitarray_low20_AND_MMX, count_bitarray_low20_AND_dispatch;
typedef void bitarray_AND4_t(uint32_t*, uint32_t*, uint32_t*, uint32_t*);
bitarray_AND4_t bitarray_AND4_AVX512, bitarray_AND4_AVX2, bitarray_AND4_AVX, bitarray_AND4_SSE2, bitarray_AND4_MMX, bitarray_AND4_dispatch;
typedef void bitarray_OR_t(uint32_t[], uint32_t[]);
bitarray_OR_t bitarray_OR_AVX512, bitarray_OR_AVX2, bitarray_OR_AVX, bitarray_OR_SSE2, bitarray_OR_MMX, bitarray_OR_dispatch;
typedef uint32_t count_bitarray_AND2_t(uint32_t*, uint32_t*);
count_bitarray_AND2_t count_bitarray_AND2_AVX512, count_bitarray_AND2_AVX2, count_bitarray_AND2_AVX, count_bitarray_AND2_SSE2, count_bitarray_AND2_MMX, count_bitarray_AND2_dispatch;
typedef uint32_t count_bitarray_AND3_t(uint32_t*, uint32_t*, uint32_t*);
count_bitarray_AND3_t count_bitarray_AND3_AVX512, count_bitarray_AND3_AVX2, count_bitarray_AND3_AVX, count_bitarray_AND3_SSE2, count_bitarray_AND3_MMX, count_bitarray_AND3_dispatch;
typedef uint32_t count_bitarray_AND4_t(uint32_t*, uint32_t*, uint32_t*, uint32_t*);
count_bitarray_AND4_t count_bitarray_AND4_AVX512, count_bitarray_AND4_AVX2, count_bitarray_AND4_AVX, count_bitarray_AND4_SSE2, count_bitarray_AND4_MMX, count_bitarray_AND4_dispatch;


inline uint32_t *MALLOC_BITARRAY(uint32_t x)
{
#ifdef _WIN32
    return __builtin_assume_aligned(_aligned_malloc((x), __BIGGEST_ALIGNMENT__), __BIGGEST_ALIGNMENT__);
#else
    return __builtin_assume_aligned(memalign(__BIGGEST_ALIGNMENT__, (x)), __BIGGEST_ALIGNMENT__);
#endif
}


inline void FREE_BITARRAY(uint32_t *x)
{
#ifdef _WIN32
    _aligned_free(x);
#else
    free(x);
#endif
}


inline uint32_t BITCOUNT(uint32_t a)
{
    return __builtin_popcountl(a);
}


inline uint32_t COUNT_STATES(uint32_t *A)
{
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i]);
    }
    return count;
}


inline void BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] &= B[i];
    }
}


inline void BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);

    for (uint32_t i = 0; i < (1<<20); i++) {
        if (!b[i]) {
            a[i] = 0;
        }
    }
}


inline uint32_t COUNT_BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] &= B[i];
        count += BITCOUNT(A[i]);
    }
    return count;
}


inline uint32_t COUNT_BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
    uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;

    for (uint32_t i = 0; i < (1<<20); i++) {
        if (!b[i]) {
            a[i] = 0;
        }
        count += BITCOUNT(a[i]);
    }
    return count;
}


inline void BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] = B[i] & C[i] & D[i];
    }
}


inline void BITARRAY_OR(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    for (uint32_t i = 0; i < (1<<19); i++) {
        A[i] |= B[i];
    }
}


inline uint32_t COUNT_BITARRAY_AND2(uint32_t *restrict A, uint32_t *restrict B)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i]);
    }
    return count;
}


inline uint32_t COUNT_BITARRAY_AND3(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i] & C[i]);
    }
    return count;
}


inline uint32_t COUNT_BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
    A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
    B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
    C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
    D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
    uint32_t count = 0;
    for (uint32_t i = 0; i < (1<<19); i++) {
        count += BITCOUNT(A[i] & B[i] & C[i] & D[i]);
    }
    return count;
}

#ifndef __MMX__

// pointers to functions:
malloc_bitarray_t *malloc_bitarray_function_p = &malloc_bitarray_dispatch;
free_bitarray_t *free_bitarray_function_p = &free_bitarray_dispatch;
bitcount_t *bitcount_function_p = &bitcount_dispatch;
count_states_t *count_states_function_p = &count_states_dispatch;
bitarray_AND_t *bitarray_AND_function_p = &bitarray_AND_dispatch;
bitarray_low20_AND_t *bitarray_low20_AND_function_p = &bitarray_low20_AND_dispatch;
count_bitarray_AND_t *count_bitarray_AND_function_p = &count_bitarray_AND_dispatch;
count_bitarray_low20_AND_t *count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_dispatch;
bitarray_AND4_t *bitarray_AND4_function_p = &bitarray_AND4_dispatch;
bitarray_OR_t *bitarray_OR_function_p = &bitarray_OR_dispatch;
count_bitarray_AND2_t *count_bitarray_AND2_function_p = &count_bitarray_AND2_dispatch;
count_bitarray_AND3_t *count_bitarray_AND3_function_p = &count_bitarray_AND3_dispatch;
count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dispatch;
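
// The function pointers above start out pointing at the *_dispatch variants below.
// On its first call, a dispatcher detects the CPU's instruction set, repoints the
// corresponding pointer at the best suffixed implementation, and forwards the call;
// every later call through the pointer then skips the detection. A minimal sketch of
// the same pattern (hypothetical names, not part of this file):
//
//     static uint32_t work_plain(uint32_t x);                  // fallback version
//     static uint32_t work_avx2(uint32_t x);                   // SIMD version
//     static uint32_t work_dispatch(uint32_t x);                // one-time selection
//     static uint32_t (*work_p)(uint32_t) = work_dispatch;
//
//     static uint32_t work_dispatch(uint32_t x) {
//         work_p = __builtin_cpu_supports("avx2") ? work_avx2 : work_plain;
//         return (*work_p)(x);                                  // later calls go direct
//     }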

// determine the available instruction set at runtime and call the correct function
uint32_t *malloc_bitarray_dispatch(uint32_t x) {
    if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
    else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
    else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
    else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
    else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*malloc_bitarray_function_p)(x);
}

void free_bitarray_dispatch(uint32_t *x) {
    if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
    else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
    else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
    else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
    else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    (*free_bitarray_function_p)(x);
}

uint32_t bitcount_dispatch(uint32_t a) {
    if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
    else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
    else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*bitcount_function_p)(a);
}

uint32_t count_states_dispatch(uint32_t *bitarray) {
    if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
    else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
    else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*count_states_function_p)(bitarray);
}

void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
    if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
    else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    (*bitarray_AND_function_p)(A, B);
}

void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
    if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
    else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    (*bitarray_low20_AND_function_p)(A, B);
}

uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*count_bitarray_AND_function_p)(A, B);
}

uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
    if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
    else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*count_bitarray_low20_AND_function_p)(A, B);
}

void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
    else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    (*bitarray_AND4_function_p)(A, B, C, D);
}

void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
    if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
    else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
    else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
    else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
    else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    (*bitarray_OR_function_p)(A, B);
}

uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*count_bitarray_AND2_function_p)(A, B);
}

uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*count_bitarray_AND3_function_p)(A, B, C);
}

uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
    else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
    else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
    else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
    else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
    else {
        printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
        exit(5);
    }
    // call the most optimized function for this CPU
    return (*count_bitarray_AND4_function_p)(A, B, C, D);
}


///////////////////////////////////////////////
// Entries to dispatched function calls

uint32_t *malloc_bitarray(uint32_t x) {
    return (*malloc_bitarray_function_p)(x);
}

void free_bitarray(uint32_t *x) {
    (*free_bitarray_function_p)(x);
}

uint32_t bitcount(uint32_t a) {
    return (*bitcount_function_p)(a);
}

uint32_t count_states(uint32_t *bitarray) {
    return (*count_states_function_p)(bitarray);
}

void bitarray_AND(uint32_t *A, uint32_t *B) {
    (*bitarray_AND_function_p)(A, B);
}

void bitarray_low20_AND(uint32_t *A, uint32_t *B) {
    (*bitarray_low20_AND_function_p)(A, B);
}

uint32_t count_bitarray_AND(uint32_t *A, uint32_t *B) {
    return (*count_bitarray_AND_function_p)(A, B);
}

uint32_t count_bitarray_low20_AND(uint32_t *A, uint32_t *B) {
    return (*count_bitarray_low20_AND_function_p)(A, B);
}

void bitarray_AND4(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    (*bitarray_AND4_function_p)(A, B, C, D);
}

void bitarray_OR(uint32_t *A, uint32_t *B) {
    (*bitarray_OR_function_p)(A, B);
}

uint32_t count_bitarray_AND2(uint32_t *A, uint32_t *B) {
    return (*count_bitarray_AND2_function_p)(A, B);
}

uint32_t count_bitarray_AND3(uint32_t *A, uint32_t *B, uint32_t *C) {
    return (*count_bitarray_AND3_function_p)(A, B, C);
}

uint32_t count_bitarray_AND4(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
    return (*count_bitarray_AND4_function_p)(A, B, C, D);
}

#endif
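
// Example usage of the dispatched entry points above (an illustrative sketch only, kept
// inside a comment so it is not part of the build; the caller below is hypothetical):
//
//     #include "hardnested_bitarray_core.h"
//
//     void example(void) {
//         // one bitarray holds 2^24 bits, stored as (1 << 19) uint32_t words
//         uint32_t *a = malloc_bitarray(sizeof(uint32_t) * (1 << 19));
//         uint32_t *b = malloc_bitarray(sizeof(uint32_t) * (1 << 19));
//         // ... fill a and b with candidate state bits ...
//         uint32_t in_both = count_bitarray_AND2(a, b);  // count common bits, a and b unchanged
//         bitarray_AND(a, b);                            // a &= b, in place
//         uint32_t remaining = count_states(a);          // popcount of the whole array
//         (void)in_both; (void)remaining;
//         free_bitarray(a);
//         free_bitarray(b);
//     }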