]> git.zerfleddert.de Git - proxmark3-svn/blame - client/hardnested/hardnested_bitarray_core.c
coverity scan bug fixes
[proxmark3-svn] / client / hardnested / hardnested_bitarray_core.c
CommitLineData
c48c4d78 1//-----------------------------------------------------------------------------
2// Copyright (C) 2016, 2017 by piwi
3//
4// This code is licensed to you under the terms of the GNU GPL, version 2 or,
5// at your option, any later version. See the LICENSE.txt file for the text of
// the license.
7//-----------------------------------------------------------------------------
8// Implements a card only attack based on crypto text (encrypted nonces
9// received during a nested authentication) only. Unlike other card only
10// attacks this doesn't rely on implementation errors but only on the
11// inherent weaknesses of the crypto1 cypher. Described in
12// Carlo Meijer, Roel Verdult, "Ciphertext-only Cryptanalysis on Hardened
13// Mifare Classic Cards" in Proceedings of the 22nd ACM SIGSAC Conference on
14// Computer and Communications Security, 2015
15//-----------------------------------------------------------------------------
16// some helper functions which can benefit from SIMD instructions or other special instructions
17//
18
19#include "hardnested_bitarray_core.h"
20
21#include <stdint.h>
22#include <stdio.h>
23#include <stdlib.h>
24#include <malloc.h>
25
26// #include <stdint.h>
27// #include <stdbool.h>
28// #include <stdlib.h>
29// #include <stdio.h>
30// #include <malloc.h>
31// #include <string.h>
32// #include "crapto1/crapto1.h"
33// #include "parity.h"
34
35
36// this needs to be compiled several times for each instruction set.
37// For each instruction set, define a dedicated function name:
38#if defined (__AVX512F__)
39#define MALLOC_BITARRAY malloc_bitarray_AVX512
40#define FREE_BITARRAY free_bitarray_AVX512
41#define BITCOUNT bitcount_AVX512
42#define COUNT_STATES count_states_AVX512
43#define BITARRAY_AND bitarray_AND_AVX512
44#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX512
45#define COUNT_BITARRAY_AND count_bitarray_AND_AVX512
46#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX512
47#define BITARRAY_AND4 bitarray_AND4_AVX512
48#define BITARRAY_OR bitarray_OR_AVX512
49#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX512
50#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX512
51#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX512
52#elif defined (__AVX2__)
53#define MALLOC_BITARRAY malloc_bitarray_AVX2
54#define FREE_BITARRAY free_bitarray_AVX2
55#define BITCOUNT bitcount_AVX2
56#define COUNT_STATES count_states_AVX2
57#define BITARRAY_AND bitarray_AND_AVX2
58#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX2
59#define COUNT_BITARRAY_AND count_bitarray_AND_AVX2
60#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX2
61#define BITARRAY_AND4 bitarray_AND4_AVX2
62#define BITARRAY_OR bitarray_OR_AVX2
63#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX2
64#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX2
65#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX2
66#elif defined (__AVX__)
67#define MALLOC_BITARRAY malloc_bitarray_AVX
68#define FREE_BITARRAY free_bitarray_AVX
69#define BITCOUNT bitcount_AVX
70#define COUNT_STATES count_states_AVX
71#define BITARRAY_AND bitarray_AND_AVX
72#define BITARRAY_LOW20_AND bitarray_low20_AND_AVX
73#define COUNT_BITARRAY_AND count_bitarray_AND_AVX
74#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_AVX
75#define BITARRAY_AND4 bitarray_AND4_AVX
76#define BITARRAY_OR bitarray_OR_AVX
77#define COUNT_BITARRAY_AND2 count_bitarray_AND2_AVX
78#define COUNT_BITARRAY_AND3 count_bitarray_AND3_AVX
79#define COUNT_BITARRAY_AND4 count_bitarray_AND4_AVX
80#elif defined (__SSE2__)
81#define MALLOC_BITARRAY malloc_bitarray_SSE2
82#define FREE_BITARRAY free_bitarray_SSE2
83#define BITCOUNT bitcount_SSE2
84#define COUNT_STATES count_states_SSE2
85#define BITARRAY_AND bitarray_AND_SSE2
86#define BITARRAY_LOW20_AND bitarray_low20_AND_SSE2
87#define COUNT_BITARRAY_AND count_bitarray_AND_SSE2
88#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_SSE2
89#define BITARRAY_AND4 bitarray_AND4_SSE2
90#define BITARRAY_OR bitarray_OR_SSE2
91#define COUNT_BITARRAY_AND2 count_bitarray_AND2_SSE2
92#define COUNT_BITARRAY_AND3 count_bitarray_AND3_SSE2
93#define COUNT_BITARRAY_AND4 count_bitarray_AND4_SSE2
94#elif defined (__MMX__)
95#define MALLOC_BITARRAY malloc_bitarray_MMX
96#define FREE_BITARRAY free_bitarray_MMX
97#define BITCOUNT bitcount_MMX
98#define COUNT_STATES count_states_MMX
99#define BITARRAY_AND bitarray_AND_MMX
100#define BITARRAY_LOW20_AND bitarray_low20_AND_MMX
101#define COUNT_BITARRAY_AND count_bitarray_AND_MMX
102#define COUNT_BITARRAY_LOW20_AND count_bitarray_low20_AND_MMX
103#define BITARRAY_AND4 bitarray_AND4_MMX
104#define BITARRAY_OR bitarray_OR_MMX
105#define COUNT_BITARRAY_AND2 count_bitarray_AND2_MMX
106#define COUNT_BITARRAY_AND3 count_bitarray_AND3_MMX
107#define COUNT_BITARRAY_AND4 count_bitarray_AND4_MMX
108#endif
109
110
// typedefs and declaration of functions:
// For each bitarray operation there is one typedef naming the function type,
// followed by declarations of the per-instruction-set implementations (this
// file is compiled once per ISA with the matching -m flags, see the macro
// block above) plus a *_dispatch variant that performs runtime CPU detection.
typedef uint32_t* malloc_bitarray_t(uint32_t);
malloc_bitarray_t malloc_bitarray_AVX512, malloc_bitarray_AVX2, malloc_bitarray_AVX, malloc_bitarray_SSE2, malloc_bitarray_MMX, malloc_bitarray_dispatch;
typedef void free_bitarray_t(uint32_t*);
free_bitarray_t free_bitarray_AVX512, free_bitarray_AVX2, free_bitarray_AVX, free_bitarray_SSE2, free_bitarray_MMX, free_bitarray_dispatch;
typedef uint32_t bitcount_t(uint32_t);
bitcount_t bitcount_AVX512, bitcount_AVX2, bitcount_AVX, bitcount_SSE2, bitcount_MMX, bitcount_dispatch;
typedef uint32_t count_states_t(uint32_t*);
count_states_t count_states_AVX512, count_states_AVX2, count_states_AVX, count_states_SSE2, count_states_MMX, count_states_dispatch;
typedef void bitarray_AND_t(uint32_t[], uint32_t[]);
bitarray_AND_t bitarray_AND_AVX512, bitarray_AND_AVX2, bitarray_AND_AVX, bitarray_AND_SSE2, bitarray_AND_MMX, bitarray_AND_dispatch;
typedef void bitarray_low20_AND_t(uint32_t*, uint32_t*);
bitarray_low20_AND_t bitarray_low20_AND_AVX512, bitarray_low20_AND_AVX2, bitarray_low20_AND_AVX, bitarray_low20_AND_SSE2, bitarray_low20_AND_MMX, bitarray_low20_AND_dispatch;
typedef uint32_t count_bitarray_AND_t(uint32_t*, uint32_t*);
count_bitarray_AND_t count_bitarray_AND_AVX512, count_bitarray_AND_AVX2, count_bitarray_AND_AVX, count_bitarray_AND_SSE2, count_bitarray_AND_MMX, count_bitarray_AND_dispatch;
typedef uint32_t count_bitarray_low20_AND_t(uint32_t*, uint32_t*);
count_bitarray_low20_AND_t count_bitarray_low20_AND_AVX512, count_bitarray_low20_AND_AVX2, count_bitarray_low20_AND_AVX, count_bitarray_low20_AND_SSE2, count_bitarray_low20_AND_MMX, count_bitarray_low20_AND_dispatch;
typedef void bitarray_AND4_t(uint32_t*, uint32_t*, uint32_t*, uint32_t*);
bitarray_AND4_t bitarray_AND4_AVX512, bitarray_AND4_AVX2, bitarray_AND4_AVX, bitarray_AND4_SSE2, bitarray_AND4_MMX, bitarray_AND4_dispatch;
typedef void bitarray_OR_t(uint32_t[], uint32_t[]);
bitarray_OR_t bitarray_OR_AVX512, bitarray_OR_AVX2, bitarray_OR_AVX, bitarray_OR_SSE2, bitarray_OR_MMX, bitarray_OR_dispatch;
typedef uint32_t count_bitarray_AND2_t(uint32_t*, uint32_t*);
count_bitarray_AND2_t count_bitarray_AND2_AVX512, count_bitarray_AND2_AVX2, count_bitarray_AND2_AVX, count_bitarray_AND2_SSE2, count_bitarray_AND2_MMX, count_bitarray_AND2_dispatch;
typedef uint32_t count_bitarray_AND3_t(uint32_t*, uint32_t*, uint32_t*);
count_bitarray_AND3_t count_bitarray_AND3_AVX512, count_bitarray_AND3_AVX2, count_bitarray_AND3_AVX, count_bitarray_AND3_SSE2, count_bitarray_AND3_MMX, count_bitarray_AND3_dispatch;
typedef uint32_t count_bitarray_AND4_t(uint32_t*, uint32_t*, uint32_t*, uint32_t*);
count_bitarray_AND4_t count_bitarray_AND4_AVX512, count_bitarray_AND4_AVX2, count_bitarray_AND4_AVX, count_bitarray_AND4_SSE2, count_bitarray_AND4_MMX, count_bitarray_AND4_dispatch;
138
139
// Allocate a bit array of x bytes (x appears to be a byte count, passed
// straight to the allocator -- TODO confirm at call sites), aligned to
// __BIGGEST_ALIGNMENT__ so the SIMD loads/stores in the kernels below are
// legal. Returns NULL on allocation failure; callers must check.
inline uint32_t *MALLOC_BITARRAY(uint32_t x)
{
#ifdef _WIN32
	uint32_t *bitarray = _aligned_malloc((x), __BIGGEST_ALIGNMENT__);
#else
	uint32_t *bitarray = memalign(__BIGGEST_ALIGNMENT__, (x));
#endif
	// Don't assert alignment on a failed allocation: promising the compiler
	// that NULL is aligned invites miscompiled NULL checks downstream.
	if (bitarray == NULL) {
		return NULL;
	}
	return __builtin_assume_aligned(bitarray, __BIGGEST_ALIGNMENT__);
}
148
149
// Release a bit array obtained from MALLOC_BITARRAY. Windows aligned
// allocations must be released with _aligned_free; memalign'd memory on
// other platforms is released with plain free.
inline void FREE_BITARRAY(uint32_t *x)
{
#ifdef _WIN32
	_aligned_free(x);
#else
	free(x);
#endif
}
158
159
// Population count of a single 32 bit word.
inline uint32_t BITCOUNT(uint32_t a)
{
	// __builtin_popcount operates on unsigned int, matching the 32 bit
	// argument exactly; the previous __builtin_popcountl promoted to
	// unsigned long first (same result, wrong-width builtin).
	return __builtin_popcount(a);
}
164
165
// Count the 1-bits (remaining candidate states) in the whole bit array.
// The array is always (1<<19) 32-bit words, i.e. 1<<24 bits. The plain
// index loop is intentional: each per-ISA build relies on the compiler
// auto-vectorizing exactly this shape.
inline uint32_t COUNT_STATES(uint32_t *A)
{
	uint32_t count = 0;
	for (uint32_t i = 0; i < (1<<19); i++) {
		count += BITCOUNT(A[i]);
	}
	return count;
}
174
175
// In-place intersection: A[i] &= B[i] over all (1<<19) words.
// restrict + __builtin_assume_aligned let the compiler emit aligned
// SIMD loads/stores for this loop; keep the loop shape as-is.
inline void BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	for (uint32_t i = 0; i < (1<<19); i++) {
		A[i] &= B[i];
	}
}
184
185
// Intersection at 16-bit granularity: both arrays are reinterpreted as
// (1<<20) uint16_t halfwords, and a halfword of A is zeroed wherever the
// corresponding halfword of B is entirely zero. The uint32_t* -> uint16_t*
// reinterpretation is deliberate (arrays come from MALLOC_BITARRAY with
// __BIGGEST_ALIGNMENT__); presumably the halfword index corresponds to the
// "low 20" state bits -- confirm against the callers in hardnested.
inline void BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
	uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);

	for (uint32_t i = 0; i < (1<<20); i++) {
		if (!b[i]) {
			a[i] = 0;
		}
	}
}
197
198
// Destructive AND with count: A[i] &= B[i] over all (1<<19) words, returning
// the number of bits set in the result. Fusing the AND and the popcount in
// one pass saves a full re-read of A.
inline uint32_t COUNT_BITARRAY_AND(uint32_t *restrict A, uint32_t *restrict B)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	uint32_t count = 0;
	for (uint32_t i = 0; i < (1<<19); i++) {
		A[i] &= B[i];
		count += BITCOUNT(A[i]);
	}
	return count;
}
210
211
// Same 16-bit-granularity intersection as BITARRAY_LOW20_AND (zero a
// halfword of A wherever B's halfword is all zero), but additionally
// returns the number of bits remaining set in A after the operation.
inline uint32_t COUNT_BITARRAY_LOW20_AND(uint32_t *restrict A, uint32_t *restrict B)
{
	uint16_t *a = (uint16_t *)__builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	uint16_t *b = (uint16_t *)__builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	uint32_t count = 0;

	for (uint32_t i = 0; i < (1<<20); i++) {
		if (!b[i]) {
			a[i] = 0;
		}
		count += BITCOUNT(a[i]);
	}
	return count;
}
226
227
// Three-way intersection into a destination: A[i] = B[i] & C[i] & D[i]
// over all (1<<19) words. Note A is overwritten, not ANDed with itself.
inline void BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
	D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
	for (uint32_t i = 0; i < (1<<19); i++) {
		A[i] = B[i] & C[i] & D[i];
	}
}
238
239
// In-place union: A[i] |= B[i] over all (1<<19) words.
inline void BITARRAY_OR(uint32_t *restrict A, uint32_t *restrict B)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	for (uint32_t i = 0; i < (1<<19); i++) {
		A[i] |= B[i];
	}
}
248
249
// Non-destructive count of the intersection of two arrays: returns
// popcount(A & B) over all (1<<19) words without modifying either input.
inline uint32_t COUNT_BITARRAY_AND2(uint32_t *restrict A, uint32_t *restrict B)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	uint32_t count = 0;
	for (uint32_t i = 0; i < (1<<19); i++) {
		count += BITCOUNT(A[i] & B[i]);
	}
	return count;
}
260
261
// Non-destructive count of the intersection of three arrays: returns
// popcount(A & B & C) over all (1<<19) words; inputs are untouched.
inline uint32_t COUNT_BITARRAY_AND3(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
	uint32_t count = 0;
	for (uint32_t i = 0; i < (1<<19); i++) {
		count += BITCOUNT(A[i] & B[i] & C[i]);
	}
	return count;
}
273
274
// Non-destructive count of the intersection of four arrays: returns
// popcount(A & B & C & D) over all (1<<19) words; inputs are untouched.
inline uint32_t COUNT_BITARRAY_AND4(uint32_t *restrict A, uint32_t *restrict B, uint32_t *restrict C, uint32_t *restrict D)
{
	A = __builtin_assume_aligned(A, __BIGGEST_ALIGNMENT__);
	B = __builtin_assume_aligned(B, __BIGGEST_ALIGNMENT__);
	C = __builtin_assume_aligned(C, __BIGGEST_ALIGNMENT__);
	D = __builtin_assume_aligned(D, __BIGGEST_ALIGNMENT__);
	uint32_t count = 0;
	for (uint32_t i = 0; i < (1<<19); i++) {
		count += BITCOUNT(A[i] & B[i] & C[i] & D[i]);
	}
	return count;
}
287
288#ifndef __MMX__
289
// pointers to functions:
// Each pointer starts out targeting the *_dispatch variant below. On first
// use, the dispatcher detects the CPU's instruction set, rebinds the pointer
// to the best available implementation, and forwards the call -- so CPU
// detection happens at most once per operation.
malloc_bitarray_t *malloc_bitarray_function_p = &malloc_bitarray_dispatch;
free_bitarray_t *free_bitarray_function_p = &free_bitarray_dispatch;
bitcount_t *bitcount_function_p = &bitcount_dispatch;
count_states_t *count_states_function_p = &count_states_dispatch;
bitarray_AND_t *bitarray_AND_function_p = &bitarray_AND_dispatch;
bitarray_low20_AND_t *bitarray_low20_AND_function_p = &bitarray_low20_AND_dispatch;
count_bitarray_AND_t *count_bitarray_AND_function_p = &count_bitarray_AND_dispatch;
count_bitarray_low20_AND_t *count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_dispatch;
bitarray_AND4_t *bitarray_AND4_function_p = &bitarray_AND4_dispatch;
bitarray_OR_t *bitarray_OR_function_p = &bitarray_OR_dispatch;
count_bitarray_AND2_t *count_bitarray_AND2_function_p = &count_bitarray_AND2_dispatch;
count_bitarray_AND3_t *count_bitarray_AND3_function_p = &count_bitarray_AND3_dispatch;
count_bitarray_AND4_t *count_bitarray_AND4_function_p = &count_bitarray_AND4_dispatch;
304
305// determine the available instruction set at runtime and call the correct function
306uint32_t *malloc_bitarray_dispatch(uint32_t x) {
e5baf1ef 307 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 308 if (__builtin_cpu_supports("avx512f")) malloc_bitarray_function_p = &malloc_bitarray_AVX512;
309 else if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
f950ce1c 310 #else
311 if (__builtin_cpu_supports("avx2")) malloc_bitarray_function_p = &malloc_bitarray_AVX2;
312 #endif
c48c4d78 313 else if (__builtin_cpu_supports("avx")) malloc_bitarray_function_p = &malloc_bitarray_AVX;
314 else if (__builtin_cpu_supports("sse2")) malloc_bitarray_function_p = &malloc_bitarray_SSE2;
315 else if (__builtin_cpu_supports("mmx")) malloc_bitarray_function_p = &malloc_bitarray_MMX;
316 else {
317 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
318 exit(5);
319 }
320 // call the most optimized function for this CPU
321 return (*malloc_bitarray_function_p)(x);
322}
323
324void free_bitarray_dispatch(uint32_t *x) {
e5baf1ef 325 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 326 if (__builtin_cpu_supports("avx512f")) free_bitarray_function_p = &free_bitarray_AVX512;
327 else if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
f950ce1c 328 #else
329 if (__builtin_cpu_supports("avx2")) free_bitarray_function_p = &free_bitarray_AVX2;
330 #endif
c48c4d78 331 else if (__builtin_cpu_supports("avx")) free_bitarray_function_p = &free_bitarray_AVX;
332 else if (__builtin_cpu_supports("sse2")) free_bitarray_function_p = &free_bitarray_SSE2;
333 else if (__builtin_cpu_supports("mmx")) free_bitarray_function_p = &free_bitarray_MMX;
334 else {
335 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
336 exit(5);
337 }
338 // call the most optimized function for this CPU
339 (*free_bitarray_function_p)(x);
340}
341
342uint32_t bitcount_dispatch(uint32_t a) {
e5baf1ef 343 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 344 if (__builtin_cpu_supports("avx512f")) bitcount_function_p = &bitcount_AVX512;
345 else if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
f950ce1c 346 #else
347 if (__builtin_cpu_supports("avx2")) bitcount_function_p = &bitcount_AVX2;
348 #endif
c48c4d78 349 else if (__builtin_cpu_supports("avx")) bitcount_function_p = &bitcount_AVX;
350 else if (__builtin_cpu_supports("sse2")) bitcount_function_p = &bitcount_SSE2;
351 else if (__builtin_cpu_supports("mmx")) bitcount_function_p = &bitcount_MMX;
352 else {
353 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
354 exit(5);
355 }
356 // call the most optimized function for this CPU
357 return (*bitcount_function_p)(a);
358}
359
360uint32_t count_states_dispatch(uint32_t *bitarray) {
e5baf1ef 361 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 362 if (__builtin_cpu_supports("avx512f")) count_states_function_p = &count_states_AVX512;
363 else if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
f950ce1c 364 #else
365 if (__builtin_cpu_supports("avx2")) count_states_function_p = &count_states_AVX2;
366 #endif
c48c4d78 367 else if (__builtin_cpu_supports("avx")) count_states_function_p = &count_states_AVX;
368 else if (__builtin_cpu_supports("sse2")) count_states_function_p = &count_states_SSE2;
369 else if (__builtin_cpu_supports("mmx")) count_states_function_p = &count_states_MMX;
370 else {
371 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
372 exit(5);
373 }
374 // call the most optimized function for this CPU
375 return (*count_states_function_p)(bitarray);
376}
377
378void bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
e5baf1ef 379 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 380 if (__builtin_cpu_supports("avx512f")) bitarray_AND_function_p = &bitarray_AND_AVX512;
381 else if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
f950ce1c 382 #else
383 if (__builtin_cpu_supports("avx2")) bitarray_AND_function_p = &bitarray_AND_AVX2;
384 #endif
c48c4d78 385 else if (__builtin_cpu_supports("avx")) bitarray_AND_function_p = &bitarray_AND_AVX;
386 else if (__builtin_cpu_supports("sse2")) bitarray_AND_function_p = &bitarray_AND_SSE2;
387 else if (__builtin_cpu_supports("mmx")) bitarray_AND_function_p = &bitarray_AND_MMX;
388 else {
389 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
390 exit(5);
391 }
392 // call the most optimized function for this CPU
393 (*bitarray_AND_function_p)(A,B);
394}
395
396void bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
e5baf1ef 397 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 398 if (__builtin_cpu_supports("avx512f")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX512;
399 else if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
f950ce1c 400 #else
401 if (__builtin_cpu_supports("avx2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX2;
402 #endif
c48c4d78 403 else if (__builtin_cpu_supports("avx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_AVX;
404 else if (__builtin_cpu_supports("sse2")) bitarray_low20_AND_function_p = &bitarray_low20_AND_SSE2;
405 else if (__builtin_cpu_supports("mmx")) bitarray_low20_AND_function_p = &bitarray_low20_AND_MMX;
406 else {
407 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
408 exit(5);
409 }
410 // call the most optimized function for this CPU
411 (*bitarray_low20_AND_function_p)(A, B);
412}
413
414uint32_t count_bitarray_AND_dispatch(uint32_t *A, uint32_t *B) {
e5baf1ef 415 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 416 if (__builtin_cpu_supports("avx512f")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX512;
417 else if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
f950ce1c 418 #else
419 if (__builtin_cpu_supports("avx2")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX2;
420 #endif
c48c4d78 421 else if (__builtin_cpu_supports("avx")) count_bitarray_AND_function_p = &count_bitarray_AND_AVX;
422 else if (__builtin_cpu_supports("sse2")) count_bitarray_AND_function_p = &count_bitarray_AND_SSE2;
423 else if (__builtin_cpu_supports("mmx")) count_bitarray_AND_function_p = &count_bitarray_AND_MMX;
424 else {
425 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
426 exit(5);
427 }
428 // call the most optimized function for this CPU
429 return (*count_bitarray_AND_function_p)(A, B);
430}
431
432uint32_t count_bitarray_low20_AND_dispatch(uint32_t *A, uint32_t *B) {
e5baf1ef 433 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 434 if (__builtin_cpu_supports("avx512f")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX512;
435 else if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
f950ce1c 436 #else
437 if (__builtin_cpu_supports("avx2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX2;
438 #endif
c48c4d78 439 else if (__builtin_cpu_supports("avx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_AVX;
440 else if (__builtin_cpu_supports("sse2")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_SSE2;
441 else if (__builtin_cpu_supports("mmx")) count_bitarray_low20_AND_function_p = &count_bitarray_low20_AND_MMX;
442 else {
443 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
444 exit(5);
445 }
446 // call the most optimized function for this CPU
447 return (*count_bitarray_low20_AND_function_p)(A, B);
448}
449
450void bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
e5baf1ef 451 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 452 if (__builtin_cpu_supports("avx512f")) bitarray_AND4_function_p = &bitarray_AND4_AVX512;
453 else if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
f950ce1c 454 #else
455 if (__builtin_cpu_supports("avx2")) bitarray_AND4_function_p = &bitarray_AND4_AVX2;
456 #endif
c48c4d78 457 else if (__builtin_cpu_supports("avx")) bitarray_AND4_function_p = &bitarray_AND4_AVX;
458 else if (__builtin_cpu_supports("sse2")) bitarray_AND4_function_p = &bitarray_AND4_SSE2;
459 else if (__builtin_cpu_supports("mmx")) bitarray_AND4_function_p = &bitarray_AND4_MMX;
460 else {
461 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
462 exit(5);
463 }
464 // call the most optimized function for this CPU
465 (*bitarray_AND4_function_p)(A, B, C, D);
466}
467
468void bitarray_OR_dispatch(uint32_t *A, uint32_t *B) {
e5baf1ef 469 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 470 if (__builtin_cpu_supports("avx512f")) bitarray_OR_function_p = &bitarray_OR_AVX512;
471 else if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
f950ce1c 472 #else
473 if (__builtin_cpu_supports("avx2")) bitarray_OR_function_p = &bitarray_OR_AVX2;
474 #endif
c48c4d78 475 else if (__builtin_cpu_supports("avx")) bitarray_OR_function_p = &bitarray_OR_AVX;
476 else if (__builtin_cpu_supports("sse2")) bitarray_OR_function_p = &bitarray_OR_SSE2;
477 else if (__builtin_cpu_supports("mmx")) bitarray_OR_function_p = &bitarray_OR_MMX;
478 else {
479 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
480 exit(5);
481 }
482 // call the most optimized function for this CPU
483 (*bitarray_OR_function_p)(A,B);
484}
485
486uint32_t count_bitarray_AND2_dispatch(uint32_t *A, uint32_t *B) {
e5baf1ef 487 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 488 if (__builtin_cpu_supports("avx512f")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX512;
489 else if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
f950ce1c 490 #else
491 if (__builtin_cpu_supports("avx2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX2;
492 #endif
c48c4d78 493 else if (__builtin_cpu_supports("avx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_AVX;
494 else if (__builtin_cpu_supports("sse2")) count_bitarray_AND2_function_p = &count_bitarray_AND2_SSE2;
495 else if (__builtin_cpu_supports("mmx")) count_bitarray_AND2_function_p = &count_bitarray_AND2_MMX;
496 else {
497 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
498 exit(5);
499 }
500 // call the most optimized function for this CPU
501 return (*count_bitarray_AND2_function_p)(A, B);
502}
503
504uint32_t count_bitarray_AND3_dispatch(uint32_t *A, uint32_t *B, uint32_t *C) {
e5baf1ef 505 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 506 if (__builtin_cpu_supports("avx512f")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX512;
507 else if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
f950ce1c 508 #else
509 if (__builtin_cpu_supports("avx2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX2;
510 #endif
c48c4d78 511 else if (__builtin_cpu_supports("avx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_AVX;
512 else if (__builtin_cpu_supports("sse2")) count_bitarray_AND3_function_p = &count_bitarray_AND3_SSE2;
513 else if (__builtin_cpu_supports("mmx")) count_bitarray_AND3_function_p = &count_bitarray_AND3_MMX;
514 else {
515 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
516 exit(5);
517 }
518 // call the most optimized function for this CPU
519 return (*count_bitarray_AND3_function_p)(A, B, C);
520}
521
522uint32_t count_bitarray_AND4_dispatch(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
e5baf1ef 523 #if (__GNUC__ >= 5) && (__GNUC__ > 5 || __GNUC_MINOR__ > 2)
c48c4d78 524 if (__builtin_cpu_supports("avx512f")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX512;
525 else if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
f950ce1c 526 #else
527 if (__builtin_cpu_supports("avx2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX2;
528 #endif
c48c4d78 529 else if (__builtin_cpu_supports("avx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_AVX;
530 else if (__builtin_cpu_supports("sse2")) count_bitarray_AND4_function_p = &count_bitarray_AND4_SSE2;
531 else if (__builtin_cpu_supports("mmx")) count_bitarray_AND4_function_p = &count_bitarray_AND4_MMX;
532 else {
533 printf("\nFatal: you need at least a CPU with MMX instruction set support. Aborting...\n");
534 exit(5);
535 }
536 // call the most optimized function for this CPU
537 return (*count_bitarray_AND4_function_p)(A, B, C, D);
538}
539
540
///////////////////////////////////////////////
542// Entries to dispatched function calls
543
// Public entry point: routed through the currently selected implementation.
uint32_t *malloc_bitarray(uint32_t x) {
	return malloc_bitarray_function_p(x);
}
547
// Public entry point: routed through the currently selected implementation.
void free_bitarray(uint32_t *x) {
	free_bitarray_function_p(x);
}
551
// Public entry point: routed through the currently selected implementation.
uint32_t bitcount(uint32_t a) {
	return bitcount_function_p(a);
}
555
// Public entry point: routed through the currently selected implementation.
uint32_t count_states(uint32_t *bitarray) {
	return count_states_function_p(bitarray);
}
559
// Public entry point: routed through the currently selected implementation.
void bitarray_AND(uint32_t *A, uint32_t *B) {
	bitarray_AND_function_p(A, B);
}
563
// Public entry point: routed through the currently selected implementation.
void bitarray_low20_AND(uint32_t *A, uint32_t *B) {
	bitarray_low20_AND_function_p(A, B);
}
567
// Public entry point: routed through the currently selected implementation.
uint32_t count_bitarray_AND(uint32_t *A, uint32_t *B) {
	return count_bitarray_AND_function_p(A, B);
}
571
// Public entry point: routed through the currently selected implementation.
uint32_t count_bitarray_low20_AND(uint32_t *A, uint32_t *B) {
	return count_bitarray_low20_AND_function_p(A, B);
}
575
// Public entry point: routed through the currently selected implementation.
void bitarray_AND4(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
	bitarray_AND4_function_p(A, B, C, D);
}
579
// Public entry point: routed through the currently selected implementation.
void bitarray_OR(uint32_t *A, uint32_t *B) {
	bitarray_OR_function_p(A, B);
}
583
// Public entry point: routed through the currently selected implementation.
uint32_t count_bitarray_AND2(uint32_t *A, uint32_t *B) {
	return count_bitarray_AND2_function_p(A, B);
}
587
// Public entry point: routed through the currently selected implementation.
uint32_t count_bitarray_AND3(uint32_t *A, uint32_t *B, uint32_t *C) {
	return count_bitarray_AND3_function_p(A, B, C);
}
591
// Public entry point: routed through the currently selected implementation.
uint32_t count_bitarray_AND4(uint32_t *A, uint32_t *B, uint32_t *C, uint32_t *D) {
	return count_bitarray_AND4_function_p(A, B, C, D);
}
595
596#endif
597
Impressum, Datenschutz