Mercurial > dropbear
comparison src/headers/tomcrypt_macros.h @ 191:1c15b283127b libtomcrypt-orig
Import of libtomcrypt 1.02 with manual path rename rearrangement etc
author | Matt Johnston <matt@ucc.asn.au> |
---|---|
date | Fri, 06 May 2005 13:23:02 +0000 |
parents | |
children | 39d5d58461d6 |
comparison
equal
deleted
inserted
replaced
143:5d99163f7e32 | 191:1c15b283127b |
---|---|
/* Fix for MSVC ...evil!  MSVC spells 64-bit constants/types its own way. */
#ifdef _MSC_VER
   #define CONST64(n) n ## ui64
   typedef unsigned __int64 ulong64;
#else
   #define CONST64(n) n ## ULL
   typedef unsigned long long ulong64;
#endif

/* this is the "32-bit at least" data type
 * Re-define it to suit your platform but it must be at least 32-bits.
 * On x86_64 a plain unsigned is 32 bits; elsewhere fall back to
 * unsigned long (which may be wider than 32 bits). */
#if defined(__x86_64__)
   typedef unsigned ulong32;
#else
   typedef unsigned long ulong32;
#endif

/* ---- HELPER MACROS ---- */
#ifdef ENDIAN_NEUTRAL

/* Endian-neutral 32/64-bit load/store macros.
 * x is the word value, y is an unsigned char buffer.
 * ...L = little-endian byte order in the buffer, ...H = big-endian.
 * Everything is done one byte at a time, so these work on any host
 * endianness (at some speed cost). */

#define STORE32L(x, y)                                                                     \
     { (y)[3] = (unsigned char)(((x)>>24)&255); (y)[2] = (unsigned char)(((x)>>16)&255);   \
       (y)[1] = (unsigned char)(((x)>>8)&255); (y)[0] = (unsigned char)((x)&255); }

#define LOAD32L(x, y)                            \
     { x = ((unsigned long)((y)[3] & 255)<<24) | \
           ((unsigned long)((y)[2] & 255)<<16) | \
           ((unsigned long)((y)[1] & 255)<<8)  | \
           ((unsigned long)((y)[0] & 255)); }

#define STORE64L(x, y)                                                                     \
     { (y)[7] = (unsigned char)(((x)>>56)&255); (y)[6] = (unsigned char)(((x)>>48)&255);   \
       (y)[5] = (unsigned char)(((x)>>40)&255); (y)[4] = (unsigned char)(((x)>>32)&255);   \
       (y)[3] = (unsigned char)(((x)>>24)&255); (y)[2] = (unsigned char)(((x)>>16)&255);   \
       (y)[1] = (unsigned char)(((x)>>8)&255); (y)[0] = (unsigned char)((x)&255); }

#define LOAD64L(x, y)                                                         \
     { x = (((ulong64)((y)[7] & 255))<<56)|(((ulong64)((y)[6] & 255))<<48)|   \
           (((ulong64)((y)[5] & 255))<<40)|(((ulong64)((y)[4] & 255))<<32)|   \
           (((ulong64)((y)[3] & 255))<<24)|(((ulong64)((y)[2] & 255))<<16)|   \
           (((ulong64)((y)[1] & 255))<<8)|(((ulong64)((y)[0] & 255))); }

#define STORE32H(x, y)                                                                     \
     { (y)[0] = (unsigned char)(((x)>>24)&255); (y)[1] = (unsigned char)(((x)>>16)&255);   \
       (y)[2] = (unsigned char)(((x)>>8)&255); (y)[3] = (unsigned char)((x)&255); }

#define LOAD32H(x, y)                            \
     { x = ((unsigned long)((y)[0] & 255)<<24) | \
           ((unsigned long)((y)[1] & 255)<<16) | \
           ((unsigned long)((y)[2] & 255)<<8)  | \
           ((unsigned long)((y)[3] & 255)); }

#define STORE64H(x, y)                                                                     \
     { (y)[0] = (unsigned char)(((x)>>56)&255); (y)[1] = (unsigned char)(((x)>>48)&255);   \
       (y)[2] = (unsigned char)(((x)>>40)&255); (y)[3] = (unsigned char)(((x)>>32)&255);   \
       (y)[4] = (unsigned char)(((x)>>24)&255); (y)[5] = (unsigned char)(((x)>>16)&255);   \
       (y)[6] = (unsigned char)(((x)>>8)&255); (y)[7] = (unsigned char)((x)&255); }

#define LOAD64H(x, y)                                                          \
     { x = (((ulong64)((y)[0] & 255))<<56)|(((ulong64)((y)[1] & 255))<<48) |   \
           (((ulong64)((y)[2] & 255))<<40)|(((ulong64)((y)[3] & 255))<<32) |   \
           (((ulong64)((y)[4] & 255))<<24)|(((ulong64)((y)[5] & 255))<<16) |   \
           (((ulong64)((y)[6] & 255))<<8)|(((ulong64)((y)[7] & 255))); }

#endif /* ENDIAN_NEUTRAL */

#ifdef ENDIAN_LITTLE

/* Little-endian host.  Native-order (...L) macros can memcpy; the
 * byte-swapping (...H) macros use the x86 bswap instruction where
 * available, otherwise plain byte moves.
 *
 * NOTE(review): the inline asm below uses the __asm__/__volatile__
 * spellings so it also builds under strict ISO modes (-std=c11), where
 * the plain `asm' keyword is unavailable.  A "memory" clobber is
 * declared because each asm reads or writes memory at (y) that is not
 * expressed in the operand constraints. */
#if !defined(LTC_NO_BSWAP) && (defined(INTEL_CC) || (defined(__GNUC__) && (defined(__DJGPP__) || defined(__CYGWIN__) || defined(__MINGW32__) || defined(__i386__) || defined(__x86_64__))))

#define STORE32H(x, y)      \
__asm__ __volatile__ (      \
   "bswapl %0     \n\t"     \
   "movl   %0,(%2)\n\t"     \
   "bswapl %0     \n\t"     \
   :"=r"(x):"0"(x), "r"(y):"memory");

#define LOAD32H(x, y)       \
__asm__ __volatile__ (      \
   "movl (%2),%0\n\t"       \
   "bswapl %0\n\t"          \
   :"=r"(x): "0"(x), "r"(y):"memory");

#else

#define STORE32H(x, y)                                                                     \
     { (y)[0] = (unsigned char)(((x)>>24)&255); (y)[1] = (unsigned char)(((x)>>16)&255);   \
       (y)[2] = (unsigned char)(((x)>>8)&255); (y)[3] = (unsigned char)((x)&255); }

#define LOAD32H(x, y)                            \
     { x = ((unsigned long)((y)[0] & 255)<<24) | \
           ((unsigned long)((y)[1] & 255)<<16) | \
           ((unsigned long)((y)[2] & 255)<<8)  | \
           ((unsigned long)((y)[3] & 255)); }

#endif


/* x86_64 processor */
#if !defined(LTC_NO_BSWAP) && (defined(__GNUC__) && defined(__x86_64__))

/* BUGFIX(review): STORE64H originally listed "0" in its clobber list.
 * Clobbers must be register names, "cc" or "memory" -- a constraint
 * number there is invalid and rejected by GCC.  It is replaced by the
 * "memory" clobber the store actually requires. */
#define STORE64H(x, y)      \
__asm__ __volatile__ (      \
   "bswapq %0     \n\t"     \
   "movq   %0,(%2)\n\t"     \
   "bswapq %0     \n\t"     \
   :"=r"(x):"0"(x), "r"(y):"memory");

#define LOAD64H(x, y)       \
__asm__ __volatile__ (      \
   "movq (%2),%0\n\t"       \
   "bswapq %0\n\t"          \
   :"=r"(x): "0"(x), "r"(y):"memory");

#else

#define STORE64H(x, y)                                                                     \
   { (y)[0] = (unsigned char)(((x)>>56)&255); (y)[1] = (unsigned char)(((x)>>48)&255);     \
     (y)[2] = (unsigned char)(((x)>>40)&255); (y)[3] = (unsigned char)(((x)>>32)&255);     \
     (y)[4] = (unsigned char)(((x)>>24)&255); (y)[5] = (unsigned char)(((x)>>16)&255);     \
     (y)[6] = (unsigned char)(((x)>>8)&255); (y)[7] = (unsigned char)((x)&255); }

#define LOAD64H(x, y)                                                          \
   { x = (((ulong64)((y)[0] & 255))<<56)|(((ulong64)((y)[1] & 255))<<48) |     \
         (((ulong64)((y)[2] & 255))<<40)|(((ulong64)((y)[3] & 255))<<32) |     \
         (((ulong64)((y)[4] & 255))<<24)|(((ulong64)((y)[5] & 255))<<16) |     \
         (((ulong64)((y)[6] & 255))<<8)|(((ulong64)((y)[7] & 255))); }

#endif

/* Native-order (...L) loads/stores: straight memcpy.  With 32-bit words
 * the 64-bit L macros still go byte-by-byte; with 64-bit words LOAD32L
 * masks to 32 bits because ulong32 may be wider than 4 bytes. */
#ifdef ENDIAN_32BITWORD

#define STORE32L(x, y)        \
  { unsigned long __t = (x); memcpy(y, &__t, 4); }

#define LOAD32L(x, y)         \
  memcpy(&(x), y, 4);

#define STORE64L(x, y)                                                                     \
  { (y)[7] = (unsigned char)(((x)>>56)&255); (y)[6] = (unsigned char)(((x)>>48)&255);      \
    (y)[5] = (unsigned char)(((x)>>40)&255); (y)[4] = (unsigned char)(((x)>>32)&255);      \
    (y)[3] = (unsigned char)(((x)>>24)&255); (y)[2] = (unsigned char)(((x)>>16)&255);      \
    (y)[1] = (unsigned char)(((x)>>8)&255); (y)[0] = (unsigned char)((x)&255); }

#define LOAD64L(x, y)                                                        \
  { x = (((ulong64)((y)[7] & 255))<<56)|(((ulong64)((y)[6] & 255))<<48)|     \
        (((ulong64)((y)[5] & 255))<<40)|(((ulong64)((y)[4] & 255))<<32)|     \
        (((ulong64)((y)[3] & 255))<<24)|(((ulong64)((y)[2] & 255))<<16)|     \
        (((ulong64)((y)[1] & 255))<<8)|(((ulong64)((y)[0] & 255))); }

#else /* 64-bit words then */

#define STORE32L(x, y)        \
  { unsigned long __t = (x); memcpy(y, &__t, 4); }

#define LOAD32L(x, y)         \
  { memcpy(&(x), y, 4); x &= 0xFFFFFFFF; }

#define STORE64L(x, y)        \
  { ulong64 __t = (x); memcpy(y, &__t, 8); }

#define LOAD64L(x, y)         \
  { memcpy(&(x), y, 8); }

#endif /* ENDIAN_64BITWORD */

#endif /* ENDIAN_LITTLE */

#ifdef ENDIAN_BIG

/* Big-endian host.  The byte-swapping (...L) macros are done one byte
 * at a time; the native-order (...H) macros can memcpy, with the same
 * 32/64-bit word-size split as the little-endian section. */

#define STORE32L(x, y)                                                                     \
     { (y)[3] = (unsigned char)(((x)>>24)&255); (y)[2] = (unsigned char)(((x)>>16)&255);   \
       (y)[1] = (unsigned char)(((x)>>8)&255); (y)[0] = (unsigned char)((x)&255); }

#define LOAD32L(x, y)                            \
     { x = ((unsigned long)((y)[3] & 255)<<24) | \
           ((unsigned long)((y)[2] & 255)<<16) | \
           ((unsigned long)((y)[1] & 255)<<8)  | \
           ((unsigned long)((y)[0] & 255)); }

#define STORE64L(x, y)                                                                     \
     { (y)[7] = (unsigned char)(((x)>>56)&255); (y)[6] = (unsigned char)(((x)>>48)&255);   \
       (y)[5] = (unsigned char)(((x)>>40)&255); (y)[4] = (unsigned char)(((x)>>32)&255);   \
       (y)[3] = (unsigned char)(((x)>>24)&255); (y)[2] = (unsigned char)(((x)>>16)&255);   \
       (y)[1] = (unsigned char)(((x)>>8)&255); (y)[0] = (unsigned char)((x)&255); }

#define LOAD64L(x, y)                                                          \
     { x = (((ulong64)((y)[7] & 255))<<56)|(((ulong64)((y)[6] & 255))<<48) |   \
           (((ulong64)((y)[5] & 255))<<40)|(((ulong64)((y)[4] & 255))<<32) |   \
           (((ulong64)((y)[3] & 255))<<24)|(((ulong64)((y)[2] & 255))<<16) |   \
           (((ulong64)((y)[1] & 255))<<8)|(((ulong64)((y)[0] & 255))); }

#ifdef ENDIAN_32BITWORD

#define STORE32H(x, y)        \
  { unsigned long __t = (x); memcpy(y, &__t, 4); }

#define LOAD32H(x, y)         \
  memcpy(&(x), y, 4);

#define STORE64H(x, y)                                                                     \
  { (y)[0] = (unsigned char)(((x)>>56)&255); (y)[1] = (unsigned char)(((x)>>48)&255);      \
    (y)[2] = (unsigned char)(((x)>>40)&255); (y)[3] = (unsigned char)(((x)>>32)&255);      \
    (y)[4] = (unsigned char)(((x)>>24)&255); (y)[5] = (unsigned char)(((x)>>16)&255);      \
    (y)[6] = (unsigned char)(((x)>>8)&255); (y)[7] = (unsigned char)((x)&255); }

#define LOAD64H(x, y)                                                         \
  { x = (((ulong64)((y)[0] & 255))<<56)|(((ulong64)((y)[1] & 255))<<48)|      \
        (((ulong64)((y)[2] & 255))<<40)|(((ulong64)((y)[3] & 255))<<32)|      \
        (((ulong64)((y)[4] & 255))<<24)|(((ulong64)((y)[5] & 255))<<16)|      \
        (((ulong64)((y)[6] & 255))<<8)| (((ulong64)((y)[7] & 255))); }

#else /* 64-bit words then */

#define STORE32H(x, y)        \
  { unsigned long __t = (x); memcpy(y, &__t, 4); }

#define LOAD32H(x, y)         \
  { memcpy(&(x), y, 4); x &= 0xFFFFFFFF; }

#define STORE64H(x, y)        \
  { ulong64 __t = (x); memcpy(y, &__t, 8); }

#define LOAD64H(x, y)         \
  { memcpy(&(x), y, 8); }

#endif /* ENDIAN_64BITWORD */
#endif /* ENDIAN_BIG */

/* Byte-swap a 32-bit value.
 * BUGFIX(review): the argument is now parenthesized so expression
 * arguments (e.g. BSWAP(a ^ b)) group correctly; previously the shifts
 * bound tighter than the caller's operators.  The argument is still
 * evaluated four times, so avoid side effects in it. */
#define BSWAP(x)  ( (((x)>>24)&0x000000FFUL) | (((x)<<24)&0xFF000000UL)  | \
                    (((x)>>8)&0x0000FF00UL)  | (((x)<<8)&0x00FF0000UL) )



/* 32-bit Rotates.
 * Three implementations: MSVC intrinsics, x86 inline asm for GCC-like
 * compilers, and a portable fallback.  ROLc/RORc are for compile-time
 * constant rotate counts. */
#if defined(_MSC_VER)

/* intrinsic rotate */
#include <stdlib.h>
#pragma intrinsic(_lrotr,_lrotl)
#define ROR(x,n) _lrotr(x,n)
#define ROL(x,n) _lrotl(x,n)
#define RORc(x,n) _lrotr(x,n)
#define ROLc(x,n) _lrotl(x,n)

#elif defined(__GNUC__) && (defined(__i386__) || defined(__x86_64__)) && !defined(INTEL_CC) && !defined(LTC_NO_ASM)

/* x86 rol/ror.  NOTE(review): spelled __asm__ (not `asm') so these
 * also compile under strict ISO modes such as -std=c11. */
static inline unsigned ROL(unsigned word, int i)
{
   __asm__ ("roll %%cl,%0"
      :"=r" (word)
      :"0" (word),"c" (i));
   return word;
}

static inline unsigned ROR(unsigned word, int i)
{
   __asm__ ("rorl %%cl,%0"
      :"=r" (word)
      :"0" (word),"c" (i));
   return word;
}

#ifndef LTC_NO_ROLC

/* The "I" constraint requires the rotate count to be a compile-time
 * constant 0..31 after inlining; build with LTC_NO_ROLC if the compiler
 * cannot prove that (e.g. at -O0). */
static inline unsigned ROLc(unsigned word, const int i)
{
   __asm__ ("roll %2,%0"
      :"=r" (word)
      :"0" (word),"I" (i));
   return word;
}

static inline unsigned RORc(unsigned word, const int i)
{
   __asm__ ("rorl %2,%0"
      :"=r" (word)
      :"0" (word),"I" (i));
   return word;
}

#else

#define ROLc ROL
#define RORc ROR

#endif

#else

/* rotates the hard way.
 * BUGFIX(review): the right-hand shift count is masked with &31 so a
 * rotate count that is a multiple of 32 no longer shifts by 32, which
 * is undefined behaviour in C; for counts 1..31 the result is unchanged. */
#define ROL(x, y) ( (((unsigned long)(x)<<(unsigned long)((y)&31)) | (((unsigned long)(x)&0xFFFFFFFFUL)>>(unsigned long)((32-((y)&31))&31))) & 0xFFFFFFFFUL)
#define ROR(x, y) ( ((((unsigned long)(x)&0xFFFFFFFFUL)>>(unsigned long)((y)&31)) | ((unsigned long)(x)<<(unsigned long)((32-((y)&31))&31))) & 0xFFFFFFFFUL)
#define ROLc(x, y) ( (((unsigned long)(x)<<(unsigned long)((y)&31)) | (((unsigned long)(x)&0xFFFFFFFFUL)>>(unsigned long)((32-((y)&31))&31))) & 0xFFFFFFFFUL)
#define RORc(x, y) ( ((((unsigned long)(x)&0xFFFFFFFFUL)>>(unsigned long)((y)&31)) | ((unsigned long)(x)<<(unsigned long)((32-((y)&31))&31))) & 0xFFFFFFFFUL)

#endif



/* 64-bit Rotates.
 * x86_64 inline asm when available, portable macros otherwise.
 * ROL64c/ROR64c are for compile-time constant rotate counts. */
#if defined(__GNUC__) && defined(__x86_64__) && !defined(LTC_NO_ASM)

/* NOTE(review): spelled __asm__ (not `asm') so these also compile under
 * strict ISO modes such as -std=c11.  unsigned long is 64 bits on the
 * LP64 targets this branch is used on -- TODO confirm for LLP64
 * (mingw-w64) builds. */
static inline unsigned long ROL64(unsigned long word, int i)
{
   __asm__("rolq %%cl,%0"
      :"=r" (word)
      :"0" (word),"c" (i));
   return word;
}

static inline unsigned long ROR64(unsigned long word, int i)
{
   __asm__("rorq %%cl,%0"
      :"=r" (word)
      :"0" (word),"c" (i));
   return word;
}

#ifndef LTC_NO_ROLC

/* The "J" constraint requires the rotate count to be a compile-time
 * constant 0..63 after inlining; build with LTC_NO_ROLC if the compiler
 * cannot prove that (e.g. at -O0). */
static inline unsigned long ROL64c(unsigned long word, const int i)
{
   __asm__("rolq %2,%0"
      :"=r" (word)
      :"0" (word),"J" (i));
   return word;
}

static inline unsigned long ROR64c(unsigned long word, const int i)
{
   __asm__("rorq %2,%0"
      :"=r" (word)
      :"0" (word),"J" (i));
   return word;
}

#else /* LTC_NO_ROLC */

#define ROL64c ROL64
#define ROR64c ROR64

#endif

#else /* Not x86_64 */

/* BUGFIX(review): the complementary shift count is masked with &63 so a
 * rotate count that is a multiple of 64 no longer shifts by 64, which is
 * undefined behaviour in C; for counts 1..63 the result is unchanged. */
#define ROL64(x, y) \
    ( (((x)<<((ulong64)(y)&63)) | \
       (((x)&CONST64(0xFFFFFFFFFFFFFFFF))>>(((ulong64)64-((y)&63))&63))) & CONST64(0xFFFFFFFFFFFFFFFF))

#define ROR64(x, y) \
    ( ((((x)&CONST64(0xFFFFFFFFFFFFFFFF))>>((ulong64)(y)&CONST64(63))) | \
       ((x)<<((ulong64)((64-((y)&CONST64(63)))&63)))) & CONST64(0xFFFFFFFFFFFFFFFF))

#define ROL64c(x, y) \
    ( (((x)<<((ulong64)(y)&63)) | \
       (((x)&CONST64(0xFFFFFFFFFFFFFFFF))>>(((ulong64)64-((y)&63))&63))) & CONST64(0xFFFFFFFFFFFFFFFF))

#define ROR64c(x, y) \
    ( ((((x)&CONST64(0xFFFFFFFFFFFFFFFF))>>((ulong64)(y)&CONST64(63))) | \
       ((x)<<((ulong64)((64-((y)&CONST64(63)))&63)))) & CONST64(0xFFFFFFFFFFFFFFFF))

#endif

/* NB: classic double-evaluation macros -- arguments must be free of
 * side effects (MAX(i++, j) evaluates i++ twice). */
#undef  MAX
#undef  MIN
#define MAX(x, y) ( ((x)>(y))?(x):(y) )
#define MIN(x, y) ( ((x)<(y))?(x):(y) )

/* extract byte n of x portably (n == 0 is the least-significant byte) */
#ifdef _MSC_VER
   #define byte(x, n) ((unsigned char)((x) >> (8 * (n))))
#else
   #define byte(x, n) (((x) >> (8 * (n))) & 255)
#endif