/*
 * Source: rofl0r/kripto — lib/block/threefish1024.c
 * (blob 2cc2f454c9f948f28b21aa0f4d8aa6e0d9c63c9a)
 */
/*
 * Written in 2013 by Gregor Pintar <grpintar@gmail.com>
 *
 * To the extent possible under law, the author(s) have dedicated
 * all copyright and related and neighboring rights to this software
 * to the public domain worldwide.
 *
 * This software is distributed without any warranty.
 *
 * You should have received a copy of the CC0 Public Domain Dedication.
 * If not, see <http://creativecommons.org/publicdomain/zero/1.0/>.
 */
#include <stdint.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>

#include <kripto/cast.h>
#include <kripto/loadstore.h>
#include <kripto/rotate.h>
#include <kripto/memwipe.h>
#include <kripto/block.h>
#include <kripto/desc/block.h>
#include <kripto/object/block.h>

#include <kripto/block/threefish1024.h>
/*
 * Key-schedule parity constant (called C240 in the Skein specification);
 * the 17th key word is the XOR of all 16 key words and this constant.
 * The ULL suffix pins the constant to an unsigned 64-bit type instead of
 * the implementation-chosen (signed) long long.
 */
#define C240 0x1BD11BDAA9FC1A22ULL
/* Per-instance cipher state. */
struct kripto_block
{
	struct kripto_block_object obj; /* common object header; presumably must stay first — see kripto/object/block.h */
	unsigned int rounds;            /* number of rounds (default 80, set in recreate) */
	uint64_t t[3];                  /* tweak words; t[2] = t[0] ^ t[1] */
	uint64_t k[17];                 /* key words; k[16] = parity of k[0..15] ^ C240 */
};
39 static void threefish1024_tweak
41 kripto_block *s,
42 const void *tweak,
43 unsigned int len
46 s->t[0] = s->t[1] = 0;
48 while(--len != UINT_MAX)
49 s->t[len >> 3] = (s->t[len >> 3] << 8) | CU8(tweak)[len];
51 s->t[2] = s->t[0] ^ s->t[1];
/*
 * Encrypts one 128-byte block. Loads 16 little-endian 64-bit words,
 * injects subkey 0, then performs s->rounds rounds — each loop iteration
 * runs 8 rounds (two groups of 4 MIX/permute layers, each followed by a
 * subkey injection) — and stores the result little-endian into ct.
 * The subkey schedule rotates through k[0..16] mod 17 and t[0..2] mod 3,
 * with the injection counter r added to the last word (Threefish spec).
 */
static void threefish1024_encrypt
(
	const kripto_block *s,
	const void *pt,
	void *ct
)
{
	/* load plaintext little-endian and add subkey 0 */
	uint64_t x00 = LOAD64L(CU8(pt)) + s->k[0];
	uint64_t x01 = LOAD64L(CU8(pt) + 8) + s->k[1];
	uint64_t x02 = LOAD64L(CU8(pt) + 16) + s->k[2];
	uint64_t x03 = LOAD64L(CU8(pt) + 24) + s->k[3];
	uint64_t x04 = LOAD64L(CU8(pt) + 32) + s->k[4];
	uint64_t x05 = LOAD64L(CU8(pt) + 40) + s->k[5];
	uint64_t x06 = LOAD64L(CU8(pt) + 48) + s->k[6];
	uint64_t x07 = LOAD64L(CU8(pt) + 56) + s->k[7];
	uint64_t x08 = LOAD64L(CU8(pt) + 64) + s->k[8];
	uint64_t x09 = LOAD64L(CU8(pt) + 72) + s->k[9];
	uint64_t x10 = LOAD64L(CU8(pt) + 80) + s->k[10];
	uint64_t x11 = LOAD64L(CU8(pt) + 88) + s->k[11];
	uint64_t x12 = LOAD64L(CU8(pt) + 96) + s->k[12];
	uint64_t x13 = LOAD64L(CU8(pt) + 104) + s->k[13] + s->t[0];
	uint64_t x14 = LOAD64L(CU8(pt) + 112) + s->k[14] + s->t[1];
	uint64_t x15 = LOAD64L(CU8(pt) + 120) + s->k[15];
	unsigned int r = 1;

	/* each iteration = 8 rounds + 2 subkey injections */
	while(r <= s->rounds >> 2)
	{
		/* rounds 1-4 of this 8-round group (MIX + word permutation,
		 * rotation constants from the Threefish-1024 spec) */
		x00 += x01; x01 = ROL64_24(x01); x01 ^= x00;
		x02 += x03; x03 = ROL64_13(x03); x03 ^= x02;
		x04 += x05; x05 = ROL64_08(x05); x05 ^= x04;
		x06 += x07; x07 = ROL64_47(x07); x07 ^= x06;
		x08 += x09; x09 = ROL64_08(x09); x09 ^= x08;
		x10 += x11; x11 = ROL64_17(x11); x11 ^= x10;
		x12 += x13; x13 = ROL64_22(x13); x13 ^= x12;
		x14 += x15; x15 = ROL64_37(x15); x15 ^= x14;

		x00 += x09; x09 = ROL64_38(x09); x09 ^= x00;
		x02 += x13; x13 = ROL64_19(x13); x13 ^= x02;
		x06 += x11; x11 = ROL64_10(x11); x11 ^= x06;
		x04 += x15; x15 = ROL64_55(x15); x15 ^= x04;
		x10 += x07; x07 = ROL64_49(x07); x07 ^= x10;
		x12 += x03; x03 = ROL64_18(x03); x03 ^= x12;
		x14 += x05; x05 = ROL64_23(x05); x05 ^= x14;
		x08 += x01; x01 = ROL64_52(x01); x01 ^= x08;

		x00 += x07; x07 = ROL64_33(x07); x07 ^= x00;
		x02 += x05; x05 = ROL64_04(x05); x05 ^= x02;
		x04 += x03; x03 = ROL64_51(x03); x03 ^= x04;
		x06 += x01; x01 = ROL64_13(x01); x01 ^= x06;
		x12 += x15; x15 = ROL64_34(x15); x15 ^= x12;
		x14 += x13; x13 = ROL64_41(x13); x13 ^= x14;
		x08 += x11; x11 = ROL64_59(x11); x11 ^= x08;
		x10 += x09; x09 = ROL64_17(x09); x09 ^= x10;

		x00 += x15; x15 = ROL64_05(x15); x15 ^= x00;
		x02 += x11; x11 = ROL64_20(x11); x11 ^= x02;
		x06 += x13; x13 = ROL64_48(x13); x13 ^= x06;
		x04 += x09; x09 = ROL64_41(x09); x09 ^= x04;
		x14 += x01; x01 = ROL64_47(x01); x01 ^= x14;
		x08 += x05; x05 = ROL64_28(x05); x05 ^= x08;
		x10 += x03; x03 = ROL64_16(x03); x03 ^= x10;
		x12 += x07; x07 = ROL64_25(x07); x07 ^= x12;

		/* subkey injection r: keys rotate mod 17, tweak mod 3,
		 * injection counter added to the last word */
		x00 += s->k[r % 17];
		x01 += s->k[(r + 1) % 17];
		x02 += s->k[(r + 2) % 17];
		x03 += s->k[(r + 3) % 17];
		x04 += s->k[(r + 4) % 17];
		x05 += s->k[(r + 5) % 17];
		x06 += s->k[(r + 6) % 17];
		x07 += s->k[(r + 7) % 17];
		x08 += s->k[(r + 8) % 17];
		x09 += s->k[(r + 9) % 17];
		x10 += s->k[(r + 10) % 17];
		x11 += s->k[(r + 11) % 17];
		x12 += s->k[(r + 12) % 17];
		x13 += s->k[(r + 13) % 17] + s->t[r % 3];
		x14 += s->k[(r + 14) % 17] + s->t[(r + 1) % 3];
		x15 += s->k[(r + 15) % 17] + r;
		r++;

		/* rounds 5-8 of this 8-round group */
		x00 += x01; x01 = ROL64_41(x01); x01 ^= x00;
		x02 += x03; x03 = ROL64_09(x03); x03 ^= x02;
		x04 += x05; x05 = ROL64_37(x05); x05 ^= x04;
		x06 += x07; x07 = ROL64_31(x07); x07 ^= x06;
		x08 += x09; x09 = ROL64_12(x09); x09 ^= x08;
		x10 += x11; x11 = ROL64_47(x11); x11 ^= x10;
		x12 += x13; x13 = ROL64_44(x13); x13 ^= x12;
		x14 += x15; x15 = ROL64_30(x15); x15 ^= x14;

		x00 += x09; x09 = ROL64_16(x09); x09 ^= x00;
		x02 += x13; x13 = ROL64_34(x13); x13 ^= x02;
		x06 += x11; x11 = ROL64_56(x11); x11 ^= x06;
		x04 += x15; x15 = ROL64_51(x15); x15 ^= x04;
		x10 += x07; x07 = ROL64_04(x07); x07 ^= x10;
		x12 += x03; x03 = ROL64_53(x03); x03 ^= x12;
		x14 += x05; x05 = ROL64_42(x05); x05 ^= x14;
		x08 += x01; x01 = ROL64_41(x01); x01 ^= x08;

		x00 += x07; x07 = ROL64_31(x07); x07 ^= x00;
		x02 += x05; x05 = ROL64_44(x05); x05 ^= x02;
		x04 += x03; x03 = ROL64_47(x03); x03 ^= x04;
		x06 += x01; x01 = ROL64_46(x01); x01 ^= x06;
		x12 += x15; x15 = ROL64_19(x15); x15 ^= x12;
		x14 += x13; x13 = ROL64_42(x13); x13 ^= x14;
		x08 += x11; x11 = ROL64_44(x11); x11 ^= x08;
		x10 += x09; x09 = ROL64_25(x09); x09 ^= x10;

		x00 += x15; x15 = ROL64_09(x15); x15 ^= x00;
		x02 += x11; x11 = ROL64_48(x11); x11 ^= x02;
		x06 += x13; x13 = ROL64_35(x13); x13 ^= x06;
		x04 += x09; x09 = ROL64_52(x09); x09 ^= x04;
		x14 += x01; x01 = ROL64_23(x01); x01 ^= x14;
		x08 += x05; x05 = ROL64_31(x05); x05 ^= x08;
		x10 += x03; x03 = ROL64_37(x03); x03 ^= x10;
		x12 += x07; x07 = ROL64_20(x07); x07 ^= x12;

		/* subkey injection r+1 */
		x00 += s->k[r % 17];
		x01 += s->k[(r + 1) % 17];
		x02 += s->k[(r + 2) % 17];
		x03 += s->k[(r + 3) % 17];
		x04 += s->k[(r + 4) % 17];
		x05 += s->k[(r + 5) % 17];
		x06 += s->k[(r + 6) % 17];
		x07 += s->k[(r + 7) % 17];
		x08 += s->k[(r + 8) % 17];
		x09 += s->k[(r + 9) % 17];
		x10 += s->k[(r + 10) % 17];
		x11 += s->k[(r + 11) % 17];
		x12 += s->k[(r + 12) % 17];
		x13 += s->k[(r + 13) % 17] + s->t[r % 3];
		x14 += s->k[(r + 14) % 17] + s->t[(r + 1) % 3];
		x15 += s->k[(r + 15) % 17] + r;
		r++;
	}

	/* store ciphertext little-endian */
	STORE64L(x00, U8(ct));
	STORE64L(x01, U8(ct) + 8);
	STORE64L(x02, U8(ct) + 16);
	STORE64L(x03, U8(ct) + 24);
	STORE64L(x04, U8(ct) + 32);
	STORE64L(x05, U8(ct) + 40);
	STORE64L(x06, U8(ct) + 48);
	STORE64L(x07, U8(ct) + 56);
	STORE64L(x08, U8(ct) + 64);
	STORE64L(x09, U8(ct) + 72);
	STORE64L(x10, U8(ct) + 80);
	STORE64L(x11, U8(ct) + 88);
	STORE64L(x12, U8(ct) + 96);
	STORE64L(x13, U8(ct) + 104);
	STORE64L(x14, U8(ct) + 112);
	STORE64L(x15, U8(ct) + 120);
}
/*
 * Decrypts one 128-byte block: exact inverse of threefish1024_encrypt.
 * Subkeys are removed in reverse order (r counts down from s->rounds/4),
 * each MIX layer is undone by XOR, rotate-right by the matching encrypt
 * constant, then subtract; the final step removes subkey 0 (r == 0 here,
 * so the plain k[0..15]/t[0]/t[1] subtraction matches the schedule).
 */
static void threefish1024_decrypt
(
	const kripto_block *s,
	const void *ct,
	void *pt
)
{
	/* load ciphertext little-endian */
	uint64_t x00 = LOAD64L(CU8(ct));
	uint64_t x01 = LOAD64L(CU8(ct) + 8);
	uint64_t x02 = LOAD64L(CU8(ct) + 16);
	uint64_t x03 = LOAD64L(CU8(ct) + 24);
	uint64_t x04 = LOAD64L(CU8(ct) + 32);
	uint64_t x05 = LOAD64L(CU8(ct) + 40);
	uint64_t x06 = LOAD64L(CU8(ct) + 48);
	uint64_t x07 = LOAD64L(CU8(ct) + 56);
	uint64_t x08 = LOAD64L(CU8(ct) + 64);
	uint64_t x09 = LOAD64L(CU8(ct) + 72);
	uint64_t x10 = LOAD64L(CU8(ct) + 80);
	uint64_t x11 = LOAD64L(CU8(ct) + 88);
	uint64_t x12 = LOAD64L(CU8(ct) + 96);
	uint64_t x13 = LOAD64L(CU8(ct) + 104);
	uint64_t x14 = LOAD64L(CU8(ct) + 112);
	uint64_t x15 = LOAD64L(CU8(ct) + 120);
	unsigned int r = s->rounds >> 2;

	/* each iteration undoes 8 rounds + 2 subkey injections */
	while(r > 1)
	{
		/* remove subkey injection r */
		x00 -= s->k[r % 17];
		x01 -= s->k[(r + 1) % 17];
		x02 -= s->k[(r + 2) % 17];
		x03 -= s->k[(r + 3) % 17];
		x04 -= s->k[(r + 4) % 17];
		x05 -= s->k[(r + 5) % 17];
		x06 -= s->k[(r + 6) % 17];
		x07 -= s->k[(r + 7) % 17];
		x08 -= s->k[(r + 8) % 17];
		x09 -= s->k[(r + 9) % 17];
		x10 -= s->k[(r + 10) % 17];
		x11 -= s->k[(r + 11) % 17];
		x12 -= s->k[(r + 12) % 17];
		x13 -= s->k[(r + 13) % 17] + s->t[r % 3];
		x14 -= s->k[(r + 14) % 17] + s->t[(r + 1) % 3];
		x15 -= s->k[(r + 15) % 17] + r;
		r--;

		/* undo rounds 8-5 of the group (reverse of encrypt order) */
		x07 = ROR64_20(x07 ^ x12); x12 -= x07;
		x03 = ROR64_37(x03 ^ x10); x10 -= x03;
		x05 = ROR64_31(x05 ^ x08); x08 -= x05;
		x01 = ROR64_23(x01 ^ x14); x14 -= x01;
		x09 = ROR64_52(x09 ^ x04); x04 -= x09;
		x13 = ROR64_35(x13 ^ x06); x06 -= x13;
		x11 = ROR64_48(x11 ^ x02); x02 -= x11;
		x15 = ROR64_09(x15 ^ x00); x00 -= x15;

		x09 = ROR64_25(x09 ^ x10); x10 -= x09;
		x11 = ROR64_44(x11 ^ x08); x08 -= x11;
		x13 = ROR64_42(x13 ^ x14); x14 -= x13;
		x15 = ROR64_19(x15 ^ x12); x12 -= x15;
		x01 = ROR64_46(x01 ^ x06); x06 -= x01;
		x03 = ROR64_47(x03 ^ x04); x04 -= x03;
		x05 = ROR64_44(x05 ^ x02); x02 -= x05;
		x07 = ROR64_31(x07 ^ x00); x00 -= x07;

		x01 = ROR64_41(x01 ^ x08); x08 -= x01;
		x05 = ROR64_42(x05 ^ x14); x14 -= x05;
		x03 = ROR64_53(x03 ^ x12); x12 -= x03;
		x07 = ROR64_04(x07 ^ x10); x10 -= x07;
		x15 = ROR64_51(x15 ^ x04); x04 -= x15;
		x11 = ROR64_56(x11 ^ x06); x06 -= x11;
		x13 = ROR64_34(x13 ^ x02); x02 -= x13;
		x09 = ROR64_16(x09 ^ x00); x00 -= x09;

		x15 = ROR64_30(x15 ^ x14); x14 -= x15;
		x13 = ROR64_44(x13 ^ x12); x12 -= x13;
		x11 = ROR64_47(x11 ^ x10); x10 -= x11;
		x09 = ROR64_12(x09 ^ x08); x08 -= x09;
		x07 = ROR64_31(x07 ^ x06); x06 -= x07;
		x05 = ROR64_37(x05 ^ x04); x04 -= x05;
		x03 = ROR64_09(x03 ^ x02); x02 -= x03;
		x01 = ROR64_41(x01 ^ x00); x00 -= x01;

		/* remove subkey injection r-1 */
		x00 -= s->k[r % 17];
		x01 -= s->k[(r + 1) % 17];
		x02 -= s->k[(r + 2) % 17];
		x03 -= s->k[(r + 3) % 17];
		x04 -= s->k[(r + 4) % 17];
		x05 -= s->k[(r + 5) % 17];
		x06 -= s->k[(r + 6) % 17];
		x07 -= s->k[(r + 7) % 17];
		x08 -= s->k[(r + 8) % 17];
		x09 -= s->k[(r + 9) % 17];
		x10 -= s->k[(r + 10) % 17];
		x11 -= s->k[(r + 11) % 17];
		x12 -= s->k[(r + 12) % 17];
		x13 -= s->k[(r + 13) % 17] + s->t[r % 3];
		x14 -= s->k[(r + 14) % 17] + s->t[(r + 1) % 3];
		x15 -= s->k[(r + 15) % 17] + r;
		r--;

		/* undo rounds 4-1 of the group */
		x07 = ROR64_25(x07 ^ x12); x12 -= x07;
		x03 = ROR64_16(x03 ^ x10); x10 -= x03;
		x05 = ROR64_28(x05 ^ x08); x08 -= x05;
		x01 = ROR64_47(x01 ^ x14); x14 -= x01;
		x09 = ROR64_41(x09 ^ x04); x04 -= x09;
		x13 = ROR64_48(x13 ^ x06); x06 -= x13;
		x11 = ROR64_20(x11 ^ x02); x02 -= x11;
		x15 = ROR64_05(x15 ^ x00); x00 -= x15;

		x09 = ROR64_17(x09 ^ x10); x10 -= x09;
		x11 = ROR64_59(x11 ^ x08); x08 -= x11;
		x13 = ROR64_41(x13 ^ x14); x14 -= x13;
		x15 = ROR64_34(x15 ^ x12); x12 -= x15;
		x01 = ROR64_13(x01 ^ x06); x06 -= x01;
		x03 = ROR64_51(x03 ^ x04); x04 -= x03;
		x05 = ROR64_04(x05 ^ x02); x02 -= x05;
		x07 = ROR64_33(x07 ^ x00); x00 -= x07;

		x01 = ROR64_52(x01 ^ x08); x08 -= x01;
		x05 = ROR64_23(x05 ^ x14); x14 -= x05;
		x03 = ROR64_18(x03 ^ x12); x12 -= x03;
		x07 = ROR64_49(x07 ^ x10); x10 -= x07;
		x15 = ROR64_55(x15 ^ x04); x04 -= x15;
		x11 = ROR64_10(x11 ^ x06); x06 -= x11;
		x13 = ROR64_19(x13 ^ x02); x02 -= x13;
		x09 = ROR64_38(x09 ^ x00); x00 -= x09;

		x15 = ROR64_37(x15 ^ x14); x14 -= x15;
		x13 = ROR64_22(x13 ^ x12); x12 -= x13;
		x11 = ROR64_17(x11 ^ x10); x10 -= x11;
		x09 = ROR64_08(x09 ^ x08); x08 -= x09;
		x07 = ROR64_47(x07 ^ x06); x06 -= x07;
		x05 = ROR64_08(x05 ^ x04); x04 -= x05;
		x03 = ROR64_13(x03 ^ x02); x02 -= x03;
		x01 = ROR64_24(x01 ^ x00); x00 -= x01;
	}

	/* remove subkey 0 (loop exits with r == 0) */
	x00 -= s->k[0];
	x01 -= s->k[1];
	x02 -= s->k[2];
	x03 -= s->k[3];
	x04 -= s->k[4];
	x05 -= s->k[5];
	x06 -= s->k[6];
	x07 -= s->k[7];
	x08 -= s->k[8];
	x09 -= s->k[9];
	x10 -= s->k[10];
	x11 -= s->k[11];
	x12 -= s->k[12];
	x13 -= s->k[13] + s->t[0];
	x14 -= s->k[14] + s->t[1];
	x15 -= s->k[15];

	/* store plaintext little-endian */
	STORE64L(x00, U8(pt));
	STORE64L(x01, U8(pt) + 8);
	STORE64L(x02, U8(pt) + 16);
	STORE64L(x03, U8(pt) + 24);
	STORE64L(x04, U8(pt) + 32);
	STORE64L(x05, U8(pt) + 40);
	STORE64L(x06, U8(pt) + 48);
	STORE64L(x07, U8(pt) + 56);
	STORE64L(x08, U8(pt) + 64);
	STORE64L(x09, U8(pt) + 72);
	STORE64L(x10, U8(pt) + 80);
	STORE64L(x11, U8(pt) + 88);
	STORE64L(x12, U8(pt) + 96);
	STORE64L(x13, U8(pt) + 104);
	STORE64L(x14, U8(pt) + 112);
	STORE64L(x15, U8(pt) + 120);
}
379 static kripto_block *threefish1024_recreate
381 kripto_block *s,
382 unsigned int r,
383 const void *key,
384 unsigned int key_len
387 unsigned int i;
389 s->rounds = r;
390 if(!s->rounds) s->rounds = 80;
392 memset(s->k, 0, 128);
394 for(i = key_len - 1; i != UINT_MAX; i--)
395 s->k[i >> 3] = (s->k[i >> 3] << 8) | CU8(key)[i];
397 s->k[16] = s->k[0] ^ s->k[1] ^ s->k[2] ^ s->k[3]
398 ^ s->k[4] ^ s->k[5] ^ s->k[6] ^ s->k[7]
399 ^ s->k[8] ^ s->k[9] ^ s->k[10] ^ s->k[11]
400 ^ s->k[12] ^ s->k[13] ^ s->k[14] ^ s->k[15] ^ C240;
402 s->t[0] = s->t[1] = s->t[2] = 0;
404 return s;
407 static kripto_block *threefish1024_create
409 unsigned int r,
410 const void *key,
411 unsigned int key_len
414 kripto_block *s;
416 s = malloc(sizeof(kripto_block));
417 if(!s) return 0;
419 s->obj.desc = kripto_block_threefish1024;
421 (void)threefish1024_recreate(s, r, key, key_len);
423 return s;
426 static void threefish1024_destroy(kripto_block *s)
428 kripto_memwipe(s, sizeof(kripto_block));
429 free(s);
/* descriptor wiring this cipher into the kripto block API;
 * field order must match kripto_block_desc (see kripto/desc/block.h) */
static const kripto_block_desc threefish1024 =
{
	&threefish1024_create,
	&threefish1024_recreate,
	&threefish1024_tweak,
	&threefish1024_encrypt,
	&threefish1024_decrypt,
	&threefish1024_destroy,
	128, /* block size (bytes) */
	128 /* max key (bytes) */
};

/* public handle declared in kripto/block/threefish1024.h */
const kripto_block_desc *const kripto_block_threefish1024 = &threefish1024;