3 * The yarrow pseudo-randomness generator.
/* nettle, low-level cryptographic library
8 * Copyright (C) 2001 Niels Möller
10 * The nettle library is free software; you can redistribute it and/or modify
11 * it under the terms of the GNU Lesser General Public License as published by
12 * the Free Software Foundation; either version 2.1 of the License, or (at your
13 * option) any later version.
15 * The nettle library is distributed in the hope that it will be useful, but
16 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
18 * License for more details.
20 * You should have received a copy of the GNU Lesser General Public License
21 * along with the nettle library; see the file COPYING.LIB. If not, write to
22 * the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
/* Set to 1 to get debug traces on stderr (compiled out by default). */
#define YARROW_DEBUG 0

/* An upper limit on the entropy (in bits) in one octet of sample
 * data. */
#define YARROW_MULTIPLIER 4

/* Entropy threshold for reseeding from the fast pool */
#define YARROW_FAST_THRESHOLD 100

/* Entropy threshold for reseeding from the slow pool */
#define YARROW_SLOW_THRESHOLD 160

/* Number of sources that must exceed the threshold for slow reseed */
#define YARROW_SLOW_K 2

/* The number of iterations when reseeding, P_t in the yarrow paper.
 * Should be chosen so that reseeding takes on the order of 0.1-1
 * seconds. */
#define YARROW_RESEED_ITERATIONS 1500

/* Entropy estimates sticks to this value, it is treated as infinity
 * in calculations. It should fit comfortably in an uint32_t, to avoid
 * overflows. */
#define YARROW_MAX_ENTROPY 0x100000

/* Forward declarations */
static void
yarrow_gate(struct yarrow256_ctx *ctx);
76 yarrow256_init(struct yarrow256_ctx
*ctx
,
78 struct yarrow_source
*s
)
82 sha256_init(&ctx
->pools
[0]);
83 sha256_init(&ctx
->pools
[1]);
87 /* Not strictly necessary, but it makes it easier to see if the
89 memset(ctx
->counter
, 0, sizeof(ctx
->counter
));
96 ctx
->sources
[i
].estimate
[YARROW_FAST
] = 0;
97 ctx
->sources
[i
].estimate
[YARROW_SLOW
] = 0;
98 ctx
->sources
[i
].next
= YARROW_FAST
;
103 yarrow256_seed(struct yarrow256_ctx
*ctx
,
105 const uint8_t *seed_file
)
109 sha256_update(&ctx
->pools
[YARROW_FAST
], length
, seed_file
);
110 yarrow256_fast_reseed(ctx
);
113 /* FIXME: Generalize so that it generates a few more blocks at a
116 yarrow_generate_block(struct yarrow256_ctx
*ctx
,
121 aes_encrypt(&ctx
->key
, sizeof(ctx
->counter
), block
, ctx
->counter
);
123 /* Increment counter, treating it as a big-endian number. This is
124 * machine independent, and follows appendix B of the NIST
125 * specification of cipher modes of operation.
127 * We could keep a representation of the counter as 4 32-bit values,
128 * and write entire words (in big-endian byteorder) into the counter
129 * block, whenever they change. */
130 for (i
= sizeof(ctx
->counter
); i
--; )
132 if (++ctx
->counter
[i
])
138 yarrow_iterate(uint8_t *digest
)
140 uint8_t v0
[SHA256_DIGEST_SIZE
];
143 memcpy(v0
, digest
, SHA256_DIGEST_SIZE
);
145 /* When hashed inside the loop, i should run from 1 to
146 * YARROW_RESEED_ITERATIONS */
147 for (i
= 0; ++i
< YARROW_RESEED_ITERATIONS
; )
150 struct sha256_ctx hash
;
154 /* Hash v_i | v_0 | i */
155 WRITE_UINT32(count
, i
);
156 sha256_update(&hash
, SHA256_DIGEST_SIZE
, digest
);
157 sha256_update(&hash
, sizeof(v0
), v0
);
158 sha256_update(&hash
, sizeof(count
), count
);
160 sha256_digest(&hash
, SHA256_DIGEST_SIZE
, digest
);
164 /* NOTE: The SHA-256 digest size equals the AES key size, so we need
165 * no "size adaptor". */
168 yarrow256_fast_reseed(struct yarrow256_ctx
*ctx
)
170 uint8_t digest
[SHA256_DIGEST_SIZE
];
174 fprintf(stderr
, "yarrow256_fast_reseed\n");
177 /* We feed two block of output using the current key into the pool
178 * before emptying it. */
181 uint8_t blocks
[AES_BLOCK_SIZE
* 2];
183 yarrow_generate_block(ctx
, blocks
);
184 yarrow_generate_block(ctx
, blocks
+ AES_BLOCK_SIZE
);
185 sha256_update(&ctx
->pools
[YARROW_FAST
], sizeof(blocks
), blocks
);
188 sha256_digest(&ctx
->pools
[YARROW_FAST
], sizeof(digest
), digest
);
191 yarrow_iterate(digest
);
193 aes_set_encrypt_key(&ctx
->key
, sizeof(digest
), digest
);
196 /* Derive new counter value */
197 memset(ctx
->counter
, 0, sizeof(ctx
->counter
));
198 aes_encrypt(&ctx
->key
, sizeof(ctx
->counter
), ctx
->counter
, ctx
->counter
);
200 /* Reset estimates. */
201 for (i
= 0; i
<ctx
->nsources
; i
++)
202 ctx
->sources
[i
].estimate
[YARROW_FAST
] = 0;
206 yarrow256_slow_reseed(struct yarrow256_ctx
*ctx
)
208 uint8_t digest
[SHA256_DIGEST_SIZE
];
212 fprintf(stderr
, "yarrow256_slow_reseed\n");
215 /* Get digest of the slow pool*/
216 sha256_digest(&ctx
->pools
[YARROW_SLOW
], sizeof(digest
), digest
);
218 /* Feed it into the fast pool */
219 sha256_update(&ctx
->pools
[YARROW_FAST
], sizeof(digest
), digest
);
221 yarrow256_fast_reseed(ctx
);
223 /* Reset estimates. */
224 for (i
= 0; i
<ctx
->nsources
; i
++)
225 ctx
->sources
[i
].estimate
[YARROW_SLOW
] = 0;
229 yarrow256_update(struct yarrow256_ctx
*ctx
,
230 unsigned source_index
, unsigned entropy
,
231 unsigned length
, const uint8_t *data
)
233 enum yarrow_pool_id current
;
234 struct yarrow_source
*source
;
236 assert(source_index
< ctx
->nsources
);
239 /* Nothing happens */
242 source
= &ctx
->sources
[source_index
];
245 /* While seeding, use the slow pool */
246 current
= YARROW_SLOW
;
249 current
= source
->next
;
250 source
->next
= !source
->next
;
253 sha256_update(&ctx
->pools
[current
], length
, data
);
255 /* NOTE: We should be careful to avoid overflows in the estimates. */
256 if (source
->estimate
[current
] < YARROW_MAX_ENTROPY
)
258 if (entropy
> YARROW_MAX_ENTROPY
)
259 entropy
= YARROW_MAX_ENTROPY
;
261 if ( (length
< (YARROW_MAX_ENTROPY
/ YARROW_MULTIPLIER
))
262 && (entropy
> YARROW_MULTIPLIER
* length
) )
263 entropy
= YARROW_MULTIPLIER
* length
;
265 entropy
+= source
->estimate
[current
];
266 if (entropy
> YARROW_MAX_ENTROPY
)
267 entropy
= YARROW_MAX_ENTROPY
;
269 source
->estimate
[current
] = entropy
;
272 /* Check for seed/reseed */
278 "yarrow256_update: source_index = %d,\n"
279 " fast pool estimate = %d\n",
280 source_index
, source
->estimate
[YARROW_FAST
]);
282 if (source
->estimate
[YARROW_FAST
] >= YARROW_FAST_THRESHOLD
)
284 yarrow256_fast_reseed(ctx
);
292 if (!yarrow256_needed_sources(ctx
))
294 yarrow256_slow_reseed(ctx
);
306 yarrow_gate(struct yarrow256_ctx
*ctx
)
308 uint8_t key
[AES_MAX_KEY_SIZE
];
311 for (i
= 0; i
< sizeof(key
); i
+= AES_BLOCK_SIZE
)
312 yarrow_generate_block(ctx
, key
+ i
);
314 aes_set_encrypt_key(&ctx
->key
, sizeof(key
), key
);
318 yarrow256_random(struct yarrow256_ctx
*ctx
, unsigned length
, uint8_t *dst
)
322 while (length
>= AES_BLOCK_SIZE
)
324 yarrow_generate_block(ctx
, dst
);
325 dst
+= AES_BLOCK_SIZE
;
326 length
-= AES_BLOCK_SIZE
;
330 uint8_t buffer
[AES_BLOCK_SIZE
];
332 assert(length
< AES_BLOCK_SIZE
);
333 yarrow_generate_block(ctx
, buffer
);
334 memcpy(dst
, buffer
, length
);
340 yarrow256_is_seeded(struct yarrow256_ctx
*ctx
)
346 yarrow256_needed_sources(struct yarrow256_ctx
*ctx
)
348 /* FIXME: This is somewhat inefficient. It would be better to
349 * either maintain the count, or do this loop only if the
350 * current source just crossed the threshold. */
353 for (i
= k
= 0; i
< ctx
->nsources
; i
++)
354 if (ctx
->sources
[i
].estimate
[YARROW_SLOW
] >= YARROW_SLOW_THRESHOLD
)
359 "yarrow256_needed_sources: source_index = %d,\n"
360 " slow pool estimate = %d,\n"
361 " number of sources above threshold = %d\n",
362 source_index
, source
->estimate
[YARROW_SLOW
], k
);
365 return (k
< YARROW_SLOW_K
) ? (YARROW_SLOW_K
- k
) : 0;