@@ -30,7 +30,7 @@
 #include <u-boot/sha256.h>
 #include <u-boot/md5.h>
 
-#ifdef CONFIG_SHA1
+#if defined(CONFIG_SHA1) && !defined(CONFIG_SHA_PROG_HW_ACCEL)
 static int hash_init_sha1(struct hash_algo *algo, void **ctxp)
 {
 	sha1_context *ctx = malloc(sizeof(sha1_context));
@@ -58,7 +58,7 @@ static int hash_finish_sha1(struct hash_algo *algo, void *ctx, void *dest_buf,
 }
 #endif
 
-#ifdef CONFIG_SHA256
+#if defined(CONFIG_SHA256) && !defined(CONFIG_SHA_PROG_HW_ACCEL)
 static int hash_init_sha256(struct hash_algo *algo, void **ctxp)
 {
 	sha256_context *ctx = malloc(sizeof(sha256_context));
@@ -113,68 +113,61 @@ static int hash_finish_crc32(struct hash_algo *algo, void *ctx, void *dest_buf,
 }
 
 /*
- * These are the hash algorithms we support. Chips which support accelerated
- * crypto could perhaps add named version of these algorithms here. Note that
- * algorithm names must be in lower case.
+ * These are the hash algorithms we support. If hardware acceleration is
+ * enabled we will use that, otherwise a software version of the algorithm.
+ * Note that algorithm names must be in lower case.
  */
 static struct hash_algo hash_algo[] = {
-	/*
-	 * CONFIG_SHA_HW_ACCEL is defined if hardware acceleration is
-	 * available.
-	 */
-#ifdef CONFIG_SHA_HW_ACCEL
+#ifdef CONFIG_SHA1
 	{
-		"sha1",
-		SHA1_SUM_LEN,
-		hw_sha1,
-		CHUNKSZ_SHA1,
-#ifdef CONFIG_SHA_PROG_HW_ACCEL
-		hw_sha_init,
-		hw_sha_update,
-		hw_sha_finish,
+		.name = "sha1",
+		.digest_size = SHA1_SUM_LEN,
+		.chunk_size = CHUNKSZ_SHA1,
+#ifdef CONFIG_SHA_HW_ACCEL
+		.hash_func_ws = hw_sha1,
+#else
+		.hash_func_ws = sha1_csum_wd,
 #endif
-	}, {
-		"sha256",
-		SHA256_SUM_LEN,
-		hw_sha256,
-		CHUNKSZ_SHA256,
 #ifdef CONFIG_SHA_PROG_HW_ACCEL
-		hw_sha_init,
-		hw_sha_update,
-		hw_sha_finish,
-#endif
-	},
+		.hash_init = hw_sha_init,
+		.hash_update = hw_sha_update,
+		.hash_finish = hw_sha_finish,
+#else
+		.hash_init = hash_init_sha1,
+		.hash_update = hash_update_sha1,
+		.hash_finish = hash_finish_sha1,
 #endif
-#ifdef CONFIG_SHA1
-	{
-		"sha1",
-		SHA1_SUM_LEN,
-		sha1_csum_wd,
-		CHUNKSZ_SHA1,
-		hash_init_sha1,
-		hash_update_sha1,
-		hash_finish_sha1,
 	},
 #endif
 #ifdef CONFIG_SHA256
 	{
-		"sha256",
-		SHA256_SUM_LEN,
-		sha256_csum_wd,
-		CHUNKSZ_SHA256,
-		hash_init_sha256,
-		hash_update_sha256,
-		hash_finish_sha256,
+		.name = "sha256",
+		.digest_size = SHA256_SUM_LEN,
+		.chunk_size = CHUNKSZ_SHA256,
+#ifdef CONFIG_SHA_HW_ACCEL
+		.hash_func_ws = hw_sha256,
+#else
+		.hash_func_ws = sha256_csum_wd,
+#endif
+#ifdef CONFIG_SHA_PROG_HW_ACCEL
+		.hash_init = hw_sha_init,
+		.hash_update = hw_sha_update,
+		.hash_finish = hw_sha_finish,
+#else
+		.hash_init = hash_init_sha256,
+		.hash_update = hash_update_sha256,
+		.hash_finish = hash_finish_sha256,
+#endif
 	},
 #endif
 	{
-		"crc32",
-		4,
-		crc32_wd_buf,
-		CHUNKSZ_CRC32,
-		hash_init_crc32,
-		hash_update_crc32,
-		hash_finish_crc32,
+		.name = "crc32",
+		.digest_size = 4,
+		.chunk_size = CHUNKSZ_CRC32,
+		.hash_func_ws = crc32_wd_buf,
+		.hash_init = hash_init_crc32,
+		.hash_update = hash_update_crc32,
+		.hash_finish = hash_finish_crc32,
 	},
 };
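As a worked example of how the conditionals in the table resolve: on a board that defines CONFIG_SHA1 and CONFIG_SHA_HW_ACCEL but not CONFIG_SHA_PROG_HW_ACCEL (an illustrative configuration, not one taken from this patch), the preprocessor reduces the sha1 entry to:

	{
		.name = "sha1",
		.digest_size = SHA1_SUM_LEN,
		.chunk_size = CHUNKSZ_SHA1,
		.hash_func_ws = hw_sha1,	/* whole-buffer hashing in hardware */
		.hash_init = hash_init_sha1,	/* progressive hashing stays in software */
		.hash_update = hash_update_sha1,
		.hash_finish = hash_finish_sha1,
	},

This is also why the software wrappers at the top of the file gain the !defined(CONFIG_SHA_PROG_HW_ACCEL) guard: when progressive hardware hashing is available the table points at hw_sha_init/hw_sha_update/hw_sha_finish instead, and the software wrappers would otherwise be compiled without ever being referenced.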