image: Drop if/elseif hash selection in calculate_hash()
calculate_hash() would try to select the appropriate hashing function
through an if/else-if construct. But that is exactly why
hash_lookup_algo() exists, so use it instead.
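
For illustration, a rough caller-side sketch of the reworked flow (the
calculate_hash() prototype matches the hunk below; the buffer size and
the "sha256" name are only examples, not part of this patch):

  /*
   * Illustrative sketch only. Assumes the calculate_hash()
   * declaration from include/image.h and a value buffer large
   * enough for the biggest supported digest (64 bytes for sha512).
   */
  static int example_hash_region(const void *data, int data_len)
  {
          uint8_t value[64];
          int value_len;

          /*
           * Any algorithm known to hash_lookup_algo() can be named;
           * unknown names simply make calculate_hash() return -1.
           */
          if (calculate_hash(data, data_len, "sha256", value, &value_len))
                  return -1;

          /* value[0..value_len) now holds the digest */
          return value_len;
  }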
This does mean that we now have to 'select HASH' to make sure we get
the hash_lookup_algo() symbol. However, the change makes sense because
even basic FITs will have to deal with "hash" nodes.
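
For context, this is roughly the interface that 'select HASH' pulls in
(an abridged sketch of include/hash.h, showing only the members the new
calculate_hash() consumes; see the header for the real definition):

  struct hash_algo {
          const char *name;       /* e.g. "crc32", "sha1", "sha256" */
          int digest_size;        /* bytes written to the output buffer */
          int chunk_size;         /* watchdog-friendly chunk size */
          /* whole-buffer hashing function used by calculate_hash() */
          void (*hash_func_ws)(const unsigned char *input,
                               unsigned int ilen, unsigned char *output,
                               unsigned int chunk_sz);
          /* progressive init/update/finish ops omitted here */
  };

  /* Implemented in common/hash.c, hence the HASH/SPL_HASH selects */
  int hash_lookup_algo(const char *algo_name, struct hash_algo **algop);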
My only concern is that the 'select SPL_HASH' might cause some
platforms to grow above their SPL size allowance.
Signed-off-by: Alexandru Gagniuc <mr.nuke.me@gmail.com>
[trini: Make SPL_CRYPTO be implied only on ARM && SPL]
Signed-off-by: Tom Rini <trini@konsulko.com>
diff --git a/common/Kconfig.boot b/common/Kconfig.boot
index 1f365d4..902a5b8 100644
--- a/common/Kconfig.boot
+++ b/common/Kconfig.boot
@@ -11,6 +11,7 @@
config FIT
bool "Support Flattened Image Tree"
+ select HASH
select MD5
select SHA1
imply SHA256
@@ -134,6 +135,7 @@
config SPL_FIT
bool "Support Flattened Image Tree within SPL"
depends on SPL
+ select SPL_HASH
select SPL_OF_LIBFDT
config SPL_FIT_PRINT
diff --git a/common/image-fit.c b/common/image-fit.c
index aff4670..92d9141 100644
--- a/common/image-fit.c
+++ b/common/image-fit.c
@@ -1193,6 +1193,12 @@
return 0;
}
+static void crc32_uimage_fixup(void *value)
+{
+ /* TODO: In C, this type punning is undefined behavior: */
+ *((uint32_t *)value) = cpu_to_uimage(*((uint32_t *)value));
+}
+
/**
* calculate_hash - calculate and return hash for provided input data
* @data: pointer to the input data
@@ -1211,37 +1217,24 @@
* 0, on success
* -1, when algo is unsupported
*/
-int calculate_hash(const void *data, int data_len, const char *algo,
+int calculate_hash(const void *data, int data_len, const char *name,
uint8_t *value, int *value_len)
{
- if (IMAGE_ENABLE_CRC32 && strcmp(algo, "crc32") == 0) {
- *((uint32_t *)value) = crc32_wd(0, data, data_len,
- CHUNKSZ_CRC32);
- *((uint32_t *)value) = cpu_to_uimage(*((uint32_t *)value));
- *value_len = 4;
- } else if (CONFIG_IS_ENABLED(SHA1) && strcmp(algo, "sha1") == 0) {
- sha1_csum_wd((unsigned char *)data, data_len,
- (unsigned char *)value, CHUNKSZ_SHA1);
- *value_len = 20;
- } else if (CONFIG_IS_ENABLED(SHA256) && strcmp(algo, "sha256") == 0) {
- sha256_csum_wd((unsigned char *)data, data_len,
- (unsigned char *)value, CHUNKSZ_SHA256);
- *value_len = SHA256_SUM_LEN;
- } else if (CONFIG_IS_ENABLED(SHA384) && strcmp(algo, "sha384") == 0) {
- sha384_csum_wd((unsigned char *)data, data_len,
- (unsigned char *)value, CHUNKSZ_SHA384);
- *value_len = SHA384_SUM_LEN;
- } else if (CONFIG_IS_ENABLED(SHA512) && strcmp(algo, "sha512") == 0) {
- sha512_csum_wd((unsigned char *)data, data_len,
- (unsigned char *)value, CHUNKSZ_SHA512);
- *value_len = SHA512_SUM_LEN;
- } else if (IMAGE_ENABLE_MD5 && strcmp(algo, "md5") == 0) {
- md5_wd((unsigned char *)data, data_len, value, CHUNKSZ_MD5);
- *value_len = 16;
- } else {
+ struct hash_algo *algo;
+ int ret;
+
+ ret = hash_lookup_algo(name, &algo);
+ if (ret < 0) {
debug("Unsupported hash alogrithm\n");
return -1;
}
+
+ algo->hash_func_ws(data, data_len, value, algo->chunk_size);
+ *value_len = algo->digest_size;
+
+ if (!strcmp(name, "crc32"))
+ crc32_uimage_fixup(value);
+
return 0;
}
diff --git a/drivers/crypto/fsl/Kconfig b/drivers/crypto/fsl/Kconfig
index 1f5dfb9..94ff540 100644
--- a/drivers/crypto/fsl/Kconfig
+++ b/drivers/crypto/fsl/Kconfig
@@ -1,6 +1,8 @@
config FSL_CAAM
bool "Freescale Crypto Driver Support"
select SHA_HW_ACCEL
+ # hw_sha1() under drivers/crypto, and needed with SHA_HW_ACCEL
+ imply SPL_CRYPTO if (ARM && SPL)
imply CMD_HASH
help
Enables the Freescale's Cryptographic Accelerator and Assurance