cmd: fix 'fdt get value'

The 32-bit cells of a device-tree property are stored big-endian. When
printing them via %08x we must first convert them to the host's endianness.
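
As an illustration only (not part of this patch), a minimal host-side sketch
of the problem; ntohl() stands in for U-Boot's fdt32_to_cpu() and the cell
value 0x12345678 is made up:

  #include <stdio.h>
  #include <arpa/inet.h>

  int main(void)
  {
          /* A 32-bit cell as stored in the device tree: big-endian. */
          unsigned int be_cell = htonl(0x12345678);

          /* On a little-endian host this prints the byte-swapped value. */
          printf("raw:       0x%08X\n", be_cell);
          /* Converting to host endianness first yields the intended value. */
          printf("converted: 0x%08X\n", ntohl(be_cell));
          return 0;
  }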

Remove the restriction to a length of 20 bytes. This would not allow reading
a SHA256 value, which is 32 bytes long.
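
A quick sizing check (a back-of-the-envelope sketch, not part of the patch;
the only inputs are the SHA1 and SHA256 digest lengths of 20 and 32 bytes):

  #include <assert.h>

  int main(void)
  {
          /* Hex rendering needs two characters per byte plus a NUL. */
          assert(2 * 20 + 1 == 41);  /* SHA1: fits the old char buf[41] */
          assert(2 * 32 + 1 == 65);  /* SHA256: does not fit buf[41] */
          return 0;
  }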

Signed-off-by: Heinrich Schuchardt <heinrich.schuchardt@canonical.com>
Author:    Heinrich Schuchardt
Date:      2025-11-23 23:57:10 +01:00
Committer: Tom Rini
Parent:    c2ee1e3c4a
Commit:    21fbda84b3

2 changed files with 16 additions and 12 deletions

@@ -9,14 +9,15 @@
 #include <command.h>
 #include <env.h>
+#include <fdt_support.h>
 #include <image.h>
+#include <malloc.h>
+#include <mapmem.h>
+#include <asm/global_data.h>
+#include <asm/io.h>
 #include <linux/ctype.h>
 #include <linux/types.h>
-#include <asm/global_data.h>
 #include <linux/libfdt.h>
-#include <fdt_support.h>
-#include <mapmem.h>
-#include <asm/io.h>
 
 #define MAX_LEVEL 32 /* how deeply nested we will go */
 #define SCRATCHPAD 1024 /* bytes of scratchpad memory */
 
@@ -91,18 +92,21 @@ static int fdt_value_env_set(const void *nodep, int len,
         sprintf(buf, "0x%08X", fdt32_to_cpu(*(nodec + index)));
         env_set(var, buf);
-    } else if (len % 4 == 0 && len <= 20) {
+    } else {
         /* Needed to print things like sha1 hashes. */
-        char buf[41];
+        char *buf;
+        const unsigned int *nodec = (const unsigned int *)nodep;
         int i;
 
-        for (i = 0; i < len; i += sizeof(unsigned int))
+        buf = malloc(2 * len + 7);
+        if (!buf)
+            return CMD_RET_FAILURE;
+        for (i = 0; i < len; i += 4)
             sprintf(buf + (i * 2), "%08x",
-                *(unsigned int *)(nodep + i));
+                fdt32_to_cpu(*nodec++));
+        buf[2 * len] = 0;
         env_set(var, buf);
-    } else {
-        printf("error: unprintable value\n");
-        return 1;
+        free(buf);
     }
     return 0;
 }
 
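
For reference, a standalone host-side approximation of the new conversion
loop (a sketch, not the patch itself): the helper name prop_to_hex() is made
up, ntohl() stands in for fdt32_to_cpu(), printf() stands in for env_set(),
and the property contents are invented:

  #include <stdio.h>
  #include <stdlib.h>
  #include <arpa/inet.h>

  /* Render a property value as a hex string, one 32-bit cell at a time. */
  static char *prop_to_hex(const void *nodep, int len)
  {
          const unsigned int *nodec = nodep;
          char *buf;
          int i;

          /*
           * Two characters per byte plus a NUL; when len is not a multiple
           * of 4 the last sprintf() may overshoot by up to six characters,
           * hence the extra slack.
           */
          buf = malloc(2 * len + 7);
          if (!buf)
                  return NULL;
          for (i = 0; i < len; i += 4)
                  sprintf(buf + (i * 2), "%08x", ntohl(*nodec++));
          buf[2 * len] = 0;
          return buf;
  }

  int main(void)
  {
          /* Made-up property: two cells stored big-endian, as in the FDT. */
          unsigned int prop[2] = { htonl(0xdeadbeef), htonl(0x00112233) };
          char *s = prop_to_hex(prop, sizeof(prop));

          if (s) {
                  printf("%s\n", s);  /* prints "deadbeef00112233" */
                  free(s);
          }
          return 0;
  }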