test-sha1: test hashing large buffer

Testing hashing of a large buffer in one go is more important than
hashing a large amount of data in small fixed chunks.

Signed-off-by: Junio C Hamano <junkio@cox.net>
Junio C Hamano 2006-06-24 02:59:20 -07:00
parent b65bc21e7d
commit b57cbbf8a8
2 changed files with 32 additions and 9 deletions
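
With this change, test-sha1 accepts an optional buffer size in MiB as its first argument: it mallocs a buffer of that size (halving it until the allocation succeeds), fills it from stdin, and hands each filled buffer to SHA1_Update() in a single call. Without an argument, or with 0, it keeps the old 8192-byte chunking. A minimal sketch of both invocations, assuming the helper has been built as ./test-sha1 in the current directory:

	# old path: read and hash stdin in 8 KiB chunks
	dd if=/dev/zero bs=1048576 count=100 2>/dev/null | ./test-sha1

	# new path: fill a 100 MiB buffer and hash it with one SHA1_Update() call
	dd if=/dev/zero bs=1048576 count=100 2>/dev/null | ./test-sha1 100

Both print the same SHA-1 (here, of 100 MiB of NUL bytes); what differs is how much data each SHA1_Update() call sees.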

test-sha1.c

@@ -4,21 +4,44 @@ int main(int ac, char **av)
 {
 	SHA_CTX ctx;
 	unsigned char sha1[20];
+	unsigned bufsz = 8192;
+	char *buffer;
+
+	if (ac == 2)
+		bufsz = strtoul(av[1], NULL, 10) * 1024 * 1024;
+
+	if (!bufsz)
+		bufsz = 8192;
+
+	while ((buffer = malloc(bufsz)) == NULL) {
+		fprintf(stderr, "bufsz %u is too big, halving...\n", bufsz);
+		bufsz /= 2;
+		if (bufsz < 1024)
+			die("OOPS");
+	}
 
 	SHA1_Init(&ctx);
 
 	while (1) {
-		ssize_t sz;
-		char buffer[8192];
-
-		sz = xread(0, buffer, sizeof(buffer));
-		if (sz == 0)
+		ssize_t sz, this_sz;
+		char *cp = buffer;
+		unsigned room = bufsz;
+		this_sz = 0;
+		while (room) {
+			sz = xread(0, cp, room);
+			if (sz == 0)
+				break;
+			if (sz < 0)
+				die("test-sha1: %s", strerror(errno));
+			this_sz += sz;
+			cp += sz;
+			room -= sz;
+		}
+		if (this_sz == 0)
 			break;
-		if (sz < 0)
-			die("test-sha1: %s", strerror(errno));
-		SHA1_Update(&ctx, buffer, sz);
+		SHA1_Update(&ctx, buffer, this_sz);
 	}
 	SHA1_Final(sha1, &ctx);
 	puts(sha1_to_hex(sha1));
 	exit(0);
 }

test-sha1.sh

@@ -11,7 +11,7 @@ do
 		test -z "$pfx" || echo "$pfx"
 		dd if=/dev/zero bs=1048576 count=$cnt 2>/dev/null |
 		tr '[\0]' '[g]'
-	} | ./test-sha1
+	} | ./test-sha1 $cnt
 	`
 	if test "$expect" = "$actual"
 	then
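
The driver now passes each test's megabyte count along to the helper, so the data is hashed from one large buffer instead of 8 KiB chunks. For reference, a single case can be reproduced by hand; this sketch assumes a vector of 100 MiB of 'g' bytes with no prefix line and the helper built as ./test-sha1:

	# emit 100 MiB of 'g' and hash it through a single 100 MiB buffer;
	# the printed SHA-1 is what the script compares against $expect
	{
		dd if=/dev/zero bs=1048576 count=100 2>/dev/null |
		tr '[\0]' '[g]'
	} | ./test-sha1 100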