From 42033ea808ae3b97d909f538e422e63dadfd8f6c Mon Sep 17 00:00:00 2001
From: zhibog
Date: Fri, 31 Dec 2021 13:16:11 +0100
Subject: [PATCH] Extended crypto API with variants that write the result into
 a destination buffer instead of returning it

---
 core/crypto/README.md | 8 +-
 core/crypto/blake/blake.odin | 145 +++--
 core/crypto/blake2b/blake2b.odin | 40 +-
 core/crypto/blake2s/blake2s.odin | 40 +-
 core/crypto/gost/gost.odin | 36 +-
 core/crypto/groestl/groestl.odin | 145 +++--
 core/crypto/haval/haval.odin | 564 +++++++++++++++++++++-----
 core/crypto/jh/jh.odin | 145 +++--
 core/crypto/keccak/keccak.odin | 167 ++++--
 core/crypto/md2/md2.odin | 64 ++-
 core/crypto/md4/md4.odin | 40 +-
 core/crypto/md5/md5.odin | 40 +-
 core/crypto/ripemd/ripemd.odin | 141 +++--
 core/crypto/sha1/sha1.odin | 37 +-
 core/crypto/sha2/sha2.odin | 145 +++--
 core/crypto/sha3/sha3.odin | 161 ++++--
 core/crypto/shake/shake.odin | 85 +++-
 core/crypto/sm3/sm3.odin | 43 +-
 core/crypto/streebog/streebog.odin | 72 +++-
 core/crypto/tiger/tiger.odin | 109 ++++-
 core/crypto/tiger2/tiger2.odin | 109 ++++-
 core/crypto/whirlpool/whirlpool.odin | 36 +-
 vendor/botan/README.md | 8 +-
 vendor/botan/blake2b/blake2b.odin | 37 +-
 vendor/botan/gost/gost.odin | 37 +-
 vendor/botan/keccak/keccak.odin | 37 +-
 vendor/botan/md4/md4.odin | 37 +-
 vendor/botan/md5/md5.odin | 37 +-
 vendor/botan/ripemd/ripemd.odin | 37 +-
 vendor/botan/sha1/sha1.odin | 37 +-
 vendor/botan/sha2/sha2.odin | 145 +++--
 vendor/botan/sha3/sha3.odin | 145 +++--
 vendor/botan/shake/shake.odin | 73 +++-
 vendor/botan/skein512/skein512.odin | 94 ++++-
 vendor/botan/sm3/sm3.odin | 37 +-
 vendor/botan/streebog/streebog.odin | 73 +++-
 vendor/botan/tiger/tiger.odin | 109 ++++-
 vendor/botan/whirlpool/whirlpool.odin | 37 +-
 38 files changed, 2690 insertions(+), 662 deletions(-)

diff --git a/core/crypto/README.md b/core/crypto/README.md
index 5955f9c560a..ddcb12d81eb 100644
--- a/core/crypto/README.md
+++ b/core/crypto/README.md
@@ -32,9 +32,11 @@ Please see the chart below for the options.
 #### High level API
 Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_`\*.
-Included in these groups are four procedures.
+Included in these groups are six procedures.
 * `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally
 * `hash_bytes` - Hash a given byte slice and return the computed hash
+* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally
+* `hash_bytes_to_buffer` - Hash a given byte slice and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash
 * `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it
 * `hash_file` - Takes a file handle and returns the computed hash from it.
A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true) @@ -59,6 +61,10 @@ main :: proc() { // Compute the hash, using the high level API computed_hash := md4.hash(input) + // Variant that takes a destination buffer, instead of returning the computed hash + hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash + md4.hash(input, hash[:]) + // Compute the hash, using the low level API ctx: md4.Md4_Context computed_hash_low: [16]byte diff --git a/core/crypto/blake/blake.odin b/core/crypto/blake/blake.odin index 9d53f8a8950..81924ab1efd 100644 --- a/core/crypto/blake/blake.odin +++ b/core/crypto/blake/blake.odin @@ -17,16 +17,21 @@ import "core:io" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc "contextless" (data: string) -> [28]byte { +hash_string_224 :: proc "contextless" (data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc "contextless" (data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Blake256_Context ctx.is224 = true init(&ctx) @@ -35,10 +40,29 @@ hash_bytes_224 :: proc "contextless" (data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Blake256_Context + ctx.is224 = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Blake256_Context ctx.is224 = true init(&ctx) @@ -57,7 +81,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -65,7 +89,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -73,18 +97,20 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc "contextless" (data: string) -> [32]byte { +hash_string_256 :: proc "contextless" (data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc "contextless" (data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Blake256_Context ctx.is224 = false init(&ctx) @@ -93,10 +119,29 @@ hash_bytes_256 :: proc "contextless" (data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Blake256_Context + ctx.is224 = false + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Blake256_Context ctx.is224 = false init(&ctx) @@ -115,7 +160,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -123,7 +168,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -131,18 +176,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc "contextless" (data: string) -> [48]byte { +hash_string_384 :: proc "contextless" (data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc "contextless" (data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: Blake512_Context ctx.is384 = true init(&ctx) @@ -151,10 +198,29 @@ hash_bytes_384 :: proc "contextless" (data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: Blake512_Context + ctx.is384 = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: Blake512_Context ctx.is384 = true init(&ctx) @@ -173,7 +239,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -181,7 +247,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -189,18 +255,20 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc "contextless" (data: string) -> [64]byte { +hash_string_512 :: proc "contextless" (data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc "contextless" (data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc "contextless" (data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: Blake512_Context ctx.is384 = false init(&ctx) @@ -209,10 +277,29 @@ hash_bytes_512 :: proc "contextless" (data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: Blake512_Context + ctx.is384 = false + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: Blake512_Context ctx.is384 = false init(&ctx) @@ -231,7 +318,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -239,7 +326,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -247,6 +334,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/core/crypto/blake2b/blake2b.odin b/core/crypto/blake2b/blake2b.odin index 85f9611f950..6d4689b886c 100644 --- a/core/crypto/blake2b/blake2b.odin +++ b/core/crypto/blake2b/blake2b.odin @@ -20,16 +20,18 @@ import "../_blake2" High level API */ +DIGEST_SIZE :: 64 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [64]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: _blake2.Blake2b_Context cfg: _blake2.Blake2_Config cfg.size = _blake2.BLAKE2B_SIZE @@ -40,10 +42,32 @@ hash_bytes :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: _blake2.Blake2b_Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2B_SIZE + ctx.cfg = cfg + _blake2.init(&ctx) + _blake2.update(&ctx, data) + _blake2.final(&ctx, hash) +} + + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: _blake2.Blake2b_Context cfg: _blake2.Blake2_Config cfg.size = _blake2.BLAKE2B_SIZE @@ -64,7 +88,7 @@ hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -72,7 +96,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { return hash_bytes(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -80,6 +104,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/core/crypto/blake2s/blake2s.odin b/core/crypto/blake2s/blake2s.odin index 72d15b22718..ad2e800fd65 100644 --- a/core/crypto/blake2s/blake2s.odin +++ b/core/crypto/blake2s/blake2s.odin @@ -20,16 +20,18 @@ import "../_blake2" High level API */ +DIGEST_SIZE :: 32 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [32]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: _blake2.Blake2s_Context cfg: _blake2.Blake2_Config cfg.size = _blake2.BLAKE2S_SIZE @@ -40,10 +42,32 @@ hash_bytes :: proc(data: []byte) -> [32]byte { return hash } + +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: _blake2.Blake2s_Context + cfg: _blake2.Blake2_Config + cfg.size = _blake2.BLAKE2S_SIZE + ctx.cfg = cfg + _blake2.init(&ctx) + _blake2.update(&ctx, data) + _blake2.final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: _blake2.Blake2s_Context cfg: _blake2.Blake2_Config cfg.size = _blake2.BLAKE2S_SIZE @@ -64,7 +88,7 @@ hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -72,7 +96,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { return hash_bytes(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -80,6 +104,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/core/crypto/gost/gost.odin b/core/crypto/gost/gost.odin index c687e90801e..eed684f7262 100644 --- a/core/crypto/gost/gost.odin +++ b/core/crypto/gost/gost.odin @@ -18,16 +18,18 @@ import "core:io" High level API */ +DIGEST_SIZE :: 32 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [32]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Gost_Context init(&ctx) update(&ctx, data) @@ -35,10 +37,28 @@ hash_bytes :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Gost_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Gost_Context init(&ctx) buf := make([]byte, 512) @@ -56,7 +76,7 @@ hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -64,7 +84,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { return hash_bytes(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -72,6 +92,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/core/crypto/groestl/groestl.odin b/core/crypto/groestl/groestl.odin index 0d305a1d11e..5434e31e01b 100644 --- a/core/crypto/groestl/groestl.odin +++ b/core/crypto/groestl/groestl.odin @@ -17,16 +17,21 @@ import "core:io" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Groestl_Context ctx.hashbitlen = 224 init(&ctx) @@ -35,10 +40,29 @@ hash_bytes_224 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Groestl_Context + ctx.hashbitlen = 224 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Groestl_Context ctx.hashbitlen = 224 init(&ctx) @@ -57,7 +81,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -65,7 +89,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -73,18 +97,20 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Groestl_Context ctx.hashbitlen = 256 init(&ctx) @@ -93,10 +119,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Groestl_Context + ctx.hashbitlen = 256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Groestl_Context ctx.hashbitlen = 256 init(&ctx) @@ -115,7 +160,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -123,7 +168,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -131,18 +176,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: Groestl_Context ctx.hashbitlen = 384 init(&ctx) @@ -151,10 +198,29 @@ hash_bytes_384 :: proc(data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: Groestl_Context + ctx.hashbitlen = 384 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: Groestl_Context ctx.hashbitlen = 384 init(&ctx) @@ -173,7 +239,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -181,7 +247,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -189,18 +255,20 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: Groestl_Context ctx.hashbitlen = 512 init(&ctx) @@ -209,10 +277,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: Groestl_Context + ctx.hashbitlen = 512 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: Groestl_Context ctx.hashbitlen = 512 init(&ctx) @@ -231,7 +318,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -239,7 +326,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -247,6 +334,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/core/crypto/haval/haval.odin b/core/crypto/haval/haval.odin index 76532d4cd35..f95ea344d67 100644 --- a/core/crypto/haval/haval.odin +++ b/core/crypto/haval/haval.odin @@ -20,16 +20,22 @@ import "../util" High level API */ +DIGEST_SIZE_128 :: 16 +DIGEST_SIZE_160 :: 20 +DIGEST_SIZE_192 :: 24 +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 + // hash_string_128_3 will hash the given input and return the // computed hash -hash_string_128_3 :: proc(data: string) -> [16]byte { +hash_string_128_3 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128_3(transmute([]byte)(data)) } // hash_bytes_128_3 will hash the given input and return the // computed hash -hash_bytes_128_3 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128_3 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: Haval_Context ctx.hashbitlen = 128 ctx.rounds = 3 @@ -40,10 +46,31 @@ hash_bytes_128_3 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128_3 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128_3 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128_3(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128_3 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128_3 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 128 + ctx.rounds = 3 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_128_3 will read the stream in chunks and compute a // hash from its contents -hash_stream_128_3 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: Haval_Context ctx.hashbitlen = 128 ctx.rounds = 3 @@ -64,7 +91,7 @@ hash_stream_128_3 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128_3 will read the file provided by the given handle // and compute a hash -hash_file_128_3 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128_3(os.stream_from_handle(hd)) } else { @@ -72,7 +99,7 @@ hash_file_128_3 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool return hash_bytes_128_3(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128_3 :: proc { @@ -80,18 +107,20 @@ hash_128_3 :: proc { hash_file_128_3, hash_bytes_128_3, hash_string_128_3, + hash_bytes_to_buffer_128_3, + hash_string_to_buffer_128_3, } // hash_string_128_4 will hash the given input and return the // computed hash -hash_string_128_4 :: proc(data: string) -> [16]byte { +hash_string_128_4 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128_4(transmute([]byte)(data)) } // hash_bytes_128_4 will hash the given input and return the // computed hash -hash_bytes_128_4 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128_4 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: Haval_Context ctx.hashbitlen = 128 ctx.rounds = 4 @@ -102,10 +131,31 @@ hash_bytes_128_4 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128_4 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128_4 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128_4(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128_4 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128_4 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 128 + ctx.rounds = 4 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_128_4 will read the stream in chunks and compute a // hash from its contents -hash_stream_128_4 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: Haval_Context ctx.hashbitlen = 128 ctx.rounds = 4 @@ -126,7 +176,7 @@ hash_stream_128_4 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128_4 will read the file provided by the given handle // and compute a hash -hash_file_128_4 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128_4(os.stream_from_handle(hd)) } else { @@ -134,7 +184,7 @@ hash_file_128_4 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool return hash_bytes_128_4(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128_4 :: proc { @@ -142,18 +192,20 @@ hash_128_4 :: proc { hash_file_128_4, hash_bytes_128_4, hash_string_128_4, + hash_bytes_to_buffer_128_4, + hash_string_to_buffer_128_4, } // hash_string_128_5 will hash the given input and return the // computed hash -hash_string_128_5 :: proc(data: string) -> [16]byte { +hash_string_128_5 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128_5(transmute([]byte)(data)) } // hash_bytes_128_5 will hash the given input and return the // computed hash -hash_bytes_128_5 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128_5 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: Haval_Context ctx.hashbitlen = 128 ctx.rounds = 5 @@ -164,10 +216,31 @@ hash_bytes_128_5 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128_5 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128_5 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128_5(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128_5 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128_5 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 128 + ctx.rounds = 5 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_128_5 will read the stream in chunks and compute a // hash from its contents -hash_stream_128_5 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: Haval_Context ctx.hashbitlen = 128 ctx.rounds = 5 @@ -188,7 +261,7 @@ hash_stream_128_5 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128_5 will read the file provided by the given handle // and compute a hash -hash_file_128_5 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128_5(os.stream_from_handle(hd)) } else { @@ -196,7 +269,7 @@ hash_file_128_5 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool return hash_bytes_128_5(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128_5 :: proc { @@ -204,18 +277,20 @@ hash_128_5 :: proc { hash_file_128_5, hash_bytes_128_5, hash_string_128_5, + hash_bytes_to_buffer_128_5, + hash_string_to_buffer_128_5, } // hash_string_160_3 will hash the given input and return the // computed hash -hash_string_160_3 :: proc(data: string) -> [20]byte { +hash_string_160_3 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160_3(transmute([]byte)(data)) } // hash_bytes_160_3 will hash the given input and return the // computed hash -hash_bytes_160_3 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160_3 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: Haval_Context ctx.hashbitlen = 160 ctx.rounds = 3 @@ -226,10 +301,31 @@ hash_bytes_160_3 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160_3 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160_3 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160_3(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160_3 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160_3 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 160 + ctx.rounds = 3 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_160_3 will read the stream in chunks and compute a // hash from its contents -hash_stream_160_3 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: Haval_Context ctx.hashbitlen = 160 ctx.rounds = 3 @@ -250,7 +346,7 @@ hash_stream_160_3 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160_3 will read the file provided by the given handle // and compute a hash -hash_file_160_3 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160_3(os.stream_from_handle(hd)) } else { @@ -258,7 +354,7 @@ hash_file_160_3 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool return hash_bytes_160_3(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160_3 :: proc { @@ -266,18 +362,20 @@ hash_160_3 :: proc { hash_file_160_3, hash_bytes_160_3, hash_string_160_3, + hash_bytes_to_buffer_160_3, + hash_string_to_buffer_160_3, } // hash_string_160_4 will hash the given input and return the // computed hash -hash_string_160_4 :: proc(data: string) -> [20]byte { +hash_string_160_4 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160_4(transmute([]byte)(data)) } // hash_bytes_160_4 will hash the given input and return the // computed hash -hash_bytes_160_4 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160_4 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: Haval_Context ctx.hashbitlen = 160 ctx.rounds = 4 @@ -288,10 +386,31 @@ hash_bytes_160_4 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160_4 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160_4 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160_4(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160_4 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160_4 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 160 + ctx.rounds = 4 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_160_4 will read the stream in chunks and compute a // hash from its contents -hash_stream_160_4 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: Haval_Context ctx.hashbitlen = 160 ctx.rounds = 4 @@ -312,7 +431,7 @@ hash_stream_160_4 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160_4 will read the file provided by the given handle // and compute a hash -hash_file_160_4 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160_4(os.stream_from_handle(hd)) } else { @@ -320,7 +439,7 @@ hash_file_160_4 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool return hash_bytes_160_4(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160_4 :: proc { @@ -328,18 +447,20 @@ hash_160_4 :: proc { hash_file_160_4, hash_bytes_160_4, hash_string_160_4, + hash_bytes_to_buffer_160_4, + hash_string_to_buffer_160_4, } // hash_string_160_5 will hash the given input and return the // computed hash -hash_string_160_5 :: proc(data: string) -> [20]byte { +hash_string_160_5 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160_5(transmute([]byte)(data)) } // hash_bytes_160_5 will hash the given input and return the // computed hash -hash_bytes_160_5 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160_5 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: Haval_Context ctx.hashbitlen = 160 ctx.rounds = 5 @@ -350,10 +471,31 @@ hash_bytes_160_5 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160_5 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160_5 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160_5(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160_5 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160_5 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 160 + ctx.rounds = 5 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_160_5 will read the stream in chunks and compute a // hash from its contents -hash_stream_160_5 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: Haval_Context ctx.hashbitlen = 160 ctx.rounds = 5 @@ -374,7 +516,7 @@ hash_stream_160_5 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160_5 will read the file provided by the given handle // and compute a hash -hash_file_160_5 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160_5(os.stream_from_handle(hd)) } else { @@ -382,7 +524,7 @@ hash_file_160_5 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool return hash_bytes_160_5(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160_5 :: proc { @@ -390,18 +532,20 @@ hash_160_5 :: proc { hash_file_160_5, hash_bytes_160_5, hash_string_160_5, + hash_bytes_to_buffer_160_5, + hash_string_to_buffer_160_5, } // hash_string_192_3 will hash the given input and return the // computed hash -hash_string_192_3 :: proc(data: string) -> [24]byte { +hash_string_192_3 :: proc(data: string) -> [DIGEST_SIZE_192]byte { return hash_bytes_192_3(transmute([]byte)(data)) } // hash_bytes_192_3 will hash the given input and return the // computed hash -hash_bytes_192_3 :: proc(data: []byte) -> [24]byte { - hash: [24]byte +hash_bytes_192_3 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { + hash: [DIGEST_SIZE_192]byte ctx: Haval_Context ctx.hashbitlen = 192 ctx.rounds = 3 @@ -412,10 +556,31 @@ hash_bytes_192_3 :: proc(data: []byte) -> [24]byte { return hash } +// hash_string_to_buffer_192_3 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_192_3 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_192_3(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_192_3 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_192_3 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 192 + ctx.rounds = 3 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_192_3 will read the stream in chunks and compute a // hash from its contents -hash_stream_192_3 :: proc(s: io.Stream) -> ([24]byte, bool) { - hash: [24]byte +hash_stream_192_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { + hash: [DIGEST_SIZE_192]byte ctx: Haval_Context ctx.hashbitlen = 192 ctx.rounds = 3 @@ -436,7 +601,7 @@ hash_stream_192_3 :: proc(s: io.Stream) -> ([24]byte, bool) { // hash_file_192_3 will read the file provided by the given handle // and compute a hash -hash_file_192_3 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) { +hash_file_192_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { if !load_at_once { return hash_stream_192_3(os.stream_from_handle(hd)) } else { @@ -444,7 +609,7 @@ hash_file_192_3 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool return hash_bytes_192_3(buf[:]), ok } } - return [24]byte{}, false + return [DIGEST_SIZE_192]byte{}, false } hash_192_3 :: proc { @@ -452,18 +617,20 @@ hash_192_3 :: proc { hash_file_192_3, hash_bytes_192_3, hash_string_192_3, + hash_bytes_to_buffer_192_3, + hash_string_to_buffer_192_3, } // hash_string_192_4 will hash the given input and return the // computed hash -hash_string_192_4 :: proc(data: string) -> [24]byte { +hash_string_192_4 :: proc(data: string) -> [DIGEST_SIZE_192]byte { return hash_bytes_192_4(transmute([]byte)(data)) } // hash_bytes_192_4 will hash the given input and return the // computed hash -hash_bytes_192_4 :: proc(data: []byte) -> [24]byte { - hash: [24]byte +hash_bytes_192_4 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { + hash: [DIGEST_SIZE_192]byte ctx: Haval_Context ctx.hashbitlen = 192 ctx.rounds = 4 @@ -474,10 +641,31 @@ hash_bytes_192_4 :: proc(data: []byte) -> [24]byte { return hash } +// hash_string_to_buffer_192_4 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_192_4 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_192_4(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_192_4 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_192_4 :: proc(data, hash: []byte) {
+    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
+    ctx: Haval_Context
+    ctx.hashbitlen = 192
+    ctx.rounds = 4
+    init(&ctx)
+    ctx.str_len = u32(len(data))
+    update(&ctx, data)
+    final(&ctx, hash)
+}
+
 // hash_stream_192_4 will read the stream in chunks and compute a
 // hash from its contents
-hash_stream_192_4 :: proc(s: io.Stream) -> ([24]byte, bool) {
-    hash: [24]byte
+hash_stream_192_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
+    hash: [DIGEST_SIZE_192]byte
     ctx: Haval_Context
     ctx.hashbitlen = 192
     ctx.rounds = 4
@@ -498,7 +686,7 @@ hash_stream_192_4 :: proc(s: io.Stream) -> ([24]byte, bool) {

 // hash_file_192_4 will read the file provided by the given handle
 // and compute a hash
-hash_file_192_4 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) {
+hash_file_192_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
     if !load_at_once {
         return hash_stream_192_4(os.stream_from_handle(hd))
     } else {
@@ -506,7 +694,7 @@ hash_file_192_4 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool
             return hash_bytes_192_4(buf[:]), ok
         }
     }
-    return [24]byte{}, false
+    return [DIGEST_SIZE_192]byte{}, false
 }

 hash_192_4 :: proc {
@@ -514,18 +702,20 @@ hash_192_4 :: proc {
     hash_file_192_4,
     hash_bytes_192_4,
     hash_string_192_4,
+    hash_bytes_to_buffer_192_4,
+    hash_string_to_buffer_192_4,
 }

 // hash_string_192_5 will hash the given input and return the
 // computed hash
-hash_string_192_5 :: proc(data: string) -> [24]byte {
+hash_string_192_5 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
     return hash_bytes_192_5(transmute([]byte)(data))
 }

-// hash_bytes_224_5 will hash the given input and return the
+// hash_bytes_192_5 will hash the given input and return the
 // computed hash
-hash_bytes_192_5 :: proc(data: []byte) -> [24]byte {
-    hash: [24]byte
+hash_bytes_192_5 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
+    hash: [DIGEST_SIZE_192]byte
     ctx: Haval_Context
     ctx.hashbitlen = 192
     ctx.rounds = 5
@@ -536,10 +726,31 @@ hash_bytes_192_5 :: proc(data: []byte) -> [24]byte {
     return hash
 }

+// hash_string_to_buffer_192_5 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_192_5 :: proc(data: string, hash: []byte) {
+    hash_bytes_to_buffer_192_5(transmute([]byte)(data), hash);
+}
+
+// hash_bytes_to_buffer_192_5 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_192_5 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 192 + ctx.rounds = 5 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_192_5 will read the stream in chunks and compute a // hash from its contents -hash_stream_192_5 :: proc(s: io.Stream) -> ([24]byte, bool) { - hash: [24]byte +hash_stream_192_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { + hash: [DIGEST_SIZE_192]byte ctx: Haval_Context ctx.hashbitlen = 192 ctx.rounds = 5 @@ -560,7 +771,7 @@ hash_stream_192_5 :: proc(s: io.Stream) -> ([24]byte, bool) { // hash_file_192_5 will read the file provided by the given handle // and compute a hash -hash_file_192_5 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) { +hash_file_192_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { if !load_at_once { return hash_stream_192_5(os.stream_from_handle(hd)) } else { @@ -568,7 +779,7 @@ hash_file_192_5 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool return hash_bytes_192_5(buf[:]), ok } } - return [24]byte{}, false + return [DIGEST_SIZE_192]byte{}, false } hash_192_5 :: proc { @@ -576,18 +787,20 @@ hash_192_5 :: proc { hash_file_192_5, hash_bytes_192_5, hash_string_192_5, + hash_bytes_to_buffer_192_5, + hash_string_to_buffer_192_5, } // hash_string_224_3 will hash the given input and return the // computed hash -hash_string_224_3 :: proc(data: string) -> [28]byte { +hash_string_224_3 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224_3(transmute([]byte)(data)) } // hash_bytes_224_3 will hash the given input and return the // computed hash -hash_bytes_224_3 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224_3 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Haval_Context ctx.hashbitlen = 224 ctx.rounds = 3 @@ -598,10 +811,31 @@ hash_bytes_224_3 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224_3 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224_3 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224_3(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224_3 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224_3 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 224 + ctx.rounds = 3 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224_3 will read the stream in chunks and compute a // hash from its contents -hash_stream_224_3 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Haval_Context ctx.hashbitlen = 224 ctx.rounds = 3 @@ -622,7 +856,7 @@ hash_stream_224_3 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224_3 will read the file provided by the given handle // and compute a hash -hash_file_224_3 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224_3(os.stream_from_handle(hd)) } else { @@ -630,7 +864,7 @@ hash_file_224_3 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool return hash_bytes_224_3(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224_3 :: proc { @@ -638,18 +872,20 @@ hash_224_3 :: proc { hash_file_224_3, hash_bytes_224_3, hash_string_224_3, + hash_bytes_to_buffer_224_3, + hash_string_to_buffer_224_3, } // hash_string_224_4 will hash the given input and return the // computed hash -hash_string_224_4 :: proc(data: string) -> [28]byte { +hash_string_224_4 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224_4(transmute([]byte)(data)) } // hash_bytes_224_4 will hash the given input and return the // computed hash -hash_bytes_224_4 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224_4 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Haval_Context ctx.hashbitlen = 224 ctx.rounds = 4 @@ -660,10 +896,31 @@ hash_bytes_224_4 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224_4 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224_4 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224_4(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224_4 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224_4 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 224 + ctx.rounds = 4 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224_4 will read the stream in chunks and compute a // hash from its contents -hash_stream_224_4 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Haval_Context ctx.hashbitlen = 224 ctx.rounds = 4 @@ -684,7 +941,7 @@ hash_stream_224_4 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224_4 will read the file provided by the given handle // and compute a hash -hash_file_224_4 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224_4(os.stream_from_handle(hd)) } else { @@ -692,7 +949,7 @@ hash_file_224_4 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool return hash_bytes_224_4(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224_4 :: proc { @@ -700,18 +957,20 @@ hash_224_4 :: proc { hash_file_224_4, hash_bytes_224_4, hash_string_224_4, + hash_bytes_to_buffer_224_4, + hash_string_to_buffer_224_4, } // hash_string_224_5 will hash the given input and return the // computed hash -hash_string_224_5 :: proc(data: string) -> [28]byte { +hash_string_224_5 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224_5(transmute([]byte)(data)) } // hash_bytes_224_5 will hash the given input and return the // computed hash -hash_bytes_224_5 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224_5 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Haval_Context ctx.hashbitlen = 224 ctx.rounds = 5 @@ -722,10 +981,31 @@ hash_bytes_224_5 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224_5 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224_5 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224_5(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224_5 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224_5 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 224 + ctx.rounds = 5 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224_5 will read the stream in chunks and compute a // hash from its contents -hash_stream_224_5 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Haval_Context ctx.hashbitlen = 224 ctx.rounds = 5 @@ -746,7 +1026,7 @@ hash_stream_224_5 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224_5 will read the file provided by the given handle // and compute a hash -hash_file_224_5 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224_5(os.stream_from_handle(hd)) } else { @@ -754,7 +1034,7 @@ hash_file_224_5 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool return hash_bytes_224_5(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224_5 :: proc { @@ -762,18 +1042,20 @@ hash_224_5 :: proc { hash_file_224_5, hash_bytes_224_5, hash_string_224_5, + hash_bytes_to_buffer_224_5, + hash_string_to_buffer_224_5, } // hash_string_256_3 will hash the given input and return the // computed hash -hash_string_256_3 :: proc(data: string) -> [32]byte { +hash_string_256_3 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256_3(transmute([]byte)(data)) } // hash_bytes_256_3 will hash the given input and return the // computed hash -hash_bytes_256_3 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256_3 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Haval_Context ctx.hashbitlen = 256 ctx.rounds = 3 @@ -784,10 +1066,31 @@ hash_bytes_256_3 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256_3 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256_3 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256_3(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256_3 will hash the given input and write the +// computed hash into the second parameter. 
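Because each `hash_*` proc group now also carries the two buffer variants, callers rarely need the long names; overload resolution on the argument types picks the right procedure. A sketch under the same assumed import as the earlier example:

```odin
package example

import "core:crypto/haval"

demo_groups :: proc() {
	input := "abc"

	// Heap-allocated destination also works; only len(digest) matters.
	digest := make([]byte, haval.DIGEST_SIZE_224)
	defer delete(digest)

	haval.hash_224_5(input, digest)                  // dispatches to hash_string_to_buffer_224_5
	haval.hash_224_5(transmute([]byte)input, digest) // dispatches to hash_bytes_to_buffer_224_5
	by_value := haval.hash_224_5(input)              // dispatches to hash_string_224_5
	_ = by_value
}
```
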
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256_3 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 256 + ctx.rounds = 3 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256_3 will read the stream in chunks and compute a // hash from its contents -hash_stream_256_3 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256_3 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Haval_Context ctx.hashbitlen = 256 ctx.rounds = 3 @@ -808,7 +1111,7 @@ hash_stream_256_3 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256_3 will read the file provided by the given handle // and compute a hash -hash_file_256_3 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256_3 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256_3(os.stream_from_handle(hd)) } else { @@ -816,7 +1119,7 @@ hash_file_256_3 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool return hash_bytes_256_3(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256_3 :: proc { @@ -824,18 +1127,20 @@ hash_256_3 :: proc { hash_file_256_3, hash_bytes_256_3, hash_string_256_3, + hash_bytes_to_buffer_256_3, + hash_string_to_buffer_256_3, } // hash_string_256_4 will hash the given input and return the // computed hash -hash_string_256_4 :: proc(data: string) -> [32]byte { +hash_string_256_4 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256_4(transmute([]byte)(data)) } // hash_bytes_256_4 will hash the given input and return the // computed hash -hash_bytes_256_4 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256_4 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Haval_Context ctx.hashbitlen = 256 ctx.rounds = 4 @@ -846,10 +1151,31 @@ hash_bytes_256_4 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256_4 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256_4 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256_4(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256_4 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256_4 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 256 + ctx.rounds = 4 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256_4 will read the stream in chunks and compute a // hash from its contents -hash_stream_256_4 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256_4 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Haval_Context ctx.hashbitlen = 256 ctx.rounds = 4 @@ -870,7 +1196,7 @@ hash_stream_256_4 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256_4 will read the file provided by the given handle // and compute a hash -hash_file_256_4 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256_4 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256_4(os.stream_from_handle(hd)) } else { @@ -878,7 +1204,7 @@ hash_file_256_4 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool return hash_bytes_256_4(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256_4 :: proc { @@ -886,18 +1212,20 @@ hash_256_4 :: proc { hash_file_256_4, hash_bytes_256_4, hash_string_256_4, + hash_bytes_to_buffer_256_4, + hash_string_to_buffer_256_4, } // hash_string_256_5 will hash the given input and return the // computed hash -hash_string_256_5 :: proc(data: string) -> [32]byte { +hash_string_256_5 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256_5(transmute([]byte)(data)) } // hash_bytes_256_5 will hash the given input and return the // computed hash -hash_bytes_256_5 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256_5 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Haval_Context ctx.hashbitlen = 256 ctx.rounds = 5 @@ -908,10 +1236,32 @@ hash_bytes_256_5 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256_5 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256_5 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256_5(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256_5 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256_5 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Haval_Context + ctx.hashbitlen = 256 + ctx.rounds = 5 + init(&ctx) + ctx.str_len = u32(len(data)) + update(&ctx, data) + final(&ctx, hash) +} + + // hash_stream_256_5 will read the stream in chunks and compute a // hash from its contents -hash_stream_256_5 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256_5 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Haval_Context ctx.hashbitlen = 256 ctx.rounds = 5 @@ -932,7 +1282,7 @@ hash_stream_256_5 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256_5 will read the file provided by the given handle // and compute a hash -hash_file_256_5 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256_5 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256_5(os.stream_from_handle(hd)) } else { @@ -940,7 +1290,7 @@ hash_file_256_5 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool return hash_bytes_256_5(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256_5 :: proc { @@ -948,6 +1298,8 @@ hash_256_5 :: proc { hash_file_256_5, hash_bytes_256_5, hash_string_256_5, + hash_bytes_to_buffer_256_5, + hash_string_to_buffer_256_5, } /* diff --git a/core/crypto/jh/jh.odin b/core/crypto/jh/jh.odin index f251424d287..4ebc0e5cb8d 100644 --- a/core/crypto/jh/jh.odin +++ b/core/crypto/jh/jh.odin @@ -17,16 +17,21 @@ import "core:io" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Jh_Context ctx.hashbitlen = 224 init(&ctx) @@ -35,10 +40,29 @@ hash_bytes_224 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Jh_Context + ctx.hashbitlen = 224 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Jh_Context ctx.hashbitlen = 224 init(&ctx) @@ -57,7 +81,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -65,7 +89,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -73,18 +97,20 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Jh_Context ctx.hashbitlen = 256 init(&ctx) @@ -93,10 +119,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
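The `assert` gives the `_to_buffer` contract teeth: an undersized destination is a hard programming error rather than a silently truncated digest, while an oversized one is accepted. A sketch of that contract against the JH procedures (illustration only; assumes import from `core:crypto/jh`):

```odin
package example

import "core:crypto/jh"

contract_demo :: proc(data: []byte) {
	exact: [jh.DIGEST_SIZE_224]byte
	large: [64]byte

	jh.hash_bytes_to_buffer_224(data, exact[:]) // ok: exactly the digest size
	jh.hash_bytes_to_buffer_224(data, large[:]) // ok: only the leading DIGEST_SIZE_224 bytes are written

	// jh.hash_bytes_to_buffer_224(data, large[:16]) // would trip the assert at runtime
}
```
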
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Jh_Context + ctx.hashbitlen = 256 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Jh_Context ctx.hashbitlen = 256 init(&ctx) @@ -115,7 +160,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -123,7 +168,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -131,18 +176,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: Jh_Context ctx.hashbitlen = 384 init(&ctx) @@ -151,10 +198,29 @@ hash_bytes_384 :: proc(data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: Jh_Context + ctx.hashbitlen = 384 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: Jh_Context ctx.hashbitlen = 384 init(&ctx) @@ -173,7 +239,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -181,7 +247,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -189,18 +255,20 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: Jh_Context ctx.hashbitlen = 512 init(&ctx) @@ -209,10 +277,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: Jh_Context + ctx.hashbitlen = 512 + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: Jh_Context ctx.hashbitlen = 512 init(&ctx) @@ -231,7 +318,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -239,7 +326,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -247,6 +334,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/core/crypto/keccak/keccak.odin b/core/crypto/keccak/keccak.odin index 19c4c7ddafe..f5d4826b14c 100644 --- a/core/crypto/keccak/keccak.odin +++ b/core/crypto/keccak/keccak.odin @@ -21,18 +21,23 @@ import "../_sha3" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 28 + ctx.mdlen = DIGEST_SIZE_224 ctx.is_keccak = true _sha3.init(&ctx) _sha3.update(&ctx, data) @@ -40,12 +45,32 @@ hash_bytes_224 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. 
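For incremental hashing, the low-level JH API the wrappers are built on remains available: set `hashbitlen` on the context, then `init`/`update`/`final`, exactly as the one-shot bodies above do. A sketch (illustration only):

```odin
package example

import "core:crypto/jh"

jh512_incremental :: proc(parts: [][]byte) -> [jh.DIGEST_SIZE_512]byte {
	hash: [jh.DIGEST_SIZE_512]byte
	ctx: jh.Jh_Context
	ctx.hashbitlen = 512
	jh.init(&ctx)
	for part in parts {
		jh.update(&ctx, part)
	}
	jh.final(&ctx, hash[:])
	return hash
}
```
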
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_224 + ctx.is_keccak = true + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 28 + ctx.mdlen = DIGEST_SIZE_224 ctx.is_keccak = true _sha3.init(&ctx) buf := make([]byte, 512) @@ -63,7 +88,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -71,7 +96,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -79,20 +104,22 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 32 + ctx.mdlen = DIGEST_SIZE_256 ctx.is_keccak = true _sha3.init(&ctx) _sha3.update(&ctx, data) @@ -100,12 +127,32 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_256 + ctx.is_keccak = true + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 32 + ctx.mdlen = DIGEST_SIZE_256 ctx.is_keccak = true _sha3.init(&ctx) buf := make([]byte, 512) @@ -123,7 +170,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -131,7 +178,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -139,20 +186,22 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 48 + ctx.mdlen = DIGEST_SIZE_384 ctx.is_keccak = true _sha3.init(&ctx) _sha3.update(&ctx, data) @@ -160,12 +209,32 @@ hash_bytes_384 :: proc(data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
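Keccak here is the pre-standardization padding variant of SHA-3: the contexts set `is_keccak = true`, while the sponge itself is shared with the `_sha3` package. One-shot buffer usage looks the same as everywhere else in this patch (sketch; assumes import from `core:crypto/keccak`):

```odin
package example

import "core:crypto/keccak"

keccak256_of :: proc(msg: string) -> [keccak.DIGEST_SIZE_256]byte {
	digest: [keccak.DIGEST_SIZE_256]byte
	keccak.hash_string_to_buffer_256(msg, digest[:])
	return digest
}
```
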
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_384 + ctx.is_keccak = true + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 48 + ctx.mdlen = DIGEST_SIZE_384 ctx.is_keccak = true _sha3.init(&ctx) buf := make([]byte, 512) @@ -183,7 +252,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -191,7 +260,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -199,20 +268,22 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 64 + ctx.mdlen = DIGEST_SIZE_512 ctx.is_keccak = true _sha3.init(&ctx) _sha3.update(&ctx, data) @@ -220,12 +291,32 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_512 + ctx.is_keccak = true + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 64 + ctx.mdlen = DIGEST_SIZE_512 ctx.is_keccak = true _sha3.init(&ctx) buf := make([]byte, 512) @@ -243,7 +334,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -251,7 +342,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -259,13 +350,15 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* Low level API */ -Sha3_Context :: _sha3.Sha3_Context +Keccak_Context :: _sha3.Sha3_Context init :: proc(ctx: ^_sha3.Sha3_Context) { ctx.is_keccak = true diff --git a/core/crypto/md2/md2.odin b/core/crypto/md2/md2.odin index 5e027c13c94..102c1b8b467 100644 --- a/core/crypto/md2/md2.odin +++ b/core/crypto/md2/md2.odin @@ -17,16 +17,18 @@ import "core:io" High level API */ +DIGEST_SIZE :: 16 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [16]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Md2_Context // init(&ctx) No-op update(&ctx, data) @@ -34,10 +36,28 @@ hash_bytes :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
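Note that this file also renames the exported low-level context from `Sha3_Context` to `Keccak_Context` (the alias target `_sha3.Sha3_Context` is unchanged), so existing low-level callers need a one-word update. A sketch of low-level use under the new name (illustration only):

```odin
package example

import "core:crypto/keccak"

low_level_keccak :: proc(data: []byte, out: []byte) {
	ctx: keccak.Keccak_Context       // previously exported as keccak.Sha3_Context
	ctx.mdlen = keccak.DIGEST_SIZE_512 // select the output size before init
	keccak.init(&ctx)                  // also sets is_keccak = true
	keccak.update(&ctx, data)
	keccak.final(&ctx, out)            // out must hold at least mdlen bytes
}
```
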
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Md2_Context + // init(&ctx) No-op + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Md2_Context // init(&ctx) No-op buf := make([]byte, 512) @@ -55,7 +75,7 @@ hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -63,7 +83,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { return hash_bytes(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -71,6 +91,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* @@ -86,7 +108,7 @@ update :: proc(ctx: ^Md2_Context, data: []byte) { for i := 0; i < len(data); i += 1 { ctx.data[ctx.datalen] = data[i] ctx.datalen += 1 - if (ctx.datalen == 16) { + if (ctx.datalen == DIGEST_SIZE) { transform(ctx, ctx.data[:]) ctx.datalen = 0 } @@ -94,14 +116,14 @@ update :: proc(ctx: ^Md2_Context, data: []byte) { } final :: proc(ctx: ^Md2_Context, hash: []byte) { - to_pad := byte(16 - ctx.datalen) - for ctx.datalen < 16 { + to_pad := byte(DIGEST_SIZE - ctx.datalen) + for ctx.datalen < DIGEST_SIZE { ctx.data[ctx.datalen] = to_pad ctx.datalen += 1 } transform(ctx, ctx.data[:]) transform(ctx, ctx.checksum[:]) - for i := 0; i < 16; i += 1 { + for i := 0; i < DIGEST_SIZE; i += 1 { hash[i] = ctx.state[i] } } @@ -111,9 +133,9 @@ final :: proc(ctx: ^Md2_Context, hash: []byte) { */ Md2_Context :: struct { - data: [16]byte, - state: [16 * 3]byte, - checksum: [16]byte, + data: [DIGEST_SIZE]byte, + state: [DIGEST_SIZE * 3]byte, + checksum: [DIGEST_SIZE]byte, datalen: int, } @@ -140,20 +162,20 @@ PI_TABLE := [?]byte { transform :: proc(ctx: ^Md2_Context, data: []byte) { j,k,t: byte - for j = 0; j < 16; j += 1 { - ctx.state[j + 16] = data[j] - ctx.state[j + 16 * 2] = (ctx.state[j + 16] ~ ctx.state[j]) + for j = 0; j < DIGEST_SIZE; j += 1 { + ctx.state[j + DIGEST_SIZE] = data[j] + ctx.state[j + DIGEST_SIZE * 2] = (ctx.state[j + DIGEST_SIZE] ~ ctx.state[j]) } t = 0 - for j = 0; j < 16 + 2; j += 1 { - for k = 0; k < 16 * 3; k += 1 { + for j = 0; j < DIGEST_SIZE + 2; j += 1 { + for k = 0; k < DIGEST_SIZE * 3; k += 1 { ctx.state[k] ~= PI_TABLE[t] t = ctx.state[k] } t = (t + j) & 0xff } - t = ctx.checksum[16 - 1] - for j = 0; j < 16; j += 1 { + t = ctx.checksum[DIGEST_SIZE - 1] + for j = 0; j < DIGEST_SIZE; j += 1 { ctx.checksum[j] ~= PI_TABLE[data[j] ~ t] t = ctx.checksum[j] } diff --git a/core/crypto/md4/md4.odin b/core/crypto/md4/md4.odin index 813db578ada..d944daa1d77 100644 --- a/core/crypto/md4/md4.odin +++ b/core/crypto/md4/md4.odin @@ -21,16 +21,18 @@ import "../util" High level API */ +DIGEST_SIZE :: 16 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [16]byte { +hash_string :: 
proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Md4_Context init(&ctx) update(&ctx, data) @@ -38,10 +40,28 @@ hash_bytes :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Md4_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Md4_Context init(&ctx) buf := make([]byte, 512) @@ -59,7 +79,7 @@ hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -67,7 +87,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { return hash_bytes(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -75,6 +95,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* @@ -171,9 +193,9 @@ HH :: #force_inline proc "contextless"(a, b, c, d, x: u32, s : int) -> u32 { transform :: proc(ctx: ^Md4_Context, data: []byte) { a, b, c, d, i, j: u32 - m: [16]u32 + m: [DIGEST_SIZE]u32 - for i, j = 0, 0; i < 16; i += 1 { + for i, j = 0, 0; i < DIGEST_SIZE; i += 1 { m[i] = u32(data[j]) | (u32(data[j + 1]) << 8) | (u32(data[j + 2]) << 16) | (u32(data[j + 3]) << 24) j += 4 } diff --git a/core/crypto/md5/md5.odin b/core/crypto/md5/md5.odin index a41ed16f89f..9129e63848d 100644 --- a/core/crypto/md5/md5.odin +++ b/core/crypto/md5/md5.odin @@ -20,16 +20,18 @@ import "../util" High level API */ +DIGEST_SIZE :: 16 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [16]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Md5_Context init(&ctx) update(&ctx, data) @@ -37,10 +39,28 @@ hash_bytes :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. 
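One observation on the MD2/MD4/MD5 internals touched here: `transform` now uses `DIGEST_SIZE` for the 16-entry message block (`m: [DIGEST_SIZE]u32` and the loop bounds). For these algorithms the digest size in bytes and the per-block count happen to both be 16, so the values agree and behavior is unchanged, but they are distinct quantities; a dedicated block-size constant would make that explicit. Caller-side, the new buffer variants work as elsewhere (sketch, assuming import from `core:crypto/md5`):

```odin
package example

import "core:crypto/md5"

md5_digest :: proc(data: []byte) -> [md5.DIGEST_SIZE]byte {
	digest: [md5.DIGEST_SIZE]byte
	md5.hash_bytes_to_buffer(data, digest[:])
	return digest
}
```
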
+// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Md5_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Md5_Context init(&ctx) buf := make([]byte, 512) @@ -58,7 +78,7 @@ hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -66,7 +86,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { return hash_bytes(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -74,6 +94,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* @@ -176,9 +198,9 @@ II :: #force_inline proc "contextless" (a, b, c, d, m: u32, s: int, t: u32) -> u transform :: proc(ctx: ^Md5_Context, data: []byte) { i, j: u32 - m: [16]u32 + m: [DIGEST_SIZE]u32 - for i, j = 0, 0; i < 16; i+=1 { + for i, j = 0, 0; i < DIGEST_SIZE; i+=1 { m[i] = u32(data[j]) + u32(data[j + 1]) << 8 + u32(data[j + 2]) << 16 + u32(data[j + 3]) << 24 j += 4 } diff --git a/core/crypto/ripemd/ripemd.odin b/core/crypto/ripemd/ripemd.odin index a9a5d1126f0..c475c48030d 100644 --- a/core/crypto/ripemd/ripemd.odin +++ b/core/crypto/ripemd/ripemd.odin @@ -19,16 +19,21 @@ import "../util" High level API */ +DIGEST_SIZE_128 :: 16 +DIGEST_SIZE_160 :: 20 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_320 :: 40 + // hash_string_128 will hash the given input and return the // computed hash -hash_string_128 :: proc(data: string) -> [16]byte { +hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128(transmute([]byte)(data)) } // hash_bytes_128 will hash the given input and return the // computed hash -hash_bytes_128 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: Ripemd128_Context init(&ctx) update(&ctx, data) @@ -36,10 +41,28 @@ hash_bytes_128 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: Ripemd128_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_128 will read the stream in chunks and compute a // hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: Ripemd128_Context init(&ctx) buf := make([]byte, 512) @@ -57,7 +80,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128 will read the file provided by the given handle // and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128(os.stream_from_handle(hd)) } else { @@ -65,7 +88,7 @@ hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) return hash_bytes_128(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128 :: proc { @@ -73,18 +96,20 @@ hash_128 :: proc { hash_file_128, hash_bytes_128, hash_string_128, + hash_bytes_to_buffer_128, + hash_string_to_buffer_128, } // hash_string_160 will hash the given input and return the // computed hash -hash_string_160 :: proc(data: string) -> [20]byte { +hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160(transmute([]byte)(data)) } // hash_bytes_160 will hash the given input and return the // computed hash -hash_bytes_160 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: Ripemd160_Context init(&ctx) update(&ctx, data) @@ -92,10 +117,28 @@ hash_bytes_160 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: Ripemd160_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_160 will read the stream in chunks and compute a // hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: Ripemd160_Context init(&ctx) buf := make([]byte, 512) @@ -113,7 +156,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160 will read the file provided by the given handle // and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160(os.stream_from_handle(hd)) } else { @@ -121,7 +164,7 @@ hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) return hash_bytes_160(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160 :: proc { @@ -129,18 +172,20 @@ hash_160 :: proc { hash_file_160, hash_bytes_160, hash_string_160, + hash_bytes_to_buffer_160, + hash_string_to_buffer_160, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Ripemd256_Context init(&ctx) update(&ctx, data) @@ -148,10 +193,28 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Ripemd256_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Ripemd256_Context init(&ctx) buf := make([]byte, 512) @@ -169,7 +232,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -177,7 +240,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -185,18 +248,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_320 will hash the given input and return the // computed hash -hash_string_320 :: proc(data: string) -> [40]byte { +hash_string_320 :: proc(data: string) -> [DIGEST_SIZE_320]byte { return hash_bytes_320(transmute([]byte)(data)) } // hash_bytes_320 will hash the given input and return the // computed hash -hash_bytes_320 :: proc(data: []byte) -> [40]byte { - hash: [40]byte +hash_bytes_320 :: proc(data: []byte) -> [DIGEST_SIZE_320]byte { + hash: [DIGEST_SIZE_320]byte ctx: Ripemd320_Context init(&ctx) update(&ctx, data) @@ -204,10 +269,28 @@ hash_bytes_320 :: proc(data: []byte) -> [40]byte { return hash } +// hash_string_to_buffer_320 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_320 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_320(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_320 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_320 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_320, "Size of destination buffer is smaller than the digest size") + ctx: Ripemd320_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_320 will read the stream in chunks and compute a // hash from its contents -hash_stream_320 :: proc(s: io.Stream) -> ([40]byte, bool) { - hash: [40]byte +hash_stream_320 :: proc(s: io.Stream) -> ([DIGEST_SIZE_320]byte, bool) { + hash: [DIGEST_SIZE_320]byte ctx: Ripemd320_Context init(&ctx) buf := make([]byte, 512) @@ -225,7 +308,7 @@ hash_stream_320 :: proc(s: io.Stream) -> ([40]byte, bool) { // hash_file_320 will read the file provided by the given handle // and compute a hash -hash_file_320 :: proc(hd: os.Handle, load_at_once := false) -> ([40]byte, bool) { +hash_file_320 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_320]byte, bool) { if !load_at_once { return hash_stream_320(os.stream_from_handle(hd)) } else { @@ -233,7 +316,7 @@ hash_file_320 :: proc(hd: os.Handle, load_at_once := false) -> ([40]byte, bool) return hash_bytes_320(buf[:]), ok } } - return [40]byte{}, false + return [DIGEST_SIZE_320]byte{}, false } hash_320 :: proc { @@ -241,6 +324,8 @@ hash_320 :: proc { hash_file_320, hash_bytes_320, hash_string_320, + hash_bytes_to_buffer_320, + hash_string_to_buffer_320, } /* diff --git a/core/crypto/sha1/sha1.odin b/core/crypto/sha1/sha1.odin index 736b207a3e1..e8df3c7f653 100644 --- a/core/crypto/sha1/sha1.odin +++ b/core/crypto/sha1/sha1.odin @@ -19,16 +19,19 @@ import "../util" /* High level API */ + +DIGEST_SIZE :: 20 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [20]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Sha1_Context init(&ctx) update(&ctx, data) @@ -36,10 +39,28 @@ hash_bytes :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
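RIPEMD ships in four widths here (128/160/256/320), each with the same six-proc surface. A sketch for the common RIPEMD-160 (illustration only; assumes import from `core:crypto/ripemd`):

```odin
package example

import "core:crypto/ripemd"

ripemd160_of :: proc(msg: string) -> [ripemd.DIGEST_SIZE_160]byte {
	digest: [ripemd.DIGEST_SIZE_160]byte
	ripemd.hash_string_to_buffer_160(msg, digest[:])
	return digest
}
```
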
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Sha1_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Sha1_Context init(&ctx) buf := make([]byte, 512) @@ -57,7 +78,7 @@ hash_stream :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -65,7 +86,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { return hash_bytes(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -73,6 +94,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/core/crypto/sha2/sha2.odin b/core/crypto/sha2/sha2.odin index 8b7ccf38a03..2178b70b554 100644 --- a/core/crypto/sha2/sha2.odin +++ b/core/crypto/sha2/sha2.odin @@ -21,16 +21,21 @@ import "../util" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: Sha256_Context ctx.is224 = true init(&ctx) @@ -39,10 +44,29 @@ hash_bytes_224 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: Sha256_Context + ctx.is224 = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: Sha512_Context ctx.is384 = false init(&ctx) @@ -61,7 +85,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -69,7 +93,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -77,18 +101,20 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Sha256_Context ctx.is224 = false init(&ctx) @@ -97,10 +123,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Sha256_Context + ctx.is224 = false + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Sha512_Context ctx.is384 = false init(&ctx) @@ -119,7 +164,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -127,7 +172,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -135,18 +180,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: Sha512_Context ctx.is384 = true init(&ctx) @@ -155,10 +202,29 @@ hash_bytes_384 :: proc(data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: Sha512_Context + ctx.is384 = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: Sha512_Context ctx.is384 = true init(&ctx) @@ -177,7 +243,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -185,7 +251,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -193,18 +259,20 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: Sha512_Context ctx.is384 = false init(&ctx) @@ -213,10 +281,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: Sha512_Context + ctx.is384 = false + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: Sha512_Context ctx.is384 = false init(&ctx) @@ -235,7 +322,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -243,7 +330,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -251,6 +338,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/core/crypto/sha3/sha3.odin b/core/crypto/sha3/sha3.odin index 1becf764050..2eceeaff6a2 100644 --- a/core/crypto/sha3/sha3.odin +++ b/core/crypto/sha3/sha3.odin @@ -20,30 +20,54 @@ import "../_sha3" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 28 + ctx.mdlen = DIGEST_SIZE_224 _sha3.init(&ctx) _sha3.update(&ctx, data) _sha3.final(&ctx, hash[:]) return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_224 + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 28 + ctx.mdlen = DIGEST_SIZE_224 _sha3.init(&ctx) buf := make([]byte, 512) defer delete(buf) @@ -60,7 +84,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -68,7 +92,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -76,32 +100,53 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 32 + ctx.mdlen = DIGEST_SIZE_256 _sha3.init(&ctx) _sha3.update(&ctx, data) _sha3.final(&ctx, hash[:]) return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_256 + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 32 + ctx.mdlen = DIGEST_SIZE_256 _sha3.init(&ctx) buf := make([]byte, 512) defer delete(buf) @@ -118,7 +163,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -126,7 +171,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -134,32 +179,53 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 48 + ctx.mdlen = DIGEST_SIZE_384 _sha3.init(&ctx) _sha3.update(&ctx, data) _sha3.final(&ctx, hash[:]) return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_384 + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 48 + ctx.mdlen = DIGEST_SIZE_384 _sha3.init(&ctx) buf := make([]byte, 512) defer delete(buf) @@ -176,7 +242,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -184,7 +250,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -192,32 +258,53 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 64 + ctx.mdlen = DIGEST_SIZE_512 _sha3.init(&ctx) _sha3.update(&ctx, data) _sha3.final(&ctx, hash[:]) return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_512 + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.final(&ctx, hash) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 64 + ctx.mdlen = DIGEST_SIZE_512 _sha3.init(&ctx) buf := make([]byte, 512) defer delete(buf) @@ -234,7 +321,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -242,7 +329,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -250,6 +337,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/core/crypto/shake/shake.odin b/core/crypto/shake/shake.odin index ff477b1a943..9fdc3ebf1ea 100644 --- a/core/crypto/shake/shake.odin +++ b/core/crypto/shake/shake.odin @@ -20,18 +20,21 @@ import "../_sha3" High level API */ +DIGEST_SIZE_128 :: 16 +DIGEST_SIZE_256 :: 32 + // hash_string_128 will hash the given input and return the // computed hash -hash_string_128 :: proc(data: string) -> [16]byte { +hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128(transmute([]byte)(data)) } // hash_bytes_128 will hash the given input and return the // computed hash -hash_bytes_128 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 16 + ctx.mdlen = DIGEST_SIZE_128 _sha3.init(&ctx) _sha3.update(&ctx, data) _sha3.shake_xof(&ctx) @@ -39,12 +42,32 @@ hash_bytes_128 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_128 + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.shake_xof(&ctx) + _sha3.shake_out(&ctx, hash) +} + // hash_stream_128 will read the stream in chunks and compute a // hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 16 + ctx.mdlen = DIGEST_SIZE_128 _sha3.init(&ctx) buf := make([]byte, 512) defer delete(buf) @@ -62,7 +85,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128 will read the file provided by the given handle // and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128(os.stream_from_handle(hd)) } else { @@ -70,7 +93,7 @@ hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) return hash_bytes_128(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128 :: proc { @@ -78,20 +101,22 @@ hash_128 :: proc { hash_file_128, hash_bytes_128, hash_string_128, + hash_bytes_to_buffer_128, + hash_string_to_buffer_128, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 32 + ctx.mdlen = DIGEST_SIZE_256 _sha3.init(&ctx) _sha3.update(&ctx, data) _sha3.shake_xof(&ctx) @@ -99,12 +124,32 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: _sha3.Sha3_Context + ctx.mdlen = DIGEST_SIZE_256 + _sha3.init(&ctx) + _sha3.update(&ctx, data) + _sha3.shake_xof(&ctx) + _sha3.shake_out(&ctx, hash) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: _sha3.Sha3_Context - ctx.mdlen = 32 + ctx.mdlen = DIGEST_SIZE_256 _sha3.init(&ctx) buf := make([]byte, 512) defer delete(buf) @@ -122,7 +167,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -130,7 +175,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -138,13 +183,15 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } /* Low level API */ -Sha3_Context :: _sha3.Sha3_Context +Shake_Context :: _sha3.Sha3_Context init :: proc(ctx: ^_sha3.Sha3_Context) { _sha3.init(ctx) diff --git a/core/crypto/sm3/sm3.odin b/core/crypto/sm3/sm3.odin index c72bd4f1500..e72973e331a 100644 --- a/core/crypto/sm3/sm3.odin +++ b/core/crypto/sm3/sm3.odin @@ -15,16 +15,22 @@ import "core:io" import "../util" +/* + High level API +*/ + +DIGEST_SIZE :: 32 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [32]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Sm3_Context init(&ctx) update(&ctx, data) @@ -32,10 +38,28 @@ hash_bytes :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Sm3_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Sm3_Context init(&ctx) buf := make([]byte, 512) @@ -53,7 +77,7 @@ hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -61,7 +85,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { return hash_bytes(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -69,6 +93,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* @@ -146,9 +172,6 @@ Sm3_Context :: struct { length: u64, } -BLOCK_SIZE_IN_BYTES :: 64 -BLOCK_SIZE_IN_32 :: 16 - IV := [8]u32 { 0x7380166f, 0x4914b2b9, 0x172442d7, 0xda8a0600, 0xa96f30bc, 0x163138aa, 0xe38dee4d, 0xb0fb0e4e, diff --git a/core/crypto/streebog/streebog.odin b/core/crypto/streebog/streebog.odin index b90ef8e8619..deb71120d10 100644 --- a/core/crypto/streebog/streebog.odin +++ b/core/crypto/streebog/streebog.odin @@ -19,16 +19,19 @@ import "../util" High level API */ +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_512 :: 64 + // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: Streebog_Context ctx.is256 = true init(&ctx) @@ -37,10 +40,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: Streebog_Context + ctx.is256 = true + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: Streebog_Context ctx.is256 = true init(&ctx) @@ -59,7 +81,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -67,7 +89,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -75,18 +97,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: Streebog_Context init(&ctx) update(&ctx, data) @@ -94,10 +118,28 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: Streebog_Context + init(&ctx) + update(&ctx, data) + final(&ctx, hash[:]) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: Streebog_Context init(&ctx) buf := make([]byte, 512) @@ -115,7 +157,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -123,7 +165,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -131,6 +173,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/core/crypto/tiger/tiger.odin b/core/crypto/tiger/tiger.odin index ecd7f5583db..4ea80c66c96 100644 --- a/core/crypto/tiger/tiger.odin +++ b/core/crypto/tiger/tiger.odin @@ -19,16 +19,20 @@ import "../_tiger" High level API */ +DIGEST_SIZE_128 :: 16 +DIGEST_SIZE_160 :: 20 +DIGEST_SIZE_192 :: 24 + // hash_string_128 will hash the given input and return the // computed hash -hash_string_128 :: proc(data: string) -> [16]byte { +hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128(transmute([]byte)(data)) } // hash_bytes_128 will hash the given input and return the // computed hash -hash_bytes_128 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: _tiger.Tiger_Context ctx.ver = 1 _tiger.init(&ctx) @@ -37,10 +41,29 @@ hash_bytes_128 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: _tiger.Tiger_Context + ctx.ver = 1 + _tiger.init(&ctx) + _tiger.update(&ctx, data) + _tiger.final(&ctx, hash) +} + // hash_stream_128 will read the stream in chunks and compute a // hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: _tiger.Tiger_Context ctx.ver = 1 _tiger.init(&ctx) @@ -59,7 +82,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128 will read the file provided by the given handle // and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128(os.stream_from_handle(hd)) } else { @@ -67,7 +90,7 @@ hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) return hash_bytes_128(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128 :: proc { @@ -75,18 +98,20 @@ hash_128 :: proc { hash_file_128, hash_bytes_128, hash_string_128, + hash_bytes_to_buffer_128, + hash_string_to_buffer_128, } // hash_string_160 will hash the given input and return the // computed hash -hash_string_160 :: proc(data: string) -> [20]byte { +hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160(transmute([]byte)(data)) } // hash_bytes_160 will hash the given input and return the // computed hash -hash_bytes_160 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: _tiger.Tiger_Context ctx.ver = 1 _tiger.init(&ctx) @@ -95,10 +120,29 @@ hash_bytes_160 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: _tiger.Tiger_Context + ctx.ver = 1 + _tiger.init(&ctx) + _tiger.update(&ctx, data) + _tiger.final(&ctx, hash) +} + // hash_stream_160 will read the stream in chunks and compute a // hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: _tiger.Tiger_Context ctx.ver = 1 _tiger.init(&ctx) @@ -117,7 +161,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160 will read the file provided by the given handle // and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160(os.stream_from_handle(hd)) } else { @@ -125,7 +169,7 @@ hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) return hash_bytes_160(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160 :: proc { @@ -133,18 +177,20 @@ hash_160 :: proc { hash_file_160, hash_bytes_160, hash_string_160, + hash_bytes_to_buffer_160, + hash_string_to_buffer_160, } // hash_string_192 will hash the given input and return the // computed hash -hash_string_192 :: proc(data: string) -> [24]byte { +hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte { return hash_bytes_192(transmute([]byte)(data)) } // hash_bytes_192 will hash the given input and return the // computed hash -hash_bytes_192 :: proc(data: []byte) -> [24]byte { - hash: [24]byte +hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { + hash: [DIGEST_SIZE_192]byte ctx: _tiger.Tiger_Context ctx.ver = 1 _tiger.init(&ctx) @@ -153,10 +199,29 @@ hash_bytes_192 :: proc(data: []byte) -> [24]byte { return hash } +// hash_string_to_buffer_192 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_192(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_192 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_192 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") + ctx: _tiger.Tiger_Context + ctx.ver = 1 + _tiger.init(&ctx) + _tiger.update(&ctx, data) + _tiger.final(&ctx, hash) +} + // hash_stream_192 will read the stream in chunks and compute a // hash from its contents -hash_stream_192 :: proc(s: io.Stream) -> ([24]byte, bool) { - hash: [24]byte +hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { + hash: [DIGEST_SIZE_192]byte ctx: _tiger.Tiger_Context ctx.ver = 1 _tiger.init(&ctx) @@ -175,7 +240,7 @@ hash_stream_192 :: proc(s: io.Stream) -> ([24]byte, bool) { // hash_file_192 will read the file provided by the given handle // and compute a hash -hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) { +hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { if !load_at_once { return hash_stream_192(os.stream_from_handle(hd)) } else { @@ -183,7 +248,7 @@ hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) return hash_bytes_192(buf[:]), ok } } - return [24]byte{}, false + return [DIGEST_SIZE_192]byte{}, false } hash_192 :: proc { @@ -191,6 +256,8 @@ hash_192 :: proc { hash_file_192, hash_bytes_192, hash_string_192, + hash_bytes_to_buffer_192, + hash_string_to_buffer_192, } /* diff --git a/core/crypto/tiger2/tiger2.odin b/core/crypto/tiger2/tiger2.odin index a93e19319b3..84333f344b2 100644 --- a/core/crypto/tiger2/tiger2.odin +++ b/core/crypto/tiger2/tiger2.odin @@ -19,16 +19,20 @@ import "../_tiger" High level API */ +DIGEST_SIZE_128 :: 16 +DIGEST_SIZE_160 :: 20 +DIGEST_SIZE_192 :: 24 + // hash_string_128 will hash the given input and return the // computed hash -hash_string_128 :: proc(data: string) -> [16]byte { +hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128(transmute([]byte)(data)) } // hash_bytes_128 will hash the given input and return the // computed hash -hash_bytes_128 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: _tiger.Tiger_Context ctx.ver = 2 _tiger.init(&ctx) @@ -37,10 +41,29 @@ hash_bytes_128 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: _tiger.Tiger_Context + ctx.ver = 2 + _tiger.init(&ctx) + _tiger.update(&ctx, data) + _tiger.final(&ctx, hash) +} + // hash_stream_128 will read the stream in chunks and compute a // hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: _tiger.Tiger_Context ctx.ver = 2 _tiger.init(&ctx) @@ -59,7 +82,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128 will read the file provided by the given handle // and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128(os.stream_from_handle(hd)) } else { @@ -67,7 +90,7 @@ hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) return hash_bytes_128(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128 :: proc { @@ -75,18 +98,20 @@ hash_128 :: proc { hash_file_128, hash_bytes_128, hash_string_128, + hash_bytes_to_buffer_128, + hash_string_to_buffer_128, } // hash_string_160 will hash the given input and return the // computed hash -hash_string_160 :: proc(data: string) -> [20]byte { +hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160(transmute([]byte)(data)) } // hash_bytes_160 will hash the given input and return the // computed hash -hash_bytes_160 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: _tiger.Tiger_Context ctx.ver = 2 _tiger.init(&ctx) @@ -95,10 +120,29 @@ hash_bytes_160 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: _tiger.Tiger_Context + ctx.ver = 2 + _tiger.init(&ctx) + _tiger.update(&ctx, data) + _tiger.final(&ctx, hash) +} + // hash_stream_160 will read the stream in chunks and compute a // hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: _tiger.Tiger_Context ctx.ver = 2 _tiger.init(&ctx) @@ -117,7 +161,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160 will read the file provided by the given handle // and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160(os.stream_from_handle(hd)) } else { @@ -125,7 +169,7 @@ hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) return hash_bytes_160(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160 :: proc { @@ -133,18 +177,20 @@ hash_160 :: proc { hash_file_160, hash_bytes_160, hash_string_160, + hash_bytes_to_buffer_160, + hash_string_to_buffer_160, } // hash_string_192 will hash the given input and return the // computed hash -hash_string_192 :: proc(data: string) -> [24]byte { +hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte { return hash_bytes_192(transmute([]byte)(data)) } // hash_bytes_192 will hash the given input and return the // computed hash -hash_bytes_192 :: proc(data: []byte) -> [24]byte { - hash: [24]byte +hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte { + hash: [DIGEST_SIZE_192]byte ctx: _tiger.Tiger_Context ctx.ver = 2 _tiger.init(&ctx) @@ -153,10 +199,29 @@ hash_bytes_192 :: proc(data: []byte) -> [24]byte { return hash } +// hash_string_to_buffer_192 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_192 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_192(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_192 will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_192 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size") + ctx: _tiger.Tiger_Context + ctx.ver = 2 + _tiger.init(&ctx) + _tiger.update(&ctx, data) + _tiger.final(&ctx, hash) +} + // hash_stream_192 will read the stream in chunks and compute a // hash from its contents -hash_stream_192 :: proc(s: io.Stream) -> ([24]byte, bool) { - hash: [24]byte +hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) { + hash: [DIGEST_SIZE_192]byte ctx: _tiger.Tiger_Context ctx.ver = 2 _tiger.init(&ctx) @@ -175,7 +240,7 @@ hash_stream_192 :: proc(s: io.Stream) -> ([24]byte, bool) { // hash_file_192 will read the file provided by the given handle // and compute a hash -hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) { +hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) { if !load_at_once { return hash_stream_192(os.stream_from_handle(hd)) } else { @@ -183,7 +248,7 @@ hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) return hash_bytes_192(buf[:]), ok } } - return [24]byte{}, false + return [DIGEST_SIZE_192]byte{}, false } hash_192 :: proc { @@ -191,6 +256,8 @@ hash_192 :: proc { hash_file_192, hash_bytes_192, hash_string_192, + hash_bytes_to_buffer_192, + hash_string_to_buffer_192, } /* diff --git a/core/crypto/whirlpool/whirlpool.odin b/core/crypto/whirlpool/whirlpool.odin index 43ad2a0a58f..255f57bc2fb 100644 --- a/core/crypto/whirlpool/whirlpool.odin +++ b/core/crypto/whirlpool/whirlpool.odin @@ -19,16 +19,18 @@ import "../util" High level API */ +DIGEST_SIZE :: 64 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc(data: string) -> [64]byte { +hash_string :: proc(data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes :: proc(data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: Whirlpool_Context // init(&ctx) No-op update(&ctx, data) @@ -36,10 +38,28 @@ hash_bytes :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. 
+// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: Whirlpool_Context + // init(&ctx) No-op + update(&ctx, data) + final(&ctx, hash) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: Whirlpool_Context // init(&ctx) No-op buf := make([]byte, 512) @@ -57,7 +77,7 @@ hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -65,7 +85,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { return hash_bytes(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -73,6 +93,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/README.md b/vendor/botan/README.md index 057aed422f2..b7d4d01a17d 100644 --- a/vendor/botan/README.md +++ b/vendor/botan/README.md @@ -26,9 +26,11 @@ Wrappers for hashing algorithms have been added to match the API within the Odin #### High level API Each hash algorithm contains a procedure group named `hash`, or if the algorithm provides more than one digest size `hash_`. -Included in these groups are four procedures. +Included in these groups are six procedures. * `hash_string` - Hash a given string and return the computed hash. Just calls `hash_bytes` internally * `hash_bytes` - Hash a given byte slice and return the computed hash +* `hash_string_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. Just calls `hash_bytes_to_buffer` internally +* `hash_bytes_to_buffer` - Hash a given string and put the computed hash in the second proc parameter. The destination buffer has to be at least as big as the digest size of the hash * `hash_stream` - Takes a stream from io.Stream and returns the computed hash from it * `hash_file` - Takes a file handle and returns the computed hash from it. 
A second optional boolean parameter controls if the file is streamed (this is the default) or read at once (set to true) @@ -49,6 +51,10 @@ main :: proc() { // Compute the hash, using the high level API computed_hash := md4.hash(input) + // Variant that takes a destination buffer, instead of returning the computed hash + hash := make([]byte, md4.DIGEST_SIZE) // @note: Destination buffer has to be at least as big as the digest size of the hash + md4.hash(input, hash[:]) + // Compute the hash, using the low level API // @note: Botan's structs are opaque by design, they don't expose any fields ctx: md4.Md4_Context diff --git a/vendor/botan/blake2b/blake2b.odin b/vendor/botan/blake2b/blake2b.odin index efd4f464bbd..226502e832e 100644 --- a/vendor/botan/blake2b/blake2b.odin +++ b/vendor/botan/blake2b/blake2b.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE :: 64 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc "contextless" (data: string) -> [64]byte { +hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_BLAKE2B, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { return hash_bytes(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -76,6 +97,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/gost/gost.odin b/vendor/botan/gost/gost.odin index 266078c7d4f..9f081f9cb6f 100644 --- a/vendor/botan/gost/gost.odin +++ b/vendor/botan/gost/gost.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE :: 32 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc "contextless" (data: string) -> [32]byte { +hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_GOST, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_GOST, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_GOST, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { return hash_bytes(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -76,6 +97,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/keccak/keccak.odin b/vendor/botan/keccak/keccak.odin index c2f52bfdc2f..3316de0176f 100644 --- a/vendor/botan/keccak/keccak.odin +++ b/vendor/botan/keccak/keccak.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_512 :: 64 + // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_KECCAK_512, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -76,6 +97,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/vendor/botan/md4/md4.odin b/vendor/botan/md4/md4.odin index 47a77c0fbf1..c8a1ad9030e 100644 --- a/vendor/botan/md4/md4.odin +++ b/vendor/botan/md4/md4.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE :: 16 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc "contextless" (data: string) -> [16]byte { +hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_MD4, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_MD4, 0) + botan.hash_update(ctx, len(data) == 0 ? 
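Only the in-memory entry points gain buffer variants; for on-disk data, `hash_file` and the sized forms such as `hash_file_512` remain the convenient path, streaming the file in 512-byte chunks by default or reading it whole when `load_at_once` is true. A sketch with the Keccak wrapper (the path is a placeholder):

```odin
package main

import "core:fmt"
import "core:os"
import keccak "vendor:botan/keccak"

main :: proc() {
    hd, err := os.open("input.txt")
    if err != os.ERROR_NONE {
        return
    }
    defer os.close(hd)

    // Streamed by default; pass true to read the file at once instead
    digest, ok := keccak.hash_file_512(hd)
    if ok {
        fmt.printf("%x\n", digest)
    }
}
```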
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_MD4, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { return hash_bytes(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -76,6 +97,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/md5/md5.odin b/vendor/botan/md5/md5.odin index 15ad1e05a06..203f2d09247 100644 --- a/vendor/botan/md5/md5.odin +++ b/vendor/botan/md5/md5.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE :: 16 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc "contextless" (data: string) -> [16]byte { +hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_MD5, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_MD5, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_MD5, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { return hash_bytes(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -76,6 +97,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/ripemd/ripemd.odin b/vendor/botan/ripemd/ripemd.odin index 66260e520fc..0a8195a968c 100644 --- a/vendor/botan/ripemd/ripemd.odin +++ b/vendor/botan/ripemd/ripemd.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_160 :: 20 + // hash_string_160 will hash the given input and return the // computed hash -hash_string_160 :: proc(data: string) -> [20]byte { +hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte { return hash_bytes_160(transmute([]byte)(data)) } // hash_bytes_160 will hash the given input and return the // computed hash -hash_bytes_160 :: proc(data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte { + hash: [DIGEST_SIZE_160]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes_160 :: proc(data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer_160 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_160 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_160(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_160 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_160 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_160 will read the stream in chunks and compute a // hash from its contents -hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) { + hash: [DIGEST_SIZE_160]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_RIPEMD_160, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file_160 will read the file provided by the given handle // and compute a hash -hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) { if !load_at_once { return hash_stream_160(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) return hash_bytes_160(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE_160]byte{}, false } hash_160 :: proc { @@ -76,6 +97,8 @@ hash_160 :: proc { hash_file_160, hash_bytes_160, hash_string_160, + hash_bytes_to_buffer_160, + hash_string_to_buffer_160, } /* diff --git a/vendor/botan/sha1/sha1.odin b/vendor/botan/sha1/sha1.odin index 2eb799cb69c..005b018217d 100644 --- a/vendor/botan/sha1/sha1.odin +++ b/vendor/botan/sha1/sha1.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE :: 20 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc "contextless" (data: string) -> [20]byte { +hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [20]byte { - hash: [20]byte +hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA1, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [20]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA1, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([20]byte, bool) { - hash: [20]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA1, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([20]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) { return hash_bytes(buf[:]), ok } } - return [20]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -76,6 +97,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/sha2/sha2.odin b/vendor/botan/sha2/sha2.odin index cc5cd1d6549..f5d6921a810 100644 --- a/vendor/botan/sha2/sha2.odin +++ b/vendor/botan/sha2/sha2.odin @@ -20,16 +20,21 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_224, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +43,29 @@ hash_bytes_224 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA_224, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_224, 0) buf := make([]byte, 512) @@ -60,7 +84,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -68,7 +92,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -76,18 +100,20 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_256, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -96,10 +122,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA_256, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_256, 0) buf := make([]byte, 512) @@ -118,7 +163,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -126,7 +171,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -134,18 +179,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_384, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -154,10 +201,29 @@ hash_bytes_384 :: proc(data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA_384, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_384, 0) buf := make([]byte, 512) @@ -176,7 +242,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -184,7 +250,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -192,18 +258,20 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_512, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -212,10 +280,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA_512, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA_512, 0) buf := make([]byte, 512) @@ -234,7 +321,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -242,7 +329,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -250,6 +337,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/vendor/botan/sha3/sha3.odin b/vendor/botan/sha3/sha3.odin index 1211d836adb..cf9fa5b2bd9 100644 --- a/vendor/botan/sha3/sha3.odin +++ b/vendor/botan/sha3/sha3.odin @@ -20,16 +20,21 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_224 :: 28 +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_384 :: 48 +DIGEST_SIZE_512 :: 64 + // hash_string_224 will hash the given input and return the // computed hash -hash_string_224 :: proc(data: string) -> [28]byte { +hash_string_224 :: proc(data: string) -> [DIGEST_SIZE_224]byte { return hash_bytes_224(transmute([]byte)(data)) } // hash_bytes_224 will hash the given input and return the // computed hash -hash_bytes_224 :: proc(data: []byte) -> [28]byte { - hash: [28]byte +hash_bytes_224 :: proc(data: []byte) -> [DIGEST_SIZE_224]byte { + hash: [DIGEST_SIZE_224]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_224, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +43,29 @@ hash_bytes_224 :: proc(data: []byte) -> [28]byte { return hash } +// hash_string_to_buffer_224 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_224 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_224(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_224 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_224 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_224, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA3_224, 0) + botan.hash_update(ctx, len(data) == 0 ? 
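For multi-size algorithms such as SHA-2, the new `DIGEST_SIZE_*` constants size the destination for each variant without magic numbers. A brief sketch:

```odin
package main

import "core:fmt"
import sha2 "vendor:botan/sha2"

main :: proc() {
    input := "data"

    // One constant per variant: 28, 32, 48 and 64 bytes
    digest224 := make([]byte, sha2.DIGEST_SIZE_224)
    digest512 := make([]byte, sha2.DIGEST_SIZE_512)
    defer delete(digest224)
    defer delete(digest512)

    sha2.hash_224(input, digest224)
    sha2.hash_512(input, digest512)

    fmt.printf("%x\n%x\n", digest224, digest512)
}
```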
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_224 will read the stream in chunks and compute a // hash from its contents -hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { - hash: [28]byte +hash_stream_224 :: proc(s: io.Stream) -> ([DIGEST_SIZE_224]byte, bool) { + hash: [DIGEST_SIZE_224]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_224, 0) buf := make([]byte, 512) @@ -60,7 +84,7 @@ hash_stream_224 :: proc(s: io.Stream) -> ([28]byte, bool) { // hash_file_224 will read the file provided by the given handle // and compute a hash -hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) { +hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_224]byte, bool) { if !load_at_once { return hash_stream_224(os.stream_from_handle(hd)) } else { @@ -68,7 +92,7 @@ hash_file_224 :: proc(hd: os.Handle, load_at_once := false) -> ([28]byte, bool) return hash_bytes_224(buf[:]), ok } } - return [28]byte{}, false + return [DIGEST_SIZE_224]byte{}, false } hash_224 :: proc { @@ -76,18 +100,20 @@ hash_224 :: proc { hash_file_224, hash_bytes_224, hash_string_224, + hash_bytes_to_buffer_224, + hash_string_to_buffer_224, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_256, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -96,10 +122,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA3_256, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_256, 0) buf := make([]byte, 512) @@ -118,7 +163,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -126,7 +171,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -134,18 +179,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_384 will hash the given input and return the // computed hash -hash_string_384 :: proc(data: string) -> [48]byte { +hash_string_384 :: proc(data: string) -> [DIGEST_SIZE_384]byte { return hash_bytes_384(transmute([]byte)(data)) } // hash_bytes_384 will hash the given input and return the // computed hash -hash_bytes_384 :: proc(data: []byte) -> [48]byte { - hash: [48]byte +hash_bytes_384 :: proc(data: []byte) -> [DIGEST_SIZE_384]byte { + hash: [DIGEST_SIZE_384]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_384, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -154,10 +201,29 @@ hash_bytes_384 :: proc(data: []byte) -> [48]byte { return hash } +// hash_string_to_buffer_384 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_384 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_384(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_384 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_384 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_384, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA3_384, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_384 will read the stream in chunks and compute a // hash from its contents -hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { - hash: [48]byte +hash_stream_384 :: proc(s: io.Stream) -> ([DIGEST_SIZE_384]byte, bool) { + hash: [DIGEST_SIZE_384]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_384, 0) buf := make([]byte, 512) @@ -176,7 +242,7 @@ hash_stream_384 :: proc(s: io.Stream) -> ([48]byte, bool) { // hash_file_384 will read the file provided by the given handle // and compute a hash -hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) { +hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_384]byte, bool) { if !load_at_once { return hash_stream_384(os.stream_from_handle(hd)) } else { @@ -184,7 +250,7 @@ hash_file_384 :: proc(hd: os.Handle, load_at_once := false) -> ([48]byte, bool) return hash_bytes_384(buf[:]), ok } } - return [48]byte{}, false + return [DIGEST_SIZE_384]byte{}, false } hash_384 :: proc { @@ -192,18 +258,20 @@ hash_384 :: proc { hash_file_384, hash_bytes_384, hash_string_384, + hash_bytes_to_buffer_384, + hash_string_to_buffer_384, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_512, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -212,10 +280,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHA3_512, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_512 will read the stream in chunks and compute a // hash from its contents -hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { - hash: [64]byte +hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHA3_512, 0) buf := make([]byte, 512) @@ -234,7 +321,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) { // hash_file_512 will read the file provided by the given handle // and compute a hash -hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) { +hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) { if !load_at_once { return hash_stream_512(os.stream_from_handle(hd)) } else { @@ -242,7 +329,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) return hash_bytes_512(buf[:]), ok } } - return [64]byte{}, false + return [DIGEST_SIZE_512]byte{}, false } hash_512 :: proc { @@ -250,6 +337,8 @@ hash_512 :: proc { hash_file_512, hash_bytes_512, hash_string_512, + hash_bytes_to_buffer_512, + hash_string_to_buffer_512, } /* diff --git a/vendor/botan/shake/shake.odin b/vendor/botan/shake/shake.odin index 82bf7ad1584..ac8432f6483 100644 --- a/vendor/botan/shake/shake.odin +++ b/vendor/botan/shake/shake.odin @@ -20,16 +20,19 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_128 :: 16 +DIGEST_SIZE_256 :: 32 + // hash_string_128 will hash the given input and return the // computed hash -hash_string_128 :: proc(data: string) -> [16]byte { +hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte { return hash_bytes_128(transmute([]byte)(data)) } // hash_bytes_128 will hash the given input and return the // computed hash -hash_bytes_128 :: proc(data: []byte) -> [16]byte { - hash: [16]byte +hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte { + hash: [DIGEST_SIZE_128]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +41,29 @@ hash_bytes_128 :: proc(data: []byte) -> [16]byte { return hash } +// hash_string_to_buffer_128 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_128 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_128(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_128 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_128 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_128 will read the stream in chunks and compute a // hash from its contents -hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { - hash: [16]byte +hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) { + hash: [DIGEST_SIZE_128]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHAKE_128, 0) buf := make([]byte, 512) @@ -60,7 +82,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) { // hash_file_128 will read the file provided by the given handle // and compute a hash -hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) { +hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) { if !load_at_once { return hash_stream_128(os.stream_from_handle(hd)) } else { @@ -68,7 +90,7 @@ hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) return hash_bytes_128(buf[:]), ok } } - return [16]byte{}, false + return [DIGEST_SIZE_128]byte{}, false } hash_128 :: proc { @@ -76,18 +98,20 @@ hash_128 :: proc { hash_file_128, hash_bytes_128, hash_string_128, + hash_bytes_to_buffer_128, + hash_string_to_buffer_128, } // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -96,10 +120,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0) + botan.hash_update(ctx, len(data) == 0 ? 
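Worth noting: SHAKE is an extendable-output function, but these wrappers pin it to fixed lengths, as the `[16]byte`/`[32]byte` return types and the `DIGEST_SIZE_128`/`DIGEST_SIZE_256` constants show. A sketch for SHAKE-128:

```odin
package main

import "core:fmt"
import shake "vendor:botan/shake"

main :: proc() {
    // The wrapper fixes SHAKE-128 output at 16 bytes
    digest := make([]byte, shake.DIGEST_SIZE_128)
    defer delete(digest)

    shake.hash_128("data", digest)
    fmt.printf("%x\n", digest)
}
```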
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SHAKE_256, 0) buf := make([]byte, 512) @@ -118,7 +161,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -126,7 +169,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -134,6 +177,8 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } /* diff --git a/vendor/botan/skein512/skein512.odin b/vendor/botan/skein512/skein512.odin index dc808edb943..490eeba031f 100644 --- a/vendor/botan/skein512/skein512.odin +++ b/vendor/botan/skein512/skein512.odin @@ -22,16 +22,19 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_512 :: 64 + // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -40,10 +43,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_256 will read the stream in chunks and compute a // hash from its contents -hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream_256 :: proc(s: io.Stream) -> ([DIGEST_SIZE_256]byte, bool) { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SKEIN_512_256, 0) buf := make([]byte, 512) @@ -62,7 +84,7 @@ hash_stream_256 :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file_256 will read the file provided by the given handle // and compute a hash -hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_256]byte, bool) { if !load_at_once { return hash_stream_256(os.stream_from_handle(hd)) } else { @@ -70,7 +92,7 @@ hash_file_256 :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) return hash_bytes_256(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE_256]byte{}, false } hash_256 :: proc { @@ -78,18 +100,20 @@ hash_256 :: proc { hash_file_256, hash_bytes_256, hash_string_256, + hash_bytes_to_buffer_256, + hash_string_to_buffer_256, } // hash_string_512 will hash the given input and return the // computed hash -hash_string_512 :: proc(data: string) -> [64]byte { +hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte { return hash_bytes_512(transmute([]byte)(data)) } // hash_bytes_512 will hash the given input and return the // computed hash -hash_bytes_512 :: proc(data: []byte) -> [64]byte { - hash: [64]byte +hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte { + hash: [DIGEST_SIZE_512]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -98,10 +122,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte { return hash } +// hash_string_to_buffer_512 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_512 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_512(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_512 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_512 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0) + botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data)))
+    botan.hash_final(ctx, &hash[0])
+    botan.hash_destroy(ctx)
+}
+
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) {
-    hash: [64]byte
+hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
+    hash: [DIGEST_SIZE_512]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_SKEIN_512_512, 0)
     buf := make([]byte, 512)
@@ -120,7 +163,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) {
 
 // hash_file_512 will read the file provided by the given handle
 // and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) {
+hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
     if !load_at_once {
         return hash_stream_512(os.stream_from_handle(hd))
     } else {
@@ -128,7 +171,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool)
             return hash_bytes_512(buf[:]), ok
         }
     }
-    return [64]byte{}, false
+    return [DIGEST_SIZE_512]byte{}, false
 }
 
 hash_512 :: proc {
@@ -136,6 +179,8 @@ hash_512 :: proc {
     hash_file_512,
     hash_bytes_512,
     hash_string_512,
+    hash_bytes_to_buffer_512,
+    hash_string_to_buffer_512,
 }
 
 // hash_string_slice will hash the given input and return the
@@ -156,6 +201,25 @@ hash_bytes_slice :: proc(data: []byte, bit_size: int, allocator := context.alloc
     return hash
 }
 
+// hash_string_to_buffer_slice will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_slice :: proc(data: string, hash: []byte, bit_size: int, allocator := context.allocator) {
+    hash_bytes_to_buffer_slice(transmute([]byte)(data), hash, bit_size, allocator);
+}
+
+// hash_bytes_to_buffer_slice will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_slice :: proc(data, hash: []byte, bit_size: int, allocator := context.allocator) {
+    assert(len(hash) >= bit_size, "Size of destination buffer is smaller than the digest size")
+    ctx: botan.hash_t
+    botan.hash_init(&ctx, strings.unsafe_string_to_cstring(fmt.tprintf("Skein-512(%d)", bit_size * 8)), 0)
+    botan.hash_update(ctx, len(data) == 0 ? 
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream_slice will read the stream in chunks and compute a // hash from its contents hash_stream_slice :: proc(s: io.Stream, bit_size: int, allocator := context.allocator) -> ([]byte, bool) { @@ -194,6 +258,8 @@ hash_slice :: proc { hash_file_slice, hash_bytes_slice, hash_string_slice, + hash_bytes_to_buffer_slice, + hash_string_to_buffer_slice, } /* diff --git a/vendor/botan/sm3/sm3.odin b/vendor/botan/sm3/sm3.odin index eada2a5b3a0..7eb3f1f8dd0 100644 --- a/vendor/botan/sm3/sm3.odin +++ b/vendor/botan/sm3/sm3.odin @@ -20,16 +20,18 @@ import botan "../bindings" High level API */ +DIGEST_SIZE :: 32 + // hash_string will hash the given input and return the // computed hash -hash_string :: proc "contextless" (data: string) -> [32]byte { +hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte { return hash_bytes(transmute([]byte)(data)) } // hash_bytes will hash the given input and return the // computed hash -hash_bytes :: proc "contextless" (data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SM3, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_SM3, 0) + botan.hash_update(ctx, len(data) == 0 ? 
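The Skein-512 `_slice` procedures are the one spot in this patch where the digest size is chosen at run time. Judging from the assert and the `Skein-512(%d)` format string, `bit_size` is effectively a byte count: it is compared against `len(hash)` directly and multiplied by 8 to build Botan's algorithm name. A sketch under that reading:

```odin
package main

import "core:fmt"
import skein512 "vendor:botan/skein512"

main :: proc() {
    // 32 selects "Skein-512(256)", a 256-bit digest,
    // so the destination must hold at least 32 bytes
    digest := make([]byte, 32)
    defer delete(digest)

    skein512.hash_string_to_buffer_slice("data", digest, 32)
    fmt.printf("%x\n", digest)
}
```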
nil : &data[0], uint(len(data))) + botan.hash_final(ctx, &hash[0]) + botan.hash_destroy(ctx) +} + // hash_stream will read the stream in chunks and compute a // hash from its contents -hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { - hash: [32]byte +hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) { + hash: [DIGEST_SIZE]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_SM3, 0) buf := make([]byte, 512) @@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([32]byte, bool) { // hash_file will read the file provided by the given handle // and compute a hash -hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { +hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) { if !load_at_once { return hash_stream(os.stream_from_handle(hd)) } else { @@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([32]byte, bool) { return hash_bytes(buf[:]), ok } } - return [32]byte{}, false + return [DIGEST_SIZE]byte{}, false } hash :: proc { @@ -76,6 +97,8 @@ hash :: proc { hash_file, hash_bytes, hash_string, + hash_bytes_to_buffer, + hash_string_to_buffer, } /* diff --git a/vendor/botan/streebog/streebog.odin b/vendor/botan/streebog/streebog.odin index acee1a78acf..cbf2047edda 100644 --- a/vendor/botan/streebog/streebog.odin +++ b/vendor/botan/streebog/streebog.odin @@ -20,16 +20,19 @@ import botan "../bindings" High level API */ +DIGEST_SIZE_256 :: 32 +DIGEST_SIZE_512 :: 64 + // hash_string_256 will hash the given input and return the // computed hash -hash_string_256 :: proc(data: string) -> [32]byte { +hash_string_256 :: proc(data: string) -> [DIGEST_SIZE_256]byte { return hash_bytes_256(transmute([]byte)(data)) } // hash_bytes_256 will hash the given input and return the // computed hash -hash_bytes_256 :: proc(data: []byte) -> [32]byte { - hash: [32]byte +hash_bytes_256 :: proc(data: []byte) -> [DIGEST_SIZE_256]byte { + hash: [DIGEST_SIZE_256]byte ctx: botan.hash_t botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0) botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data))) @@ -38,10 +41,29 @@ hash_bytes_256 :: proc(data: []byte) -> [32]byte { return hash } +// hash_string_to_buffer_256 will hash the given input and assign the +// computed hash to the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_string_to_buffer_256 :: proc(data: string, hash: []byte) { + hash_bytes_to_buffer_256(transmute([]byte)(data), hash); +} + +// hash_bytes_to_buffer_256 will hash the given input and write the +// computed hash into the second parameter. +// It requires that the destination buffer is at least as big as the digest size +hash_bytes_to_buffer_256 :: proc(data, hash: []byte) { + assert(len(hash) >= DIGEST_SIZE_256, "Size of destination buffer is smaller than the digest size") + ctx: botan.hash_t + botan.hash_init(&ctx, botan.HASH_STREEBOG_256, 0) + botan.hash_update(ctx, len(data) == 0 ? 
 // hash_string_512 will hash the given input and return the
 // computed hash
-hash_string_512 :: proc(data: string) -> [64]byte {
+hash_string_512 :: proc(data: string) -> [DIGEST_SIZE_512]byte {
     return hash_bytes_512(transmute([]byte)(data))
 }
 
 // hash_bytes_512 will hash the given input and return the
 // computed hash
-hash_bytes_512 :: proc(data: []byte) -> [64]byte {
-    hash: [64]byte
+hash_bytes_512 :: proc(data: []byte) -> [DIGEST_SIZE_512]byte {
+    hash: [DIGEST_SIZE_512]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0)
     botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
@@ -96,10 +120,29 @@ hash_bytes_512 :: proc(data: []byte) -> [64]byte {
     return hash
 }
 
+// hash_string_to_buffer_512 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_512 :: proc(data: string, hash: []byte) {
+    hash_bytes_to_buffer_512(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_512 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_512 :: proc(data, hash: []byte) {
+    assert(len(hash) >= DIGEST_SIZE_512, "Size of destination buffer is smaller than the digest size")
+    ctx: botan.hash_t
+    botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0)
+    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
+    botan.hash_final(ctx, &hash[0])
+    botan.hash_destroy(ctx)
+}
+
 // hash_stream_512 will read the stream in chunks and compute a
 // hash from its contents
-hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) {
-    hash: [64]byte
+hash_stream_512 :: proc(s: io.Stream) -> ([DIGEST_SIZE_512]byte, bool) {
+    hash: [DIGEST_SIZE_512]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_STREEBOG_512, 0)
     buf := make([]byte, 512)
@@ -118,7 +161,7 @@ hash_stream_512 :: proc(s: io.Stream) -> ([64]byte, bool) {
 
 // hash_file_512 will read the file provided by the given handle
 // and compute a hash
-hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) {
+hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_512]byte, bool) {
     if !load_at_once {
         return hash_stream_512(os.stream_from_handle(hd))
     } else {
@@ -126,7 +169,7 @@ hash_file_512 :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool)
             return hash_bytes_512(buf[:]), ok
         }
     }
-    return [64]byte{}, false
+    return [DIGEST_SIZE_512]byte{}, false
 }
 
 hash_512 :: proc {
@@ -134,6 +177,8 @@ hash_512 :: proc {
     hash_file_512,
     hash_bytes_512,
     hash_string_512,
+    hash_bytes_to_buffer_512,
+    hash_string_to_buffer_512,
 }
 
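With the `hash_512` group extended above, call sites can keep using the single group name; the two-argument form dispatches to the buffer variant. A sketch (import path and proc name assumed):

    import "vendor:botan/streebog"

    streebog_512_example :: proc() {
        dst := make([]byte, streebog.DIGEST_SIZE_512)
        defer delete(dst)
        streebog.hash_512("some input", dst) // resolves to hash_string_to_buffer_512
    }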
 /*
diff --git a/vendor/botan/tiger/tiger.odin b/vendor/botan/tiger/tiger.odin
index b240457a668..b29602b262b 100644
--- a/vendor/botan/tiger/tiger.odin
+++ b/vendor/botan/tiger/tiger.odin
@@ -20,16 +20,20 @@ import botan "../bindings"
     High level API
 */
 
+DIGEST_SIZE_128 :: 16
+DIGEST_SIZE_160 :: 20
+DIGEST_SIZE_192 :: 24
+
 // hash_string_128 will hash the given input and return the
 // computed hash
-hash_string_128 :: proc(data: string) -> [16]byte {
+hash_string_128 :: proc(data: string) -> [DIGEST_SIZE_128]byte {
     return hash_bytes_128(transmute([]byte)(data))
 }
 
 // hash_bytes_128 will hash the given input and return the
 // computed hash
-hash_bytes_128 :: proc(data: []byte) -> [16]byte {
-    hash: [16]byte
+hash_bytes_128 :: proc(data: []byte) -> [DIGEST_SIZE_128]byte {
+    hash: [DIGEST_SIZE_128]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_TIGER_128, 0)
     botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
@@ -38,10 +42,29 @@ hash_bytes_128 :: proc(data: []byte) -> [16]byte {
     return hash
 }
 
+// hash_string_to_buffer_128 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_128 :: proc(data: string, hash: []byte) {
+    hash_bytes_to_buffer_128(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_128 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_128 :: proc(data, hash: []byte) {
+    assert(len(hash) >= DIGEST_SIZE_128, "Size of destination buffer is smaller than the digest size")
+    ctx: botan.hash_t
+    botan.hash_init(&ctx, botan.HASH_TIGER_128, 0)
+    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
+    botan.hash_final(ctx, &hash[0])
+    botan.hash_destroy(ctx)
+}
+
 // hash_stream_128 will read the stream in chunks and compute a
 // hash from its contents
-hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) {
-    hash: [16]byte
+hash_stream_128 :: proc(s: io.Stream) -> ([DIGEST_SIZE_128]byte, bool) {
+    hash: [DIGEST_SIZE_128]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_TIGER_128, 0)
     buf := make([]byte, 512)
@@ -60,7 +83,7 @@ hash_stream_128 :: proc(s: io.Stream) -> ([16]byte, bool) {
 
 // hash_file_128 will read the file provided by the given handle
 // and compute a hash
-hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool) {
+hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_128]byte, bool) {
     if !load_at_once {
         return hash_stream_128(os.stream_from_handle(hd))
     } else {
@@ -68,7 +91,7 @@ hash_file_128 :: proc(hd: os.Handle, load_at_once := false) -> ([16]byte, bool)
             return hash_bytes_128(buf[:]), ok
         }
     }
-    return [16]byte{}, false
+    return [DIGEST_SIZE_128]byte{}, false
 }
 
 hash_128 :: proc {
@@ -76,18 +99,20 @@ hash_128 :: proc {
     hash_file_128,
     hash_bytes_128,
     hash_string_128,
+    hash_bytes_to_buffer_128,
+    hash_string_to_buffer_128,
 }
 
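The assert only demands len(hash) >= DIGEST_SIZE_128, so one scratch buffer sized for the largest variant can serve all three Tiger widths; only the first 16 bytes are written here. A sketch (import path and proc name assumed):

    import "vendor:botan/tiger"

    tiger_128_example :: proc() {
        scratch: [tiger.DIGEST_SIZE_192]byte // large enough for every Tiger variant
        tiger.hash_string_to_buffer_128("abc", scratch[:])
        digest := scratch[:tiger.DIGEST_SIZE_128]
        _ = digest
    }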
 // hash_string_160 will hash the given input and return the
 // computed hash
-hash_string_160 :: proc(data: string) -> [20]byte {
+hash_string_160 :: proc(data: string) -> [DIGEST_SIZE_160]byte {
     return hash_bytes_160(transmute([]byte)(data))
 }
 
 // hash_bytes_160 will hash the given input and return the
 // computed hash
-hash_bytes_160 :: proc(data: []byte) -> [20]byte {
-    hash: [20]byte
+hash_bytes_160 :: proc(data: []byte) -> [DIGEST_SIZE_160]byte {
+    hash: [DIGEST_SIZE_160]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_TIGER_160, 0)
     botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
@@ -96,10 +121,29 @@ hash_bytes_160 :: proc(data: []byte) -> [20]byte {
     return hash
 }
 
+// hash_string_to_buffer_160 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_160 :: proc(data: string, hash: []byte) {
+    hash_bytes_to_buffer_160(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_160 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_160 :: proc(data, hash: []byte) {
+    assert(len(hash) >= DIGEST_SIZE_160, "Size of destination buffer is smaller than the digest size")
+    ctx: botan.hash_t
+    botan.hash_init(&ctx, botan.HASH_TIGER_160, 0)
+    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
+    botan.hash_final(ctx, &hash[0])
+    botan.hash_destroy(ctx)
+}
+
 // hash_stream_160 will read the stream in chunks and compute a
 // hash from its contents
-hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) {
-    hash: [20]byte
+hash_stream_160 :: proc(s: io.Stream) -> ([DIGEST_SIZE_160]byte, bool) {
+    hash: [DIGEST_SIZE_160]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_TIGER_160, 0)
     buf := make([]byte, 512)
@@ -118,7 +162,7 @@ hash_stream_160 :: proc(s: io.Stream) -> ([20]byte, bool) {
 
 // hash_file_160 will read the file provided by the given handle
 // and compute a hash
-hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool) {
+hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_160]byte, bool) {
     if !load_at_once {
         return hash_stream_160(os.stream_from_handle(hd))
     } else {
@@ -126,7 +170,7 @@ hash_file_160 :: proc(hd: os.Handle, load_at_once := false) -> ([20]byte, bool)
             return hash_bytes_160(buf[:]), ok
         }
     }
-    return [20]byte{}, false
+    return [DIGEST_SIZE_160]byte{}, false
 }
 
 hash_160 :: proc {
@@ -134,18 +178,20 @@ hash_160 :: proc {
     hash_file_160,
     hash_bytes_160,
     hash_string_160,
+    hash_bytes_to_buffer_160,
+    hash_string_to_buffer_160,
 }
 
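Conversely, an undersized destination fails the assert at runtime rather than silently truncating the digest; this sketch shows both sides of the contract (names assumed):

    import "vendor:botan/tiger"

    tiger_160_example :: proc() {
        ok_dst := make([]byte, tiger.DIGEST_SIZE_160)
        defer delete(ok_dst)
        tiger.hash_string_to_buffer_160("abc", ok_dst) // fine: 20 bytes

        // too_small: [16]byte
        // tiger.hash_string_to_buffer_160("abc", too_small[:]) // would panic on the assert
    }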
 // hash_string_192 will hash the given input and return the
 // computed hash
-hash_string_192 :: proc(data: string) -> [24]byte {
+hash_string_192 :: proc(data: string) -> [DIGEST_SIZE_192]byte {
     return hash_bytes_192(transmute([]byte)(data))
 }
 
 // hash_bytes_192 will hash the given input and return the
 // computed hash
-hash_bytes_192 :: proc(data: []byte) -> [24]byte {
-    hash: [24]byte
+hash_bytes_192 :: proc(data: []byte) -> [DIGEST_SIZE_192]byte {
+    hash: [DIGEST_SIZE_192]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_TIGER_192, 0)
     botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
@@ -154,10 +200,29 @@ hash_bytes_192 :: proc(data: []byte) -> [24]byte {
     return hash
 }
 
+// hash_string_to_buffer_192 will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer_192 :: proc(data: string, hash: []byte) {
+    hash_bytes_to_buffer_192(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer_192 will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer_192 :: proc(data, hash: []byte) {
+    assert(len(hash) >= DIGEST_SIZE_192, "Size of destination buffer is smaller than the digest size")
+    ctx: botan.hash_t
+    botan.hash_init(&ctx, botan.HASH_TIGER_192, 0)
+    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
+    botan.hash_final(ctx, &hash[0])
+    botan.hash_destroy(ctx)
+}
+
 // hash_stream_192 will read the stream in chunks and compute a
 // hash from its contents
-hash_stream_192 :: proc(s: io.Stream) -> ([24]byte, bool) {
-    hash: [24]byte
+hash_stream_192 :: proc(s: io.Stream) -> ([DIGEST_SIZE_192]byte, bool) {
+    hash: [DIGEST_SIZE_192]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_TIGER_192, 0)
     buf := make([]byte, 512)
@@ -176,7 +241,7 @@ hash_stream_192 :: proc(s: io.Stream) -> ([24]byte, bool) {
 
 // hash_file_192 will read the file provided by the given handle
 // and compute a hash
-hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool) {
+hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE_192]byte, bool) {
     if !load_at_once {
         return hash_stream_192(os.stream_from_handle(hd))
     } else {
@@ -184,7 +249,7 @@ hash_file_192 :: proc(hd: os.Handle, load_at_once := false) -> ([24]byte, bool)
            return hash_bytes_192(buf[:]), ok
         }
     }
-    return [24]byte{}, false
+    return [DIGEST_SIZE_192]byte{}, false
 }
 
 hash_192 :: proc {
@@ -192,6 +257,8 @@ hash_192 :: proc {
     hash_file_192,
     hash_bytes_192,
     hash_string_192,
+    hash_bytes_to_buffer_192,
+    hash_string_to_buffer_192,
 }
 
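Note that only the string and byte-slice entry points gain buffer variants; hash_file_192 and hash_stream_192 still return a fixed-size array, which can be copied out afterwards if a slice is needed. A sketch (import paths and proc name assumed):

    import "core:os"
    import "vendor:botan/tiger"

    tiger_192_example :: proc(hd: os.Handle) {
        if digest, ok := tiger.hash_file_192(hd); ok {
            dst := make([]byte, tiger.DIGEST_SIZE_192)
            defer delete(dst)
            copy(dst, digest[:])
        }
    }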
 /*
diff --git a/vendor/botan/whirlpool/whirlpool.odin b/vendor/botan/whirlpool/whirlpool.odin
index 130386ff373..2aff3c8edbb 100644
--- a/vendor/botan/whirlpool/whirlpool.odin
+++ b/vendor/botan/whirlpool/whirlpool.odin
@@ -20,16 +20,18 @@ import botan "../bindings"
     High level API
 */
 
+DIGEST_SIZE :: 64
+
 // hash_string will hash the given input and return the
 // computed hash
-hash_string :: proc "contextless" (data: string) -> [64]byte {
+hash_string :: proc "contextless" (data: string) -> [DIGEST_SIZE]byte {
     return hash_bytes(transmute([]byte)(data))
 }
 
 // hash_bytes will hash the given input and return the
 // computed hash
-hash_bytes :: proc "contextless" (data: []byte) -> [64]byte {
-    hash: [64]byte
+hash_bytes :: proc "contextless" (data: []byte) -> [DIGEST_SIZE]byte {
+    hash: [DIGEST_SIZE]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0)
     botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
@@ -38,10 +40,29 @@ hash_bytes :: proc "contextless" (data: []byte) -> [64]byte {
     return hash
 }
 
+// hash_string_to_buffer will hash the given input and assign the
+// computed hash to the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_string_to_buffer :: proc(data: string, hash: []byte) {
+    hash_bytes_to_buffer(transmute([]byte)(data), hash)
+}
+
+// hash_bytes_to_buffer will hash the given input and write the
+// computed hash into the second parameter.
+// It requires that the destination buffer is at least as big as the digest size
+hash_bytes_to_buffer :: proc(data, hash: []byte) {
+    assert(len(hash) >= DIGEST_SIZE, "Size of destination buffer is smaller than the digest size")
+    ctx: botan.hash_t
+    botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0)
+    botan.hash_update(ctx, len(data) == 0 ? nil : &data[0], uint(len(data)))
+    botan.hash_final(ctx, &hash[0])
+    botan.hash_destroy(ctx)
+}
+
 // hash_stream will read the stream in chunks and compute a
 // hash from its contents
-hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) {
-    hash: [64]byte
+hash_stream :: proc(s: io.Stream) -> ([DIGEST_SIZE]byte, bool) {
+    hash: [DIGEST_SIZE]byte
     ctx: botan.hash_t
     botan.hash_init(&ctx, botan.HASH_WHIRLPOOL, 0)
     buf := make([]byte, 512)
@@ -60,7 +81,7 @@ hash_stream :: proc(s: io.Stream) -> ([64]byte, bool) {
 
 // hash_file will read the file provided by the given handle
 // and compute a hash
-hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) {
+hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([DIGEST_SIZE]byte, bool) {
     if !load_at_once {
         return hash_stream(os.stream_from_handle(hd))
     } else {
@@ -68,7 +89,7 @@ hash_file :: proc(hd: os.Handle, load_at_once := false) -> ([64]byte, bool) {
            return hash_bytes(buf[:]), ok
        }
     }
-    return [64]byte{}, false
+    return [DIGEST_SIZE]byte{}, false
 }
 
 hash :: proc {
@@ -76,6 +97,8 @@ hash :: proc {
     hash_file,
     hash_bytes,
     hash_string,
+    hash_bytes_to_buffer,
+    hash_string_to_buffer,
 }
 
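The whirlpool package follows the same pattern as every other package in this patch; one last sketch (import path and proc name assumed):

    import "vendor:botan/whirlpool"

    whirlpool_example :: proc() {
        dst := make([]byte, whirlpool.DIGEST_SIZE)
        defer delete(dst)
        whirlpool.hash_bytes_to_buffer([]byte{1, 2, 3}, dst)
    }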
 /*