x.crypto.ascon: small bits of cleanup and optimization

blackshirt 2025-09-11 16:42:13 +00:00
parent c2e2aac0c9
commit e7cec986f6
3 changed files with 48 additions and 20 deletions

View file

@@ -41,6 +41,9 @@ fn ascon_pnr(mut s State, nr int) {
     if nr < 1 || nr > 16 {
         panic('Invalid round number')
     }
+    // Allocate temporary vars to reduce allocations within the loop
+    mut x0 := u64(0)
+    mut y0 := u64(0)
     // Ascon permutation routine
     for i := max_nr_perm - nr; i < max_nr_perm; i++ {
         // 3.2 Constant-Addition Layer step
@@ -56,18 +59,22 @@ fn ascon_pnr(mut s State, nr int) {
         s.e0 ^= s.e4
         s.e4 ^= s.e3
         s.e2 ^= s.e1
+        // Save the two values that are still needed after the in-place updates below
+        x0 = s.e0
+        y0 = s.e4 ^ (~s.e0 & s.e1)
+        /*
         t0 := s.e4 ^ (~s.e0 & s.e1)
         t1 := s.e0 ^ (~s.e1 & s.e2)
         t2 := s.e1 ^ (~s.e2 & s.e3)
         t3 := s.e2 ^ (~s.e3 & s.e4)
         t4 := s.e3 ^ (~s.e4 & s.e0)
+        */
-        s.e0 = t1
-        s.e1 = t2
-        s.e2 = t3
-        s.e3 = t4
-        s.e4 = t0
+        s.e0 = s.e0 ^ (~s.e1 & s.e2) // t1
+        s.e1 = s.e1 ^ (~s.e2 & s.e3) // t2
+        s.e2 = s.e2 ^ (~s.e3 & s.e4) // t3
+        s.e3 = s.e3 ^ (~s.e4 & x0) // t4, uses x0, the saved original s.e0
+        s.e4 = y0
         s.e1 ^= s.e0
         s.e0 ^= s.e4
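
The in-place rewrite works because only two values are read after their slot is overwritten: the original s.e0 (still needed by the s.e3 update) and t0 (assigned last), which is exactly what x0 and y0 preserve. Here is a standalone sketch that checks the two-temp form against the original five-temp form; chi_five_temps, chi_two_temps, and the sample words are hypothetical names made up for this example:

fn chi_five_temps(w []u64) []u64 {
    t0 := w[4] ^ (~w[0] & w[1])
    t1 := w[0] ^ (~w[1] & w[2])
    t2 := w[1] ^ (~w[2] & w[3])
    t3 := w[2] ^ (~w[3] & w[4])
    t4 := w[3] ^ (~w[4] & w[0])
    return [t1, t2, t3, t4, t0]
}

fn chi_two_temps(mut w []u64) {
    x0 := w[0] // the original w[0] is still read by the w[3] update
    y0 := w[4] ^ (~w[0] & w[1]) // t0, computed before w[0] is overwritten
    w[0] = w[0] ^ (~w[1] & w[2]) // t1
    w[1] = w[1] ^ (~w[2] & w[3]) // t2
    w[2] = w[2] ^ (~w[3] & w[4]) // t3
    w[3] = w[3] ^ (~w[4] & x0) // t4
    w[4] = y0
}

fn main() {
    words := [u64(0x0123456789abcdef), 0xfedcba9876543210, 0x1111, 0x2222, 0x3333]
    want := chi_five_temps(words)
    mut got := words.clone()
    chi_two_temps(mut got)
    assert got == want
    println('two-temp substitution matches: ${got == want}')
}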

View file

@@ -135,20 +135,32 @@ fn (mut d Digest) squeeze(mut dst []u8) int {
 }
 @[direct_array_access; inline]
-fn ascon_generic_hash(mut s State, msg_ []u8, size int) []u8 {
+fn ascon_generic_hash(mut s State, msg []u8, size int) []u8 {
     // Assumed state was correctly initialized
     // Absorbing the message
-    mut msg := msg_.clone()
-    for msg.len >= block_size {
-        s.e0 ^= binary.little_endian_u64(msg[0..block_size])
-        unsafe {
-            msg = msg[block_size..]
+    mut pos := 0
+    // Check if msg has a non-zero length; if so, absorb it.
+    // Otherwise, just pad the state.
+    if _likely_(msg.len > 0) {
+        mut msg_len := msg.len
+        for msg_len >= block_size {
+            s.e0 ^= binary.little_endian_u64(msg[pos..pos + block_size])
+            pos += block_size
+            msg_len -= block_size
+            ascon_pnr(mut s, ascon_prnd_12)
         }
-        ascon_pnr(mut s, ascon_prnd_12)
+        // Absorb the last partial message block
+        last_block := unsafe { msg[pos..] }
+        s.e0 ^= u64(0x01) << (8 * last_block.len) // pad(last_block.len)
+        if last_block.len > 0 {
+            s.e0 ^= load_bytes(last_block, last_block.len)
+        }
+    } else {
+        // Otherwise, just pad it
+        s.e0 ^= u64(0x01)
+    }
-    // Absorb the last partial message block
-    s.e0 ^= load_bytes(msg, msg.len)
-    s.e0 ^= pad(msg.len)
+    // reset pos for the squeezing phase
+    pos = 0
     // Squeezing phase
     //

@@ -156,7 +168,6 @@ fn ascon_generic_hash(mut s State, msg_ []u8, size int) []u8 {
     // permutation 𝐴𝑠𝑐𝑜𝑛-𝑝[12] to the state:
     ascon_pnr(mut s, ascon_prnd_12)
     mut out := []u8{len: size}
-    mut pos := 0
     mut clen := out.len
     for clen >= block_size {
         binary.little_endian_put_u64(mut out[pos..pos + 8], s.e0)
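
The absorbing rewrite drops the msg_.clone() and the repeated unsafe re-slicing in favor of a single pos index and a shrinking msg_len counter, so the message is never copied. Here is a minimal sketch of that walking pattern over 8-byte blocks (block_size in the module); xor_blocks and the sample data are hypothetical, not part of the module:

import encoding.binary

fn xor_blocks(msg []u8) (u64, int) {
    mut acc := u64(0)
    mut pos := 0
    mut remaining := msg.len
    for remaining >= 8 {
        acc ^= binary.little_endian_u64(msg[pos..pos + 8])
        pos += 8
        remaining -= 8
    }
    return acc, pos // pos marks the start of the partial tail block
}

fn main() {
    data := []u8{len: 20, init: u8(index)}
    acc, tail_start := xor_blocks(data)
    println('acc = ${acc.hex()}')
    println('partial tail (${data.len - tail_start} bytes): ${data[tail_start..]}')
}

The commit then pads the tail with u64(0x01) << (8 * last_block.len): a single 0x01 byte placed immediately after the last message byte within the little-endian rate word.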

View file

@@ -83,13 +83,23 @@ fn set_byte(b u8, i int) u64 {
 fn load_bytes(bytes []u8, n int) u64 {
     mut x := u64(0)
     for i := 0; i < n; i++ {
-        x |= set_byte(bytes[i], i)
+        // This stores the bytes in little-endian order:
+        // x |= u64(bytes[0]) << 8*0 // LSB at lowest index
+        // x |= u64(bytes[1]) << 8*1
+        // x |= u64(bytes[2]) << 8*2
+        // x |= u64(bytes[3]) << 8*3
+        // ...etc
+        // x |= u64(bytes[7]) << 8*7 // MSB at highest index
+        x |= u64(bytes[i]) << (8 * i)
     }
-    return u64le(x)
+    // No need to cast with u64le, it's already little-endian
+    return x
 }

 @[direct_array_access]
 fn store_bytes(mut out []u8, x u64, n int) {
     for i := 0; i < n; i++ {
-        out[i] = get_byte(x, i)
+        // inline the body of get_byte directly
+        out[i] = u8(x >> (8 * i))
     }
 }
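
After this change the two helpers are plain little-endian packing and its inverse. A runnable round-trip sketch, with load_bytes and store_bytes re-declared standalone using the same bodies as in the diff:

fn load_bytes(bytes []u8, n int) u64 {
    mut x := u64(0)
    for i := 0; i < n; i++ {
        x |= u64(bytes[i]) << (8 * i) // LSB lands at the lowest index
    }
    return x
}

fn store_bytes(mut out []u8, x u64, n int) {
    for i := 0; i < n; i++ {
        out[i] = u8(x >> (8 * i))
    }
}

fn main() {
    src := [u8(0xaa), 0xbb, 0xcc] // a 3-byte partial block
    x := load_bytes(src, src.len)
    assert x == u64(0xccbbaa) // 0xaa is the least significant byte
    mut out := []u8{len: src.len}
    store_bytes(mut out, x, out.len)
    assert out == src // store_bytes inverts load_bytes
    println('round-trip ok: x = ${x.hex()}, out = ${out}')
}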