use crate::{
Algorithm, Argon2, Block, Error, Version, BLOCK_SIZE, MAX_OUTLEN, MIN_OUTLEN, SYNC_POINTS,
};
use blake2::{
digest::{self, VariableOutput},
Blake2b, Digest, VarBlake2b,
};
#[cfg(feature = "zeroize")]
use zeroize::Zeroize;
/// Number of pseudo-random reference addresses produced per address block in
/// data-independent mode — one per 64-bit word of a `Block` (presumably 128
/// words, i.e. `BLOCK_SIZE / 8`; see indexing in `fill_segment`).
const ADDRESSES_IN_BLOCK: u32 = 128;
/// Blake2b's maximum digest size in bytes; also the chunk size used when
/// `blake2b_long` chains digests to produce outputs longer than 64 bytes.
const BLAKE2B_OUTBYTES: usize = 64;
/// Coordinates of the block currently being computed within the Argon2
/// memory matrix (which pass, lane, slice, and index inside the segment).
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
struct Position {
    /// Current pass over the whole memory region (`0..passes`).
    pass: u32,
    /// Lane (row of the memory matrix) being filled.
    lane: u32,
    /// Slice (synchronization segment) within the lane (`0..SYNC_POINTS`).
    slice: u32,
    /// Block index within the current segment (`0..segment_length`).
    index: u32,
}
/// Working state for one Argon2 hashing run: the borrowed memory region plus
/// the parameters (derived from `Argon2`) that drive the filling schedule.
pub(crate) struct Instance<'a> {
    /// Memory matrix of `lanes * lane_length` blocks, borrowed from the caller.
    memory: &'a mut [Block],
    /// Argon2 version (0x10 or 0x13); affects XOR-vs-overwrite in later passes.
    version: Version,
    /// Number of passes over the memory (time cost `t`).
    passes: u32,
    /// Blocks per segment (`lane_length / SYNC_POINTS`).
    segment_length: u32,
    /// Blocks per lane (`segment_length * SYNC_POINTS`).
    lane_length: u32,
    /// Degree of parallelism `p` (number of lanes).
    lanes: u32,
    /// Requested thread count, clamped to `lanes` in `new`.
    threads: u32,
    /// Variant: Argon2d, Argon2i, or Argon2id.
    alg: Algorithm,
}
impl<'a> Instance<'a> {
    /// Entry point: construct an instance over `memory`, fill all blocks, and
    /// write the final tag into `out`.
    pub fn hash(
        context: &Argon2<'_>,
        alg: Algorithm,
        initial_hash: digest::Output<Blake2b>,
        memory: &'a mut [Block],
        out: &mut [u8],
    ) -> Result<(), Error> {
        let mut instance = Self::new(context, alg, initial_hash, memory)?;
        instance.fill_memory_blocks();
        instance.finalize(out)
    }

    // `mut initial_hash` is only mutated when the `zeroize` feature is on.
    #[allow(unused_mut)]
    fn new(
        context: &Argon2<'_>,
        alg: Algorithm,
        mut initial_hash: digest::Output<Blake2b>,
        memory: &'a mut [Block],
    ) -> Result<Self, Error> {
        let mut instance = Instance {
            version: context.version,
            memory,
            passes: context.t_cost,
            segment_length: context.segment_length(),
            lane_length: context.segment_length() * SYNC_POINTS,
            lanes: context.lanes,
            threads: context.threads,
            alg,
        };

        // More threads than lanes would be useless; clamp.
        if instance.threads > instance.lanes {
            instance.threads = instance.lanes;
        }

        // Derive blocks 0 and 1 of every lane from the initial hash H0.
        instance.fill_first_blocks(&initial_hash)?;

        // H0 is key-derived secret material; wipe it once consumed.
        #[cfg(feature = "zeroize")]
        initial_hash.zeroize();

        Ok(instance)
    }

    /// Fill the whole memory matrix: for each pass, each sync point (slice),
    /// fill that slice's segment in every lane. NOTE(review): this runs the
    /// lanes sequentially — `self.threads` is not used here.
    fn fill_memory_blocks(&mut self) {
        for r in 0..self.passes {
            for s in 0..SYNC_POINTS {
                for l in 0..self.lanes {
                    self.fill_segment(Position {
                        pass: r,
                        lane: l,
                        slice: s,
                        index: 0,
                    });
                }
            }
        }
    }

    /// Produce the output tag: XOR together the last block of every lane,
    /// serialize it little-endian, and feed it through `blake2b_long`.
    fn finalize(&mut self, out: &mut [u8]) -> Result<(), Error> {
        // Start from the last block of lane 0...
        let mut blockhash = self.memory[(self.lane_length - 1) as usize];

        // ...then XOR in the last block of each remaining lane.
        for l in 1..self.lanes {
            let last_block_in_lane = l * self.lane_length + (self.lane_length - 1);
            blockhash ^= self.memory[last_block_in_lane as usize];
        }

        // Serialize the 64-bit words to little-endian bytes for hashing.
        let mut blockhash_bytes = [0u8; BLOCK_SIZE];

        for (chunk, v) in blockhash_bytes.chunks_mut(8).zip(blockhash.iter()) {
            chunk.copy_from_slice(&v.to_le_bytes())
        }

        blake2b_long(&[&blockhash_bytes], out)?;

        // Wipe intermediate secret material.
        #[cfg(feature = "zeroize")]
        blockhash.zeroize();

        #[cfg(feature = "zeroize")]
        blockhash_bytes.zeroize();

        Ok(())
    }

    /// Initialize blocks 0 and 1 of every lane as
    /// `B[l][i] = H'(H0 || LE32(i) || LE32(l))` for `i` in `{0, 1}`.
    fn fill_first_blocks(&mut self, blockhash: &[u8]) -> Result<(), Error> {
        let mut hash = [0u8; BLOCK_SIZE];

        for l in 0..self.lanes {
            for i in 0u32..2u32 {
                blake2b_long(&[blockhash, &i.to_le_bytes(), &l.to_le_bytes()], &mut hash)?;
                self.memory[(l * self.lane_length + i) as usize].load(&hash);
            }
        }

        Ok(())
    }

    /// Fill one segment (one slice of one lane for one pass): for each new
    /// block, pick a reference block (data-independent or data-dependent) and
    /// compress it with the previous block.
    fn fill_segment(&mut self, mut position: Position) {
        let mut address_block = Block::default();
        let mut input_block = Block::default();
        let zero_block = Block::default();

        // Argon2i always uses data-independent addressing; Argon2id uses it
        // only for the first half of the first pass.
        let data_independent_addressing = (self.alg == Algorithm::Argon2i)
            || (self.alg == Algorithm::Argon2id
                && (position.pass == 0)
                && (position.slice < SYNC_POINTS / 2));

        if data_independent_addressing {
            // Seed the address-generation input block with the position and
            // parameters; word 6 (the counter) is bumped by `next_addresses`.
            input_block[0] = position.pass as u64;
            input_block[1] = position.lane as u64;
            input_block[2] = position.slice as u64;
            input_block[3] = self.memory.len() as u64;
            input_block[4] = self.passes as u64;
            input_block[5] = self.alg as u64;
        }

        let mut starting_index = 0;

        // On the very first slice of the first pass, blocks 0 and 1 were
        // already produced by `fill_first_blocks`; skip them.
        if position.pass == 0 && position.slice == 0 {
            starting_index = 2;

            // Pre-generate the first batch of pseudo-random addresses.
            if data_independent_addressing {
                next_addresses(&mut address_block, &mut input_block, &zero_block);
            }
        }

        let mut curr_offset = position.lane * self.lane_length
            + position.slice * self.segment_length
            + starting_index;

        // The "previous" block wraps to the end of the lane when the current
        // block is the first block of the lane.
        let mut prev_offset = if 0 == curr_offset % self.lane_length {
            curr_offset + self.lane_length - 1
        } else {
            curr_offset - 1
        };

        for i in starting_index..self.segment_length {
            // After the wrapped first block of a lane, the previous block is
            // again simply the one just written (curr - 1).
            if curr_offset % self.lane_length == 1 {
                prev_offset = curr_offset - 1;
            }

            // Pseudo-random value driving reference-block selection: either
            // the next pre-generated address, or word 0 of the previous block.
            let pseudo_rand = if data_independent_addressing {
                if i % ADDRESSES_IN_BLOCK == 0 {
                    next_addresses(&mut address_block, &mut input_block, &zero_block);
                }
                address_block[(i % ADDRESSES_IN_BLOCK) as usize]
            } else {
                self.memory[prev_offset as usize][0]
            };

            // High 32 bits choose the lane; on pass 0 slice 0 we can only
            // reference our own lane (other lanes aren't synchronized yet).
            let mut ref_lane = (pseudo_rand >> 32) as u32 % self.lanes;

            if position.pass == 0 && position.slice == 0 {
                ref_lane = position.lane;
            }

            // Low 32 bits choose the block index within the reference area.
            position.index = i;
            let ref_index = self.index_alpha(
                position,
                (pseudo_rand & 0xFFFFFFFF) as u32,
                ref_lane == position.lane,
            );

            // Compress previous + reference into the current block. Version
            // 0x13 XORs into the existing block on passes after the first;
            // version 0x10 (and pass 0) overwrites.
            let ref_block = self.memory[(self.lane_length * ref_lane + ref_index) as usize];
            let prev_block = self.memory[prev_offset as usize];
            let without_xor = self.version == Version::V0x10 || position.pass == 0;
            self.memory[curr_offset as usize].fill_block(prev_block, ref_block, !without_xor);

            curr_offset += 1;
            prev_offset += 1;
        }
    }

    /// Map the 32-bit pseudo-random value onto an index within the set of
    /// blocks that may legally be referenced from `position`, biased toward
    /// recent blocks via the x^2 / 2^32 distribution from the Argon2 spec.
    fn index_alpha(&self, position: Position, pseudo_rand: u32, same_lane: bool) -> u32 {
        // Size of the legal reference window:
        // - pass 0, slice 0: only earlier blocks of this segment;
        // - pass 0, later slices: all finished slices, plus this segment's
        //   earlier blocks if referencing our own lane (minus one when the
        //   current block is the very first of the segment);
        // - later passes: the whole lane except the segment being overwritten,
        //   with the same own-lane / first-block adjustments.
        let reference_area_size = if 0 == position.pass {
            if position.slice == 0 {
                position.index - 1
            } else if same_lane {
                position.slice * self.segment_length + position.index - 1
            } else {
                position.slice * self.segment_length - if position.index == 0 { 1 } else { 0 }
            }
        } else {
            if same_lane {
                self.lane_length - self.segment_length + position.index - 1
            } else {
                self.lane_length - self.segment_length - if position.index == 0 { 1 } else { 0 }
            }
        };

        // phi(x) = x^2 / 2^32, then pick `size - 1 - size * phi / 2^32` so
        // larger pseudo_rand values favor more recent blocks.
        let mut relative_position = pseudo_rand as u64;
        relative_position = (relative_position * relative_position) >> 32;
        let relative_position = reference_area_size
            - 1
            - (((reference_area_size as u64 * relative_position) >> 32) as u32);

        // On later passes the window starts just after the slice currently
        // being recomputed (wrapping via the modulo below).
        let mut start_position = 0;

        if position.pass != 0 {
            start_position = if position.slice == SYNC_POINTS - 1 {
                0
            } else {
                (position.slice + 1) * self.segment_length
            }
        }

        (start_position + relative_position as u32) % self.lane_length
    }
}
/// Generate the next block of pseudo-random reference addresses for
/// data-independent addressing: bump the counter in word 6 of `input_block`,
/// then apply the compression function twice
/// (`address_block = G(0, G(0, input_block))`), without XOR feedback.
fn next_addresses(address_block: &mut Block, input_block: &mut Block, zero_block: &Block) {
    input_block[6] += 1;
    address_block.fill_block(*zero_block, *input_block, false);
    address_block.fill_block(*zero_block, *address_block, false);
}
fn blake2b_long(inputs: &[&[u8]], mut out: &mut [u8]) -> Result<(), Error> {
if out.len() < MIN_OUTLEN as usize {
return Err(Error::OutputTooLong);
}
if out.len() > MAX_OUTLEN as usize {
return Err(Error::OutputTooLong);
}
let outlen_bytes = (out.len() as u32).to_le_bytes();
if out.len() <= BLAKE2B_OUTBYTES {
let mut digest = VarBlake2b::new(out.len()).unwrap();
digest::Update::update(&mut digest, &outlen_bytes);
for input in inputs {
digest::Update::update(&mut digest, input);
}
digest.finalize_variable(|hash| out.copy_from_slice(hash));
} else {
let mut digest = Blake2b::new();
digest.update(&outlen_bytes);
for input in inputs {
digest.update(input);
}
let mut out_buffer = [0u8; BLAKE2B_OUTBYTES];
out_buffer.copy_from_slice(&digest.finalize());
out[..(BLAKE2B_OUTBYTES / 2)].copy_from_slice(&out_buffer[..(BLAKE2B_OUTBYTES / 2)]);
out = &mut out[(BLAKE2B_OUTBYTES / 2)..];
let mut in_buffer = [0u8; BLAKE2B_OUTBYTES];
while out.len() > BLAKE2B_OUTBYTES {
in_buffer.copy_from_slice(&out_buffer);
out_buffer.copy_from_slice(&Blake2b::digest(&in_buffer));
out[..(BLAKE2B_OUTBYTES / 2)].copy_from_slice(&out_buffer[..(BLAKE2B_OUTBYTES / 2)]);
out = &mut out[(BLAKE2B_OUTBYTES / 2)..];
}
let mut digest = VarBlake2b::new(out.len()).unwrap();
digest::Update::update(&mut digest, &out_buffer);
digest.finalize_variable(|hash| out.copy_from_slice(hash));
}
Ok(())
}