// hpl_toolkit/compression/event.rs

use crate::schema::{Schema, SchemaValue};
use anchor_lang::{prelude::*, solana_program::keccak};
use spl_account_compression::{wrap_application_data_v1, Noop};

use super::calculate_canopy_depth_header_v1;

/// Payload carried by a [`CompressedDataEvent::Leaf`] event: either the full
/// value for the leaf, a single keyed patch, or no data at all.
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(AnchorDeserialize, AnchorSerialize, Clone, PartialEq)]
pub enum CompressedDataEventStream {
    /// The complete value stored at the leaf.
    Full { data: SchemaValue },
    /// A partial update — presumably `data` replaces the value under `key`
    /// in the leaf's schema value; confirm against event consumers.
    PatchChunk { key: String, data: SchemaValue },
    /// No payload accompanies the event.
    Empty,
}
14
/// Events serialized with Borsh and emitted through the SPL no-op program
/// (see [`CompressedDataEvent::wrap`]) so off-chain indexers can reconstruct
/// compressed account state.
#[cfg_attr(feature = "debug", derive(Debug))]
#[derive(AnchorDeserialize, AnchorSerialize, Clone, PartialEq)]
pub enum CompressedDataEvent {
    /// A change to a single leaf of a merkle tree.
    Leaf {
        // NOTE(review): presumably the slot in which the change occurred —
        // confirm with the call sites that emit this variant.
        slot: u64,
        /// Merkle tree account address, as raw bytes.
        tree_id: [u8; 32],
        /// Index of the affected leaf within the tree.
        leaf_idx: u32,
        /// Tree sequence number, used to order events.
        seq: u64,
        /// The data payload for this leaf change.
        stream_type: CompressedDataEventStream,
    },
    /// Declares the schema of the values stored in a tree.
    TreeSchemaValue {
        /// Owning program id, as raw bytes.
        program_id: [u8; 32],
        /// `keccak256(program_id || account_name)` — identifies the account
        /// type stored in the tree (see [`CompressedDataEvent::tree`]).
        discriminator: [u8; 32],
        /// Merkle tree account address, as raw bytes.
        tree_id: [u8; 32],
        /// Canopy depth, derived from the tree account's size and parameters.
        canopy_depth: u8,
        /// Schema of the leaf values.
        schema: Schema,
    },
}
33impl CompressedDataEvent {
34    pub fn wrap<'info>(&self, noop: &Program<'info, Noop>) -> Result<()> {
35        wrap_application_data_v1(self.try_to_vec().unwrap(), noop)
36    }
37
38    pub fn tree<'info>(
39        max_depth: usize,
40        max_buffer_size: usize,
41        merkle_tree: &AccountInfo<'info>,
42        schema: Schema,
43        program_id: Pubkey,
44        account_name: String,
45    ) -> Result<Self> {
46        let tree_size = merkle_tree.data_len();
47
48        Ok(Self::TreeSchemaValue {
49            program_id: program_id.to_bytes(),
50            discriminator: keccak::hashv(&[program_id.as_ref(), account_name.as_bytes()][..])
51                .to_bytes(),
52            tree_id: merkle_tree.key().to_bytes(),
53            canopy_depth: calculate_canopy_depth_header_v1(max_depth, max_buffer_size, tree_size),
54            schema,
55        })
56    }
57}