// ddk_messages/segmentation/mod.rs
use lightning::ln::msgs::DecodeError;
use lightning::ln::wire::Type;
use lightning::util::ser::{Readable, Writeable, Writer};

/// Wire type identifier for the [`SegmentStart`] message.
pub const SEGMENT_START_TYPE: u16 = 42900;

/// Wire type identifier for the [`SegmentChunk`] message.
pub const SEGMENT_CHUNK_TYPE: u16 = 42902;

/// Payload size threshold above which a message must be segmented
/// (see the precondition of `get_segments`).
pub const MAX_DATA_SIZE: usize = 65535;

/// Total size of the `data` field of a `SegmentStart` message, including the
/// two type-prefix bytes written ahead of the payload.
const MAX_START_DATA_SIZE: usize = 65526;

/// Maximum number of payload bytes carried by a single `SegmentChunk`.
const MAX_CHUNK_SIZE: usize = 65528;

/// Upper bound on segments per message; not referenced in this module —
/// presumably enforced by the segment reader (TODO confirm).
const MAX_SEGMENTS: usize = 1000;

pub mod segment_reader;

/// First message of a segmented sequence: announces how many segments make up
/// the whole message and carries the initial slice of its serialized form
/// (type prefix included).
#[cfg_attr(
    feature = "use-serde",
    derive(serde::Serialize, serde::Deserialize),
    serde(rename_all = "camelCase")
)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SegmentStart {
    /// Total number of segments, this message included.
    pub nb_segments: u16,
    /// First portion of the serialized message, prefixed with its wire type.
    pub data: Vec<u8>,
}

41impl_dlc_writeable!(SegmentStart, {
42 (nb_segments, writeable),
43 (data, writeable)
44});
45
46impl Type for SegmentStart {
47 fn type_id(&self) -> u16 {
48 SEGMENT_START_TYPE
49 }
50}
51
/// Follow-up message of a segmented sequence, carrying one chunk of the
/// remaining serialized payload.
#[cfg_attr(
    feature = "use-serde",
    derive(serde::Serialize, serde::Deserialize),
    serde(rename_all = "camelCase")
)]
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SegmentChunk {
    /// Raw payload bytes for this chunk (at most `MAX_CHUNK_SIZE`).
    pub data: Vec<u8>,
}

64impl_dlc_writeable!(SegmentChunk, { (data, writeable) });
65
66impl Type for SegmentChunk {
67 fn type_id(&self) -> u16 {
68 SEGMENT_CHUNK_TYPE
69 }
70}
71
72pub fn get_segments(mut data: Vec<u8>, msg_type: u16) -> (SegmentStart, Vec<SegmentChunk>) {
75 debug_assert!(data.len() > MAX_DATA_SIZE);
76
77 let len_minus_start = data.len() - MAX_START_DATA_SIZE + 2;
78 let mut nb_segments = (len_minus_start / MAX_CHUNK_SIZE + 1) as u16;
79
80 if !len_minus_start.is_multiple_of(MAX_CHUNK_SIZE) {
81 nb_segments += 1;
82 }
83
84 debug_assert!(nb_segments > 1);
85
86 let mut start_data = Vec::with_capacity(MAX_START_DATA_SIZE);
87 msg_type
88 .write(&mut start_data)
89 .expect("to be able to write the type prefix");
90 start_data.append(&mut data.drain(..MAX_START_DATA_SIZE - 2).collect());
91
92 debug_assert_eq!(MAX_START_DATA_SIZE, start_data.len());
93
94 let segment_start = SegmentStart {
95 nb_segments,
96 data: start_data,
97 };
98
99 let mut segments = Vec::with_capacity((nb_segments as usize) - 1);
100
101 for _ in 1..(nb_segments as usize) {
102 let to_take = usize::min(data.len(), MAX_CHUNK_SIZE);
103 segments.push(SegmentChunk {
104 data: data.drain(..to_take).collect(),
105 });
106 }
107
108 (segment_start, segments)
109}
110
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_get_segments() {
        // Payload large enough to need a start segment plus three chunks:
        // two full chunks and one partial one.
        let payload_len = MAX_START_DATA_SIZE + 2 * MAX_CHUNK_SIZE + 1234;
        let payload = vec![1u8; payload_len];

        let (start, chunks) = get_segments(payload, 2);

        // One start segment + three chunks.
        assert_eq!(4, start.nb_segments);
        assert_eq!(MAX_START_DATA_SIZE, start.data.len());
        assert_eq!(3, chunks.len());
        assert_eq!(MAX_CHUNK_SIZE, chunks[0].data.len());
        assert_eq!(MAX_CHUNK_SIZE, chunks[1].data.len());
        // Last chunk: the 1234 extra bytes plus the 2 payload bytes displaced
        // from the start segment by its type prefix.
        assert_eq!(1236, chunks[2].data.len());
    }
}
130}