use super::context::{CodeGenError, EbpfContext, Result};
use ghostscope_protocol::trace_event::{EndInstructionData, TraceEventHeader, TraceEventMessage};
use ghostscope_protocol::{consts, InstructionType};
use inkwell::values::PointerValue;
use inkwell::AddressSpace;
use tracing::info;

impl<'ctx> EbpfContext<'ctx> {
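    /// Reserve `size` bytes in the per-CPU accumulation buffer and return a pointer to the
    /// start of the reserved region. If the request cannot fit, the generated code resets the
    /// offset and returns from the eBPF program early.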
    fn reserve_event_space(&mut self, size: u64) -> PointerValue<'ctx> {
        let i32_ty = self.context.i32_type();
        let i64_ty = self.context.i64_type();

        let accum_buffer = self.get_or_create_perf_accumulation_buffer();
        let offset_ptr = self.get_or_create_perf_buffer_offset();

        let offset_val = self
            .builder
            .build_load(i32_ty, offset_ptr, "offset")
            .expect("Failed to load offset")
            .into_int_value();

        let buffer_size =
            i32_ty.const_int(self.compile_options.max_trace_event_size as u64, false);
        let req_size_i32 = i32_ty.const_int(size, false);

        let current_block = self.builder.get_insert_block().unwrap();
        let parent_fn = current_block.get_parent().unwrap();
        let bb_overflow = self
            .context
            .append_basic_block(parent_fn, "reserve_overflow");
        let bb_check_size = self
            .context
            .append_basic_block(parent_fn, "reserve_check_size");
        let bb_check_fit = self
            .context
            .append_basic_block(parent_fn, "reserve_check_fit");

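        // Verify the current offset is still inside the buffer before reserving.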
        let off_in = self
            .builder
            .build_int_compare(
                inkwell::IntPredicate::ULT,
                offset_val,
                buffer_size,
                "off_in",
            )
            .expect("cmp failed");
        self.builder
            .build_conditional_branch(off_in, bb_check_size, bb_overflow)
            .expect("branch failed");

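        // Overflow path: reset the offset and bail out of the eBPF program.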
        self.builder.position_at_end(bb_overflow);
        self.builder
            .build_store(offset_ptr, i32_ty.const_zero())
            .expect("reset store failed");
        self.builder
            .build_return(Some(&i32_ty.const_zero()))
            .expect("return failed");

        self.builder.position_at_end(bb_check_size);
        let size_ok = self
            .builder
            .build_int_compare(
                inkwell::IntPredicate::ULE,
                req_size_i32,
                buffer_size,
                "size_ok",
            )
            .expect("cmp failed");
        self.builder
            .build_conditional_branch(size_ok, bb_check_fit, bb_overflow)
            .expect("branch failed");

        self.builder.position_at_end(bb_check_fit);
        let limit = self
            .builder
            .build_int_sub(buffer_size, req_size_i32, "limit")
            .expect("sub failed");
        let fits = self
            .builder
            .build_int_compare(inkwell::IntPredicate::ULE, offset_val, limit, "fits")
            .expect("cmp failed");
        let bb_ok = self.context.append_basic_block(parent_fn, "reserve_ok");
        self.builder
            .build_conditional_branch(fits, bb_ok, bb_overflow)
            .expect("branch2 failed");

        self.builder.position_at_end(bb_ok);
        let off64 = self
            .builder
            .build_int_z_extend(offset_val, i64_ty, "off64")
            .expect("zext failed");
        let dest_i8 = unsafe {
            self.builder
                .build_gep(self.context.i8_type(), accum_buffer, &[off64], "dest_i8")
                .expect("gep failed")
        };
        let new_off = self
            .builder
            .build_int_add(offset_val, req_size_i32, "new_off")
            .expect("add failed");
        self.builder
            .build_store(offset_ptr, new_off)
            .expect("store new_off failed");

        dest_i8
    }

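    /// Reserve a region in the current event for one protocol segment (header, message, or
    /// instruction data).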
    pub(crate) fn reserve_instruction_region(&mut self, size: u64) -> PointerValue<'ctx> {
        self.reserve_event_space(size)
    }
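
    /// Look up the per-CPU `event_accum_buffer` map value and return it as a pointer,
    /// emitting an early return from the program if the lookup yields NULL.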
    fn get_or_create_perf_accumulation_buffer(&mut self) -> PointerValue<'ctx> {
        let ptr_ty = self.context.ptr_type(AddressSpace::default());
        let val_ptr = self
            .lookup_percpu_value_ptr("event_accum_buffer", 0)
            .expect("event_accum_buffer lookup failed");

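        // Abort the program if the per-CPU map lookup returned a null pointer.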
        let is_null = self
            .builder
            .build_is_null(val_ptr, "accum_buf_is_null")
            .expect("build_is_null failed");
        let current_fn = self
            .builder
            .get_insert_block()
            .unwrap()
            .get_parent()
            .unwrap();
        let cont_bb = self
            .context
            .append_basic_block(current_fn, "accum_buf_cont");
        let ret_bb = self.context.append_basic_block(current_fn, "accum_buf_ret");
        self.builder
            .build_conditional_branch(is_null, ret_bb, cont_bb)
            .expect("build_conditional_branch failed");
        self.builder.position_at_end(ret_bb);
        self.builder
            .build_return(Some(&self.context.i32_type().const_zero()))
            .expect("build_return failed");
        self.builder.position_at_end(cont_bb);

        self.builder
            .build_bit_cast(val_ptr, ptr_ty, "accum_buf_ptr")
            .expect("bitcast failed")
            .into_pointer_value()
    }

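    /// Return the stack slot that holds the current write offset into the accumulation buffer.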
    fn get_or_create_perf_buffer_offset(&mut self) -> PointerValue<'ctx> {
        self.event_offset_alloca
            .expect("event_offset not allocated in entry block")
    }

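    /// Emit code that reserves space for the TraceEventHeader and writes the protocol magic
    /// value at the start of the event buffer.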
    pub fn send_trace_event_header(&mut self) -> Result<()> {
        info!("Sending TraceEventHeader segment");

        if matches!(
            self.compile_options.event_map_type,
            crate::EventMapType::PerfEventArray
        ) {
            let offset_ptr = self.get_or_create_perf_buffer_offset();
            self.builder
                .build_store(offset_ptr, self.context.i32_type().const_zero())
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to reset offset: {e}")))?;
        }

        let header_size = std::mem::size_of::<TraceEventHeader>() as u64;
        let header_buffer = self.reserve_instruction_region(header_size);

        let magic_ptr = header_buffer;
        let magic_u32_ptr = self
            .builder
            .build_pointer_cast(
                magic_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "magic_u32_ptr",
            )
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cast magic ptr: {e}")))?;
        let magic_val = self
            .context
            .i32_type()
            .const_int(ghostscope_protocol::consts::MAGIC.into(), false);
        self.builder
            .build_store(magic_u32_ptr, magic_val)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store magic: {e}")))?;

        Ok(())
    }

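    /// Emit code that fills the TraceEventMessage segment: trace id, timestamp, and the
    /// pid/tid of the task that hit the probe.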
    pub fn send_trace_event_message(&mut self, trace_id: u64) -> Result<()> {
        info!(
            "Sending TraceEventMessage segment for trace_id: {}",
            trace_id
        );

        let message_size = std::mem::size_of::<TraceEventMessage>() as u64;
        let message_buffer = self.reserve_instruction_region(message_size);

        let trace_id_ptr = message_buffer;
        let trace_id_u64_ptr = self
            .builder
            .build_pointer_cast(
                trace_id_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "trace_id_u64_ptr",
            )
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cast trace_id ptr: {e}")))?;
        let trace_id_val = self.context.i64_type().const_int(trace_id, false);
        self.builder
            .build_store(trace_id_u64_ptr, trace_id_val)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store trace_id: {e}")))?;

        let timestamp = self.get_current_timestamp()?;
        let timestamp_ptr = unsafe {
            self.builder
                .build_gep(
                    self.context.i8_type(),
                    message_buffer,
                    &[self
                        .context
                        .i32_type()
                        .const_int(consts::TRACE_EVENT_MESSAGE_TIMESTAMP_OFFSET as u64, false)],
                    "timestamp_ptr",
                )
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to get timestamp GEP: {e}")))?
        };
        let timestamp_u64_ptr = self
            .builder
            .build_pointer_cast(
                timestamp_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "timestamp_u64_ptr",
            )
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cast timestamp ptr: {e}")))?;
        self.builder
            .build_store(timestamp_u64_ptr, timestamp)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store timestamp: {e}")))?;

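        // Split the pid/tgid value into the 32-bit pid and tid fields of the message.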
        let pid_tgid_result = self.get_current_pid_tgid()?;
        let i32_type = self.context.i32_type();
        let pid = self
            .builder
            .build_int_truncate(pid_tgid_result, i32_type, "pid")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to truncate pid: {e}")))?;
        let shift_32 = self.context.i64_type().const_int(32, false);
        let tid_64 = self
            .builder
            .build_right_shift(pid_tgid_result, shift_32, false, "tid_64")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to shift tid: {e}")))?;
        let tid = self
            .builder
            .build_int_truncate(tid_64, i32_type, "tid")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to truncate tid: {e}")))?;

        let pid_ptr = unsafe {
            self.builder
                .build_gep(
                    self.context.i8_type(),
                    message_buffer,
                    &[self
                        .context
                        .i32_type()
                        .const_int(consts::TRACE_EVENT_MESSAGE_PID_OFFSET as u64, false)],
                    "pid_ptr",
                )
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to get pid GEP: {e}")))?
        };
        let pid_u32_ptr = self
            .builder
            .build_pointer_cast(
                pid_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "pid_u32_ptr",
            )
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cast pid ptr: {e}")))?;
        self.builder
            .build_store(pid_u32_ptr, pid)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store pid: {e}")))?;

        let tid_ptr = unsafe {
            self.builder
                .build_gep(
                    self.context.i8_type(),
                    message_buffer,
                    &[self
                        .context
                        .i32_type()
                        .const_int(consts::TRACE_EVENT_MESSAGE_TID_OFFSET as u64, false)],
                    "tid_ptr",
                )
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to get tid GEP: {e}")))?
        };
        let tid_u32_ptr = self
            .builder
            .build_pointer_cast(
                tid_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "tid_u32_ptr",
            )
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cast tid ptr: {e}")))?;
        self.builder
            .build_store(tid_u32_ptr, tid)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store tid: {e}")))?;

        Ok(())
    }

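    /// Emit code for the EndInstruction segment: instruction header, total instruction count,
    /// and an aggregated execution status, then flush the accumulated event to user space.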
    pub fn send_end_instruction(&mut self, total_instructions: u16) -> Result<()> {
        info!(
            "Sending EndInstruction segment with {} total instructions",
            total_instructions
        );

        let total_size =
            (std::mem::size_of::<ghostscope_protocol::trace_event::InstructionHeader>()
                + std::mem::size_of::<EndInstructionData>()) as u64;
        let end_buffer = self.reserve_instruction_region(total_size);

        let inst_type_ptr = end_buffer;
        let inst_type_val = self
            .context
            .i8_type()
            .const_int(InstructionType::EndInstruction as u64, false);
        self.builder
            .build_store(inst_type_ptr, inst_type_val)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store inst_type: {e}")))?;

        let data_length_ptr = unsafe {
            self.builder
                .build_gep(
                    self.context.i8_type(),
                    end_buffer,
                    &[self
                        .context
                        .i32_type()
                        .const_int(consts::INSTRUCTION_HEADER_DATA_LENGTH_OFFSET as u64, false)],
                    "data_length_ptr",
                )
                .map_err(|e| {
                    CodeGenError::LLVMError(format!("Failed to get data_length GEP: {e}"))
                })?
        };
        let data_length_i16_ptr = self
            .builder
            .build_pointer_cast(
                data_length_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "data_length_i16_ptr",
            )
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cast data_length ptr: {e}")))?;
        let data_length_val = self
            .context
            .i16_type()
            .const_int(std::mem::size_of::<EndInstructionData>() as u64, false);
        self.builder
            .build_store(data_length_i16_ptr, data_length_val)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store data_length: {e}")))?;

        let total_instructions_ptr = unsafe {
            self.builder
                .build_gep(
                    self.context.i8_type(),
                    end_buffer,
                    &[self
                        .context
                        .i32_type()
                        .const_int(consts::END_INSTRUCTION_DATA_OFFSET as u64, false)],
                    "total_instructions_ptr",
                )
                .map_err(|e| {
                    CodeGenError::LLVMError(format!("Failed to get total_instructions GEP: {e}"))
                })?
        };
        let total_instructions_i16_ptr = self
            .builder
            .build_pointer_cast(
                total_instructions_ptr,
                self.context.ptr_type(AddressSpace::default()),
                "total_instructions_i16_ptr",
            )
            .map_err(|e| {
                CodeGenError::LLVMError(format!("Failed to cast total_instructions ptr: {e}"))
            })?;
        let total_instructions_val = self
            .context
            .i16_type()
            .const_int(total_instructions as u64, false);
        self.builder
            .build_store(total_instructions_i16_ptr, total_instructions_val)
            .map_err(|e| {
                CodeGenError::LLVMError(format!("Failed to store total_instructions: {e}"))
            })?;

        let status_ptr = unsafe {
            self.builder
                .build_gep(
                    self.context.i8_type(),
                    end_buffer,
                    &[self.context.i32_type().const_int(
                        (consts::END_INSTRUCTION_DATA_OFFSET
                            + consts::END_INSTRUCTION_EXECUTION_STATUS_OFFSET)
                            as u64,
                        false,
                    )],
                    "status_ptr",
                )
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to get status GEP: {e}")))?
        };
        let any_fail_ptr = self.get_or_create_flag_global("_gs_any_fail");
        let any_succ_ptr = self.get_or_create_flag_global("_gs_any_success");

        let any_fail_val = self
            .builder
            .build_load(self.context.i8_type(), any_fail_ptr, "any_fail")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to load any_fail: {e}")))?
            .into_int_value();
        let any_succ_val = self
            .builder
            .build_load(self.context.i8_type(), any_succ_ptr, "any_succ")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to load any_succ: {e}")))?
            .into_int_value();

        let zero = self.context.i8_type().const_zero();
        let is_fail = self
            .builder
            .build_int_compare(inkwell::IntPredicate::NE, any_fail_val, zero, "is_fail")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cmp any_fail: {e}")))?;
        let is_succ = self
            .builder
            .build_int_compare(inkwell::IntPredicate::NE, any_succ_val, zero, "is_succ")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to cmp any_succ: {e}")))?;

        let not_succ = self
            .builder
            .build_not(is_succ, "not_succ")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to build not: {e}")))?;
        let only_fail = self
            .builder
            .build_and(is_fail, not_succ, "only_fail")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to build and: {e}")))?;
        let both = self
            .builder
            .build_and(is_fail, is_succ, "both")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to build and: {e}")))?;

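        // Aggregate the flags into a single status byte: 1 if both successes and failures
        // were recorded (partial), 2 if only failures, otherwise 0.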
        let two = self.context.i8_type().const_int(2, false);
        let one = self.context.i8_type().const_int(1, false);
        let sel1 = self
            .builder
            .build_select(only_fail, two, zero, "status_sel1")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to build select: {e}")))?
            .into_int_value();
        let sel2 = self
            .builder
            .build_select(both, one, sel1, "status_sel2")
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to build select: {e}")))?
            .into_int_value();

        self.builder
            .build_store(status_ptr, sel2)
            .map_err(|e| CodeGenError::LLVMError(format!("Failed to store status: {e}")))?;

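        // Flush the fully accumulated event (clamped to the configured maximum size) through
        // the selected output map, then reset the offset for the next event.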
        {
            let accum_buffer = self.get_or_create_perf_accumulation_buffer();
            let offset_ptr = self.get_or_create_perf_buffer_offset();

            let total_accumulated_size = self
                .builder
                .build_load(self.context.i32_type(), offset_ptr, "total_size")
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to load total size: {e}")))?
                .into_int_value();

            let max_size_i32 = self
                .context
                .i32_type()
                .const_int(self.compile_options.max_trace_event_size as u64, false);
            let size_le_max = self
                .builder
                .build_int_compare(
                    inkwell::IntPredicate::ULE,
                    total_accumulated_size,
                    max_size_i32,
                    "size_le_max",
                )
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to compare end size: {e}")))?;
            let clamped_size_i32 = self
                .builder
                .build_select(
                    size_le_max,
                    total_accumulated_size,
                    max_size_i32,
                    "clamped_size_i32",
                )
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to select clamp size: {e}")))?
                .into_int_value();

            let total_size_i64 = self
                .builder
                .build_int_z_extend(clamped_size_i32, self.context.i64_type(), "total_size_i64")
                .map_err(|e| CodeGenError::LLVMError(format!("Failed to extend size: {e}")))?;

            match self.compile_options.event_map_type {
                crate::EventMapType::PerfEventArray => {
                    self.create_perf_event_output_dynamic(accum_buffer, total_size_i64)?;
                }
                crate::EventMapType::RingBuf => {
                    self.create_ringbuf_output_dynamic(accum_buffer, total_size_i64)?;
                }
            }

            self.builder
                .build_store(offset_ptr, self.context.i32_type().const_zero())
                .map_err(|e| {
                    CodeGenError::LLVMError(format!("Failed to reset offset after send: {e}"))
                })?;
        }

        Ok(())
    }
}