hamelin_datafusion 0.7.8

Translate Hamelin TypedAST to DataFusion LogicalPlans
Documentation
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
//! Command translation from Hamelin IR to DataFusion logical plans.

use std::collections::HashMap;
use std::sync::Arc;

use datafusion::common::{Column, ScalarValue, TableReference, UnnestOptions};
use datafusion::datasource::provider_as_source;
use datafusion::logical_expr::expr::{
    AggregateFunction, Sort as DFSort, WindowFunction, WindowFunctionDefinition,
    WindowFunctionParams,
};
use datafusion::logical_expr::{
    ident, Expr, JoinType as DFJoinType, LogicalPlan, LogicalPlanBuilder, SortExpr,
    WindowFrame as DFWindowFrame, WindowFrameBound as DFWindowFrameBound,
    WindowFrameUnits as DFWindowFrameUnits,
};
use datafusion::prelude::SessionContext;
use hamelin_lib::err::TranslationError;
use hamelin_lib::tree::ast::clause::SortOrder;
use hamelin_lib::tree::ast::identifier::Identifier;
use hamelin_lib::tree::typed_ast::environment::TypeEnvironment;
use hamelin_translation::{
    IRAggCommand, IRAssignment, IRCommand, IRExplodeCommand, IRExpression, IRFromCommand, IRInput,
    IRJoinCommand, IRLimitCommand, IRSelectCommand, IRSortCommand, IRSortExpression,
    IRWhereCommand, IRWindowCommand, JoinType, RangeBound, RowBound,
    WindowFrame as HamelinWindowFrame,
};

use crate::expr::{translate_expr_with_ctx, ExprTranslationContext};

// ============================================================================
// Expression translation helper
// ============================================================================

/// Translate an IRExpression to a DataFusion Expr
/// Bridge an `IRExpression` into DataFusion's expression language by
/// unwrapping the IR node and delegating to the shared expression translator.
fn translate_ir_expr(
    expr: &IRExpression,
    ctx: &ExprTranslationContext,
) -> Result<Expr, Arc<TranslationError>> {
    let inner = expr.inner();
    translate_expr_with_ctx(inner, ctx)
}

// ============================================================================
// FROM Command Translation
// ============================================================================

/// Translate a FROM command to DataFusion LogicalPlan, with CTE support
///
/// After lowering, FROM clauses are simple table references or CTE references.
/// Aliased FROM clauses have been expanded into CTEs.
/// Translate a FROM command to DataFusion LogicalPlan, with CTE support.
///
/// After lowering, FROM clauses are simple table references or CTE references
/// (aliased FROM clauses were already expanded into CTEs). A single input is
/// returned as-is; multiple inputs are combined with UNION ALL.
pub async fn translate_from_command(
    cmd: &IRFromCommand,
    ctx: &SessionContext,
    ctes: &HashMap<Identifier, Arc<LogicalPlan>>,
    _output_schema: &TypeEnvironment,
    command: &IRCommand,
    _expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    if cmd.inputs.is_empty() {
        return Err(Arc::new(TranslationError::msg(
            command,
            "FROM command has no inputs",
        )));
    }

    // Translate every input to its own scan plan. This stays a plain loop
    // because each translation is awaited.
    let mut translated = Vec::with_capacity(cmd.inputs.len());
    for input in &cmd.inputs {
        translated.push(translate_from_input(input, ctx, ctes, command).await?);
    }

    // Fold the plans left-to-right into a chain of unions:
    // plan1 UNION ALL plan2 UNION ALL plan3 ...
    // DataFusion's TypeCoercion analyzer (DF 52.3+) handles metadata
    // and field name mismatches between union legs automatically.
    // If DataFusion is downgraded below 52.3, this would need explicit
    // CAST projections per leg to normalize Arrow schemas before union.
    // Separately, our hamelin_array_cast UDF strips Parquet metadata
    // when the analyzer routes complex casts through it (see array_cast.rs).
    let mut legs = translated.into_iter();
    let first = legs
        .next()
        .expect("non-empty inputs were checked above");
    legs.try_fold(first, |acc, leg| {
        LogicalPlanBuilder::from(acc)
            .union(leg)
            .map_err(|e| Arc::new(TranslationError::wrap(command, e)))?
            .build()
            .map_err(|e| Arc::new(TranslationError::wrap(command, e)))
    })
}

/// Translate a single FROM input to a scan plan
/// Translate a single FROM input to a scan plan.
///
/// Table inputs go through the shared table/CTE scanner; `With` inputs are
/// resolved against the pre-translated CTE map and fail if the name is absent.
async fn translate_from_input(
    input: &IRInput,
    ctx: &SessionContext,
    ctes: &HashMap<Identifier, Arc<LogicalPlan>>,
    command: &IRCommand,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    match input {
        IRInput::Table(identifier) => scan_table_or_cte(identifier, ctx, ctes, command).await,
        IRInput::With(name, _pipeline) => {
            // CTE reference - the pipeline was already planned; just fetch it.
            let key: Identifier = name.clone().into();
            ctes.get(&key)
                .map(|plan| plan.as_ref().clone())
                .ok_or_else(|| {
                    Arc::new(TranslationError::msg(
                        command,
                        &format!("CTE '{}' not found", name.as_str()),
                    ))
                })
        }
    }
}

/// Scan a table or CTE by identifier
/// Scan a table or CTE by identifier.
///
/// CTE names shadow catalog tables: the CTE map is consulted first, and only
/// on a miss do we fall back to the SessionContext's table providers.
async fn scan_table_or_cte(
    identifier: &Identifier,
    ctx: &SessionContext,
    ctes: &HashMap<Identifier, Arc<LogicalPlan>>,
    command: &IRCommand,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let wrap = |e| Arc::new(TranslationError::wrap(command, e));

    if let Some(cte_plan) = ctes.get(identifier) {
        let qualifier: String = identifier
            .segments()
            .iter()
            .map(|s| s.as_str())
            .collect::<Vec<_>>()
            .join(".");

        // Add a projection that references columns with the CTE qualifier.
        // This mimics what DataFusion's SQL parser does for `SELECT * FROM cte_name`.
        // Without this projection barrier, the optimizer can encounter issues when
        // traversing through SubqueryAlias → Unnest in UNION + ORDER BY scenarios.
        let mut qualified_cols = Vec::new();
        for field in cte_plan.schema().fields() {
            let column = Column::new(
                Some(TableReference::bare(qualifier.as_str())),
                field.name(),
            );
            qualified_cols.push(Expr::Column(column));
        }

        return LogicalPlanBuilder::from(cte_plan.as_ref().clone())
            .project(qualified_cols)
            .map_err(wrap)?
            .build()
            .map_err(wrap);
    }

    // Not a CTE: resolve via the SessionContext's table provider registry.
    let segments = identifier.segments();
    let table_ref = match segments {
        [name] => TableReference::bare(name.as_str()),
        [schema, name] => TableReference::partial(schema.as_str(), name.as_str()),
        [catalog, schema, name] => {
            TableReference::full(catalog.as_str(), schema.as_str(), name.as_str())
        }
        _ => {
            return Err(Arc::new(TranslationError::msg(
                command,
                &format!(
                    "Invalid table identifier '{}': expected 1-3 parts, got {}",
                    identifier,
                    segments.len()
                ),
            )));
        }
    };

    let provider = ctx.table_provider(table_ref).await.map_err(wrap)?;
    let source = provider_as_source(provider);

    let scan_name: String = segments
        .iter()
        .map(|s| s.as_str())
        .collect::<Vec<_>>()
        .join(".");

    LogicalPlanBuilder::scan(scan_name, source, None)
        .and_then(|builder| builder.build())
        .map_err(wrap)
}

// ============================================================================
// JOIN Command Translation
// ============================================================================

/// Translate a JOIN command to DataFusion LogicalPlan
///
/// After lowering, the right side is a simple identifier referencing either a CTE or a table.
/// The condition is always present (defaults to `true` for CROSS JOIN).
/// Translate a JOIN command to DataFusion LogicalPlan.
///
/// After lowering, the right side is a simple identifier referencing either a
/// CTE or a table, and a condition is always present (defaults to `true` for
/// CROSS JOIN).
pub async fn translate_join_command(
    cmd: &IRJoinCommand,
    input: LogicalPlan,
    ctx: &SessionContext,
    ctes: &HashMap<Identifier, Arc<LogicalPlan>>,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    // Resolve the right-hand side (CTE takes precedence over catalog table).
    let right_name: Identifier = cmd.right.clone().into();
    let right = scan_table_or_cte(&right_name, ctx, ctes, command).await?;

    // Map Hamelin's join kind onto DataFusion's.
    let df_join_type = match cmd.join_type {
        JoinType::Inner => DFJoinType::Inner,
        JoinType::Left => DFJoinType::Left,
    };

    // Translate the ON condition and assemble the join node.
    let on_expr = translate_ir_expr(&cmd.condition, expr_ctx)?;

    LogicalPlanBuilder::from(input)
        .join_on(right, df_join_type, vec![on_expr])
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))?
        .build()
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))
}

// ============================================================================
// WHERE Command Translation
// ============================================================================

/// Translate a WHERE command to DataFusion LogicalPlan
/// Translate a WHERE command into a DataFusion Filter node over `input`.
pub fn translate_where_command(
    cmd: &IRWhereCommand,
    input: LogicalPlan,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let wrap = |e| Arc::new(TranslationError::wrap(command, e));
    let filter_expr = translate_ir_expr(&cmd.predicate, expr_ctx)?;

    LogicalPlanBuilder::from(input)
        .filter(filter_expr)
        .map_err(wrap)?
        .build()
        .map_err(wrap)
}

// ============================================================================
// SELECT Command Translation
// ============================================================================

/// Translate a SELECT command to DataFusion LogicalPlan
///
/// After lowering, all identifiers are simple (no compound identifiers).
/// Translate a SELECT command into a DataFusion Projection node.
///
/// After lowering, all identifiers are simple (no compound identifiers), so
/// each assignment becomes a translated expression aliased to its target name.
pub fn translate_select_command(
    cmd: &IRSelectCommand,
    input: LogicalPlan,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let mut projections = Vec::with_capacity(cmd.assignments.len());
    for assignment in &cmd.assignments {
        let translated = translate_ir_expr(&assignment.expression, expr_ctx)?;
        projections.push(translated.alias(assignment.identifier.as_str()));
    }

    LogicalPlanBuilder::from(input)
        .project(projections)
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))?
        .build()
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))
}

// ============================================================================
// LIMIT Command Translation
// ============================================================================

/// Translate a LIMIT command to DataFusion LogicalPlan
/// Translate a LIMIT command into a DataFusion Limit node (offset 0, fetch = count).
pub fn translate_limit_command(
    cmd: &IRLimitCommand,
    input: LogicalPlan,
    command: &IRCommand,
    _expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let wrap = |e| Arc::new(TranslationError::wrap(command, e));
    LogicalPlanBuilder::from(input)
        .limit(0, Some(cmd.count))
        .map_err(wrap)?
        .build()
        .map_err(wrap)
}

// ============================================================================
// SORT Command Translation
// ============================================================================

/// Translate a SORT command to DataFusion LogicalPlan
/// Translate a SORT command into a DataFusion Sort node over `input`.
pub fn translate_sort_command(
    cmd: &IRSortCommand,
    input: LogicalPlan,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let mut keys = Vec::with_capacity(cmd.sort_by.len());
    for sort_expr in &cmd.sort_by {
        keys.push(translate_sort_expression(sort_expr, expr_ctx)?);
    }

    LogicalPlanBuilder::from(input)
        .sort(keys)
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))?
        .build()
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))
}

/// Translate an IR sort expression to DataFusion SortExpr
/// Translate an IR sort expression to a DataFusion SortExpr.
///
/// Nulls always sort last regardless of direction, matching Trino's default.
fn translate_sort_expression(
    sort_expr: &IRSortExpression,
    expr_ctx: &ExprTranslationContext,
) -> Result<SortExpr, Arc<TranslationError>> {
    let key = translate_ir_expr(&sort_expr.expression, expr_ctx)?;
    let asc = matches!(sort_expr.order, SortOrder::Asc);
    // nulls_first = false => NULLS LAST always (Trino default).
    Ok(key.sort(asc, false))
}

// ============================================================================
// EXPLODE Command Translation
// ============================================================================

/// Translate an EXPLODE command to DataFusion LogicalPlan
///
/// After lowering, EXPLODE is in canonical form: each column to explode is specified
/// by a simple identifier.
/// Translate an EXPLODE command to DataFusion LogicalPlan.
///
/// After lowering, EXPLODE is in canonical form: each column to explode is
/// specified by a simple identifier. Columns are unnested in parallel and a
/// re-ordering projection is placed on top.
pub fn translate_explode_command(
    cmd: &IRExplodeCommand,
    input: LogicalPlan,
    output_schema: &TypeEnvironment,
    command: &IRCommand,
    _expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let wrap = |e| Arc::new(TranslationError::wrap(command, e));

    let unnest_targets: Vec<Column> = cmd
        .columns
        .iter()
        .map(|c| Column::from_name(c.as_str()))
        .collect();

    // DataFusion unnests all listed array columns in one Unnest node.
    let opts = UnnestOptions::default().with_preserve_nulls(false);
    let unnested = LogicalPlanBuilder::from(input)
        .unnest_columns_with_options(unnest_targets, opts)
        .map_err(wrap)?
        .build()
        .map_err(wrap)?;

    // Add an intermediate projection that re-aliases the unnested column.
    // This mimics what DataFusion's SQL parser does: it wraps unnest output
    // in a projection. The projection creates a barrier that prevents optimizer
    // issues with Unnest nodes during UNION + ORDER BY.
    //
    // Use the output_schema to ensure correct column ordering, since DataFusion's
    // unnest may return columns in a different order than expected.
    let ordered_cols: Vec<Expr> = output_schema
        .as_struct()
        .iter()
        .map(|(name, _)| ident(name.name()))
        .collect();

    LogicalPlanBuilder::from(unnested)
        .project(ordered_cols)
        .map_err(wrap)?
        .build()
        .map_err(wrap)
}

// ============================================================================
// AGG Command Translation
// ============================================================================

/// Translate an AGG command to a DataFusion Aggregate node.
///
/// After lowering, AGG commands have only simple identifiers.
/// Translate an AGG command to a DataFusion Aggregate node.
///
/// After lowering, AGG commands have only simple identifiers. Any SORT BY
/// clause is attached as ORDER BY to each aggregate for ordered aggregation.
pub fn translate_agg_command(
    cmd: &IRAggCommand,
    input: LogicalPlan,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    // GROUP BY keys.
    let mut group_exprs = Vec::with_capacity(cmd.group_by.len());
    for assignment in &cmd.group_by {
        group_exprs.push(translate_assignment(assignment, expr_ctx)?);
    }

    // SORT BY keys feeding ordered aggregation.
    let mut sort_exprs = Vec::with_capacity(cmd.sort_by.len());
    for sort_expr in &cmd.sort_by {
        sort_exprs.push(translate_sort_expression(sort_expr, expr_ctx)?);
    }

    // Aggregate expressions, each picking up the shared ORDER BY when present.
    let mut aggr_exprs = Vec::with_capacity(cmd.aggregates.len());
    for assignment in &cmd.aggregates {
        aggr_exprs.push(translate_assignment_with_order_by(
            assignment, &sort_exprs, expr_ctx,
        )?);
    }

    LogicalPlanBuilder::from(input)
        .aggregate(group_exprs, aggr_exprs)
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))?
        .build()
        .map_err(|e| Arc::new(TranslationError::wrap(command, e)))
}

/// Translate an IR assignment to a DataFusion Expr with alias, applying order_by to aggregates
/// Translate an IR assignment to an aliased DataFusion Expr, injecting the
/// given ORDER BY into the expression when it is an aggregate function.
/// Non-aggregate expressions pass through unchanged.
fn translate_assignment_with_order_by(
    assignment: &IRAssignment,
    order_by: &[SortExpr],
    expr_ctx: &ExprTranslationContext,
) -> Result<Expr, Arc<TranslationError>> {
    let mut translated = translate_ir_expr(&assignment.expression, expr_ctx)?;

    if !order_by.is_empty() {
        if let Expr::AggregateFunction(agg) = &mut translated {
            agg.params.order_by = order_by.to_vec();
        }
    }

    Ok(translated.alias(assignment.identifier.as_str()))
}

/// Translate an IR assignment to a DataFusion Expr with alias
/// Translate an IR assignment to a DataFusion Expr aliased to its target name.
fn translate_assignment(
    assignment: &IRAssignment,
    expr_ctx: &ExprTranslationContext,
) -> Result<Expr, Arc<TranslationError>> {
    let translated = translate_ir_expr(&assignment.expression, expr_ctx)?;
    let target = assignment.identifier.as_str();
    Ok(translated.alias(target))
}

// ============================================================================
// WINDOW Command Translation
// ============================================================================

/// Translate a WINDOW command to a DataFusion LogicalPlan
///
/// WINDOW extends the schema with new window function columns while preserving all input columns.
/// After lowering, all identifiers are simple.
///
/// DataFusion's `.window()` appends window columns after input columns, but Hamelin expects
/// window columns first. We add a final projection to reorder columns to match the output schema.
/// Translate a WINDOW command to a DataFusion LogicalPlan.
///
/// WINDOW extends the schema with new window function columns while preserving
/// all input columns. After lowering, all identifiers are simple.
///
/// DataFusion's `.window()` appends window columns after input columns, but
/// Hamelin expects window columns first, so a final projection reorders the
/// output to match the declared schema.
pub fn translate_window_command(
    cmd: &IRWindowCommand,
    input: LogicalPlan,
    output_schema: &TypeEnvironment,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<LogicalPlan, Arc<TranslationError>> {
    let wrap = |e| Arc::new(TranslationError::wrap(command, e));

    // PARTITION BY: pure column references after normalization.
    let mut partition_exprs = Vec::with_capacity(cmd.partition_by.len());
    for e in &cmd.partition_by {
        partition_exprs.push(translate_ir_expr(e, expr_ctx)?);
    }

    // ORDER BY: converted to DataFusion Sort structs.
    // nulls_first = false => NULLS LAST always (Trino default).
    let mut order_by_exprs = Vec::with_capacity(cmd.sort_by.len());
    for s in &cmd.sort_by {
        let key = translate_ir_expr(&s.expression, expr_ctx)?;
        let asc = matches!(s.order, SortOrder::Asc);
        order_by_exprs.push(DFSort::new(key, asc, false));
    }

    // Optional explicit frame, converted to DataFusion's representation.
    let df_frame = cmd
        .frame
        .as_ref()
        .map(|frame| convert_window_frame(frame, command, expr_ctx))
        .transpose()?;

    let has_order_by = !cmd.sort_by.is_empty();

    // Each projection becomes a window expression carrying partition/order/frame.
    // Aggregate functions are wrapped in WindowFunction with an AggregateUDF
    // definition; existing window functions just get their params replaced.
    let mut window_exprs = Vec::with_capacity(cmd.projections.len());
    for assignment in &cmd.projections {
        let translated = translate_ir_expr(&assignment.expression, expr_ctx)?;
        let windowed = convert_to_window_expr(
            translated,
            partition_exprs.clone(),
            order_by_exprs.clone(),
            df_frame.clone(),
            has_order_by,
            command,
        )?;
        window_exprs.push(windowed.alias(assignment.identifier.as_str()));
    }

    let window_plan = LogicalPlanBuilder::from(input)
        .window(window_exprs)
        .map_err(wrap)?
        .build()
        .map_err(wrap)?;

    // DataFusion appends window columns after input columns, but Hamelin expects
    // window columns first: reorder to the declared output schema. Partition-by
    // columns were pre-projected by the normalization pass, so they exist as
    // regular input columns and pass through naturally.
    let reorder_exprs: Vec<Expr> = output_schema
        .as_struct()
        .iter()
        .map(|(name, _)| ident(name.name()))
        .collect();

    LogicalPlanBuilder::from(window_plan)
        .project(reorder_exprs)
        .map_err(wrap)?
        .build()
        .map_err(wrap)
}

/// Convert a Hamelin WindowFrame to a DataFusion WindowFrame
/// Convert a Hamelin WindowFrame (ROWS or RANGE) to a DataFusion WindowFrame.
fn convert_window_frame(
    frame: &HamelinWindowFrame,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<DFWindowFrame, Arc<TranslationError>> {
    // Resolve units and both bounds, then construct the frame once.
    let (units, start_bound, end_bound) = match frame {
        HamelinWindowFrame::Rows { start, end } => (
            DFWindowFrameUnits::Rows,
            convert_row_bound(start, true),
            convert_row_bound(end, false),
        ),
        HamelinWindowFrame::Range { start, end } => (
            DFWindowFrameUnits::Range,
            convert_range_bound(start, true, command, expr_ctx)?,
            convert_range_bound(end, false, command, expr_ctx)?,
        ),
    };
    Ok(DFWindowFrame::new_bounds(units, start_bound, end_bound))
}

/// Convert a Hamelin RowBound to a DataFusion WindowFrameBound
/// Convert a Hamelin RowBound to a DataFusion WindowFrameBound.
///
/// `is_start` decides which direction an UNBOUNDED bound points
/// (UNBOUNDED PRECEDING at the start, UNBOUNDED FOLLOWING at the end).
fn convert_row_bound(bound: &RowBound, is_start: bool) -> DFWindowFrameBound {
    match bound {
        RowBound::Unbounded if is_start => DFWindowFrameBound::Preceding(ScalarValue::Null),
        RowBound::Unbounded => DFWindowFrameBound::Following(ScalarValue::Null),
        RowBound::CurrentRow => DFWindowFrameBound::CurrentRow,
        RowBound::Preceding(n) => DFWindowFrameBound::Preceding(ScalarValue::UInt64(Some(*n))),
        RowBound::Following(n) => DFWindowFrameBound::Following(ScalarValue::UInt64(Some(*n))),
    }
}

/// Convert a Hamelin RangeBound to a DataFusion WindowFrameBound
///
/// Translates the IRExpression to a DataFusion Expr, then extracts the ScalarValue
/// from the resulting literal (bounds are always const-folded during lowering).
/// Convert a Hamelin RangeBound to a DataFusion WindowFrameBound.
///
/// Offset bounds carry an IRExpression which is translated and reduced to a
/// ScalarValue (bounds are always const-folded during lowering, so the
/// translation must yield a literal).
fn convert_range_bound(
    bound: &RangeBound,
    is_start: bool,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<DFWindowFrameBound, Arc<TranslationError>> {
    let converted = match bound {
        RangeBound::Unbounded if is_start => DFWindowFrameBound::Preceding(ScalarValue::Null),
        RangeBound::Unbounded => DFWindowFrameBound::Following(ScalarValue::Null),
        RangeBound::CurrentRow => DFWindowFrameBound::CurrentRow,
        RangeBound::Preceding(expr) => {
            DFWindowFrameBound::Preceding(ir_expr_to_scalar(expr, command, expr_ctx)?)
        }
        RangeBound::Following(expr) => {
            DFWindowFrameBound::Following(ir_expr_to_scalar(expr, command, expr_ctx)?)
        }
    };
    Ok(converted)
}

/// Translate an IRExpression to a DataFusion ScalarValue.
///
/// The expression must be a literal after const-folding (which is guaranteed
/// for window frame bounds after lowering).
/// Translate an IRExpression to a DataFusion ScalarValue.
///
/// The expression must be a literal after const-folding (which is guaranteed
/// for window frame bounds after lowering); anything else is a translation error.
fn ir_expr_to_scalar(
    expr: &IRExpression,
    command: &IRCommand,
    expr_ctx: &ExprTranslationContext,
) -> Result<ScalarValue, Arc<TranslationError>> {
    match translate_ir_expr(expr, expr_ctx)? {
        Expr::Literal(value, _) => Ok(value),
        unexpected => Err(Arc::new(TranslationError::msg(
            command,
            &format!("Expected literal for window frame bound, got: {unexpected}"),
        ))),
    }
}

// ============================================================================
// Window Expression Conversion
// ============================================================================

/// Convert an expression to a proper window expression by applying partition/order/frame.
///
/// For aggregate functions (like `sum`), we need to wrap them in `Expr::WindowFunction`
/// with `WindowFunctionDefinition::AggregateUDF`. For existing window functions, we use
/// `ExprFunctionExt` to apply the window specification.
///
/// When there's an ORDER BY but no explicit frame, the default is cumulative (ROWS UNBOUNDED
/// PRECEDING to CURRENT ROW). When there's no ORDER BY, the default is the whole partition.
/// Convert an expression to a proper window expression by applying partition/order/frame.
///
/// Aggregate functions (like `sum`) are wrapped in `Expr::WindowFunction` with
/// `WindowFunctionDefinition::AggregateUDF`; existing window functions have
/// their params replaced with the supplied window specification.
///
/// When there's an ORDER BY but no explicit frame, the default is cumulative
/// (ROWS UNBOUNDED PRECEDING to CURRENT ROW). When there's no ORDER BY, the
/// default is the whole partition.
fn convert_to_window_expr(
    expr: Expr,
    partition_by: Vec<Expr>,
    order_by: Vec<DFSort>,
    window_frame: Option<DFWindowFrame>,
    has_order_by: bool,
    command: &IRCommand,
) -> Result<Expr, Arc<TranslationError>> {
    // Resolve the effective frame up front: explicit frame wins, otherwise
    // cumulative with ORDER BY, full partition without.
    let fallback = if has_order_by {
        DFWindowFrame::new(Some(true)) // ROWS UNBOUNDED PRECEDING to CURRENT ROW
    } else {
        DFWindowFrame::new(None) // ROWS UNBOUNDED PRECEDING to UNBOUNDED FOLLOWING
    };
    let frame = window_frame.unwrap_or(fallback);

    match expr {
        // Aggregate function: re-express it as a window function over an AggregateUDF.
        Expr::AggregateFunction(AggregateFunction { func, params }) => {
            Ok(Expr::WindowFunction(Box::new(WindowFunction {
                fun: WindowFunctionDefinition::AggregateUDF(func),
                params: WindowFunctionParams {
                    args: params.args,
                    partition_by,
                    order_by,
                    window_frame: frame,
                    filter: params.filter,
                    null_treatment: params.null_treatment,
                    distinct: params.distinct,
                },
            })))
        }

        // Already a window function: keep the function, swap in the new spec.
        Expr::WindowFunction(wf) => {
            let WindowFunction { fun, params } = *wf;
            Ok(Expr::WindowFunction(Box::new(WindowFunction {
                fun,
                params: WindowFunctionParams {
                    args: params.args,
                    partition_by,
                    order_by,
                    window_frame: frame,
                    filter: params.filter,
                    null_treatment: params.null_treatment,
                    distinct: params.distinct,
                },
            })))
        }

        // Other expressions (e.g., binary expressions containing aggregates)
        // This is tricky - for now we error; case_4 will need special handling
        other => Err(Arc::new(TranslationError::msg(
            command,
            &format!(
                "WINDOW expression must be an aggregate or window function, got: {:?}",
                other
            ),
        ))),
    }
}