runmat_runtime/builtins/math/poly/polyint.rs

//! MATLAB-compatible `polyint` builtin with GPU-aware semantics for RunMat.

use log::{trace, warn};
use num_complex::Complex64;
use runmat_accelerate_api::HostTensorView;
use runmat_builtins::{ComplexTensor, Tensor, Value};
use runmat_macros::runtime_builtin;

use crate::builtins::common::spec::{
    BroadcastSemantics, BuiltinFusionSpec, BuiltinGpuSpec, ConstantStrategy, GpuOpKind,
    ProviderHook, ReductionNaN, ResidencyPolicy, ScalarType, ShapeRequirements,
};
use crate::builtins::common::tensor;
use crate::dispatcher;
#[cfg(feature = "doc_export")]
use crate::register_builtin_doc_text;
use crate::{register_builtin_fusion_spec, register_builtin_gpu_spec};

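/// Imaginary parts with magnitude at or below this tolerance are treated as zero
/// when deciding whether a result can be returned (and re-uploaded) as real data.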
const EPS: f64 = 1.0e-12;

#[cfg(feature = "doc_export")]
pub const DOC_MD: &str = r#"---
title: "polyint"
category: "math/poly"
keywords: ["polyint", "polynomial integral", "antiderivative", "integration constant", "gpu"]
summary: "Integrate polynomial coefficient vectors and append a constant of integration."
references:
  - title: "MATLAB polyint documentation"
    url: "https://www.mathworks.com/help/matlab/ref/polyint.html"
gpu_support:
  elementwise: false
  reduction: false
  precisions: ["f32", "f64"]
  broadcasting: "none"
  notes: "Real-valued coefficient vectors stay on the GPU when the provider exposes the polyint hook; complex outputs fall back to the host."
fusion:
  elementwise: false
  reduction: false
  max_inputs: 1
  constants: "inline"
requires_feature: null
tested:
  unit: "builtins::math::poly::polyint::tests"
  integration: "builtins::math::poly::polyint::tests::polyint_gpu_roundtrip"
  doc: "builtins::math::poly::polyint::tests::doc_examples_present"
---

# What does the `polyint` function do in MATLAB / RunMat?
`polyint(p)` returns the polynomial that integrates the coefficient vector `p` once, appending the
constant of integration at the end of the coefficient list. Coefficients follow MATLAB's descending
power convention: `p(1)` multiplies the highest power of `x`, and `p(end)` is the constant term.
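
For a length-`n` coefficient vector, the integral divides each coefficient by its descending power
and appends the constant of integration. A minimal sketch of the same arithmetic done by hand:

```matlab
p = [3 -2 5 7];
n = numel(p);
q_manual = [p ./ (n:-1:1), 0];   % same coefficients that polyint(p) returns
```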

## How does the `polyint` function behave in MATLAB / RunMat?
- Accepts real or complex scalars, row vectors, column vectors, or empty vectors. Inputs with more
  than one non-singleton dimension raise MATLAB-compatible errors.
- Logical and integer coefficients are promoted to double precision before integration (see the
  sketch after this list).
- The optional second argument supplies the constant of integration. It must be a scalar (real or
  complex). When omitted, the constant defaults to `0`.
- Leading zeros are preserved. Integrating `[0 0 5]` produces `[0 0 5 0]`, matching MATLAB.
- Empty inputs integrate to the constant of integration (default `0`). Specifying a constant `k` yields `[k]`.
- The orientation of the input vector is preserved: row vectors stay row vectors, column vectors
  stay column vectors, and scalars return a row vector.
- When coefficients reside on the GPU and the provider's `polyint` hook is unavailable, RunMat
  gathers them to the host, performs the integration, and re-uploads real-valued results so
  downstream kernels retain residency.
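
A quick sketch of the promotion and empty-input rules above (results shown as comments):

```matlab
q1 = polyint(logical([1 0 1]));   % promoted to double: [0.3333  0  1  0]
q2 = polyint(int32([2 4]));       % promoted to double: [1  4  0]
q3 = polyint([], 5);              % empty input: the constant alone, 5
```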

## `polyint` Function GPU Execution Behaviour
When a GPU provider is registered and the coefficient vector is real-valued, RunMat calls the
provider's dedicated `polyint` kernel. The input stays on the device, the kernel divides each
coefficient by the appropriate power, and the supplied constant of integration is written directly
into device memory. If the coefficients or the constant are complex, or if the provider reports that
the hook is unavailable, the runtime gathers data back to the host, performs the integration in
double precision, and re-uploads the result when it is purely real.
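
The fallback is transparent. For example, a complex constant of integration forces the host path
and produces a complex result (a sketch mirroring the runtime's own fallback test):

```matlab
g = gpuArray([2 0]);
q = polyint(g, 2i);   % complex constant: integrated on the host, q = [1 0 2i]
```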

## Examples of using the `polyint` function in MATLAB / RunMat

### Integrating a cubic polynomial

```matlab
p = [3 -2 5 7];        % 3x^3 - 2x^2 + 5x + 7
q = polyint(p);
```

Expected output:

```matlab
q = [0.75  -0.6667  2.5  7  0];
```

### Supplying a constant of integration

```matlab
p = [4 0 -8];
q = polyint(p, 3);
```

Expected output:

```matlab
q = [1.3333  0  -8  3];
```

### Preserving column-vector orientation

```matlab
p = [2; 0; -6];
q = polyint(p);
```

Expected output:

```matlab
q =
    0.6667
         0
   -6.0000
         0
```

### Integrating the zero polynomial

```matlab
q = polyint([]);
```

Expected output:

```matlab
q = 0;
```

### Integrating complex coefficients

```matlab
p = [1+2i  -3  4i];
q = polyint(p, -1i);
```

Expected output:

```matlab
q = [(1+2i)/3  -1.5  4i  -1i];
```

### Working with gpuArray inputs

```matlab
g = gpuArray([1 -4 6]);
q = polyint(g);          % Runs on the device when the provider hook is available
result = gather(q);
```

Expected behavior:

```matlab
result = [0.3333  -2  6  0];
```

## GPU residency in RunMat (Do I need `gpuArray`?)
You usually do **not** need to call `gpuArray` just for `polyint`. When inputs already live on the
GPU and a provider is active, RunMat keeps the data on the device and executes the integration there
for real-valued coefficients. For complex inputs, or when no provider hook is available, the runtime
falls back to the host implementation transparently.

## FAQ

### Does `polyint` change the orientation of my coefficients?
No. Row vectors stay row vectors, column vectors stay column vectors, and scalars return a row
vector with two elements after integration.

### What happens when I pass an empty vector?
An empty vector represents the zero polynomial. `polyint([])` returns the constant of integration,
so the default result is `0` and `polyint([], k)` returns `k`.

### Can the constant of integration be complex?
Yes. Provide any scalar numeric value (real or complex). The constant is appended to the integrated
polynomial exactly as MATLAB does.

### Are logical or integer coefficients supported?
Yes. They are promoted to double precision before integration, ensuring identical behaviour to
MATLAB.

### Will the result stay on the GPU?
Real-valued outputs are re-uploaded to the GPU when a provider is available. Complex outputs remain
on the host because current providers do not expose complex tensor handles.

### How precise is the computation?
All arithmetic uses IEEE 754 double precision (`f64`), mirroring MATLAB's default numeric type.

## See Also
[polyder](./polyder), [polyval](./polyval), [polyfit](./polyfit), [gpuArray](../../acceleration/gpu/gpuArray), [gather](../../acceleration/gpu/gather)
"#;

pub const GPU_SPEC: BuiltinGpuSpec = BuiltinGpuSpec {
    name: "polyint",
    op_kind: GpuOpKind::Custom("polynomial-integral"),
    supported_precisions: &[ScalarType::F32, ScalarType::F64],
    broadcast: BroadcastSemantics::None,
    provider_hooks: &[ProviderHook::Custom("polyint")],
    constant_strategy: ConstantStrategy::InlineLiteral,
    residency: ResidencyPolicy::NewHandle,
    nan_mode: ReductionNaN::Include,
    two_pass_threshold: None,
    workgroup_size: None,
    accepts_nan_mode: false,
    notes: "Providers implement the polyint hook for real coefficient vectors; complex inputs fall back to the host.",
};

register_builtin_gpu_spec!(GPU_SPEC);

pub const FUSION_SPEC: BuiltinFusionSpec = BuiltinFusionSpec {
    name: "polyint",
    shape: ShapeRequirements::Any,
    constant_strategy: ConstantStrategy::InlineLiteral,
    elementwise: None,
    reduction: None,
    emits_nan: false,
    notes: "Symbolic operation on coefficient vectors; fusion does not apply.",
};

register_builtin_fusion_spec!(FUSION_SPEC);

#[cfg(feature = "doc_export")]
register_builtin_doc_text!("polyint", DOC_MD);

#[runtime_builtin(
    name = "polyint",
    category = "math/poly",
    summary = "Integrate polynomial coefficient vectors and append a constant of integration.",
    keywords = "polyint,polynomial,integral,antiderivative"
)]
fn polyint_builtin(coeffs: Value, rest: Vec<Value>) -> Result<Value, String> {
    if rest.len() > 1 {
        return Err("polyint: too many input arguments".to_string());
    }

    let constant = rest
        .into_iter()
        .next()
        .map(parse_constant)
        .transpose()?
        .unwrap_or_else(|| Complex64::new(0.0, 0.0));

    if let Value::GpuTensor(handle) = &coeffs {
        if let Some(device_result) = try_polyint_gpu(handle, constant)? {
            return Ok(Value::GpuTensor(device_result));
        }
    }

    let was_gpu = matches!(coeffs, Value::GpuTensor(_));
    polyint_host_value(coeffs, constant, was_gpu)
}

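/// Host path: parse and gather the coefficients, integrate in double precision,
/// fold the constant into the trailing coefficient, and restore the input's
/// orientation (re-uploading to the GPU when the input was device-resident).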
fn polyint_host_value(coeffs: Value, constant: Complex64, was_gpu: bool) -> Result<Value, String> {
    let polynomial = parse_polynomial(coeffs)?;
    let mut integrated = integrate_coeffs(&polynomial.coeffs);
    if integrated.is_empty() {
        integrated.push(constant);
    } else if let Some(last) = integrated.last_mut() {
        *last += constant;
    }
    let value = coeffs_to_value(&integrated, polynomial.orientation)?;
    maybe_return_gpu(value, was_gpu)
}

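/// Attempts the provider's dedicated `polyint` hook. Returns `Ok(None)` when the
/// constant is complex, no provider is registered, or the hook reports an error,
/// so the caller can fall back to the host implementation.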
fn try_polyint_gpu(
    handle: &runmat_accelerate_api::GpuTensorHandle,
    constant: Complex64,
) -> Result<Option<runmat_accelerate_api::GpuTensorHandle>, String> {
    if constant.im.abs() > EPS {
        return Ok(None);
    }
    ensure_vector_shape(&handle.shape)?;
    let Some(provider) = runmat_accelerate_api::provider() else {
        return Ok(None);
    };
    match provider.polyint(handle, constant.re) {
        Ok(result) => Ok(Some(result)),
        Err(err) => {
            trace!("polyint: provider hook unavailable, falling back to host: {err}");
            Ok(None)
        }
    }
}

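/// Divides each coefficient by its descending power and appends a zero slot that
/// the caller fills with the constant of integration.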
fn integrate_coeffs(coeffs: &[Complex64]) -> Vec<Complex64> {
    if coeffs.is_empty() {
        return Vec::new();
    }
    let mut result = Vec::with_capacity(coeffs.len() + 1);
    for (idx, coeff) in coeffs.iter().enumerate() {
        let power = (coeffs.len() - idx) as f64;
        if power <= 0.0 {
            result.push(Complex64::new(0.0, 0.0));
        } else {
            result.push(*coeff / Complex64::new(power, 0.0));
        }
    }
    result.push(Complex64::new(0.0, 0.0));
    result
}

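/// Re-uploads real-valued results when the input was GPU-resident and a provider
/// is available; complex results always remain on the host.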
fn maybe_return_gpu(value: Value, was_gpu: bool) -> Result<Value, String> {
    if !was_gpu {
        return Ok(value);
    }
    match value {
        Value::Tensor(tensor) => {
            if let Some(provider) = runmat_accelerate_api::provider() {
                let view = HostTensorView {
                    data: &tensor.data,
                    shape: &tensor.shape,
                };
                match provider.upload(&view) {
                    Ok(handle) => return Ok(Value::GpuTensor(handle)),
                    Err(err) => {
                        warn!("polyint: provider upload failed, keeping result on host: {err}");
                    }
                }
            } else {
                trace!("polyint: no provider available to re-upload result");
            }
            Ok(Value::Tensor(tensor))
        }
        other => Ok(other),
    }
}

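/// Materialises coefficients as a real tensor when every imaginary part is
/// negligible, and as a complex tensor otherwise.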
fn coeffs_to_value(coeffs: &[Complex64], orientation: Orientation) -> Result<Value, String> {
    if coeffs.iter().all(|c| c.im.abs() <= EPS) {
        let data: Vec<f64> = coeffs.iter().map(|c| c.re).collect();
        let shape = orientation.shape_for_len(data.len());
        let tensor = Tensor::new(data, shape).map_err(|e| format!("polyint: {e}"))?;
        Ok(tensor::tensor_into_value(tensor))
    } else {
        let data: Vec<(f64, f64)> = coeffs.iter().map(|c| (c.re, c.im)).collect();
        let shape = orientation.shape_for_len(data.len());
        let tensor = ComplexTensor::new(data, shape).map_err(|e| format!("polyint: {e}"))?;
        Ok(Value::ComplexTensor(tensor))
    }
}

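/// Normalises any accepted coefficient input (gathering from the GPU if needed)
/// into a complex coefficient list plus its original orientation.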
fn parse_polynomial(value: Value) -> Result<Polynomial, String> {
    let gathered = dispatcher::gather_if_needed(&value).map_err(|e| format!("polyint: {e}"))?;
    match gathered {
        Value::Tensor(tensor) => parse_tensor_coeffs(&tensor),
        Value::ComplexTensor(tensor) => parse_complex_tensor_coeffs(&tensor),
        Value::LogicalArray(logical) => {
            let tensor = tensor::logical_to_tensor(&logical)?;
            parse_tensor_coeffs(&tensor)
        }
        Value::Num(n) => Ok(Polynomial {
            coeffs: vec![Complex64::new(n, 0.0)],
            orientation: Orientation::Scalar,
        }),
        Value::Int(i) => Ok(Polynomial {
            coeffs: vec![Complex64::new(i.to_f64(), 0.0)],
            orientation: Orientation::Scalar,
        }),
        Value::Bool(b) => Ok(Polynomial {
            coeffs: vec![Complex64::new(if b { 1.0 } else { 0.0 }, 0.0)],
            orientation: Orientation::Scalar,
        }),
        Value::Complex(re, im) => Ok(Polynomial {
            coeffs: vec![Complex64::new(re, im)],
            orientation: Orientation::Scalar,
        }),
        other => Err(format!(
            "polyint: expected a numeric coefficient vector, got {:?}",
            other
        )),
    }
}

fn parse_tensor_coeffs(tensor: &Tensor) -> Result<Polynomial, String> {
    ensure_vector_shape(&tensor.shape)?;
    let orientation = orientation_from_shape(&tensor.shape);
    Ok(Polynomial {
        coeffs: tensor
            .data
            .iter()
            .map(|&v| Complex64::new(v, 0.0))
            .collect(),
        orientation,
    })
}

fn parse_complex_tensor_coeffs(tensor: &ComplexTensor) -> Result<Polynomial, String> {
    ensure_vector_shape(&tensor.shape)?;
    let orientation = orientation_from_shape(&tensor.shape);
    Ok(Polynomial {
        coeffs: tensor
            .data
            .iter()
            .map(|&(re, im)| Complex64::new(re, im))
            .collect(),
        orientation,
    })
}

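/// Extracts a scalar constant of integration (real or complex), gathering
/// GPU-resident values first.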
fn parse_constant(value: Value) -> Result<Complex64, String> {
    let gathered = dispatcher::gather_if_needed(&value).map_err(|e| format!("polyint: {e}"))?;
    match gathered {
        Value::Tensor(tensor) => {
            if tensor.data.len() != 1 {
                return Err("polyint: constant of integration must be a scalar".to_string());
            }
            Ok(Complex64::new(tensor.data[0], 0.0))
        }
        Value::ComplexTensor(tensor) => {
            if tensor.data.len() != 1 {
                return Err("polyint: constant of integration must be a scalar".to_string());
            }
            let (re, im) = tensor.data[0];
            Ok(Complex64::new(re, im))
        }
        Value::Num(n) => Ok(Complex64::new(n, 0.0)),
        Value::Int(i) => Ok(Complex64::new(i.to_f64(), 0.0)),
        Value::Bool(b) => Ok(Complex64::new(if b { 1.0 } else { 0.0 }, 0.0)),
        Value::Complex(re, im) => Ok(Complex64::new(re, im)),
        Value::LogicalArray(logical) => {
            let tensor = tensor::logical_to_tensor(&logical)?;
            if tensor.data.len() != 1 {
                return Err("polyint: constant of integration must be a scalar".to_string());
            }
            Ok(Complex64::new(tensor.data[0], 0.0))
        }
        other => Err(format!(
            "polyint: constant of integration must be numeric, got {:?}",
            other
        )),
    }
}

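/// Coefficient inputs must have at most one non-singleton dimension.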
fn ensure_vector_shape(shape: &[usize]) -> Result<(), String> {
    let non_unit = shape.iter().filter(|&&dim| dim > 1).count();
    if non_unit <= 1 {
        Ok(())
    } else {
        Err("polyint: coefficients must form a vector".to_string())
    }
}

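/// The first non-unit dimension determines orientation; all-singleton shapes are
/// treated as scalars.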
fn orientation_from_shape(shape: &[usize]) -> Orientation {
    for (idx, &dim) in shape.iter().enumerate() {
        if dim != 1 {
            return match idx {
                0 => Orientation::Column,
                1 => Orientation::Row,
                _ => Orientation::Column,
            };
        }
    }
    Orientation::Scalar
}

#[derive(Clone)]
struct Polynomial {
    coeffs: Vec<Complex64>,
    orientation: Orientation,
}

#[derive(Clone, Copy)]
enum Orientation {
    Scalar,
    Row,
    Column,
}

impl Orientation {
    fn shape_for_len(self, len: usize) -> Vec<usize> {
        if len <= 1 {
            return vec![1, 1];
        }
        match self {
            Orientation::Scalar | Orientation::Row => vec![1, len],
            Orientation::Column => vec![len, 1],
        }
    }
}

#[cfg(test)]
mod tests {
    use super::*;
    use crate::builtins::common::test_support;
    use runmat_builtins::LogicalArray;

    #[test]
    fn integrates_polynomial_without_constant() {
        let tensor = Tensor::new(vec![3.0, -2.0, 5.0, 7.0], vec![1, 4]).unwrap();
        let result = polyint_builtin(Value::Tensor(tensor), Vec::new()).expect("polyint");
        match result {
            Value::Tensor(t) => {
                assert_eq!(t.shape, vec![1, 5]);
                let expected = [0.75, -2.0 / 3.0, 2.5, 7.0, 0.0];
                assert!(t
                    .data
                    .iter()
                    .zip(expected.iter())
                    .all(|(lhs, rhs)| (lhs - rhs).abs() < 1e-12));
            }
            other => panic!("expected tensor result, got {other:?}"),
        }
    }

    #[test]
    fn integrates_with_constant() {
        let tensor = Tensor::new(vec![4.0, 0.0, -8.0], vec![1, 3]).unwrap();
        let args = vec![Value::Num(3.0)];
        let result = polyint_builtin(Value::Tensor(tensor), args).expect("polyint");
        match result {
            Value::Tensor(t) => {
                assert_eq!(t.shape, vec![1, 4]);
                let expected = [4.0 / 3.0, 0.0, -8.0, 3.0];
                assert!(t
                    .data
                    .iter()
                    .zip(expected.iter())
                    .all(|(lhs, rhs)| (lhs - rhs).abs() < 1e-12));
            }
            other => panic!("expected tensor result, got {other:?}"),
        }
    }

    #[test]
    fn integrates_scalar_value() {
        let result = polyint_builtin(Value::Num(5.0), Vec::new()).expect("polyint");
        match result {
            Value::Tensor(t) => {
                assert_eq!(t.shape, vec![1, 2]);
                assert!((t.data[0] - 5.0).abs() < 1e-12);
                assert!(t.data[1].abs() < 1e-12);
            }
            other => panic!("expected tensor result, got {other:?}"),
        }
    }

    #[test]
    fn integrates_logical_coefficients() {
        let logical = LogicalArray::new(vec![1, 0, 1], vec![1, 3]).unwrap();
        let result =
            polyint_builtin(Value::LogicalArray(logical), Vec::new()).expect("polyint logical");
        match result {
            Value::Tensor(t) => {
                assert_eq!(t.shape, vec![1, 4]);
                let expected = [1.0 / 3.0, 0.0, 1.0, 0.0];
                assert!(t
                    .data
                    .iter()
                    .zip(expected.iter())
                    .all(|(lhs, rhs)| (lhs - rhs).abs() < 1e-12));
            }
            other => panic!("expected tensor result, got {other:?}"),
        }
    }

    #[test]
    fn preserves_column_vector_orientation() {
        let tensor = Tensor::new(vec![2.0, 0.0, -6.0], vec![3, 1]).unwrap();
        let result = polyint_builtin(Value::Tensor(tensor), Vec::new()).expect("polyint");
        match result {
            Value::Tensor(t) => {
                assert_eq!(t.shape, vec![4, 1]);
                let expected = [2.0 / 3.0, 0.0, -6.0, 0.0];
                assert!(t
                    .data
                    .iter()
                    .zip(expected.iter())
                    .all(|(lhs, rhs)| (lhs - rhs).abs() < 1e-12));
            }
            other => panic!("expected column tensor, got {other:?}"),
        }
    }

    #[test]
    fn integrates_complex_coefficients() {
        let tensor =
            ComplexTensor::new(vec![(1.0, 2.0), (-3.0, 0.0), (0.0, 4.0)], vec![1, 3]).unwrap();
        let args = vec![Value::Complex(0.0, -1.0)];
        let result = polyint_builtin(Value::ComplexTensor(tensor), args).expect("polyint");
        match result {
            Value::ComplexTensor(t) => {
                assert_eq!(t.shape, vec![1, 4]);
                let expected = [(1.0 / 3.0, 2.0 / 3.0), (-1.5, 0.0), (0.0, 4.0), (0.0, -1.0)];
                assert!(t
                    .data
                    .iter()
                    .zip(expected.iter())
                    .all(|((lre, lim), (rre, rim))| {
                        (lre - rre).abs() < 1e-12 && (lim - rim).abs() < 1e-12
                    }));
            }
            other => panic!("expected complex tensor, got {other:?}"),
        }
    }

    #[test]
    fn rejects_matrix_coefficients() {
        let tensor = Tensor::new(vec![1.0, 2.0, 3.0, 4.0], vec![2, 2]).unwrap();
        let err = polyint_builtin(Value::Tensor(tensor), Vec::new()).expect_err("expected error");
        assert!(err.contains("vector"));
    }

    #[test]
    fn rejects_non_scalar_constant() {
        let coeffs = Tensor::new(vec![1.0, -4.0, 6.0], vec![1, 3]).unwrap();
        let constant = Tensor::new(vec![1.0, 2.0], vec![1, 2]).unwrap();
        let err = polyint_builtin(Value::Tensor(coeffs), vec![Value::Tensor(constant)])
            .expect_err("expected error");
        assert!(err.contains("constant of integration must be a scalar"));
    }

    #[test]
    fn rejects_excess_arguments() {
        let tensor = Tensor::new(vec![1.0, 0.0], vec![1, 2]).unwrap();
        let err = polyint_builtin(
            Value::Tensor(tensor),
            vec![Value::Num(1.0), Value::Num(2.0)],
        )
        .expect_err("expected error");
        assert!(err.contains("too many input arguments"));
    }

    #[test]
    fn handles_empty_input_as_zero_polynomial() {
        let tensor = Tensor::new(vec![], vec![1, 0]).unwrap();
        let result = polyint_builtin(Value::Tensor(tensor), Vec::new()).expect("polyint");
        match result {
            Value::Num(v) => assert!(v.abs() < 1e-12),
            Value::Tensor(t) => {
                // Allow tensor fallback if scalar auto-boxing changes in future
                assert_eq!(t.data.len(), 1);
                assert!(t.data[0].abs() < 1e-12);
            }
            other => panic!("expected numeric result, got {other:?}"),
        }
    }

    #[test]
    fn empty_input_with_constant() {
        let tensor = Tensor::new(vec![], vec![1, 0]).unwrap();
        let result = polyint_builtin(Value::Tensor(tensor), vec![Value::Complex(1.5, -2.0)])
            .expect("polyint");
        match result {
            Value::ComplexTensor(t) => {
                assert_eq!(t.shape, vec![1, 1]);
                assert_eq!(t.data.len(), 1);
                let (re, im) = t.data[0];
                assert!((re - 1.5).abs() < 1e-12);
                assert!((im + 2.0).abs() < 1e-12);
            }
            other => panic!("expected complex tensor result, got {other:?}"),
        }
    }

    #[test]
    fn polyint_gpu_roundtrip() {
        test_support::with_test_provider(|provider| {
            let tensor = Tensor::new(vec![1.0, -4.0, 6.0], vec![1, 3]).unwrap();
            let view = HostTensorView {
                data: &tensor.data,
                shape: &tensor.shape,
            };
            let handle = provider.upload(&view).expect("upload");
            let result = polyint_builtin(Value::GpuTensor(handle), Vec::new()).expect("polyint");
            match result {
                Value::GpuTensor(handle) => {
                    let gathered = test_support::gather(Value::GpuTensor(handle)).expect("gather");
                    assert_eq!(gathered.shape, vec![1, 4]);
                    let expected = [1.0 / 3.0, -2.0, 6.0, 0.0];
                    assert!(gathered
                        .data
                        .iter()
                        .zip(expected.iter())
                        .all(|(lhs, rhs)| (lhs - rhs).abs() < 1e-12));
                }
                other => panic!("expected GPU tensor result, got {other:?}"),
            }
        });
    }

    #[test]
    fn polyint_gpu_complex_constant_falls_back_to_host() {
        test_support::with_test_provider(|provider| {
            let tensor = Tensor::new(vec![1.0, 0.0], vec![1, 2]).unwrap();
            let view = HostTensorView {
                data: &tensor.data,
                shape: &tensor.shape,
            };
            let handle = provider.upload(&view).expect("upload");
            let result = polyint_builtin(Value::GpuTensor(handle), vec![Value::Complex(0.0, 2.0)])
                .expect("polyint");
            match result {
                Value::ComplexTensor(ct) => {
                    assert_eq!(ct.shape, vec![1, 3]);
                    let expected = [(0.5, 0.0), (0.0, 0.0), (0.0, 2.0)];
                    assert!(ct
                        .data
                        .iter()
                        .zip(expected.iter())
                        .all(|((lre, lim), (rre, rim))| {
                            (lre - rre).abs() < 1e-12 && (lim - rim).abs() < 1e-12
                        }));
                }
                other => panic!("expected complex tensor fall-back, got {other:?}"),
            }
        });
    }

    #[test]
    fn polyint_gpu_with_gpu_constant() {
        test_support::with_test_provider(|provider| {
            let coeffs = Tensor::new(vec![2.0, 0.0], vec![1, 2]).unwrap();
            let coeff_view = HostTensorView {
                data: &coeffs.data,
                shape: &coeffs.shape,
            };
            let coeff_handle = provider.upload(&coeff_view).expect("upload coeffs");
            let constant = Tensor::new(vec![3.0], vec![1, 1]).unwrap();
            let constant_view = HostTensorView {
                data: &constant.data,
                shape: &constant.shape,
            };
            let constant_handle = provider.upload(&constant_view).expect("upload constant");
            let result = polyint_builtin(
                Value::GpuTensor(coeff_handle),
                vec![Value::GpuTensor(constant_handle)],
            )
            .expect("polyint");
            match result {
                Value::GpuTensor(handle) => {
                    let gathered =
                        test_support::gather(Value::GpuTensor(handle)).expect("gather result");
                    assert_eq!(gathered.shape, vec![1, 3]);
                    let expected = [1.0, 0.0, 3.0];
                    assert!(gathered
                        .data
                        .iter()
                        .zip(expected.iter())
                        .all(|(lhs, rhs)| (lhs - rhs).abs() < 1e-12));
                }
                other => panic!("expected gpu tensor result, got {other:?}"),
            }
        });
    }

    #[test]
    #[cfg(feature = "wgpu")]
    fn polyint_wgpu_matches_cpu() {
        let _ = runmat_accelerate::backend::wgpu::provider::register_wgpu_provider(
            runmat_accelerate::backend::wgpu::provider::WgpuProviderOptions::default(),
        );
        let provider = runmat_accelerate_api::provider().expect("wgpu provider");
        let tensor = Tensor::new(vec![3.0, -2.0, 5.0, 7.0], vec![1, 4]).unwrap();
        let view = HostTensorView {
            data: &tensor.data,
            shape: &tensor.shape,
        };
        let handle = provider.upload(&view).expect("upload");
        let gpu_value = polyint_builtin(Value::GpuTensor(handle), Vec::new()).expect("polyint gpu");
        let gathered = test_support::gather(gpu_value).expect("gather");
        let cpu_value =
            polyint_builtin(Value::Tensor(tensor.clone()), Vec::new()).expect("polyint cpu");
        let expected = match cpu_value {
            Value::Tensor(t) => t,
            Value::Num(n) => Tensor::new(vec![n], vec![1, 1]).unwrap(),
            other => panic!("unexpected cpu result {other:?}"),
        };
        assert_eq!(gathered.shape, expected.shape);
        let tol = match provider.precision() {
            runmat_accelerate_api::ProviderPrecision::F64 => 1e-12,
            runmat_accelerate_api::ProviderPrecision::F32 => 1e-5,
        };
        gathered
            .data
            .iter()
            .zip(expected.data.iter())
            .for_each(|(lhs, rhs)| assert!((lhs - rhs).abs() < tol));
    }

    #[test]
    #[cfg(feature = "doc_export")]
    fn doc_examples_present() {
        let blocks = test_support::doc_examples(DOC_MD);
        assert!(!blocks.is_empty());
    }
}