cubecl_linalg/matmul/kernels/error.rs

use cubecl_core::{CubeCount, ir::Elem};
use std::fmt::Debug;

use crate::matmul::components::{InvalidConfigError, MatmulSize};

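/// Top-level error returned when a matmul kernel cannot be launched.
///
/// Each variant wraps a more specific error: a missing hardware/runtime
/// feature, an ill-defined problem, an invalid configuration, or a feature
/// that is not implemented yet.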
pub enum MatmulLaunchError {
    Unavailable(MatmulAvailabilityError),
    InvalidProblem(MatmulInvalidProblem),
    InvalidConfig(InvalidConfigError),
    Unimplemented(MatmulUnimplementedError),
}

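/// Error returned when a feature required by the selected matmul strategy is
/// not available on the current runtime or hardware (plane size, CMMA
/// instruction shape, pipeline/barrier/TMA support, etc.).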
pub enum MatmulAvailabilityError {
    PlaneDimUnknown,
    CubeCountTooBig(CubeCount),
    PlaneDimUnsupported {
        plane_dim: u32,
    },
    PlaneOperationsUnavailable,
    TypesUnavailable {
        input: Elem,
        output: Elem,
    },
    CmmaInstructionUnavailable {
        input: Elem,
        output: Elem,
        shape: Option<MatmulSize>,
    },
    PipelineUnavailable,
    BarrierUnavailable,
    TmaUnavailable,
    DynamicLineSizeUnavailable,
}

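/// Error returned when the matmul problem itself cannot be handled by the
/// selected configuration, either because a dimension exceeds the supported
/// maximum or because a tensor cannot be read/written with the chosen line
/// size.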
pub enum MatmulInvalidProblem {
    ExceededMSize { m: u32, max_m: u32 },
    ExceededNSize { n: u32, max_n: u32 },
    ExceededBatchSize { b: u32, max_b: u32 },
    InvalidLineSizeLhs { size: u32, line_size: u8 },
    InvalidLineSizeRhs { size: u32, line_size: u8 },
    InvalidLineSizeOut { size: u32, line_size: u8 },
}

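// The `From` impls below let callers bubble specific errors up as a
// `MatmulLaunchError` with the `?` operator. A minimal sketch (the
// `check_plane_dim` helper is hypothetical, not part of this crate):
//
// fn check_plane_dim(plane_dim: u32) -> Result<(), MatmulAvailabilityError> {
//     match plane_dim {
//         32 | 64 => Ok(()),
//         _ => Err(MatmulAvailabilityError::PlaneDimUnsupported { plane_dim }),
//     }
// }
//
// fn launch(plane_dim: u32) -> Result<(), MatmulLaunchError> {
//     check_plane_dim(plane_dim)?; // converted via `From<MatmulAvailabilityError>`
//     Ok(())
// }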
impl From<MatmulInvalidProblem> for MatmulLaunchError {
    fn from(value: MatmulInvalidProblem) -> Self {
        Self::InvalidProblem(value)
    }
}

impl From<MatmulAvailabilityError> for MatmulLaunchError {
    fn from(value: MatmulAvailabilityError) -> Self {
        Self::Unavailable(value)
    }
}

impl From<InvalidConfigError> for MatmulLaunchError {
    fn from(value: InvalidConfigError) -> Self {
        Self::InvalidConfig(value)
    }
}

impl From<MatmulUnimplementedError> for MatmulLaunchError {
    fn from(value: MatmulUnimplementedError) -> Self {
        Self::Unimplemented(value)
    }
}

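// `Debug` is implemented by hand so that launch failures print a full,
// human-readable explanation instead of the derived variant dump.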
impl Debug for MatmulLaunchError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MatmulLaunchError::Unavailable(err) => {
                writeln!(
                    f,
                    "Unable to launch matmul because a required feature is unavailable: {:?}",
                    err
                )
            }
            MatmulLaunchError::InvalidProblem(err) => {
                writeln!(
                    f,
                    "Unable to launch matmul because the problem isn't correctly defined: {:?}",
                    err
                )
            }
            MatmulLaunchError::InvalidConfig(err) => {
                writeln!(
                    f,
                    "Unable to launch matmul because the config is invalid: {:?}",
                    err.to_string()
                )
            }
            MatmulLaunchError::Unimplemented(err) => {
                writeln!(
                    f,
                    "Unable to launch matmul because the feature is not ready: {:?}",
                    err
                )
            }
        }
    }
}

impl Debug for MatmulInvalidProblem {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MatmulInvalidProblem::ExceededMSize { m, max_m } => write!(
                f,
                "Problem has m={} but these configs can only have m<={}",
                m, max_m
            ),
            MatmulInvalidProblem::ExceededNSize { n, max_n } => write!(
                f,
                "Problem has n={} but these configs can only have n<={}",
                n, max_n
            ),
            MatmulInvalidProblem::ExceededBatchSize { b, max_b } => write!(
                f,
                "Problem has {} batches but these configs can only have batches<={}",
                b, max_b
            ),
            MatmulInvalidProblem::InvalidLineSizeLhs { size, line_size } => write!(
                f,
                "The lhs tensor can't be read with line size={line_size} and dimension={size}"
            ),
            MatmulInvalidProblem::InvalidLineSizeRhs { size, line_size } => write!(
                f,
                "The rhs tensor can't be read with line size={line_size} and dimension={size}"
            ),
            MatmulInvalidProblem::InvalidLineSizeOut { size, line_size } => write!(
                f,
                "The out tensor can't be written with line size={line_size} and dimension={size}"
            ),
        }
    }
}

impl Debug for MatmulAvailabilityError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MatmulAvailabilityError::PlaneOperationsUnavailable => {
                writeln!(f, "Plane operations not supported.")
            }
            MatmulAvailabilityError::CubeCountTooBig(count) => {
                writeln!(f, "Cube count too big {count:?}")
            }
            MatmulAvailabilityError::PlaneDimUnknown => {
                writeln!(f, "Plane dimension unknown.")
            }
            MatmulAvailabilityError::PlaneDimUnsupported { plane_dim } => {
                writeln!(
                    f,
                    "Plane dimension unsupported: {plane_dim}. Only 32 & 64 are supported."
                )
            }
            MatmulAvailabilityError::TypesUnavailable { input, output } => {
                writeln!(
                    f,
                    "Types input={:?} and/or output={:?} not supported.",
                    input, output
                )
            }
            MatmulAvailabilityError::CmmaInstructionUnavailable {
                input,
                output,
                shape: Some(shape),
            } => writeln!(
                f,
                "Cmma on inputs {:?} and outputs {:?} with shape m={:?}, n={:?}, k={:?} not supported.",
                input, output, shape.m, shape.n, shape.k
            ),
            MatmulAvailabilityError::CmmaInstructionUnavailable {
                input,
                output,
                shape: None,
            } => writeln!(f, "Cmma on inputs {:?} and outputs {:?} not supported.", input, output),
            MatmulAvailabilityError::PipelineUnavailable => {
                writeln!(f, "Pipeline is not available.")
            }
            MatmulAvailabilityError::BarrierUnavailable => {
                writeln!(f, "Barrier is not available.")
            }
            MatmulAvailabilityError::TmaUnavailable => {
                writeln!(f, "TMA is not available.")
            }
            MatmulAvailabilityError::DynamicLineSizeUnavailable => {
                writeln!(f, "Dynamic line size is not available.")
            }
        }
    }
}

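/// Error returned when the requested matmul feature is recognized but not
/// implemented yet.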
pub enum MatmulUnimplementedError {
    Quantization,
}

impl Debug for MatmulUnimplementedError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            MatmulUnimplementedError::Quantization => {
                writeln!(f, "Quantization")
            }
        }
    }
}