//! Texture2D conversion (unity_asset_decode/texture/converter.rs).

use super::decoders::TextureDecoder;
7use super::formats::TextureFormat;
8use super::types::Texture2D;
9use crate::error::{BinaryError, Result};
10use crate::object::UnityObject;
11use crate::unity_version::UnityVersion;
12use image::RgbaImage;
13use unity_asset_core::UnityValue;
14
/// Converts Unity `Texture2D` objects into the crate's [`Texture2D`] model
/// and decodes their pixel data to RGBA images via a [`TextureDecoder`].
pub struct Texture2DConverter {
    // Unity version the asset was serialized with. Currently not consulted
    // by any parsing path (hence the allow); presumably retained for
    // version-specific parsing later — TODO confirm.
    #[allow(dead_code)]
    version: UnityVersion,
    // Pixel-format decoder used by `decode_to_image`.
    decoder: TextureDecoder,
}
24
25impl Texture2DConverter {
26 pub fn new(version: UnityVersion) -> Self {
28 Self {
29 version,
30 decoder: TextureDecoder::new(),
31 }
32 }
33
34 pub fn from_unity_object(&self, obj: &UnityObject) -> Result<Texture2D> {
39 if let Ok(texture) = self.try_parse_typetree(obj) {
41 return Ok(texture);
42 }
43
44 self.parse_binary_data(obj.raw_data())
46 }
47
48 fn try_parse_typetree(&self, obj: &UnityObject) -> Result<Texture2D> {
49 fn as_i32(v: &UnityValue) -> Option<i32> {
50 v.as_i64().and_then(|n| i32::try_from(n).ok())
51 }
52 fn as_u32(v: &UnityValue) -> Option<u32> {
53 v.as_i64().and_then(|n| u32::try_from(n).ok())
54 }
55 fn as_u64(v: &UnityValue) -> Option<u64> {
56 v.as_i64().and_then(|n| u64::try_from(n).ok())
57 }
58 fn as_f32(v: &UnityValue) -> Option<f32> {
59 v.as_f64().map(|n| n as f32)
60 }
61
62 let props = obj.class.properties();
63
64 let name = props
65 .get("m_Name")
66 .and_then(|v| v.as_str())
67 .unwrap_or_default()
68 .to_string();
69 let width = props.get("m_Width").and_then(as_i32).unwrap_or(0);
70 let height = props.get("m_Height").and_then(as_i32).unwrap_or(0);
71 let complete_image_size = props
72 .get("m_CompleteImageSize")
73 .and_then(as_i32)
74 .unwrap_or(0);
75 let image_count = props.get("m_ImageCount").and_then(as_i32).unwrap_or(1);
76 let texture_dimension = props
77 .get("m_TextureDimension")
78 .and_then(as_i32)
79 .unwrap_or(2);
80 let light_map_format = props.get("m_LightmapFormat").and_then(as_i32).unwrap_or(0);
81 let color_space = props.get("m_ColorSpace").and_then(as_i32).unwrap_or(0);
82 let is_readable = props
83 .get("m_IsReadable")
84 .and_then(|v| v.as_bool())
85 .unwrap_or(false);
86 let mip_map = props
87 .get("m_MipMap")
88 .and_then(|v| v.as_bool())
89 .unwrap_or(false);
90 let mip_count = props.get("m_MipCount").and_then(as_i32).unwrap_or(1);
91 let format = props
92 .get("m_TextureFormat")
93 .and_then(as_i32)
94 .map(TextureFormat::from)
95 .unwrap_or(TextureFormat::Unknown);
96
97 let mut texture = Texture2D {
98 name,
99 width,
100 height,
101 complete_image_size,
102 format,
103 mip_map,
104 mip_count,
105 is_readable,
106 image_count,
107 texture_dimension,
108 light_map_format,
109 color_space,
110 ..Default::default()
111 };
112
113 if let Some(UnityValue::Object(settings)) = props.get("m_TextureSettings") {
114 texture.texture_settings.filter_mode =
115 settings.get("m_FilterMode").and_then(as_i32).unwrap_or(0);
116 texture.texture_settings.aniso = settings.get("m_Aniso").and_then(as_i32).unwrap_or(0);
117 texture.texture_settings.mip_bias =
118 settings.get("m_MipBias").and_then(as_f32).unwrap_or(0.0);
119 texture.texture_settings.wrap_u = settings.get("m_WrapU").and_then(as_i32).unwrap_or(0);
120 texture.texture_settings.wrap_v = settings.get("m_WrapV").and_then(as_i32).unwrap_or(0);
121 texture.texture_settings.wrap_w = settings.get("m_WrapW").and_then(as_i32).unwrap_or(0);
122 }
123
124 let image_data_value = props
126 .get("image_data")
127 .or_else(|| props.get("image data"))
128 .or_else(|| props.get("m_ImageData"));
129 if let Some(v) = image_data_value {
130 match v {
131 UnityValue::Bytes(b) => {
132 texture.data_size = b.len() as i32;
133 texture.image_data = b.clone();
134 }
135 UnityValue::Array(items) => {
136 let mut bytes = Vec::with_capacity(items.len());
137 for item in items {
138 let Some(n) = item.as_i64() else {
139 break;
140 };
141 let Ok(b) = u8::try_from(n) else {
142 break;
143 };
144 bytes.push(b);
145 }
146 texture.data_size = bytes.len() as i32;
147 texture.image_data = bytes;
148 }
149 _ => {}
150 }
151 }
152
153 if let Some(UnityValue::Object(stream_obj)) = props.get("m_StreamData") {
155 texture.stream_info.path = stream_obj
156 .get("path")
157 .and_then(|v| v.as_str())
158 .unwrap_or_default()
159 .to_string();
160 texture.stream_info.offset = stream_obj.get("offset").and_then(as_u64).unwrap_or(0);
161 texture.stream_info.size = stream_obj.get("size").and_then(as_u32).unwrap_or(0);
162 }
163
164 if texture.width <= 0 || texture.height <= 0 {
165 return Err(BinaryError::invalid_data(
166 "Texture2D typetree missing dimensions",
167 ));
168 }
169
170 if texture.image_data.is_empty() && !texture.is_streamed() {
171 return Err(BinaryError::invalid_data(
172 "Texture2D typetree did not contain image bytes or stream data",
173 ));
174 }
175
176 Ok(texture)
177 }
178
179 fn parse_binary_data(&self, data: &[u8]) -> Result<Texture2D> {
181 if data.is_empty() {
182 return Err(BinaryError::invalid_data("Empty texture data"));
183 }
184
185 let mut reader = crate::reader::BinaryReader::new(data, crate::reader::ByteOrder::Little);
186
187 #[allow(clippy::field_reassign_with_default)]
189 {
190 let mut texture = Texture2D::default();
191
192 texture.name = reader
194 .read_aligned_string()
195 .unwrap_or_else(|_| "UnknownTexture".to_string());
196
197 texture.width = reader.read_i32().unwrap_or(0);
199 texture.height = reader.read_i32().unwrap_or(0);
200 texture.complete_image_size = reader.read_i32().unwrap_or(0);
201
202 let format_val = reader.read_i32().unwrap_or(0);
203 texture.format = super::formats::TextureFormat::from(format_val);
204
205 texture.mip_map = reader.read_bool().unwrap_or(false);
207 texture.is_readable = reader.read_bool().unwrap_or(false);
208 let _ = reader.align();
209
210 texture.data_size = reader.read_i32().unwrap_or(0);
212 if texture.data_size > 0 && reader.remaining() >= texture.data_size as usize {
213 texture.image_data = reader
214 .read_bytes(texture.data_size as usize)
215 .unwrap_or_default();
216 let _ = reader.align();
217 }
218
219 if texture.image_data.is_empty() && reader.remaining() >= 8 + 4 {
221 let try_parse_streamdata = |reader: &mut crate::reader::BinaryReader<'_>| {
222 let pos = reader.position();
223
224 if let Ok(path) = reader.read_aligned_string() {
226 let looks_like_path = path.is_empty()
227 || path.contains("archive:/")
228 || path.contains('/')
229 || path.contains('\\')
230 || path.ends_with(".resS")
231 || path.ends_with(".resource");
232 if looks_like_path {
233 let offset = reader.read_u64().unwrap_or(0);
234 let size = reader.read_u32().unwrap_or(0);
235 let _ = reader.align();
236 if !path.is_empty() && size > 0 {
237 return Some((path, offset, size));
238 }
239 }
240 }
241
242 let _ = reader.set_position(pos);
243
244 let offset = reader.read_u64().ok()?;
246 let size = reader.read_u32().ok()?;
247 let path = reader.read_aligned_string().ok()?;
248 let looks_like_path = path.is_empty()
249 || path.contains("archive:/")
250 || path.contains('/')
251 || path.contains('\\')
252 || path.ends_with(".resS")
253 || path.ends_with(".resource");
254 if !path.is_empty() && looks_like_path && size > 0 {
255 return Some((path, offset, size));
256 }
257
258 None
259 };
260
261 if let Some((path, offset, size)) = try_parse_streamdata(&mut reader) {
262 texture.stream_info.path = path;
263 texture.stream_info.offset = offset;
264 texture.stream_info.size = size;
265 } else if reader.remaining() > 0 {
266 let remaining_data = reader.read_remaining();
268 texture.image_data = remaining_data.to_vec();
269 texture.data_size = texture.image_data.len() as i32;
270 }
271 } else if texture.image_data.is_empty() && reader.remaining() > 0 {
272 let remaining_data = reader.read_remaining();
274 texture.image_data = remaining_data.to_vec();
275 texture.data_size = texture.image_data.len() as i32;
276 }
277
278 Ok(texture)
279 }
280 }
281
282 pub fn decode_to_image(&self, texture: &Texture2D) -> Result<RgbaImage> {
286 self.decoder.decode(texture)
288 }
289}
290
291pub type Texture2DProcessor = Texture2DConverter;