// rafx_plugins/assets/anim/blender_anim_importer.rs

use crate::assets::anim::{
    AnimAssetData, AnimClip, AnimInterpolationMode, Bone, BoneChannelGroup, BoneChannelQuat,
    BoneChannelVec3, Skeleton,
};
use crate::schema::{
    BlenderAnimAssetAccessor, BlenderAnimAssetRecord, BlenderAnimImportedDataRecord,
};
use fnv::FnvHashMap;
use hydrate_base::AssetId;
use hydrate_data::{Record, RecordAccessor};
use hydrate_pipeline::{
    AssetPlugin, AssetPluginSetupContext, Builder, BuilderContext, ImportContext, Importer,
    JobInput, JobOutput, JobProcessor, PipelineResult, RunContext, ScanContext,
};
use rafx::api::{RafxError, RafxResult};
use serde::{Deserialize, Serialize};
use std::convert::TryInto;
use type_uuid::*;

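/// Maps an interpolation mode string from the exported Blender JSON to an
/// `AnimInterpolationMode`. Only "linear" is recognized; anything else is
/// reported as an error. Currently unused (the call sites below are commented
/// out), hence the `#[allow(dead_code)]`.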
#[allow(dead_code)]
fn parse_interpolation_mode(mode: &str) -> RafxResult<AnimInterpolationMode> {
    Ok(match mode.to_lowercase().as_str() {
        "linear" => AnimInterpolationMode::Linear,
        _ => {
            return Err(RafxError::StringError(format!(
                "Cannot parse AnimInterpolationMode {}",
                mode
            )))
        }
    })
}

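/// Per-bone data as exported from Blender: the bone's name, the name of its
/// parent (empty for a root bone), and its position, rotation, and scale.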
#[derive(Serialize, Deserialize, Debug)]
pub struct SkeletonBoneJsonData {
    name: String,
    parent: String,
    position: [f32; 3],
    rotation: [f32; 4],
    scale: [f32; 3],
}

#[derive(Serialize, Deserialize, Debug)]
pub struct SkeletonJsonData {
    bones: Vec<SkeletonBoneJsonData>,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ActionChannelInterpolationJsonData {
    frame: u32,
    mode: String,
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ActionChannelVec3JsonData {
    min_frame: u32,
    max_frame: u32,
    interpolation: Vec<ActionChannelInterpolationJsonData>,
    values: Vec<[f32; 3]>,
}

impl TryInto<BoneChannelVec3> for &ActionChannelVec3JsonData {
    type Error = RafxError;

    fn try_into(self) -> Result<BoneChannelVec3, Self::Error> {
        //parse_interpolation_mode(self.interpolation)

        let values = self.values.iter().map(|&x| x.into()).collect();
        Ok(BoneChannelVec3 {
            min_frame: self.min_frame,
            max_frame: self.max_frame,
            values,
        })
    }
}

#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct ActionChannelVec4JsonData {
    min_frame: u32,
    max_frame: u32,
    interpolation: Vec<ActionChannelInterpolationJsonData>,
    values: Vec<[f32; 4]>,
}

impl TryInto<BoneChannelQuat> for &ActionChannelVec4JsonData {
    type Error = RafxError;

    fn try_into(self) -> Result<BoneChannelQuat, Self::Error> {
        //parse_interpolation_mode(self.interpolation)

        let values = self.values.iter().map(|&x| x.into()).collect();
        Ok(BoneChannelQuat {
            min_frame: self.min_frame,
            max_frame: self.max_frame,
            values,
        })
    }
}

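/// Per-bone channel data for a single action: optional position, rotation,
/// and scale channels, keyed by the bone's name.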
#[derive(Serialize, Deserialize, Debug)]
pub struct ActionBoneChannelGroupJsonData {
    bone_name: String,
    position: Option<ActionChannelVec3JsonData>,
    rotation: Option<ActionChannelVec4JsonData>,
    scale: Option<ActionChannelVec3JsonData>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct ActionJsonData {
    name: String,
    bone_channel_groups: Vec<ActionBoneChannelGroupJsonData>,
}

#[derive(Serialize, Deserialize, Debug)]
pub struct AnimJsonData {
    skeleton: SkeletonJsonData,
    actions: Vec<ActionJsonData>,
}

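/// Appends `bone_data` to `bones` if it is a root bone or if its parent has
/// already been added, recording its index in `bone_index_lookup`. A bone whose
/// parent has not been added yet is skipped so the caller can retry it on a
/// later pass. `chain_depth` is the parent's depth plus one (0 for root bones).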
fn try_add_bone(
    bone_data: &SkeletonBoneJsonData,
    bones: &mut Vec<Bone>,
    bone_index_lookup: &mut FnvHashMap<String, i16>,
) {
    let parent_index = bone_index_lookup.get(&bone_data.parent).copied();

    if !bone_data.parent.is_empty() && parent_index.is_none() {
        // Has parent, but parent wasn't added yet
        return;
    }

    let chain_depth = parent_index
        .map(|x| &bones[x as usize])
        .map(|x| x.chain_depth + 1)
        .unwrap_or(0);

    let bone_index = bones.len() as i16;
    bones.push(Bone {
        name: bone_data.name.clone(),
        parent: parent_index.unwrap_or(-1),
        chain_depth,
        position_rel: bone_data.position.into(),
        rotation_rel: bone_data.rotation.into(),
    });
    bone_index_lookup.insert(bone_data.name.clone(), bone_index);
}

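/// Validates the skeleton data (non-empty, unique bone names; all referenced
/// parents exist) and builds a `Skeleton` whose bones are ordered so that every
/// parent appears before its children. Bones are added in repeated passes until
/// no further progress is made; any bones still missing at that point indicate
/// a cycle in the parent/child relationships.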
fn parse_skeleton(skeleton_data: &SkeletonJsonData) -> RafxResult<Skeleton> {
    let mut bone_data_index_lookup = FnvHashMap::default();
    for (i, bone_data) in skeleton_data.bones.iter().enumerate() {
        if bone_data.name.is_empty() {
            Err("bone has empty name")?;
        }

        let old = bone_data_index_lookup.insert(bone_data.name.clone(), i);
        if old.is_some() {
            Err(format!("multiple bones with name {} found", bone_data.name))?;
        }
    }

    for bone_data in &skeleton_data.bones {
        if !bone_data.parent.is_empty() {
            if !bone_data_index_lookup.contains_key(&bone_data.parent) {
                Err(format!(
                    "cannot find parent bone {} for child bone {}",
                    bone_data.parent, bone_data.name
                ))?;
            }
        }
    }

    // This will construct a list of bones sorted by ascending chain_depth. It assumes that all
    // parents exist, and that there are no duplicate names
    let mut bones = Vec::with_capacity(skeleton_data.bones.len());
    let mut bone_index_lookup = FnvHashMap::default();
    loop {
        let bone_count = bones.len();
        for bone_data in &skeleton_data.bones {
            if !bone_index_lookup.contains_key(&bone_data.name) {
                try_add_bone(bone_data, &mut bones, &mut bone_index_lookup);
            }
        }

        if bone_count == bones.len() {
            break;
        }
    }

    if bones.len() != skeleton_data.bones.len() {
        let mut missing_bones = vec![];
        for bone in &skeleton_data.bones {
            if !bone_index_lookup.contains_key(&bone.name) {
                missing_bones.push(bone.name.clone());
            }
        }

        Err(format!("The following bones could not be added, likely there is a cycle in parent/child relationships: {:?}", missing_bones))?;
    }

    Ok(Skeleton { bones })
}

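/// Converts a single action into an `AnimClip`, producing one `BoneChannelGroup`
/// per skeleton bone, in skeleton order. Bones without channel data in the
/// action get an empty, default group.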
fn parse_action(
    skeleton: &Skeleton,
    action: &ActionJsonData,
) -> RafxResult<AnimClip> {
    let mut bone_channel_groups_lookup = FnvHashMap::default();
    for (i, bone_channel_group) in action.bone_channel_groups.iter().enumerate() {
        let old = bone_channel_groups_lookup.insert(&bone_channel_group.bone_name, i);
        assert!(old.is_none());
    }

    let mut bone_channel_groups = Vec::with_capacity(skeleton.bones.len());
    for bone in &skeleton.bones {
        if let Some(channel_group_index) = bone_channel_groups_lookup.get(&bone.name) {
            //TODO: CLONE IS TEMPORARY
            let mut channel_group = BoneChannelGroup::default();

            let json_channel_group_data = &action.bone_channel_groups[*channel_group_index];
            if let Some(position) = &json_channel_group_data.position {
                channel_group.position = Some(position.try_into()?);
            }

            if let Some(rotation) = &json_channel_group_data.rotation {
                channel_group.rotation = Some(rotation.try_into()?);
            }

            if let Some(scale) = &json_channel_group_data.scale {
                channel_group.scale = Some(scale.try_into()?);
            }

            bone_channel_groups.push(channel_group);
        } else {
            bone_channel_groups.push(BoneChannelGroup::default());
        }
    }

    Ok(AnimClip {
        name: action.name.clone(),
        bone_channel_groups,
    })
}

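/// Importer for `.blender_anim` files. The file contents are JSON matching
/// `AnimJsonData`; the importer checks that the JSON parses and stores the raw
/// string as import data for the build job to process.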
#[derive(TypeUuid, Default)]
#[uuid = "238792bf-7078-4675-9f4d-cf53305806c6"]
pub struct BlenderAnimImporter;

impl Importer for BlenderAnimImporter {
    fn supported_file_extensions(&self) -> &[&'static str] {
        &["blender_anim"]
    }

    fn scan_file(
        &self,
        context: ScanContext,
    ) -> PipelineResult<()> {
        context.add_default_importable::<BlenderAnimAssetRecord>()?;
        Ok(())
    }

    fn import_file(
        &self,
        context: ImportContext,
    ) -> PipelineResult<()> {
        //
        // Read the file
        //
        let json_str = std::fs::read_to_string(context.path)?;
        // We don't use this immediately, but make sure it's at least well formed
        let _anim_data: AnimJsonData = serde_json::from_str(&json_str)?;

        //
        // Create the default asset
        //
        let default_asset = BlenderAnimAssetRecord::new_builder(context.schema_set);

        //
        // Create import data
        //
        let import_data = BlenderAnimImportedDataRecord::new_builder(context.schema_set);
        import_data.json_string().set(json_str)?;

        //
        // Return the created objects
        //
        context
            .add_default_importable(default_asset.into_inner()?, Some(import_data.into_inner()?));
        Ok(())
    }
}

#[derive(Hash, Serialize, Deserialize)]
pub struct BlenderAnimJobInput {
    pub asset_id: AssetId,
}
impl JobInput for BlenderAnimJobInput {}

#[derive(Serialize, Deserialize)]
pub struct BlenderAnimJobOutput {}
impl JobOutput for BlenderAnimJobOutput {}

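/// Build job that parses the imported JSON string into a `Skeleton` and a set
/// of `AnimClip`s, then produces the final `AnimAssetData` artifact.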
#[derive(Default, TypeUuid)]
#[uuid = "e7ab8a6c-6d53-4c05-b3e3-eb286ff2042a"]
pub struct BlenderAnimJobProcessor;

impl JobProcessor for BlenderAnimJobProcessor {
    type InputT = BlenderAnimJobInput;
    type OutputT = BlenderAnimJobOutput;

    fn version(&self) -> u32 {
        1
    }

    fn run<'a>(
        &self,
        context: &'a RunContext<'a, Self::InputT>,
    ) -> PipelineResult<BlenderAnimJobOutput> {
        //
        // Read imported data
        //
        let imported_data =
            context.imported_data::<BlenderAnimImportedDataRecord>(context.input.asset_id)?;

        let json_str = imported_data.json_string().get()?;

        let anim_data: AnimJsonData = serde_json::from_str(&json_str)?;

        let skeleton = parse_skeleton(&anim_data.skeleton).map_err(|e| e.to_string())?;

        let mut clips = Vec::with_capacity(anim_data.actions.len());
        for action in &anim_data.actions {
            clips.push(parse_action(&skeleton, action).map_err(|e| e.to_string())?);
        }

        context
            .produce_default_artifact(context.input.asset_id, AnimAssetData { skeleton, clips })?;

        Ok(BlenderAnimJobOutput {})
    }
}

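/// Builder for blender anim assets; it simply enqueues a
/// `BlenderAnimJobProcessor` job for the asset.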
#[derive(TypeUuid, Default)]
#[uuid = "77a09407-3ec8-440d-bd01-408b84b4516c"]
pub struct BlenderAnimBuilder {}

impl Builder for BlenderAnimBuilder {
    fn asset_type(&self) -> &'static str {
        BlenderAnimAssetAccessor::schema_name()
    }

    fn start_jobs(
        &self,
        context: BuilderContext,
    ) -> PipelineResult<()> {
        //Future: Might produce jobs per-platform
        context.enqueue_job::<BlenderAnimJobProcessor>(
            context.data_set,
            context.schema_set,
            context.job_api,
            BlenderAnimJobInput {
                asset_id: context.asset_id,
            },
        )?;
        Ok(())
    }
}

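/// Asset plugin that registers the importer, builder, and job processor for
/// blender anim assets with the pipeline.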
pub struct BlenderAnimAssetPlugin;

impl AssetPlugin for BlenderAnimAssetPlugin {
    fn setup(context: AssetPluginSetupContext) {
        context
            .importer_registry
            .register_handler::<BlenderAnimImporter>();
        context
            .builder_registry
            .register_handler::<BlenderAnimBuilder>();
        context
            .job_processor_registry
            .register_job_processor::<BlenderAnimJobProcessor>();
    }
}
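
// A minimal sketch of a test exercising `parse_skeleton`, assuming the JSON
// field names implied by the serde derives above and that `Bone::chain_depth`
// is an ordinary integer field (it is read that way in `try_add_bone`). Bones
// may appear in any order in the JSON; the resulting skeleton lists parents
// before their children.
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn parse_skeleton_orders_parents_before_children() {
        let json = r#"{
            "bones": [
                { "name": "hand", "parent": "arm",  "position": [0.0, 1.0, 0.0], "rotation": [0.0, 0.0, 0.0, 1.0], "scale": [1.0, 1.0, 1.0] },
                { "name": "root", "parent": "",     "position": [0.0, 0.0, 0.0], "rotation": [0.0, 0.0, 0.0, 1.0], "scale": [1.0, 1.0, 1.0] },
                { "name": "arm",  "parent": "root", "position": [0.0, 1.0, 0.0], "rotation": [0.0, 0.0, 0.0, 1.0], "scale": [1.0, 1.0, 1.0] }
            ]
        }"#;

        let skeleton_data: SkeletonJsonData = serde_json::from_str(json).unwrap();
        let skeleton = parse_skeleton(&skeleton_data).unwrap();

        // "hand" is skipped on the first pass (its parent "arm" is not added yet)
        // and picked up on the second pass, so the final order is root, arm, hand.
        let names: Vec<_> = skeleton.bones.iter().map(|b| b.name.as_str()).collect();
        assert_eq!(names, vec!["root", "arm", "hand"]);
        assert_eq!(skeleton.bones[0].chain_depth, 0);
        assert_eq!(skeleton.bones[2].chain_depth, 2);
    }
}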