1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
use std::{
    ffi::OsStr,
    fs::{self, DirEntry, File, OpenOptions},
    io::{BufReader, Read},
    sync::Arc,
};

use bundlr_sdk::{tags::Tag, Bundlr, Ed25519Signer as SolanaSigner};
use data_encoding::HEXLOWER;
use glob::glob;
use regex::{Regex, RegexBuilder};
use ring::digest::{Context, SHA256};
use serde::Serialize;
use serde_json;

use crate::{common::*, validate::format::Metadata};

/// Arguments for uploading a batch of asset files to Bundlr.
pub struct UploadDataArgs<'a> {
    /// Shared Bundlr client used to perform the uploads.
    pub bundlr_client: Arc<Bundlr<SolanaSigner>>,
    /// Directory containing the asset files.
    pub assets_dir: &'a Path,
    /// File-extension glob used to select which files to upload.
    pub extension_glob: &'a str,
    /// Tags to attach to each upload.
    pub tags: Vec<Tag>,
    /// Kind of data being uploaded (image, metadata, or animation).
    pub data_type: DataType,
}

/// Kind of asset data handled by the uploader.
#[derive(Debug, Clone)]
pub enum DataType {
    /// Image file (jpg/jpeg/gif/png, per the patterns in `get_asset_pairs`).
    Image,
    /// JSON metadata file.
    Metadata,
    /// Animation file (mp4/mov/webm, per the patterns in `get_asset_pairs`).
    Animation,
}

/// Local file paths and content hashes for one asset's related files:
/// the metadata JSON, the image, and an optional animation.
#[derive(Debug, Clone, Serialize)]
pub struct AssetPair {
    /// Asset name, taken from the metadata file.
    pub name: String,
    /// Path to the metadata JSON file.
    pub metadata: String,
    /// SHA-256 digest (lowercase hex) of the metadata file contents.
    pub metadata_hash: String,
    /// Path to the image file.
    pub image: String,
    /// SHA-256 digest (lowercase hex) of the image file contents.
    pub image_hash: String,
    /// Path to the animation file, if one exists for this asset.
    pub animation: Option<String>,
    /// SHA-256 digest (lowercase hex) of the animation file, if present.
    pub animation_hash: Option<String>,
}

impl AssetPair {
    pub fn into_cache_item(self) -> CacheItem {
        CacheItem {
            name: self.name,
            image_hash: self.image_hash,
            image_link: String::new(),
            metadata_hash: self.metadata_hash,
            metadata_link: String::new(),
            on_chain: false,
            animation_hash: self.animation_hash,
            animation_link: self.animation,
        }
    }
}

/// Looks up the cache item for an asset file.
///
/// The asset id is the file stem, except for the special "collection" file,
/// which is stored in the cache under the sentinel id "-1". Returns the
/// resolved asset id together with a reference to the cached item, or an
/// error if no item exists for that id.
pub fn get_cache_item<'a>(path: &Path, cache: &'a mut Cache) -> Result<(String, &'a CacheItem)> {
    let file_stem = String::from(
        path.file_stem()
            .and_then(OsStr::to_str)
            // fixed: message previously read "Failed to get convert path file ext"
            .expect("Failed to convert path file stem to valid unicode."),
    );

    // id of the asset (to be used to update the cache link)
    let asset_id = if file_stem == "collection" {
        String::from("-1")
    } else {
        file_stem
    };

    let cache_item: &CacheItem = cache
        .items
        .get(&asset_id)
        .ok_or_else(|| anyhow!("Failed to get config item at index '{}'", asset_id))?;

    Ok((asset_id, cache_item))
}

/// Returns the combined size in bytes of every `*.{extension}` file
/// directly inside `assets_dir`.
pub fn get_data_size(assets_dir: &Path, extension: &str) -> Result<u64> {
    let pattern = assets_dir
        .join(format!("*.{extension}"))
        .to_str()
        .expect("Failed to convert asset directory path from unicode.")
        .to_string();

    // sum the on-disk size of each matched file, propagating glob and
    // metadata errors as they occur
    glob(&pattern)?.try_fold(0u64, |total, entry| {
        let size = fs::metadata(entry?)?.len();
        Ok(total + size)
    })
}

/// Lists the relevant asset files in `assets_dir`.
///
/// Keeps only regular files whose file stem is entirely numeric (an asset
/// index) or — when `include_collection` is true — equal to "collection".
/// Everything else (directories, other names) is filtered out.
pub fn list_files(assets_dir: &str, include_collection: bool) -> Result<Vec<DirEntry>> {
    let entries = fs::read_dir(assets_dir).map_err(|_| anyhow!("Failed to read assets directory"))?;

    let mut files = Vec::new();

    for entry in entries.flatten() {
        let is_file = entry
            .metadata()
            .expect("Failed to retrieve metadata from file")
            .is_file();

        let path = entry.path();
        let stem = path
            .file_stem()
            .unwrap_or_default()
            .to_str()
            .expect("Failed to convert file name to valid unicode.");

        let is_collection = include_collection && stem == "collection";
        let is_numeric = stem.chars().all(|c| c.is_ascii_digit());

        if is_file && (is_numeric || is_collection) {
            files.push(entry);
        }
    }

    Ok(files)
}

pub fn get_asset_pairs(assets_dir: &str) -> Result<HashMap<isize, AssetPair>> {
    // filters out directories and hidden files
    let filtered_files = list_files(assets_dir, true)?;

    let paths = filtered_files
        .into_iter()
        .map(|entry| {
            let file_name_as_string =
                String::from(entry.path().file_name().unwrap().to_str().unwrap());
            file_name_as_string
        })
        .collect::<Vec<String>>();

    let mut asset_pairs: HashMap<isize, AssetPair> = HashMap::new();

    let paths_ref = &paths;

    let animation_exists_regex =
        Regex::new("^(.+)\\.((mp4)|(mov)|(webm))$").expect("Failed to create regex.");

    // since there doesn't have to be video for each image/json pair, need to get rid of
    // invalid file names before entering metadata filename loop
    for x in paths_ref {
        if let Some(captures) = animation_exists_regex.captures(x) {
            if &captures[1] != "collection" && captures[1].parse::<usize>().is_err() {
                let error = anyhow!("Couldn't parse filename '{}' to a valid index number.", x);
                error!("{:?}", error);
                return Err(error);
            }
        }
    }

    let metadata_filenames = paths_ref
        .clone()
        .into_iter()
        .filter(|p| p.to_lowercase().ends_with(".json"))
        .collect::<Vec<String>>();

    ensure_sequential_files(metadata_filenames.clone())?;

    for metadata_filename in metadata_filenames {
        let i = metadata_filename.split('.').next().unwrap();
        let is_collection_index = i == "collection";

        let index: isize = if is_collection_index {
            -1
        } else if let Ok(index) = i.parse::<isize>() {
            index
        } else {
            let error = anyhow!(
                "Couldn't parse filename '{}' to a valid index number.",
                metadata_filename
            );
            error!("{:?}", error);
            return Err(error);
        };

        let img_pattern = format!("^{}\\.((jpg)|(jpeg)|(gif)|(png))$", i);

        let img_regex = RegexBuilder::new(&img_pattern)
            .case_insensitive(true)
            .build()
            .expect("Failed to create regex.");

        let img_filenames = paths_ref
            .clone()
            .into_iter()
            .filter(|p| img_regex.is_match(p))
            .collect::<Vec<String>>();

        let img_filename = if img_filenames.is_empty() {
            let error = if is_collection_index {
                anyhow!("Couldn't find the collection image filename.")
            } else {
                anyhow!(
                    "Couldn't find an image filename at index {}.",
                    i.parse::<isize>().unwrap()
                )
            };
            error!("{:?}", error);
            return Err(error);
        } else {
            &img_filenames[0]
        };

        // need a similar check for animation as above, this one checking if there is animation
        // on specific index

        let animation_pattern = format!("^{}\\.((mp4)|(mov)|(webm))$", i);
        let animation_regex = RegexBuilder::new(&animation_pattern)
            .case_insensitive(true)
            .build()
            .expect("Failed to create regex.");

        let animation_filenames = paths_ref
            .clone()
            .into_iter()
            .filter(|p| animation_regex.is_match(p))
            .collect::<Vec<String>>();

        let metadata_filepath = Path::new(assets_dir)
            .join(&metadata_filename)
            .to_str()
            .expect("Failed to convert metadata path from unicode.")
            .to_string();

        let m = File::open(&metadata_filepath)?;
        let metadata: Metadata = serde_json::from_reader(m).map_err(|e| {
            anyhow!("Failed to read metadata file '{metadata_filepath}' with error: {e}")
        })?;
        let name = metadata.name.clone();

        let img_filepath = Path::new(assets_dir)
            .join(img_filename)
            .to_str()
            .expect("Failed to convert image path from unicode.")
            .to_string();

        let animation_filename = if !animation_filenames.is_empty() {
            let animation_filepath = Path::new(assets_dir)
                .join(&animation_filenames[0])
                .to_str()
                .expect("Failed to convert image path from unicode.")
                .to_string();

            Some(animation_filepath)
        } else {
            None
        };

        let animation_hash = if let Some(animation_file) = &animation_filename {
            let encoded_filename = encode(animation_file)?;
            Some(encoded_filename)
        } else {
            None
        };

        let asset_pair = AssetPair {
            name,
            metadata: metadata_filepath.clone(),
            metadata_hash: encode(&metadata_filepath)?,
            image: img_filepath.clone(),
            image_hash: encode(&img_filepath)?,
            animation_hash,
            animation: animation_filename,
        };

        asset_pairs.insert(index, asset_pair);
    }

    Ok(asset_pairs)
}

/// Computes the SHA-256 digest of the file at `file` and returns it as a
/// lowercase hex string.
pub fn encode(file: &str) -> Result<String> {
    let mut reader = BufReader::new(File::open(file)?);
    let mut context = Context::new(&SHA256);
    let mut buffer = [0u8; 1024];

    // stream the file through the digest in 1 KiB chunks
    loop {
        match reader.read(&mut buffer)? {
            0 => break,
            n => context.update(&buffer[..n]),
        }
    }

    Ok(HEXLOWER.encode(context.finish().as_ref()))
}

/// Checks that the non-collection metadata file names form the exact
/// sequence 0, 1, 2, … with no gaps; errors on the first missing index or
/// on any name that does not parse to an index.
fn ensure_sequential_files(metadata_filenames: Vec<String>) -> Result<()> {
    let mut indices = Vec::with_capacity(metadata_filenames.len());

    for filename in metadata_filenames {
        // the collection file is exempt from the numeric sequence
        if filename.starts_with("collection") {
            continue;
        }

        let stem = filename.split('.').next().unwrap().to_string();
        let index = stem.parse::<usize>().map_err(|_| {
            anyhow!(
                "Couldn't parse metadata filename '{}' to a valid index number.",
                filename
            )
        })?;
        indices.push(index);
    }

    indices.sort_unstable();

    // after sorting, position i must hold index i for a gapless sequence
    for (expected, actual) in indices.into_iter().enumerate() {
        if expected != actual {
            return Err(anyhow!("Missing metadata file '{}.json'", expected));
        }
    }

    Ok(())
}

/// Reads a metadata file and returns its JSON with the local image (and,
/// when present, animation) URIs replaced by the uploaded links.
///
/// The top-level `image` and `animation_url` fields are overwritten, and
/// any `properties.files` entry whose uri matched the old values is
/// updated to the corresponding new link.
pub fn get_updated_metadata(
    metadata_file: &str,
    image_link: &str,
    animation_link: &Option<String>,
) -> Result<String> {
    let mut metadata: Metadata = {
        let m = OpenOptions::new()
            .read(true)
            .open(metadata_file)
            .map_err(|e| {
                anyhow!("Failed to read metadata file '{metadata_file}' with error: {e}")
            })?;
        serde_json::from_reader(&m)?
    };

    // rewrite files-array entries that still point at the local paths
    for file in &mut metadata.properties.files {
        if file.uri.eq(&metadata.image) {
            file.uri = image_link.to_string();
        }
        if let Some(ref animation_link) = animation_link {
            if let Some(ref animation_url) = metadata.animation_url {
                if file.uri.eq(animation_url) {
                    file.uri = animation_link.to_string();
                }
            }
        }
    }

    metadata.image = image_link.to_string();
    metadata.animation_url = animation_link.clone();

    // propagate serialization failures instead of panicking (was `.unwrap()`)
    Ok(serde_json::to_string(&metadata)?)
}